From c8c354d1e669a1f58e321283d676e085019b3574 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Josef=20S=C3=B6ntgen?=
Date: Thu, 9 May 2019 17:05:27 +0200
Subject: [PATCH] fetchurl: add progress timeout handling

The component will now abort an ongoing download attempt if it stalls
for a given amount of time, the default is 10 seconds.

Fixes #3346.
---
 repos/libports/src/app/fetchurl/README       |  7 +++
 repos/libports/src/app/fetchurl/component.cc | 63 ++++++++++++++++++--
 2 files changed, 66 insertions(+), 4 deletions(-)

diff --git a/repos/libports/src/app/fetchurl/README b/repos/libports/src/app/fetchurl/README
index 10a7c9bfd6..c21ce1acab 100644
--- a/repos/libports/src/app/fetchurl/README
+++ b/repos/libports/src/app/fetchurl/README
@@ -3,6 +3,13 @@ A small frontend to the libcURL library.
 Configuration
 -------------
 
+The component will try to fetch the given resource. If this operation stalls
+for a given amount of time, the transfer will be aborted. The duration can be
+set by setting the 'progress_timeout' attribute in the '<config>' node. It is
+specified in milliseconds and the default value is 10 seconds.
+
+The following config snippets illustrate its usage.
+
 Load the socket plugin in to the fetchurl binary (print output to log):
 !
 !
diff --git a/repos/libports/src/app/fetchurl/component.cc b/repos/libports/src/app/fetchurl/component.cc
--- a/repos/libports/src/app/fetchurl/component.cc
+++ b/repos/libports/src/app/fetchurl/component.cc
@@ -28,6 +28,7 @@ namespace Fetchurl {
 	class Fetch;
+	struct User_data;
 	struct Main;
 
 	typedef Genode::String<256> Url;
@@ -64,6 +65,8 @@ class Fetchurl::Fetch : Genode::List<Fetch>::Element
 		double dltotal = 0;
 		double dlnow = 0;
 
+		bool timeout = false;
+
 		int fd = -1;
 
 		Fetch(Main &main, Url const &url, Path const &path,
@@ -75,6 +78,16 @@ class Fetchurl::Fetch : Genode::List<Fetch>::Element
 };
 
+struct Fetchurl::User_data
+{
+	Timer::Connection         &timer;
+	Genode::Milliseconds       last_ms;
+	Genode::Milliseconds const max_timeout;
+	Genode::Milliseconds       curr_timeout;
+	Fetchurl::Fetch           &fetch;
+};
+
+
 struct Fetchurl::Main
 {
 	Main(Main const &);
@@ -95,6 +108,8 @@ struct Fetchurl::Main
 
 	Genode::Duration _report_delay { Genode::Milliseconds { 0 } };
 
+	Genode::Milliseconds _progress_timeout { 10u * 1000 };
+
 	void _schedule_report()
 	{
 		using namespace Genode;
@@ -117,6 +132,9 @@ struct Fetchurl::Main
 					xml.attribute("url", f->url);
 					xml.attribute("total", f->dltotal);
 					xml.attribute("now", f->dlnow);
+					if (f->timeout) {
+						xml.attribute("timeout", true);
+					}
 				});
 			}
 		});
@@ -146,6 +164,9 @@ struct Fetchurl::Main
 		}
 		catch (...) { }
 
+		_progress_timeout.value = config_node.attribute_value("progress_timeout",
+		                                                      _progress_timeout.value);
+
 		auto const parse_fn = [&] (Genode::Xml_node node) {
 
 			if (!node.has_attribute("url") || !node.has_attribute("path")) {
@@ -238,7 +259,14 @@ struct Fetchurl::Main
 
 		curl_easy_setopt(_curl, CURLOPT_NOPROGRESS, 0L);
 		curl_easy_setopt(_curl, CURLOPT_PROGRESSFUNCTION, progress_callback);
-		curl_easy_setopt(_curl, CURLOPT_PROGRESSDATA, &_fetch);
+		User_data ud {
+			.timer        = _timer,
+			.last_ms      = _timer.curr_time().trunc_to_plain_ms(),
+			.max_timeout  = _progress_timeout,
+			.curr_timeout = Genode::Milliseconds { .value = 0 },
+			.fetch        = _fetch,
+		};
+		curl_easy_setopt(_curl, CURLOPT_PROGRESSDATA, &ud);
 
 		curl_easy_setopt(_curl, CURLOPT_SSL_VERIFYPEER, 0L);
 		curl_easy_setopt(_curl, CURLOPT_SSL_VERIFYHOST, 0L);
@@ -312,11 +340,38 @@ static int progress_callback(void *userdata,
 	(void)ultotal;
 	(void)ulnow;
 
-	Fetchurl::Fetch &fetch = *((Fetchurl::Fetch *)userdata);
+	using namespace Fetchurl;
+	using namespace Genode;
+
+	User_data &ud = *reinterpret_cast<User_data *>(userdata);
+	Timer::Connection &timer = ud.timer;
+	Fetch             &fetch = ud.fetch;
+
+	Milliseconds curr { timer.curr_time().trunc_to_plain_ms() };
+	Milliseconds diff { .value = curr.value - ud.last_ms.value };
+	ud.last_ms = curr;
+
+	/*
+	 * To catch stuck downloads we increase the timeout time whenever
+	 * the current download rate is same as the last one. When we hit
+	 * the max timeout value, we will abort the download attempt.
+	 */
+
+	if (dlnow == fetch.dlnow) {
+		ud.curr_timeout.value += diff.value;
+	}
+	else {
+		ud.curr_timeout.value = 0;
+	}
+	bool const timeout = ud.curr_timeout.value >= ud.max_timeout.value;
+
 	fetch.dltotal = dltotal;
-	fetch.dlnow = dlnow;
+	fetch.dlnow   = dlnow;
+	fetch.timeout = timeout;
 	fetch.main._schedule_report();
-	return CURLE_OK;
+
+	/* non-zero return is enough to trigger an abort */
+	return timeout ? CURLE_GOT_NOTHING : CURLE_OK;
 }