diff --git a/config/global/download_extract.in b/config/global/download_extract.in
index b23f88eb..d638ab09 100644
--- a/config/global/download_extract.in
+++ b/config/global/download_extract.in
@@ -20,6 +20,31 @@ config ONLY_DOWNLOAD
       Usefull to pre-retrieve the tarballs before going off-line.
 
+config CONNECT_TIMEOUT
+    int
+    prompt "connection timeout"
+    default 10
+    help
+      From the curl manual:
+        Maximum time in seconds that you allow the connection to the server to take.
+
+      The scenario is as follows:
+        - some enterprise networks have firewalls that prohibit FTP traffic, while
+          still allowing HTTP
+        - most download sites have an http:// equivalent for the ftp:// URL
+        - after this number of seconds, the connection is considered to have
+          failed, and the next URL in the list is tried, until we reach an URL
+          that goes through the firewall, most probably an http:// URL.
+
+      If you have a slow network, you'd better set this value higher than the default
+      10s. If you know a firewall is blocking connections, but your network is globally
+      fast, you can try to lower this value to jump more quickly to allowed URLs. YMMV.
+
+      Note that this value applies equally to wget if you have that installed.
+
+      Of course, you'd be better off using a proxy, as offered by the following
+      choice of options.
+
 choice
     bool
     prompt "Proxy type"
diff --git a/scripts/functions b/scripts/functions
index c4f739a1..a5a4f943 100644
--- a/scripts/functions
+++ b/scripts/functions
@@ -283,22 +283,22 @@ CT_DoGetFileWget() {
     # With automated download as we are doing, it can be very dangerous to use
     # -c to continue the downloads. It's far better to simply overwrite the
     # destination file
-    # Some company networks have proxies to connect to to the internet, but
-    # it's not easy to detect them, and wget may never timeout while connecting,
-    # so force a global 120s timeout.
-    wget -T 120 -nc --progress=dot:binary --tries=3 --passive-ftp "$1"    \
-    || wget -T 120 -nc --progress=dot:binary --tries=3 "$1"               \
+    # Some company networks have firewalls between them and the internet, and
+    # it's not easy to detect them; wget does not time out by default while
+    # connecting, so force a global ${CT_CONNECT_TIMEOUT}-second timeout.
+    wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
+    || wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 "$1"             \
     || true
 }
 
 # Download an URL using curl
 # Usage: CT_DoGetFileCurl
 CT_DoGetFileCurl() {
-    # Note: comments about wget method are also valid here
+    # Note: comments about wget method (above) are also valid here
     # Plus: no good progress indicator is available with curl,
     # so output is consigned to oblivion
-    curl --ftp-pasv -O --retry 3 "$1" --connect-timeout 120 >/dev/null    \
-    || curl -O --retry 3 "$1" --connect-timeout 120 >/dev/null            \
+    curl --ftp-pasv -O --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} >/dev/null \
+    || curl -O --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} >/dev/null          \
     || true
 }
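
Not part of the patch, purely illustrative: a minimal shell sketch of the fallback behaviour the help text describes. The URL list and the loop are hypothetical; only the curl flags and CT_CONNECT_TIMEOUT come from the change above. With the configurable timeout, a firewalled ftp:// mirror gives up quickly instead of hanging, so the next (http://) mirror is reached almost immediately.

    # Hypothetical example, not crosstool-NG code: try each mirror in turn
    # with the configurable connect timeout instead of the hard-coded 120s.
    CT_CONNECT_TIMEOUT=10
    for url in "ftp://example.org/pub/foo-1.0.tar.bz2" \
               "http://example.org/pub/foo-1.0.tar.bz2"; do
        # A blocked ftp:// URL fails after ${CT_CONNECT_TIMEOUT} seconds rather
        # than hanging, so the loop falls through to the http:// mirror.
        if curl -O --retry 3 --connect-timeout ${CT_CONNECT_TIMEOUT} "${url}"; then
            break
        fi
    done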