Matthias Kaehlcke <matthias@kaehlcke.net> reported hung downloads on his
network, most probably due to proxies. Have the downloaders (wget and curl)
time out on too-slow connections (they do not by default).

 scripts/functions | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)
"Yann E. MORIN" 2008-04-13 18:16:58 +00:00
parent 524ec3d73d
commit b3841f0aa9

@@ -283,16 +283,23 @@ CT_DoGetFileWget() {
 # With automated download as we are doing, it can be very dangerous to use
 # -c to continue the downloads. It's far better to simply overwrite the
 # destination file
-wget -nc --progress=dot:binary --tries=3 --passive-ftp "$1" || wget -nc --progress=dot:binary --tries=3 "$1" || true
+# Some company networks have proxies to connect to the internet, but it's
+# not easy to detect them, and wget may never timeout while connecting, so
+# force a global 120s timeout.
+wget -T 120 -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
+    || wget -T 120 -nc --progress=dot:binary --tries=3 "$1" \
+    || true
 }
 # Download an URL using curl
 # Usage: CT_DoGetFileCurl <URL>
 CT_DoGetFileCurl() {
-# Note: comments about wget method are also valid here
-# Plus: no good progreess indicator is available with curl,
-# so output is consigned to oblivion
-curl --ftp-pasv -O --retry 3 "$1" >/dev/null || curl -O --retry 3 "$1" >/dev/null || true
+# Note: comments about wget method are also valid here
+# Plus: no good progress indicator is available with curl,
+# so output is consigned to oblivion
+curl --ftp-pasv -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
+    || curl -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
+    || true
 }
 _wget=`CT_Which wget`
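
For reference, the change boils down to passing a hard 120-second timeout to
both downloaders, so a silently dropped (often proxied) connection can no
longer hang the build. Below is a minimal standalone sketch of that approach;
the helper name (try_download), the command -v probe and the example URL are
hypothetical and not part of scripts/functions:

# Hypothetical standalone helper, not the actual scripts/functions code.
try_download() {
    url="$1"
    if command -v wget >/dev/null 2>&1; then
        # -T 120 sets wget's DNS, connect and read timeouts to 120 seconds;
        # fall back to a non-passive-FTP attempt if the first one fails.
        wget -T 120 -nc --tries=3 --passive-ftp "${url}" \
            || wget -T 120 -nc --tries=3 "${url}"
    else
        # --connect-timeout 120 bounds curl's connection phase to 120 seconds;
        # --retry 3 retries transient failures, -O keeps the remote file name.
        curl --ftp-pasv -O --retry 3 --connect-timeout 120 "${url}" >/dev/null \
            || curl -O --retry 3 --connect-timeout 120 "${url}" >/dev/null
    fi
}

# Example invocation (hypothetical URL):
#   try_download "http://example.com/gcc-4.3.0.tar.bz2"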