scripts: in case of failed download, remove partial files

It happens from time to time that the server misbehaves and drops the
connection part-way through the transfer, for no good reason, leaving us
with a partial file, on which the extract pass would choke.

Remove partial downloads, to fail early.

Signed-off-by: "Yann E. MORIN" <yann.morin.1998@anciens.enib.fr>
(transplanted from eefacabd9e72f3d578e5dc5908026ed780f9a240)
author Yann E. MORIN  2010-08-11 20:07:18 +02:00
parent d46cb2c97e
commit bb831ed28f


@@ -363,7 +363,7 @@ CT_DoGetFileWget() {
     # connecting, so force a global ${CT_CONNECT_TIMEOUT}-second timeout.
     CT_DoExecLog ALL wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
     || CT_DoExecLog ALL wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 "$1" \
-    || true
+    || rm -f "${1##*/}"
 }
 # Download an URL using curl
@@ -374,7 +374,7 @@ CT_DoGetFileCurl() {
     # so, be silent.
     CT_DoExecLog ALL curl -s --ftp-pasv -O --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} -L -f \
     || CT_DoExecLog ALL curl -s -O --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} -L -f \
-    || true
+    || rm -f "${1##*/}"
 }
 # Download using aria2
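
Below is a minimal sketch of the pattern this change introduces, using an illustrative helper name and URL rather than the actual crosstool-NG functions: wget and curl return non-zero on a broken transfer but can leave a truncated file behind, so the || rm -f "${1##*/}" fallback deletes the partial file; a later existence check then sees nothing and can abort before the extract pass.

#!/bin/sh
# Sketch only: fetch_one and the URL below are hypothetical, not crosstool-NG code.
fetch_one() {
    # "${1##*/}" strips everything up to the last '/', i.e. the local file name.
    # If wget breaks off part-way, remove whatever partial file it left behind.
    wget -T 10 --tries=3 "$1" \
    || rm -f "${1##*/}"
}

fetch_one "http://example.com/sources/foo-1.0.tar.gz"

# Because the partial file was removed, a plain existence check is enough to
# fail early instead of choking later during extraction.
if [ ! -f "foo-1.0.tar.gz" ]; then
    printf 'download failed\n' >&2
    exit 1
fi

Note that rm -f succeeds even when there is nothing to delete, so the wrapper still returns success either way, just as the old || true did; the failure is signalled by the file's absence rather than by an exit code.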