Detect both wget/curl and allow user to select the agent

Signed-off-by: Alexey Neyman <stilor@att.net>
This commit is contained in:
Alexey Neyman 2017-02-12 13:51:42 -08:00
parent c31ed45a65
commit 35f89c4064
6 changed files with 98 additions and 33 deletions

View File

@ -62,6 +62,8 @@ export awk := @AWK@
export grep := @GREP@
export make := @MAKE@
export sed := @SED@
export wget := @wget@
export curl := @curl@
export libtool := @LIBTOOL@
export libtoolize := @LIBTOOLIZE@
export objcopy := @OBJCOPY@
@ -155,6 +157,7 @@ uninstall: real-uninstall
# Build rules
build-bin: $(PROG_NAME) \
scripts/scripts.mk \
scripts/crosstool-NG.sh \
scripts/saveSample.sh \
scripts/showConfig.sh
@ -187,6 +190,8 @@ define sed_it
-e 's,@@CT_make@@,$(make),g;' \
-e 's,@@CT_bash@@,$(bash),g;' \
-e 's,@@CT_awk@@,$(awk),g;' \
-e 's,@@CT_wget@@,$(wget),g;' \
-e 's,@@CT_curl@@,$(curl),g;' \
$< >$@
endef
@ -258,6 +263,8 @@ FORCE:
clean-bin:
@echo " RM '$(PROG_NAME)'"
@rm -f $(PROG_NAME)
@echo " RM 'scripts/scripts.mk'"
@rm -f scripts/scripts.mk
@echo " RM 'scripts/crosstool-NG.sh'"
@rm -f scripts/crosstool-NG.sh
@echo " RM 'scripts/saveSample.sh'"

View File

@ -2,6 +2,23 @@
comment "Downloading"
choice
prompt "Download agent"
config DOWNLOAD_AGENT_WGET
bool "wget"
depends on CONFIGURE_has_wget
config DOWNLOAD_AGENT_CURL
bool "curl"
depends on CONFIGURE_has_curl
config DOWNLOAD_AGENT_NONE
bool "none (forbid download)"
select FORBID_DOWNLOAD
endchoice
config FORBID_DOWNLOAD
bool
prompt "Forbid downloads"
@ -11,7 +28,7 @@ config FORBID_DOWNLOAD
If you do not have network connectivity when you run crosstool-NG,
and some files are missing, it can be a long time before crosstool-NG
fails.
Saying 'y' here will prevent crosstool-NG from downloading missing
files, thus failing early so that you don't get stuck.
@ -22,7 +39,7 @@ config FORCE_DOWNLOAD
prompt "Force downloads"
help
Force downloading tarballs, even if one already exists.
Useful if you suspect a tarball to be damaged.
config CONNECT_TIMEOUT
@ -31,7 +48,7 @@ config CONNECT_TIMEOUT
default 10
help
Maximum time in seconds that you allow the connection to the server to take.
The scenario is as follows:
- some enterprise networks have firewalls that prohibit FTP traffic, while
still allowing HTTP
@ -39,13 +56,29 @@ config CONNECT_TIMEOUT
- after this number of seconds, it is considered that the connection could
not be established, and the next URL in the list is tried, until we reach
a URL that will go through the firewall, most probably an http:// URL.
If you have a slow network, you'd better set this value higher than the default
10s. If you know a firewall is blocking connections, but your network is globally
fast, you can try to lower this value to jump more quickly to allowed URLs. YMMV.
If '-1' is specified, no timeout option is passed to the download agent.
if DOWNLOAD_AGENT_WGET
config DOWNLOAD_WGET_OPTIONS
string "Extra options to wget"
default "--passive-ftp --tries=3 -nc --progress=dot:binary"
endif
if DOWNLOAD_AGENT_CURL
config DOWNLOAD_CURL_OPTIONS
string "Extra options to curl"
default "--location --ftp-pasv --retry 3 --fail --silent"
endif
config ONLY_DOWNLOAD
bool
prompt "Stop after downloading tarballs"

View File

@ -218,12 +218,22 @@ ACX_CHECK_PROGS_REQ([makeinfo], [makeinfo])
ACX_CHECK_PROGS_REQ([cut], [cut])
ACX_CHECK_PROGS_REQ([stat], [stat])
ACX_CHECK_PROGS_REQ([readlink], [readlink])
ACX_CHECK_PROGS_REQ([wget], [wget])
ACX_CHECK_PROGS_REQ([tar], [tar])
ACX_CHECK_PROGS_REQ([gzip], [gzip])
ACX_CHECK_PROGS_REQ([bzip2], [bzip2])
ACX_CHECK_PROGS_REQ([help2man], [help2man])
# Not a fatal failure even if we have neither - the tarballs may
# be provided in a local directory.
AC_CHECK_PROGS([wget], [wget])
ACX_SET_KCONFIG_OPTION([wget])
AC_SUBST([wget])
AC_CHECK_PROGS([curl], [curl])
ACX_SET_KCONFIG_OPTION([curl])
AC_SUBST([curl])
#--------------------------------------------------------------------
# Still boring, but remember the path, now...
#--------------------------------------------------------------------

View File

@ -28,7 +28,7 @@ include $(CT_LIB_DIR)/paths.mk
# Some distributions (eg. Ubuntu) thought it wise to point /bin/sh to
# a truly POSIX-conforming shell, ash in this case. This is not so good
# as we, smart (haha!) developers (as smart we ourselves think we are),
# got used to bashisms, and are enclined to easiness... So force use of
# got used to bashisms, and are inclined to easiness... So force use of
# bash.
export SHELL=$(bash)
@ -36,7 +36,7 @@ export SHELL=$(bash)
# We do not need any GREP_OPTIONS anyway, so set it to empty.
export GREP_OPTIONS=
# Make the restart/stop steps availabe to scripts/crostool-NG.sh
# Make the restart/stop steps available to scripts/crosstool-NG.sh
export CT_STOP:=$(STOP)
export CT_RESTART:=$(RESTART)

View File

@ -663,29 +663,34 @@ CT_DoGetFile() {
local url="${1}"
local dest="${CT_TARBALLS_DIR}/${url##*/}"
local tmp="${dest}.tmp-dl"
local ok
local T
# Remove potential left-over from a previous run
rm -f "${tmp}"
# We also retry a few times, in case there is a transient error (eg. behind
# a dynamic IP that changes during the transfer...)
# With automated download as we are doing, it can be very dangerous to
# continue the downloads. It's far better to simply overwrite the
# destination file.
# Some company networks have firewalls to connect to the internet, but it's
# not easy to detect them, so force a global ${CT_CONNECT_TIMEOUT}-second
# timeout.
if [ ${CT_CONNECT_TIMEOUT} = -1 ]; then
T=
else
T="-T ${CT_CONNECT_TIMEOUT}"
# Replace a special value of '-1' with empty string
if [ ${CT_CONNECT_TIMEOUT} != -1 ]; then
T="${CT_CONNECT_TIMEOUT}"
fi
if CT_DoExecLog ALL wget --passive-ftp --tries=3 -nc \
--progress=dot:binary \
${T} \
-O "${tmp}" \
"${url}"
then
if [ "${CT_DOWNLOAD_AGENT_WGET}" = "y" ]; then
if CT_DoExecLog ALL wget ${CT_DOWNLOAD_WGET_OPTIONS} \
${T:+-T ${T}} \
-O "${tmp}" \
"${url}"; then
ok=y
fi
elif [ "${CT_DOWNLOAD_AGENT_CURL}" = "y" ]; then
if CT_DoExecLog ALL curl ${CT_DOWNLOAD_CURL_OPTIONS} \
${T:+--connect-timeout ${T}} \
-o "${tmp}" \
"${url}"; then
ok=y
fi
fi
if [ "${ok}" = "y" ]; then
# Success, we got it, good!
mv "${tmp}" "${dest}"
CT_DoLog DEBUG "Got it from: \"${url}\""

View File

@ -26,9 +26,19 @@ updatetools: $(CONFIG_SUB_DEST) $(CONFIG_GUESS_DEST)
# ----------------------------------------------------------
# How to retrieve the tools
wget_opt=-o /dev/null
ifeq ($(strip $(V)),2)
wget_opt=
ifneq ($(strip $(V)),2)
wget_silent_opt = -o /dev/null
curl_silent_opt = --silent
endif
ifneq (@@CT_wget@@,)
download_cmd = wget --passive-ftp $(wget_silent_opt) -O $@
else
ifneq (@@CT_curl@@,)
download_cmd = curl --ftp-pasv $(curl_silent_opt) -o $@
else
download_cmd = $(error wget or curl needed for downloads)
endif
endif
PHONY += scripts
@ -37,13 +47,13 @@ scripts:
$(SILENT)mkdir -p $@
$(CONFIG_SUB_DEST): scripts FORCE
@$(CT_ECHO) ' WGET $@'
$(SILENT)wget $(wget_opt) -O $@ $(CONFIG_SUB_SRC)
@$(CT_ECHO) ' DOWNLOAD $@'
$(SILENT)$(download_cmd) $(CONFIG_SUB_SRC)
$(SILENT)chmod u+rwx,go+rx-w $@
$(CONFIG_GUESS_DEST): scripts FORCE
@$(CT_ECHO) ' WGET $@'
$(SILENT)wget $(wget_opt) -O $@ $(CONFIG_GUESS_SRC)
@$(CT_ECHO) ' DOWNLOAD $@'
$(SILENT)$(download_cmd) $(CONFIG_GUESS_SRC)
$(SILENT)chmod u+rwx,go+rx-w $@
# ----------------------------------------------------------