# -*- mode: sh; tab-width: 4 -*-
# vi: ts=4:sw=4:sts=4:et

# This file contains some useful common functions
# Copyright 2007 Yann E. MORIN
# Licensed under the GPL v2. See COPYING in the root of this package

# Prepare the fault handler
CT_OnError() {
    local ret=$?
    local result
    local old_trap
    local intro
    local file line func
    local step step_depth

    # To avoid printing the backtrace for each sub-shell
    # up to the top-level, just remember we've dumped it
    if [ ! -f "${CT_WORK_DIR}/backtrace" ]; then
        [ -d "${CT_WORK_DIR}" ] && touch "${CT_WORK_DIR}/backtrace"

        # Print steps backtrace
        step_depth=${CT_STEP_COUNT}
        CT_STEP_COUNT=1 # To have a zero-indentation
        CT_DoLog ERROR ""
        CT_DoLog ERROR ">>"
        intro="Build failed"
        for((step=step_depth; step>0; step--)); do
            CT_DoLog ERROR ">> ${intro} in step '${CT_STEP_MESSAGE[${step}]}'"
            intro="      called"
        done

        # Print functions backtrace
        intro="Error happened in"
        CT_DoLog ERROR ">>"
        for((depth=1; ${BASH_LINENO[$((${depth}-1))]}>0; depth++)); do
            file="${BASH_SOURCE[${depth}]#${CT_LIB_DIR}/}"
            func="${FUNCNAME[${depth}]}"
            line="@${BASH_LINENO[${depth}-1]:-?}"
            CT_DoLog ERROR ">> ${intro}: ${func}[${file}${line}]"
            intro="      called from"
        done

        # If the user asked for interactive debugging, dump him/her to a shell
        if [ "${CT_DEBUG_INTERACTIVE}" = "y" ]; then
            # We do not want this sub-shell exit status to be caught, because
            # it is absolutely legit that it exits with non-zero.
            # Save the trap handler to restore it after our debug-shell
            old_trap="$(trap -p ERR)"
            trap -- ERR
            (
                exec >&6 2>&7 <&8
                printf "\r \n\nCurrent command"
                if [ -n "${cur_cmd}" ]; then
                    printf ":\n %s\n" "${cur_cmd}"
                else
                    printf " (unknown), "
                fi
                printf "exited with error code: %d\n" ${ret}
                printf "Please fix it up and finish by exiting the shell with one of these values:\n"
                printf " 1 fixed, continue with next build command\n"
                if [ -n "${cur_cmd}" ]; then
                    printf " 2 repeat this build command\n"
                fi
                printf " 3 abort build\n\n"
                while true; do
                    ${bash} --rcfile <(printf "PS1='ct-ng:\w> '\nPROMPT_COMMAND=''\n") -i
                    result=$?
                    case $result in
                        1)  printf "\nContinuing past the failed command.\n\n"
                            break
                            ;;
                        2)  if [ -n "${cur_cmd}" ]; then
                                printf "\nRe-trying last command.\n\n"
                                break
                            fi
                            ;;
                        3)  break;;
                    esac
                    printf "\nPlease exit with one of these values:\n"
                    printf " 1 fixed, continue with next build command\n"
                    if [ -n "${cur_cmd}" ]; then
                        printf " 2 repeat this build command\n"
                    fi
                    printf " 3 abort build\n"
                done
                exit $result
            )
            result=$?
            # Restore the trap handler
            eval "${old_trap}"
            case "${result}" in
                1)  rm -f "${CT_WORK_DIR}/backtrace"; touch "${CT_BUILD_DIR}/skip"; return;;
                2)  rm -f "${CT_WORK_DIR}/backtrace"; touch "${CT_BUILD_DIR}/repeat"; return;;
                # 3 is an abort, continue...
            esac
        fi
    fi

    # And finally, in top-level shell, print some hints
    if [ ${BASH_SUBSHELL} -eq 0 ]; then
        # Help diagnose the error
        CT_STEP_COUNT=1 # To have a zero-indentation
        CT_DoLog ERROR ">>"
        if [ "${CT_LOG_TO_FILE}" = "y" ]; then
            CT_DoLog ERROR ">> For more info on this error, look at the file: '${tmp_log_file#${CT_TOP_DIR}/}'"
        fi
        CT_DoLog ERROR ">> There is a list of known issues, some with workarounds, in:"
        CT_DoLog ERROR ">>     '${CT_DOC_DIR#${CT_TOP_DIR}/}/B - Known issues.txt'"

        CT_DoLog ERROR ""
        CT_DoEnd ERROR
        rm -f "${CT_WORK_DIR}/backtrace"
    fi
    exit $ret
}

# Install the fault handler
trap CT_OnError ERR

# Inherit the fault handler in subshells and functions
set -E

# Make pipes fail on the _first_ failed command
# Not supported on bash < 3.x, but we need it, so drop the obsolete bash-2.x
set -o pipefail

# Don't hash commands' locations, and search every time it is requested.
# This is slow, but needed because of the static/shared core gcc which shall
# always match to shared if it exists, and only fallback to static if the
# shared is not found
set +o hashall

# Log policy:
#  - first of all, save stdout so we can see the live logs: fd #6
#    (also save stdin and stderr for use by CT_DEBUG_INTERACTIVE)
exec 6>&1 7>&2 8<&0
#  - then point stdout to the log file
tmp_log_file="${CT_TOP_DIR}/build.log"
rm -f "${tmp_log_file}"
exec >>"${tmp_log_file}"

# The different log levels:
CT_LOG_LEVEL_ERROR=0
CT_LOG_LEVEL_WARN=1
CT_LOG_LEVEL_INFO=2
CT_LOG_LEVEL_EXTRA=3
CT_LOG_LEVEL_CFG=4
CT_LOG_LEVEL_FILE=5
CT_LOG_LEVEL_STATE=6
CT_LOG_LEVEL_ALL=7
CT_LOG_LEVEL_DEBUG=8

# Make it easy to use \n and !
CR=$(printf "\n")
BANG='!'

# A function to log what is happening
# Different log levels are available:
#   - ERROR:  A serious, fatal error occurred
#   - WARN:   A non-fatal, non-serious error occurred; take your responsibility with the generated build
#   - INFO:   Informational messages
#   - EXTRA:  Extra informational messages
#   - CFG:    Output of various "./configure"-type scripts
#   - FILE:   File / archive unpacking.
#   - STATE:  State save & restore
#   - ALL:    Component's build messages
#   - DEBUG:  Internal debug messages
# Usage: CT_DoLog <level> [message]
# If message is empty, then stdin will be logged.
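#
# Example (illustrative only; the level and message are arbitrary):
#   CT_DoLog EXTRA "Installing C library headers"
#   some_command 2>&1 |CT_DoLog ALL    # log a command's output line by line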
CT_DoLog() {
    local max_level LEVEL level cur_l cur_L
    local l
    eval max_level="\${CT_LOG_LEVEL_${CT_LOG_LEVEL_MAX}}"
    # Set the maximum log level to DEBUG if we have none
    [ -z "${max_level}" ] && max_level=${CT_LOG_LEVEL_DEBUG}

    LEVEL="$1"; shift
    eval level="\${CT_LOG_LEVEL_${LEVEL}}"

    if [ $# -eq 0 ]; then
        cat -
    else
        echo -e "${*}"
    fi |( IFS="${CR}" # We want the full lines, even leading spaces
          _prog_bar_cpt=0
          _prog_bar[0]='/'
          _prog_bar[1]='-'
          _prog_bar[2]='\'
          _prog_bar[3]='|'
          indent=$((2*CT_STEP_COUNT))
          while read line; do
              case "${CT_LOG_SEE_TOOLS_WARN},${line}" in
                  y,*"warning:"*)       cur_L=WARN; cur_l=${CT_LOG_LEVEL_WARN};;
                  y,*"WARNING:"*)       cur_L=WARN; cur_l=${CT_LOG_LEVEL_WARN};;
                  *"error:"*)           cur_L=ERROR; cur_l=${CT_LOG_LEVEL_ERROR};;
                  *"make["*"]: *** ["*) cur_L=ERROR; cur_l=${CT_LOG_LEVEL_ERROR};;
                  *)                    cur_L="${LEVEL}"; cur_l="${level}";;
              esac
              # There will always be a log file (stdout, fd #1), be it /dev/null
              printf "[%-5s]%*s%s%s\n" "${cur_L}" "${indent}" " " "${line}"
              if [ ${cur_l} -le ${max_level} ]; then
                  # Only print to console (fd #6) if log level is high enough.
                  printf "${CT_LOG_PROGRESS_BAR:+\r}[%-5s]%*s%s%s\n" "${cur_L}" "${indent}" " " "${line}" >&6
              fi
              if [ "${CT_LOG_PROGRESS_BAR}" = "y" ]; then
                  printf "\r[%02d:%02d] %s " $((SECONDS/60)) $((SECONDS%60)) "${_prog_bar[$((_prog_bar_cpt/10))]}" >&6
                  _prog_bar_cpt=$(((_prog_bar_cpt+1)%40))
              fi
          done
        )

    return 0
}

# Execute an action, and log its messages
# It is possible to even log local variable assignments (a-la: var=val ./cmd opts)
# Usage: CT_DoExecLog <level> [VAR=val...] <command> [parameters...]
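#
# Example (illustrative only; the component, flags and paths are arbitrary):
#   CT_DoExecLog CFG CFLAGS="-O2" ./configure --prefix="${CT_PREFIX_DIR}"
#   CT_DoExecLog ALL make install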
CT_DoExecLog() {
    local level="$1"
    local cur_cmd
    local ret
    shift
    (
        for i in "$@"; do
            cur_cmd+="'${i}' "
        done
        while true; do
            case "${1}" in
                *=*)    eval export "'${1}'"; shift;;
                *)      break;;
            esac
        done
        # This while-loop goes hand-in-hand with the ERR trap handler:
        # - if the command terminates successfully, then we hit the break
        #   statement, and we exit the loop
        # - if the command terminates in error, then the ERR handler kicks
        #   in, then:
        #   - if the user did *not* ask for interactive debugging, the ERR
        #     handler exits, and we hit the end of the sub-shell
        #   - if the user did ask for interactive debugging, the ERR handler
        #     spawns a shell. Upon termination of this shell, the ERR handler
        #     examines the exit status of the shell:
        #     - if 1, the ERR handler returns; then we hit the else statement,
        #       then the break, and we exit the 'while' loop, to continue the
        #       build;
        #     - if 2, the ERR handler touches the repeat file, and returns;
        #       then we hit the if statement, and we loop for one more
        #       iteration;
        #     - if 3, the ERR handler exits with the command's exit status,
        #       and we're dead;
        #     - for any other exit status of the shell, the ERR handler
        #       prints an informational message, and respawns the shell
        #
        # This allows a user to get an interactive shell that has the same
        # environment (PATH and so on) that the failed command was run with.
        while true; do
            rm -f "${CT_BUILD_DIR}/repeat"
            CT_DoLog DEBUG "==> Executing: ${cur_cmd}"
            "${@}" 2>&1 |CT_DoLog "${level}"
            ret="${?}"
            if [ -f "${CT_BUILD_DIR}/repeat" ]; then
                rm -f "${CT_BUILD_DIR}/repeat"
                continue
            elif [ -f "${CT_BUILD_DIR}/skip" ]; then
                rm -f "${CT_BUILD_DIR}/skip"
                ret=0
                break
            else
                break
            fi
        done
        exit ${ret}
    )
    # Catch failure of the sub-shell
    [ $? -eq 0 ]
}

# Tail message to be logged whatever happens
# Usage: CT_DoEnd <level>
CT_DoEnd()
{
    local level="$1"
    CT_STOP_DATE=$(CT_DoDate +%s%N)
    CT_STOP_DATE_HUMAN=$(CT_DoDate +%Y%m%d.%H%M%S)
    if [ "${level}" != "ERROR" ]; then
        CT_DoLog "${level:-INFO}" "Build completed at ${CT_STOP_DATE_HUMAN}"
    fi
    elapsed=$((CT_STOP_DATE-CT_STAR_DATE))
    elapsed_min=$((elapsed/(60*1000*1000*1000)))
    elapsed_sec=$(printf "%02d" $(((elapsed%(60*1000*1000*1000))/(1000*1000*1000))))
    elapsed_csec=$(printf "%02d" $(((elapsed%(1000*1000*1000))/(10*1000*1000))))
    CT_DoLog ${level:-INFO} "(elapsed: ${elapsed_min}:${elapsed_sec}.${elapsed_csec})"
}

# Remove entries referring to . and other relative paths
# Usage: CT_SanitizePath
CT_SanitizePath() {
    local new
    local p
    local IFS=:
    for p in $PATH; do
        # Only accept absolute paths;
        # Note: as a special case the empty string in PATH is equivalent to .
        if [ -n "${p}" -a -z "${p%%/*}" ]; then
            new="${new}${new:+:}${p}"
        fi
    done
    PATH="${new}"
}

# Sanitize the directory name contained in the variable passed as argument:
# - remove duplicate /
# - remove . (current dir) at the beginning, in the middle or at the end
# - resolve .. (parent dir) if there is a previous component
# - remove .. (parent dir) if at the root
#
# Usage: CT_SanitizeVarDir CT_PREFIX_DIR
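#
# Example (illustrative values only): with CT_PREFIX_DIR="/opt//x-tools/./arm/../aarch64",
# calling 'CT_SanitizeVarDir CT_PREFIX_DIR' would leave CT_PREFIX_DIR="/opt/x-tools/aarch64".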
CT_SanitizeVarDir() {
    local var
    local old_dir
    local new_dir tmp

    for var in "$@"; do
        eval "old_dir=\"\${${var}}\""
        new_dir=$( echo "${old_dir}" | ${awk} '
            {
                isabs = $1 == ""   # Started with a slash
                trail = $NF == ""  # Ending with a slash
                ncomp = 0          # Components in a path so far
                for (i = 1; i <= NF; i++) {
                    # Double-slash or current dir? Ignore
                    if ($i == "" || $i == ".") {
                        continue;
                    }
                    # .. pops the last component unless it is at the beginning
                    if ($i == ".." && ncomp != 0 && comps[ncomp] != "..") {
                        ncomp--;
                        continue;
                    }
                    comps[++ncomp] = $i;
                }
                seencomp = 0
                for (i = 1; i <= ncomp; i++) {
                    if (comps[i] == ".." && isabs) {
                        # /../ at the beginning is equivalent to /
                        continue;
                    }
                    printf "%s%s", isabs || i != 1 ? "/" : "", comps[i];
                    seencomp = 1;
                }
                if (!seencomp && !isabs && !trail) {
                    # Eliminated all components, but no trailing slash -
                    # if the result is appended with /foo, it must not become absolute
                    printf ".";
                }
                if ((!seencomp && isabs) || (seencomp && trail)) {
                    printf "/";
                }
            }' FS=/ )
        eval "${var}=\"${new_dir}\""
        CT_DoLog DEBUG "Sanitized '${var}': '${old_dir}' -> '${new_dir}'"
    done
}

# Abort the execution with an error message
# Usage: CT_Abort <message>
CT_Abort() {
    CT_DoLog ERROR "$1"
    false
}

# Test a condition, and print a message if satisfied
# Usage: CT_Test <message> <tests>
CT_Test() {
    local ret
    local m="$1"
    shift
    CT_DoLog DEBUG "Testing '! ( $* )'"
    test "$@" && CT_DoLog WARN "$m"
    return 0
}

# Test a condition, and abort with an error message if satisfied
# Usage: CT_TestAndAbort <message> <tests>
CT_TestAndAbort() {
    local m="$1"
    shift
    CT_DoLog DEBUG "Testing '! ( $* )'"
    test "$@" && CT_Abort "$m"
    return 0
}

# Test a condition, and abort with an error message if not satisfied
# Usage: CT_TestOrAbort <message> <tests>
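#
# Example (as used further down in this file):
#   CT_TestOrAbort "Please set the mirror base URL" -n "${CT_MIRROR_BASE_URL}"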
CT_TestOrAbort() {
    local m="$1"
    shift
    CT_DoLog DEBUG "Testing '$*'"
    test "$@" || CT_Abort "$m"
    return 0
}

# Test the presence of a tool, or abort if not found
# Usage: CT_HasOrAbort <tool>
CT_HasOrAbort() {
    CT_TestAndAbort "'${1}' not found and needed for successful toolchain build." -z "$(CT_Which "${1}")"
    return 0
}

# Search a program: wrap "which" for those systems where "which"
# verbosely says there is no match (such as on Mandriva).
# Usage: CT_Which <filename>
CT_Which() {
    which "$1" 2>/dev/null || true
}

# Get current date with nanosecond precision
# On systems not supporting nanosecond precision, this is faked by rounding down
# to the nearest whole second
# Usage: CT_DoDate <fmt>
CT_DoDate() {
    date "$1" |${sed} -r -e 's/%?N$/000000000/;'
}

CT_STEP_COUNT=1
CT_STEP_MESSAGE[${CT_STEP_COUNT}]="(top-level)"
# Memorise a step being done so that any error is caught
# Usage: CT_DoStep <loglevel> <message>
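#
# Example (illustrative only), pairing CT_DoStep with CT_EndStep below:
#   CT_DoStep INFO "Installing C library headers"
#   ...build commands...
#   CT_EndStep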
CT_DoStep() {
    local start=$(CT_DoDate +%s%N)
    CT_DoLog "$1" "================================================================="
    CT_DoLog "$1" "$2"
    CT_STEP_COUNT=$((CT_STEP_COUNT+1))
    CT_STEP_LEVEL[${CT_STEP_COUNT}]="$1"; shift
    CT_STEP_START[${CT_STEP_COUNT}]="${start}"
    CT_STEP_MESSAGE[${CT_STEP_COUNT}]="$1"
    return 0
}

# End the step just being done
# Usage: CT_EndStep
CT_EndStep() {
    local stop=$(CT_DoDate +%s%N)
    local duration=$(printf "%032d" $((stop-${CT_STEP_START[${CT_STEP_COUNT}]})) \
                     |${sed} -r -e 's/([[:digit:]]{2})[[:digit:]]{7}$/\.\1/; s/^0+//; s/^\./0\./;'
                    )
    local elapsed=$(printf "%02d:%02d" $((SECONDS/60)) $((SECONDS%60)))
    local level="${CT_STEP_LEVEL[${CT_STEP_COUNT}]}"
    local message="${CT_STEP_MESSAGE[${CT_STEP_COUNT}]}"
    CT_STEP_COUNT=$((CT_STEP_COUNT-1))
    CT_DoLog "${level}" "${message}: done in ${duration}s (at ${elapsed})"
    return 0
}

# Pushes into a directory, and pops back
CT_Pushd() {
    CT_DoLog DEBUG "Entering '$1'"
    pushd "$1" >/dev/null 2>&1
}
CT_Popd() {
    popd >/dev/null 2>&1
}

# Create a dir and cd or pushd into it
# Usage: CT_mkdir_cd <dir/to/create>
#        CT_mkdir_pushd <dir/to/create>
CT_mkdir_cd() {
    local dir="${1}"

    mkdir -p "${dir}"
    cd "${dir}"
}
CT_mkdir_pushd() {
    local dir="${1}"

    mkdir -p "${dir}"
    CT_Pushd "${dir}"
}

# Creates a temporary directory
# $1: variable to assign to
# Usage: CT_MktempDir foo
CT_MktempDir() {
    # Some mktemp do not allow more than 6 Xs
    eval "$1"=$(mktemp -q -d "${CT_BUILD_DIR}/tmp.XXXXXX")
    CT_TestOrAbort "Could not make temporary directory" -n "${!1}" -a -d "${!1}"
    CT_DoLog DEBUG "Made temporary directory '${!1}'"
    return 0
}

# Removes one or more directories, even if they are read-only, or their parent is
# Usage: CT_DoForceRmdir dir [...]
CT_DoForceRmdir() {
    local dir
    local mode
    for dir in "${@}"; do
        [ -d "${dir}" ] || continue
        case "$CT_SYS_OS" in
            Linux|CYGWIN*)
                mode="$(stat -c '%a' "$(dirname "${dir}")")"
                ;;
            Darwin|*BSD)
                mode="$(stat -f '%Lp' "$(dirname "${dir}")")"
                ;;
            *)
                CT_Abort "Unhandled host OS $CT_SYS_OS"
                ;;
        esac
        CT_DoExecLog ALL chmod u+w "$(dirname "${dir}")"
        CT_DoExecLog ALL chmod -R u+w "${dir}"
        CT_DoExecLog ALL rm -rf "${dir}"
        CT_DoExecLog ALL chmod ${mode} "$(dirname "${dir}")"
    done
}

# Echoes the specified string on stdout until the pipe breaks.
# Doesn't fail
# $1: string to echo
# Usage: CT_DoYes "" |${make} oldconfig
CT_DoYes() {
    yes "$1" || true
}

# Add the specified directory to LD_LIBRARY_PATH, and export it
# If the specified path is already present, just export
# $1: path to add
# $2: add as 'first' or 'last' path, 'first' is assumed if $2 is empty
# Usage: CT_SetLibPath /some/where/lib [first|last]
CT_SetLibPath() {
    local path="$1"
    local pos="$2"

    case ":${LD_LIBRARY_PATH}:" in
        *:"${path}":*)  ;;
        *)  case "${pos}" in
                last)
                    CT_DoLog DEBUG "Adding '${path}' at end of LD_LIBRARY_PATH"
                    LD_LIBRARY_PATH="${LD_LIBRARY_PATH:+${LD_LIBRARY_PATH}:}${path}"
                    ;;
                first|"")
                    CT_DoLog DEBUG "Adding '${path}' at start of LD_LIBRARY_PATH"
                    LD_LIBRARY_PATH="${path}${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}"
                    ;;
                *)
                    CT_Abort "Incorrect position '${pos}' to add '${path}' to LD_LIBRARY_PATH"
                    ;;
            esac
            ;;
    esac
    CT_DoLog DEBUG "==> LD_LIBRARY_PATH='${LD_LIBRARY_PATH}'"
    export LD_LIBRARY_PATH
}

# Build up the list of allowed tarball extensions
# Add them in the preferred order; most preferred comes first
CT_DoListTarballExt() {
    if [ "${CT_CONFIGURE_has_xz}" = "y" ]; then
        printf ".tar.xz\n"
    fi
    if [ "${CT_CONFIGURE_has_lzma}" = "y" \
         -o "${CT_CONFIGURE_has_xz}" = "y" ]; then
        printf ".tar.lzma\n"
    fi
    printf ".tar.bz2\n"
    printf ".tar.gz\n.tgz\n"
    printf ".tar\n"
    printf ".zip\n"
}

# Get the file name extension of a component
# Usage: CT_GetFileExtension <component_name-component_version> [extension]
# If found, echoes the extension to stdout, and returns 0
# If not found, echoes nothing on stdout, and returns !0.
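#
# Example (hypothetical component; the result depends on what is in CT_TARBALLS_DIR):
#   ext=$(CT_GetFileExtension "gcc-4.9.2")    # may echo e.g. ".tar.bz2"
# Note: call it in a command substitution, as it uses 'exit' rather than 'return'.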
CT_GetFileExtension() {
    local ext
    local file="$1"
    shift
    local first_ext="$1"

    # we need to also check for an empty extension for those very
    # peculiar components that don't have one (such as sstrip from
    # buildroot).
    for ext in ${first_ext} $(CT_DoListTarballExt) /.git ''; do
        if [ -e "${CT_TARBALLS_DIR}/${file}${ext}" -o -L "${CT_TARBALLS_DIR}/${file}${ext}" ]; then
            echo "${ext}"
            exit 0
        fi
    done

    exit 1
}

# Try to retrieve the specified URL (HTTP or FTP)
# Usage: CT_DoGetFile <URL>
# This function always returns true (0), as it can be legitimate not
# to find the requested URL (think about snapshots, different layouts
# for different gcc versions, etc...).
CT_DoGetFile() {
    local url="${1}"
    local dest="${CT_TARBALLS_DIR}/${url##*/}"
    local tmp="${dest}.tmp-dl"

    # Remove potential left-over from a previous run
    rm -f "${tmp}"

    # We also retry a few times, in case there is a transient error (eg. behind
    # a dynamic IP that changes during the transfer...)
    # With automated download as we are doing, it can be very dangerous to
    # continue the downloads. It's far better to simply overwrite the
    # destination file.
    # Some company networks have firewalls to connect to the internet, but it's
    # not easy to detect them, so force a global ${CT_CONNECT_TIMEOUT}-second
    # timeout.
    if [ ${CT_CONNECT_TIMEOUT} = -1 ]; then
        T=
    else
        T="-T ${CT_CONNECT_TIMEOUT}"
    fi
    if CT_DoExecLog ALL wget --passive-ftp --tries=3 -nc \
                             --progress=dot:binary \
                             ${T} \
                             -O "${tmp}" \
                             "${url}"
    then
        # Success, we got it, good!
        mv "${tmp}" "${dest}"
        CT_DoLog DEBUG "Got it from: \"${url}\""
    else
        # Woops...
        rm -f "${tmp}"
        CT_DoLog DEBUG "Not at this location: \"${url}\""
    fi
}

# This function tries to retrieve a tarball from a local directory
# Usage: CT_GetLocal <basename> [.extension]
CT_GetLocal() {
    local basename="$1"
    local first_ext="$2"
    local ext

    # Do we already have it in *our* tarballs dir?
    if ext="$( CT_GetFileExtension "${basename}" ${first_ext} )"; then
        CT_DoLog DEBUG "Already have '${basename}'"
        return 0
    fi

    if [ -n "${CT_LOCAL_TARBALLS_DIR}" ]; then
        CT_DoLog DEBUG "Trying to retrieve an already downloaded copy of '${basename}'"
        # We'd rather have a bzip2'ed tarball, then a gzipped tarball, then a plain tarball,
        # or, as a failover, a file without extension.
        for ext in ${first_ext} $(CT_DoListTarballExt) ''; do
            CT_DoLog DEBUG "Trying '${CT_LOCAL_TARBALLS_DIR}/${basename}${ext}'"
            if [ -r "${CT_LOCAL_TARBALLS_DIR}/${basename}${ext}" -a \
                 "${CT_FORCE_DOWNLOAD}" != "y" ]; then
                CT_DoLog DEBUG "Got '${basename}' from local storage"
                CT_DoExecLog ALL ln -s "${CT_LOCAL_TARBALLS_DIR}/${basename}${ext}" "${CT_TARBALLS_DIR}/${basename}${ext}"
                return 0
            fi
        done
    fi
    return 1
}

# This function gets the custom source from either a tarball or directory
# Usage: CT_GetCustom <name> <version> <location>
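#
# Example (hypothetical version and path):
#   CT_GetCustom "linux" "4.3-custom" "/home/user/src/linux-4.3"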
CT_GetCustom() {
    local component_name="$1"
    local component_version="$2"
    local component_location="$3"

    # Some local variables we use to help us figure out what to do
    local component_location_type="dir"  # str: 'file' or 'dir'
    local component_location_filename="" # filename... if it's a file

    CT_TestAndAbort \
        "${component_name}: Custom location setting is empty" \
        -z "${component_location}"

    CT_TestAndAbort \
        "${component_name}: Custom version setting is empty" \
        -z "${component_version}"

    if [ -f "${component_location}" ]; then
        component_location_type="file"
        component_location_filename="$(basename ${component_location})"
    elif [ -d "${component_location}" ]; then
        # Yes, it's the default, but it rules out the else case in the `if'.
        component_location_type="dir"
        # as -d and -f say: it's a <directory|file> and is readable!
    else
        CT_Abort "${component_name}: Unable to read ${component_location}, make sure the setting is correct and double check the permissions!"
    fi

    if [ "${component_location_type}" = "file" ]; then
        CT_DoLog EXTRA "Got '${component_location}' from custom location"
        # We need to know the custom tarball extension,
        # so we can create a properly-named symlink, which
        # we use later on in 'extract'
        case "${component_location}" in
            *.tar.xz|*.tar.bz2|*.tar.lzma|*.tar.gz|*.tgz|*.tar|*.zip) ;;
            *) CT_Abort "Unknown extension for custom tarball '${component_location}'" ;;
        esac
        [ ! -L "${CT_TARBALLS_DIR}/${component_location_filename}" ] && \
            CT_DoExecLog DEBUG ln -sf "${component_location}" \
                "${CT_TARBALLS_DIR}/${component_location_filename}"
    elif [ "${component_location_type}" = "dir" ]; then
        CT_DoLog EXTRA "Got '${component_location}' from custom location"
        [ ! -d "${CT_SRC_DIR}/${component_name}-${component_version}" ] && \
            CT_DoExecLog DEBUG ln -sf "${component_location}" \
                "${CT_SRC_DIR}/${component_name}-${component_version}"

        # Don't try to extract from source directory, it's extracted!
        touch "${CT_SRC_DIR}/.${component_name}-${component_version}.extracted"
    fi
    # Don't patch a custom source, it's custom!
    touch "${CT_SRC_DIR}/.${component_name}-${component_version}.patched"
}

# This function saves the specified file to local storage if possible,
# and if so, symlinks it for later usage
# Usage: CT_SaveLocal </full/path/file.name>
CT_SaveLocal() {
    local file="$1"
    local basename="${file##*/}"

    if [ "${CT_SAVE_TARBALLS}" = "y" ]; then
        CT_DoLog EXTRA "Saving '${basename}' to local storage"
        # The file may already exist if downloads are forced: remove it first
        CT_DoExecLog ALL rm -f "${CT_LOCAL_TARBALLS_DIR}/${basename}"
        CT_DoExecLog ALL mv -f "${file}" "${CT_LOCAL_TARBALLS_DIR}"
        CT_DoExecLog ALL ln -s "${CT_LOCAL_TARBALLS_DIR}/${basename}" "${file}"
    fi
}

# Download the file from one of the URLs passed as argument
# Usage: CT_GetFile <basename> [.extension] <url> [url ...]
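#
# Example (hypothetical component and mirror URLs):
#   CT_GetFile "gcc-4.9.2" http://mirror.example.com/gcc ftp://ftp.example.org/pub/gcc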
CT_GetFile() {
    local ext
    local -a URLS
    local url
    local file="$1"
    local first_ext
    shift
    # If next argument starts with a dot, then this is not a URL,
    # and we can consider that it is a preferred extension.
    case "$1" in
        .*) first_ext="$1"
            shift
            ;;
    esac

    # Does it exist locally?
    if CT_GetLocal "${file}" ${first_ext}; then
        return 0
    fi
    # No, it does not...

    # If not allowed to download from the Internet, don't
    if [ "${CT_FORBID_DOWNLOAD}" = "y" ]; then
        CT_DoLog DEBUG "Not allowed to download from the Internet, aborting ${file} download"
        return 1
    fi

    # Try to retrieve the file
    CT_DoLog EXTRA "Retrieving '${file}'"

    # Add URLs on the LAN mirror
    if [ "${CT_USE_MIRROR}" = "y" ]; then
        CT_TestOrAbort "Please set the mirror base URL" -n "${CT_MIRROR_BASE_URL}"
        URLS+=( "${CT_MIRROR_BASE_URL}/${file%-*}" )
        URLS+=( "${CT_MIRROR_BASE_URL}" )
    fi

    if [ "${CT_FORCE_MIRROR}" != "y" ]; then
        URLS+=( "${@}" )
    fi

    # Scan all URLs in turn, and try to grab a tarball from there
    # Do *not* try git trees (ext=/.git), this is handled in a specific
    # wrapper, below
    for ext in ${first_ext} $(CT_DoListTarballExt) ''; do
        # Try all URLs in turn
        for url in "${URLS[@]}"; do
            [ -n "${url}" ] || continue
            CT_DoLog DEBUG "Trying '${url}/${file}${ext}'"
            CT_DoGetFile "${url}/${file}${ext}"
            if [ -f "${CT_TARBALLS_DIR}/${file}${ext}" ]; then
                CT_DoLog DEBUG "Got '${file}' from the Internet"
                CT_SaveLocal "${CT_TARBALLS_DIR}/${file}${ext}"
                return 0
            fi
        done
    done

    # Just return error, someone may want to catch and handle the error
    # (eg. glibc add-ons can be missing).
    return 1
}

# Checkout from CVS, and build the associated tarball
# The tarball will be called ${basename}.tar.bz2
# Prerequisite: either the server does not require password,
# or the user must already be logged in.
# 'tag' is the tag to retrieve. Must be specified, but can be empty.
# If dirname is specified, then module will be renamed to dirname
# prior to building the tarball.
# Usage: CT_GetCVS <basename> <url> <module> <tag> [dirname[=subdir]]
# Note: if '=subdir' is given, then it is used instead of 'module'.
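#
# Example (entirely hypothetical project and CVS root):
#   CT_GetCVS "foo-1.0" ":pserver:anonymous@cvs.example.org:/cvsroot/foo" "foo" "" "foo-1.0"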
CT_GetCVS() {
    local basename="$1"
    local uri="$2"
    local module="$3"
    local tag="${4:+-r ${4}}"
    local dirname="$5"
    local tmp_dir

    # First try locally, then the mirror
    if CT_GetFile "${basename}"; then
        # Got it! Return early! :-)
        return 0
    fi

    if [ "${CT_FORBID_DOWNLOAD}" = "y" ]; then
        CT_DoLog WARN "Downloads forbidden, not trying cvs retrieval"
        return 1
    fi

    CT_MktempDir tmp_dir
    CT_Pushd "${tmp_dir}"

    CT_DoExecLog ALL cvs -z 9 -d "${uri}" co -P ${tag} "${module}"
    if [ -n "${dirname}" ]; then
        case "${dirname}" in
            *=*)
                CT_DoExecLog DEBUG mv "${dirname#*=}" "${dirname%%=*}"
                CT_DoExecLog ALL tar cjf "${CT_TARBALLS_DIR}/${basename}.tar.bz2" "${dirname%%=*}"
                ;;
            *)
                CT_DoExecLog ALL mv "${module}" "${dirname}"
                CT_DoExecLog ALL tar cjf "${CT_TARBALLS_DIR}/${basename}.tar.bz2" "${dirname:-${module}}"
                ;;
        esac
    fi
    CT_SaveLocal "${CT_TARBALLS_DIR}/${basename}.tar.bz2"

    CT_Popd
    CT_DoExecLog ALL rm -rf "${tmp_dir}"
}

# Check out from SVN, and build the associated tarball
# The tarball will be called ${basename}.tar.bz2
# Prerequisite: either the server does not require password,
# or the user must already be logged in.
# 'rev' is the revision to retrieve
# Usage: CT_GetSVN <basename> <url> [rev]
CT_GetSVN() {
    local basename="$1"
    local uri="$2"
    local rev="$3"

    # First try locally, then the mirror
    if CT_GetFile "${basename}"; then
        # Got it! Return early! :-)
        return 0
    fi

    if [ "${CT_FORBID_DOWNLOAD}" = "y" ]; then
        CT_DoLog WARN "Downloads forbidden, not trying svn retrieval"
        return 1
    fi

    CT_MktempDir tmp_dir
    CT_Pushd "${tmp_dir}"

    if ! CT_DoExecLog ALL svn export ${rev:+-r ${rev}} "${uri}" "${basename}"; then
        CT_DoLog WARN "Could not retrieve '${basename}'"
        return 1
    fi
    CT_DoExecLog ALL tar cjf "${CT_TARBALLS_DIR}/${basename}.tar.bz2" "${basename}"
    CT_SaveLocal "${CT_TARBALLS_DIR}/${basename}.tar.bz2"

    CT_Popd
    CT_DoExecLog ALL rm -rf "${tmp_dir}"
}

# Clone a git tree
# Tries the given URLs in turn until one can get cloned. No tarball will be created.
# Prerequisites: either the server does not require password,
# or the user has already taken any action to authenticate to the server.
# The cloned tree will *not* be stored in the local tarballs dir!
# cset_or_ref can be a branch or tag, if specified as 'ref=name'
# In this case, 'git ls-remote' is used to get the sha1 and can also
# be used to get a list of valid refs (e.g. HEAD, refs/heads/master, refs/tags/v3.3.0)
# Usage: CT_GetGit <basename> <cset_or_ref> <url> <out_cset>
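#
# Example (hypothetical repository URL and output variable name):
#   CT_GetGit "linux" "ref=refs/heads/master" "git://git.example.org/linux.git" kernel_cset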
CT_GetGit() {
    local basename="${1}"
    local cset_or_ref="${2}"
    local url="${3}"
    local _out_cset="${4}"

    local ref=$(echo "${cset_or_ref}" | ${sed} -n 's/^ref=\(.*\)/\1/p')
    if [ -n "$ref" ]; then
        local matches=$(git ls-remote --exit-code "$url" --refs "${ref}")
        local result=$?
        CT_TestAndAbort "Failed to find git ref ${ref} at ${url}" "${result}" != "0"
        if [ $( echo "$matches" | wc -l) -gt 1 ]; then
            CT_DoLog WARN "Ambiguous ref ${ref} at ${url}, using first"
        fi
        local cset=$(echo "$matches" | head -n1 | cut -c1-6)
        CT_DoLog INFO "ref ${ref} at ${url} has cset of ${cset}"
    else
        local cset=${cset_or_ref}
        CT_DoLog INFO "cset ${cset}"
    fi

    if [ -n "${_out_cset}" ]; then
        eval ${_out_cset}=\${cset}
    fi

    local dir="${CT_TARBALLS_DIR}/${basename}-${cset}.git"
    local file="${basename}-${cset}.tar.gz"
    local dest="${CT_TARBALLS_DIR}/${file}"
    local tmp="${CT_TARBALLS_DIR}/${file}.tmp-dl"

    # Do we already have it?
    if CT_GetLocal "${file}"; then
        echo ${cset}
        return 0
    fi
    # Nope...

    if [ "${CT_FORBID_DOWNLOAD}" = "y" ]; then
        CT_DoLog WARN "Downloads forbidden, not trying git retrieval"
        return 1
    fi

    # Add URLs on the LAN mirror
    # We subvert the normal download method, just to look
    # at the local mirror
    if CT_GetFile "${basename}-${cset}" .tar.gz; then
        return 0
    fi

    CT_DoLog EXTRA "Retrieving '${basename}-${cset}' (git)"

    # Remove potential left-over from a previous run
    CT_DoExecLog ALL rm -rf "${tmp}.tar.gz" "${tmp}.tar" "${tmp}" "${dir}"

    if CT_DoExecLog ALL git clone "${url}" "${dir}"; then
        # Yep, cloned OK
        CT_Pushd "${dir}"
        CT_DoExecLog ALL git archive --format=tar \
                                     --prefix="${basename}-${cset}/" \
                                     -o "${tmp}.tar" \
                                     "${cset}"
        CT_DoExecLog ALL gzip -9 "${tmp}.tar"
        CT_DoExecLog ALL mv -f "${tmp}.tar.gz" "${dest}"
        CT_SaveLocal "${dest}"
        CT_DoExecLog ALL rm -rf "${tmp}.tar.gz" "${tmp}.tar" "${tmp}" "${dir}"
        CT_Popd
        echo ${cset}
        return 0
    else
        # Woops...
        CT_DoExecLog ALL rm -rf "${dir}"
        CT_DoLog DEBUG "Could not clone '${basename}'"
        return 1
    fi
}

# Extract a tarball
# Some tarballs need to be extracted in specific places. Eg.: glibc addons
# must be extracted in the glibc directory; uClibc locales must be extracted
# in the extra/locale sub-directory of uClibc. This is taken into account
# by the caller, that did a 'cd' into the correct path before calling us
# and sets nochdir to 'nochdir'.
# Note also that this function handles the git trees!
# Usage: CT_Extract [nochdir] <basename> [options]
# where 'options' are dependent on the source (eg. git branch/tag...)
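#
# Example (hypothetical component; assumes the tarball is already in CT_TARBALLS_DIR):
#   CT_Extract "gcc-4.9.2"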
CT_Extract() {
    local nochdir="$1"
    local basename
    local ext
    local lzma_prog
    local -a tar_opts

    if [ "${nochdir}" = "nochdir" ]; then
        shift
        nochdir="$(pwd)"
    else
        nochdir="${CT_SRC_DIR}"
    fi

    basename="$1"
    shift

    # Check if already extracted
    if [ -e "${CT_SRC_DIR}/.${basename}.extracted" ]; then
        CT_DoLog DEBUG "Already extracted '${basename}'"
        return 0
    fi

    if ! ext="$(CT_GetFileExtension "${basename}")"; then
        CT_DoLog WARN "'${basename}' not found in '${CT_TARBALLS_DIR}'"
        return 1
    fi
    local full_file="${CT_TARBALLS_DIR}/${basename}${ext}"

    # Check if previously partially extracted
    if [ -e "${CT_SRC_DIR}/.${basename}.extracting" ]; then
        CT_DoLog ERROR "The '${basename}' sources were partially extracted."
        CT_DoLog ERROR "Please remove first:"
        CT_DoLog ERROR " - the source dir for '${basename}', in '${CT_SRC_DIR}'"
        CT_DoLog ERROR " - the file '${CT_SRC_DIR}/.${basename}.extracting'"
        CT_Abort "I'll stop now to avoid any carnage..."
    fi
    CT_DoExecLog DEBUG touch "${CT_SRC_DIR}/.${basename}.extracting"

    CT_Pushd "${nochdir}"

    CT_DoLog EXTRA "Extracting '${basename}'"
    CT_DoExecLog FILE mkdir -p "${basename}"
    tar_opts=( "--strip-components=1" )
    tar_opts+=( "-C" "${basename}" )
    tar_opts+=( "-xv" )

    # One note here:
    # - lzma can be handled either with 'xz' or 'lzma'
    # - we get lzma tarball only if either or both are available
    # - so, if we get an lzma tarball, and either 'xz' or 'lzma' is
    #   missing, we can assume the other is available
    if [ "${CT_CONFIGURE_has_lzma}" = "y" ]; then
        lzma_prog="lzma -fdc"
    else
        lzma_prog="xz -fdc"
    fi
    case "${ext}" in
        .tar.xz)      xz -fdc "${full_file}" | CT_DoExecLog FILE tar "${tar_opts[@]}" -f -;;
        .tar.lzma)    ${lzma_prog} "${full_file}" | CT_DoExecLog FILE tar "${tar_opts[@]}" -f -;;
        .tar.bz2)     bzip2 -dc "${full_file}" | CT_DoExecLog FILE tar "${tar_opts[@]}" -f -;;
        .tar.gz|.tgz) gzip -dc "${full_file}" | CT_DoExecLog FILE tar "${tar_opts[@]}" -f -;;
        .tar)         CT_DoExecLog FILE tar "${tar_opts[@]}" -f "${full_file}";;
        .zip)         CT_DoExecLog FILE unzip "${@}" "${full_file}";;
        /.git)        CT_ExtractGit "${basename}" "${@}";;
        *)            CT_DoLog WARN "Don't know how to handle '${basename}${ext}': unknown extension"
                      return 1
                      ;;
    esac

    # Don't mark as being extracted for git
    case "${ext}" in
        /.git)  ;;
        *)      CT_DoExecLog DEBUG touch "${CT_SRC_DIR}/.${basename}.extracted";;
    esac
    CT_DoExecLog DEBUG rm -f "${CT_SRC_DIR}/.${basename}.extracting"

    CT_Popd
}
|
|
|
|
|
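# A hedged usage sketch for CT_Extract (component names and versions below are
# illustrative only, they are not taken from this file):
#   CT_Extract gcc-4.9.2
#       -> unpacks ${CT_TARBALLS_DIR}/gcc-4.9.2.<ext> into ${CT_SRC_DIR}/gcc-4.9.2
#   CT_Pushd "${CT_SRC_DIR}/uClibc-0.9.33.2/extra/locale"
#   CT_Extract nochdir uClibc-locale-030818
#       -> unpacks into ./uClibc-locale-030818 below the current directory
#   CT_Popd
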
# Create a working git clone of a local git repository
# Usage: CT_ExtractGit <basename> [ref]
# where 'ref' is the reference to use:
#   the full name of a branch, like "remotes/origin/branch_name"
#   a date as understandable by git, like "YYYY-MM-DD[ hh[:mm[:ss]]]"
#   a tag name
# If 'ref' is not given, the current repository HEAD will be used
CT_ExtractGit() {
    local basename="${1}"
    local ref="${2}"
    local repo
    local ref_type

    # pushd now to be able to get git revlist in case ref is a date
    repo="${CT_TARBALLS_DIR}/${basename}"
    CT_Pushd "${repo}"

    # What kind of reference is ${ref} ?
    if [ -z "${ref}" ]; then
        ref_type=head
        ref=$(git rev-list -n1 HEAD)
    elif git tag |${grep} -E "^${ref}$" >/dev/null 2>&1; then
        ref_type=tag
    elif git branch -a --no-color |${grep} -E "^. ${ref}$" >/dev/null 2>&1; then
        ref_type=branch
    elif date -d "${ref}" >/dev/null 2>&1; then
        ref_type=date
        ref=$(git rev-list -n1 --before="${ref}")
    else
        CT_Abort "Reference '${ref}' is an incorrect git reference: neither tag, branch nor date"
    fi

    CT_Popd

    CT_DoExecLog FILE rmdir "${basename}"
    case "${ref_type}" in
        branch) CT_DoExecLog FILE git clone -b "${ref}" "${repo}" "${basename}" ;;
        *)      CT_DoExecLog FILE git clone "${repo}" "${basename}"
                CT_Pushd "${basename}"
                CT_DoExecLog FILE git checkout "${ref}"
                CT_Popd
                ;;
    esac
}

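# Hedged examples of the accepted 'ref' forms (values are illustrative only):
#   CT_ExtractGit linux                        # clone at the repository HEAD
#   CT_ExtractGit linux v4.3                   # clone, then check out tag 'v4.3'
#   CT_ExtractGit linux remotes/origin/master  # clone that branch directly
#   CT_ExtractGit linux "2015-06-01 12:00"     # last commit before that date
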
# Patches the specified component
# See CT_Extract, above, for explanations on 'nochdir'
# Usage: CT_Patch [nochdir] <packagename> <packageversion>
# If the package directory is *not* packagename-packageversion, then
# the caller must cd into the proper directory first, and call us
# with nochdir
CT_Patch() {
    local nochdir="$1"
    local pkgname
    local version
    local pkgdir
    local base_file
    local ver_file
    local d
    local -a patch_dirs
    local bundled_patch_dir
    local local_patch_dir
    local bundled_exp_patch_dir
    local local_exp_patch_dir

    if [ "${nochdir}" = "nochdir" ]; then
        shift
        pkgname="$1"
        version="$2"
        pkgdir="${pkgname}-${version}"
        nochdir="$(pwd)"
    else
        pkgname="$1"
        version="$2"
        pkgdir="${pkgname}-${version}"
        nochdir="${CT_SRC_DIR}/${pkgdir}"
    fi

    # Check if already patched
    if [ -e "${CT_SRC_DIR}/.${pkgdir}.patched" ]; then
        CT_DoLog DEBUG "Already patched '${pkgdir}'"
        return 0
    fi

    # Check if already partially patched
    if [ -e "${CT_SRC_DIR}/.${pkgdir}.patching" ]; then
        CT_DoLog ERROR "The '${pkgdir}' sources were partially patched."
        CT_DoLog ERROR "Please remove first:"
        CT_DoLog ERROR " - the source dir for '${pkgdir}', in '${CT_SRC_DIR}'"
        CT_DoLog ERROR " - the file '${CT_SRC_DIR}/.${pkgdir}.extracted'"
        CT_DoLog ERROR " - the file '${CT_SRC_DIR}/.${pkgdir}.patching'"
        CT_Abort "I'll stop now to avoid any carnage..."
    fi
    touch "${CT_SRC_DIR}/.${pkgdir}.patching"

    CT_Pushd "${nochdir}"

    CT_DoLog EXTRA "Patching '${pkgdir}'"

    bundled_patch_dir="${CT_LIB_DIR}/patches/${pkgname}/${version}"
    bundled_patch_arch_dir="${bundled_patch_dir}/${CT_ARCH}"
    local_patch_dir="${CT_LOCAL_PATCH_DIR}/${pkgname}/${version}"

    case "${CT_PATCH_ORDER}" in
        bundled)        patch_dirs=("${bundled_patch_dir}" "${bundled_patch_arch_dir}");;
        local)          patch_dirs=("${local_patch_dir}");;
        bundled,local)  patch_dirs=("${bundled_patch_dir}" "${bundled_patch_arch_dir}" "${local_patch_dir}");;
        local,bundled)  patch_dirs=("${local_patch_dir}" "${bundled_patch_dir}" "${bundled_patch_arch_dir}");;
        none)           patch_dirs=;;
    esac

    for d in "${patch_dirs[@]}"; do
        CT_DoLog DEBUG "Looking for patches in '${d}'..."
        if [ -n "${d}" -a -d "${d}" ]; then
            for p in "${d}"/*.patch; do
                if [ -f "${p}" ]; then
                    CT_DoExecLog ALL ${patch} --no-backup-if-mismatch -g0 -F1 -p1 -f -i "${p}"
                fi
            done
            if [ "${CT_PATCH_SINGLE}" = "y" ]; then
                break
            fi
        fi
    done

    if [ "${CT_OVERIDE_CONFIG_GUESS_SUB}" = "y" ]; then
        CT_DoLog ALL "Overriding config.guess and config.sub"
        for cfg in config_guess config_sub; do
            eval ${cfg}="${CT_LIB_DIR}/scripts/${cfg/_/.}"
            [ -e "${CT_TOP_DIR}/scripts/${cfg/_/.}" ] && eval ${cfg}="${CT_TOP_DIR}/scripts/${cfg/_/.}"
            # Can't use CT_DoExecLog because of the '{} \;' to be passed un-mangled to find
            find . -type f -name "${cfg/_/.}"   \
                 -exec chmod -v u+w {} \;       \
                 -exec cp -v "${!cfg}" {} \;    |CT_DoLog ALL
        done
    fi

    CT_DoExecLog DEBUG touch "${CT_SRC_DIR}/.${pkgdir}.patched"
    CT_DoExecLog DEBUG rm -f "${CT_SRC_DIR}/.${pkgdir}.patching"

    CT_Popd
}

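# A hedged example (package name and version are illustrative only):
#   CT_Patch "gcc" "4.9.2"
#       -> applies the patches found in patches/gcc/4.9.2/ (and/or the local
#          patch directory, per CT_PATCH_ORDER) to ${CT_SRC_DIR}/gcc-4.9.2
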
# Two wrappers to call config.(guess|sub) either from CT_TOP_DIR or CT_LIB_DIR.
# Those from CT_TOP_DIR, if they exist, will be more recent than those from CT_LIB_DIR.
CT_DoConfigGuess() {
    if [ -x "${CT_TOP_DIR}/scripts/config.guess" ]; then
        "${CT_TOP_DIR}/scripts/config.guess"
    else
        "${CT_LIB_DIR}/scripts/config.guess"
    fi
}

CT_DoConfigSub() {
    if [ -x "${CT_TOP_DIR}/scripts/config.sub" ]; then
        "${CT_TOP_DIR}/scripts/config.sub" "$@"
    else
        "${CT_LIB_DIR}/scripts/config.sub" "$@"
    fi
}

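# Hedged illustration of the two wrappers (output depends on the build machine
# and on the argument; the tuples shown are examples only):
#   CT_DoConfigGuess                  # e.g. prints 'x86_64-unknown-linux-gnu'
#   CT_DoConfigSub arm-linux-gnueabi  # prints the canonicalised tuple,
#                                     #   e.g. 'arm-unknown-linux-gnueabi'
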
# Normally, each step is executed in a sub-shell and thus cannot modify the
# environment for the next step(s). When this is needed, it can do so by
# invoking this function.
# Usage: CT_EnvModify VAR VALUE
CT_EnvModify() {
    echo "${1}=\"${2}\"" >> "${CT_BUILD_DIR}/env.modify.sh"
}

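# A hedged sketch of how a step could use this (variable and value are
# illustrative); the line is appended to env.modify.sh, which is presumably
# sourced by the step driver before the following steps run:
#   CT_EnvModify CT_ARCH_TARGET_CFLAGS ""
#       -> appends: CT_ARCH_TARGET_CFLAGS=""
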
# Compute the target tuple from what is provided by the user
# Usage: CT_DoBuildTargetTuple
# In fact this function takes the environment variables to build the target
# tuple. It is needed both by the normal build sequence, as well as the
# sample saving sequence.
CT_DoBuildTargetTuple() {
    # Set the endianness suffix, and the default endianness gcc option
    case "${CT_ARCH_ENDIAN}" in
        big)
            target_endian_eb=eb
            target_endian_be=be
            target_endian_el=
            target_endian_le=
            CT_ARCH_ENDIAN_CFLAG="-mbig-endian"
            CT_ARCH_ENDIAN_LDFLAG="-Wl,-EB"
            ;;
        little)
            target_endian_eb=
            target_endian_be=
            target_endian_el=el
            target_endian_le=le
            CT_ARCH_ENDIAN_CFLAG="-mlittle-endian"
            CT_ARCH_ENDIAN_LDFLAG="-Wl,-EL"
            ;;
    esac

    # Set the bitness suffix
    case "${CT_ARCH_BITNESS}" in
        32)
            target_bits_32=32
            target_bits_64=
            ;;
        64)
            target_bits_32=
            target_bits_64=64
            ;;
    esac

    # Build the default architecture tuple part
    CT_TARGET_ARCH="${CT_ARCH}${CT_ARCH_SUFFIX}"

    # Set defaults for the system part of the tuple. Can be overridden
    # by architecture-specific values.
    case "${CT_LIBC}" in
        *glibc) CT_TARGET_SYS=gnu;;
        uClibc) CT_TARGET_SYS=uclibc;;
        musl)   CT_TARGET_SYS=musl;;
        avr-libc)
            # avr-libc only seems to work with the non-canonical "avr" target.
            CT_TARGET_SKIP_CONFIG_SUB=y
            CT_TARGET_SYS=  # CT_TARGET_SYS must be empty too
            ;;
        *)      CT_TARGET_SYS=elf;;
    esac

    # Set the default values for ARCH, ABI, CPU, TUNE, FPU and FLOAT
    unset CT_ARCH_ARCH_CFLAG CT_ARCH_ABI_CFLAG CT_ARCH_CPU_CFLAG CT_ARCH_TUNE_CFLAG CT_ARCH_FPU_CFLAG CT_ARCH_FLOAT_CFLAG
    unset CT_ARCH_WITH_ARCH CT_ARCH_WITH_ABI CT_ARCH_WITH_CPU CT_ARCH_WITH_TUNE CT_ARCH_WITH_FPU CT_ARCH_WITH_FLOAT
    [ "${CT_ARCH_ARCH}" ] && { CT_ARCH_ARCH_CFLAG="-march=${CT_ARCH_ARCH}"; CT_ARCH_WITH_ARCH="--with-arch=${CT_ARCH_ARCH}"; }
    [ "${CT_ARCH_ABI}" ]  && { CT_ARCH_ABI_CFLAG="-mabi=${CT_ARCH_ABI}";    CT_ARCH_WITH_ABI="--with-abi=${CT_ARCH_ABI}"; }
    [ "${CT_ARCH_CPU}" ]  && { CT_ARCH_CPU_CFLAG="-mcpu=${CT_ARCH_CPU}";    CT_ARCH_WITH_CPU="--with-cpu=${CT_ARCH_CPU}"; }
    [ "${CT_ARCH_TUNE}" ] && { CT_ARCH_TUNE_CFLAG="-mtune=${CT_ARCH_TUNE}"; CT_ARCH_WITH_TUNE="--with-tune=${CT_ARCH_TUNE}"; }
    [ "${CT_ARCH_FPU}" ]  && { CT_ARCH_FPU_CFLAG="-mfpu=${CT_ARCH_FPU}";    CT_ARCH_WITH_FPU="--with-fpu=${CT_ARCH_FPU}"; }

    case "${CT_ARCH_FLOAT}" in
        hard)
            CT_ARCH_FLOAT_CFLAG="-mhard-float"
            CT_ARCH_WITH_FLOAT="--with-float=hard"
            ;;
        soft)
            CT_ARCH_FLOAT_CFLAG="-msoft-float"
            CT_ARCH_WITH_FLOAT="--with-float=soft"
            ;;
        softfp)
            CT_ARCH_FLOAT_CFLAG="-mfloat-abi=softfp"
            CT_ARCH_WITH_FLOAT="--with-float=softfp"
            ;;
    esac

    # Build the default kernel tuple part
    CT_TARGET_KERNEL="${CT_KERNEL}"

    # Override the default values with the component-specific settings
    CT_DoArchTupleValues
    CT_DoKernelTupleValues

    # Finish the target tuple construction
    CT_TARGET="${CT_TARGET_ARCH}"
    CT_TARGET="${CT_TARGET}${CT_TARGET_VENDOR:+-${CT_TARGET_VENDOR}}"
    CT_TARGET="${CT_TARGET}${CT_TARGET_KERNEL:+-${CT_TARGET_KERNEL}}"
    CT_TARGET="${CT_TARGET}${CT_TARGET_SYS:+-${CT_TARGET_SYS}}"

    # Sanity checks
    __sed_alias=""
    if [ -n "${CT_TARGET_ALIAS_SED_EXPR}" ]; then
        __sed_alias=$(echo "${CT_TARGET}" |${sed} -r -e "${CT_TARGET_ALIAS_SED_EXPR}")
    fi
    case ":${CT_TARGET_VENDOR}:${CT_TARGET_ALIAS}:${__sed_alias}:" in
      :*" "*:*:*:) CT_Abort "Don't use spaces in the vendor string, it breaks things.";;
      :*"-"*:*:*:) CT_Abort "Don't use dashes in the vendor string, it breaks things.";;
      :*:*" "*:*:) CT_Abort "Don't use spaces in the target alias, it breaks things.";;
      :*:*:*" "*:) CT_Abort "Don't use spaces in the target sed transform, it breaks things.";;
    esac

    # Canonicalise it
    if [ "${CT_TARGET_SKIP_CONFIG_SUB}" != "y" ]; then
        CT_TARGET=$(CT_DoConfigSub "${CT_TARGET}")
    fi

    # Prepare the target CFLAGS
    CT_ARCH_TARGET_CFLAGS="${CT_ARCH_TARGET_CFLAGS} ${CT_ARCH_ENDIAN_CFLAG}"
    CT_ARCH_TARGET_CFLAGS="${CT_ARCH_TARGET_CFLAGS} ${CT_ARCH_ARCH_CFLAG}"
    CT_ARCH_TARGET_CFLAGS="${CT_ARCH_TARGET_CFLAGS} ${CT_ARCH_ABI_CFLAG}"
    CT_ARCH_TARGET_CFLAGS="${CT_ARCH_TARGET_CFLAGS} ${CT_ARCH_CPU_CFLAG}"
    CT_ARCH_TARGET_CFLAGS="${CT_ARCH_TARGET_CFLAGS} ${CT_ARCH_TUNE_CFLAG}"
    CT_ARCH_TARGET_CFLAGS="${CT_ARCH_TARGET_CFLAGS} ${CT_ARCH_FPU_CFLAG}"
    CT_ARCH_TARGET_CFLAGS="${CT_ARCH_TARGET_CFLAGS} ${CT_ARCH_FLOAT_CFLAG}"

    # Now on for the target LDFLAGS
    CT_ARCH_TARGET_LDFLAGS="${CT_ARCH_TARGET_LDFLAGS} ${CT_ARCH_ENDIAN_LDFLAG}"

    # Now, a multilib quirk. We may not be able to pass CT_ARCH_TARGET_CFLAGS
    # and CT_ARCH_TARGET_LDFLAGS to gcc: even though GCC build appends the multilib
    # flags afterwards, on some architectures the build breaks because some
    # flags do not completely override each other. For example, on mips target,
    # 'gcc -mabi=32' and 'gcc -mabi=n32' both work, but 'gcc -mabi=32 -mabi=n32'
    # triggers an internal linker error. Likely a bug in GNU binutils, but we
    # have to work it around for now: *do not pass the CT_ARCH_TARGET_ flags*.
    # Instead, save them into a different variable here. Then, after the first
    # core pass, we'll know which of them vary with multilibs (i.e. must be
    # filtered out).
    if [ "${CT_MULTILIB}" = "y" ]; then
        CT_ARCH_TARGET_CFLAGS_MULTILIB="${CT_ARCH_TARGET_CFLAGS}"
        CT_ARCH_TARGET_CFLAGS=
        CT_ARCH_TARGET_LDFLAGS_MULTILIB="${CT_ARCH_TARGET_LDFLAGS}"
        CT_ARCH_TARGET_LDFLAGS=
    fi
}

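# Hedged illustration of the tuple this function assembles (values are
# examples only): with CT_ARCH=arm, CT_TARGET_VENDOR=unknown, CT_KERNEL=linux
# and CT_LIBC=glibc, the result, before canonicalisation by config.sub, is:
#   CT_TARGET=arm-unknown-linux-gnu
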
# This function determines the target tuple for a given set of compiler
# flags, using either GCC's multiarch feature (if supported; if not,
# GCC prints nothing and exits with status 0), falling back to calling
# the architecture-specific functions.
CT_DoMultilibTarget() {
    local target="$1"; shift
    local -a multi_flags=( "$@" )
    local gcc_multiarch

    gcc_multiarch=$( "${CT_TARGET}-gcc" -print-multiarch "${multi_flags[@]}" )
    if [ -n "${gcc_multiarch}" ]; then
        echo "${gcc_multiarch}"
        return
    fi

    # Fall back to arch-specific guesswork
    CT_DoArchMultilibTarget "${target}" "${multi_flags[@]}"
}

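# A hedged example (the flag is illustrative):
#   CT_DoMultilibTarget "${CT_TARGET}" -mabi=n32
#       -> echoes whatever '${CT_TARGET}-gcc -print-multiarch -mabi=n32' reports,
#          or defers to CT_DoArchMultilibTarget when gcc prints nothing
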
# This function pauses the build until the user presses "Return"
# Usage: CT_DoPause [optional_message]
CT_DoPause() {
    local foo
    local message="${1:-Pausing for your pleasure}"
    CT_DoLog INFO "${message}"
    read -p "Press 'Enter' to continue, or Ctrl-C to stop..." foo >&6
    return 0
}

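# Example (the message is illustrative):
#   CT_DoPause "About to configure gcc, have a look around"
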
# This function creates a tarball of the specified directory, but
# only if it exists
# Usage: CT_DoTarballIfExists <dir> <tarball_basename> [extra_tar_options [...]]
CT_DoTarballIfExists() {
    local dir="$1"
    local tarball="$2"
    shift 2
    local -a extra_tar_opts=( "$@" )
    local -a compress

    case "${CT_DEBUG_CT_SAVE_STEPS_GZIP}" in
        y)  compress=( gzip -c -3 - ); tar_ext=.gz;;
        *)  compress=( cat - );        tar_ext=;;
    esac

    if [ -d "${dir}" ]; then
        CT_DoLog DEBUG "  Saving '${dir}'"
        { tar c -C "${dir}" -v -f - "${extra_tar_opts[@]}" .   \
          |"${compress[@]}" >"${tarball}.tar${tar_ext}"        ;
        } 2>&1 |${sed} -r -e 's/^/    /;' |CT_DoLog STATE
    else
        CT_DoLog STATE "  Not saving '${dir}': does not exist"
    fi
}

# This function extracts a tarball to the specified directory, but
# only if the tarball exists
# Usage: CT_DoExtractTarballIfExists <tarball_basename> <dir> [extra_tar_options [...]]
CT_DoExtractTarballIfExists() {
    local tarball="$1"
    local dir="$2"
    shift 2
    local -a extra_tar_opts=( "$@" )
    local -a uncompress

    case "${CT_DEBUG_CT_SAVE_STEPS_GZIP}" in
        y)  uncompress=( gzip -c -d ); tar_ext=.gz;;
        *)  uncompress=( cat );        tar_ext=;;
    esac

    if [ -f "${tarball}.tar${tar_ext}" ]; then
        CT_DoLog DEBUG "  Restoring '${dir}'"
        CT_DoForceRmdir "${dir}"
        CT_DoExecLog DEBUG mkdir -p "${dir}"
        { "${uncompress[@]}" "${tarball}.tar${tar_ext}"        \
          |tar x -C "${dir}" -v -f - "${extra_tar_opts[@]}"    ;
        } 2>&1 |${sed} -r -e 's/^/    /;' |CT_DoLog STATE
    else
        CT_DoLog STATE "  Not restoring '${dir}': does not exist"
    fi
}

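# A hedged illustration of the save/restore pair (the destination path is
# illustrative; the calls mirror how CT_DoSaveState/CT_DoLoadState use them):
#   CT_DoTarballIfExists "${CT_PREFIX_DIR}" "${state_dir}/prefix_dir" --exclude '*.log'
#       -> creates ${state_dir}/prefix_dir.tar[.gz]
#   CT_DoExtractTarballIfExists "${state_dir}/prefix_dir" "${CT_PREFIX_DIR}"
#       -> wipes ${CT_PREFIX_DIR} and re-populates it from that tarball
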
# This function saves the state of the toolchain to be able to restart
# at any one point
# Usage: CT_DoSaveState <next_step_name>
CT_DoSaveState() {
    [ "${CT_DEBUG_CT_SAVE_STEPS}" = "y" ] || return 0
    local state_name="$1"
    local state_dir="${CT_STATE_DIR}/${state_name}"

    CT_DoLog INFO "Saving state to restart at step '${state_name}'..."

    rm -rf "${state_dir}"
    mkdir -p "${state_dir}"

    CT_DoLog STATE "  Saving environment and aliases"
    # We must omit shell functions, and some specific bash variables
    # that break when restoring the environment, later. We could do
    # all the processing in the awk script, but a sed is easier...
    set |${awk} '
             BEGIN { _p = 1; }
             $0~/^[^ ]+ \(\)/ { _p = 0; }
             _p == 1
             $0 == "}" { _p = 1; }
            ' |${sed} -r -e '/^BASH_(ARGC|ARGV|LINENO|SOURCE|VERSINFO)=/d;
                             /^(UID|EUID)=/d;
                             /^(FUNCNAME|GROUPS|PPID|SHELLOPTS)=/d;' >"${state_dir}/env.sh"

    CT_DoTarballIfExists "${CT_BUILDTOOLS_PREFIX_DIR}" "${state_dir}/buildtools_dir"
    CT_DoTarballIfExists "${CT_CONFIG_DIR}" "${state_dir}/config_dir"
    CT_DoTarballIfExists "${CT_PREFIX_DIR}" "${state_dir}/prefix_dir" --exclude '*.log'

    CT_DoLog STATE "  Saving log file"
    exec >/dev/null
    case "${CT_DEBUG_CT_SAVE_STEPS_GZIP}" in
        y)  gzip -3 -c "${tmp_log_file}" >"${state_dir}/log.gz";;
        *)  cat "${tmp_log_file}" >"${state_dir}/log";;
    esac
    exec >>"${tmp_log_file}"
}

# This function restores a previously saved state
# Usage: CT_DoLoadState <state_name>
CT_DoLoadState() {
    local state_name="$1"
    local state_dir="${CT_STATE_DIR}/${state_name}"
    local old_RESTART="${CT_RESTART}"
    local old_STOP="${CT_STOP}"

    CT_TestOrAbort "The previous build did not reach the point where it could be restarted at '${CT_RESTART}'" -d "${state_dir}"

    # We need to do something special with the log file!
    if [ "${CT_LOG_TO_FILE}" = "y" ]; then
        exec >"${state_dir}/tail.log"
    fi

    CT_DoLog INFO "Restoring state at step '${state_name}', as requested."

    CT_DoExtractTarballIfExists "${state_dir}/prefix_dir" "${CT_PREFIX_DIR}"
    CT_DoExtractTarballIfExists "${state_dir}/config_dir" "${CT_CONFIG_DIR}"
    CT_DoExtractTarballIfExists "${state_dir}/buildtools_dir" "${CT_BUILDTOOLS_PREFIX_DIR}"

    # Restore the environment, discarding any error message
    # (for example, read-only bash internals)
    CT_DoLog STATE "  Restoring environment"
    . "${state_dir}/env.sh" >/dev/null 2>&1 || true

    # Restore the new RESTART and STOP steps
    CT_RESTART="${old_RESTART}"
    CT_STOP="${old_STOP}"
    unset old_RESTART old_STOP

    CT_DoLog STATE "  Restoring log file"
    exec >/dev/null
    case "${CT_DEBUG_CT_SAVE_STEPS_GZIP}" in
        y)  gzip -dc "${state_dir}/log.gz" >"${tmp_log_file}";;
        *)  cat "${state_dir}/log" >"${tmp_log_file}";;
    esac
    cat "${state_dir}/tail.log" >>"${tmp_log_file}"
    exec >>"${tmp_log_file}"
    rm -f "${state_dir}/tail.log"
}

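# Hedged usage note: these two functions are normally driven by the
# CT_DEBUG_CT_SAVE_STEPS / CT_RESTART machinery; called by hand they would
# look like (the step name is illustrative):
#   CT_DoSaveState "libc_main"
#   CT_DoLoadState "libc_main"
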
# This function sets a kconfig option to a specific value in a .config file
# Usage: CT_KconfigSetOption <option> <value> <file>
CT_KconfigSetOption() {
    option="$1"
    value="$2"
    file="$3"

    ${grep} -E -q "^${option}=.*" "${file}" && \
        ${sed} -i -r -e "s;^${option}=.*$;${option}=${value};" "${file}" || \
    ${grep} -E -q "^# ${option} is not set$" "${file}" && \
        ${sed} -i -r -e "s;^# ${option} is not set$;${option}=${value};" "${file}" || \
    echo "${option}=${value}" >> "${file}"
}

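# A hedged example (the option name and file are illustrative):
#   CT_KconfigSetOption "UCLIBC_HAS_WCHAR" "y" "${munge_file}"
#       -> rewrites an existing 'UCLIBC_HAS_WCHAR=...' or
#          '# UCLIBC_HAS_WCHAR is not set' line, or appends
#          'UCLIBC_HAS_WCHAR=y' if the option is absent
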
# This function enables a kconfig option to '=y' in a .config file
# Usage: CT_KconfigEnableOption <option> <file>
CT_KconfigEnableOption() {
    option="$1"
    file="$2"

    CT_KconfigSetOption "${option}" "y" "${file}"
}

# This function disables a kconfig option in a .config file
# Usage: CT_KconfigDisableOption <option> <file>
CT_KconfigDisableOption() {
    option="${1}"
    file="${2}"

    ${grep} -E -q "^# ${option} is not set$" "${file}" || \
    ${grep} -E -q "^${option}=.*$" "${file}" && \
        ${sed} -i -r -e "s;^${option}=.*$;# ${option} is not set;" "${file}" || \
    echo "# ${option} is not set" >> "${file}"
}

# This function deletes a kconfig option in a .config file, no matter if it
# is set or commented out.
# Usage: CT_KconfigDeleteOption <option> <file>
CT_KconfigDeleteOption() {
    option="${1}"
    file="${2}"

    ${grep} -E -q "^# ${option} is not set$" "${file}" && \
        ${sed} -i -r -e "/^# ${option} is not set$/d" "${file}" || \
    ${grep} -E -q "^${option}=.*$" "${file}" && \
        ${sed} -i -r -e "/^${option}=.*$/d" "${file}" || true
}

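# Hedged examples for the three helpers above (option and file illustrative):
#   CT_KconfigEnableOption  "UCLIBC_HAS_IPV6" "${config}"   # -> UCLIBC_HAS_IPV6=y
#   CT_KconfigDisableOption "UCLIBC_HAS_IPV6" "${config}"   # -> # UCLIBC_HAS_IPV6 is not set
#   CT_KconfigDeleteOption  "UCLIBC_HAS_IPV6" "${config}"   # -> the line is removed entirely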