#!/bin/bash
set -e

cd /build
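
# Usage sketch for EXTRA_BACKENDS (illustrative paths only, not verified against
# the source tree), e.g.:
#   EXTRA_BACKENDS="backend/python/diffusers backend/python/vllm"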
# If EXTRA_BACKENDS is set, prepare each listed backend before starting.
if [ -n "$EXTRA_BACKENDS" ]; then
    echo "EXTRA_BACKENDS: $EXTRA_BACKENDS"
    # Space-separated list of backend directories
    for backend in $EXTRA_BACKENDS; do
        echo "Preparing backend: $backend"
        make -C "$backend"
    done
fi

if [ "$REBUILD" != "false" ]; then
    rm -rf ./local-ai
    make build -j${BUILD_PARALLELISM:-1}
else
    echo "@@@@@"
    echo "Skipping rebuild"
    echo "@@@@@"
    echo "If you are experiencing issues with the pre-compiled builds, try setting REBUILD=true"
    echo "If you are still experiencing issues with the build, try setting CMAKE_ARGS and disabling the instruction sets as needed:"
    echo 'CMAKE_ARGS="-DGGML_F16C=OFF -DGGML_AVX512=OFF -DGGML_AVX2=OFF -DGGML_FMA=OFF"'
    echo "See the documentation at: https://localai.io/basics/build/index.html"
    echo "Note: see also https://github.com/go-skynet/LocalAI/issues/288"
    echo "@@@@@"
    echo "CPU info:"
    grep -e "model\sname" /proc/cpuinfo | head -1
    grep -e "flags" /proc/cpuinfo | head -1
    if grep -q -e "\savx\s" /proc/cpuinfo ; then
        echo "CPU: AVX found OK"
    else
        echo "CPU: no AVX found"
    fi
    if grep -q -e "\savx2\s" /proc/cpuinfo ; then
        echo "CPU: AVX2 found OK"
    else
        echo "CPU: no AVX2 found"
    fi
    if grep -q -e "\savx512" /proc/cpuinfo ; then
        echo "CPU: AVX512 found OK"
    else
        echo "CPU: no AVX512 found"
    fi
    echo "@@@@@"
fi

exec ./local-ai "$@"
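
# Usage sketch (the image name, tag, and port below are illustrative assumptions,
# not taken from this script): the variables referenced above are typically passed
# into the container environment at run time, e.g.
#   docker run -e REBUILD=true \
#              -e BUILD_PARALLELISM=4 \
#              -e CMAKE_ARGS="-DGGML_AVX512=OFF -DGGML_FMA=OFF" \
#              -p 8080:8080 localai/localai:latest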