mirror of
https://github.com/mudler/LocalAI.git
synced 2024-12-19 20:57:54 +00:00
fix(aio): correctly detect intel systems (#1931)
Also rename SIZE to PROFILE
This commit is contained in:
parent
2bba62ca4d
commit
eab4a91a9b
@ -5,54 +5,77 @@ echo "===> LocalAI All-in-One (AIO) container starting..."
|
|||||||
# Global detection state, written by the check_* probes below and read by
# the profile-selection logic later in the script.
GPU_ACCELERATION=false   # true once a usable GPU + driver stack is found
GPU_VENDOR=""            # one of: nvidia, amd, intel, apple (empty = none)
# Detect an Intel GPU via lspci and enable acceleration only when the Intel
# driver stack is present in the container (checked via /opt/intel).
# Globals written: GPU_ACCELERATION, GPU_VENDOR
function check_intel() {
  if lspci | grep -E 'VGA|3D' | grep -iq intel; then
    echo "Intel GPU detected"
    if [ -d /opt/intel ]; then
      GPU_ACCELERATION=true
      GPU_VENDOR=intel
    else
      echo "Intel GPU detected, but Intel GPU drivers are not installed. GPU acceleration will not be available."
    fi
  fi
}
|
||||||
|
|
||||||
|
# Detect an NVIDIA GPU passed through to WSL2. WSL2 exposes the GPU as a
# Microsoft virtual device (008e) rather than an NVIDIA PCI ID.
# Globals written: GPU_ACCELERATION, GPU_VENDOR
function check_nvidia_wsl() {
  if lspci | grep -E 'VGA|3D' | grep -iq "Microsoft Corporation Device 008e"; then
    # We make the assumption this WSL2 card is NVIDIA, then check for nvidia-smi
    # Make sure the container was run with `--gpus all` as the only required parameter
    echo "NVIDIA GPU detected via WSL2"
    # nvidia-smi should be installed in the container
    if nvidia-smi; then
      GPU_ACCELERATION=true
      GPU_VENDOR=nvidia
    else
      echo "NVIDIA GPU detected via WSL2, but nvidia-smi is not installed. GPU acceleration will not be available."
    fi
  fi
}
|
||||||
|
|
||||||
|
# Detect an AMD GPU via lspci and enable acceleration only when ROCm is
# installed in the container (checked via /opt/rocm).
# Globals written: GPU_ACCELERATION, GPU_VENDOR
function check_amd() {
  if lspci | grep -E 'VGA|3D' | grep -iq amd; then
    echo "AMD GPU detected"
    # Check if ROCm is installed
    if [ -d /opt/rocm ]; then
      GPU_ACCELERATION=true
      GPU_VENDOR=amd
    else
      echo "AMD GPU detected, but ROCm is not installed. GPU acceleration will not be available."
    fi
  fi
}
|
||||||
|
|
||||||
|
# Detect a native NVIDIA GPU via lspci and confirm the driver stack works
# by running nvidia-smi.
# Globals written: GPU_ACCELERATION, GPU_VENDOR
function check_nvidia() {
  if lspci | grep -E 'VGA|3D' | grep -iq nvidia; then
    echo "NVIDIA GPU detected"
    # nvidia-smi should be installed in the container
    if nvidia-smi; then
      GPU_ACCELERATION=true
      GPU_VENDOR=nvidia
    else
      echo "NVIDIA GPU detected, but nvidia-smi is not installed. GPU acceleration will not be available."
    fi
  fi
}
|
||||||
|
|
||||||
|
# Detect an Apple Metal-capable GPU on macOS via system_profiler.
# Globals written: GPU_ACCELERATION, GPU_VENDOR
function check_metal() {
  if system_profiler SPDisplaysDataType | grep -iq 'Metal'; then
    echo "Apple Metal supported GPU detected"
    GPU_ACCELERATION=true
    GPU_VENDOR=apple
  fi
}
|
||||||
|
|
||||||
# Dispatch to the per-vendor GPU probes appropriate for the host OS.
# On Linux every probe runs; the last probe that succeeds wins (so the
# WSL2 check can override a missed native NVIDIA detection).
# Globals written (via the probes): GPU_ACCELERATION, GPU_VENDOR
function detect_gpu() {
  case "$(uname -s)" in
    Linux)
      check_nvidia
      check_amd
      check_intel
      check_nvidia_wsl
      ;;
    Darwin)
      check_metal
      ;;
  esac
}
|
||||||
@ -96,8 +119,8 @@ function check_vars() {
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ -z "$SIZE" ]; then
|
if [ -z "$PROFILE" ]; then
|
||||||
echo "SIZE environment variable is not set. Please set it to one of the following: cpu, gpu-8g, gpu-16g, apple"
|
echo "PROFILE environment variable is not set. Please set it to one of the following: cpu, gpu-8g, gpu-16g, apple"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
@ -105,11 +128,11 @@ function check_vars() {
|
|||||||
detect_gpu
|
detect_gpu
|
||||||
detect_gpu_size
|
detect_gpu_size
|
||||||
|
|
||||||
SIZE="${SIZE:-$GPU_SIZE}" # default to cpu
|
PROFILE="${PROFILE:-$GPU_SIZE}" # default to cpu
|
||||||
export MODELS="${MODELS:-/aio/${SIZE}/embeddings.yaml,/aio/${SIZE}/text-to-speech.yaml,/aio/${SIZE}/image-gen.yaml,/aio/${SIZE}/text-to-text.yaml,/aio/${SIZE}/speech-to-text.yaml,/aio/${SIZE}/vision.yaml}"
|
export MODELS="${MODELS:-/aio/${PROFILE}/embeddings.yaml,/aio/${PROFILE}/text-to-speech.yaml,/aio/${PROFILE}/image-gen.yaml,/aio/${PROFILE}/text-to-text.yaml,/aio/${PROFILE}/speech-to-text.yaml,/aio/${PROFILE}/vision.yaml}"
|
||||||
|
|
||||||
check_vars
|
check_vars
|
||||||
|
|
||||||
echo "Starting LocalAI with the following models: $MODELS"
|
echo "===> Starting LocalAI[$PROFILE] with the following models: $MODELS"
|
||||||
|
|
||||||
/build/entrypoint.sh "$@"
|
/build/entrypoint.sh "$@"
|
||||||
|
@ -46,7 +46,7 @@ The AIO Images are inheriting the same environment variables as the base images
|
|||||||
|
|
||||||
| Variable | Default | Description |
|
| Variable | Default | Description |
|
||||||
| ---------------------| ------- | ----------- |
|
| ---------------------| ------- | ----------- |
|
||||||
| `SIZE` | Auto-detected | The size of the model to use. Available: `cpu`, `gpu-8g` |
|
| `PROFILE` | Auto-detected | The size of the model to use. Available: `cpu`, `gpu-8g`, `gpu-16g`, `apple` |
|
||||||
| `MODELS` | Auto-detected | A list of models YAML Configuration file URI/URL (see also [running models]({{%relref "docs/getting-started/run-other-models" %}})) |
|
| `MODELS` | Auto-detected | A list of models YAML Configuration file URI/URL (see also [running models]({{%relref "docs/getting-started/run-other-models" %}})) |
|
||||||
|
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user