fix(aio): correctly detect intel systems (#1931)

Also rename SIZE to PROFILE
This commit is contained in:
Ettore Di Giacinto 2024-03-30 12:04:32 +01:00 committed by GitHub
parent 2bba62ca4d
commit eab4a91a9b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 72 additions and 49 deletions

View File

@ -5,28 +5,8 @@ echo "===> LocalAI All-in-One (AIO) container starting..."
GPU_ACCELERATION=false GPU_ACCELERATION=false
GPU_VENDOR="" GPU_VENDOR=""
function detect_gpu() { function check_intel() {
case "$(uname -s)" in if lspci | grep -E 'VGA|3D' | grep -iq intel; then
Linux)
if lspci | grep -E 'VGA|3D' | grep -iq nvidia; then
echo "NVIDIA GPU detected"
# nvidia-smi should be installed in the container
if nvidia-smi; then
GPU_ACCELERATION=true
GPU_VENDOR=nvidia
else
echo "NVIDIA GPU detected, but nvidia-smi is not installed. GPU acceleration will not be available."
fi
elif lspci | grep -E 'VGA|3D' | grep -iq amd; then
echo "AMD GPU detected"
# Check if ROCm is installed
if [ -d /opt/rocm ]; then
GPU_ACCELERATION=true
GPU_VENDOR=amd
else
echo "AMD GPU detected, but ROCm is not installed. GPU acceleration will not be available."
fi
elif lspci | grep -E 'VGA|3D' | grep -iq intel; then
echo "Intel GPU detected" echo "Intel GPU detected"
if [ -d /opt/intel ]; then if [ -d /opt/intel ]; then
GPU_ACCELERATION=true GPU_ACCELERATION=true
@ -34,7 +14,11 @@ function detect_gpu() {
else else
echo "Intel GPU detected, but Intel GPU drivers are not installed. GPU acceleration will not be available." echo "Intel GPU detected, but Intel GPU drivers are not installed. GPU acceleration will not be available."
fi fi
elif lspci | grep -E 'VGA|3D' | grep -iq "Microsoft Corporation Device 008e"; then fi
}
function check_nvidia_wsl() {
if lspci | grep -E 'VGA|3D' | grep -iq "Microsoft Corporation Device 008e"; then
# We make the assumption this WSL2 card is NVIDIA, then check for nvidia-smi # We make the assumption this WSL2 card is NVIDIA, then check for nvidia-smi
# Make sure the container was run with `--gpus all` as the only required parameter # Make sure the container was run with `--gpus all` as the only required parameter
echo "NVIDIA GPU detected via WSL2" echo "NVIDIA GPU detected via WSL2"
@ -46,13 +30,52 @@ function detect_gpu() {
echo "NVIDIA GPU detected via WSL2, but nvidia-smi is not installed. GPU acceleration will not be available." echo "NVIDIA GPU detected via WSL2, but nvidia-smi is not installed. GPU acceleration will not be available."
fi fi
fi fi
;; }
Darwin)
# Probe lspci for an AMD display adapter (VGA/3D class).
# Globals:  GPU_ACCELERATION, GPU_VENDOR (written when ROCm is usable)
# Outputs:  status messages on stdout
# Returns:  0 always (absence of an AMD GPU is not an error)
function check_amd() {
  lspci | grep -E 'VGA|3D' | grep -iq amd || return 0
  echo "AMD GPU detected"
  # Acceleration only works when the ROCm stack is installed in the image.
  if [ -d /opt/rocm ]; then
    GPU_ACCELERATION=true
    GPU_VENDOR=amd
  else
    echo "AMD GPU detected, but ROCm is not installed. GPU acceleration will not be available."
  fi
}
# Probe lspci for an NVIDIA display adapter (VGA/3D class) and verify the
# driver tooling actually works before enabling acceleration.
# Globals:  GPU_ACCELERATION, GPU_VENDOR (written on success)
# Outputs:  status messages on stdout
# Returns:  0 always (absence of an NVIDIA GPU is not an error)
function check_nvidia() {
  if lspci | grep -E 'VGA|3D' | grep -iq nvidia; then
    echo "NVIDIA GPU detected"
    # nvidia-smi should be installed in the container. Suppress its table
    # output so the startup log stays readable; we only need its exit code.
    # It fails both when it is missing AND when the host driver is
    # unavailable/mismatched, so the message must not claim "not installed".
    if nvidia-smi >/dev/null 2>&1; then
      GPU_ACCELERATION=true
      GPU_VENDOR=nvidia
    else
      echo "NVIDIA GPU detected, but nvidia-smi failed (driver not available or tool not installed). GPU acceleration will not be available."
    fi
  fi
}
# Detect an Apple GPU with Metal support (macOS only; uses system_profiler).
# Globals:  GPU_ACCELERATION, GPU_VENDOR (written on detection)
# Outputs:  status message on stdout
# Returns:  0 always
function check_metal() {
  system_profiler SPDisplaysDataType | grep -iq 'Metal' || return 0
  echo "Apple Metal supported GPU detected"
  GPU_ACCELERATION=true
  GPU_VENDOR=apple
}
# Run the per-vendor GPU probes appropriate for the host OS.
# On Linux every probe runs (a machine may expose several adapters, and the
# WSL2 check only matches its specific virtual device); on macOS only Metal
# detection applies. Probes set GPU_ACCELERATION / GPU_VENDOR as side effects.
function detect_gpu() {
  local os
  os="$(uname -s)"
  if [ "$os" = "Linux" ]; then
    check_nvidia
    check_amd
    check_intel
    check_nvidia_wsl
  elif [ "$os" = "Darwin" ]; then
    check_metal
  fi
}
@ -96,8 +119,8 @@ function check_vars() {
exit 1 exit 1
fi fi
if [ -z "$SIZE" ]; then if [ -z "$PROFILE" ]; then
echo "SIZE environment variable is not set. Please set it to one of the following: cpu, gpu-8g, gpu-16g, apple" echo "PROFILE environment variable is not set. Please set it to one of the following: cpu, gpu-8g, gpu-16g, apple"
exit 1 exit 1
fi fi
} }
@ -105,11 +128,11 @@ function check_vars() {
detect_gpu detect_gpu
detect_gpu_size detect_gpu_size
SIZE="${SIZE:-$GPU_SIZE}" # default to cpu PROFILE="${PROFILE:-$GPU_SIZE}" # default to cpu
export MODELS="${MODELS:-/aio/${SIZE}/embeddings.yaml,/aio/${SIZE}/text-to-speech.yaml,/aio/${SIZE}/image-gen.yaml,/aio/${SIZE}/text-to-text.yaml,/aio/${SIZE}/speech-to-text.yaml,/aio/${SIZE}/vision.yaml}" export MODELS="${MODELS:-/aio/${PROFILE}/embeddings.yaml,/aio/${PROFILE}/text-to-speech.yaml,/aio/${PROFILE}/image-gen.yaml,/aio/${PROFILE}/text-to-text.yaml,/aio/${PROFILE}/speech-to-text.yaml,/aio/${PROFILE}/vision.yaml}"
check_vars check_vars
echo "Starting LocalAI with the following models: $MODELS" echo "===> Starting LocalAI[$PROFILE] with the following models: $MODELS"
/build/entrypoint.sh "$@" /build/entrypoint.sh "$@"

View File

@ -46,7 +46,7 @@ The AIO Images are inheriting the same environment variables as the base images
| Variable | Default | Description | | Variable | Default | Description |
| ---------------------| ------- | ----------- | | ---------------------| ------- | ----------- |
| `SIZE` | Auto-detected | The size of the model to use. Available: `cpu`, `gpu-8g` | | `PROFILE` | Auto-detected | The size of the model to use. Available: `cpu`, `gpu-8g` |
| `MODELS` | Auto-detected | A list of models YAML Configuration file URI/URL (see also [running models]({{%relref "docs/getting-started/run-other-models" %}})) | | `MODELS` | Auto-detected | A list of models YAML Configuration file URI/URL (see also [running models]({{%relref "docs/getting-started/run-other-models" %}})) |