Mirror of https://github.com/mudler/LocalAI.git (synced 2024-12-19 12:47:54 +00:00)
Commit 5d1018495f: feat(intel): add diffusers support

* try to consume upstream container image
* Debug
* Manually install deps
* Map transformers/hf cache dir to modelpath if not specified
* fix(compel): update initialization, pass by all gRPC options
* fix: add dependencies, implement transformers for xpu
* base it from the oneapi image
* Add pillow
* set threads if specified when launching the API
* Skip conda install if intel
* defaults to non-intel
* ci: add to pipelines
* prepare compel only if enabled
* Skip conda install if intel
* fix cleanup
* Disable compel by default
* Install torch 2.1.0 with Intel
* Skip conda on some setups
* Detect python
* Quiet output
* Do not override system python with conda
* Prefer python3
* Fixups
* exllama2: do not install without conda (overrides pytorch version)
* exllama/exllama2: do not install if not using cuda
* Add missing dataset dependency
* Small fixups, symlink to python, add requirements
* Add neural_speed to the deps
* correctly handle model offloading
* fix: device_map == xpu
* go back at calling python, fixed at dockerfile level
* Exllama2 restricted to only nvidia gpus
* Tokenizer to xpu
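Several of the bullets above ("Detect python", "Prefer python3", "Do not override system python with conda") concern picking the right interpreter on the Intel image, where conda is skipped. A minimal sketch of that kind of detection, shown as an illustration rather than the exact code shipped in the PR:

# Illustrative sketch only (not the code from this PR): prefer python3,
# fall back to python, and fail loudly if neither is on the PATH.
if command -v python3 >/dev/null 2>&1; then
    PYTHON=python3
elif command -v python >/dev/null 2>&1; then
    PYTHON=python
else
    echo "No python interpreter found" >&2
    exit 1
fi
"$PYTHON" --version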
38 lines
1022 B
Bash
#!/bin/bash
set -ex

SKIP_CONDA=${SKIP_CONDA:-0}

# Check whether the named conda environment already exists.
# Note: the leading "!" inverts the check, so the function succeeds
# when the environment does NOT exist yet.
conda_env_exists(){
    ! conda list --name "${@}" >/dev/null 2>/dev/null
}

if [ "$SKIP_CONDA" -eq 1 ]; then
    echo "Skipping conda environment installation"
else
    export PATH=$PATH:/opt/conda/bin
    if conda_env_exists "transformers" ; then
        echo "Creating virtual environment..."
        conda env create --name transformers --file "$1"
        echo "Virtual environment created."
    else
        echo "Virtual environment already exists."
    fi
fi

if [ -d "/opt/intel" ]; then
    # Intel GPU: if the directory exists, we assume we are running on the Intel
    # oneAPI image (no conda env).
    # https://github.com/intel/intel-extension-for-pytorch/issues/538
    pip install intel-extension-for-transformers datasets sentencepiece tiktoken neural_speed
fi

if [ "$PIP_CACHE_PURGE" = true ] ; then
    if [ "$SKIP_CONDA" -eq 0 ]; then
        # Activate the conda environment so the right pip cache is purged
        source activate transformers
    fi

    pip cache purge
fi
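For context on how the environment variables above are meant to be combined, here is a hypothetical invocation of this installer; the script and environment-file names are assumptions, not taken from the repository:

# Default path: create the "transformers" conda env from a YAML spec,
# then purge the pip cache afterwards.
PIP_CACHE_PURGE=true bash install.sh transformers.yml

# Intel oneAPI image: skip conda entirely; only the Intel-specific
# pip packages are installed.
SKIP_CONDA=1 bash install.sh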