mirror of
https://github.com/mudler/LocalAI.git
synced 2024-12-23 14:32:25 +00:00
949da7792d
* deps(conda): use transformers with vllm * join vllm, exllama, exllama2, split petals
24 lines
516 B
Bash
Executable File
24 lines
516 B
Bash
Executable File
#!/bin/bash
# Wrapper that activates the 'petals' conda environment and launches the
# petals backend, forwarding all command-line arguments to it.
set -e

##
## A bash script wrapper that runs the petals server with conda
##

# Make the conda binaries discoverable.
export PATH=$PATH:/opt/conda/bin

CONDA_ENV=petals

# Activate conda environment.
# If the legacy 'activate' script is available use it, otherwise initialize
# conda for this shell session and use 'conda activate'.
if [ -f /opt/conda/bin/activate ]; then
    # shellcheck disable=SC1091
    source activate "$CONDA_ENV"
else
    eval "$(conda shell.bash hook)"
    conda activate "$CONDA_ENV"
fi

# Get the directory where this script is located, so the backend can be
# found regardless of the caller's working directory.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# exec replaces the shell so signals (SIGTERM, etc.) reach the backend
# directly; quote "$DIR" and "$@" so paths/args with spaces survive.
exec python "$DIR/backend_petals.py" "$@"