mirror of
https://github.com/mudler/LocalAI.git
synced 2024-12-19 04:37:53 +00:00
949da7792d
* deps(conda): use transformers with vllm * join vllm, exllama, exllama2, split petals
17 lines
345 B
Bash
Executable File
17 lines
345 B
Bash
Executable File
#!/bin/bash
# Wrapper that launches the exllama2 backend inside the "transformers"
# conda environment. Any arguments are forwarded verbatim to the backend.
#
# Exit early if any setup step (PATH, conda activation, cd) fails, so we
# never launch the backend in a half-configured environment.
# NOTE: only -e here; conda's activate scripts are known to reference
# unset variables and would break under -u.
set -e

export PATH=$PATH:/opt/conda/bin

# Activate conda environment
source activate transformers

# get the directory where the bash script is located
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# Quote $DIR (may contain spaces) and fail loudly if the cd does not work.
cd "$DIR" || exit 1

# exec replaces this shell with python so signals from the supervisor
# reach the backend directly; "$@" preserves argument word boundaries.
exec python "$DIR/exllama2_backend.py" "$@"