fixed vllm service

Saifeddine ALOUI 2024-02-19 23:27:34 +01:00
parent 2fb9f39082
commit 3a3e58e28f
3 changed files with 3 additions and 3 deletions

@@ -17,6 +17,6 @@ export PATH
 echo "Installing vllm"
 $HOME/miniconda3/bin/conda create -n vllm python=3.9 -y
 echo "Activating vllm environment"
-$HOME/miniconda3/bin/conda activate vllm
+source activate vllm
 pip install vllm
 echo "Done"

@@ -105,7 +105,7 @@ class Service:
         # run vllm
         if platform.system() == 'Windows':
-            subprocess.Popen(['wsl', 'bash', '~/run_vllm.sh '])
+            subprocess.Popen(['wsl', 'bash', '$HOME/run_vllm.sh '])
         else:
             subprocess.Popen(['bash', f'{Path.home()}/run_vllm.sh'])

@@ -3,7 +3,7 @@
 cd ~/vllm
 PATH="$HOME/miniconda3/bin:$PATH"
 export PATH
-$HOME/miniconda3/bin/conda activate vllm && python -m vllm.entrypoints.openai.api_server --model %1
+source activate vllm && python -m vllm.entrypoints.openai.api_server --model %1
# Wait for all background processes to finish
 wait
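
The same activation fix lands in the launch script. One caveat worth noting: %1 is cmd.exe batch syntax, while in a bash script the first positional argument is $1, so as written the literal text %1 is what gets passed to --model. A minimal corrected sketch of the launch line, assuming the script is meant to receive the model name as its first argument:

    # Activate the environment and start vLLM's OpenAI-compatible server.
    # "$1" is the script's first argument (bash syntax; %1 is batch syntax).
    source activate vllm && python -m vllm.entrypoints.openai.api_server --model "$1"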