#!/bin/bash
# Launch a Petals server node from the lollms-webui project.
# Prompts for a Hugging Face model name (default: petals-team/StableBeluga2)
# and runs it via petals.cli.run_server, advertised under the lollms-webui URL.
#
# Requirements: ~/lollms-webui checkout, miniconda at ~/miniconda, and a
# conda env at ./env inside the project.

# Fail fast on errors and failed pipeline stages. -u is deliberately omitted:
# conda's activation scripts may reference unset variables.
set -eo pipefail

echo "changing dir to lollms-webui"
cd ~/lollms-webui || { echo "error: ~/lollms-webui not found" >&2; exit 1; }

# Activate the project-local conda environment.
echo "sourcing miniconda"
source ~/miniconda/etc/profile.d/conda.sh

echo "activating environment"
conda activate ./env

echo "running server"

# Prompt the user for a model name.
# -r keeps backslashes literal (SC2162); '|| true' preserves the original
# behavior on EOF (empty input -> default model) despite set -e.
read -rp "Enter the model name (press Enter for default petals-team/StableBeluga2): " modelName || true

# Use the default model name if no input is provided.
modelName="${modelName:-petals-team/StableBeluga2}"

# Run the Petals server with the chosen model name.
python3 -m petals.cli.run_server "$modelName" --public_name https://github.com/ParisNeo/lollms-webui