From 48449ee694d0fa0b3b6b3d611d125c7bc15c978c Mon Sep 17 00:00:00 2001
From: Saifeddine ALOUI
Date: Fri, 22 Sep 2023 00:43:28 +0200
Subject: [PATCH] updated installs

---
 scripts/wsl/install_script.sh | 21 ++++++++++++++++++---
 scripts/wsl/petals_server.sh  | 11 ++++++++++-
 2 files changed, 28 insertions(+), 4 deletions(-)

diff --git a/scripts/wsl/install_script.sh b/scripts/wsl/install_script.sh
index 34ffc151..9b743427 100644
--- a/scripts/wsl/install_script.sh
+++ b/scripts/wsl/install_script.sh
@@ -18,6 +18,8 @@ wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/
 bash ~/miniconda.sh -b -p ~/miniconda
 rm ~/miniconda.sh
 source ~/miniconda/etc/profile.d/conda.sh
+#make it permanent
+echo 'source ~/miniconda/etc/profile.d/conda.sh' >> ~/.bashrc
 
 # Clone the git repository
 git clone https://github.com/ParisNeo/lollms-webui.git ~/lollms-webui
@@ -28,12 +30,25 @@ conda create --prefix ./env python=3.10 pip -y
 conda activate ./env
 
 # install cuda
-conda install -c anaconda cudatoolkit==11.7
-export LD_LIBRARY_PATH=/path/to/directory:$LD_LIBRARY_PATH
+wget https://developer.download.nvidia.com/compute/cuda/repos/wsl-ubuntu/x86_64/cuda-wsl-ubuntu.pin
+sudo mv cuda-wsl-ubuntu.pin /etc/apt/preferences.d/cuda-repository-pin-600
+sudo apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/wsl-ubuntu/x86_64/3bf863cc.pub
+sudo add-apt-repository "deb https://developer.download.nvidia.com/compute/cuda/repos/wsl-ubuntu/x86_64/ /"
+sudo apt-get update
+sudo apt-get -y install cuda
+# Add cuda to the path
+export PATH=/usr/local/cuda/bin:$PATH
 #make it permanant
-echo "export LD_LIBRARY_PATH=\"\$LD_LIBRARY_PATH:$new_path\"" >> ~/.bashrc
+echo 'export PATH=/usr/local/cuda/bin:$PATH' >> ~/.bashrc
+export LD_LIBRARY_PATH=/usr/local/cuda-12.2/targets/x86_64-linux/lib/:$LD_LIBRARY_PATH
+#make it permanent
+echo 'export LD_LIBRARY_PATH=/usr/local/cuda-12.2/targets/x86_64-linux/lib/:$LD_LIBRARY_PATH' >> ~/.bashrc
 # Install requirements
 pip install -r requirements.txt
+# by default ubuntu will start in lollms-webui path
+echo 'cd ~/lollms-webui' >> ~/.bashrc
+# Add automatic conda activate
+echo 'conda activate ./env' >> ~/.bashrc
 
 # Exit WSL
 exit
diff --git a/scripts/wsl/petals_server.sh b/scripts/wsl/petals_server.sh
index e22bf41f..49433e24 100644
--- a/scripts/wsl/petals_server.sh
+++ b/scripts/wsl/petals_server.sh
@@ -8,4 +8,13 @@ echo activating environment
 conda activate ./env
 echo running server
 # Run petals server
-python3 -m petals.cli.run_server petals-team/StableBeluga2
+# Prompt the user for a model name
+read -p "Enter the model name (press Enter for default petals-team/StableBeluga2): " modelName
+
+# Use the default model name if no input is provided
+if [ -z "$modelName" ]; then
+    modelName="petals-team/StableBeluga2"
+fi
+
+# Run the Python command with the chosen model name
+python3 -m petals.cli.run_server "$modelName"