upgraded scripts

Saifeddine ALOUI 2023-08-31 08:42:13 +02:00
parent 4c3d114d36
commit 09f6650c12
5 changed files with 131 additions and 4 deletions

View File

@@ -0,0 +1,37 @@
#!/bin/bash
echo Starting LOLLMS Web UI...
echo " ___ ___ ___ ___ ___ ___ "
echo " /\__\ /\ \ /\__\ /\__\ /\__\ /\ \ "
echo " /:/ / /::\ \ /:/ / /:/ / /::| | /::\ \ "
echo " /:/ / /:/\:\ \ /:/ / /:/ / /:|:| | /:/\ \ \ "
echo " /:/ / /:/ \:\ \ /:/ / /:/ / /:/|:|__|__ _\:\~\ \ \ "
echo " /:/__/ /:/__/ \:\__\ /:/__/ /:/__/ /:/ |::::\__\ /\ \:\ \ \__\ "
echo " \:\ \ \:\ \ /:/ / \:\ \ \:\ \ \/__/~~/:/ / \:\ \:\ \/__/ "
echo " \:\ \ \:\ /:/ / \:\ \ \:\ \ /:/ / \:\ \:\__\ "
echo " \:\ \ \:\/:/ / \:\ \ \:\ \ /:/ / \:\/:/ / "
echo " \:\__\ \::/ / \:\__\ \:\__\ /:/ / \::/ / "
echo " \/__/ \/__/ \/__/ \/__/ \/__/ \/__/ "
echo " By ParisNeo"
cd "$(dirname "$0")"
# better isolation for virtual environment
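# (clearing CONDA_SHLVL, PYTHONPATH and PYTHONHOME and setting PYTHONNOUSERSITE=1
#  keeps system-wide and user-site Python packages from leaking into the env)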
CONDA_SHLVL=""
PYTHONNOUSERSITE=1
PYTHONPATH=""
PYTHONHOME=""
TEMP="./installer_files/temp"
TMP="./installer_files/temp"
INSTALL_ENV_DIR="./installer_files/lollms_env"
MINICONDA_DIR="./installer_files/miniconda3"
if [ ! -f "$MINICONDA_DIR/bin/activate" ]; then
    echo "Miniconda not found."
    exit 1
fi
source "$MINICONDA_DIR/bin/activate" "$INSTALL_ENV_DIR"
read -rp "Conda environment activated"
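
A short usage note on the script above, assuming the new file is the linux_conda_session.sh helper that the installer copies later in this commit: the conda activation only exists inside the process that runs the script, so executing it prints the banner, activates the environment, waits for Enter, and then everything is discarded. A minimal sketch of both invocations, run from the directory that contains installer_files:

    bash linux_conda_session.sh      # banner, activation, pause, then the environment is gone
    source linux_conda_session.sh    # the activation persists in the current shell instead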

View File

@@ -38,7 +38,8 @@ echo " \:\__\ \::/ / \:\__\ \:\__\ /:/ / \::/ / "
echo " \/__/ \/__/ \/__/ \/__/ \/__/ \/__/ "
echo " By ParisNeo"
echo "Please specify if you want to use a GPU or CPU. Note thaty only NVidea GPUs are supported?"
echo "Please specify if you want to use a GPU or CPU."
echo "*Note* that only NVIDIA GPUs (CUDA) or AMD GPUs (ROCm) are supported."
echo "A) Enable CUDA (for NVIDIA GPUs)"
echo "B) Enable ROCm (for AMD GPUs)"
echo "C) Run CPU mode"
@@ -149,6 +150,18 @@ else
    cp scripts/linux_update.sh ../
fi
if [[ -e "../linux_conda_session.sh" ]]; then
    echo "Linux conda session script found"
else
    cp scripts/linux_conda_session.sh ../
fi
if [[ -e "../linux_update_models.sh" ]]; then
    echo "Linux update models script found"
else
    cp scripts/linux_update_models.sh ../
fi
if [[ "${gpuchoice^^}" == "C" ]]; then
    echo "This is a .no_gpu file." > .no_gpu
    echo "You have chosen to use only CPU on this system."
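
For context, the .no_gpu file written here is only a marker; a minimal sketch of how a launcher could react to it (illustrative only, not code from this repository):

    # hypothetical check in a launch script
    if [ -f .no_gpu ]; then
        echo "CPU-only install detected"
        export CUDA_VISIBLE_DEVICES=""   # assumption: hide any GPUs from downstream libraries
    fi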

View File

@@ -0,0 +1,63 @@
#!/bin/bash
echo Starting LOLLMS Web UI...
echo " ___ ___ ___ ___ ___ ___ "
echo " /\__\ /\ \ /\__\ /\__\ /\__\ /\ \ "
echo " /:/ / /::\ \ /:/ / /:/ / /::| | /::\ \ "
echo " /:/ / /:/\:\ \ /:/ / /:/ / /:|:| | /:/\ \ \ "
echo " /:/ / /:/ \:\ \ /:/ / /:/ / /:/|:|__|__ _\:\~\ \ \ "
echo " /:/__/ /:/__/ \:\__\ /:/__/ /:/__/ /:/ |::::\__\ /\ \:\ \ \__\ "
echo " \:\ \ \:\ \ /:/ / \:\ \ \:\ \ \/__/~~/:/ / \:\ \:\ \/__/ "
echo " \:\ \ \:\ /:/ / \:\ \ \:\ \ /:/ / \:\ \:\__\ "
echo " \:\ \ \:\/:/ / \:\ \ \:\ \ /:/ / \:\/:/ / "
echo " \:\__\ \::/ / \:\__\ \:\__\ /:/ / \::/ / "
echo " \/__/ \/__/ \/__/ \/__/ \/__/ \/__/ "
echo " By ParisNeo"
echo " Models list update script"
cd "$(dirname "$0")"
# better isolation for virtual environment
CONDA_SHLVL=""
PYTHONNOUSERSITE=1
PYTHONPATH=""
PYTHONHOME=""
TEMP="./installer_files/temp"
TMP="./installer_files/temp"
INSTALL_ENV_DIR="./installer_files/lollms_env"
MINICONDA_DIR="./installer_files/miniconda3"
if [ ! -f "$MINICONDA_DIR/bin/activate" ]; then
    echo "Miniconda not found."
    exit 1
fi
source "$MINICONDA_DIR/bin/activate" "$INSTALL_ENV_DIR"
# Set your repository URL and file path
repository_url="https://github.com/ParisNeo/lollms_bindings_zoo.git"
# Set the destination folder where the file will be downloaded
destination_folder="downloaded_files"
# Create the destination folder if it doesn't exist
if [ ! -d "$destination_folder" ]; then
    mkdir "$destination_folder"
fi
# Clone the repository (if not already cloned)
if [ ! -d "$destination_folder/repository" ]; then
    git clone "$repository_url" "$destination_folder/repository"
fi
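# Note (not part of this commit): the clone above is skipped once the directory
# exists, so later runs keep copying whatever was fetched the first time. One
# possible way to refresh it on every run (an assumption about the intent) is:
#   git -C "$destination_folder/repository" pull --ff-only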
cd "$destination_folder/repository"
echo "Updating models"
cp hugging_face/models.yaml ../../personal_data/bindings_zoo/hugging_face/models.yaml
cp c_transformers/models.yaml ../../personal_data/bindings_zoo/c_transformers/models.yaml
cp llama_cpp_official/models.yaml ../../personal_data/bindings_zoo/llama_cpp_official/models.yaml
cp gpt_4all/models.yaml ../../personal_data/bindings_zoo/gpt_4all/models.yaml
cp py_llama_cpp/models.yaml ../../personal_data/bindings_zoo/py_llama_cpp/models.yaml
cp gptq/models.yaml ../../personal_data/bindings_zoo/gptq/models.yaml
cp exllama/models.yaml ../../personal_data/bindings_zoo/exllama/models.yaml
echo "Models updated"
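
The seven cp lines above could equally be written as a loop over the binding names; a behavior-equivalent sketch, assuming the same personal_data/bindings_zoo layout:

    for binding in hugging_face c_transformers llama_cpp_official gpt_4all py_llama_cpp gptq exllama; do
        cp "$binding/models.yaml" "../../personal_data/bindings_zoo/$binding/models.yaml"
    done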

View File

@@ -34,7 +34,8 @@ echo " \:\__\ \::/ / \:\__\ \:\__\ /:/ / \::/ / "
echo " \/__/ \/__/ \/__/ \/__/ \/__/ \/__/ "
echo " By ParisNeo"
echo "Please specify if you want to use a GPU or CPU. Note that only Nvidia GPUs are supported?"
echo "Please specify if you want to use a GPU or CPU."
echo "*Note* that only NVIDIA GPUs (CUDA) or AMD GPUs (ROCm) are supported."
echo "A) Enable GPU"
echo "B) Run CPU mode"
echo

View File

@@ -32,11 +32,11 @@ echo " \:\ \ \:\ /:/ / \:\ \ \:\ \ /:/ / \:\ \:\__\ "
echo " \:\ \ \:\/:/ / \:\ \ \:\ \ /:/ / \:\/:/ / "
echo " \:\__\ \::/ / \:\__\ \:\__\ /:/ / \::/ / "
echo " \/__/ \/__/ \/__/ \/__/ \/__/ \/__/ "
echo
echo By ParisNeo
:retry
echo Please specify if you want to use a GPU or CPU. Note thaty only NVidea GPUs are supported?
echo Please specify if you want to use a GPU or CPU.
echo *Note* that only NVIDIA GPUs (CUDA) or AMD GPUs (ROCm) are supported.
echo A) Enable CUDA GPU
echo B) Enable ROCm compatible GPU (AMD and other GPUs)
echo C) Run CPU mode
@@ -131,6 +131,19 @@ if exist ..\win_update.bat (
    copy scripts/win_update.bat ..\
)
if exist ..\win_conda_session.bat (
    echo win conda session script found
) else (
    copy scripts/win_conda_session.bat ..\
)
if exist ..\win_update_models.bat (
    echo Win update models found
) else (
    copy scripts/win_update_models.bat ..\
)
setlocal enabledelayedexpansion