From ed6afa3e25f00fb0973f9d54909f748b8493f8ff Mon Sep 17 00:00:00 2001 From: Saifeddine ALOUI Date: Thu, 11 Jan 2024 01:52:19 +0100 Subject: [PATCH] upgraded ollama --- lollms/app.py | 12 ++++++- lollms/configs/config.yaml | 6 +++- lollms/server/configs/config.yaml | 6 +++- .../ollama/{install.sh => install_ollama.sh} | 4 +-- lollms/services/ollama/lollms_ollama.py | 35 ++++++++++--------- 5 files changed, 41 insertions(+), 22 deletions(-) rename lollms/services/ollama/{install.sh => install_ollama.sh} (96%) diff --git a/lollms/app.py b/lollms/app.py index 989f127..9131b9b 100644 --- a/lollms/app.py +++ b/lollms/app.py @@ -16,7 +16,8 @@ from functools import partial from flask_socketio import SocketIO import subprocess import importlib -import sys +import sys, os +import platform class LollmsApplication(LoLLMsCom): @@ -55,6 +56,15 @@ class LollmsApplication(LoLLMsCom): self.tts = None if not free_mode: + if self.config.enable_ollama_service: + try: + from lollms.services.ollama.lollms_ollama import Service + self.tts = Service(self, base_url=self.config.ollama_base_url) + except Exception as ex: + trace_exception(ex) + self.warning(f"Couldn't load Ollama") + + if self.config.enable_voice_service and load_voice_service: try: from lollms.services.xtts.lollms_xtts import LollmsXTTS diff --git a/lollms/configs/config.yaml b/lollms/configs/config.yaml index fd5fc3c..8caeafd 100644 --- a/lollms/configs/config.yaml +++ b/lollms/configs/config.yaml @@ -1,5 +1,5 @@ # =================== Lord Of Large Language Models Configuration file =========================== -version: 40 +version: 41 binding_name: null model_name: null @@ -60,6 +60,10 @@ current_language: en enable_sd_service: false sd_base_url: http://127.0.0.1:7860 +# ollama service +enable_ollama_service: false +ollama_base_url: http://0.0.0.0:11434 + # Audio media_on: false audio_in_language: 'en-US' diff --git a/lollms/server/configs/config.yaml b/lollms/server/configs/config.yaml index fd5fc3c..8caeafd 100644 --- 
a/lollms/server/configs/config.yaml +++ b/lollms/server/configs/config.yaml @@ -1,5 +1,5 @@ # =================== Lord Of Large Language Models Configuration file =========================== -version: 40 +version: 41 binding_name: null model_name: null @@ -60,6 +60,10 @@ current_language: en enable_sd_service: false sd_base_url: http://127.0.0.1:7860 +# ollama service +enable_ollama_service: false +ollama_base_url: http://0.0.0.0:11434 + # Audio media_on: false audio_in_language: 'en-US' diff --git a/lollms/services/ollama/install.sh b/lollms/services/ollama/install_ollama.sh similarity index 96% rename from lollms/services/ollama/install.sh rename to lollms/services/ollama/install_ollama.sh index 9313f2f..0f3c13e 100644 --- a/lollms/services/ollama/install.sh +++ b/lollms/services/ollama/install_ollama.sh @@ -9,7 +9,7 @@ error() { echo "ERROR $*"; exit 1; } warning() { echo "WARNING: $*"; } OLLAMA_DIR=~/ollama - +echo "" if [ ! -d $OLLAMA_DIR ]; then mkdir $OLLAMA_DIR echo "Folder $OLLAMA_DIR created successfully!" @@ -56,7 +56,7 @@ if [ -n "$NEEDS" ]; then fi status "Downloading ollama..." -curl --fail --show-error --location --progress-bar -o $OLLAMA_DIR "https://ollama.ai/download/ollama-linux-$ARCH" +curl --fail --show-error --location --progress-bar -o $OLLAMA_DIR/ollama "https://ollama.ai/download/ollama-linux-$ARCH" status "Installing ollama to OLLAMA_DIR..." 
diff --git a/lollms/services/ollama/lollms_ollama.py b/lollms/services/ollama/lollms_ollama.py index f9393e0..804e7f1 100644 --- a/lollms/services/ollama/lollms_ollama.py +++ b/lollms/services/ollama/lollms_ollama.py @@ -43,20 +43,22 @@ def verify_ollama(lollms_paths:LollmsPaths): return sd_folder.exists() -def install_ollama(): +def install_ollama(lollms_app:LollmsApplication): if platform.system() == 'Windows': - if os.path.exists('C:\\Windows\\System32\\wsl.exe'): - subprocess.run(['wsl', 'bash', '-c', 'cp {} ~'.format(str(Path(__file__).parent / 'install.sh'))]) - subprocess.run(['wsl', 'bash', '-c', 'cp {} ~'.format(str(Path(__file__).parent / 'run_ollama.sh'))]) - subprocess.run(['wsl', 'bash', str(Path.home() / 'install.sh')]) - else: + root_path = "/mnt/"+"".join(str(Path(__file__).parent).replace("\\","/").split(":")) + if not os.path.exists('C:\\Windows\\System32\\wsl.exe'): + if not lollms_app.YesNoMessage("No WSL is detected on your system. Do you want me to install it for you? 
Ollama won't be able to work without wsl."): return False subprocess.run(['wsl', '--install', 'Ubuntu']) - subprocess.run(['wsl', 'bash', '-c', 'cp {} ~'.format(str(Path(__file__).parent / 'install.sh'))]) - subprocess.run(['wsl', 'bash', '-c', 'cp {} ~'.format(str(Path(__file__).parent / 'run_ollama.sh'))]) - subprocess.run(['wsl', 'bash', str(Path.home() / 'install.sh')]) + subprocess.run(['wsl', 'bash', '-c', 'cp {} ~'.format( root_path + '/install_ollama.sh')]) + subprocess.run(['wsl', 'bash', '-c', 'cp {} ~'.format( root_path + '/run_ollama.sh')]) + subprocess.run(['wsl', 'bash', '~/install_ollama.sh']) else: - subprocess.run(['bash', str(Path(__file__).parent / 'install.sh')]) - + root_path = str(Path(__file__).parent) + subprocess.run(['cp {} ~'.format( root_path + '/install_ollama.sh')]) + subprocess.run(['cp {} ~'.format( root_path + '/run_ollama.sh')]) + subprocess.run(['bash', '~/install_ollama.sh']) + return True class Service: def __init__( self, @@ -64,8 +66,7 @@ class Service: base_url="http://127.0.0.1:11434", wait_max_retries = 5 ): - if base_url=="" or base_url=="http://127.0.0.1:11434": - base_url = None + self.base_url = base_url # Get the current directory lollms_paths = app.lollms_paths self.app = app @@ -85,17 +86,17 @@ class Service: # run ollama if platform.system() == 'Windows': - if os.path.exists('C:\\Windows\\System32\\wsl.exe'): - subprocess.run(['wsl', 'bash', str(Path(__file__).parent / 'run_ollama.sh')]) + subprocess.run(['wsl', 'bash', '~/run_ollama.sh']) else: - subprocess.run(['bash', str(Path(__file__).parent / 'install.sh')]) + subprocess.run(['bash', '~/run_ollama.sh']) + # Wait until the service is available at http://127.0.0.1:7860/ self.wait_for_service(max_retries=wait_max_retries) def wait_for_service(self, max_retries = 150, show_warning=True): - url = f"{self.xtts_base_url}/languages" + url = f"{self.base_url}" # Adjust this value as needed retries = 0