upgraded ollama

This commit is contained in:
Saifeddine ALOUI 2024-01-11 01:52:19 +01:00
parent 21d481e56a
commit ed6afa3e25
5 changed files with 41 additions and 22 deletions
lollms

@ -16,7 +16,8 @@ from functools import partial
from flask_socketio import SocketIO
import subprocess
import importlib
import sys
import sys, os
import platform
class LollmsApplication(LoLLMsCom):
@ -55,6 +56,15 @@ class LollmsApplication(LoLLMsCom):
self.tts = None
if not free_mode:
if self.config.enable_ollama_service:
try:
from lollms.services.ollama.lollms_ollama import Service
self.tts = Service(self, base_url=self.config.ollama_base_url)
except Exception as ex:
trace_exception(ex)
self.warning(f"Couldn't load Ollama")
if self.config.enable_voice_service and load_voice_service:
try:
from lollms.services.xtts.lollms_xtts import LollmsXTTS

@ -1,5 +1,5 @@
# =================== Lord Of Large Language Models Configuration file ===========================
version: 40
version: 41
binding_name: null
model_name: null
@ -60,6 +60,10 @@ current_language: en
enable_sd_service: false
sd_base_url: http://127.0.0.1:7860
# ollama service
enable_ollama_service: false
ollama_base_url: http://0.0.0.0:11434
# Audio
media_on: false
audio_in_language: 'en-US'

@ -1,5 +1,5 @@
# =================== Lord Of Large Language Models Configuration file ===========================
version: 40
version: 41
binding_name: null
model_name: null
@ -60,6 +60,10 @@ current_language: en
enable_sd_service: false
sd_base_url: http://127.0.0.1:7860
# ollama service
enable_ollama_service: false
ollama_base_url: http://0.0.0.0:11434
# Audio
media_on: false
audio_in_language: 'en-US'

@ -9,7 +9,7 @@ error() { echo "ERROR $*"; exit 1; }
warning() { echo "WARNING: $*"; }
OLLAMA_DIR=~/ollama
echo ""
if [ ! -d $OLLAMA_DIR ]; then
mkdir $OLLAMA_DIR
echo "Folder $OLLAMA_DIR created successfully!"
@ -56,7 +56,7 @@ if [ -n "$NEEDS" ]; then
fi
status "Downloading ollama..."
curl --fail --show-error --location --progress-bar -o $OLLAMA_DIR "https://ollama.ai/download/ollama-linux-$ARCH"
curl --fail --show-error --location --progress-bar -o $OLLAMA_DIR/ollama "https://ollama.ai/download/ollama-linux-$ARCH"
status "Installing ollama to OLLAMA_DIR..."

@ -43,20 +43,22 @@ def verify_ollama(lollms_paths:LollmsPaths):
return sd_folder.exists()
def install_ollama():
def install_ollama(lollms_app:LollmsApplication):
if platform.system() == 'Windows':
if os.path.exists('C:\\Windows\\System32\\wsl.exe'):
subprocess.run(['wsl', 'bash', '-c', 'cp {} ~'.format(str(Path(__file__).parent / 'install.sh'))])
subprocess.run(['wsl', 'bash', '-c', 'cp {} ~'.format(str(Path(__file__).parent / 'run_ollama.sh'))])
subprocess.run(['wsl', 'bash', str(Path.home() / 'install.sh')])
else:
root_path = "/mnt/"+"".join(str(Path(__file__).parent).replace("\\","/").split(":"))
if not os.path.exists('C:\\Windows\\System32\\wsl.exe'):
            if not lollms_app.YesNoMessage("No WSL is detected on your system. Do you want me to install it for you? Ollama won't be able to work without WSL."):
return False
subprocess.run(['wsl', '--install', 'Ubuntu'])
subprocess.run(['wsl', 'bash', '-c', 'cp {} ~'.format(str(Path(__file__).parent / 'install.sh'))])
subprocess.run(['wsl', 'bash', '-c', 'cp {} ~'.format(str(Path(__file__).parent / 'run_ollama.sh'))])
subprocess.run(['wsl', 'bash', str(Path.home() / 'install.sh')])
subprocess.run(['wsl', 'bash', '-c', 'cp {} ~'.format( root_path + '/install_ollama.sh')])
subprocess.run(['wsl', 'bash', '-c', 'cp {} ~'.format( root_path + '/run_ollama.sh')])
subprocess.run(['wsl', 'bash', '~/install_ollama.sh'])
else:
subprocess.run(['bash', str(Path(__file__).parent / 'install.sh')])
root_path = str(Path(__file__).parent)
subprocess.run(['cp {} ~'.format( root_path + '/install_ollama.sh')])
subprocess.run(['cp {} ~'.format( root_path + '/run_ollama.sh')])
subprocess.run(['bash', '~/install_ollama.sh'])
return True
class Service:
def __init__(
self,
@ -64,8 +66,7 @@ class Service:
base_url="http://127.0.0.1:11434",
wait_max_retries = 5
):
if base_url=="" or base_url=="http://127.0.0.1:11434":
base_url = None
self.base_url = base_url
# Get the current directory
lollms_paths = app.lollms_paths
self.app = app
@ -85,17 +86,17 @@ class Service:
# run ollama
if platform.system() == 'Windows':
if os.path.exists('C:\\Windows\\System32\\wsl.exe'):
subprocess.run(['wsl', 'bash', str(Path(__file__).parent / 'run_ollama.sh')])
subprocess.run(['wsl', 'bash', '~/run_ollama.sh'])
else:
subprocess.run(['bash', str(Path(__file__).parent / 'install.sh')])
subprocess.run(['bash', '~/run_ollama.sh'])
# Wait until the service is available at http://127.0.0.1:7860/
self.wait_for_service(max_retries=wait_max_retries)
def wait_for_service(self, max_retries = 150, show_warning=True):
url = f"{self.xtts_base_url}/languages"
url = f"{self.base_url}"
# Adjust this value as needed
retries = 0