added new server

Saifeddine ALOUI 2024-02-19 22:40:20 +01:00
parent da2d2d9a63
commit a20a3f67ed
8 changed files with 250 additions and 15 deletions

View File

@@ -125,10 +125,7 @@ class LollmsApplication(LoLLMsCom):
     def get_uploads_path(self, client_id):
         return self.lollms_paths.personal_uploads_path

-    def start_servers( self,
-                            load_voice_service=True,
-                            load_sd_service=True,
-                        ):
+    def start_servers( self ):
         if self.config.enable_ollama_service:
             try:
                 from lollms.services.ollama.lollms_ollama import Service
@@ -137,15 +134,23 @@ class LollmsApplication(LoLLMsCom):
                 trace_exception(ex)
                 self.warning(f"Couldn't load Ollama")

+        if self.config.vllm_service:
+            try:
+                from lollms.services.vllm.lollms_vllm import Service
+                self.vllm = Service(self, base_url=self.config.vllm_url)
+            except Exception as ex:
+                trace_exception(ex)
+                self.warning(f"Couldn't load vllm")
+
-        if self.config.enable_voice_service and load_voice_service:
+        if self.config.enable_voice_service:
             try:
                 from lollms.services.xtts.lollms_xtts import LollmsXTTS
-                self.tts = LollmsXTTS(self, voice_samples_path=lollms_paths.custom_voices_path, xtts_base_url=self.config.xtts_base_url, wait_for_service=False)
+                self.tts = LollmsXTTS(self, voice_samples_path=self.lollms_paths.custom_voices_path, xtts_base_url=self.config.xtts_base_url, wait_for_service=False)
             except:
                 self.warning(f"Couldn't load XTTS")

-        if self.config.enable_sd_service and load_sd_service:
+        if self.config.enable_sd_service:
             try:
                 from lollms.services.sd.lollms_sd import LollmsSD
                 self.sd = LollmsSD(self, auto_sd_base_url=self.config.sd_base_url)
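Net effect of the two hunks above: the per-call load_voice_service / load_sd_service switches are gone, a vllm branch mirrors the existing Ollama one, and every auxiliary server is now gated purely by configuration. A minimal usage sketch follows (editorial, not part of the commit; the config values and the pre-built app instance are assumptions):

# Editorial sketch: driving the new start_servers() from configuration only.
# `app` is assumed to be an already constructed LollmsApplication.
app.config.vllm_service = True                 # gates the new vllm branch above
app.config.vllm_url = "http://localhost:8000"  # assumed local vllm endpoint
app.start_servers()                            # the old load_voice_service/load_sd_service
                                               # keyword arguments no longer exist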

View File

@@ -0,0 +1,41 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
    This module adds a set of FastAPI routes to the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
    application. These routes allow users to install the MotionCtrl service.
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
# ----------------------- motion ctrl ------------------------------
@router.get("/install_motion_ctrl")
def install_motion_ctrl():
    try:
        lollmsElfServer.ShowBlockingMessage("Installing Motion Ctrl api server\nPlease stand by")
        from lollms.services.motion_ctrl.lollms_motion_ctrl import install_motion_ctrl
        install_motion_ctrl(lollmsElfServer)
        ASCIIColors.success("Done")
        lollmsElfServer.HideBlockingMessage()
        return {"status":True}
    except Exception as ex:
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}
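For reference, a client-side sketch of calling the new route (editorial, not part of the commit; host and port are assumptions based on a local LoLLMs Web UI instance):

import requests

# GET /install_motion_ctrl runs the blocking installer and reports the outcome
resp = requests.get("http://localhost:9600/install_motion_ctrl")   # port 9600 is an assumption
print(resp.json())   # {"status": True} on success, {"status": False, "error": "..."} on failure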

View File

@@ -44,10 +44,16 @@ def install_vllm():
 @router.get("/start_vllm")
 def start_vllm():
     try:
-        if not hasattr(lollmsElfServer,"vllm") or lollmsElfServer.vllm is none:
+        if hasattr(lollmsElfServer,"vllm") and lollmsElfServer.vllm is not None:
+            return {"status":False, 'error':"Service is already on"}
+
+        if not hasattr(lollmsElfServer,"vllm") or lollmsElfServer.vllm is None:
             lollmsElfServer.ShowBlockingMessage("Loading vllm server\nPlease stand by")
             from lollms.services.vllm.lollms_vllm import get_vllm
-            if get_vllm(lollmsElfServer):
+            server = get_vllm(lollmsElfServer)
+            if server:
+                lollmsElfServer.vllm = server(lollmsElfServer, lollmsElfServer.config.vllm_url)
                 lollmsElfServer.HideBlockingMessage()
                 return {"status":True}
             else:
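A hedged client-side sketch of the corrected behaviour (editorial, not part of the commit; the port is an assumption):

import requests

# First call starts the vllm server (or falls through to the error branch)...
print(requests.get("http://localhost:9600/start_vllm").json())
# ...a second call now hits the new guard and returns:
#   {"status": False, "error": "Service is already on"}
print(requests.get("http://localhost:9600/start_vllm").json())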

View File

@@ -0,0 +1,174 @@
# Title LollmsMotionCtrl
# Licence: MIT
# Author : Paris Neo
# Adapted from the work of mix1009's sdwebuiapi
# check it out : https://github.com/mix1009/sdwebuiapi/tree/main
# Here is a copy of the LICENCE https://github.com/mix1009/sdwebuiapi/blob/main/LICENSE
# All rights are reserved
from pathlib import Path
import sys
from lollms.app import LollmsApplication
from lollms.paths import LollmsPaths
from lollms.config import TypedConfig, ConfigTemplate, BaseConfig
from lollms.utilities import get_conda_path
import time
import io
import sys
import requests
import os
import base64
import subprocess
import time
import json
import platform
from dataclasses import dataclass
from PIL import Image, PngImagePlugin
from enum import Enum
from typing import List, Dict, Any
from ascii_colors import ASCIIColors, trace_exception
from lollms.paths import LollmsPaths
from lollms.utilities import git_pull
import shutil

def verify_motion_ctrl(lollms_paths:LollmsPaths):
    # Check whether the MotionCtrl folder already exists
    root_dir = lollms_paths.personal_path
    shared_folder = root_dir/"shared"
    motion_ctrl_folder = shared_folder / "auto_motion_ctrl"
    return motion_ctrl_folder.exists()

def install_motion_ctrl(lollms_app:LollmsApplication):
    root_dir = lollms_app.lollms_paths.personal_path
    shared_folder = root_dir/"shared"
    motion_ctrl_folder = shared_folder / "auto_motion_ctrl"
    if motion_ctrl_folder.exists():
        if not lollms_app.YesNoMessage("I have detected a previous installation of MotionCtrl.\nShould I remove it and continue installing?"):
            return
        else:
            shutil.rmtree(motion_ctrl_folder)
    subprocess.run(["git", "clone", "https://github.com/ParisNeo/MotionCtrl.git", str(motion_ctrl_folder)])
    import conda.cli
    env_name = "MotionCtrl"
    conda.cli.main('conda', 'create', '--name', env_name, 'python=3.10', '--yes')

    # Activate the freshly created environment and install the requirements
    activate_env_command = f"conda activate {env_name} && "
    pip_install_command = "pip install -r " + str(motion_ctrl_folder) + "/requirements.txt"

    # Run the combined command
    subprocess.run(activate_env_command + pip_install_command, shell=True, executable='/bin/bash')
    ASCIIColors.green("Motion ctrl installed successfully")
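The pip-install step above relies on `conda activate` working in a non-interactive shell, which only succeeds when conda has been initialized for that shell. A more defensive variant (editorial sketch, not part of the commit; it reuses env_name and motion_ctrl_folder from the function above) would use `conda run`, which needs no activation:

# Editorial sketch: same step with `conda run -n <env>`, which executes the
# command inside the named environment without activating a shell.
subprocess.run(
    ["conda", "run", "-n", env_name, "pip", "install", "-r",
     str(motion_ctrl_folder / "requirements.txt")],
    check=True,
)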

def get_motion_ctrl(lollms_paths:LollmsPaths):
    root_dir = lollms_paths.personal_path
    shared_folder = root_dir/"shared"
    motion_ctrl_folder = shared_folder / "auto_motion_ctrl"
    motion_ctrl_script_path = motion_ctrl_folder / "lollms_motion_ctrl.py"
    git_pull(motion_ctrl_folder)

    if motion_ctrl_script_path.exists():
        ASCIIColors.success("lollms_motion_ctrl found.")
        ASCIIColors.success("Loading source file...",end="")
        # use importlib to load the module from the file path
        from lollms.services.motion_ctrl.lollms_motion_ctrl import LollmsMotionCtrl
        ASCIIColors.success("ok")
        return LollmsMotionCtrl

class LollmsMotionCtrl:
    has_controlnet = False
    def __init__(
                    self,
                    app:LollmsApplication,
                    wm = "Artbot",
                    max_retries=50,
                    sampler="Euler a",
                    steps=20,
                    use_https=False,
                    username=None,
                    password=None,
                    auto_motion_ctrl_base_url=None,
                    share=False,
                    wait_for_service=True
                ):
        if auto_motion_ctrl_base_url=="" or auto_motion_ctrl_base_url=="http://127.0.0.1:7860":
            auto_motion_ctrl_base_url = None
        # Get the current directory
        lollms_paths = app.lollms_paths
        self.app = app
        root_dir = lollms_paths.personal_path
        self.wm = wm

        # Store the path to the script
        if auto_motion_ctrl_base_url is None:
            self.auto_motion_ctrl_base_url = "http://127.0.0.1:7860"
            if not verify_motion_ctrl(lollms_paths):
                install_motion_ctrl(app)
        else:
            self.auto_motion_ctrl_base_url = auto_motion_ctrl_base_url

        self.auto_motion_ctrl_url = self.auto_motion_ctrl_base_url+"/sdapi/v1"
        shared_folder = root_dir/"shared"
        self.motion_ctrl_folder = shared_folder / "motion_ctrl"
        self.output_dir = root_dir / "outputs/motion_ctrl"
        self.output_dir.mkdir(parents=True, exist_ok=True)
        ASCIIColors.red(" ")
        ASCIIColors.red(" __ _____ __ __ _____ _____ _____ ____ ")
        ASCIIColors.red("| | | | | | | | | __| | __| \ ")
        ASCIIColors.red("| |__| | | |__| |__| | | |__ | |__ | | |")
        ASCIIColors.red("|_____|_____|_____|_____|_|_|_|_____|_____|_____|____/ ")
        ASCIIColors.red(" |_____| ")
        ASCIIColors.red(" Forked from TencentARC's MotionCtrl api")
        ASCIIColors.red(" Integration in lollms by ParisNeo")

        motion_ctrl_folder = shared_folder / "auto_motion_ctrl"
        env_name = "MotionCtrl"

        if not self.wait_for_service(1,False) and auto_motion_ctrl_base_url is None:
            ASCIIColors.info("Loading lollms_motion_ctrl")
            os.environ['motion_ctrl_WEBUI_RESTARTING'] = '1' # To forbid the webui from showing in the browser automatically
            # Get the current operating system
            os_name = platform.system()
            conda_path = get_conda_path()

            # Construct the command to activate the environment and start the server
            if os_name == "Windows":
                activate_env_command = f"call {conda_path}\\activate {env_name} && "
            else:  # Linux or macOS
                activate_env_command = f"source {conda_path}/activate {env_name} && "
            start_server_command = f"python -m app --share"

            # Run the combined command
            if os_name == "Windows":
                subprocess.run(activate_env_command + start_server_command, shell=True, cwd=motion_ctrl_folder)
            else:  # Linux or macOS
                subprocess.run(activate_env_command + start_server_command, shell=True, executable='/bin/bash', cwd=motion_ctrl_folder)

        # Wait until the service is available at http://127.0.0.1:7860/
        if wait_for_service:
            self.wait_for_service(max_retries=max_retries)
        else:
            ASCIIColors.warning("We are not waiting for the MotionCtrl service to be up.\nThis means that you may need to wait a bit before you can use it.")

        self.default_sampler = sampler
        self.default_steps = steps

        self.session = requests.Session()

        if username and password:
            self.set_auth(username, password)
        else:
            self.check_controlnet()
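A hedged instantiation sketch for the class above (editorial, not part of the commit; `app` is assumed to be an existing LollmsApplication, and the keyword values are illustrative):

# Editorial sketch: wiring LollmsMotionCtrl into an app instance
motion_ctrl = LollmsMotionCtrl(
    app,                               # constructed LollmsApplication (assumption)
    auto_motion_ctrl_base_url=None,    # None -> default http://127.0.0.1:7860; installs/starts the local server if needed
    wait_for_service=False,            # return immediately instead of polling the endpoint
)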

View File

@@ -8,15 +8,15 @@ else
   echo Installing conda
   curl -LOk https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
   ./Miniconda3-latest-Linux-x86_64.sh -b
+  $HOME/miniconda3/bin/conda init --all
   rm ./Miniconda3-latest-Linux-x86_64.sh
   echo Done
 fi
-PATH="$HOME/miniconda3/bin:$PATH"
-conda init
+echo "Initializing conda"
+$HOME/miniconda3/bin/conda init --all
 export PATH
 echo "Installing vllm"
-conda create -n vllm python=3.9 -y
-conda activate vllm
+$HOME/miniconda3/bin/conda create -n vllm python=3.9 -y
+echo "Activating vllm environment"
+$HOME/miniconda3/bin/conda activate vllm
 pip install vllm
 echo "Done"

View File

@ -72,6 +72,7 @@ def get_vllm(lollms_app:LollmsApplication):
ASCIIColors.success("Loading source file...",end="") ASCIIColors.success("Loading source file...",end="")
# use importlib to load the module from the file path # use importlib to load the module from the file path
ASCIIColors.success("ok") ASCIIColors.success("ok")
return Service return Service
else: else:
return None return None

View File

@@ -3,7 +3,7 @@
 cd ~/vllm
 PATH="$HOME/miniconda3/bin:$PATH"
 export PATH
-conda activate vllm && python -m vllm.entrypoints.openai.api_server --model %1
+$HOME/miniconda3/bin/conda activate vllm && python -m vllm.entrypoints.openai.api_server --model %1
 # Wait for all background processes to finish
 wait

View File

@@ -35,6 +35,14 @@ import os
 import sys

+def get_conda_path():
+    # Get the path to the Python executable that's running the script
+    python_executable_path = sys.executable
+    # Construct the path to the 'conda' executable based on the Python executable path
+    # Assuming that 'conda' is in the same directory as the Python executable
+    conda_executable_path = os.path.join(os.path.dirname(python_executable_path), 'conda')
+    return conda_executable_path
+
 def yes_or_no_input(prompt):
     while True:
         user_input = input(prompt + " (yes/no): ").lower()
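For orientation, this helper feeds the conda-based launchers above (LollmsMotionCtrl builds its activate command from it). A short editorial note on what it returns (not part of the commit):

# Editorial sketch: get_conda_path() assumes conda sits next to the running interpreter,
#   <env>/bin/python  ->  <env>/bin/conda        (Linux/macOS)
# On Windows the binary usually lives in Scripts\conda.exe, so the bare path returned
# here may need adjusting there (an assumption, not handled by the helper).
conda_path = get_conda_path()
print(conda_path)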