upgraded position

This commit is contained in:
Saifeddine ALOUI 2024-01-30 00:08:46 +01:00
parent b5de353df3
commit cd202fd74f
11 changed files with 422 additions and 3 deletions

View File

@@ -0,0 +1,43 @@
"""
project: lollms_webui
file: lollms_ollama.py
author: ParisNeo
description:
    This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
    application. These routes allow users to install and manage the ollama service.
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
# ----------------------- voice ------------------------------
@router.get("/install_ollama")
def install_ollama():
    try:
        lollmsElfServer.ShowBlockingMessage("Installing ollama server\nPlease stand by")
        from lollms.services.ollama.lollms_ollama import install_ollama
        if install_ollama(lollmsElfServer):
            lollmsElfServer.HideBlockingMessage()
            return {"status":True}
        else:
            lollmsElfServer.HideBlockingMessage()
            return {"status":False, 'error':"Installation failed"}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}

View File

@@ -0,0 +1,42 @@
"""
project: lollms_webui
file: lollms_petals.py
author: ParisNeo
description:
    This module contains a set of FastAPI routes that manage the petals service
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
# ----------------------- voice ------------------------------
@router.get("/install_petals")
def install_petals():
    try:
        lollmsElfServer.ShowBlockingMessage("Installing petals server\nPlease stand by")
        from lollms.services.petals.lollms_petals import install_petals
        if install_petals(lollmsElfServer):
            lollmsElfServer.HideBlockingMessage()
            return {"status":True}
        else:
            lollmsElfServer.HideBlockingMessage()
            return {"status":False, 'error':"Installation failed"}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}

View File

@@ -0,0 +1,41 @@
"""
project: lollms_webui
file: lollms_sd.py
author: ParisNeo
description:
    This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
    application. These routes allow users to install and manage the Stable Diffusion (SD) service.
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
# ----------------------- voice ------------------------------
@router.get("/install_sd")
def install_sd():
    try:
        lollmsElfServer.ShowBlockingMessage("Installing SD api server\nPlease stand by")
        from lollms.services.sd.lollms_sd import install_sd
        install_sd(lollmsElfServer)
        ASCIIColors.success("Done")
        lollmsElfServer.HideBlockingMessage()
        return {"status":True}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}

View File

@@ -0,0 +1,51 @@
"""
project: lollms_user
file: lollms_user.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to manipulate user information.
"""
from fastapi import APIRouter, UploadFile, File
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string
from ascii_colors import ASCIIColors
from api.db import DiscussionsDB
from pathlib import Path
from safe_store.text_vectorizer import TextVectorizer, VectorizationMethod, VisualizationMethod
import tqdm
import shutil
class PersonalPathParameters(BaseModel):
    path:str
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer = LOLLMSWebUI.get_instance()
@router.get("/switch_personal_path")
def switch_personal_path(data:PersonalPathParameters):
path = data.path
global_paths_cfg = Path("./global_paths_cfg.yaml")
if global_paths_cfg.exists():
try:
cfg = BaseConfig()
cfg.load_config(global_paths_cfg)
cfg.lollms_personal_path = path
cfg.save_config(global_paths_cfg)
return {"status": True}
except Exception as ex:
print(ex)
return {"status": False, 'error':f"Couldn't switch path: {ex}"}
@router.post("/upload_avatar")
def upload_avatar(avatar: UploadFile = File(...)):
with open(lollmsElfServer.lollms_paths.personal_user_infos_path/avatar.filename, "wb") as buffer:
shutil.copyfileobj(avatar.file, buffer)
return {"status": True,"fileName":avatar.filename}

View File

@@ -0,0 +1,61 @@
"""
project: lollms_webui
file: lollms_vllm.py
author: ParisNeo
description:
    This module contains a set of FastAPI routes that manage the vllm service
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
# ----------------------- voice ------------------------------
@router.get("/install_vllm")
def install_vllm():
    try:
        lollmsElfServer.ShowBlockingMessage("Installing vllm server\nPlease stand by")
        from lollms.services.vllm.lollms_vllm import install_vllm
        if install_vllm(lollmsElfServer):
            lollmsElfServer.HideBlockingMessage()
            return {"status":True}
        else:
            lollmsElfServer.HideBlockingMessage()
            return {"status":False, 'error':"Installation failed"}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}

@router.get("/start_vllm")
def start_vllm():
    try:
        if not hasattr(lollmsElfServer,"vllm") or lollmsElfServer.vllm is None:
            lollmsElfServer.ShowBlockingMessage("Loading vllm server\nPlease stand by")
            from lollms.services.vllm.lollms_vllm import get_vllm
            if get_vllm(lollmsElfServer):
                lollmsElfServer.HideBlockingMessage()
                return {"status":True}
            else:
                lollmsElfServer.HideBlockingMessage()
                return {"status":False, 'error':"Could not load the vllm service"}
        else:
            return {"status":False, 'error':'Service already running'}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}

View File

@@ -0,0 +1,116 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
    This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
    application. These routes allow users to list voices, set the current voice, and synthesize audio from text using the XTTS service.
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
# ----------------------- voice ------------------------------
@router.get("/list_voices")
def list_voices():
ASCIIColors.yellow("Listing voices")
voices=["main_voice"]
voices_dir:Path=lollmsElfServer.lollms_paths.custom_voices_path
voices += [v.stem for v in voices_dir.iterdir() if v.suffix==".wav"]
return {"voices":voices}
@router.post("/set_voice")
async def set_voice(request: Request):
"""
Changes current voice
:param request: The HTTP request object.
:return: A JSON response with the status of the operation.
"""
try:
data = (await request.json())
lollmsElfServer.config.current_voice=data["voice"]
if lollmsElfServer.config.auto_save:
lollmsElfServer.config.save_config()
return {"status":True}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.error(ex)
return {"status":False,"error":str(ex)}
@router.post("/text2Audio")
async def text2Audio(request: Request):
"""
Executes Python code and returns the output.
:param request: The HTTP request object.
:return: A JSON response with the status of the operation.
"""
try:
data = (await request.json())
# Get the JSON data from the POST request.
try:
from lollms.services.xtts.lollms_xtts import LollmsXTTS
if lollmsElfServer.tts is None:
lollmsElfServer.tts = LollmsXTTS(lollmsElfServer, voice_samples_path=Path(__file__).parent/"voices", xtts_base_url= lollmsElfServer.config.xtts_base_url)
except:
return {"url": None}
voice=data.get("voice",lollmsElfServer.config.current_voice)
index = find_first_available_file_index(lollmsElfServer.tts.output_folder, "voice_sample_",".wav")
output_fn=data.get("fn",f"voice_sample_{index}.wav")
if voice is None:
voice = "main_voice"
lollmsElfServer.info("Starting to build voice")
try:
from lollms.services.xtts.lollms_xtts import LollmsXTTS
if lollmsElfServer.tts is None:
lollmsElfServer.tts = LollmsXTTS(lollmsElfServer, voice_samples_path=Path(__file__).parent/"voices", xtts_base_url= lollmsElfServer.config.xtts_base_url)
language = lollmsElfServer.config.current_language# convert_language_name()
if voice!="main_voice":
voices_folder = lollmsElfServer.lollms_paths.custom_voices_path
else:
voices_folder = Path(__file__).parent.parent/"voices"
lollmsElfServer.tts.set_speaker_folder(voices_folder)
url = f"audio/{output_fn}"
preprocessed_text= add_period(data['text'])
lollmsElfServer.tts.tts_to_file(preprocessed_text, f"{voice}.wav", f"{output_fn}", language=language)
lollmsElfServer.info("Voice file ready")
return {"url": url}
except:
return {"url": None}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.error(ex)
return {"status":False,"error":str(ex)}
@router.get("/install_xtts")
def install_xtts():
try:
from lollms.services.xtts.lollms_xtts import install_xtts
lollmsElfServer.ShowBlockingMessage("Installing xTTS api server\nPlease stand by")
install_xtts(lollmsElfServer)
lollmsElfServer.HideBlockingMessage()
return {"status":True}
except Exception as ex:
lollmsElfServer.HideBlockingMessage()
return {"status":False, 'error':str(ex)}

View File

@@ -0,0 +1,22 @@
#!/bin/bash
# Check if miniconda3/bin/conda exists
if [ -e "$HOME/miniconda3/bin/conda" ]; then
    echo "Conda is installed!"
else
    echo "Conda is not installed. Installing it now..."
    curl -LO https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
    bash ./Miniconda3-latest-Linux-x86_64.sh -b
    "$HOME/miniconda3/bin/conda" init --all
    rm ./Miniconda3-latest-Linux-x86_64.sh
    echo "Done"
fi
PATH="$HOME/miniconda3/bin:$PATH"
export PATH
# conda activate only works in non-interactive scripts after the conda shell hook is sourced
source "$HOME/miniconda3/etc/profile.d/conda.sh"
echo "Installing vllm"
conda create -n vllm python=3.9 -y
conda activate vllm
pip install vllm
echo "Done"

View File

@@ -60,6 +60,18 @@ def install_petals(lollms_app:LollmsApplication):
     subprocess.run(['cp', root_path + '/run_petals.sh', home])
     subprocess.run(['bash', f'{home}/install_petals.sh'])
     return True
+
+def get_petals(lollms_app:LollmsApplication):
+    if verify_petals(lollms_app.lollms_paths):
+        ASCIIColors.success("lollms_petals found.")
+        ASCIIColors.success("Loading source file...", end="")
+        # use importlib to load the module from the file path
+        ASCIIColors.success("ok")
+        return Service
+    else:
+        return None
+
 class Service:
     def __init__(
         self,

View File

@@ -0,0 +1,9 @@
#!/bin/bash
cd ~/vllm
PATH="$HOME/miniconda3/bin:$PATH"
export PATH
# conda activate only works in non-interactive scripts after the conda shell hook is sourced
source "$HOME/miniconda3/etc/profile.d/conda.sh"
# $1 is the model to serve (bash positional parameter; %1 is batch syntax)
conda activate vllm && python -m vllm.entrypoints.openai.api_server --model "$1"
# Wait for all background processes to finish
wait
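
Once run_vllm.sh is up, vllm's openai.api_server exposes an OpenAI-compatible HTTP API, by default on port 8000. A hedged sketch of a completion call; the model name is illustrative and must match the one passed to the script:

import requests

resp = requests.post(
    "http://localhost:8000/v1/completions",
    json={"model": "mistralai/Mistral-7B-v0.1", "prompt": "Hello", "max_tokens": 32},
)
print(resp.json()["choices"][0]["text"])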

View File

@@ -13,5 +13,10 @@ else
 echo Done
 fi
 PATH="$HOME/miniconda3/bin:$PATH"
+conda init
 export PATH
-conda create -n vllm python=3.9 -y && conda activate vllm && pip install vllm --user
+echo "Installing vllm"
+conda create -n vllm python=3.9 -y
+conda activate vllm
+pip install vllm
+echo "Done"

View File

@@ -38,8 +38,9 @@ def verify_vllm(lollms_paths:LollmsPaths):
     root_dir = lollms_paths.personal_path
     shared_folder = root_dir/"shared"
-    sd_folder = shared_folder / "auto_sd"
-    return sd_folder.exists()
+    vllm_folder = shared_folder / "vllm"
+    return vllm_folder.exists()
 def install_vllm(lollms_app:LollmsApplication):
@@ -58,7 +59,23 @@ def install_vllm(lollms_app:LollmsApplication):
     subprocess.run(['cp', root_path + '/install_vllm.sh', home])
     subprocess.run(['cp', root_path + '/run_vllm.sh', home])
     subprocess.run(['bash', f'{home}/install_vllm.sh'])
+    root_dir = lollms_app.lollms_paths.personal_path
+    shared_folder = root_dir/"shared"
+    vllm_folder = shared_folder / "vllm"
+    vllm_folder.mkdir(exist_ok=True, parents=True)
     return True
+
+def get_vllm(lollms_app:LollmsApplication):
+    if verify_vllm(lollms_app.lollms_paths):
+        ASCIIColors.success("lollms_vllm found.")
+        ASCIIColors.success("Loading source file...", end="")
+        # use importlib to load the module from the file path
+        ASCIIColors.success("ok")
+        return Service
+    else:
+        return None
+
+class Service:
+    def __init__(
+        self,