From 3581e608e55dec29cc0926ca03065accb445d72c Mon Sep 17 00:00:00 2001 From: Saifeddine ALOUI Date: Wed, 3 Jan 2024 01:40:30 +0100 Subject: [PATCH] Added the new version of elf --- configs/config.yaml | 13 ++++- lollms/app.py | 14 ++++- lollms/audio_gen_modules/lollms_xtts.py | 14 +++-- lollms/configs/config.yaml | 13 ++++- lollms/paths.py | 24 +++++---- lollms/server/elf.py | 70 +++++++++++++++++++++++++ lollms/server/endpoints/lollms_infos.py | 15 ++++++ lollms/server/new_app.py | 49 +++++++++++++++++ 8 files changed, 189 insertions(+), 23 deletions(-) create mode 100644 lollms/server/elf.py create mode 100644 lollms/server/endpoints/lollms_infos.py create mode 100644 lollms/server/new_app.py diff --git a/configs/config.yaml b/configs/config.yaml index 515781b..e357897 100644 --- a/configs/config.yaml +++ b/configs/config.yaml @@ -1,5 +1,5 @@ # =================== Lord Of Large Language Models Configuration file =========================== -version: 36 +version: 39 binding_name: null model_name: null @@ -56,6 +56,10 @@ auto_read: false current_voice: null current_language: en +# Image generation service +enable_sd_service: false +sd_base_url: http://127.0.0.1:7860 + # Audio media_on: false audio_in_language: 'en-US' @@ -84,4 +88,9 @@ data_vectorization_make_persistance: false # If true, the data will be persistan # Helpers -pdf_latex_path: null \ No newline at end of file +pdf_latex_path: null + +# boosting information +positive_boost: null +negative_boost: null +force_output_language_to_be: null diff --git a/lollms/app.py b/lollms/app.py index 2ab7da8..3973744 100644 --- a/lollms/app.py +++ b/lollms/app.py @@ -27,6 +27,8 @@ class LollmsApplication(LoLLMsCom): lollms_paths:LollmsPaths, load_binding=True, load_model=True, + load_voice_service=True, + load_sd_service=True, try_select_binding=False, try_select_model=False, callback=None, @@ -50,13 +52,21 @@ class LollmsApplication(LoLLMsCom): self.long_term_memory = None self.tts = None - if 
self.config.enable_voice_service: + + if self.config.enable_voice_service and load_voice_service: try: from lollms.audio_gen_modules.lollms_xtts import LollmsXTTS - self.tts = LollmsXTTS(self, voice_samples_path=lollms_paths.custom_voices_path) + self.tts = LollmsXTTS(self, voice_samples_path=lollms_paths.custom_voices_path, xtts_base_url=self.config.xtts_base_url) except: self.warning(f"Couldn't load XTTS") + if self.config.enable_sd_service and load_sd_service: + try: + from lollms.image_gen_modules.lollms_sd import LollmsSD + self.tts = LollmsSD(self, auto_sd_base_url=self.config.sd_base_url) + except: + self.warning(f"Couldn't load SD") + try: if config.auto_update: # Clone the repository to the target path diff --git a/lollms/audio_gen_modules/lollms_xtts.py b/lollms/audio_gen_modules/lollms_xtts.py index e2be9ef..38d520c 100644 --- a/lollms/audio_gen_modules/lollms_xtts.py +++ b/lollms/audio_gen_modules/lollms_xtts.py @@ -97,14 +97,12 @@ class LollmsXTTS: shared_folder = root_dir/"shared" self.xtts_folder = shared_folder / "xtts" - - ASCIIColors.red(".____ ________ .____ .____ _____ _________ ____ __________________________________ ") - ASCIIColors.red("| | \_____ \ | | | | / \ / _____/ \ \/ /\__ ___/\__ ___/ _____/ ") - ASCIIColors.red("| | / | \| | | | / \ / \ \_____ \ ______ \ / | | | | \_____ \ ") - ASCIIColors.red("| |___/ | \ |___| |___/ Y \/ \ /_____/ / \ | | | | / \ ") - ASCIIColors.red("|_______ \_______ /_______ \_______ \____|__ /_______ / /___/\ \ |____| |____| /_______ / ") - ASCIIColors.red(" \/ \/ \/ \/ \/ \/ \_/ \/ ") - + ASCIIColors.red(" __ ___ __ __ __ __ ___ _ ") + ASCIIColors.red(" / / /___\/ / / / /\/\ / _\ \ \/ / |_| |_ ___ ") + ASCIIColors.red(" / / // // / / / / \ \ \ _____\ /| __| __/ __| ") + ASCIIColors.red("/ /___/ \_// /___/ /___/ /\/\ \_\ \_____/ \| |_| |_\__ \ ") + ASCIIColors.red("\____/\___/\____/\____/\/ \/\__/ /_/\_\\__|\__|___/ ") + ASCIIColors.red(" Forked from daswer123's XTTS server") ASCIIColors.red(" Integration in 
lollms by ParisNeo using daswer123's webapi ") diff --git a/lollms/configs/config.yaml b/lollms/configs/config.yaml index 515781b..e357897 100644 --- a/lollms/configs/config.yaml +++ b/lollms/configs/config.yaml @@ -1,5 +1,5 @@ # =================== Lord Of Large Language Models Configuration file =========================== -version: 36 +version: 39 binding_name: null model_name: null @@ -56,6 +56,10 @@ auto_read: false current_voice: null current_language: en +# Image generation service +enable_sd_service: false +sd_base_url: http://127.0.0.1:7860 + # Audio media_on: false audio_in_language: 'en-US' @@ -84,4 +88,9 @@ data_vectorization_make_persistance: false # If true, the data will be persistan # Helpers -pdf_latex_path: null \ No newline at end of file +pdf_latex_path: null + +# boosting information +positive_boost: null +negative_boost: null +force_output_language_to_be: null diff --git a/lollms/paths.py b/lollms/paths.py index 0d5a304..422cdd7 100644 --- a/lollms/paths.py +++ b/lollms/paths.py @@ -26,15 +26,21 @@ class LollmsPaths: def __init__(self, global_paths_cfg_path=None, lollms_path=None, personal_path=None, custom_default_cfg_path=None, tool_prefix=""): self.global_paths_cfg_path = global_paths_cfg_path if self.global_paths_cfg_path is not None: - try: - with(open(self.global_paths_cfg_path,"r") as f): - infos = yaml.safe_load(f) - if lollms_path is None: - lollms_path = infos["lollms_path"] - if personal_path is None: - personal_path = infos["lollms_personal_path"] - except Exception as ex: - ASCIIColors.error(ex) + if self.global_paths_cfg_path.exists(): + try: + with(open(self.global_paths_cfg_path,"r") as f): + infos = yaml.safe_load(f) + if lollms_path is None: + lollms_path = infos["lollms_path"] + if personal_path is None: + personal_path = infos["lollms_personal_path"] + except Exception as ex: + ASCIIColors.error(ex) + else: + infos={ + "lollms_path":None, + "lollms_personal_path":None + } diff --git a/lollms/server/elf.py 
b/lollms/server/elf.py new file mode 100644 index 0000000..9bb88f0 --- /dev/null +++ b/lollms/server/elf.py @@ -0,0 +1,70 @@ +""" +File: lollms_web_ui.py +Author: ParisNeo +Description: Singleton class for the LoLLMS web UI. + +This class provides a singleton instance of the LoLLMS web UI, allowing access to its functionality and data across multiple endpoints. +""" + +from lollms.app import LollmsApplication +from lollms.main_config import LOLLMSConfig +from lollms.paths import LollmsPaths + +class LOLLMSElfServer(LollmsApplication): + __instance = None + + @staticmethod + def build_instance( + config: LOLLMSConfig, + lollms_paths: LollmsPaths, + load_binding=True, + load_model=True, + try_select_binding=False, + try_select_model=False, + callback=None, + socketio = None + ): + if LOLLMSElfServer.__instance is None: + LOLLMSElfServer( + config, + lollms_paths, + load_binding=load_binding, + load_model=load_model, + try_select_binding=try_select_binding, + try_select_model=try_select_model, + callback=callback, + socketio=socketio + ) + return LOLLMSElfServer.__instance + @staticmethod + def get_instance(): + return LOLLMSElfServer.__instance + + def __init__( + self, + config: LOLLMSConfig, + lollms_paths: LollmsPaths, + load_binding=True, + load_model=True, + try_select_binding=False, + try_select_model=False, + callback=None, + socketio=None + ) -> None: + super().__init__( + "LOLLMSElfServer", + config, + lollms_paths, + load_binding=load_binding, + load_model=load_model, + try_select_binding=try_select_binding, + try_select_model=try_select_model, + callback=callback, + socketio=socketio + ) + if LOLLMSElfServer.__instance is not None: + raise Exception("This class is a singleton!") + else: + LOLLMSElfServer.__instance = self + + # Other methods and properties of the LoLLMSWebUI singleton class diff --git a/lollms/server/endpoints/lollms_infos.py b/lollms/server/endpoints/lollms_infos.py new file mode 100644 index 0000000..fe3351e --- /dev/null +++ 
b/lollms/server/endpoints/lollms_infos.py @@ -0,0 +1,15 @@ +from fastapi import APIRouter +from lollms_webui import LoLLMSWebUI + +router = APIRouter() +lollmsWebUI = LoLLMSWebUI.get_instance() + +@router.get("/users") +def get_users(): + # Your code here + pass + +@router.post("/users") +def create_user(): + # Your code here + pass diff --git a/lollms/server/new_app.py b/lollms/server/new_app.py new file mode 100644 index 0000000..1374b98 --- /dev/null +++ b/lollms/server/new_app.py @@ -0,0 +1,49 @@ +""" +File: new_app.py +Author: ParisNeo +Description: Singleton class for the LoLLMS web UI. + +This file is the entry point to the webui. +""" + +from fastapi import FastAPI +from fastapi.staticfiles import StaticFiles +from lollms.app import LollmsApplication +from lollms.paths import LollmsPaths +from lollms.main_config import LOLLMSConfig +from lollms.server.elf import LOLLMSElfServer +from pathlib import Path +from ascii_colors import ASCIIColors +import socketio +import uvicorn +import argparse + +app = FastAPI() +sio = socketio.AsyncServer(async_mode="asgi") + +app.mount("/socket.io", socketio.ASGIApp(sio)) +#app.mount("/socket.io", StaticFiles(directory="path/to/socketio.js")) + + +if __name__ == "__main__": + # Parsing parameters + parser = argparse.ArgumentParser(description="Start the chatbot FastAPI app.") + + parser.add_argument( + "--host", type=str, default=None, help="the hostname to listen on" + ) + parser.add_argument("--port", type=int, default=None, help="the port to listen on") + + args = parser.parse_args() + root_path = Path(__file__).parent + lollms_paths = LollmsPaths.find_paths(force_local=True, custom_default_cfg_path="configs/config.yaml") + config = LOLLMSConfig.autoload(lollms_paths) + if args.host: + config.host=args.host + if args.port: + config.port=args.port + + LOLLMSElfServer.build_instance(config=config, lollms_paths=lollms_paths, socketio=sio) + from lollms.server.endpoints.lollms_infos import * + + uvicorn.run(app,
host=config.host, port=config.port) \ No newline at end of file