Mirror of https://github.com/ParisNeo/lollms.git

Commit 3581e608e5 (parent 402d01cfe0): "Added the new version of elf"
@@ -1,5 +1,5 @@
 # =================== Lord Of Large Language Models Configuration file ===========================
-version: 36
+version: 39
 binding_name: null
 model_name: null

@@ -56,6 +56,10 @@ auto_read: false
 current_voice: null
 current_language: en

+# Image generation service
+enable_sd_service: false
+sd_base_url: http://127.0.0.1:7860
+
 # Audio
 media_on: false
 audio_in_language: 'en-US'

@@ -84,4 +88,9 @@ data_vectorization_make_persistance: false # If true, the data will be persistant


 # Helpers
 pdf_latex_path: null
+
+# boosting information
+positive_boost: null
+negative_boost: null
+force_output_language_to_be: null
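For reference, a minimal sketch of how the keys added in version 39 could be consulted once the configuration is loaded. It assumes the LOLLMSConfig.autoload / LollmsPaths.find_paths calls used in new_app.py below and the attribute-style access used in lollms.app; nothing here is part of the commit itself.

    from lollms.main_config import LOLLMSConfig
    from lollms.paths import LollmsPaths

    # Load the configuration the same way the new server entry point does.
    lollms_paths = LollmsPaths.find_paths(force_local=True, custom_default_cfg_path="configs/config.yaml")
    config = LOLLMSConfig.autoload(lollms_paths)

    # New image-generation keys introduced by this version bump.
    if config.enable_sd_service:
        print(f"Stable Diffusion service expected at {config.sd_base_url}")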
@@ -27,6 +27,8 @@ class LollmsApplication(LoLLMsCom):
                     lollms_paths:LollmsPaths,
                     load_binding=True,
                     load_model=True,
+                    load_voice_service=True,
+                    load_sd_service=True,
                     try_select_binding=False,
                     try_select_model=False,
                     callback=None,
@@ -50,13 +52,21 @@ class LollmsApplication(LoLLMsCom):
         self.long_term_memory = None

         self.tts = None
-        if self.config.enable_voice_service:
+
+        if self.config.enable_voice_service and load_voice_service:
             try:
                 from lollms.audio_gen_modules.lollms_xtts import LollmsXTTS
-                self.tts = LollmsXTTS(self, voice_samples_path=lollms_paths.custom_voices_path)
+                self.tts = LollmsXTTS(self, voice_samples_path=lollms_paths.custom_voices_path, xtts_base_url=self.config.xtts_base_url)
             except:
                 self.warning(f"Couldn't load XTTS")

+        if self.config.enable_sd_service and load_sd_service:
+            try:
+                from lollms.image_gen_modules.lollms_sd import LollmsSD
+                self.tts = LollmsSD(self, auto_sd_base_url=self.config.sd_base_url)
+            except:
+                self.warning(f"Couldn't load SD")
+
         try:
             if config.auto_update:
                 # Clone the repository to the target path
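A minimal usage sketch of the new load_voice_service / load_sd_service switches: they let a caller construct the application without starting the XTTS or Stable Diffusion services even when those are enabled in the configuration. The application name is hypothetical; the positional argument order follows the super().__init__ call in lollms/server/elf.py below.

    from lollms.app import LollmsApplication
    from lollms.main_config import LOLLMSConfig
    from lollms.paths import LollmsPaths

    lollms_paths = LollmsPaths.find_paths(force_local=True, custom_default_cfg_path="configs/config.yaml")
    config = LOLLMSConfig.autoload(lollms_paths)

    # Build a "headless" application: bindings and models load as usual,
    # but neither optional service is brought up.
    app = LollmsApplication(
        "my_headless_app",        # hypothetical application name
        config,
        lollms_paths,
        load_voice_service=False,
        load_sd_service=False,
    )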
@@ -97,14 +97,12 @@ class LollmsXTTS:
         shared_folder = root_dir/"shared"
         self.xtts_folder = shared_folder / "xtts"

-        ASCIIColors.red(".____ ________ .____ .____ _____ _________ ____ __________________________________ ")
-        ASCIIColors.red("| | \_____ \ | | | | / \ / _____/ \ \/ /\__ ___/\__ ___/ _____/ ")
-        ASCIIColors.red("| | / | \| | | | / \ / \ \_____ \ ______ \ / | | | | \_____ \ ")
-        ASCIIColors.red("| |___/ | \ |___| |___/ Y \/ \ /_____/ / \ | | | | / \ ")
-        ASCIIColors.red("|_______ \_______ /_______ \_______ \____|__ /_______ / /___/\ \ |____| |____| /_______ / ")
-        ASCIIColors.red(" \/ \/ \/ \/ \/ \/ \_/ \/ ")
-
+        ASCIIColors.red(" __ ___ __ __ __ __ ___ _ ")
+        ASCIIColors.red(" / / /___\/ / / / /\/\ / _\ \ \/ / |_| |_ ___ ")
+        ASCIIColors.red(" / / // // / / / / \ \ \ _____\ /| __| __/ __| ")
+        ASCIIColors.red("/ /___/ \_// /___/ /___/ /\/\ \_\ \_____/ \| |_| |_\__ \ ")
+        ASCIIColors.red("\____/\___/\____/\____/\/ \/\__/ /_/\_\\__|\__|___/ ")

         ASCIIColors.red(" Forked from daswer123's XTTS server")
         ASCIIColors.red(" Integration in lollms by ParisNeo using daswer123's webapi ")
@@ -1,5 +1,5 @@
 # =================== Lord Of Large Language Models Configuration file ===========================
-version: 36
+version: 39
 binding_name: null
 model_name: null

@@ -56,6 +56,10 @@ auto_read: false
 current_voice: null
 current_language: en

+# Image generation service
+enable_sd_service: false
+sd_base_url: http://127.0.0.1:7860
+
 # Audio
 media_on: false
 audio_in_language: 'en-US'

@@ -84,4 +88,9 @@ data_vectorization_make_persistance: false # If true, the data will be persistant


 # Helpers
 pdf_latex_path: null
+
+# boosting information
+positive_boost: null
+negative_boost: null
+force_output_language_to_be: null
@@ -26,15 +26,21 @@ class LollmsPaths:
     def __init__(self, global_paths_cfg_path=None, lollms_path=None, personal_path=None, custom_default_cfg_path=None, tool_prefix=""):
         self.global_paths_cfg_path = global_paths_cfg_path
         if self.global_paths_cfg_path is not None:
-            try:
-                with(open(self.global_paths_cfg_path,"r") as f):
-                    infos = yaml.safe_load(f)
-                    if lollms_path is None:
-                        lollms_path = infos["lollms_path"]
-                    if personal_path is None:
-                        personal_path = infos["lollms_personal_path"]
-            except Exception as ex:
-                ASCIIColors.error(ex)
+            if self.global_paths_cfg_path.exists():
+                try:
+                    with(open(self.global_paths_cfg_path,"r") as f):
+                        infos = yaml.safe_load(f)
+                        if lollms_path is None:
+                            lollms_path = infos["lollms_path"]
+                        if personal_path is None:
+                            personal_path = infos["lollms_personal_path"]
+                except Exception as ex:
+                    ASCIIColors.error(ex)
+            else:
+                infos={
+                    "lollms_path":None,
+                    "lollms_personal_path":None
+                }
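The guard added above, shown in isolation as a self-contained sketch (not lollms-specific): only read the global paths file when it exists, and otherwise fall back to the same empty defaults the new else branch provides.

    from pathlib import Path
    import yaml

    def read_global_paths(cfg_path: Path) -> dict:
        # Only open the file when it actually exists, mirroring the new check.
        if cfg_path is not None and cfg_path.exists():
            try:
                with open(cfg_path, "r") as f:
                    return yaml.safe_load(f) or {}
            except Exception as ex:
                print(f"Couldn't read {cfg_path}: {ex}")
        # Defaults matching the else branch added in this commit.
        return {"lollms_path": None, "lollms_personal_path": None}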
lollms/server/elf.py (new file, 70 lines)
@@ -0,0 +1,70 @@
"""
File: lollms_web_ui.py
Author: ParisNeo
Description: Singleton class for the LoLLMS web UI.

This class provides a singleton instance of the LoLLMS web UI, allowing access to its functionality and data across multiple endpoints.
"""

from lollms.app import LollmsApplication
from lollms.main_config import LOLLMSConfig
from lollms.paths import LollmsPaths

class LOLLMSElfServer(LollmsApplication):
    __instance = None

    @staticmethod
    def build_instance(
        config: LOLLMSConfig,
        lollms_paths: LollmsPaths,
        load_binding=True,
        load_model=True,
        try_select_binding=False,
        try_select_model=False,
        callback=None,
        socketio = None
    ):
        if LOLLMSElfServer.__instance is None:
            LOLLMSElfServer(
                config,
                lollms_paths,
                load_binding=load_binding,
                load_model=load_model,
                try_select_binding=try_select_binding,
                try_select_model=try_select_model,
                callback=callback,
                socketio=socketio
            )
        return LOLLMSElfServer.__instance

    @staticmethod
    def get_instance():
        return LOLLMSElfServer.__instance

    def __init__(
        self,
        config: LOLLMSConfig,
        lollms_paths: LollmsPaths,
        load_binding=True,
        load_model=True,
        try_select_binding=False,
        try_select_model=False,
        callback=None,
        socketio=None
    ) -> None:
        super().__init__(
            "LOLLMSElfServer",
            config,
            lollms_paths,
            load_binding=load_binding,
            load_model=load_model,
            try_select_binding=try_select_binding,
            try_select_model=try_select_model,
            callback=callback,
            socketio=socketio
        )
        if LOLLMSElfServer.__instance is not None:
            raise Exception("This class is a singleton!")
        else:
            LOLLMSElfServer.__instance = self

    # Other methods and properties of the LoLLMSWebUI singleton class
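A brief usage sketch of the singleton: build_instance creates the server on the first call, and later callers (for example endpoint modules) fetch the same object with get_instance. The loading calls mirror new_app.py below; everything else is illustrative.

    from lollms.main_config import LOLLMSConfig
    from lollms.paths import LollmsPaths
    from lollms.server.elf import LOLLMSElfServer

    lollms_paths = LollmsPaths.find_paths(force_local=True, custom_default_cfg_path="configs/config.yaml")
    config = LOLLMSConfig.autoload(lollms_paths)

    # First call constructs the singleton...
    server = LOLLMSElfServer.build_instance(config=config, lollms_paths=lollms_paths)

    # ...subsequent lookups return the same instance.
    assert LOLLMSElfServer.get_instance() is server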
lollms/server/endpoints/lollms_infos.py (new file, 15 lines)
@@ -0,0 +1,15 @@
from fastapi import APIRouter
from lollms_webui import LoLLMSWebUI

router = APIRouter()
lollmsWebUI = LoLLMSWebUI.get_instance()

@router.get("/users")
def get_users():
    # Your code here
    pass

@router.post("/users")
def create_user():
    # Your code here
    pass
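These stub endpoints only become reachable once the router is registered on a FastAPI application. The commit's new_app.py only does a star import of this module, so the include_router wiring below is an assumption for illustration, not something the commit adds.

    from fastapi import FastAPI
    from lollms.server.endpoints.lollms_infos import router

    app = FastAPI()

    # Hypothetical wiring: expose the GET/POST /users stubs on the application.
    app.include_router(router)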
lollms/server/new_app.py (new file, 49 lines)
@@ -0,0 +1,49 @@
"""
File: lollms_web_ui.py
Author: ParisNeo
Description: Singleton class for the LoLLMS web UI.

This file is the entry point to the webui.
"""

from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from lollms.app import LollmsApplication
from lollms.paths import LollmsPaths
from lollms.main_config import LOLLMSConfig
from lollms.server.elf import LOLLMSElfServer
from pathlib import Path
from ascii_colors import ASCIIColors
import socketio
import uvicorn
import argparse

app = FastAPI()
sio = socketio.AsyncServer(async_mode="asgi")

app.mount("/socket.io", socketio.ASGIApp(sio))
#app.mount("/socket.io", StaticFiles(directory="path/to/socketio.js"))


if __name__ == "__main__":
    # Parsing parameters
    parser = argparse.ArgumentParser(description="Start the chatbot FastAPI app.")

    parser.add_argument(
        "--host", type=str, default=None, help="the hostname to listen on"
    )
    parser.add_argument("--port", type=int, default=None, help="the port to listen on")

    args = parser.parse_args()
    root_path = Path(__file__).parent
    lollms_paths = LollmsPaths.find_paths(force_local=True, custom_default_cfg_path="configs/config.yaml")
    config = LOLLMSConfig.autoload(lollms_paths)
    if args.host:
        config.host = args.host
    if args.port:
        config.port = args.port

    LOLLMSElfServer.build_instance(config=config, lollms_paths=lollms_paths, socketio=sio)
    from lollms.server.endpoints.lollms_infos import *

    uvicorn.run(app, host=config.host, port=config.port)
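new_app.py creates the Socket.IO AsyncServer and mounts it, but registers no event handlers yet. As orientation only, a minimal, hypothetical sketch of how handlers attach to such a server with python-socketio; none of this is in the commit.

    import socketio

    sio = socketio.AsyncServer(async_mode="asgi")

    @sio.event
    async def connect(sid, environ):
        # Called when a client completes the Socket.IO handshake.
        print(f"client {sid} connected")

    @sio.event
    async def disconnect(sid):
        print(f"client {sid} disconnected")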