This commit is contained in:
Saifeddine ALOUI 2024-01-05 03:40:55 +01:00
parent 5ab35e254b
commit 89659d6986
4 changed files with 169 additions and 14 deletions

View File

@ -0,0 +1,24 @@
from fastapi import APIRouter
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string
from ascii_colors import ASCIIColors
class GenerateRequest(BaseModel):
    """Request payload for the POST /generate endpoint."""
    # Raw prompt text the caller wants the model to generate from.
    text: str
# Router collecting the generation/discussion endpoints; mounted by the main app.
router = APIRouter()
# Shared LOLLMSWebUI singleton used by the endpoints below.
# NOTE(review): assumes the singleton was already built at startup — confirm
# get_instance() does not return None before build_instance() runs.
elf_server = LOLLMSWebUI.get_instance()
@router.post("/generate")
def lollms_generate(request_data: GenerateRequest):
pass
@router.get("/list_discussions")
def list_discussions():
discussions = elf_server.db.get_discussions()
return discussions

View File

@ -8,9 +8,51 @@ This class provides a singleton instance of the LoLLMS web UI, allowing access t
from lollms.server.elf_server import LOLLMSElfServer
from lollms.main_config import LOLLMSConfig
from lollms.helpers import trace_exception
from lollms.paths import LollmsPaths
from ascii_colors import ASCIIColors
from datetime import datetime
from api.db import DiscussionsDB, Discussion
from pathlib import Path
# Media libraries (webcam / audio capture) are optional: degrade gracefully
# when they cannot be imported instead of failing the whole server start-up.
try:
    from lollms.media import WebcamImageSender, AudioRecorder
    Media_on = True
except Exception:
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are not
    # swallowed; any import-time failure (missing package, native deps) still
    # lands here and only disables media features.
    ASCIIColors.warning("Couldn't load media library.\nYou will not be able to perform any of the media linked operations. please verify the logs and install any required installations")
    Media_on = False
class LOLLMSWebUI(LOLLMSElfServer):
__instance = None
@staticmethod
def build_instance(
    config: LOLLMSConfig,
    lollms_paths: LollmsPaths,
    load_binding=True,
    load_model=True,
    load_voice_service=True,
    load_sd_service=True,
    try_select_binding=False,
    try_select_model=False,
    callback=None,
    socketio=None,
):
    """Build (once) and return the LOLLMSWebUI singleton.

    Subsequent calls ignore the arguments and return the already-built
    instance.
    """
    if LOLLMSWebUI.__instance is None:
        # Fix: the original discarded the constructed object, so unless
        # __init__ set __instance itself (not visible here), this method
        # always returned None. Assigning explicitly is safe either way.
        LOLLMSWebUI.__instance = LOLLMSWebUI(
            config,
            lollms_paths,
            load_binding=load_binding,
            load_model=load_model,
            load_sd_service=load_sd_service,
            load_voice_service=load_voice_service,
            try_select_binding=try_select_binding,
            try_select_model=try_select_model,
            callback=callback,
            socketio=socketio,
        )
    return LOLLMSWebUI.__instance
def __init__(
self,
config: LOLLMSConfig,
@ -38,4 +80,80 @@ class LOLLMSWebUI(LOLLMSElfServer):
)
self.app_name = "LOLLMSWebUI"
self.busy = False
self.nb_received_tokens = 0
self.config_file_path = config.file_path
self.cancel_gen = False
# Keeping track of current discussion and message
self._current_user_message_id = 0
self._current_ai_message_id = 0
self._message_id = 0
self.db_path = config["db_path"]
if Path(self.db_path).is_absolute():
# Create database object
self.db = DiscussionsDB(self.db_path)
else:
# Create database object
self.db = DiscussionsDB(self.lollms_paths.personal_databases_path/self.db_path)
# If the database is empty, populate it with tables
ASCIIColors.info("Checking discussions database... ",end="")
self.db.create_tables()
self.db.add_missing_columns()
ASCIIColors.success("ok")
# prepare vectorization
if self.config.data_vectorization_activate and self.config.use_discussions_history:
try:
ASCIIColors.yellow("Loading long term memory")
folder = self.lollms_paths.personal_databases_path/"vectorized_dbs"
folder.mkdir(parents=True, exist_ok=True)
self.build_long_term_skills_memory()
ASCIIColors.yellow("Ready")
except Exception as ex:
trace_exception(ex)
self.long_term_memory = None
else:
self.long_term_memory = None
# This is used to keep track of messages
self.download_infos={}
self.connections = {
0:{
"current_discussion":None,
"generated_text":"",
"cancel_generation": False,
"generation_thread": None,
"processing":False,
"schedule_for_deletion":False,
"continuing": False,
"first_chunk": True,
}
}
if Media_on:
try:
self.webcam = WebcamImageSender(socketio,lollmsCom=self)
except:
self.webcam = None
try:
self.rec_output_folder = lollms_paths.personal_outputs_path/"audio_rec"
self.rec_output_folder.mkdir(exist_ok=True, parents=True)
self.summoned = False
self.audio_cap = AudioRecorder(socketio,self.rec_output_folder/"rt.wav", callback=self.audio_callback,lollmsCom=self)
except:
self.audio_cap = None
self.rec_output_folder = None
else:
self.webcam = None
self.rec_output_folder = None
# Other methods and properties of the LoLLMSWebUI singleton class

View File

@ -8,6 +8,7 @@ This file is the entry point to the webui.
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.responses import HTMLResponse
from lollms.app import LollmsApplication
from lollms.paths import LollmsPaths
from lollms.main_config import LOLLMSConfig
@ -19,11 +20,30 @@ import uvicorn
import argparse
app = FastAPI()
sio = socketio.AsyncServer(async_mode="asgi")
# Create a Socket.IO server
sio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*") # Enable CORS for all origins
app.mount("/socket.io", socketio.ASGIApp(sio))
#app.mount("/socket.io", StaticFiles(directory="path/to/socketio.js"))
# Define a WebSocket event handler
@sio.event
async def connect(sid, environ):
    """Log each new Socket.IO client connection."""
    announcement = f"Connected: {sid}"
    print(announcement)
@sio.event
async def disconnect(sid):
    """Log each Socket.IO client disconnection."""
    announcement = f"Disconnected: {sid}"
    print(announcement)
@sio.event
async def message(sid, data):
    """Log an incoming client message and send back an acknowledgement."""
    incoming = f"Message from {sid}: {data}"
    print(incoming)
    await sio.send(sid, "Message received!")
#app.mount("/socket.io", StaticFiles(directory="path/to/socketio.js"))
if __name__ == "__main__":
# Parsing parameters
@ -48,9 +68,13 @@ if __name__ == "__main__":
# Import all endpoints
from lollms.server.endpoints.lollms_infos import router as lollms_infos_router
from lollms.server.endpoints.lollms_generator import router as lollms_generator_router
from endpoints.lollms_discussion import router as lollms_discussion_router
app.include_router(lollms_infos_router)
app.include_router(lollms_generator_router)
app.include_router(lollms_discussion_router)
app.mount("/", StaticFiles(directory=Path(__file__).parent/"web"/"dist", html=True), name="static")
uvicorn.run(app, host=config.host, port=config.port)

View File

@ -64,18 +64,7 @@ lollms_app = LollmsApplication(
load_sd_service=False,
socketio=sio)
# Serve the index.html file for all routes
@app.get("/{full_path:path}")
async def serve_index(request: Request, full_path: Path):
if str(full_path).endswith(".js"):
return FileResponse(root_path/"scripts/python/lollms_installer/frontend/dist"/full_path, media_type="application/javascript")
if str(full_path).endswith(".css"):
return FileResponse(root_path/"scripts/python/lollms_installer/frontend/dist"/full_path)
if str(full_path).endswith(".html"):
return FileResponse(root_path/"scripts/python/lollms_installer/frontend/dist"/full_path)
return FileResponse(root_path/"scripts/python/lollms_installer/frontend/dist/index.html")
# Previous approach (disabled): mount the dist directory without html fallback.
# app.mount("/", StaticFiles(directory=root_path/"scripts/python/lollms_installer/frontend/dist"), name="static")
# Serve the built frontend; html=True makes StaticFiles serve index.html
# for directory requests, which the SPA needs.
app.mount("/", StaticFiles(directory=Path(__file__).parent/"frontend"/"dist", html=True), name="static")
class InstallProperties(BaseModel):
    """Request body describing the requested installation."""
    # Installation mode identifier — allowed values not visible here; TODO confirm.
    mode: str