lollms webui

This commit is contained in:
Saifeddine ALOUI 2024-01-08 01:08:47 +01:00
parent 53dbdeb49e
commit fc63abaebb
10 changed files with 171 additions and 71 deletions

72
app.py
View File

@ -301,6 +301,9 @@ try:
self.add_endpoint("/get_active_personality_settings", "get_active_personality_settings", self.get_active_personality_settings, methods=["GET"])
self.add_endpoint("/set_active_personality_settings", "set_active_personality_settings", self.set_active_personality_settings, methods=["POST"])
self.add_endpoint("/get_current_personality_path_infos", "get_current_personality_path_infos", self.get_current_personality_path_infos, methods=["GET"])
self.add_endpoint("/get_personality", "get_personality", self.get_personality, methods=["GET"])
self.add_endpoint("/get_current_personality", "get_current_personality", self.get_current_personality, methods=["GET"])
self.add_endpoint("/get_all_personalities", "get_all_personalities", self.get_all_personalities, methods=["GET"])
self.add_endpoint("/uploads/<path:filename>", "serve_uploads", self.serve_uploads, methods=["GET"])
@ -339,6 +342,10 @@ try:
self.add_endpoint("/make_title", "make_title", self.make_title, methods=["POST"])
self.add_endpoint("/export", "export", self.export, methods=["GET"])
self.add_endpoint("/export_multiple_discussions", "export_multiple_discussions", self.export_multiple_discussions, methods=["POST"])
self.add_endpoint("/import_multiple_discussions", "import_multiple_discussions", self.import_multiple_discussions, methods=["POST"])
self.add_endpoint("/get_generation_status", "get_generation_status", self.get_generation_status, methods=["GET"])
self.add_endpoint("/stop_gen", "stop_gen", self.stop_gen, methods=["GET"])
@ -366,72 +373,31 @@ try:
self.add_endpoint("/update_setting", "update_setting", self.update_setting, methods=["POST"])
self.add_endpoint("/apply_settings", "apply_settings", self.apply_settings, methods=["POST"])
self.add_endpoint("/save_settings", "save_settings", self.save_settings, methods=["POST"])
self.add_endpoint("/save_settings", "save_settings", self.save_settings, methods=["POST"])
self.add_endpoint("/open_code_folder", "open_code_folder", self.open_code_folder, methods=["POST"])
self.add_endpoint("/open_code_folder_in_vs_code", "open_code_folder_in_vs_code", self.open_code_folder_in_vs_code, methods=["POST"])
self.add_endpoint("/open_code_in_vs_code", "open_code_in_vs_code", self.open_code_in_vs_code, methods=["POST"])
self.add_endpoint("/open_file", "open_file", self.open_file, methods=["GET"])
# ----
self.add_endpoint("/get_server_address", "get_server_address", self.get_server_address, methods=["GET"])
self.add_endpoint(
"/get_current_personality", "get_current_personality", self.get_current_personality, methods=["GET"]
)
self.add_endpoint(
"/get_all_personalities", "get_all_personalities", self.get_all_personalities, methods=["GET"]
)
self.add_endpoint(
"/get_personality", "get_personality", self.get_personality, methods=["GET"]
)
self.add_endpoint(
"/reset", "reset", self.reset, methods=["GET"]
)
self.add_endpoint(
"/export_multiple_discussions", "export_multiple_discussions", self.export_multiple_discussions, methods=["POST"]
)
self.add_endpoint(
"/import_multiple_discussions", "import_multiple_discussions", self.import_multiple_discussions, methods=["POST"]
)
self.add_endpoint(
"/list_voices", "list_voices", self.list_voices, methods=["GET"]
@ -467,10 +433,6 @@ try:
"/install_sd", "install_sd", self.install_sd, methods=["GET"]
)
self.add_endpoint("/open_code_folder", "open_code_folder", self.open_code_folder, methods=["POST"])
self.add_endpoint("/open_code_folder_in_vs_code", "open_code_folder_in_vs_code", self.open_code_folder_in_vs_code, methods=["POST"])
self.add_endpoint("/open_code_in_vs_code", "open_code_in_vs_code", self.open_code_in_vs_code, methods=["POST"])
self.add_endpoint("/open_file", "open_file", self.open_file, methods=["GET"])
self.add_endpoint("/update_binding_settings", "update_binding_settings", self.update_binding_settings, methods=["GET"])

View File

@ -127,7 +127,7 @@ async def open_code_folder_in_vs_code(request: Request):
lollmsElfServer.error(ex)
return {"status":False,"error":str(ex)}
@router.post("/open_file")
async def open_file(request: Request):
"""
Opens code in vs code.
@ -147,6 +147,7 @@ async def open_file(request: Request):
return {"status":False,"error":str(ex)}
@router.post("/open_code_in_vs_code")
async def open_code_in_vs_code(request: Request):
"""
Opens code in vs code.
@ -179,6 +180,7 @@ async def open_code_in_vs_code(request: Request):
return {"status":False,"error":str(ex)}
@router.post("/open_code_folder")
async def open_code_folder(request: Request):
"""
Opens code folder.

View File

@ -7,12 +7,12 @@ description:
application. These routes allow users to manipulate the discussion elements.
"""
from fastapi import APIRouter
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from ascii_colors import ASCIIColors
from api.db import DiscussionsDB, Discussion
@ -143,4 +143,50 @@ def delete_discussion(data: DeleteDiscussionParameters):
lollmsElfServer.connections[client_id]["current_discussion"] = Discussion(discussion_id, lollmsElfServer.db)
lollmsElfServer.connections[client_id]["current_discussion"].delete_discussion()
lollmsElfServer.connections[client_id]["current_discussion"] = None
return {'status':True}
return {'status':True}
# ----------------------------- import/export --------------------
@router.post("/export_multiple_discussions")
async def export_multiple_discussions(request: Request):
    """
    Exports a selection of discussions in the requested format.

    Expects a JSON body with:
      - discussion_ids: list of discussion ids to export
      - export_format: "json" or "markdown" (any other value falls back to markdown)

    :param request: The HTTP request object.
    :return: The exported discussions on success, or a JSON error status on failure.
    """
    try:
        data = await request.json()
        discussion_ids = data["discussion_ids"]
        export_format = data["export_format"]
        # The original elif/else both exported markdown; collapse to a single
        # fallback branch (behavior unchanged: anything non-"json" -> markdown).
        if export_format == "json":
            discussions = lollmsElfServer.db.export_discussions_to_json(discussion_ids)
        else:
            discussions = lollmsElfServer.db.export_discussions_to_markdown(discussion_ids)
        return discussions
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.error(ex)
        return {"status":False,"error":str(ex)}
@router.post("/import_multiple_discussions")
async def import_multiple_discussions(request: Request):
    """
    Imports a set of discussions from a JSON payload into the database.

    Expects a JSON body with:
      - jArray: list of discussion objects to import

    :param request: The HTTP request object.
    :return: The imported discussions on success (echoed back so the client can
             confirm what was stored), or a JSON error status on failure.
    """
    try:
        discussions = (await request.json())["jArray"]
        lollmsElfServer.db.import_from_json(discussions)
        return discussions
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.error(ex)
        return {"status":False,"error":str(ex)}

View File

@ -0,0 +1,63 @@
"""
project: lollms
file: lollms_discussion_events.py
author: ParisNeo
description:
This module contains a set of Socketio routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes are specific to discussion operation
"""
from fastapi import APIRouter, Request
from fastapi import HTTPException
from pydantic import BaseModel
import pkg_resources
from lollms.server.elf_server import LOLLMSElfServer
from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.utilities import load_config, trace_exception, gc
from lollms.utilities import find_first_available_file_index, convert_language_name
from lollms_webui import LOLLMSWebUI
from pathlib import Path
from typing import List
import socketio
import threading
import os
from api.db import Discussion
from datetime import datetime
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
# ----------------------------------- events -----------------------------------------
def add_events(sio:socketio):
    # Registers discussion-related Socket.IO event handlers on the given server.

    # 'create_empty_message': inserts an empty/stub message into the client's
    # current discussion so the user can orient the AI's next answer.
    # data["type"]==0 -> empty user message; any other value -> editable AI stub.
    @sio.on('create_empty_message')
    def create_empty_message(sid, data):
        client_id = sid
        # NOTE: `type` shadows the builtin; kept as-is to avoid behavior drift.
        type = data.get("type",0)
        message = data.get("message","")
        if type==0:
            ASCIIColors.info(f"Building empty User message requested by : {client_id}")
            # send the message to the bot
            print(f"Creating an empty message for AI answer orientation")
            if lollmsElfServer.connections[client_id]["current_discussion"]:
                # A model must be selected before any message can be generated
                if not lollmsElfServer.model:
                    lollmsElfServer.error("No model selected. Please make sure you select a model before starting generation", client_id = client_id)
                    return
                lollmsElfServer.new_message(client_id, lollmsElfServer.config.user_name, message, sender_type=SENDER_TYPES.SENDER_TYPES_USER, open=True)
        else:
            # AI stub requires an active personality in addition to a model
            if lollmsElfServer.personality is None:
                lollmsElfServer.warning("Select a personality")
                return
            ASCIIColors.info(f"Building empty AI message requested by : {client_id}")
            # send the message to the bot
            print(f"Creating an empty message for AI answer orientation")
            if lollmsElfServer.connections[client_id]["current_discussion"]:
                if not lollmsElfServer.model:
                    lollmsElfServer.error("No model selected. Please make sure you select a model before starting generation", client_id=client_id)
                    return
                # Placeholder content the user edits to seed the AI's answer
                lollmsElfServer.new_message(client_id, lollmsElfServer.personality.name, "[edit this to put your ai answer start]", open=True)

@ -1 +1 @@
Subproject commit fc1598a2f59ceb721a3ef8412c0fc035344226b6
Subproject commit c7ca64c5c8351631adae352d6a0cb250b7aa137f

View File

@ -1010,7 +1010,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
model = self.config["model_name"],
personality = self.config["personalities"][self.config["active_personality_id"]],
) # first the content is empty, but we'll fill it at the end
run_async(
try:
self.socketio.emit('new_message',
{
"sender": sender,
@ -1033,8 +1033,29 @@ class LOLLMSWebUI(LOLLMSElfServer):
'open': open
}, to=client_id
)
)
except:
asyncio.run(self.socketio.emit('new_message',
{
"sender": sender,
"message_type": message_type.value,
"sender_type": SENDER_TYPES.SENDER_TYPES_AI.value,
"content": content,
"parameters": parameters,
"metadata": metadata,
"ui": ui,
"id": msg.id,
"parent_message_id": msg.parent_message_id,
'binding': self.config["binding_name"],
'model' : self.config["model_name"],
'personality': self.config["personalities"][self.config["active_personality_id"]],
'created_at': self.connections[client_id]["current_discussion"].current_message.created_at,
'finished_generating_at': self.connections[client_id]["current_discussion"].current_message.finished_generating_at,
'open': open
}, to=client_id
))
def update_message(self, client_id, chunk,
parameters=None,
metadata=[],

View File

@ -80,8 +80,12 @@ if __name__ == "__main__":
from lollms.server.events.lollms_generation_events import add_events as lollms_generation_events_add
from lollms.server.events.lollms_personality_events import add_events as lollms_personality_events_add
from events.lollms_generation_events import add_events as lollms_webui_generation_events_add
from events.lollms_discussion_events import add_events as lollms_webui_discussion_events_add
from events.lollms_chatbox_events import add_events as lollms_chatbox_events_add
app.include_router(lollms_infos_router)
app.include_router(lollms_binding_files_server_router)
@ -106,8 +110,10 @@ if __name__ == "__main__":
lollms_generation_events_add(sio)
lollms_personality_events_add(sio)
lollms_webui_generation_events_add(sio)
lollms_webui_discussion_events_add(sio)
lollms_chatbox_events_add(sio)
app.mount("/extensions", StaticFiles(directory=Path(__file__).parent/"web"/"dist", html=True), name="extensions")

View File

@ -153,9 +153,9 @@ else
fi
cd scripts/python/lollms_installer
python main.py
cd ..
# cd scripts/python/lollms_installer
# python main.py
# cd ..
PrintBigMessage() {
echo

View File

@ -165,9 +165,9 @@ else
fi
cd scripts/python/lollms_installer
python main.py
cd ..
# cd scripts/python/lollms_installer
# python main.py
# cd ..
PrintBigMessage() {
echo

View File

@ -152,9 +152,9 @@ echo Install failed
goto endend
:end
cd scripts\python\lollms_installer
call python main.py
cd ..
@rem cd scripts\python\lollms_installer
@rem call python main.py
@rem cd ..
echo Installation complete.
:endend
pause