Enhanced UI

This commit is contained in:
Saifeddine ALOUI 2024-01-09 01:15:51 +01:00
parent 7b5f695dab
commit f7ec143426
14 changed files with 242 additions and 31 deletions

12
app.py
View File

@@ -383,22 +383,16 @@ try:
self.add_endpoint("/open_file", "open_file", self.open_file, methods=["GET"])
self.add_endpoint("/reset", "reset", self.reset, methods=["GET"])
self.add_endpoint("/get_server_address", "get_server_address", self.get_server_address, methods=["GET"])
# ----
self.add_endpoint("/get_server_address", "get_server_address", self.get_server_address, methods=["GET"])
self.add_endpoint(
"/reset", "reset", self.reset, methods=["GET"]
)
self.add_endpoint(
"/list_voices", "list_voices", self.list_voices, methods=["GET"]
)
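These registrations expose plain GET routes, including the new /list_voices endpoint. Below is a minimal client-side sketch of calling them; the base URL is an assumption (a typical local LoLLMs WebUI address), and the responses are only printed because their shape is not part of this diff:

# Hypothetical client calls against the GET routes registered above.
# BASE_URL is an assumption, not taken from this commit.
import requests

BASE_URL = "http://localhost:9600"

print(requests.get(f"{BASE_URL}/get_server_address").text)
print(requests.get(f"{BASE_URL}/list_voices").text)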

204
endpoints/lollms_xtts.py Normal file
View File

@@ -0,0 +1,204 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
This module contains a set of FastAPI routes for the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to execute code blocks (Python, LaTeX, shell) and to open the resulting files and folders on the host machine.
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from ascii_colors import ASCIIColors
import os
import platform
from utilities.execution_engines.python_execution_engine import execute_python
from utilities.execution_engines.latex_execution_engine import execute_latex
from utilities.execution_engines.shell_execution_engine import execute_bash
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
@router.post("/execute_code")
async def execute_code(request: Request):
"""
Executes a code block (Python, LaTeX, or shell) and returns the output.
:param request: The HTTP request object.
:return: A JSON response with the status of the operation.
"""
try:
data = (await request.json())
code = data["code"]
discussion_id = data.get("discussion_id","unknown_discussion")
message_id = data.get("message_id","unknown_message")
language = data.get("language","python")
ASCIIColors.info("Executing python code:")
ASCIIColors.yellow(code)
if language=="python":
return execute_python(code, discussion_id, message_id)
elif language=="latex":
return execute_latex(code, discussion_id, message_id)
elif language in ["bash","shell","cmd","powershell"]:
return execute_bash(code, discussion_id, message_id)
return {"output": "Unsupported language", "execution_time": 0}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.error(ex)
return {"status":False,"error":str(ex)}
@router.post("/open_code_folder")
async def open_code_folder(request: Request):
"""
Opens code folder.
:param request: The HTTP request object.
:return: A JSON response with the status of the operation.
"""
try:
data = (await request.json())
discussion_id = data.get("discussion_id","unknown_discussion")
ASCIIColors.info("Opening folder:")
# Ensure the discussion output folder exists, then open it in the file explorer
root_folder = lollmsElfServer.lollms_paths.personal_outputs_path/"discussions"/f"d_{discussion_id}"
root_folder.mkdir(parents=True,exist_ok=True)
if platform.system() == 'Windows':
os.startfile(str(root_folder))
elif platform.system() == 'Linux':
os.system('xdg-open ' + str(root_folder))
elif platform.system() == 'Darwin':
os.system('open ' + str(root_folder))
return {"output": "OK", "execution_time": 0}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.error(ex)
return {"status":False,"error":str(ex)}
@router.post("/open_code_folder_in_vs_code")
async def open_code_folder_in_vs_code(request: Request):
"""
Opens the discussion code folder in Visual Studio Code.
:param request: The HTTP request object.
:return: A JSON response with the status of the operation.
"""
try:
data = (await request.json())
code = data["code"]
discussion_id = data.get("discussion_id","unknown_discussion")
message_id = data.get("message_id","unknown_message")
language = data.get("language","python")
ASCIIColors.info("Opening folder:")
# Ensure the discussion folder exists, then write the code to a temporary file
root_folder = lollmsElfServer.lollms_paths.personal_outputs_path/"discussions"/f"d_{discussion_id}"
root_folder.mkdir(parents=True,exist_ok=True)
tmp_file = root_folder/f"ai_code_{message_id}.py"
with open(tmp_file,"w") as f:
f.write(code)
os.system('code ' + str(root_folder))
return {"output": "OK", "execution_time": 0}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.error(ex)
return {"status":False,"error":str(ex)}
@router.post("/open_file")
async def open_file(request: Request):
"""
Opens a file with the system's default application.
:param request: The HTTP request object.
:return: A JSON response with the status of the operation.
"""
try:
data = (await request.json())
path = data.get('path')
# 'start' is a Windows shell command; this call assumes a Windows host
os.system("start "+path)
return {"output": "OK", "execution_time": 0}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.error(ex)
return {"status":False,"error":str(ex)}
@router.post("/open_code_in_vs_code")
async def open_code_in_vs_code(request: Request):
"""
Writes the submitted code to a temporary file and opens its folder in Visual Studio Code.
:param request: The HTTP request object.
:return: A JSON response with the status of the operation.
"""
try:
data = (await request.json())
code = data["code"]
discussion_id = data.get("discussion_id","unknown_discussion")
message_id = data.get("message_id","unknown_message")
language = data.get("language","python")
ASCIIColors.info("Opening folder:")
# Create a temporary file.
root_folder = lollmsElfServer.lollms_paths.personal_outputs_path/"discussions"/f"d_{discussion_id}"/f"{message_id}.py"
root_folder.mkdir(parents=True,exist_ok=True)
tmp_file = root_folder/f"ai_code_{message_id}.py"
with open(tmp_file,"w") as f:
f.write(code)
os.system('code ' + str(root_folder))
return {"output": "OK", "execution_time": 0}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.error(ex)
return {"status":False,"error":str(ex)}
@router.post("/open_code_folder")
async def open_code_folder(request: Request):
"""
Opens code folder.
:param request: The HTTP request object.
:return: A JSON response with the status of the operation.
"""
try:
data = (await request.json())
discussion_id = data.get("discussion_id","unknown_discussion")
ASCIIColors.info("Opening folder:")
# Create a temporary file.
root_folder = lollmsElfServer.lollms_paths.personal_outputs_path/"discussions"/f"d_{discussion_id}"
root_folder.mkdir(parents=True,exist_ok=True)
if platform.system() == 'Windows':
os.startfile(str(root_folder))
elif platform.system() == 'Linux':
os.system('xdg-open ' + str(root_folder))
elif platform.system() == 'Darwin':
os.system('open ' + str(root_folder))
return {"output": "OK", "execution_time": 0}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.error(ex)
return {"status":False,"error":str(ex)}

@@ -1 +1 @@
Subproject commit d0368bdb2dadcab350bc8a533a74e3b9fd9e8f07
Subproject commit 4b016c16e91e9f06362f5cd17290ce40aced63a6

lollms_webui.py
View File

@@ -962,8 +962,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
verbose=True,
):
asyncio.run(
self.socketio.emit('notification', {
run_async(partial(self.socketio.emit,'notification', {
'content': content,# self.connections[client_id]["generated_text"],
'notification_type': notification_type.value,
"duration": duration,
@@ -1044,8 +1043,8 @@ class LOLLMSWebUI(LOLLMSElfServer):
self.connections[client_id]["current_discussion"].current_message.finished_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
mtdt = json.dumps(metadata, indent=4) if metadata is not None and type(metadata)== list else metadata
if self.nb_received_tokens==1:
asyncio.run(
self.socketio.emit('update_message', {
run_async(
partial(self.socketio.emit,'update_message', {
"sender": self.personality.name,
'id':self.connections[client_id]["current_discussion"].current_message.id,
'content': "✍ warming up ...",# self.connections[client_id]["generated_text"],
@@ -1059,8 +1058,8 @@ class LOLLMSWebUI(LOLLMSElfServer):
)
)
asyncio.run(
self.socketio.emit('update_message', {
run_async(
partial(self.socketio.emit,'update_message', {
"sender": self.personality.name,
'id':self.connections[client_id]["current_discussion"].current_message.id,
'content': chunk,# self.connections[client_id]["generated_text"],
@@ -1085,8 +1084,8 @@ class LOLLMSWebUI(LOLLMSElfServer):
self.connections[client_id]["generated_text"]=self.connections[client_id]["generated_text"].split("!@>")[0]
# Send final message
self.connections[client_id]["current_discussion"].current_message.finished_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
asyncio.run(
self.socketio.emit('close_message', {
run_async(
partial(self.socketio.emit,'close_message', {
"sender": self.personality.name,
"id": self.connections[client_id]["current_discussion"].current_message.id,
"content":self.connections[client_id]["generated_text"],
@@ -1143,8 +1142,8 @@ class LOLLMSWebUI(LOLLMSElfServer):
if message_type == MSG_TYPE.MSG_TYPE_NEW_MESSAGE:
self.nb_received_tokens = 0
self.start_time = datetime.now()
asyncio.run(
self.new_message(
run_async(
partial(self.new_message,
client_id,
self.personality.name if personality is None else personality.name,
chunk if parameters["type"]!=MSG_TYPE.MSG_TYPE_UI.value else '',
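These hunks replace asyncio.run(self.socketio.emit(...)) with run_async(partial(self.socketio.emit, ...)): asyncio.run raises a RuntimeError when an event loop is already running, so the emit coroutine is wrapped in functools.partial and handed to a helper instead. The run_async helper used here is defined elsewhere in the codebase; the body below is only an illustrative sketch of the pattern, not the actual implementation:

# Illustrative sketch (assumption, not the actual lollms helper): run a
# coroutine factory whether or not an event loop is already running.
import asyncio
from functools import partial

def run_async(coro_factory):
    try:
        loop = asyncio.get_running_loop()    # raises RuntimeError if no loop is running
    except RuntimeError:
        return asyncio.run(coro_factory())   # no running loop: block until the coroutine finishes
    return loop.create_task(coro_factory())  # loop already running: schedule without blocking

# Usage mirroring the refactor above:
# run_async(partial(self.socketio.emit, 'update_message', payload))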

View File

@@ -156,6 +156,9 @@ fi
# cd scripts/python/lollms_installer
# python main.py
# cd ..
echo "Creating a bin dir (required for llamacpp binding)"
mkdir ../installer_files/lollms_env/bin
PrintBigMessage() {
echo

View File

@@ -83,9 +83,12 @@ done < requirements.txt
python -m pip install -r requirements.txt --upgrade
cd scripts/python/lollms_installer
python main.py
cd ..
# cd scripts/python/lollms_installer
# python main.py
# cd ..
echo "Creating a bin dir (required for llamacpp binding)"
mkdir ../installer_files/lollms_env/bin
PrintBigMessage() {
echo

View File

@@ -169,6 +169,10 @@ fi
# python main.py
# cd ..
echo "Creating a bin dir (required for llamacpp binding)"
mkdir ../installer_files/lollms_env/bin
PrintBigMessage() {
echo
echo "*******************************************************************"

View File

@@ -152,6 +152,9 @@ echo Install failed
goto endend
:end
echo Creating a bin dir (required for llamacpp binding)
md ../installer_files/lollms_env/bin
@rem cd scripts\python\lollms_installer
@rem call python main.py
@rem cd ..

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

4
web/dist/index.html vendored
View File

@@ -6,8 +6,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LoLLMS WebUI - Welcome</title>
<script type="module" crossorigin src="/assets/index-a1f2945d.js"></script>
<link rel="stylesheet" href="/assets/index-299ef7d5.css">
<script type="module" crossorigin src="/assets/index-cb8d1b46.js"></script>
<link rel="stylesheet" href="/assets/index-27c35171.css">
</head>
<body>
<div id="app"></div>

View File

@@ -98,7 +98,7 @@ handleClickOutside(event) {
console.log(command.value)
if (typeof command.value === 'function') {
console.log("Command detected")
console.log("Command detected",command)
command.value();
}
if (this.execute_cmd) {

View File

@@ -3072,6 +3072,7 @@ export default {
this.$store.state.toast.showToast("Binding settings updated successfully!", 4, true)
axios.get('/update_binding_settings').then((res) => {
this.$store.state.toast.showToast("Binding settings committed successfully!", 4, true)
window.location.reload();
})
} else {
this.$store.state.toast.showToast("Did not get binding settings responses.\n" + response, 4, false)

@@ -1 +1 @@
Subproject commit 4dabcb35d6eecd7935478b587d2e8308241d8fd0
Subproject commit ad282cf4d22cc8bb2db6cc499ea6cae8b7a5deb3