Saifeddine ALOUI 2024-01-07 11:45:10 +01:00
parent de2a328183
commit 376f056ae8
4 changed files with 19 additions and 18 deletions

@@ -1 +1 @@
-Subproject commit 52d77cacc14d5f92e3151ef922d3694c44ac213d
+Subproject commit adf1e23559cbea865972fd80ed2304eb7ec23901


@@ -20,7 +20,7 @@ from lollms.paths import LollmsPaths
from lollms.helpers import ASCIIColors, trace_exception
from lollms.com import NotificationType, NotificationDisplayType, LoLLMsCom
from lollms.app import LollmsApplication
-from lollms.utilities import File64BitsManager, PromptReshaper, PackageManager, find_first_available_file_index
+from lollms.utilities import File64BitsManager, PromptReshaper, PackageManager, find_first_available_file_index, run_async
import git
try:
@@ -228,7 +228,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
"processing":False,
"schedule_for_deletion":False
}
-await self.socketio.emit('connected', room=sid)
+await self.socketio.emit('connected', to=sid)
ASCIIColors.success(f'Client {sid} connected')
@socketio.event
@@ -943,14 +943,15 @@ class LOLLMSWebUI(LOLLMSElfServer):
display_type:NotificationDisplayType=NotificationDisplayType.TOAST,
verbose=True
):
-self.socketio.emit('notification', {
-    'content': content,# self.connections[client_id]["generated_text"],
-    'notification_type': notification_type.value,
-    "duration": duration,
-    'display_type':display_type.value
-}, room=client_id
-)
-self.socketio.sleep(0.01)
+run_async(
+    self.socketio.emit('notification', {
+        'content': content,# self.connections[client_id]["generated_text"],
+        'notification_type': notification_type.value,
+        "duration": duration,
+        'display_type':display_type.value
+    }, to=client_id
+    )
+)
if verbose:
if notification_type==NotificationType.NOTIF_SUCCESS:
ASCIIColors.success(content)
@@ -1011,7 +1012,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
'finished_generating_at': self.connections[client_id]["current_discussion"].current_message.finished_generating_at,
'open': open
-}, room=client_id
+}, to=client_id
)
def update_message(self, client_id, chunk,
@@ -1033,7 +1034,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
'finished_generating_at': self.connections[client_id]["current_discussion"].current_message.finished_generating_at,
'parameters':parameters,
'metadata':metadata
-}, room=client_id
+}, to=client_id
)
@@ -1047,7 +1048,7 @@
'finished_generating_at': self.connections[client_id]["current_discussion"].current_message.finished_generating_at,
'parameters':parameters,
'metadata':metadata
-}, room=client_id
+}, to=client_id
)
self.socketio.sleep(0.01)
if msg_type != MSG_TYPE.MSG_TYPE_INFO:
@@ -1074,7 +1075,7 @@
'created_at': self.connections[client_id]["current_discussion"].current_message.created_at,
'finished_generating_at': self.connections[client_id]["current_discussion"].current_message.finished_generating_at,
-}, room=client_id
+}, to=client_id
)
def process_chunk(
self,
@@ -1398,7 +1399,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
'status': True,
'discussion_id':d.discussion_id,
'title':title
-}, room=client_id)
+}, to=client_id)
self.busy=False
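
For context: the hunks above wrap the async socketio.emit coroutines in run_async so they can be fired from synchronous methods, and the destination keyword changes from room= to to= (python-socketio accepts both as aliases for the emit target). Below is a minimal sketch of such a run_async helper, assuming it either schedules the coroutine on an already-running event loop or drives it to completion; the actual lollms.utilities.run_async implementation may differ.

import asyncio

def run_async(coro):
    # Illustrative sketch only: run an awaitable from synchronous code.
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        # No event loop running in this thread: drive the coroutine to completion.
        return asyncio.run(coro)
    # A loop is already running (e.g. inside the ASGI server):
    # schedule the coroutine without blocking the caller.
    return loop.create_task(coro)

# Usage mirroring the diff (socketio and client_id are placeholders here):
#   run_async(socketio.emit('notification', payload, to=client_id))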


@@ -1,6 +1,6 @@
"""
Project: lollms_installer
-Author: Your Name
+Author: ParisNeo
Description: This tool is designed to install and configure the LoLLMS system on your machine. LoLLMS is a multi-bindings, multi-personalities LLM full-stack system for AI applications in robotics. It provides a user-friendly interface for setting up and managing the system.
"""
from fastapi import FastAPI

@@ -1 +1 @@
-Subproject commit 3f877d19e68d69251faf0356fbecbc33e89dee57
+Subproject commit 882003f258657cc2f3f4492e3e4d62d5f0c53fd8