upgraded code

Saifeddine ALOUI 2024-03-17 03:25:46 +01:00
parent 5d4b5faa0f
commit ddba6ed364
2 changed files with 47 additions and 48 deletions

View File

@@ -1026,13 +1026,13 @@ class AIPersonality:
                                 save_db=self.config.data_vectorization_save_db,
                                 data_visualization_method=VisualizationMethod.PCA,
                                 database_dict=None)
                 data = GenericDataLoader.read_file(path)
                 self.vectorizer.add_document(path, data, self.config.data_vectorization_chunk_size, self.config.data_vectorization_overlap_size, add_first_line_to_all_chunks=True if path.suffix==".csv" else False)
                 self.vectorizer.index()
                 if callback is not None:
                     callback("File added successfully",MSG_TYPE.MSG_TYPE_INFO)
                 self.HideBlockingMessage("Adding file to vector store.\nPlease stand by")
                 return True
             except Exception as e:
                 trace_exception(e)
                 self.HideBlockingMessage("Adding file to vector store.\nPlease stand by")
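
The hunk above is the tail of AIPersonality's file-ingestion path: read the file, chunk it into the vectorizer (keeping the first line of a CSV on every chunk), index, notify the caller through the optional callback, and hide the blocking UI message on both the success and the failure branch. Below is a minimal, self-contained sketch of that shape; every name in it (VectorStore, ingest_file, hide_blocking_message) is a hypothetical stand-in rather than the lollms API, and a single finally replaces the hunk's repeated HideBlockingMessage call.

import traceback
from pathlib import Path

class VectorStore:
    """Hypothetical stand-in for the vectorizer used in the hunk above."""
    def __init__(self):
        self.chunks = []

    def add_document(self, name, text, chunk_size=512, overlap=128, add_first_line_to_all_chunks=False):
        # optionally prepend the first line (e.g. a CSV header) to every chunk
        header = (text.splitlines()[0] + "\n") if (add_first_line_to_all_chunks and text) else ""
        step = max(chunk_size - overlap, 1)
        for start in range(0, max(len(text), 1), step):
            self.chunks.append((name, header + text[start:start + chunk_size]))

    def index(self):
        pass  # a real store would (re)build its search index here

def ingest_file(path: Path, store: VectorStore, callback=None, hide_blocking_message=print):
    try:
        data = path.read_text(encoding="utf-8", errors="ignore")
        store.add_document(path.name, data, add_first_line_to_all_chunks=(path.suffix == ".csv"))
        store.index()
        if callback is not None:
            callback("File added successfully")
        return True
    except Exception:
        traceback.print_exc()
        return False
    finally:
        # the hunk calls HideBlockingMessage on both branches; finally covers both
        hide_blocking_message("Adding file to vector store. Please stand by")

Called as ingest_file(Path("notes.csv"), VectorStore(), callback=print), this mirrors the control flow of the hunk while guaranteeing the blocking message is always dismissed.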

View File

@@ -22,6 +22,7 @@ from typing import List
 import socketio
 from functools import partial
 from datetime import datetime
+import threading
 import os

 router = APIRouter()
@@ -123,50 +124,48 @@ def add_events(sio:socketio):
         client.cancel_generation=False
         client.continuing=False
         client.first_chunk=True
-        if not lollmsElfServer.model:
-            ASCIIColors.error("Model not selected. Please select a model")
-            lollmsElfServer.error("Model not selected. Please select a model", client_id=client_id)
-            return
-        if not lollmsElfServer.busy:
-            if lollmsElfServer.session.get_client(client_id).discussion is None:
-                if lollmsElfServer.db.does_last_discussion_have_messages():
-                    lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.create_discussion()
-                else:
-                    lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.load_last_discussion()
-            ump = lollmsElfServer.config.discussion_prompt_separator +lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix
-            message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
-                message_type = MSG_TYPE.MSG_TYPE_FULL.value,
-                sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
-                sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
-                content="",
-                metadata=None,
-                parent_message_id=lollmsElfServer.message_id
-            )
-            lollmsElfServer.busy=True
-            client_id = sid
-            client = lollmsElfServer.session.get_client(client_id)
-            command = data["command"]
-            parameters = data["parameters"]
-            lollmsElfServer.prepare_reception(client_id)
-            if lollmsElfServer.personality.processor is not None:
-                lollmsElfServer.start_time = datetime.now()
-                lollmsElfServer.personality.processor.callback = partial(lollmsElfServer.process_chunk, client_id=client_id)
-                lollmsElfServer.personality.processor.execute_command(command, parameters)
-            else:
-                lollmsElfServer.warning("Non scripted personalities do not support commands",client_id=client_id)
-            lollmsElfServer.close_message(client_id)
-            lollmsElfServer.busy=False
-            #tpe = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message_id, client_id))
-            #tpe.start()
-        else:
-            lollmsElfServer.error("I am busy. Come back later.", client_id=client_id)
-            lollmsElfServer.busy=False
+        def do_generation():
+            if not lollmsElfServer.model:
+                ASCIIColors.error("Model not selected. Please select a model")
+                lollmsElfServer.error("Model not selected. Please select a model", client_id=client_id)
+                return
+            if not lollmsElfServer.busy:
+                if client.discussion is None:
+                    if lollmsElfServer.db.does_last_discussion_have_messages():
+                        client.discussion = lollmsElfServer.db.create_discussion()
+                    else:
+                        client.discussion = lollmsElfServer.db.load_last_discussion()
+                ump = lollmsElfServer.config.discussion_prompt_separator + lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix
+                message = client.discussion.add_message(
+                    message_type = MSG_TYPE.MSG_TYPE_FULL.value,
+                    sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
+                    sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
+                    content="",
+                    metadata=None,
+                    parent_message_id=lollmsElfServer.message_id
+                )
+                lollmsElfServer.busy=True
+                command = data["command"]
+                parameters = data["parameters"]
+                lollmsElfServer.prepare_reception(client_id)
+                if lollmsElfServer.personality.processor is not None:
+                    lollmsElfServer.start_time = datetime.now()
+                    lollmsElfServer.personality.processor.callback = partial(lollmsElfServer.process_chunk, client_id=client_id)
+                    lollmsElfServer.personality.processor.execute_command(command, parameters)
+                else:
+                    lollmsElfServer.warning("Non scripted personalities do not support commands",client_id=client_id)
+                lollmsElfServer.close_message(client_id)
+                lollmsElfServer.busy=False
+                #tpe = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message_id, client_id))
+                #tpe.start()
+            else:
+                lollmsElfServer.error("I am busy. Come back later.", client_id=client_id)
+                lollmsElfServer.busy=False
+        client.generation_thread = threading.Thread(target=do_generation)
+        client.generation_thread.start()
+        ASCIIColors.info("Started generation task")
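
The core of this change sits at the bottom of the hunk: the whole generation body is moved into a nested do_generation() closure and launched on a per-client threading.Thread stored in client.generation_thread, so the socket.io handler returns immediately instead of blocking until the command finishes. Below is a self-contained sketch of that pattern; Client, handle_generate and the fake token loop are hypothetical stand-ins, and only the closure-over-client idea and the threading.Thread usage come from the diff.

import threading
import time

class Client:
    """Hypothetical per-connection state, mirroring the client object in the diff."""
    def __init__(self, client_id):
        self.client_id = client_id
        self.cancel_generation = False
        self.generation_thread = None

def handle_generate(client: Client, prompt: str):
    """Event-handler shape: start the heavy work on a thread and return at once."""
    def do_generation():
        # closure: client and prompt are captured, no re-lookup by id is needed
        for token in prompt.split():
            if client.cancel_generation:
                break
            time.sleep(0.1)  # stands in for the model producing a chunk
            print(f"[{client.client_id}] {token}")

    client.generation_thread = threading.Thread(target=do_generation)
    client.generation_thread.start()
    # the handler returns immediately; the thread keeps streaming in the background

if __name__ == "__main__":
    c = Client("sid-1")
    handle_generate(c, "hello from a background generation thread")
    c.generation_thread.join()

Keeping the Thread object on the client is what lets a later event (for example a cancel request setting client.cancel_generation, as in the hunk's context lines) find and stop the generation that belongs to that connection.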