new upgrades

This commit is contained in:
Saifeddine ALOUI 2024-04-21 00:13:04 +02:00
parent 71287d0406
commit 2774830914
6 changed files with 27 additions and 5 deletions

View File

@ -813,6 +813,8 @@ class LollmsApplication(LoLLMsCom):
            # Check if adding the message will exceed the available space
            if tokens_accumulated + len(message_tokenized) > available_space:
                # Keep only the leading tokens that still fit in the remaining space
                message_tokenized = message_tokenized[:available_space-tokens_accumulated]
                full_message_list.insert(0, message_tokenized)
                break
            # Add the tokenized message to the full_message_list
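For context, this hunk truncates the last message that no longer fits the token budget instead of dropping it outright. Below is a minimal, self-contained sketch of the kind of loop the hunk sits in; the name pack_messages, the tokenize callable and the messages list are illustrative stand-ins, not the exact names used in LollmsApplication.

    # Minimal sketch of a token-budget packing loop (illustrative names only)
    def pack_messages(messages, tokenize, available_space):
        full_message_list = []
        tokens_accumulated = 0
        # Walk the discussion from the most recent message backwards
        for message in reversed(messages):
            message_tokenized = tokenize(message)
            # Check if adding the message will exceed the available space
            if tokens_accumulated + len(message_tokenized) > available_space:
                # Keep only the leading tokens that still fit, then stop
                message_tokenized = message_tokenized[:available_space - tokens_accumulated]
                full_message_list.insert(0, message_tokenized)
                break
            # Add the tokenized message to the full_message_list
            full_message_list.insert(0, message_tokenized)
            tokens_accumulated += len(message_tokenized)
        return full_message_list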

View File

@ -422,6 +422,7 @@ class LLMBinding:
    def add_default_configurations(self, binding_config:TypedConfig):
        binding_config.addConfigs([
            {"name":"model_name","type":"str","value":'', "help":"Last known model for fast model recovery"},
            {"name":"model_template","type":"text","value":'', "help":"The template for the currently used model (optional)"},
            {"name":"clip_model_name","type":"str","value":'ViT-L-14/openai','options':["ViT-L-14/openai","ViT-H-14/laion2b_s32b_b79k"], "help":"CLIP model to be used for image understanding"},
            {"name":"caption_model_name","type":"str","value":'blip-large','options':['blip-base', 'git-large-coco', 'blip-large','blip2-2.7b', 'blip2-flan-t5-xl'], "help":"Captioning model to be used for image description"},
            {"name":"vqa_model_name","type":"str","value":'Salesforce/blip-vqa-capfilt-large','options':['Salesforce/blip-vqa-capfilt-large', 'Salesforce/blip-vqa-base', 'Salesforce/blip-image-captioning-large','Salesforce/blip2-opt-2.7b', 'Salesforce/blip2-flan-t5-xxl'], "help":"Salesforce visual question answering model"},

View File

@ -576,7 +576,7 @@ class Message:
            (self.sender, self.content, self.metadata, self.ui, self.message_type, self.rank, self.parent_message_id, self.binding, self.model, self.personality, self.created_at, self.started_generating_at, self.finished_generating_at, nb_tokens, self.discussion_id)
        )

    def update(self, new_content, new_metadata=None, new_ui=None, commit=True):
    def update(self, new_content, new_metadata=None, new_ui=None, nb_tokens=None, commit=True):
        self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        text = f"UPDATE message SET content = ?"
        params = [new_content]
@ -587,6 +587,11 @@ class Message:
text+=", ui = ?"
params.append(new_ui)
if nb_tokens is not None:
text+=", nb_tokens = ?"
params.append(nb_tokens)
text +=", finished_generating_at = ? WHERE id = ?"
params.append(self.finished_generating_at)
params.append(self.id)
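The update builds the SET clause and the parameter list in lockstep, so optional columns such as the new nb_tokens are only written when a value is actually supplied. The standalone sketch below reproduces that pattern against plain sqlite3; the function name and connection handling are illustrative, while the message table and column names follow the hunk above.

    # Standalone sketch of the incremental UPDATE pattern used by Message.update
    import sqlite3
    from datetime import datetime

    def update_message_row(conn: sqlite3.Connection, message_id: int, new_content: str,
                           new_ui=None, nb_tokens=None):
        text = "UPDATE message SET content = ?"
        params = [new_content]
        if new_ui is not None:
            text += ", ui = ?"
            params.append(new_ui)
        if nb_tokens is not None:
            text += ", nb_tokens = ?"
            params.append(nb_tokens)
        text += ", finished_generating_at = ? WHERE id = ?"
        params.append(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        params.append(message_id)
        conn.execute(text, params)
        conn.commit()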
@ -771,14 +776,14 @@ class Discussion:
        else:
            return False

    def update_message(self, new_content, new_metadata=None, new_ui=None):
    def update_message(self, new_content, new_metadata=None, new_ui=None, nb_tokens=None):
        """Updates the content of the current message

        Args:
            new_content (str): The new message content
        """
        self.current_message.update(new_content, new_metadata, new_ui)
        self.current_message.update(new_content, new_metadata, new_ui, nb_tokens)

    def edit_message(self, message_id, new_content, new_metadata=None, new_ui=None):
        """Edits the content of a message

View File

@ -80,6 +80,9 @@ async def update_setting(request: Request):
            lollmsElfServer.binding = BindingBuilder().build_binding(lollmsElfServer.config, lollmsElfServer.lollms_paths, InstallOption.INSTALL_IF_NECESSARY, lollmsCom=lollmsElfServer)
            lollmsElfServer.config.model_name = lollmsElfServer.binding.binding_config.model_name
            lollmsElfServer.model = lollmsElfServer.binding.build_model()
            for per in lollmsElfServer.mounted_personalities:
                if per is not None:
                    per.model = lollmsElfServer.model
            lollmsElfServer.config.save_config()
            ASCIIColors.green("Binding loaded successfully")
        except Exception as ex:
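The three added lines exist because mounted personalities keep their own reference to the model object; rebuilding the binding creates a new model, and without the loop each personality would keep generating through the stale instance. Reduced to its essentials, with illustrative names, the pattern looks like this:

    # Generic sketch: after a shared resource is rebuilt, push the new reference
    # to every holder that cached the old one (names are illustrative)
    def refresh_mounted_personalities(server):
        server.model = server.binding.build_model()
        for personality in server.mounted_personalities:
            if personality is not None:
                personality.model = server.model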

View File

@ -23,6 +23,7 @@ import socketio
import os
from functools import partial
import threading
from datetime import datetime
lollmsElfServer = LOLLMSElfServer.get_instance()
@ -243,13 +244,20 @@ def add_events(sio:socketio):
        prompt = data["prompt"]
        ump = lollmsElfServer.config.discussion_prompt_separator +lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix
        try:
            nb_tokens = len(lollmsElfServer.model.tokenize(prompt))
        except Exception:
            # The model may be missing or expose no tokenizer; the token count stays optional
            nb_tokens = None
        created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
            message_type = MSG_TYPE.MSG_TYPE_FULL.value,
            sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
            sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
            content=prompt,
            metadata=None,
            parent_message_id=lollmsElfServer.message_id
            parent_message_id=lollmsElfServer.message_id,
            created_at=created_at,
            nb_tokens=nb_tokens
        )
        ASCIIColors.green("Starting message generation by "+lollmsElfServer.personality.name)

View File

@ -143,13 +143,16 @@ def add_events(sio:socketio):
        client.discussion = lollmsElfServer.db.load_last_discussion()
        ump = lollmsElfServer.config.discussion_prompt_separator + lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix
        created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        message = client.discussion.add_message(
            message_type = MSG_TYPE.MSG_TYPE_FULL.value,
            sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
            sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
            content="",
            metadata=None,
            parent_message_id=lollmsElfServer.message_id
            parent_message_id=lollmsElfServer.message_id,
            created_at=created_at,
            nb_tokens=None
        )
        lollmsElfServer.busy=True
        command = data["command"]
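Here the message is created as an empty placeholder, so there is nothing to count yet and nb_tokens is stored as None; the count can be filled in later through update_message(..., nb_tokens=...). Once the column is populated, per-discussion totals become a single aggregate, as in the illustrative query below; it assumes the message table's column names match the attributes written in the INSERT tuple earlier in this commit, i.e. nb_tokens and discussion_id.

    # Illustrative aggregate over the persisted token counts (assumed column names)
    import sqlite3

    def discussion_token_total(conn: sqlite3.Connection, discussion_id: int) -> int:
        row = conn.execute(
            "SELECT COALESCE(SUM(nb_tokens), 0) FROM message WHERE discussion_id = ?",
            (discussion_id,),
        ).fetchone()
        return row[0]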