Huge upgrade, discussion system is now in lollms core

This commit is contained in:
Saifeddine ALOUI 2024-02-26 01:55:44 +01:00
parent c453f90609
commit 02e829b565
13 changed files with 136 additions and 1498 deletions

808
api/db.py
View File

@ -1,808 +0,0 @@
import sqlite3
from pathlib import Path
from datetime import datetime
from lollms.helpers import ASCIIColors
from lollms.paths import LollmsPaths
import json
__author__ = "parisneo"
__github__ = "https://github.com/ParisNeo/lollms-webui"
__copyright__ = "Copyright 2023, "
__license__ = "Apache 2.0"
# =================================== Database ==================================================================
class DiscussionsDB:
    """SQLite-backed store for chat discussions and their messages.

    Every operation opens a short-lived ``sqlite3`` connection; no connection
    object is kept on the instance.  The database file lives at
    ``<personal_discussions_path>/<discussion_db_name>/database.db``.
    """

    def __init__(self, lollms_paths: "LollmsPaths", discussion_db_name: str = "default"):
        """Resolve and create the database folder; the db file itself is created lazily."""
        self.lollms_paths = lollms_paths
        self.discussion_db_name = discussion_db_name
        self.discussion_db_path = self.lollms_paths.personal_discussions_path / discussion_db_name
        self.discussion_db_path.mkdir(exist_ok=True, parents=True)
        self.discussion_db_file_path = self.discussion_db_path / "database.db"

    def create_tables(self):
        """Create the schema (schema_version, discussion, message) if missing and stamp the version."""
        db_version = 10
        with sqlite3.connect(self.discussion_db_file_path) as conn:
            cursor = conn.cursor()

            cursor.execute("""
                CREATE TABLE IF NOT EXISTS schema_version (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    version INTEGER NOT NULL
                )
            """)

            cursor.execute("""
                CREATE TABLE IF NOT EXISTS discussion (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    title TEXT,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)

            cursor.execute("""
                CREATE TABLE IF NOT EXISTS message (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    binding TEXT,
                    model TEXT,
                    personality TEXT,
                    sender TEXT NOT NULL,
                    content TEXT NOT NULL,
                    message_type INT NOT NULL,
                    sender_type INT DEFAULT 0,
                    rank INT NOT NULL DEFAULT 0,
                    parent_message_id INT,
                    created_at TIMESTAMP,
                    finished_generating_at TIMESTAMP,
                    discussion_id INTEGER NOT NULL,
                    metadata TEXT,
                    ui TEXT,
                    FOREIGN KEY (discussion_id) REFERENCES discussion(id),
                    FOREIGN KEY (parent_message_id) REFERENCES message(id)
                )
            """)

            # Stamp (or refresh) the schema version row.
            cursor.execute("SELECT * FROM schema_version")
            row = cursor.fetchone()
            if row is None:
                cursor.execute("INSERT INTO schema_version (version) VALUES (?)", (db_version,))
            else:
                cursor.execute("UPDATE schema_version SET version = ?", (db_version,))
            conn.commit()

    def add_missing_columns(self):
        """Upgrade older databases in place by adding/renaming columns to match the current schema."""
        with sqlite3.connect(self.discussion_db_file_path) as conn:
            cursor = conn.cursor()

            table_columns = {
                'discussion': [
                    'id',
                    'title',
                    'created_at'
                ],
                'message': [
                    'id',
                    'binding',
                    'model',
                    'personality',
                    'sender',
                    'content',
                    'message_type',
                    'sender_type',
                    'rank',
                    'parent_message_id',
                    'created_at',
                    'metadata',
                    'ui',
                    'finished_generating_at',
                    'discussion_id'
                ]
            }

            for table, columns in table_columns.items():
                cursor.execute(f"PRAGMA table_info({table})")
                existing_columns = [column[1] for column in cursor.fetchall()]

                for column in columns:
                    if column not in existing_columns:
                        if column == 'id':
                            # NOTE(review): SQLite rejects adding a PRIMARY KEY column
                            # via ALTER TABLE; in practice this branch never fires
                            # because create_tables always defines `id`.
                            cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} INTEGER PRIMARY KEY AUTOINCREMENT")
                        elif column.endswith('_at'):
                            cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TIMESTAMP")
                        elif column == 'metadata':
                            cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TEXT")
                        elif column == 'message_type':
                            # Legacy databases stored this column as `type`.
                            cursor.execute(f"ALTER TABLE {table} RENAME COLUMN type TO {column}")
                        elif column == 'sender_type':
                            cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} INT DEFAULT 0")
                        elif column == 'parent_message_id':
                            # Legacy databases stored this column as `parent`.
                            cursor.execute(f"ALTER TABLE {table} RENAME COLUMN parent TO {column}")
                        else:
                            cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TEXT")
                        ASCIIColors.yellow(f"Added column :{column}")
            conn.commit()

    def select(self, query, params=None, fetch_all=True):
        """
        Execute a SELECT query with optional parameters.

        Returns all rows when ``fetch_all`` is True, otherwise a single row
        (or None when there is no match).
        """
        with sqlite3.connect(self.discussion_db_file_path) as conn:
            if params is None:
                cursor = conn.execute(query)
            else:
                cursor = conn.execute(query, params)
            if fetch_all:
                return cursor.fetchall()
            else:
                return cursor.fetchone()

    def delete(self, query, params=None):
        """Execute a DELETE query with optional parameters and commit."""
        with sqlite3.connect(self.discussion_db_file_path) as conn:
            cursor = conn.cursor()
            if params is None:
                cursor.execute(query)
            else:
                cursor.execute(query, params)
            conn.commit()

    def insert(self, query, params=None):
        """Execute an INSERT query, commit, and return the id of the new row."""
        with sqlite3.connect(self.discussion_db_file_path) as conn:
            cursor = conn.execute(query, params)
            rowid = cursor.lastrowid
            conn.commit()
        # Note: the old stray `self.conn = None` was removed — no connection is stored.
        return rowid

    def update(self, query, params: tuple = None):
        """Execute an UPDATE query with optional parameters and commit (returns None)."""
        with sqlite3.connect(self.discussion_db_file_path) as conn:
            conn.execute(query, params)
            conn.commit()

    def load_last_discussion(self):
        """Return a Discussion for the most recent discussion, creating one if the db is empty."""
        last_discussion_id = self.select("SELECT id FROM discussion ORDER BY id DESC LIMIT 1", fetch_all=False)
        if last_discussion_id is None:
            last_discussion = self.create_discussion()
            last_discussion_id = last_discussion.discussion_id
        else:
            last_discussion_id = last_discussion_id[0]
        self.current_message_id = self.select("SELECT id FROM message WHERE discussion_id=? ORDER BY id DESC LIMIT 1", (last_discussion_id,), fetch_all=False)
        return Discussion(last_discussion_id, self)

    def create_discussion(self, title="untitled"):
        """Creates a new discussion

        Args:
            title (str, optional): The title of the discussion. Defaults to "untitled".

        Returns:
            Discussion: A Discussion instance
        """
        discussion_id = self.insert("INSERT INTO discussion (title) VALUES (?)", (title,))
        return Discussion(discussion_id, self)

    def build_discussion(self, discussion_id=0):
        """Wrap an existing discussion id in a Discussion object (no validation performed)."""
        return Discussion(discussion_id, self)

    def get_discussions(self):
        """Return every discussion as a {"id", "title"} dict."""
        rows = self.select("SELECT * FROM discussion")
        return [{"id": row[0], "title": row[1]} for row in rows]

    def does_last_discussion_have_messages(self):
        """True when the latest discussion has at least one message (creates one if db is empty)."""
        last_discussion_id = self.select("SELECT id FROM discussion ORDER BY id DESC LIMIT 1", fetch_all=False)
        if last_discussion_id is None:
            last_discussion = self.create_discussion()
            last_discussion_id = last_discussion.discussion_id
        else:
            last_discussion_id = last_discussion_id[0]
        last_message = self.select("SELECT * FROM message WHERE discussion_id=?", (last_discussion_id,), fetch_all=False)
        return last_message is not None

    def remove_discussions(self):
        """Delete every message and every discussion from the database."""
        self.delete("DELETE FROM message")
        self.delete("DELETE FROM discussion")

    def _export_discussion_messages(self, discussion_id):
        """Return the messages of one discussion as export-ready dicts (shared by export_* methods)."""
        keys = ("sender", "content", "message_type", "rank", "parent_message_id",
                "binding", "model", "personality", "created_at", "finished_generating_at")
        rows = self.select(
            "SELECT sender, content, message_type, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at FROM message WHERE discussion_id=?",
            (discussion_id,)
        )
        return [dict(zip(keys, row)) for row in rows]

    def export_to_json(self):
        """
        Export all discussions and their messages from the database to a JSON format.

        Returns:
            list: One dict per discussion with its id, title and message dicts
            (sender, content, message_type, rank, parent_message_id, binding,
            model, personality, created_at, finished_generating_at).
        """
        return self.export_all_discussions_to_json()

    def export_all_as_markdown_list_for_vectorization(self):
        """
        Export all discussions as [title, "sender: content\\n..."] pairs for vectorization.

        Returns:
            list: A list of [title, messages_text] lists, one per discussion.
        """
        discussions = []
        for discussion in self.export_all_discussions_to_json():
            messages = "".join(
                f"{message['sender']}: {message['content']}\n"
                for message in discussion['messages']
            )
            discussions.append([discussion['title'], messages])
        return discussions

    def export_all_as_markdown(self):
        """
        Export all discussions and their messages to a single Markdown string.

        Each discussion title becomes a heading and each message a
        "sender: content" line.
        """
        result = ''
        for discussion in self.export_all_discussions_to_json():
            result += f"#{discussion['title']}\n"
            for message in discussion['messages']:
                result += f"{message['sender']}: {message['content']}\n"
        return result

    def export_all_discussions_to_json(self):
        """Export every discussion with its messages to a JSON-ready list of dicts."""
        return [
            {"id": row[0], "title": row[1], "messages": self._export_discussion_messages(row[0])}
            for row in self.select("SELECT * FROM discussion")
        ]

    def export_discussions_to_json(self, discussions_ids: list):
        """Export only the discussions whose ids are listed, with their messages."""
        # Build one placeholder per id so the ids are passed as parameters.
        placeholders = ','.join(['?'] * len(discussions_ids))
        db_discussions = self.select(
            f"SELECT * FROM discussion WHERE id IN ({placeholders})",
            tuple(discussions_ids)
        )
        return [
            {"id": row[0], "title": row[1], "messages": self._export_discussion_messages(row[0])}
            for row in db_discussions
        ]

    def import_from_json(self, json_data):
        """
        Import discussions (as produced by export_to_json) into the database.

        Args:
            json_data: A list of discussion dicts, each with a title and messages.

        Returns:
            list: The imported discussions echoed back as dicts.
        """
        discussions = []
        for discussion_data in json_data:
            discussion_id = discussion_data.get("id")
            discussion_title = discussion_data.get("title")
            messages_data = discussion_data.get("messages", [])
            discussion = {"id": discussion_id, "title": discussion_title, "messages": []}

            # Insert discussion into the database (a fresh id is generated).
            discussion_id = self.insert("INSERT INTO discussion (title) VALUES (?)", (discussion_title,))

            for message_data in messages_data:
                sender = message_data.get("sender")
                content = message_data.get("content")
                # Older exports used "type" instead of "message_type".
                content_type = message_data.get("message_type", message_data.get("type"))
                rank = message_data.get("rank")
                parent_message_id = message_data.get("parent_message_id")
                binding = message_data.get("binding", "")
                model = message_data.get("model", "")
                personality = message_data.get("personality", "")
                created_at = message_data.get("created_at", datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
                finished_generating_at = message_data.get("finished_generating_at", datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
                discussion["messages"].append(
                    {"sender": sender, "content": content, "message_type": content_type, "rank": rank, "binding": binding, "model": model, "personality": personality, "created_at": created_at, "finished_generating_at": finished_generating_at}
                )
                # Insert message into the database
                self.insert(
                    "INSERT INTO message (sender, content, message_type, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at, discussion_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                    (sender, content, content_type, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at, discussion_id)
                )
            discussions.append(discussion)
        return discussions

    def export_discussions_to_markdown(self, discussions_ids: list, title=""):
        """Export the listed discussions to one Markdown string, optionally under a top-level title."""
        placeholders = ','.join(['?'] * len(discussions_ids))
        db_discussions = self.select(
            f"SELECT * FROM discussion WHERE id IN ({placeholders})",
            tuple(discussions_ids)
        )
        discussions = f"# {title}" if title != "" else ""
        for row in db_discussions:
            discussions += f"## {row[1]}\n"
            for message in self._export_discussion_messages(row[0]):
                discussions += f"### {message['sender']}:\n{message['content']}\n"
            discussions += "\n"
        return discussions
class Message:
    """One message row of a discussion.

    When ``insert_into_db`` is True the constructor persists the message and
    stores the new row id in ``self.id``; otherwise ``id`` is taken as given.
    """

    def __init__(
        self,
        discussion_id,
        discussions_db,
        message_type,
        sender_type,
        sender,
        content,
        metadata=None,
        ui=None,
        rank=0,
        parent_message_id=0,
        binding="",
        model="",
        personality="",
        created_at=None,
        finished_generating_at=None,
        id=None,
        insert_into_db=False
    ):
        self.discussion_id = discussion_id
        self.discussions_db = discussions_db
        self.sender = sender
        self.sender_type = sender_type
        self.content = content
        self.message_type = message_type
        self.rank = rank
        self.parent_message_id = parent_message_id
        self.binding = binding
        self.model = model
        # Dict metadata is serialized to JSON text for storage; strings/None pass through.
        self.metadata = json.dumps(metadata, indent=4) if isinstance(metadata, dict) else metadata
        self.ui = ui
        self.personality = personality
        self.created_at = created_at
        self.finished_generating_at = finished_generating_at
        if insert_into_db:
            # Bug fix: the column list used to name `sender` twice; each column
            # now appears exactly once, and the serialized metadata is stored
            # instead of the raw (possibly dict) argument.
            self.id = self.discussions_db.insert(
                "INSERT INTO message (message_type, sender_type, sender, content, metadata, ui, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at, discussion_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                (message_type, sender_type, sender, content, self.metadata, ui, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at, discussion_id)
            )
        else:
            self.id = id

    @staticmethod
    def get_fields():
        """Ordered list of the message columns handled by this class."""
        return [
            "id",
            "message_type",
            "sender_type",
            "sender",
            "content",
            "metadata",
            "ui",
            "rank",
            "parent_message_id",
            "binding",
            "model",
            "personality",
            "created_at",
            "finished_generating_at",
            "discussion_id"
        ]

    @staticmethod
    def from_db(discussions_db, message_id):
        """Load a message by id from the database (raises IndexError when the id is missing)."""
        columns = Message.get_fields()
        rows = discussions_db.select(
            f"SELECT {','.join(columns)} FROM message WHERE id=?", (message_id,)
        )
        data_dict = {col: rows[0][i] for i, col in enumerate(columns)}
        data_dict["discussions_db"] = discussions_db
        return Message(**data_dict)

    @staticmethod
    def from_dict(discussions_db, data_dict):
        """Build a Message from a column dict (as produced by Discussion.get_messages)."""
        data_dict["discussions_db"] = discussions_db
        return Message(**data_dict)

    def insert_into_db(self):
        """Persist this message as a new row; the new row id is stored in ``self.message_id``."""
        self.message_id = self.discussions_db.insert(
            "INSERT INTO message (sender, content, metadata, ui, message_type, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at, discussion_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (self.sender, self.content, self.metadata, self.ui, self.message_type, self.rank, self.parent_message_id, self.binding, self.model, self.personality, self.created_at, self.finished_generating_at, self.discussion_id)
        )

    def update_db(self):
        """Legacy alias kept for backward compatibility.

        NOTE(review): despite its name this has always INSERTed a new row
        (identical to ``insert_into_db``); use ``update`` to modify a row.
        """
        self.insert_into_db()

    def update(self, new_content, new_metadata=None, new_ui=None, commit=True):
        """Update this row's content (and optionally metadata/ui), stamping finished_generating_at."""
        self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        text = "UPDATE message SET content = ?"
        params = [new_content]
        if new_metadata is not None:
            text += ", metadata = ?"
            params.append(new_metadata)
        if new_ui is not None:
            text += ", ui = ?"
            params.append(new_ui)
        text += ", finished_generating_at = ? WHERE id = ?"
        params.append(self.finished_generating_at)
        params.append(self.id)
        self.discussions_db.update(text, tuple(params))

    def to_json(self):
        """Serialize to a dict; JSON-encoded metadata strings are decoded back to objects."""
        msgJson = {}
        for attribute_name in Message.get_fields():
            attribute_value = getattr(self, attribute_name, None)
            if attribute_name == "metadata" and isinstance(attribute_value, str):
                msgJson[attribute_name] = json.loads(attribute_value)
            else:
                msgJson[attribute_name] = attribute_value
        return msgJson
class Discussion:
def __init__(self, discussion_id, discussions_db:DiscussionsDB):
self.discussion_id = discussion_id
self.discussions_db = discussions_db
self.discussion_folder = self.discussions_db.discussion_db_path/f"{discussion_id}"
self.discussion_folder.mkdir(exist_ok=True)
self.messages = self.get_messages()
if len(self.messages)>0:
self.current_message = self.messages[-1]
def load_message(self, id):
"""Gets a list of messages information
Returns:
list: List of entries in the format {"id":message id, "sender":sender name, "content":message content, "message_type":message type, "rank": message rank}
"""
self.current_message = Message.from_db(self.discussions_db, id)
return self.current_message
def add_message(
self,
message_type,
sender_type,
sender,
content,
metadata=None,
ui=None,
rank=0,
parent_message_id=0,
binding="",
model ="",
personality="",
created_at=None,
finished_generating_at=None
):
"""Adds a new message to the discussion
Args:
sender (str): The sender name
content (str): The text sent by the sender
Returns:
int: The added message id
"""
if created_at is None:
created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
if finished_generating_at is None:
finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.current_message = Message(
self.discussion_id,
self.discussions_db,
message_type,
sender_type,
sender,
content,
metadata,
ui,
rank,
parent_message_id,
binding,
model,
personality,
created_at,
finished_generating_at,
insert_into_db=True
)
self.messages.append(self.current_message)
return self.current_message
def rename(self, new_title):
"""Renames the discussion
Args:
new_title (str): The nex discussion name
"""
self.discussions_db.update(
f"UPDATE discussion SET title=? WHERE id=?",(new_title,self.discussion_id)
)
def title(self):
"""Renames the discussion
Args:
new_title (str): The nex discussion name
"""
rows = self.discussions_db.select(
f"Select title from discussion WHERE id={self.discussion_id}"
)
return rows[0][0]
def delete_discussion(self):
"""Deletes the discussion
"""
self.discussions_db.delete(
f"DELETE FROM message WHERE discussion_id={self.discussion_id}"
)
self.discussions_db.delete(
f"DELETE FROM discussion WHERE id={self.discussion_id}"
)
def get_messages(self):
"""Gets a list of messages information
Returns:
list: List of entries in the format {"id":message id, "sender":sender name, "content":message content, "message_type":message type, "rank": message rank}
"""
columns = Message.get_fields()
rows = self.discussions_db.select(
f"SELECT {','.join(columns)} FROM message WHERE discussion_id=?", (self.discussion_id,)
)
msg_dict = [{ c:row[i] for i,c in enumerate(columns)} for row in rows]
self.messages=[]
for msg in msg_dict:
self.messages.append(Message.from_dict(self.discussions_db, msg))
if len(self.messages)>0:
self.current_message = self.messages[-1]
return self.messages
def get_message(self, message_id):
for message in self.messages:
if message.id == int(message_id):
self.current_message = message
return message
return None
def select_message(self, message_id):
msg = self.get_message(message_id)
if msg is not None:
self.current_message = msg
return True
else:
return False
def update_message(self, new_content, new_metadata=None, new_ui=None):
"""Updates the content of a message
Args:
message_id (int): The id of the message to be changed
new_content (str): The nex message content
"""
self.current_message.update(new_content, new_metadata, new_ui)
def edit_message(self, message_id, new_content, new_metadata=None, new_ui=None):
"""Edits the content of a message
Args:
message_id (int): The id of the message to be changed
new_content (str): The nex message content
"""
msg = self.get_message(message_id)
if msg:
msg.update(new_content, new_metadata, new_ui)
return True
else:
return False
def message_rank_up(self, message_id):
"""Increments the rank of the message
Args:
message_id (int): The id of the message to be changed
"""
# Retrieve current rank value for message_id
current_rank = self.discussions_db.select("SELECT rank FROM message WHERE id=?", (message_id,),False)[0]
# Increment current rank value by 1
new_rank = current_rank + 1
self.discussions_db.update(
f"UPDATE message SET rank = ? WHERE id = ?",(new_rank,message_id)
)
return new_rank
def message_rank_down(self, message_id):
"""Increments the rank of the message
Args:
message_id (int): The id of the message to be changed
"""
# Retrieve current rank value for message_id
current_rank = self.discussions_db.select("SELECT rank FROM message WHERE id=?", (message_id,),False)[0]
# Increment current rank value by 1
new_rank = current_rank - 1
self.discussions_db.update(
f"UPDATE message SET rank = ? WHERE id = ?",(new_rank,message_id)
)
return new_rank
def delete_message(self, message_id):
"""Delete the message
Args:
message_id (int): The id of the message to be deleted
"""
# Retrieve current rank value for message_id
self.discussions_db.delete("DELETE FROM message WHERE id=?", (message_id,))
def export_for_vectorization(self):
"""
Export all discussions and their messages from the database to a Markdown list format.
Returns:
list: A list of lists representing discussions and their messages in a Markdown format.
Each inner list contains the discussion title and a string representing all
messages in the discussion in a Markdown format.
"""
# Extract the title
title = self.title()
messages = ""
# Iterate through messages in the discussion
for message in self.messages:
sender = message.sender
content = message.content
# Append the sender and content in a Markdown format
messages += f'{sender}: {content}\n'
return title, messages
# ========================================================================================================================

2
app.py
View File

@ -109,7 +109,7 @@ if __name__ == "__main__":
from lollms.server.endpoints.lollms_motion_ctrl import router as lollms_motion_ctrl from lollms.server.endpoints.lollms_motion_ctrl import router as lollms_motion_ctrl
from endpoints.lollms_webui_infos import router as lollms_webui_infos_router from endpoints.lollms_webui_infos import router as lollms_webui_infos_router
from endpoints.lollms_discussion import router as lollms_discussion_router from lollms.server.endpoints.lollms_discussion import router as lollms_discussion_router
from endpoints.lollms_message import router as lollms_message_router from endpoints.lollms_message import router as lollms_message_router
from endpoints.lollms_advanced import router as lollms_advanced_router from endpoints.lollms_advanced import router as lollms_advanced_router
from endpoints.chat_bar import router as chat_bar_router from endpoints.chat_bar import router as chat_bar_router

View File

@ -15,7 +15,7 @@ from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
from api.db import DiscussionsDB from lollms.databases.discussions_database import DiscussionsDB
from pathlib import Path from pathlib import Path
from safe_store.text_vectorizer import TextVectorizer, VectorizationMethod, VisualizationMethod from safe_store.text_vectorizer import TextVectorizer, VectorizationMethod, VisualizationMethod
import tqdm import tqdm

View File

@ -16,7 +16,7 @@ from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, show_yes_no_dialog from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, show_yes_no_dialog
from lollms.security import sanitize_path from lollms.security import sanitize_path
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
from api.db import DiscussionsDB from lollms.databases.discussions_database import DiscussionsDB
from pathlib import Path from pathlib import Path
from safe_store.text_vectorizer import TextVectorizer, VectorizationMethod, VisualizationMethod from safe_store.text_vectorizer import TextVectorizer, VectorizationMethod, VisualizationMethod
import tqdm import tqdm

View File

@ -1,229 +0,0 @@
"""
project: lollms_webui
file: lollms_discussion.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to manipulate the discussion elements.
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from lollms.security import sanitize_path
from ascii_colors import ASCIIColors
from api.db import DiscussionsDB, Discussion
from typing import List
from safe_store.text_vectorizer import TextVectorizer, VectorizationMethod, VisualizationMethod
import tqdm
from pathlib import Path
class GenerateRequest(BaseModel):
    """Request payload carrying the raw text to generate from."""
    text: str
class DatabaseSelectionParameters(BaseModel):
    """Name of the personal discussion database to switch to."""
    name: str
class EditTitleParameters(BaseModel):
    """Parameters for renaming a discussion (duplicate of DiscussionEditTitle defined later in this module)."""
    client_id: str
    title: str
    id: int
class MakeTitleParameters(BaseModel):
    """Id of the discussion to auto-title (duplicate of DiscussionTitle defined later in this module)."""
    id: int
class DeleteDiscussionParameters(BaseModel):
    """Parameters for deleting a discussion (duplicate of DiscussionDelete defined later in this module)."""
    client_id: str
    id: int
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
# Singleton server instance shared by every endpoint in this module.
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
@router.get("/list_discussions")
def list_discussions():
    """Return every stored discussion as a list of {id, title} dicts."""
    return lollmsElfServer.db.get_discussions()
@router.get("/list_databases")
async def list_databases():
    """List all the personal databases in the LoLLMs server."""
    discussions_root = lollmsElfServer.lollms_paths.personal_discussions_path
    databases = []
    # A valid database is a sub-folder that contains a database.db file.
    for entry in discussions_root.iterdir():
        if entry.is_dir() and (entry / "database.db").exists():
            databases.append(entry.name)
    return databases
@router.post("/select_database")
def select_database(data: DatabaseSelectionParameters):
    """Switch the server to another discussion database.

    Creates/upgrades the schema, persists the choice in the configuration and,
    when long-term-memory vectorization is enabled, re-vectorizes every
    discussion into the long-term memory store.
    """
    sanitize_path(data.name)
    print(f'Selecting database {data.name}')
    # Create database object
    lollmsElfServer.db = DiscussionsDB(lollmsElfServer.lollms_paths, data.name)
    ASCIIColors.info("Checking discussions database... ", end="")
    lollmsElfServer.db.create_tables()
    lollmsElfServer.db.add_missing_columns()
    lollmsElfServer.config.discussion_db_name = data.name
    ASCIIColors.success("ok")

    if lollmsElfServer.config.auto_save:
        lollmsElfServer.config.save_config()

    if lollmsElfServer.config.data_vectorization_activate and lollmsElfServer.config.activate_ltm:
        try:
            ASCIIColors.yellow("0- Detected discussion vectorization request")
            folder = lollmsElfServer.lollms_paths.personal_discussions_path / "vectorized_dbs"
            folder.mkdir(parents=True, exist_ok=True)
            lollmsElfServer.long_term_memory = TextVectorizer(
                vectorization_method=VectorizationMethod.TFIDF_VECTORIZER,  # =VectorizationMethod.BM25_VECTORIZER,
                database_path=folder / lollmsElfServer.config.discussion_db_name,
                data_visualization_method=VisualizationMethod.PCA,  # VisualizationMethod.PCA,
                save_db=True
            )
            ASCIIColors.yellow("1- Exporting discussions")
            lollmsElfServer.info("Exporting discussions")
            discussions = lollmsElfServer.db.export_all_as_markdown_list_for_vectorization()
            ASCIIColors.yellow("2- Adding discussions to vectorizer")
            lollmsElfServer.info("Adding discussions to vectorizer")
            nb_discussions = len(discussions)
            # Bug fix: `tqdm` is imported as a module (`import tqdm`), so the
            # progress wrapper is `tqdm.tqdm(...)`; calling the module object
            # raised a TypeError.
            for index, (title, discussion) in enumerate(tqdm.tqdm(discussions)):
                lollmsElfServer.sio.emit('update_progress', {'value': int(100 * (index / nb_discussions))})
                if discussion != '':
                    skill = lollmsElfServer.learn_from_discussion(title, discussion)
                    lollmsElfServer.long_term_memory.add_document(title, skill, chunk_size=lollmsElfServer.config.data_vectorization_chunk_size, overlap_size=lollmsElfServer.config.data_vectorization_overlap_size, force_vectorize=False, add_as_a_bloc=False)
            ASCIIColors.yellow("3- Indexing database")
            lollmsElfServer.info("Indexing database", True, None)
            lollmsElfServer.long_term_memory.index()
            ASCIIColors.yellow("Ready")
        except Exception as ex:
            lollmsElfServer.error(f"Couldn't vectorize the database:{ex}")
            return {"status":False}
    return {"status":True}
@router.post("/export_discussion")
def export_discussion():
    """Return the current discussion rendered as plain text."""
    discussion_text = lollmsElfServer.get_discussion_to()
    return {"discussion_text": discussion_text}
class DiscussionEditTitle(BaseModel):
    """Request carrying the client id, the new title and the target discussion id."""
    client_id: str
    title: str
    id: int
@router.post("/edit_title")
async def edit_title(discussion_edit_title: DiscussionEditTitle):
    """Rename the discussion identified by the request and make it the client's current one."""
    try:
        connection = lollmsElfServer.connections[discussion_edit_title.client_id]
        discussion = Discussion(discussion_edit_title.id, lollmsElfServer.db)
        connection["current_discussion"] = discussion
        discussion.rename(discussion_edit_title.title)
        return {'status':True}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.error(ex)
        return {"status":False,"error":str(ex)}
class DiscussionTitle(BaseModel):
    """Request body for POST /make_title."""
    id: int  # database id of the discussion to title
@router.post("/make_title")
async def make_title(discussion_title: DiscussionTitle):
    """Generate a title for the discussion via the model and persist it."""
    try:
        ASCIIColors.info("Making title")
        target_discussion = Discussion(discussion_title.id, lollmsElfServer.db)
        generated_title = lollmsElfServer.make_discussion_title(target_discussion)
        target_discussion.rename(generated_title)
        return {'status': True, 'title': generated_title}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.error(ex)
        return {"status": False, "error": str(ex)}
@router.get("/export")
def export():
    """Export the whole discussions database as JSON."""
    exported_db = lollmsElfServer.db.export_to_json()
    return exported_db
class DiscussionDelete(BaseModel):
    """Request body for POST /delete_discussion."""
    client_id: str  # id of the client connection whose current discussion is cleared
    id: int  # database id of the discussion to delete
@router.post("/delete_discussion")
async def delete_discussion(discussion: DiscussionDelete):
    """
    Deletes a discussion from the database and clears the client's current discussion.

    :param discussion: Payload carrying the client id and the id of the discussion to delete.
    :return: A JSON response with the status of the operation.
    """
    try:
        client_id = discussion.client_id
        discussion_id = discussion.id
        # Load the discussion first so it can be removed through its own delete method
        lollmsElfServer.connections[client_id]["current_discussion"] = Discussion(discussion_id, lollmsElfServer.db)
        lollmsElfServer.connections[client_id]["current_discussion"].delete_discussion()
        # The deleted discussion can no longer be the client's selection
        lollmsElfServer.connections[client_id]["current_discussion"] = None
        return {'status':True}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.error(ex)
        return {"status":False,"error":str(ex)}
# ----------------------------- import/export --------------------
class DiscussionExport(BaseModel):
    """Request body for POST /export_multiple_discussions."""
    discussion_ids: List[int]  # ids of the discussions to export
    export_format: str  # "json" or "markdown"; any other value falls back to markdown
@router.post("/export_multiple_discussions")
async def export_multiple_discussions(discussion_export: DiscussionExport):
    """
    Exports a selection of discussions in the requested format.

    :param discussion_export: Payload listing the discussion ids and the export format.
    :return: The exported discussions, or a status/error dict on failure.
    """
    try:
        discussion_ids = discussion_export.discussion_ids
        export_format = discussion_export.export_format
        if export_format == "json":
            discussions = lollmsElfServer.db.export_discussions_to_json(discussion_ids)
        else:
            # Markdown is the default: the original elif "markdown" branch and the
            # else branch were identical, so unrecognized formats fall back here too.
            discussions = lollmsElfServer.db.export_discussions_to_markdown(discussion_ids)
        return discussions
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.error(ex)
        return {"status": False, "error": str(ex)}
class DiscussionInfo(BaseModel):
    """A single discussion entry used by the import endpoint."""
    id: int  # discussion database id
    content: str  # discussion content (consumed by db.import_from_json)
class DiscussionImport(BaseModel):
    """Request body for POST /import_multiple_discussions."""
    jArray: List[DiscussionInfo]  # discussions to import
@router.post("/import_multiple_discussions")
async def import_multiple_discussions(discussion_import: DiscussionImport):
    """Import a list of discussions into the database and echo the payload back."""
    try:
        imported = discussion_import.jArray
        lollmsElfServer.db.import_from_json(imported)
        return imported
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.error(ex)
        return {"status": False, "error": str(ex)}

View File

@ -15,7 +15,7 @@ from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
from api.db import DiscussionsDB from lollms.databases.discussions_database import DiscussionsDB
from safe_store.text_vectorizer import TextVectorizer, VectorizationMethod, VisualizationMethod from safe_store.text_vectorizer import TextVectorizer, VectorizationMethod, VisualizationMethod
import tqdm import tqdm
@ -41,7 +41,7 @@ async def edit_message(edit_params: EditMessageParameters):
new_message = edit_params.message new_message = edit_params.message
metadata = json.dumps(edit_params.metadata,indent=4) metadata = json.dumps(edit_params.metadata,indent=4)
try: try:
lollmsElfServer.connections[client_id]["current_discussion"].edit_message(message_id, new_message, new_metadata=metadata) lollmsElfServer.session.get_client(client_id).discussion.edit_message(message_id, new_message, new_metadata=metadata)
return {"status": True} return {"status": True}
except Exception as ex: except Exception as ex:
trace_exception(ex) # Assuming 'trace_exception' function logs the error trace_exception(ex) # Assuming 'trace_exception' function logs the error
@ -59,7 +59,7 @@ async def message_rank_up(rank_params: MessageRankParameters):
message_id = rank_params.id message_id = rank_params.id
try: try:
new_rank = lollmsElfServer.connections[client_id]["current_discussion"].message_rank_up(message_id) new_rank = lollmsElfServer.session.get_client(client_id).discussion.message_rank_up(message_id)
return {"status": True, "new_rank": new_rank} return {"status": True, "new_rank": new_rank}
except Exception as ex: except Exception as ex:
trace_exception(ex) # Assuming 'trace_exception' function logs the error trace_exception(ex) # Assuming 'trace_exception' function logs the error
@ -71,7 +71,7 @@ def message_rank_down(rank_params: MessageRankParameters):
client_id = rank_params.client_id client_id = rank_params.client_id
message_id = rank_params.id message_id = rank_params.id
try: try:
new_rank = lollmsElfServer.connections[client_id]["current_discussion"].message_rank_down(message_id) new_rank = lollmsElfServer.session.get_client(client_id).discussion.message_rank_down(message_id)
return {"status": True, "new_rank": new_rank} return {"status": True, "new_rank": new_rank}
except Exception as ex: except Exception as ex:
return {"status": False, "error":str(ex)} return {"status": False, "error":str(ex)}
@ -85,11 +85,11 @@ async def delete_message(delete_params: MessageDeleteParameters):
client_id = delete_params.client_id client_id = delete_params.client_id
message_id = delete_params.id message_id = delete_params.id
if lollmsElfServer.connections[client_id]["current_discussion"] is None: if lollmsElfServer.session.get_client(client_id).discussion is None:
return {"status": False,"message":"No discussion is selected"} return {"status": False,"message":"No discussion is selected"}
else: else:
try: try:
new_rank = lollmsElfServer.connections[client_id]["current_discussion"].delete_message(message_id) new_rank = lollmsElfServer.session.get_client(client_id).discussion.delete_message(message_id)
ASCIIColors.yellow("Message deleted") ASCIIColors.yellow("Message deleted")
return {"status":True,"new_rank": new_rank} return {"status":True,"new_rank": new_rank}
except Exception as ex: except Exception as ex:

View File

@ -28,7 +28,7 @@ import threading
import os import os
import time import time
from api.db import Discussion from lollms.databases.discussions_database import Discussion
from datetime import datetime from datetime import datetime
router = APIRouter() router = APIRouter()
@ -46,7 +46,7 @@ def add_events(sio:socketio):
ASCIIColors.info(f"Building empty User message requested by : {client_id}") ASCIIColors.info(f"Building empty User message requested by : {client_id}")
# send the message to the bot # send the message to the bot
print(f"Creating an empty message for AI answer orientation") print(f"Creating an empty message for AI answer orientation")
if lollmsElfServer.connections[client_id]["current_discussion"]: if lollmsElfServer.session.get_client(client_id).discussion:
if not lollmsElfServer.model: if not lollmsElfServer.model:
lollmsElfServer.error("No model selected. Please make sure you select a model before starting generation", client_id = client_id) lollmsElfServer.error("No model selected. Please make sure you select a model before starting generation", client_id = client_id)
return return
@ -58,7 +58,7 @@ def add_events(sio:socketio):
ASCIIColors.info(f"Building empty AI message requested by : {client_id}") ASCIIColors.info(f"Building empty AI message requested by : {client_id}")
# send the message to the bot # send the message to the bot
print(f"Creating an empty message for AI answer orientation") print(f"Creating an empty message for AI answer orientation")
if lollmsElfServer.connections[client_id]["current_discussion"]: if lollmsElfServer.session.get_client(client_id).discussion:
if not lollmsElfServer.model: if not lollmsElfServer.model:
lollmsElfServer.error("No model selected. Please make sure you select a model before starting generation", client_id=client_id) lollmsElfServer.error("No model selected. Please make sure you select a model before starting generation", client_id=client_id)
return return

View File

@ -26,7 +26,7 @@ import socketio
import threading import threading
import os import os
from api.db import Discussion from lollms.databases.discussions_database import Discussion
from datetime import datetime from datetime import datetime
router = APIRouter() router = APIRouter()
@ -43,20 +43,20 @@ def add_events(sio:socketio):
ASCIIColors.yellow("New descussion requested") ASCIIColors.yellow("New descussion requested")
client_id = sid client_id = sid
title = data["title"] title = data["title"]
if lollmsElfServer.connections[client_id]["current_discussion"] is not None: if lollmsElfServer.session.get_client(client_id).discussion is not None:
if lollmsElfServer.long_term_memory is not None: if lollmsElfServer.long_term_memory is not None:
title, content = lollmsElfServer.connections[client_id]["current_discussion"].export_for_vectorization() title, content = lollmsElfServer.session.get_client(client_id).discussion.export_for_vectorization()
skill = lollmsElfServer.learn_from_discussion(title, content) skill = lollmsElfServer.learn_from_discussion(title, content)
lollmsElfServer.long_term_memory.add_document(title, skill, chunk_size=lollmsElfServer.config.data_vectorization_chunk_size, overlap_size=lollmsElfServer.config.data_vectorization_overlap_size, force_vectorize=False, add_as_a_bloc=False, add_to_index=True) lollmsElfServer.long_term_memory.add_document(title, skill, chunk_size=lollmsElfServer.config.data_vectorization_chunk_size, overlap_size=lollmsElfServer.config.data_vectorization_overlap_size, force_vectorize=False, add_as_a_bloc=False, add_to_index=True)
ASCIIColors.yellow("4- Saving database") ASCIIColors.yellow("4- Saving database")
lollmsElfServer.long_term_memory.save_to_json() lollmsElfServer.long_term_memory.save_to_json()
lollmsElfServer.connections[client_id]["current_discussion"] = lollmsElfServer.db.create_discussion(title) lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.create_discussion(title)
# Get the current timestamp # Get the current timestamp
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
# Return a success response # Return a success response
if lollmsElfServer.connections[client_id]["current_discussion"] is None: if lollmsElfServer.session.get_client(client_id).discussion is None:
lollmsElfServer.connections[client_id]["current_discussion"] = lollmsElfServer.db.load_last_discussion() lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.load_last_discussion()
if lollmsElfServer.personality.welcome_message!="": if lollmsElfServer.personality.welcome_message!="":
if lollmsElfServer.config.force_output_language_to_be and lollmsElfServer.config.force_output_language_to_be.lower().strip() !="english": if lollmsElfServer.config.force_output_language_to_be and lollmsElfServer.config.force_output_language_to_be.lower().strip() !="english":
@ -64,7 +64,7 @@ def add_events(sio:socketio):
else: else:
welcome_message = lollmsElfServer.personality.welcome_message welcome_message = lollmsElfServer.personality.welcome_message
message = lollmsElfServer.connections[client_id]["current_discussion"].add_message( message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value if lollmsElfServer.personality.include_welcome_message_in_disucssion else MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value, message_type = MSG_TYPE.MSG_TYPE_FULL.value if lollmsElfServer.personality.include_welcome_message_in_disucssion else MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value,
sender_type = SENDER_TYPES.SENDER_TYPES_AI.value, sender_type = SENDER_TYPES.SENDER_TYPES_AI.value,
sender = lollmsElfServer.personality.name, sender = lollmsElfServer.personality.name,
@ -80,7 +80,7 @@ def add_events(sio:socketio):
) )
await lollmsElfServer.sio.emit('discussion_created', await lollmsElfServer.sio.emit('discussion_created',
{'id':lollmsElfServer.connections[client_id]["current_discussion"].discussion_id}, {'id':lollmsElfServer.session.get_client(client_id).discussion.discussion_id},
to=client_id to=client_id
) )
else: else:
@ -95,14 +95,14 @@ def add_events(sio:socketio):
ASCIIColors.yellow(f"Loading discussion for client {client_id} ... ", end="") ASCIIColors.yellow(f"Loading discussion for client {client_id} ... ", end="")
if "id" in data: if "id" in data:
discussion_id = data["id"] discussion_id = data["id"]
lollmsElfServer.connections[client_id]["current_discussion"] = Discussion(discussion_id, lollmsElfServer.db) lollmsElfServer.session.get_client(client_id).discussion = Discussion(discussion_id, lollmsElfServer.db)
else: else:
if lollmsElfServer.connections[client_id]["current_discussion"] is not None: if lollmsElfServer.session.get_client(client_id).discussion is not None:
discussion_id = lollmsElfServer.connections[client_id]["current_discussion"].discussion_id discussion_id = lollmsElfServer.session.get_client(client_id).discussion.discussion_id
lollmsElfServer.connections[client_id]["current_discussion"] = Discussion(discussion_id, lollmsElfServer.db) lollmsElfServer.session.get_client(client_id).discussion = Discussion(discussion_id, lollmsElfServer.db)
else: else:
lollmsElfServer.connections[client_id]["current_discussion"] = lollmsElfServer.db.create_discussion() lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.create_discussion()
messages = lollmsElfServer.connections[client_id]["current_discussion"].get_messages() messages = lollmsElfServer.session.get_client(client_id).discussion.get_messages()
jsons = [m.to_json() for m in messages] jsons = [m.to_json() for m in messages]
await lollmsElfServer.sio.emit('discussion', await lollmsElfServer.sio.emit('discussion',
jsons, jsons,

View File

@ -36,10 +36,12 @@ def add_events(sio:socketio):
def handle_generate_msg(sid, data): def handle_generate_msg(sid, data):
client_id = sid client_id = sid
lollmsElfServer.cancel_gen = False lollmsElfServer.cancel_gen = False
lollmsElfServer.connections[client_id]["generated_text"]="" client = lollmsElfServer.session.get_client(client_id)
lollmsElfServer.connections[client_id]["cancel_generation"]=False
lollmsElfServer.connections[client_id]["continuing"]=False client.generated_text=""
lollmsElfServer.connections[client_id]["first_chunk"]=True client.cancel_generation=False
client.continuing=False
client.first_chunk=True
@ -49,15 +51,15 @@ def add_events(sio:socketio):
return return
if not lollmsElfServer.busy: if not lollmsElfServer.busy:
if lollmsElfServer.connections[client_id]["current_discussion"] is None: if lollmsElfServer.session.get_client(client_id).discussion is None:
if lollmsElfServer.db.does_last_discussion_have_messages(): if lollmsElfServer.db.does_last_discussion_have_messages():
lollmsElfServer.connections[client_id]["current_discussion"] = lollmsElfServer.db.create_discussion() lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.create_discussion()
else: else:
lollmsElfServer.connections[client_id]["current_discussion"] = lollmsElfServer.db.load_last_discussion() lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.load_last_discussion()
prompt = data["prompt"] prompt = data["prompt"]
ump = lollmsElfServer.config.discussion_prompt_separator +lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix ump = lollmsElfServer.config.discussion_prompt_separator +lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix
message = lollmsElfServer.connections[client_id]["current_discussion"].add_message( message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value, message_type = MSG_TYPE.MSG_TYPE_FULL.value,
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value, sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""), sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
@ -67,8 +69,8 @@ def add_events(sio:socketio):
) )
ASCIIColors.green("Starting message generation by "+lollmsElfServer.personality.name) ASCIIColors.green("Starting message generation by "+lollmsElfServer.personality.name)
lollmsElfServer.connections[client_id]['generation_thread'] = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message.id, client_id)) client.generation_thread = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message.id, client_id))
lollmsElfServer.connections[client_id]['generation_thread'].start() client.generation_thread.start()
# lollmsElfServer.sio.sleep(0.01) # lollmsElfServer.sio.sleep(0.01)
ASCIIColors.info("Started generation task") ASCIIColors.info("Started generation task")
@ -81,43 +83,45 @@ def add_events(sio:socketio):
@sio.on('generate_msg_from') @sio.on('generate_msg_from')
def handle_generate_msg_from(sid, data): def handle_generate_msg_from(sid, data):
client_id = sid client_id = sid
client = lollmsElfServer.session.get_client(client_id)
lollmsElfServer.cancel_gen = False lollmsElfServer.cancel_gen = False
lollmsElfServer.connections[client_id]["continuing"]=False client.continuing=False
lollmsElfServer.connections[client_id]["first_chunk"]=True client.first_chunk=True
if lollmsElfServer.connections[client_id]["current_discussion"] is None: if lollmsElfServer.session.get_client(client_id).discussion is None:
ASCIIColors.warning("Please select a discussion") ASCIIColors.warning("Please select a discussion")
lollmsElfServer.error("Please select a discussion first", client_id=client_id) lollmsElfServer.error("Please select a discussion first", client_id=client_id)
return return
id_ = data['id'] id_ = data['id']
generation_type = data.get('msg_type',None) generation_type = data.get('msg_type',None)
if id_==-1: if id_==-1:
message = lollmsElfServer.connections[client_id]["current_discussion"].current_message message = lollmsElfServer.session.get_client(client_id).discussion.current_message
else: else:
message = lollmsElfServer.connections[client_id]["current_discussion"].load_message(id_) message = lollmsElfServer.session.get_client(client_id).discussion.load_message(id_)
if message is None: if message is None:
return return
lollmsElfServer.connections[client_id]['generation_thread'] = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message.id, client_id, False, generation_type)) client.generation_thread = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message.id, client_id, False, generation_type))
lollmsElfServer.connections[client_id]['generation_thread'].start() client.generation_thread.start()
@sio.on('continue_generate_msg_from') @sio.on('continue_generate_msg_from')
def handle_continue_generate_msg_from(sid, data): def handle_continue_generate_msg_from(sid, data):
client_id = sid client_id = sid
client = lollmsElfServer.session.get_client(client_id)
lollmsElfServer.cancel_gen = False lollmsElfServer.cancel_gen = False
lollmsElfServer.connections[client_id]["continuing"]=True client.continuing=True
lollmsElfServer.connections[client_id]["first_chunk"]=True client.first_chunk=True
if lollmsElfServer.connections[client_id]["current_discussion"] is None: if lollmsElfServer.session.get_client(client_id).discussion is None:
ASCIIColors.yellow("Please select a discussion") ASCIIColors.yellow("Please select a discussion")
lollmsElfServer.error("Please select a discussion", client_id=client_id) lollmsElfServer.error("Please select a discussion", client_id=client_id)
return return
id_ = data['id'] id_ = data['id']
if id_==-1: if id_==-1:
message = lollmsElfServer.connections[client_id]["current_discussion"].current_message message = lollmsElfServer.session.get_client(client_id).discussion.current_message
else: else:
message = lollmsElfServer.connections[client_id]["current_discussion"].load_message(id_) message = lollmsElfServer.session.get_client(client_id).discussion.load_message(id_)
lollmsElfServer.connections[client_id]["generated_text"]=message.content client.generated_text=message.content
lollmsElfServer.connections[client_id]['generation_thread'] = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message.id, client_id, True)) client.generation_thread = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message.id, client_id, True))
lollmsElfServer.connections[client_id]['generation_thread'].start() client.generation_thread.start()

View File

@ -28,7 +28,7 @@ import threading
import os import os
import time import time
from api.db import Discussion from lollms.databases.discussions_database import Discussion
from datetime import datetime from datetime import datetime
router = APIRouter() router = APIRouter()

@ -1 +1 @@
Subproject commit 5424913d1237f6bfc0c2175170b63260875f35f8 Subproject commit e5bd88ade664254a29275bd950b6e194aa34ceef

View File

@ -8,7 +8,7 @@ This class provides a singleton instance of the LoLLMS web UI, allowing access t
from lollms.server.elf_server import LOLLMSElfServer from lollms.server.elf_server import LOLLMSElfServer
from datetime import datetime from datetime import datetime
from api.db import DiscussionsDB, Discussion from lollms.databases.discussions_database import DiscussionsDB, Discussion
from pathlib import Path from pathlib import Path
from lollms.config import InstallOption from lollms.config import InstallOption
from lollms.types import MSG_TYPE, SENDER_TYPES from lollms.types import MSG_TYPE, SENDER_TYPES
@ -20,7 +20,7 @@ from lollms.helpers import ASCIIColors, trace_exception
from lollms.com import NotificationType, NotificationDisplayType, LoLLMsCom from lollms.com import NotificationType, NotificationDisplayType, LoLLMsCom
from lollms.app import LollmsApplication from lollms.app import LollmsApplication
from lollms.utilities import File64BitsManager, PromptReshaper, PackageManager, find_first_available_file_index, run_async, is_asyncio_loop_running, yes_or_no_input from lollms.utilities import File64BitsManager, PromptReshaper, PackageManager, find_first_available_file_index, run_async, is_asyncio_loop_running, yes_or_no_input
from lollms.generation import RECPTION_MANAGER, ROLE_CHANGE_DECISION, ROLE_CHANGE_OURTPUT from lollms.generation import RECEPTION_MANAGER, ROLE_CHANGE_DECISION, ROLE_CHANGE_OURTPUT
import git import git
import asyncio import asyncio
@ -212,45 +212,21 @@ class LOLLMSWebUI(LOLLMSElfServer):
# This is used to keep track of messages # This is used to keep track of messages
self.download_infos={} self.download_infos={}
self.connections = {
0:{
"current_discussion":None,
"generated_text":"",
"cancel_generation": False,
"generation_thread": None,
"processing":False,
"schedule_for_deletion":False,
"continuing": False,
"first_chunk": True,
"reception_manager": RECPTION_MANAGER()
}
}
# Define a WebSocket event handler # Define a WebSocket event handler
@sio.event @sio.event
async def connect(sid, environ): async def connect(sid, environ):
#Create a new connection information self.session.add_client(sid, sid, self.db.load_last_discussion(), self.db)
self.connections[sid] = {
"current_discussion":self.db.load_last_discussion(),
"generated_text":"",
"continuing": False,
"first_chunk": True,
"cancel_generation": False,
"generation_thread": None,
"processing":False,
"schedule_for_deletion":False,
"reception_manager":RECPTION_MANAGER()
}
await self.sio.emit('connected', to=sid) await self.sio.emit('connected', to=sid)
ASCIIColors.success(f'Client {sid} connected') ASCIIColors.success(f'Client {sid} connected')
@sio.event @sio.event
def disconnect(sid): def disconnect(sid):
try: try:
if self.connections[sid]["processing"]: self.session.add_client(sid, sid, self.db.load_last_discussion(), self.db)
self.connections[sid]["schedule_for_deletion"]=True if self.session.get_client(sid).processing:
# else: self.session.get_client(sid).schedule_for_deletion=True
# del self.connections[sid] else:
self.session.remove_client(sid, sid)
except Exception as ex: except Exception as ex:
pass pass
@ -264,7 +240,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
self.start_servers() self.start_servers()
def get_uploads_path(self, client_id): def get_uploads_path(self, client_id):
return self.db.discussion_db_path/f'{self.connections[client_id]["current_discussion"].discussion_id}' return self.session.get_client(client_id).discussion_path # self.db.discussion_db_path/f'{["discussion"].discussion_id}'
# Other methods and properties of the LoLLMSWebUI singleton class # Other methods and properties of the LoLLMSWebUI singleton class
def check_module_update_(self, repo_path, branch_name="main"): def check_module_update_(self, repo_path, branch_name="main"):
try: try:
@ -354,10 +330,11 @@ class LOLLMSWebUI(LOLLMSElfServer):
if self.summoned: if self.summoned:
client_id = 0 client_id = 0
self.cancel_gen = False self.cancel_gen = False
self.connections[client_id]["generated_text"]="" client = self.session.get_client(client_id)
self.connections[client_id]["cancel_generation"]=False client.generated_text=""
self.connections[client_id]["continuing"]=False client.cancel_generation=False
self.connections[client_id]["first_chunk"]=True client.continuing=False
client.first_chunk=True
if not self.model: if not self.model:
ASCIIColors.error("Model not selected. Please select a model") ASCIIColors.error("Model not selected. Please select a model")
@ -365,15 +342,15 @@ class LOLLMSWebUI(LOLLMSElfServer):
return return
if not self.busy: if not self.busy:
if self.connections[client_id]["current_discussion"] is None: if client.discussion is None:
if self.db.does_last_discussion_have_messages(): if self.db.does_last_discussion_have_messages():
self.connections[client_id]["current_discussion"] = self.db.create_discussion() client.discussion = self.db.create_discussion()
else: else:
self.connections[client_id]["current_discussion"] = self.db.load_last_discussion() client.discussion = self.db.load_last_discussion()
prompt = text prompt = text
ump = self.config.discussion_prompt_separator +self.config.user_name.strip() if self.config.use_user_name_in_discussions else self.personality.user_message_prefix ump = self.config.discussion_prompt_separator +self.config.user_name.strip() if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
message = self.connections[client_id]["current_discussion"].add_message( message = client.discussion.add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value, message_type = MSG_TYPE.MSG_TYPE_FULL.value,
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value, sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
sender = ump.replace(self.config.discussion_prompt_separator,"").replace(":",""), sender = ump.replace(self.config.discussion_prompt_separator,"").replace(":",""),
@ -383,8 +360,8 @@ class LOLLMSWebUI(LOLLMSElfServer):
) )
ASCIIColors.green("Starting message generation by "+self.personality.name) ASCIIColors.green("Starting message generation by "+self.personality.name)
self.connections[client_id]['generation_thread'] = threading.Thread(target=self.start_message_generation, args=(message, message.id, client_id)) client.generation_thread = threading.Thread(target=self.start_message_generation, args=(message, message.id, client_id))
self.connections[client_id]['generation_thread'].start() client.generation_thread.start()
self.sio.sleep(0.01) self.sio.sleep(0.01)
ASCIIColors.info("Started generation task") ASCIIColors.info("Started generation task")
@ -689,328 +666,17 @@ class LOLLMSWebUI(LOLLMSElfServer):
def recover_discussion(self,client_id, message_index=-1): def recover_discussion(self,client_id, message_index=-1):
messages = self.connections[client_id]["current_discussion"].get_messages() messages = self.session.get_client(client_id).discussion.get_messages()
discussion="" discussion=""
for msg in messages: for msg in messages:
if message_index!=-1 and msg>message_index: if message_index!=-1 and msg>message_index:
break break
discussion += "\n" + self.config.discussion_prompt_separator + msg.sender + ": " + msg.content.strip() discussion += "\n" + self.config.discussion_prompt_separator + msg.sender + ": " + msg.content.strip()
return discussion return discussion
def prepare_query(self, client_id: str, message_id: int = -1, is_continue: bool = False, n_tokens: int = 0, generation_type = None) -> Tuple[str, str, List, dict, List]:
    """
    Prepares the full prompt for the model from the client's discussion.

    Assembles conditionning, optional internet-search results, documentation,
    long-term-memory knowledge, user description and boost blocks, then fills
    the remaining context window with as many discussion messages as fit.

    Args:
        client_id (str): The client ID.
        message_id (int): The message ID to generate from. Default is -1.
        is_continue (bool): Whether the query continues an existing message. Default is False.
        n_tokens (int): Number of tokens reserved for the generation. Default is 0.
        generation_type: If "simple_question", only the target message is used
            (no retrieval, no history accumulation). Default is None.

    Returns:
        Tuple: (prompt_data, current message content, tokenized prompt,
        context_details dict, internet_search_infos list).

    Raises:
        Exception: If the context window has no room left for the prompt.
    """
    # Route generation chunks of this request back to the right client.
    if self.personality.callback is None:
        self.personality.callback = partial(self.process_chunk, client_id=client_id)
    # Get the list of messages
    messages = self.connections[client_id]["current_discussion"].get_messages()

    # Find the index of the message with the specified message_id
    message_index = -1
    for i, message in enumerate(messages):
        if message.id == message_id:
            message_index = i
            break

    # Define current message
    current_message = messages[message_index]

    # Build the conditionning text block
    conditionning = self.personality.personality_conditioning

    # Optional context blocks, filled in below when the matching feature is active.
    internet_search_results = ""
    internet_search_infos = []
    documentation = ""
    knowledge = ""

    # Boosting information: each active block is built alongside its token
    # count so it can be charged against the context budget.
    if self.config.positive_boost:
        positive_boost="\n!@>important information: "+self.config.positive_boost+"\n"
        n_positive_boost = len(self.model.tokenize(positive_boost))
    else:
        positive_boost=""
        n_positive_boost = 0

    if self.config.negative_boost:
        negative_boost="\n!@>important information: "+self.config.negative_boost+"\n"
        n_negative_boost = len(self.model.tokenize(negative_boost))
    else:
        negative_boost=""
        n_negative_boost = 0

    if self.config.force_output_language_to_be:
        force_language="\n!@>important information: Answer the user in this language :"+self.config.force_output_language_to_be+"\n"
        n_force_language = len(self.model.tokenize(force_language))
    else:
        force_language=""
        n_force_language = 0

    if self.config.fun_mode:
        fun_mode="\n!@>important information: Fun mode activated. In this mode you must answer in a funny playful way. Do not be serious in your answers. Each answer needs to make the user laugh.\n"
        # BUGFIX: previously tokenized positive_boost here, so the fun-mode
        # block was mis-counted in the context budget.
        n_fun_mode = len(self.model.tokenize(fun_mode))
    else:
        fun_mode=""
        n_fun_mode = 0

    # Lazily recovered transcript, shared by the retrieval steps below.
    discussion = None

    if generation_type != "simple_question":

        if self.config.activate_internet_search:
            if discussion is None:
                discussion = self.recover_discussion(client_id)
            if self.config.internet_activate_search_decision:
                # Ask the model itself whether a web search is needed.
                self.personality.step_start(f"Requesting if {self.personality.name} needs to search internet to answer the user")
                need = not self.personality.yes_no(f"Do you have enough information to give a satisfactory answer to {self.config.user_name}'s request without internet search? (If you do not know or you can't answer 0 (no)", discussion)
                self.personality.step_end(f"Requesting if {self.personality.name} needs to search internet to answer the user")
                self.personality.step("Yes" if need else "No")
            else:
                need = True
            if need:
                self.personality.step_start("Crafting internet search query")
                query = self.personality.fast_gen(f"!@>discussion:\n{discussion[-2048:]}\n!@>system: Read the discussion and craft a web search query suited to recover needed information to reply to last {self.config.user_name} message.\nDo not answer the prompt. Do not add explanations.\n!@>websearch query: ", max_generation_size=256, show_progress=True, callback=self.personality.sink)
                self.personality.step_end("Crafting internet search query")
                self.personality.step(f"web search query: {query}")
                self.personality.step_start("Performing Internet search")

                internet_search_results = f"!@>important information: Use the internet search results data to answer {self.config.user_name}'s last message. It is strictly forbidden to give the user an answer without having actual proof from the documentation.\n!@>Web search results:\n"

                docs, sorted_similarities, document_ids = self.personality.internet_search(query, self.config.internet_quick_search)
                for doc, infos, document_id in zip(docs, sorted_similarities, document_ids):
                    internet_search_infos.append(document_id)
                    internet_search_results += f"search result chunk:\nchunk_infos:{document_id['url']}\nchunk_title:{document_id['title']}\ncontent:{doc}"
                self.personality.step_end("Performing Internet search")

        if self.personality.persona_data_vectorizer:
            if documentation == "":
                documentation = "\n!@>important information: Use the documentation data to answer the user questions. If the data is not present in the documentation, please tell the user that the information he is asking for does not exist in the documentation section. It is strictly forbidden to give the user an answer without having actual proof from the documentation.\n!@>Documentation:\n"

            if self.config.data_vectorization_build_keys_words:
                if discussion is None:
                    discussion = self.recover_discussion(client_id)
                # Rewrite the last prompt as a standalone query for retrieval.
                query = self.personality.fast_gen(f"\n!@>instruction: Read the discussion and rewrite the last prompt for someone who didn't read the entire discussion.\nDo not answer the prompt. Do not add explanations.\n!@>discussion:\n{discussion[-2048:]}\n!@>enhanced query: ", max_generation_size=256, show_progress=True)
                ASCIIColors.cyan(f"Query:{query}")
            else:
                query = current_message.content
            try:
                docs, sorted_similarities, document_ids = self.personality.persona_data_vectorizer.recover_text(query, top_k=self.config.data_vectorization_nb_chunks)
                for doc, infos, doc_id in zip(docs, sorted_similarities, document_ids):
                    documentation += f"document chunk:\nchunk_infos:{infos}\ncontent:{doc}"
            except Exception:
                # Best-effort: retrieval failure must not abort the generation.
                # (Was a bare `except:`; narrowed so Ctrl-C still propagates.)
                self.warning("Couldn't add documentation to the context. Please verify the vector database")

        if len(self.personality.text_files) > 0 and self.personality.vectorizer:
            if documentation == "":
                documentation = "\n!@>important information: Use the documentation data to answer the user questions. If the data is not present in the documentation, please tell the user that the information he is asking for does not exist in the documentation section. It is strictly forbidden to give the user an answer without having actual proof from the documentation.\n!@>Documentation:\n"

            if self.config.data_vectorization_build_keys_words:
                discussion = self.recover_discussion(client_id)
                query = self.personality.fast_gen(f"\n!@>instruction: Read the discussion and rewrite the last prompt for someone who didn't read the entire discussion.\nDo not answer the prompt. Do not add explanations.\n!@>discussion:\n{discussion[-2048:]}\n!@>enhanced query: ", max_generation_size=256, show_progress=True)
                ASCIIColors.cyan(f"Query: {query}")
            else:
                query = current_message.content
            try:
                docs, sorted_similarities, document_ids = self.personality.vectorizer.recover_text(query, top_k=self.config.data_vectorization_nb_chunks)
                for doc, infos in zip(docs, sorted_similarities):
                    documentation += f"document chunk:\nchunk path: {infos[0]}\nchunk content:{doc}"
                documentation += "\n!@>important information: Use the documentation data to answer the user questions. If the data is not present in the documentation, please tell the user that the information he is asking for does not exist in the documentation section. It is strictly forbidden to give the user an answer without having actual proof from the documentation."
            except Exception:
                # Best-effort: see note above. (Was a bare `except:`.)
                self.warning("Couldn't add documentation to the context. Please verify the vector database")

        # Check if there is discussion knowledge to add to the prompt
        if self.config.activate_ltm and self.long_term_memory is not None:
            if knowledge == "":
                knowledge = "!@>knowledge:\n"
            try:
                docs, sorted_similarities, document_ids = self.long_term_memory.recover_text(current_message.content, top_k=self.config.data_vectorization_nb_chunks)
                for i, (doc, infos) in enumerate(zip(docs, sorted_similarities)):
                    knowledge += f"!@>knowledge {i}:\n!@>title:\n{infos[0]}\ncontent:\n{doc}"
            except Exception:
                # Best-effort: see note above. (Was a bare `except:`.)
                self.warning("Couldn't add long term memory information to the context. Please verify the vector database")

    # Add information about the user
    user_description = ""
    if self.config.use_user_name_in_discussions:
        user_description = "!@>User description:\n" + self.config.user_description + "\n"

    # Tokenize the conditionning text and calculate its number of tokens
    tokens_conditionning = self.model.tokenize(conditionning)
    n_cond_tk = len(tokens_conditionning)

    # Tokenize the internet search results text and calculate its number of tokens
    if len(internet_search_results) > 0:
        tokens_internet_search_results = self.model.tokenize(internet_search_results)
        n_isearch_tk = len(tokens_internet_search_results)
    else:
        tokens_internet_search_results = []
        n_isearch_tk = 0

    # Tokenize the documentation text and calculate its number of tokens
    if len(documentation) > 0:
        tokens_documentation = self.model.tokenize(documentation)
        n_doc_tk = len(tokens_documentation)
    else:
        tokens_documentation = []
        n_doc_tk = 0

    # Tokenize the knowledge text and calculate its number of tokens
    if len(knowledge) > 0:
        tokens_history = self.model.tokenize(knowledge)
        n_history_tk = len(tokens_history)
    else:
        tokens_history = []
        n_history_tk = 0

    # Tokenize user description
    if len(user_description) > 0:
        tokens_user_description = self.model.tokenize(user_description)
        n_user_description_tk = len(tokens_user_description)
    else:
        tokens_user_description = []
        n_user_description_tk = 0

    # Calculate the total number of tokens between conditionning, documentation, and knowledge
    total_tokens = n_cond_tk + n_isearch_tk + n_doc_tk + n_history_tk + n_user_description_tk + n_positive_boost + n_negative_boost + n_force_language + n_fun_mode

    # Calculate the available space for the messages
    available_space = self.config.ctx_size - n_tokens - total_tokens

    # Raise an error if the available space is 0 or less
    if available_space < 1:
        self.error(f"Not enough space in context!!\nVerify that your vectorization settings for documents or internet search are realistic compared to your context size.\nYou are {available_space} short of context!")
        raise Exception("Not enough space in context!!")

    # Accumulate messages until the cumulative number of tokens exceeds available_space
    tokens_accumulated = 0

    # Initialize a list to store the full messages
    full_message_list = []
    # If this is not a continue request, we add the AI prompt
    if not is_continue:
        message_tokenized = self.model.tokenize(
            "\n" + self.personality.ai_message_prefix.strip()
        )
        full_message_list.append(message_tokenized)
        # Update the cumulative number of tokens
        tokens_accumulated += len(message_tokenized)

    if generation_type != "simple_question":
        # Accumulate messages starting from message_index, walking backwards
        # so the most recent messages are kept when space runs out.
        for i in range(message_index, -1, -1):
            message = messages[i]

            # Check if the message content is not empty and visible to the AI
            if message.content != '' and (
                    message.message_type <= MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER.value and message.message_type != MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value):

                # Tokenize the message content
                message_tokenized = self.model.tokenize(
                    "\n" + self.config.discussion_prompt_separator + message.sender + ": " + message.content.strip())

                # Check if adding the message will exceed the available space
                if tokens_accumulated + len(message_tokenized) > available_space:
                    break

                # Add the tokenized message to the full_message_list
                full_message_list.insert(0, message_tokenized)

                # Update the cumulative number of tokens
                tokens_accumulated += len(message_tokenized)
    else:
        # Simple question: only the target message is used as context.
        message = messages[message_index]

        # Check if the message content is not empty and visible to the AI
        if message.content != '' and (
                message.message_type <= MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER.value and message.message_type != MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value):

            # Tokenize the message content
            message_tokenized = self.model.tokenize(
                "\n" + self.config.discussion_prompt_separator + message.sender + ": " + message.content.strip())

            # Add the tokenized message to the full_message_list
            full_message_list.insert(0, message_tokenized)

            # Update the cumulative number of tokens
            tokens_accumulated += len(message_tokenized)

    # Build the final discussion messages by detokenizing the full_message_list
    # (the last entry is kept apart: it is the AI prefix appended above).
    discussion_messages = ""
    for i in range(len(full_message_list) - 1):
        message_tokens = full_message_list[i]
        discussion_messages += self.model.detokenize(message_tokens)

    if len(full_message_list) > 0:
        ai_prefix = self.model.detokenize(full_message_list[-1])
    else:
        ai_prefix = ""

    # Build the final prompt by concatenating the conditionning and discussion messages
    prompt_data = conditionning + internet_search_results + documentation + knowledge + user_description + discussion_messages + positive_boost + negative_boost + force_language + fun_mode + ai_prefix

    # Tokenize the prompt data
    tokens = self.model.tokenize(prompt_data)

    # if this is a debug then show prompt construction details
    if self.config["debug"]:
        ASCIIColors.bold("CONDITIONNING")
        ASCIIColors.yellow(conditionning)
        ASCIIColors.bold("INTERNET SEARCH")
        ASCIIColors.yellow(internet_search_results)
        ASCIIColors.bold("DOC")
        ASCIIColors.yellow(documentation)
        ASCIIColors.bold("HISTORY")
        ASCIIColors.yellow(knowledge)
        ASCIIColors.bold("DISCUSSION")
        ASCIIColors.hilight(discussion_messages, "!@>", ASCIIColors.color_yellow, ASCIIColors.color_bright_red, False)
        ASCIIColors.bold("Final prompt")
        ASCIIColors.hilight(prompt_data, "!@>", ASCIIColors.color_yellow, ASCIIColors.color_bright_red, False)
        ASCIIColors.info(f"prompt size:{len(tokens)} tokens")
        ASCIIColors.info(f"available space after doc and knowledge:{available_space} tokens")

    # Details of every block, exposed to the caller for UI/inspection.
    context_details = {
        "conditionning": conditionning,
        "internet_search_infos": internet_search_infos,
        "internet_search_results": internet_search_results,
        "documentation": documentation,
        "knowledge": knowledge,
        "user_description": user_description,
        "discussion_messages": discussion_messages,
        "positive_boost": positive_boost,
        "negative_boost": negative_boost,
        "force_language": force_language,
        "fun_mode": fun_mode,
        "ai_prefix": ai_prefix
    }

    # Return the prepared query, original message content, and tokenized query
    return prompt_data, current_message.content, tokens, context_details, internet_search_infos
def get_discussion_to(self, client_id, message_id=-1): def get_discussion_to(self, client_id, message_id=-1):
messages = self.connections[client_id]["current_discussion"].get_messages() messages = self.session.get_client(client_id).discussion.get_messages()
full_message_list = [] full_message_list = []
ump = self.config.discussion_prompt_separator +self.config.user_name.strip() if self.config.use_user_name_in_discussions else self.personality.user_message_prefix ump = self.config.discussion_prompt_separator +self.config.user_name.strip() if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
@ -1044,7 +710,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
verbose = self.verbose verbose = self.verbose
run_async(partial(self.sio.emit,'notification', { run_async(partial(self.sio.emit,'notification', {
'content': content,# self.connections[client_id]["generated_text"], 'content': content,
'notification_type': notification_type.value, 'notification_type': notification_type.value,
"duration": duration, "duration": duration,
'display_type':display_type.value 'display_type':display_type.value
@ -1073,11 +739,12 @@ class LOLLMSWebUI(LOLLMSElfServer):
sender_type:SENDER_TYPES=SENDER_TYPES.SENDER_TYPES_AI, sender_type:SENDER_TYPES=SENDER_TYPES.SENDER_TYPES_AI,
open=False open=False
): ):
client = self.session.get_client(client_id)
self.close_message(client_id) self.close_message(client_id)
mtdt = metadata if metadata is None or type(metadata) == str else json.dumps(metadata, indent=4) mtdt = metadata if metadata is None or type(metadata) == str else json.dumps(metadata, indent=4)
if sender==None: if sender==None:
sender= self.personality.name sender= self.personality.name
msg = self.connections[client_id]["current_discussion"].add_message( msg = client.discussion.add_message(
message_type = message_type.value, message_type = message_type.value,
sender_type = sender_type.value, sender_type = sender_type.value,
sender = sender, sender = sender,
@ -1085,7 +752,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
metadata = mtdt, metadata = mtdt,
ui = ui, ui = ui,
rank = 0, rank = 0,
parent_message_id = self.connections[client_id]["current_discussion"].current_message.id, parent_message_id = client.discussion.current_message.id,
binding = self.config["binding_name"], binding = self.config["binding_name"],
model = self.config["model_name"], model = self.config["model_name"],
personality = self.config["personalities"][self.config["active_personality_id"]], personality = self.config["personalities"][self.config["active_personality_id"]],
@ -1107,8 +774,8 @@ class LOLLMSWebUI(LOLLMSElfServer):
'model' : self.config["model_name"], 'model' : self.config["model_name"],
'personality': self.config["personalities"][self.config["active_personality_id"]], 'personality': self.config["personalities"][self.config["active_personality_id"]],
'created_at': self.connections[client_id]["current_discussion"].current_message.created_at, 'created_at': client.discussion.current_message.created_at,
'finished_generating_at': self.connections[client_id]["current_discussion"].current_message.finished_generating_at, 'finished_generating_at': client.discussion.current_message.finished_generating_at,
'open': open 'open': open
}, to=client_id }, to=client_id
@ -1121,18 +788,19 @@ class LOLLMSWebUI(LOLLMSElfServer):
ui=None, ui=None,
msg_type:MSG_TYPE=None msg_type:MSG_TYPE=None
): ):
self.connections[client_id]["current_discussion"].current_message.finished_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S') client = self.session.get_client(client_id)
client.discussion.current_message.finished_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
mtdt = json.dumps(metadata, indent=4) if metadata is not None and type(metadata)== list else metadata mtdt = json.dumps(metadata, indent=4) if metadata is not None and type(metadata)== list else metadata
if self.nb_received_tokens==1: if self.nb_received_tokens==1:
run_async( run_async(
partial(self.sio.emit,'update_message', { partial(self.sio.emit,'update_message', {
"sender": self.personality.name, "sender": self.personality.name,
'id':self.connections[client_id]["current_discussion"].current_message.id, 'id':client.discussion.current_message.id,
'content': "✍ warming up ...",# self.connections[client_id]["generated_text"], 'content': "✍ warming up ...",
'ui': ui, 'ui': ui,
'discussion_id':self.connections[client_id]["current_discussion"].discussion_id, 'discussion_id':client.discussion.discussion_id,
'message_type': MSG_TYPE.MSG_TYPE_STEP_END.value, 'message_type': MSG_TYPE.MSG_TYPE_STEP_END.value,
'finished_generating_at': self.connections[client_id]["current_discussion"].current_message.finished_generating_at, 'finished_generating_at': client.discussion.current_message.finished_generating_at,
'parameters':parameters, 'parameters':parameters,
'metadata':metadata 'metadata':metadata
}, to=client_id }, to=client_id
@ -1142,41 +810,42 @@ class LOLLMSWebUI(LOLLMSElfServer):
run_async( run_async(
partial(self.sio.emit,'update_message', { partial(self.sio.emit,'update_message', {
"sender": self.personality.name, "sender": self.personality.name,
'id':self.connections[client_id]["current_discussion"].current_message.id, 'id':client.discussion.current_message.id,
'content': chunk,# self.connections[client_id]["generated_text"], 'content': chunk,
'ui': ui, 'ui': ui,
'discussion_id':self.connections[client_id]["current_discussion"].discussion_id, 'discussion_id':client.discussion.discussion_id,
'message_type': msg_type.value if msg_type is not None else MSG_TYPE.MSG_TYPE_CHUNK.value if self.nb_received_tokens>1 else MSG_TYPE.MSG_TYPE_FULL.value, 'message_type': msg_type.value if msg_type is not None else MSG_TYPE.MSG_TYPE_CHUNK.value if self.nb_received_tokens>1 else MSG_TYPE.MSG_TYPE_FULL.value,
'finished_generating_at': self.connections[client_id]["current_discussion"].current_message.finished_generating_at, 'finished_generating_at': client.discussion.current_message.finished_generating_at,
'parameters':parameters, 'parameters':parameters,
'metadata':metadata 'metadata':metadata
}, to=client_id }, to=client_id
) )
) )
if msg_type != MSG_TYPE.MSG_TYPE_INFO: if msg_type != MSG_TYPE.MSG_TYPE_INFO:
self.connections[client_id]["current_discussion"].update_message(self.connections[client_id]["generated_text"], new_metadata=mtdt, new_ui=ui) client.discussion.update_message(client.generated_text, new_metadata=mtdt, new_ui=ui)
def close_message(self, client_id): def close_message(self, client_id):
if not self.connections[client_id]["current_discussion"]: client = self.session.get_client(client_id)
if not client.discussion:
return return
#fix halucination #fix halucination
self.connections[client_id]["generated_text"]=self.connections[client_id]["generated_text"].split("!@>")[0] client.generated_text=client.generated_text.split("!@>")[0]
# Send final message # Send final message
self.connections[client_id]["current_discussion"].current_message.finished_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S') client.discussion.current_message.finished_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
run_async( run_async(
partial(self.sio.emit,'close_message', { partial(self.sio.emit,'close_message', {
"sender": self.personality.name, "sender": self.personality.name,
"id": self.connections[client_id]["current_discussion"].current_message.id, "id": client.discussion.current_message.id,
"content":self.connections[client_id]["generated_text"], "content":client.generated_text,
'binding': self.config["binding_name"], 'binding': self.config["binding_name"],
'model' : self.config["model_name"], 'model' : self.config["model_name"],
'personality':self.config["personalities"][self.config["active_personality_id"]], 'personality':self.config["personalities"][self.config["active_personality_id"]],
'created_at': self.connections[client_id]["current_discussion"].current_message.created_at, 'created_at': client.discussion.current_message.created_at,
'finished_generating_at': self.connections[client_id]["current_discussion"].current_message.finished_generating_at, 'finished_generating_at': client.discussion.current_message.finished_generating_at,
}, to=client_id }, to=client_id
) )
@ -1194,9 +863,10 @@ class LOLLMSWebUI(LOLLMSElfServer):
""" """
Processes a chunk of generated text Processes a chunk of generated text
""" """
client = self.session.get_client(client_id)
if chunk is None: if chunk is None:
return True return True
if not client_id in list(self.connections.keys()): if not client_id in list(self.session.clients.keys()):
self.error("Connection lost", client_id=client_id) self.error("Connection lost", client_id=client_id)
return return
if message_type == MSG_TYPE.MSG_TYPE_STEP: if message_type == MSG_TYPE.MSG_TYPE_STEP:
@ -1257,20 +927,20 @@ class LOLLMSWebUI(LOLLMSElfServer):
sys.stdout.flush() sys.stdout.flush()
if chunk: if chunk:
self.connections[client_id]["generated_text"] += chunk client.generated_text += chunk
antiprompt = self.personality.detect_antiprompt(self.connections[client_id]["generated_text"]) antiprompt = self.personality.detect_antiprompt(client.generated_text)
if antiprompt: if antiprompt:
ASCIIColors.warning(f"\nDetected hallucination with antiprompt: {antiprompt}") ASCIIColors.warning(f"\nDetected hallucination with antiprompt: {antiprompt}")
self.connections[client_id]["generated_text"] = self.remove_text_from_string(self.connections[client_id]["generated_text"],antiprompt) client.generated_text = self.remove_text_from_string(client.generated_text,antiprompt)
self.update_message(client_id, self.connections[client_id]["generated_text"], parameters, metadata, None, MSG_TYPE.MSG_TYPE_FULL) self.update_message(client_id, client.generated_text, parameters, metadata, None, MSG_TYPE.MSG_TYPE_FULL)
return False return False
else: else:
self.nb_received_tokens += 1 self.nb_received_tokens += 1
if self.connections[client_id]["continuing"] and self.connections[client_id]["first_chunk"]: if client.continuing and client.first_chunk:
self.update_message(client_id, self.connections[client_id]["generated_text"], parameters, metadata) self.update_message(client_id, client.generated_text, parameters, metadata)
else: else:
self.update_message(client_id, chunk, parameters, metadata, msg_type=MSG_TYPE.MSG_TYPE_CHUNK) self.update_message(client_id, chunk, parameters, metadata, msg_type=MSG_TYPE.MSG_TYPE_CHUNK)
self.connections[client_id]["first_chunk"]=False client.first_chunk=False
# if stop generation is detected then stop # if stop generation is detected then stop
if not self.cancel_gen: if not self.cancel_gen:
return True return True
@ -1281,18 +951,18 @@ class LOLLMSWebUI(LOLLMSElfServer):
# Stream the generated text to the main process # Stream the generated text to the main process
elif message_type == MSG_TYPE.MSG_TYPE_FULL: elif message_type == MSG_TYPE.MSG_TYPE_FULL:
self.connections[client_id]["generated_text"] = chunk client.generated_text = chunk
self.nb_received_tokens += 1 self.nb_received_tokens += 1
dt =(datetime.now() - self.start_time).seconds dt =(datetime.now() - self.start_time).seconds
if dt==0: if dt==0:
dt=1 dt=1
spd = self.nb_received_tokens/dt spd = self.nb_received_tokens/dt
ASCIIColors.green(f"Received {self.nb_received_tokens} tokens (speed: {spd:.2f}t/s) ",end="\r",flush=True) ASCIIColors.green(f"Received {self.nb_received_tokens} tokens (speed: {spd:.2f}t/s) ",end="\r",flush=True)
antiprompt = self.personality.detect_antiprompt(self.connections[client_id]["generated_text"]) antiprompt = self.personality.detect_antiprompt(client.generated_text)
if antiprompt: if antiprompt:
ASCIIColors.warning(f"\nDetected hallucination with antiprompt: {antiprompt}") ASCIIColors.warning(f"\nDetected hallucination with antiprompt: {antiprompt}")
self.connections[client_id]["generated_text"] = self.remove_text_from_string(self.connections[client_id]["generated_text"],antiprompt) client.generated_text = self.remove_text_from_string(client.generated_text,antiprompt)
self.update_message(client_id, self.connections[client_id]["generated_text"], parameters, metadata, None, MSG_TYPE.MSG_TYPE_FULL) self.update_message(client_id, client.generated_text, parameters, metadata, None, MSG_TYPE.MSG_TYPE_FULL)
return False return False
self.update_message(client_id, chunk, parameters, metadata, ui=None, msg_type=message_type) self.update_message(client_id, chunk, parameters, metadata, ui=None, msg_type=message_type)
@ -1397,21 +1067,22 @@ class LOLLMSWebUI(LOLLMSElfServer):
return output return output
def start_message_generation(self, message, message_id, client_id, is_continue=False, generation_type=None): def start_message_generation(self, message, message_id, client_id, is_continue=False, generation_type=None):
client = self.session.get_client(client_id)
if self.personality is None: if self.personality is None:
self.warning("Select a personality") self.warning("Select a personality")
return return
ASCIIColors.info(f"Text generation requested by client: {client_id}") ASCIIColors.info(f"Text generation requested by client: {client_id}")
# send the message to the bot # send the message to the bot
print(f"Received message : {message.content}") print(f"Received message : {message.content}")
if self.connections[client_id]["current_discussion"]: if client.discussion:
try: try:
if not self.model: if not self.model:
self.error("No model selected. Please make sure you select a model before starting generation", client_id=client_id) self.error("No model selected. Please make sure you select a model before starting generation", client_id=client_id)
return return
# First we need to send the new message ID to the client # First we need to send the new message ID to the client
if is_continue: if is_continue:
self.connections[client_id]["current_discussion"].load_message(message_id) client.discussion.load_message(message_id)
self.connections[client_id]["generated_text"] = message.content client.generated_text = message.content
else: else:
self.new_message(client_id, self.personality.name, "") self.new_message(client_id, self.personality.name, "")
self.update_message(client_id, "✍ warming up ...", msg_type=MSG_TYPE.MSG_TYPE_STEP_START) self.update_message(client_id, "✍ warming up ...", msg_type=MSG_TYPE.MSG_TYPE_STEP_START)
@ -1420,7 +1091,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
self.discussion_messages, self.current_message, tokens, context_details, internet_search_infos = self.prepare_query(client_id, message_id, is_continue, n_tokens=self.config.min_n_predict, generation_type=generation_type) self.discussion_messages, self.current_message, tokens, context_details, internet_search_infos = self.prepare_query(client_id, message_id, is_continue, n_tokens=self.config.min_n_predict, generation_type=generation_type)
self.prepare_reception(client_id) self.prepare_reception(client_id)
self.generating = True self.generating = True
self.connections[client_id]["processing"]=True client.processing=True
try: try:
self.generate( self.generate(
self.discussion_messages, self.discussion_messages,
@ -1441,7 +1112,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
fn = self.personality.name.lower().replace(' ',"_").replace('.','') fn = self.personality.name.lower().replace(' ',"_").replace('.','')
fn = f"{fn}_{message_id}.wav" fn = f"{fn}_{message_id}.wav"
url = f"audio/{fn}" url = f"audio/{fn}"
self.tts.tts_to_file(self.connections[client_id]["generated_text"], Path(self.personality.audio_samples[0]).name, f"{fn}", language=language) self.tts.tts_to_file(client.generated_text, Path(self.personality.audio_samples[0]).name, f"{fn}", language=language)
fl = f"\n".join([ fl = f"\n".join([
f"<audio controls>", f"<audio controls>",
f' <source src="{url}" type="audio/wav">', f' <source src="{url}" type="audio/wav">',
@ -1461,7 +1132,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
if self.tts is None: if self.tts is None:
self.tts = api.TextToSpeech( kv_cache=True, half=True) self.tts = api.TextToSpeech( kv_cache=True, half=True)
reference_clips = [utils.audio.load_audio(str(p), 22050) for p in self.personality.audio_samples] reference_clips = [utils.audio.load_audio(str(p), 22050) for p in self.personality.audio_samples]
tk = self.model.tokenize(self.connections[client_id]["generated_text"]) tk = self.model.tokenize(client.generated_text)
if len(tk)>100: if len(tk)>100:
chunk_size = 100 chunk_size = 100
@ -1472,7 +1143,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
else: else:
pcm_audio = np.concatenate([pcm_audio, self.tts.tts_with_preset(chunk, voice_samples=reference_clips, preset='ultra_fast').numpy().flatten()]) pcm_audio = np.concatenate([pcm_audio, self.tts.tts_with_preset(chunk, voice_samples=reference_clips, preset='ultra_fast').numpy().flatten()])
else: else:
pcm_audio = self.tts.tts_with_preset(self.connections[client_id]["generated_text"], voice_samples=reference_clips, preset='fast').numpy().flatten() pcm_audio = self.tts.tts_with_preset(client.generated_text, voice_samples=reference_clips, preset='fast').numpy().flatten()
sd.play(pcm_audio, 22050) sd.play(pcm_audio, 22050)
self.personality.step_end("Creating audio output") self.personality.step_end("Creating audio output")
""" """
@ -1513,22 +1184,22 @@ class LOLLMSWebUI(LOLLMSElfServer):
f'</a>', f'</a>',
]) ])
sources_text += '</div>' sources_text += '</div>'
self.connections[client_id]["generated_text"]=self.connections[client_id]["generated_text"].split("!@>")[0] + "\n" + sources_text client.generated_text=client.generated_text.split("!@>")[0] + "\n" + sources_text
self.personality.full(self.connections[client_id]["generated_text"]) self.personality.full(client.generated_text)
except: except Exception as ex:
pass trace_exception(ex)
self.close_message(client_id) self.close_message(client_id)
self.update_message(client_id, "Generating ...", msg_type=MSG_TYPE.MSG_TYPE_STEP_END) self.update_message(client_id, "Generating ...", msg_type=MSG_TYPE.MSG_TYPE_STEP_END)
self.connections[client_id]["processing"]=False client.processing=False
if self.connections[client_id]["schedule_for_deletion"]: if client.schedule_for_deletion:
del self.connections[client_id] self.session.remove_client(client.client_id, client.client_id)
ASCIIColors.success(f" ╔══════════════════════════════════════════════════╗ ") ASCIIColors.success(f" ╔══════════════════════════════════════════════════╗ ")
ASCIIColors.success(f" ║ Done ║ ") ASCIIColors.success(f" ║ Done ║ ")
ASCIIColors.success(f" ╚══════════════════════════════════════════════════╝ ") ASCIIColors.success(f" ╚══════════════════════════════════════════════════╝ ")
if self.config.auto_title: if self.config.auto_title:
d = self.connections[client_id]["current_discussion"] d = client.discussion
ttl = d.title() ttl = d.title()
if ttl is None or ttl=="" or ttl=="untitled": if ttl is None or ttl=="" or ttl=="untitled":
title = self.make_discussion_title(d, client_id=client_id) title = self.make_discussion_title(d, client_id=client_id)

View File

@ -14,7 +14,7 @@ from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from ascii_colors import get_trace_exception, trace_exception from ascii_colors import get_trace_exception, trace_exception
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
from api.db import DiscussionsDB from lollms.databases.discussions_database import DiscussionsDB
from pathlib import Path from pathlib import Path
from safe_store.text_vectorizer import TextVectorizer, VectorizationMethod, VisualizationMethod from safe_store.text_vectorizer import TextVectorizer, VectorizationMethod, VisualizationMethod
import tqdm import tqdm