Upgraded to lollms 10/ compatible with lollms webui 11

This commit is contained in:
Saifeddine ALOUI 2024-08-14 22:15:01 +02:00
parent 1b4c8ca293
commit 7ebeeb347a
29 changed files with 504 additions and 344 deletions

View File

@ -8,7 +8,7 @@ from lollms.config import InstallOption
from lollms.helpers import ASCIIColors, trace_exception
from lollms.com import NotificationType, NotificationDisplayType, LoLLMsCom
from lollms.terminal import MainMenu
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
from lollms.utilities import PromptReshaper
from lollms.client_session import Client, Session
from lollms.databases.skills_database import SkillsLibrary
@ -239,8 +239,8 @@ class LollmsApplication(LoLLMsCom):
messages = client.discussion.get_messages()
# Extract relevant information from messages
def cb(str, MSG_TYPE_=MSG_TYPE.MSG_TYPE_FULL, dict=None, list=None):
if MSG_TYPE_!=MSG_TYPE.MSG_TYPE_CHUNK:
def cb(str, MSG_TYPE_=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, dict=None, list=None):
if MSG_TYPE_!=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK:
self.ShowBlockingMessage(f"Learning\n{str}")
bk_cb = self.tasks_library.callback
self.tasks_library.callback = cb
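
For anyone porting callbacks to the new API, here is a minimal, self-contained sketch of the pattern this hunk adopts. The enum is a stand-in with only the two members used above (the real values live in lollms.types), and the print stands in for ShowBlockingMessage:

```python
from enum import Enum

class MSG_OPERATION_TYPE(Enum):
    # stand-in: only the two members this callback needs are sketched
    MSG_OPERATION_TYPE_SET_CONTENT = 0   # replaces MSG_TYPE.MSG_TYPE_FULL
    MSG_OPERATION_TYPE_ADD_CHUNK = 1     # replaces MSG_TYPE.MSG_TYPE_CHUNK

def cb(text, op=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT,
       parameters=None, metadata=None):
    # chunk operations stream token-by-token; everything else sets the
    # whole message, so only non-chunk operations refresh the status UI
    if op != MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK:
        print(f"Learning\n{text}")
    return True   # returning False asks the generator to stop
```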
@ -334,12 +334,15 @@ class LollmsApplication(LoLLMsCom):
def start_servers(self):
ASCIIColors.yellow("* - * - * - Starting services - * - * - *")
ASCIIColors.blue("Loading local TTT services")
tts_services = []
stt_services = []
def start_ttt(*args, **kwargs):
if self.config.enable_ollama_service:
try:
from lollms.services.ollama.lollms_ollama import Service
self.ollama = Service(self, base_url=self.config.ollama_base_url)
tts_services.append("ollama")
except Exception as ex:
trace_exception(ex)
self.warning(f"Couldn't load Ollama")
@ -348,19 +351,31 @@ class LollmsApplication(LoLLMsCom):
try:
from lollms.services.vllm.lollms_vllm import Service
self.vllm = Service(self, base_url=self.config.vllm_url)
tts_services.append("vllm")
except Exception as ex:
trace_exception(ex)
self.warning(f"Couldn't load vllm")
ASCIIColors.blue("Loading loacal STT services")
ASCIIColors.execute_with_animation("Loading local TTT services", start_ttt,ASCIIColors.color_blue)
print("OK")
def start_stt(*args, **kwargs):
if self.config.whisper_activate or self.config.active_stt_service == "whisper":
try:
from lollms.services.whisper.lollms_whisper import LollmsWhisper
self.whisper = LollmsWhisper(self, self.config.whisper_model, self.lollms_paths.personal_outputs_path)
stt_services.append("whisper")
except Exception as ex:
trace_exception(ex)
if self.config.active_stt_service == "openai_whisper":
from lollms.services.openai_whisper.lollms_openai_whisper import LollmsOpenAIWhisper
self.stt = LollmsOpenAIWhisper(self, self.config.openai_whisper_model, self.config.openai_whisper_key)
elif self.config.active_stt_service == "whisper":
from lollms.services.whisper.lollms_whisper import LollmsWhisper
self.stt = LollmsWhisper(self, self.config.whisper_model)
ASCIIColors.blue("Loading local TTS services")
ASCIIColors.execute_with_animation("Loading loacal STT services", start_stt, ASCIIColors.color_blue)
print("OK")
def start_tts(*args, **kwargs):
if self.config.active_tts_service == "xtts":
ASCIIColors.yellow("Loading XTTS")
try:
@ -379,8 +394,19 @@ class LollmsApplication(LoLLMsCom):
except Exception as ex:
trace_exception(ex)
self.warning(f"Couldn't load XTTS")
if self.config.active_tts_service == "eleven_labs_tts":
from lollms.services.eleven_labs_tts.lollms_eleven_labs_tts import LollmsElevenLabsTTS
self.tts = LollmsElevenLabsTTS(self, self.config.elevenlabs_tts_model_id, self.config.elevenlabs_tts_voice_id, self.config.elevenlabs_tts_key, stability=self.config.elevenlabs_tts_voice_stability, similarity_boost=self.config.elevenlabs_tts_voice_boost)
elif self.config.active_tts_service == "openai_tts":
from lollms.services.open_ai_tts.lollms_openai_tts import LollmsOpenAITTS
self.tts = LollmsOpenAITTS(self, self.config.openai_tts_model, self.config.openai_tts_voice, self.config.openai_tts_key)
elif self.config.active_tts_service == "xtts" and self.xtts:
self.tts = self.xtts
ASCIIColors.blue("Loading local TTI services")
ASCIIColors.execute_with_animation("Loading loacal TTS services", start_tts, ASCIIColors.color_blue)
print("OK")
def start_tti(*args, **kwargs):
if self.config.enable_sd_service:
try:
from lollms.services.sd.lollms_sd import LollmsSD
@ -403,7 +429,6 @@ class LollmsApplication(LoLLMsCom):
trace_exception(ex)
self.warning(f"Couldn't load Motion control")
ASCIIColors.blue("Activating TTI service")
if self.config.active_tti_service == "diffusers":
from lollms.services.diffusers.lollms_diffusers import LollmsDiffusers
self.tti = LollmsDiffusers(self)
@ -426,25 +451,8 @@ class LollmsApplication(LoLLMsCom):
from lollms.services.comfyui.lollms_comfyui import LollmsComfyUI
self.tti = LollmsComfyUI(self, comfyui_base_url=self.config.comfyui_base_url)
ASCIIColors.blue("Activating TTS services")
if self.config.active_tts_service == "eleven_labs_tts":
from lollms.services.eleven_labs_tts.lollms_eleven_labs_tts import LollmsElevenLabsTTS
self.tts = LollmsElevenLabsTTS(self, self.config.elevenlabs_tts_model_id, self.config.elevenlabs_tts_voice_id, self.config.elevenlabs_tts_key, stability=self.config.elevenlabs_tts_voice_stability, similarity_boost=self.config.elevenlabs_tts_voice_boost)
elif self.config.active_tts_service == "openai_tts":
from lollms.services.open_ai_tts.lollms_openai_tts import LollmsOpenAITTS
self.tts = LollmsOpenAITTS(self, self.config.openai_tts_model, self.config.openai_tts_voice, self.config.openai_tts_key)
elif self.config.active_tts_service == "xtts" and self.xtts:
self.tts = self.xtts
ASCIIColors.blue("Loading STT services")
if self.config.active_stt_service == "openai_whisper":
from lollms.services.openai_whisper.lollms_openai_whisper import LollmsOpenAIWhisper
self.stt = LollmsOpenAIWhisper(self, self.config.openai_whisper_model, self.config.openai_whisper_key)
elif self.config.active_stt_service == "whisper":
from lollms.services.whisper.lollms_whisper import LollmsWhisper
self.stt = LollmsWhisper(self, self.config.whisper_model)
ASCIIColors.execute_with_animation("Loading loacal TTI services", start_tti, ASCIIColors.color_blue)
print("OK")
def verify_servers(self, reload_all=False):
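
The whole refactor above funnels each service group (TTT, STT, TTS, TTI) into a worker function handed to ASCIIColors.execute_with_animation. The helper's implementation is not part of this diff; the stand-in below is an assumption inferred from its call sites (message, callable, color), with the color argument omitted:

```python
import itertools
import sys
import threading
import time

def execute_with_animation(message, func, *args, **kwargs):
    # hypothetical stand-in for ASCIIColors.execute_with_animation:
    # spin a small console animation while func runs, then report done
    done = threading.Event()

    def spin():
        for ch in itertools.cycle("|/-\\"):
            if done.is_set():
                break
            sys.stdout.write(f"\r{message} {ch}")
            sys.stdout.flush()
            time.sleep(0.1)

    spinner = threading.Thread(target=spin, daemon=True)
    spinner.start()
    try:
        return func(*args, **kwargs)
    finally:
        done.set()
        spinner.join()
        sys.stdout.write(f"\r{message} OK\n")

# usage mirrors the diff: each service group becomes a worker function
def start_stt():
    time.sleep(0.5)   # pretend to load whisper here

execute_with_animation("Loading local STT services", start_stt)
```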
@ -1345,7 +1353,7 @@ class LollmsApplication(LoLLMsCom):
# Check if the message content is not empty and visible to the AI
if message.content != '' and (
message.message_type <= MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER.value and message.message_type != MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value):
message.message_type <= MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER.value and message.message_type != MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI.value):
# Tokenize the message content
if self.config.use_model_name_in_discussions:
@ -1377,7 +1385,7 @@ class LollmsApplication(LoLLMsCom):
# Check if the message content is not empty and visible to the AI
if message.content != '' and (
message.message_type <= MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER.value and message.message_type != MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value):
message.message_type <= MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER.value and message.message_type != MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI.value):
if self.config.use_model_name_in_discussions:
if message.model:

View File

@ -4,7 +4,7 @@ from lollms.paths import LollmsPaths
from threading import Thread
class Client:
def __init__(self, lollms_paths:LollmsPaths, client_id, discussion:Discussion, db:DiscussionsDB):
def __init__(self, lollms_paths:LollmsPaths, client_id:str, discussion:Discussion, db:DiscussionsDB):
self.client_id = client_id
self.discussion = discussion
self.lollms_paths = lollms_paths

View File

@ -1,8 +1,9 @@
from ascii_colors import ASCIIColors
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES, MSG_TYPE
from typing import Callable
import socketio
from enum import Enum
from lollms.types import MSG_OPERATION_TYPE
class NotificationType(Enum):
"""Notification types."""
@ -155,12 +156,12 @@ class LoLLMsCom:
parameters=None,
metadata=None,
ui=None,
message_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_FULL,
message_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CONTENT,
sender_type:SENDER_TYPES=SENDER_TYPES.SENDER_TYPES_AI,
open=False
):
pass
def full(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end
Args:

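
full() is renamed to set_message_content() across the API. Personalities written against the old name would break; a thin deprecation shim like the following (my suggestion, not part of this commit) could keep them alive during migration:

```python
import warnings

class LoLLMsComCompat:
    def set_message_content(self, full_text, callback=None):
        ...  # new implementation lives here

    def full(self, full_text, callback=None):
        # deprecated pre-v10 spelling; forwards to the renamed method
        warnings.warn("full() is deprecated; use set_message_content()",
                      DeprecationWarning, stacklevel=2)
        return self.set_message_content(full_text, callback)
```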
View File

@ -3,7 +3,7 @@ import sqlite3
from pathlib import Path
from datetime import datetime
from ascii_colors import ASCIIColors, trace_exception
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.types import BindingType
from lollms.utilities import PackageManager, discussion_path_to_url
from lollms.paths import LollmsPaths
@ -18,7 +18,7 @@ import json
import shutil
from lollms.tasks import TasksLibrary
import json
from typing import Dict, Any
from typing import Dict, Any, List
__author__ = "parisneo"
__github__ = "https://github.com/ParisNeo/lollms-webui"
@ -40,7 +40,7 @@ class DiscussionsDB:
self.discussion_db_file_path = self.discussion_db_path/"database.db"
def create_tables(self):
db_version = 13
db_version = 14
with sqlite3.connect(self.discussion_db_file_path) as conn:
cursor = conn.cursor()
@ -77,6 +77,7 @@ class DiscussionsDB:
finished_generating_at TIMESTAMP,
nb_tokens INT,
discussion_id INTEGER NOT NULL,
steps TEXT,
metadata TEXT,
ui TEXT,
FOREIGN KEY (discussion_id) REFERENCES discussion(id),
@ -119,6 +120,7 @@ class DiscussionsDB:
'created_at',
'metadata',
'ui',
'steps',
'started_generating_at',
'finished_generating_at',
'nb_tokens',
@ -138,6 +140,8 @@ class DiscussionsDB:
cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TIMESTAMP")
elif column=='metadata':
cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TEXT")
elif column=='steps':
cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TEXT")
elif column=='message_type':
cursor.execute(f"ALTER TABLE {table} RENAME COLUMN type TO {column}")
elif column=='sender_type':
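
The migration bumps db_version from 13 to 14 and relies on a reconcile-columns pattern: list the expected columns, PRAGMA the live table, and ALTER TABLE ... ADD COLUMN whatever is missing. Condensed to its essentials (column list abridged):

```python
import sqlite3

EXPECTED = {"steps": "TEXT", "metadata": "TEXT",
            "started_generating_at": "TIMESTAMP"}

def reconcile_columns(conn: sqlite3.Connection, table: str = "message"):
    cur = conn.cursor()
    # PRAGMA table_info rows are (cid, name, type, notnull, dflt, pk)
    existing = {row[1] for row in cur.execute(f"PRAGMA table_info({table})")}
    for column, sql_type in EXPECTED.items():
        if column not in existing:
            # SQLite fills the new column with NULLs for existing rows,
            # which the json.loads-with-fallback reader above tolerates
            cur.execute(f"ALTER TABLE {table} ADD COLUMN {column} {sql_type}")
    conn.commit()
```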
@ -497,11 +501,12 @@ class Message:
def __init__(
self,
discussion_id,
discussions_db,
discussions_db: DiscussionsDB,
message_type,
sender_type,
sender,
content,
steps:list = [],
metadata = None,
ui = None,
rank = 0,
@ -523,6 +528,10 @@ class Message:
self.sender = sender
self.sender_type = sender_type
self.content = content
try:
self.steps = steps if type(steps)==list else json.loads(steps)
except:
self.steps = []
self.message_type = message_type
self.rank = rank
self.parent_message_id = parent_message_id
@ -538,8 +547,8 @@ class Message:
if insert_into_db:
self.id = self.discussions_db.insert(
"INSERT INTO message (sender, message_type, sender_type, sender, content, metadata, ui, rank, parent_message_id, binding, model, personality, created_at, started_generating_at, finished_generating_at, nb_tokens, discussion_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(sender, message_type, sender_type, sender, content, metadata, ui, rank, parent_message_id, binding, model, personality, created_at, started_generating_at, finished_generating_at, nb_tokens, discussion_id)
"INSERT INTO message (sender, message_type, sender_type, sender, content, steps, metadata, ui, rank, parent_message_id, binding, model, personality, created_at, started_generating_at, finished_generating_at, nb_tokens, discussion_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(sender, message_type, sender_type, sender, content, str(steps), metadata, ui, rank, parent_message_id, binding, model, personality, created_at, started_generating_at, finished_generating_at, nb_tokens, discussion_id)
)
else:
self.id = id
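
Note a subtlety: the INSERT stores str(steps) (Python repr, single quotes) while update_steps below stores json.dumps(self.steps); the replace("'", '"') fallback in to_json exists to recover the repr form. A quick illustration of why the two encodings differ:

```python
import json

steps = [{"id": 0, "text": "analyze", "done": False}]

as_repr = str(steps)         # "[{'id': 0, 'text': 'analyze', 'done': False}]"
as_json = json.dumps(steps)  # '[{"id": 0, "text": "analyze", "done": false}]'

json.loads(as_json)          # round-trips cleanly
# json.loads(as_repr) raises: JSON needs double quotes and lowercase booleans.
# The quote-swap fallback recovers simple cases, but not apostrophes in step
# text or Python's True/False spelling:
json.loads(as_repr.replace("'", '"').replace("False", "false").replace("True", "true"))
```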
@ -554,6 +563,7 @@ class Message:
"sender",
"content",
"metadata",
"steps",
"ui",
"rank",
"parent_message_id",
@ -607,7 +617,7 @@ class Message:
params = [new_content]
if new_metadata is not None:
text+=", metadata = ?"
params.append(new_metadata)
params.append(new_metadata if type(new_metadata)==str else json.dumps(new_metadata) if type(new_metadata)==dict else None)
self.metadata=new_metadata
if new_ui is not None:
text+=", ui = ?"
@ -632,14 +642,105 @@ class Message:
text, tuple(params)
)
def update_content(self, new_content, started_generating_at=None, nb_tokens=None, commit=True):
self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
text = f"UPDATE message SET content = ?"
params = [new_content]
if started_generating_at is not None:
text+=", started_generating_at = ?"
params.append(started_generating_at)
self.started_generating_at=started_generating_at
if nb_tokens is not None:
text+=", nb_tokens = ?"
params.append(nb_tokens)
self.nb_tokens=nb_tokens
text +=", finished_generating_at = ? WHERE id = ?"
params.append(self.finished_generating_at)
params.append(self.id)
self.discussions_db.update(
text, tuple(params)
)
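
update_content builds its UPDATE clause incrementally so optional columns are only touched when a value is supplied, while every value still travels as a bound parameter. The shape of the pattern, reduced to a standalone function:

```python
from datetime import datetime

def build_update(new_content, started_generating_at=None, nb_tokens=None, row_id=1):
    finished_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    sql = "UPDATE message SET content = ?"
    params = [new_content]
    if started_generating_at is not None:
        sql += ", started_generating_at = ?"   # optional columns join only when provided
        params.append(started_generating_at)
    if nb_tokens is not None:
        sql += ", nb_tokens = ?"
        params.append(nb_tokens)
    sql += ", finished_generating_at = ? WHERE id = ?"
    params += [finished_at, row_id]
    return sql, tuple(params)   # values stay bound parameters, never interpolated
```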
def update_steps(self, steps:list, step_type:str="", status:bool=True):
self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
text = f"UPDATE message SET steps = ?"
self.steps = steps
params = [json.dumps(self.steps)]
text +=" WHERE id = ?"
params.append(self.id)
self.discussions_db.update(
text, tuple(params)
)
def update_metadata(self, new_metadata):
self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
text = f"UPDATE message SET metadata = ?"
params = [None if new_metadata is None else new_metadata if type(new_metadata)==str else json.dumps(new_metadata)]
text +=", finished_generating_at = ? WHERE id = ?"
params.append(self.finished_generating_at)
params.append(self.id)
self.discussions_db.update(
text, tuple(params)
)
def update_ui(self, new_ui):
self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
text = f"UPDATE message SET ui = ?"
params = [str(new_ui) if new_ui is not None else None]
text +=", finished_generating_at = ? WHERE id = ?"
params.append(self.finished_generating_at)
params.append(self.id)
self.discussions_db.update(
text, tuple(params)
)
def add_step(self, step: str, step_type: str, status: bool, done: bool):
self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
# Check if the step text already exists
for existing_step in self.steps:
if existing_step['text'] == step:
# Update the existing step
existing_step['step_type'] = step_type
existing_step['status'] = status
existing_step['done'] = done
break
else:
# If it doesn't exist, append a new step
self.steps.append({
"id": len(self.steps),
"text": step,
"step_type": step_type,
"status": status,
"done": done
})
# Prepare the SQL update statement
text = "UPDATE message SET steps = ? WHERE id = ?"
params = [json.dumps(self.steps), self.id]
# Update the database
self.discussions_db.update(text, tuple(params))
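
add_step is effectively an upsert keyed on the step text: a second call with the same text mutates the existing entry rather than appending a duplicate. A tiny runnable model of that behavior:

```python
class StepListDemo:
    # minimal stand-in mirroring Message.add_step's upsert-by-text logic
    def __init__(self):
        self.steps = []

    def add_step(self, step, step_type, status, done):
        for s in self.steps:
            if s["text"] == step:          # same text: update in place
                s.update(step_type=step_type, status=status, done=done)
                break
        else:
            self.steps.append({"id": len(self.steps), "text": step,
                               "step_type": step_type, "status": status,
                               "done": done})

msg = StepListDemo()
msg.add_step("Searching the web", "step_start", True, False)
msg.add_step("Searching the web", "step_end", True, True)   # upsert, not append
assert len(msg.steps) == 1 and msg.steps[0]["done"] is True
```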
def to_json(self):
attributes = Message.get_fields()
msgJson = {}
for attribute_name in attributes:
attribute_value = getattr(self, attribute_name, None)
if attribute_name=="metadata":
if attribute_name in ["metadata","steps"]:
if type(attribute_value) == str:
msgJson[attribute_name] = json.loads(attribute_value)
try:
msgJson[attribute_name] = json.loads(attribute_value.replace("'", '"'))
except Exception as ex:
trace_exception(ex)
msgJson[attribute_name] = None
else:
msgJson[attribute_name] = attribute_value
else:
@ -647,7 +748,7 @@ class Message:
return msgJson
class Discussion:
def __init__(self, lollms:LoLLMsCom, discussion_id, discussions_db:DiscussionsDB):
def __init__(self, lollms:LoLLMsCom, discussion_id:int, discussions_db:DiscussionsDB):
self.lollms = lollms
self.current_message = None
self.discussion_id = discussion_id
@ -725,7 +826,7 @@ class Discussion:
try:
self.vectorizer.remove_document(fn)
if callback is not None:
callback("File removed successfully",MSG_TYPE.MSG_TYPE_INFO)
callback("File removed successfully",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
return True
except ValueError as ve:
ASCIIColors.error(f"Couldn't remove the file")
@ -778,7 +879,7 @@ class Discussion:
if path.suffix in [".wav",".mp3"]:
self.audio_files.append(path)
if process:
self.lollms.new_message(client.client_id if client is not None else 0, content = "", message_type = MSG_TYPE.MSG_TYPE_FULL)
self.lollms.new_message(client.client_id if client is not None else 0, content = "", message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
if self.lollms.stt is None:
self.lollms.info("Please select an stt engine in the services settings first")
self.lollms.info(f"Transcribing ... ")
@ -799,7 +900,7 @@ class Discussion:
pth = str(view_file).replace("\\","/").split('/')
if "discussion_databases" in pth:
pth = discussion_path_to_url(view_file)
self.lollms.new_message(client.client_id if client is not None else 0, content = "", message_type = MSG_TYPE.MSG_TYPE_FULL)
self.lollms.new_message(client.client_id if client is not None else 0, content = "", message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
output = f'<img src="{pth}" width="800">\n\n'
self.lollms.full(output, client_id=client.client_id)
self.lollms.close_message(client.client_id if client is not None else 0)
@ -827,7 +928,7 @@ class Discussion:
ASCIIColors.error("Couldn't create new message")
ASCIIColors.info("Received image file")
if callback is not None:
callback("Image file added successfully", MSG_TYPE.MSG_TYPE_INFO)
callback("Image file added successfully", MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
else:
try:
# self.ShowBlockingMessage("Adding file to vector store.\nPlease stand by")
@ -845,7 +946,7 @@ class Discussion:
self.vectorizer.add_document(path.stem, data, path, True)
self.vectorizer.build_index()
if callback is not None:
callback("File added successfully",MSG_TYPE.MSG_TYPE_INFO)
callback("File added successfully",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
self.lollms.HideBlockingMessage(client.client_id)
return True
except Exception as e:
@ -866,15 +967,16 @@ class Discussion:
self,
message_type,
sender_type,
sender,
content,
sender:str,
content:str,
steps:list,
metadata=None,
ui=None,
rank=0,
ui:str|None=None,
rank:int=0,
parent_message_id=0,
binding="",
model ="",
personality="",
binding:str="",
model:str ="",
personality:str="",
created_at=None,
started_generating_at=None,
finished_generating_at=None,
@ -908,6 +1010,7 @@ class Discussion:
sender_type,
sender,
content,
steps,
metadata,
ui,
rank,
@ -1017,7 +1120,7 @@ class Discussion:
f"DELETE FROM discussion WHERE id={self.discussion_id}"
)
def get_messages(self):
def get_messages(self)->List[Message]:
"""Gets a list of messages information
Returns:
@ -1062,6 +1165,44 @@ class Discussion:
"""
self.current_message.update(new_content, new_metadata, new_ui, started_generating_at, nb_tokens)
def update_message_content(self, new_content, started_generating_at=None, nb_tokens=None):
"""Updates the content of a message
Args:
new_content (str): The new message content
"""
self.current_message.update_content(new_content, started_generating_at, nb_tokens)
def update_message_steps(self, steps):
"""Updates the content of a message
Args:
message_id (int): The id of the message to be changed
new_content (str): The nex message content
"""
self.current_message.update_steps(new_content, started_generating_at, nb_tokens)
def update_message_metadata(self, new_metadata):
"""Updates the content of a message
Args:
message_id (int): The id of the message to be changed
new_content (str): The nex message content
"""
self.current_message.update_metadata(new_metadata)
def update_message_ui(self, new_ui):
"""Updates the content of a message
Args:
message_id (int): The id of the message to be changed
new_content (str): The nex message content
"""
self.current_message.update_ui(new_ui)
def edit_message(self, message_id, new_content, new_metadata=None, new_ui=None):
"""Edits the content of a message

View File

@ -9,7 +9,7 @@ License: Apache 2.0
from lollms.utilities import PackageManager
from lollms.com import LoLLMsCom
from lollms.utilities import trace_exception, run_async, install_conda_package
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
from lollms.client_session import Session
from ascii_colors import ASCIIColors
import platform

View File

@ -33,7 +33,7 @@ import subprocess
import yaml
from ascii_colors import ASCIIColors
import time
from lollms.types import MSG_TYPE, SUMMARY_MODE
from lollms.types import MSG_OPERATION_TYPE, SUMMARY_MODE
import json
from typing import Any, List, Optional, Type, Callable, Dict, Any, Union
import json
@ -118,7 +118,7 @@ class AIPersonality:
ignore_discussion_documents_rag=False,
is_relative_path=True,
installation_option:InstallOption=InstallOption.INSTALL_IF_NECESSARY,
callback: Callable[[str, MSG_TYPE, dict, list], bool]=None
callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None
):
"""
Initialize an AIPersonality instance.
@ -298,7 +298,7 @@ class AIPersonality:
def new_message(self, message_text:str, message_type:MSG_TYPE= MSG_TYPE.MSG_TYPE_FULL, metadata=[], callback: Callable[[str, int, dict, list, Any], bool]=None):
def new_message(self, message_text:str, message_type:MSG_OPERATION_TYPE= MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, metadata=[], callback: Callable[[str, int, dict, list, Any], bool]=None):
"""This sends step rogress to front end
Args:
@ -309,9 +309,9 @@ class AIPersonality:
callback = self.callback
if callback:
callback(message_text, MSG_TYPE.MSG_TYPE_NEW_MESSAGE, parameters={'type':message_type.value,'metadata':metadata}, personality=self)
callback(message_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_NEW_MESSAGE, parameters={'type':message_type.value,'metadata':metadata}, personality=self)
def full(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end
Args:
@ -322,9 +322,9 @@ class AIPersonality:
callback = self.callback
if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL)
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
def ui(self, ui_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def ui(self, ui_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends ui text to front end
Args:
@ -335,10 +335,10 @@ class AIPersonality:
callback = self.callback
if callback:
callback(ui_text, MSG_TYPE.MSG_TYPE_UI)
callback(ui_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI)
def full_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def set_message_content_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end (INVISIBLE to AI)
Args:
@ -349,9 +349,9 @@ class AIPersonality:
callback = self.callback
if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI)
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI)
def full_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def set_message_content_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end (INVISIBLE to user)
Args:
@ -362,7 +362,7 @@ class AIPersonality:
callback = self.callback
if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER)
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER)
def build_prompt(self, prompt_parts:List[str], sacrifice_id:int=-1, context_size:int=None, minimum_spare_context_size:int=None):
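
Taken together, the renames in this file follow one rule: full* becomes set_message_content*, and operations move from MSG_TYPE to MSG_OPERATION_TYPE. A porting map read off this diff, usable by a rough migration script (a real port should prefer an AST rewrite over string replacement):

```python
METHOD_RENAMES = {
    "full": "set_message_content",
    "full_invisible_to_ai": "set_message_content_invisible_to_ai",
    "full_invisible_to_user": "set_message_content_invisible_to_user",
}

ENUM_RENAMES = {
    # longest names first so naive replace doesn't clobber prefixes
    "MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI": "MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI",
    "MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER": "MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER",
    "MSG_TYPE.MSG_TYPE_FULL": "MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT",
    "MSG_TYPE.MSG_TYPE_CHUNK": "MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK",
    "MSG_TYPE.MSG_TYPE_UI": "MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI",
    "MSG_TYPE.MSG_TYPE_STEP_START": "MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_START",
}

def port_source(src: str) -> str:
    # naive textual port of a personality script to the new names
    for old, new in ENUM_RENAMES.items():
        src = src.replace(old, new)
    for old, new in METHOD_RENAMES.items():
        src = src.replace(f".{old}(", f".{new}(")
    return src

print(port_source("self.full(text)"))   # -> self.set_message_content(text)
```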
@ -533,7 +533,7 @@ class AIPersonality:
ASCIIColors.red("Model failed to rank inputs")
return None
def step_start(self, step_text, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def step_start(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This triggers a step start
Args:
@ -544,7 +544,7 @@ class AIPersonality:
callback = self.callback
if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_START)
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_START)
def step_end(self, step_text, status=True, callback: Callable[[str, int, dict, list], bool]=None):
"""This triggers a step end
@ -557,9 +557,9 @@ class AIPersonality:
callback = self.callback
if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_END, {'status':status})
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
def step(self, step_text, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def step(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This triggers a step information
Args:
@ -575,7 +575,7 @@ class AIPersonality:
callback = self.callback
if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP)
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP)
def print_prompt(self, title, prompt):
ASCIIColors.red("*-*-*-*-*-*-*-* ", end="")
@ -696,14 +696,14 @@ class AIPersonality:
def process(self, text:str, message_type:MSG_TYPE, callback=None, show_progress=False):
def process(self, text:str, message_type:MSG_OPERATION_TYPE, callback=None, show_progress=False):
if callback is None:
callback = self.callback
if text is None:
return True
if message_type==MSG_TYPE.MSG_TYPE_CHUNK:
if message_type==MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK:
bot_says = self.bot_says + text
elif message_type==MSG_TYPE.MSG_TYPE_FULL:
elif message_type==MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT:
bot_says = text
if show_progress:
@ -773,7 +773,7 @@ class AIPersonality:
return self.bot_says
def setCallback(self, callback: Callable[[str, MSG_TYPE, dict, list], bool]):
def setCallback(self, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]):
self.callback = callback
if self._processor:
self._processor.callback = callback
@ -967,7 +967,7 @@ class AIPersonality:
try:
self.vectorizer.remove_document(fn)
if callback is not None:
callback("File removed successfully",MSG_TYPE.MSG_TYPE_INFO)
callback("File removed successfully",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
return True
except ValueError as ve:
ASCIIColors.error(f"Couldn't remove the file")
@ -1018,7 +1018,7 @@ class AIPersonality:
f.write(text)
self.info(f"File saved to {transcription_fn}")
self.full(text)
self.set_message_content(text)
elif path.suffix in [".png",".jpg",".jpeg",".gif",".bmp",".svg",".webp"]:
self.image_files.append(path)
if process:
@ -1027,9 +1027,9 @@ class AIPersonality:
pth = str(path).replace("\\","/").split('/')
if "discussion_databases" in pth:
pth = discussion_path_to_url(path)
self.new_message("",MSG_TYPE.MSG_TYPE_FULL)
self.new_message("",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
output = f'<img src="{pth}" width="800">\n\n'
self.full(output)
self.set_message_content(output)
self.app.close_message(client.client_id if client is not None else 0)
if self.model.binding_type not in [BindingType.TEXT_IMAGE, BindingType.TEXT_IMAGE_VIDEO]:
@ -1040,7 +1040,7 @@ class AIPersonality:
img = img.convert("RGB")
output += "## image description :\n"+ self.model.interrogate_blip([img])[0]
# output += "## image description :\n"+ self.model.qna_blip([img],"q:Describe this photo with as much details as possible.\na:")[0]
self.full(output)
self.set_message_content(output)
self.app.close_message(client.client_id if client is not None else 0)
self.HideBlockingMessage("Understanding image (please wait)")
if self.config.debug:
@ -1055,7 +1055,7 @@ class AIPersonality:
ASCIIColors.error("Couldn't create new message")
ASCIIColors.info("Received image file")
if callback is not None:
callback("Image file added successfully", MSG_TYPE.MSG_TYPE_INFO)
callback("Image file added successfully", MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
else:
try:
# self.ShowBlockingMessage("Adding file to vector store.\nPlease stand by")
@ -1075,7 +1075,7 @@ class AIPersonality:
self.vectorizer.add_document(path.stem, data, path, True)
self.vectorizer.build_index()
if callback is not None:
callback("File added successfully",MSG_TYPE.MSG_TYPE_INFO)
callback("File added successfully",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
self.HideBlockingMessage(client.client_id)
return True
except Exception as e:
@ -2030,7 +2030,7 @@ class StateMachine:
def process_state(self, command, full_context, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None, context_state:dict=None, client:Client=None):
def process_state(self, command, full_context, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None, context_state:dict=None, client:Client=None):
"""
Process the given command based on the current state.
@ -2322,7 +2322,7 @@ class APScript(StateMachine):
def add_file(self, path, client:Client, callback=None, process=True):
self.personality.add_file(path, client=client,callback=callback, process=process)
if callback is not None:
callback("File added successfully",MSG_TYPE.MSG_TYPE_INFO)
callback("File added successfully",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
return True
def remove_file(self, path):
@ -2385,7 +2385,7 @@ class APScript(StateMachine):
return self.personality.generate(prompt, max_size, temperature, top_k, top_p, repeat_penalty, repeat_last_n, callback, debug=debug)
def run_workflow(self, prompt:str, previous_discussion_text:str="", callback: Callable[[str, MSG_TYPE, dict, list], bool]=None, context_details:dict=None, client:Client=None):
def run_workflow(self, prompt:str, previous_discussion_text:str="", callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None, context_details:dict=None, client:Client=None):
"""
This function generates code based on the given parameters.
@ -2865,29 +2865,29 @@ class APScript(StateMachine):
prompt_parts[sacrifice_id] = sacrifice_text
return self.separator_template.join([s for s in prompt_parts if s!=""])
# ================================================= Sending commands to ui ===========================================
def add_collapsible_entry(self, title, content, subtitle=""):
return "\n".join(
[
f'<details class="flex w-full rounded-xl border border-gray-200 bg-white shadow-sm dark:border-gray-800 dark:bg-gray-900 mb-3.5 max-w-full svelte-1escu1z" open="">',
f' <summary class="grid w-full select-none grid-cols-[40px,1fr] items-center gap-2.5 p-2 svelte-1escu1z">',
f' <dl class="leading-4">',
f' <dd class="text-sm"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-arrow-right">',
f' <line x1="5" y1="12" x2="19" y2="12"></line>',
f' <polyline points="12 5 19 12 12 19"></polyline>',
def add_collapsible_entry(self, title, content, subtitle="", open_by_default=False):
open_attr = 'open' if open_by_default else ''
return "\n".join([
f'<details class="w-full rounded-xl border border-gray-200 bg-white shadow-sm dark:border-gray-700 dark:bg-gray-800 mb-4 transition-all duration-300 ease-in-out hover:shadow-md focus-within:ring-2 focus-within:ring-blue-500 dark:focus-within:ring-blue-400" {open_attr}>',
f' <summary class="flex items-center justify-between p-4 cursor-pointer select-none transition-all duration-300 ease-in-out">',
f' <div class="flex items-center space-x-3">',
f' <div class="flex-shrink-0">',
f' <svg class="w-5 h-5 text-gray-500 dark:text-gray-400 transition-transform duration-300 transform group-open:rotate-90" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor">',
f' <path fill-rule="evenodd" d="M7.293 14.707a1 1 0 010-1.414L10.586 10 7.293 6.707a1 1 0 011.414-1.414l4 4a1 1 0 010 1.414l-4 4a1 1 0 01-1.414 0z" clip-rule="evenodd" />',
f' </svg>',
f' </dd>',
f' </dl>',
f' <dl class="leading-4">',
f' <dd class="text-sm"><h3>{title}</h3></dd>',
f' <dt class="flex items-center gap-1 truncate whitespace-nowrap text-[.82rem] text-gray-400">{subtitle}</dt>',
f' </dl>',
f' </div>',
f' <div>',
f' <h3 class="text-lg font-semibold text-gray-900 dark:text-white">{title}</h3>',
f' <p class="text-sm text-gray-500 dark:text-gray-400">{subtitle}</p>',
f' </div>',
f' </div>',
f' </summary>',
f' <div class="content px-5 pb-5 pt-4">',
f' <div class="px-4 pb-4 pt-2 text-gray-700 dark:text-gray-300 transition-all duration-300 ease-in-out max-h-0 overflow-hidden group-open:max-h-40">',
content,
f' </div>',
f' </details>\n'
]
)
f'</details>\n'
])
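
The rewritten helper also gains open_by_default. A usage sketch, assuming self is an APScript inside a personality workflow (title, content, and subtitle values are illustrative):

```python
# assuming `self` is an APScript instance inside a personality workflow
html = self.add_collapsible_entry(
    title="Function call",
    content="<pre>search_web(query='lollms')</pre>",
    subtitle="executed in 0.4s",
    open_by_default=True,   # emits <details ... open>
)
self.ui(html)   # ships the fragment to the front end as a UI operation
```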
def internet_search_with_vectorization(self, query, quick_search:bool=False ):
@ -2918,7 +2918,7 @@ class APScript(StateMachine):
return chunks
def step_start(self, step_text, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def step_start(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This triggers a step start
Args:
@ -2929,7 +2929,7 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_START)
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_START)
def step_end(self, step_text, status=True, callback: Callable[[str, int, dict, list], bool]=None):
"""This triggers a step end
@ -2942,9 +2942,9 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_END, {'status':status})
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS if status else MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_FAILURE)
def step(self, step_text, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def step(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This triggers a step information
Args:
@ -2960,9 +2960,9 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP)
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP)
def exception(self, ex, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def exception(self, ex, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends exception to the client
Args:
@ -2978,9 +2978,9 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(str(ex), MSG_TYPE.MSG_TYPE_EXCEPTION)
callback(str(ex), MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_EXCEPTION)
def warning(self, warning:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def warning(self, warning:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends exception to the client
Args:
@ -2996,7 +2996,7 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(warning, MSG_TYPE.MSG_TYPE_EXCEPTION)
callback(warning, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_EXCEPTION)
def json(self, title:str, json_infos:dict, callback: Callable[[str, int, dict, list], bool]=None, indent=4):
@ -3015,9 +3015,9 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback("", MSG_TYPE.MSG_TYPE_JSON_INFOS, metadata = [{"title":title, "content":json.dumps(json_infos, indent=indent)}])
callback("", MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_JSON_INFOS, metadata = [{"title":title, "content":json.dumps(json_infos, indent=indent)}])
def ui(self, html_ui:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def ui(self, html_ui:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends ui elements to front end
Args:
@ -3033,10 +3033,10 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(html_ui, MSG_TYPE.MSG_TYPE_UI)
callback(html_ui, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI)
def ui_in_iframe(self, html_ui:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def ui_in_iframe(self, html_ui:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends ui elements to front end inside an iframe
Args:
@ -3053,13 +3053,13 @@ class APScript(StateMachine):
if callback:
iframe_html = f'<iframe class="w-full" srcdoc="{html_ui}" style="width:100%; height:100%; border:none;"></iframe>'
callback(iframe_html, MSG_TYPE.MSG_TYPE_UI)
callback(iframe_html, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI)
def code(self, code:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def code(self, code:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends code to front end
Args:
@ -3075,9 +3075,9 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(code, MSG_TYPE.MSG_TYPE_CODE)
callback(code, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_CODE)
def chunk(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def chunk(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end
Args:
@ -3088,10 +3088,10 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_CHUNK)
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK)
def full(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None, msg_type:MSG_TYPE = MSG_TYPE.MSG_TYPE_FULL):
def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None, msg_type:MSG_OPERATION_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT):
"""This sends full text to front end
Args:
@ -3104,7 +3104,7 @@ class APScript(StateMachine):
if callback:
callback(full_text, msg_type)
def full_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def set_message_content_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end (INVISIBLE to AI)
Args:
@ -3115,9 +3115,9 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI)
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI)
def full_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def set_message_content_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end (INVISIBLE to user)
Args:
@ -3128,7 +3128,7 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER)
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER)
@ -3752,7 +3752,7 @@ class APScript(StateMachine):
verbose=verbose
)
def info(self, info_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def info(self, info_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends info text to front end
Args:
@ -3763,9 +3763,9 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(info_text, MSG_TYPE.MSG_TYPE_FULL)
callback(info_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
def step_progress(self, step_text:str, progress:float, callback: Callable[[str, MSG_TYPE, dict, list, AIPersonality], bool]=None):
def step_progress(self, step_text:str, progress:float, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list, AIPersonality], bool]=None):
"""This sends step rogress to front end
Args:
@ -3776,9 +3776,9 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_PROGRESS, {'progress':progress})
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_PROGRESS, {'progress':progress})
def new_message(self, message_text:str, message_type:MSG_TYPE= MSG_TYPE.MSG_TYPE_FULL, metadata=[], callback: Callable[[str, int, dict, list, AIPersonality], bool]=None):
def new_message(self, message_text:str, message_type:MSG_OPERATION_TYPE= MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, metadata=[], callback: Callable[[str, int, dict, list, AIPersonality], bool]=None):
"""This sends step rogress to front end
Args:
@ -3789,9 +3789,9 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(message_text, MSG_TYPE.MSG_TYPE_NEW_MESSAGE, parameters={'type':message_type.value,'metadata':metadata},personality = self.personality)
callback(message_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_NEW_MESSAGE, parameters={'type':message_type.value,'metadata':metadata},personality = self.personality)
def finished_message(self, message_text:str="", callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def finished_message(self, message_text:str="", callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends step rogress to front end
Args:
@ -3802,7 +3802,7 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(message_text, MSG_TYPE.MSG_TYPE_FINISHED_MESSAGE)
callback(message_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_FINISHED_MESSAGE)
def print_prompt(self, title, prompt):
ASCIIColors.red("*-*-*-*-*-*-*-* ", end="")
@ -3921,7 +3921,7 @@ class APScript(StateMachine):
if context_details["is_continue"]:
out = context_details["previous_chunk"] + out
if send_full:
self.full(out)
self.set_message_content(out)
return out
def generate_with_function_calls(self, context_details: dict, functions: List[Dict[str, Any]], max_answer_length: Optional[int] = None, callback = None) -> List[Dict[str, Any]]:
@ -4179,7 +4179,7 @@ class APScript(StateMachine):
nested_function_calls += 1
self.chunk("\n")
if hide_function_call:
self.full("") #Hide function
self.set_message_content("") #Hide function
if self.config.debug:
self.print_prompt("Function calls", json.dumps(function_calls, indent=4))
@ -4189,7 +4189,7 @@ class APScript(StateMachine):
out += f"{self.separator_template}"+ self.system_custom_header('function calls results') + final_output + "\n"
if prompt_after_execution:
if separate_output:
self.full(final_output)
self.set_message_content(final_output)
self.new_message("")
context_details["discussion_messages"] +=out
if len(self.personality.image_files)>0:

View File

@ -12,7 +12,7 @@ from pydantic import BaseModel, Field
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.security import check_access
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager

View File

@ -10,7 +10,7 @@ from fastapi import APIRouter, Request, HTTPException, Depends, Header
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel, Field
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from lollms.security import sanitize_path, check_access
from ascii_colors import ASCIIColors

View File

@ -13,7 +13,7 @@ from lollms.server.elf_server import LOLLMSElfServer
from pydantic import BaseModel
from lollms.security import check_access
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path

View File

@ -10,7 +10,7 @@ from fastapi import APIRouter, Request
from lollms.server.elf_server import LOLLMSElfServer
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from lollms.security import check_access

View File

@ -11,7 +11,7 @@ from fastapi import APIRouter, Request
from lollms.server.elf_server import LOLLMSElfServer
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from lollms.security import sanitize_path, check_access
from ascii_colors import ASCIIColors

View File

@ -8,16 +8,17 @@ description:
"""
from fastapi import APIRouter, Request, Body, Response
from fastapi import APIRouter, HTTPException
from fastapi.responses import PlainTextResponse
from lollms.server.elf_server import LOLLMSElfServer
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from lollms.generation import RECEPTION_MANAGER, ROLE_CHANGE_DECISION, ROLE_CHANGE_OURTPUT
from ascii_colors import ASCIIColors
import time
import re
import threading
from typing import List, Optional, Union
import random
@ -127,7 +128,7 @@ async def lollms_generate(request: LollmsGenerateRequest):
ASCIIColors.yellow(prompt)
tokens = elf_server.model.tokenize(prompt)
n_tokens = len(tokens)
ASCIIColors.yellow(f"Prompt input size {n_tokens}")
ASCIIColors.info(f"Prompt input size {n_tokens}")
n_predict = min(min(elf_server.config.ctx_size-n_tokens-1,elf_server.config.max_n_predict), request.n_predict) if request.n_predict>0 else min(elf_server.config.ctx_size-n_tokens-1,elf_server.config.max_n_predict)
stream = request.stream
if elf_server.binding is not None:
@ -136,7 +137,7 @@ async def lollms_generate(request: LollmsGenerateRequest):
async def generate_chunks():
lk = threading.Lock()
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
if elf_server.cancel_gen:
return False
@ -189,7 +190,7 @@ async def lollms_generate(request: LollmsGenerateRequest):
elf_server.cancel_gen = False
return StreamingResponse(generate_chunks(), media_type="text/plain", headers=headers)
else:
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data
if chunk is None:
return True
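
Every route in this file wires the binding to the HTTP response through the same callback contract: receive a chunk, forward it, return True to continue or False to cancel (checked by the binding between tokens). A reduced sketch of the streaming side, with the queue plumbing assumed rather than copied from the file:

```python
import queue
import threading

def make_stream(binding_generate, prompt):
    q = queue.Queue()   # hands tokens from the binding thread to HTTP

    def callback(chunk, chunk_type=None):
        if chunk is not None:
            q.put(chunk)
        return True          # returning False would cancel generation

    def run():
        binding_generate(prompt, callback=callback)
        q.put(None)          # sentinel: generation finished

    threading.Thread(target=run, daemon=True).start()

    def chunks():
        while (item := q.get()) is not None:
            yield item

    return chunks()

# usage with a fake binding that "generates" three tokens
def fake_generate(prompt, callback):
    for tok in ["Hello", " ", "world"]:
        callback(tok)

print("".join(make_stream(fake_generate, "hi")))   # -> Hello world
```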
@ -276,20 +277,35 @@ async def lollms_generate_with_images(request: LollmsGenerateRequest):
stream = request.stream
prompt_tokens = len(elf_server.binding.tokenize(prompt))
if elf_server.binding is not None:
def add_padding(encoded_image):
missing_padding = len(encoded_image) % 4
if missing_padding:
encoded_image += '=' * (4 - missing_padding)
return encoded_image
def sanitize_base64(encoded_image):
# Remove any characters that are not valid base64 characters
return re.sub(r'[^A-Za-z0-9+/=]', '', encoded_image)
image_files = []
images_path = elf_server.lollms_paths.personal_outputs_path / "tmp_images"
images_path.mkdir(parents=True, exist_ok=True)
for i, encoded_image in enumerate(encoded_images):
# Remove the data URL prefix
if encoded_image.startswith('data:image/png;base64,'):
encoded_image = encoded_image.split(',')[1] # Get the base64 part only
sanitized_image = sanitize_base64(encoded_image)
padded_image = add_padding(sanitized_image)
image_path = images_path/ f'image_{i}.png'
with open(image_path, 'wb') as image_file:
image_file.write(base64.b64decode(encoded_image))
image_file.write(base64.b64decode(padded_image))
image_files.append(image_path)
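
Browser-supplied base64 frequently arrives with a data-URL prefix, stray characters, or stripped = padding; the two helpers above normalize all three before decoding. A self-contained check of the same idea:

```python
import base64
import re

def sanitize_base64(s: str) -> str:
    return re.sub(r'[^A-Za-z0-9+/=]', '', s)

def add_padding(s: str) -> str:
    missing = len(s) % 4
    return s + '=' * (4 - missing) if missing else s

# simulate a browser payload with the padding stripped off
raw = base64.b64encode(b"\x89PNG...").decode().rstrip("=")
data_url = "data:image/png;base64," + raw

payload = data_url.split(',')[1] if data_url.startswith('data:image/png;base64,') else data_url
decoded = base64.b64decode(add_padding(sanitize_base64(payload)))
assert decoded.startswith(b"\x89PNG")
```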
if stream:
new_output={"new_values":[]}
async def generate_chunks():
lk = threading.Lock()
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
if elf_server.cancel_gen:
return False
@ -343,7 +359,7 @@ async def lollms_generate_with_images(request: LollmsGenerateRequest):
elf_server.cancel_gen = False
return StreamingResponse(generate_chunks(), media_type="text/plain", headers=headers)
else:
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data
if chunk is None:
return True
@ -373,7 +389,7 @@ async def lollms_generate_with_images(request: LollmsGenerateRequest):
except Exception as ex:
trace_exception(ex)
elf_server.error(ex)
return {"status":False,"error":str(ex)}
raise HTTPException(400, f"Error : {ex}")
# ----------------------- Open AI ----------------------------------------
@ -493,7 +509,7 @@ async def v1_chat_completions(request: ChatGenerationRequest):
async def generate_chunks():
lk = threading.Lock()
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
if elf_server.cancel_gen:
return False
@ -551,7 +567,7 @@ async def v1_chat_completions(request: ChatGenerationRequest):
elf_server.cancel_gen = False
return StreamingResponse(generate_chunks(), media_type="application/json")
else:
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data
if chunk is None:
return True
@ -580,7 +596,7 @@ async def v1_chat_completions(request: ChatGenerationRequest):
except Exception as ex:
trace_exception(ex)
elf_server.error(ex)
return {"status":False,"error":str(ex)}
raise HTTPException(400, f"Error : {ex}")
class OllamaModelResponse(BaseModel):
id: str
@ -635,7 +651,7 @@ async def ollama_chat_completion(request: ChatGenerationRequest):
async def generate_chunks():
lk = threading.Lock()
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
if elf_server.cancel_gen:
return False
@ -693,7 +709,7 @@ async def ollama_chat_completion(request: ChatGenerationRequest):
elf_server.cancel_gen = False
return StreamingResponse(generate_chunks(), media_type="application/json")
else:
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data
if chunk is None:
return True
@ -789,7 +805,7 @@ async def ollama_generate(request: CompletionGenerationRequest):
if stream:
output = {"text":""}
def generate_chunks():
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data
output["text"] += chunk
antiprompt = detect_antiprompt(output["text"], [start_header_id_template, end_header_id_template])
@ -810,7 +826,7 @@ async def ollama_generate(request: CompletionGenerationRequest):
return StreamingResponse(generate_chunks())
else:
output = {"text":""}
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
if chunk is None:
return
# Yield each chunk of data
@ -875,7 +891,7 @@ async def ollama_completion(request: CompletionGenerationRequest):
async def generate_chunks():
lk = threading.Lock()
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
if elf_server.cancel_gen:
return False
@ -928,7 +944,7 @@ async def ollama_completion(request: CompletionGenerationRequest):
elf_server.cancel_gen = False
return StreamingResponse(generate_chunks(), media_type="text/plain")
else:
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data
if chunk is None:
return True
@ -979,7 +995,7 @@ async def v1_completion(request: CompletionGenerationRequest):
if stream:
output = {"text":""}
def generate_chunks():
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data
output["text"] += chunk
antiprompt = detect_antiprompt(output["text"])
@ -1000,7 +1016,7 @@ async def v1_completion(request: CompletionGenerationRequest):
return StreamingResponse(generate_chunks())
else:
output = {"text":""}
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK):
def callback(chunk, chunk_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data
output["text"] += chunk
antiprompt = detect_antiprompt(output["text"])

View File

@ -13,7 +13,7 @@ from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.security import check_access
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path

View File

@ -13,7 +13,7 @@ from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from lollms.security import check_access
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path

View File

@ -12,7 +12,7 @@ from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.security import check_access
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path

View File

@ -27,7 +27,7 @@ from fastapi import APIRouter, Request, HTTPException, Depends, Header
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel, Field
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from lollms.security import sanitize_path, check_access
from ascii_colors import ASCIIColors

View File

@ -11,7 +11,7 @@ from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from lollms.security import check_access

View File

@ -10,7 +10,7 @@ from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from lollms.security import sanitize_path
from ascii_colors import ASCIIColors

View File

@ -12,7 +12,7 @@ from fastapi.responses import PlainTextResponse
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import find_next_available_filename, output_file_path_to_url, detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from lollms.security import sanitize_path, validate_path, check_access

View File

@ -11,7 +11,7 @@ from fastapi import APIRouter, HTTPException
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string
from ascii_colors import ASCIIColors

View File

@@ -12,7 +12,7 @@ from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.security import check_access
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path

View File

@@ -8,17 +8,6 @@ description:
"""
from fastapi import APIRouter, Request
from fastapi import HTTPException
from pydantic import BaseModel
import pkg_resources
from lollms.server.elf_server import LOLLMSElfServer
from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.utilities import load_config, trace_exception, gc
from lollms.utilities import find_first_available_file_index, convert_language_name, run_async
from lollms_webui import LOLLMSWebUI
from pathlib import Path
from typing import List

View File

@@ -14,7 +14,7 @@ from lollms.server.elf_server import LOLLMSElfServer
from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality
from lollms.personality import AIPersonality
from lollms.types import SENDER_TYPES
from lollms.utilities import load_config, trace_exception, gc, terminate_thread, run_async
from pathlib import Path
@@ -95,11 +95,11 @@ def add_events(sio:socketio):
# Raw text generation
lollmsElfServer.answer = {"full_text":""}
def callback(text, message_type: MSG_OPERATION_TYPE, metadata:dict={}):
if message_type == MSG_TYPE.MSG_TYPE_CHUNK:
if message_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK:
ASCIIColors.success(f"generated: {len(lollmsElfServer.answer['full_text'].split())} words", end='\r')
if text is not None:
lollmsElfServer.answer["full_text"] = lollmsElfServer.answer["full_text"] + text
run_async(partial(lollmsElfServer.sio.emit,'text_chunk', {'chunk': text, 'type':MSG_TYPE.MSG_TYPE_CHUNK.value}, to=client_id))
run_async(partial(lollmsElfServer.sio.emit,'text_chunk', {'chunk': text, 'type':MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK.value}, to=client_id))
if client_id in lollmsElfServer.session.clients.keys(): # Check the client is still connected
if client.requested_stop:
return False
@@ -169,7 +169,7 @@ def add_events(sio:socketio):
full_discussion = personality.personality_conditioning + ''.join(full_discussion_blocks)
def callback(text, message_type: MSG_OPERATION_TYPE, metadata:dict={}):
if message_type == MSG_TYPE.MSG_TYPE_CHUNK:
if message_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK:
lollmsElfServer.answer["full_text"] = lollmsElfServer.answer["full_text"] + text
run_async(partial(lollmsElfServer.sio.emit,'text_chunk', {'chunk': text}, to=client_id))
try:
@@ -256,7 +256,7 @@ def add_events(sio:socketio):
nb_tokens = None
created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value,
message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
content=prompt,
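
Note: the handlers above show the canonical streaming migration: compare against MSG_OPERATION_TYPE_ADD_CHUNK instead of MSG_TYPE_CHUNK, accumulate the chunk, then emit it. A minimal self-contained sketch of the same pattern (make_stream_callback and the answer dict are stand-ins, not part of the commit):

from lollms.types import MSG_OPERATION_TYPE

def make_stream_callback(answer: dict):
    # answer mirrors the handler above: a dict holding {"full_text": ""}
    def callback(text, operation: MSG_OPERATION_TYPE, metadata: dict = None):
        if operation == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK and text is not None:
            answer["full_text"] += text  # accumulate streamed chunks
        return True  # return False to ask the generator to stop
    return callback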

View File

@@ -16,7 +16,7 @@ from lollms.binding import BindingBuilder, InstallOption
from lollms.security import sanitize_path
from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality
from lollms.personality import AIPersonality
from lollms.utilities import load_config, trace_exception, gc, terminate_thread, run_async
from pathlib import Path
from typing import List

View File

@@ -15,7 +15,7 @@ from lollms.types import SENDER_TYPES
from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality
from lollms.personality import AIPersonality
from lollms.utilities import load_config, trace_exception, gc, terminate_thread, run_async
from pathlib import Path
from typing import List
@@ -145,7 +145,7 @@ def add_events(sio:socketio):
ump = lollmsElfServer.config.discussion_prompt_separator + lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix
created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
message = client.discussion.add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value,
message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
content="",

View File

@@ -14,7 +14,7 @@ from lollms.server.elf_server import LOLLMSElfServer
from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality
from lollms.personality import AIPersonality
from lollms.utilities import load_config, trace_exception, gc, terminate_thread, run_async
from pathlib import Path
from typing import List

View File

@@ -4,7 +4,7 @@ from typing import Callable, List, Dict, Any, Optional
from functools import partial
from datetime import datetime
from ascii_colors import ASCIIColors
from lollms.types import MSG_TYPE, SUMMARY_MODE
from lollms.types import MSG_OPERATION_TYPE, SUMMARY_MODE
from lollms.com import LoLLMsCom
from lollms.utilities import PromptReshaper, remove_text_from_string, process_ai_output
from lollmsvectordb.text_chunker import TextChunker
@@ -13,7 +13,7 @@ from lollmsvectordb.directory_binding import DirectoryBinding
import hashlib
import json
class TasksLibrary:
def __init__(self, lollms:LoLLMsCom, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None) -> None:
def __init__(self, lollms:LoLLMsCom, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None) -> None:
self.lollms = lollms
self.config = lollms.config
self.callback = callback
@@ -46,14 +46,14 @@ class TasksLibrary:
return prompt.lower()
return None
def process(self, text:str, message_type:MSG_TYPE, callback=None, show_progress=False):
def process(self, text:str, message_type:MSG_OPERATION_TYPE, callback=None, show_progress=False):
if callback is None:
callback = self.callback
if text is None:
return True
if message_type==MSG_TYPE.MSG_TYPE_CHUNK:
if message_type==MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK:
bot_says = self.bot_says + text
elif message_type==MSG_TYPE.MSG_TYPE_FULL:
elif message_type==MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT:
bot_says = text
if show_progress:
@@ -226,7 +226,7 @@ class TasksLibrary:
return gen
# Communications with the user
def step_start(self, step_text, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def step_start(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This triggers a step start
Args:
@@ -237,7 +237,7 @@ class TasksLibrary:
callback = self.callback
if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_START)
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_START)
def step_end(self, step_text, status=True, callback: Callable[[str, int, dict, list], bool]=None):
"""This triggers a step end
@@ -250,9 +250,9 @@ class TasksLibrary:
callback = self.callback
if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_END, {'status':status})
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS if status else MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_FAILURE)
def step(self, step_text, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def step(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This triggers a step information
Args:
@@ -268,9 +268,9 @@ class TasksLibrary:
callback = self.callback
if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP)
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP)
def exception(self, ex, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def exception(self, ex, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends exception to the client
Args:
@@ -286,9 +286,9 @@ class TasksLibrary:
callback = self.callback
if callback:
callback(str(ex), MSG_TYPE.MSG_TYPE_EXCEPTION)
callback(str(ex), MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_EXCEPTION)
def warning(self, warning:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def warning(self, warning:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends exception to the client
Args:
@@ -304,9 +304,9 @@ class TasksLibrary:
callback = self.callback
if callback:
callback(warning, MSG_TYPE.MSG_TYPE_EXCEPTION)
callback(warning, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_WARNING)
def info(self, info:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def info(self, info:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends exception to the client
Args:
@@ -322,7 +322,7 @@ class TasksLibrary:
callback = self.callback
if callback:
callback(info, MSG_TYPE.MSG_TYPE_INFO)
callback(info, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
def json(self, title:str, json_infos:dict, callback: Callable[[str, int, dict, list], bool]=None, indent=4):
"""This sends json data to front end
@@ -340,9 +340,9 @@ class TasksLibrary:
callback = self.callback
if callback:
callback("", MSG_TYPE.MSG_TYPE_JSON_INFOS, metadata = [{"title":title, "content":json.dumps(json_infos, indent=indent)}])
callback("", MSG_OPERATION_TYPE.MSG_TYPE_JSON_INFOS, metadata = [{"title":title, "content":json.dumps(json_infos, indent=indent)}])
def ui(self, html_ui:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def ui(self, html_ui:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends ui elements to front end
Args:
@@ -358,9 +358,9 @@ class TasksLibrary:
callback = self.callback
if callback:
callback(html_ui, MSG_TYPE.MSG_TYPE_UI)
callback(html_ui, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI)
def code(self, code:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def code(self, code:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends code to front end
Args:
@@ -376,9 +376,9 @@ class TasksLibrary:
callback = self.callback
if callback:
callback(code, MSG_TYPE.MSG_TYPE_CODE)
callback(code, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_CODE)
def chunk(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def chunk(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end
Args:
@@ -389,10 +389,10 @@ class TasksLibrary:
callback = self.callback
if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_CHUNK)
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK)
def full(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None, msg_type:MSG_TYPE = MSG_TYPE.MSG_TYPE_FULL):
def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None, msg_type:MSG_OPERATION_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT):
"""This sends full text to front end
Args:
@@ -405,7 +405,7 @@ class TasksLibrary:
if callback:
callback(full_text, msg_type)
def full_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def set_message_content_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end (INVISIBLE to AI)
Args:
@@ -416,9 +416,9 @@ class TasksLibrary:
callback = self.callback
if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI)
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI)
def full_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None):
def set_message_content_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end (INVISIBLE to user)
Args:
@@ -429,7 +429,7 @@ class TasksLibrary:
callback = self.callback
if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER)
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER)
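
Note: with the renames above, personality code that called TasksLibrary.full, full_invisible_to_ai, or full_invisible_to_user must switch to the new names. Given a TasksLibrary instance named tasks (hypothetical), the migrated call sites look like:

tasks.set_message_content("final answer")                        # was tasks.full(...)
tasks.set_message_content_invisible_to_ai("hidden from the AI")  # was tasks.full_invisible_to_ai(...)
tasks.set_message_content_invisible_to_user("hidden from user")  # was tasks.full_invisible_to_user(...)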

View File

@@ -1,34 +1,39 @@
from enum import Enum
class MSG_TYPE(Enum):
# Messaging
MSG_TYPE_CHUNK = 0 # A chunk of a message (used for classical chat)
MSG_TYPE_FULL = 1 # A full message (for some personality the answer is sent in bulk)
MSG_TYPE_FULL_INVISIBLE_TO_AI = 2 # A full message (for some personality the answer is sent in bulk)
MSG_TYPE_FULL_INVISIBLE_TO_USER = 3 # A full message (for some personality the answer is sent in bulk)
MSG_TYPE_CONTENT = 1 # A full message (for some personalities the answer is sent in bulk)
MSG_TYPE_CONTENT_INVISIBLE_TO_AI = 2 # A full message that is shown to the user but hidden from the AI
MSG_TYPE_CONTENT_INVISIBLE_TO_USER = 3 # A full message that is sent to the AI but hidden from the user
class MSG_OPERATION_TYPE(Enum):
# Content operations
MSG_OPERATION_TYPE_ADD_CHUNK = 0 # Adds a chunk to the current message
MSG_OPERATION_TYPE_SET_CONTENT = 1 # Sets the content of the current message
MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI = 2 # Sets the content of the current message as invisible to the AI
MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER = 3 # Sets the content of the current message as invisible to the user
# Information
MSG_TYPE_EXCEPTION = 4 # An exception occurred
MSG_TYPE_WARNING = 5 # A warning occurred
MSG_TYPE_INFO = 6 # An information to be shown to user
MSG_OPERATION_TYPE_EXCEPTION = 4 # An exception occurred
MSG_OPERATION_TYPE_WARNING = 5 # A warning occurred
MSG_OPERATION_TYPE_INFO = 6 # An information to be shown to user
# Steps
MSG_TYPE_STEP = 7 # An instant step (a step that doesn't need time to be executed)
MSG_TYPE_STEP_START = 8 # A step has started (the text contains an explanation of the step done by the personality)
MSG_TYPE_STEP_PROGRESS = 9 # The progress value (the text contains a percentage and can be parsed by the receiver)
MSG_TYPE_STEP_END = 10# A step has been done (the text contains an explanation of the step done by the personality)
MSG_OPERATION_TYPE_STEP = 7 # An instant step (a step that doesn't need time to be executed)
MSG_OPERATION_TYPE_STEP_START = 8 # A step has started (the text contains an explanation of the step done by the personality)
MSG_OPERATION_TYPE_STEP_PROGRESS = 9 # The progress value (the text contains a percentage and can be parsed by the receiver)
MSG_OPERATION_TYPE_STEP_END_SUCCESS = 10# A step has ended successfully (the text contains an explanation of the step done by the personality)
MSG_OPERATION_TYPE_STEP_END_FAILURE = 11# A step has ended in failure (the text contains an explanation of the step done by the personality)
#Extra
MSG_TYPE_JSON_INFOS = 11# A JSON output that is useful for summarizing the process of generation used by personalities like chain of thoughts and tree of thoughts
MSG_TYPE_REF = 12# References (in form of [text](path))
MSG_TYPE_CODE = 13# A javascript code to execute
MSG_TYPE_UI = 14# A vue.js component to show (we need to build some and parse the text to show it)
MSG_OPERATION_TYPE_JSON_INFOS = 12# A JSON output that is useful for summarizing the process of generation used by personalities like chain of thoughts and tree of thoughts
MSG_OPERATION_TYPE_REF = 13# References (in form of [text](path))
MSG_OPERATION_TYPE_CODE = 14# A javascript code to execute
MSG_OPERATION_TYPE_UI = 15# A vue.js component to show (we need to build some and parse the text to show it)
#Commands
MSG_TYPE_NEW_MESSAGE = 15# A new message
MSG_TYPE_FINISHED_MESSAGE = 17# End of current message
MSG_OPERATION_TYPE_NEW_MESSAGE = 16# A new message
MSG_OPERATION_TYPE_FINISHED_MESSAGE = 17# End of current message
CONTENT_OPERATION_TYPES = [MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER]
class SENDER_TYPES(Enum):
SENDER_TYPES_USER = 0 # Sent by user
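
Note: besides the renames, several raw values shifted (JSON_INFOS 11->12, REF 12->13, CODE 13->14, UI 14->15, NEW_MESSAGE 15->16), so clients that persisted or compared the integer .value of these members must re-map rather than reuse stored numbers. A sketch of dispatching on the new enum, assuming CONTENT_OPERATION_TYPES is importable from lollms.types as defined above (the print calls stand in for real UI/log hooks):

from lollms.types import MSG_OPERATION_TYPE, CONTENT_OPERATION_TYPES

def on_operation(text: str, op: MSG_OPERATION_TYPE) -> None:
    if op in CONTENT_OPERATION_TYPES:  # ADD_CHUNK or any SET_CONTENT variant
        print(text, end="")            # stand-in for a UI content update
    elif op == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_FAILURE:
        print(f"step failed: {text}")  # stand-in for error reporting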

View File

@@ -26,7 +26,7 @@ def get_all_files(path):
setuptools.setup(
name="lollms",
version="9.5.1",
version="10.0.0",
author="Saifeddine ALOUI (ParisNeo)",
author_email="parisneo_ai@gmail.com",
description="A python library for AI personality definition",