Upgraded to lollms 10 / compatible with lollms webui 11

This commit is contained in:
Saifeddine ALOUI 2024-08-14 22:15:01 +02:00
parent 1b4c8ca293
commit 7ebeeb347a
29 changed files with 504 additions and 344 deletions

View File

@ -8,7 +8,7 @@ from lollms.config import InstallOption
from lollms.helpers import ASCIIColors, trace_exception from lollms.helpers import ASCIIColors, trace_exception
from lollms.com import NotificationType, NotificationDisplayType, LoLLMsCom from lollms.com import NotificationType, NotificationDisplayType, LoLLMsCom
from lollms.terminal import MainMenu from lollms.terminal import MainMenu
from lollms.types import MSG_TYPE, SENDER_TYPES from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
from lollms.utilities import PromptReshaper from lollms.utilities import PromptReshaper
from lollms.client_session import Client, Session from lollms.client_session import Client, Session
from lollms.databases.skills_database import SkillsLibrary from lollms.databases.skills_database import SkillsLibrary
@ -239,8 +239,8 @@ class LollmsApplication(LoLLMsCom):
messages = client.discussion.get_messages() messages = client.discussion.get_messages()
# Extract relevant information from messages # Extract relevant information from messages
def cb(str, MSG_TYPE_=MSG_TYPE.MSG_TYPE_FULL, dict=None, list=None): def cb(str, MSG_TYPE_=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, dict=None, list=None):
if MSG_TYPE_!=MSG_TYPE.MSG_TYPE_CHUNK: if MSG_TYPE_!=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK:
self.ShowBlockingMessage(f"Learning\n{str}") self.ShowBlockingMessage(f"Learning\n{str}")
bk_cb = self.tasks_library.callback bk_cb = self.tasks_library.callback
self.tasks_library.callback = cb self.tasks_library.callback = cb
@ -334,117 +334,125 @@ class LollmsApplication(LoLLMsCom):
def start_servers(self): def start_servers(self):
ASCIIColors.yellow("* - * - * - Starting services - * - * - *") ASCIIColors.yellow("* - * - * - Starting services - * - * - *")
tts_services = []
stt_services = []
def start_ttt(*args, **kwargs):
if self.config.enable_ollama_service:
try:
from lollms.services.ollama.lollms_ollama import Service
self.ollama = Service(self, base_url=self.config.ollama_base_url)
tts_services.append("ollama")
ASCIIColors.blue("Loading local TTT services") except Exception as ex:
if self.config.enable_ollama_service: trace_exception(ex)
try: self.warning(f"Couldn't load Ollama")
from lollms.services.ollama.lollms_ollama import Service
self.ollama = Service(self, base_url=self.config.ollama_base_url)
except Exception as ex:
trace_exception(ex)
self.warning(f"Couldn't load Ollama")
if self.config.enable_vllm_service: if self.config.enable_vllm_service:
try: try:
from lollms.services.vllm.lollms_vllm import Service from lollms.services.vllm.lollms_vllm import Service
self.vllm = Service(self, base_url=self.config.vllm_url) self.vllm = Service(self, base_url=self.config.vllm_url)
except Exception as ex: tts_services.append("vllm")
trace_exception(ex) except Exception as ex:
self.warning(f"Couldn't load vllm") trace_exception(ex)
self.warning(f"Couldn't load vllm")
ASCIIColors.blue("Loading loacal STT services") ASCIIColors.execute_with_animation("Loading local TTT services", start_ttt,ASCIIColors.color_blue)
if self.config.whisper_activate or self.config.active_stt_service == "whisper": print("OK")
try: def start_stt(*args, **kwargs):
if self.config.whisper_activate or self.config.active_stt_service == "whisper":
try:
from lollms.services.whisper.lollms_whisper import LollmsWhisper
self.whisper = LollmsWhisper(self, self.config.whisper_model, self.lollms_paths.personal_outputs_path)
stt_services.append("whisper")
except Exception as ex:
trace_exception(ex)
if self.config.active_stt_service == "openai_whisper":
from lollms.services.openai_whisper.lollms_openai_whisper import LollmsOpenAIWhisper
self.stt = LollmsOpenAIWhisper(self, self.config.openai_whisper_model, self.config.openai_whisper_key)
elif self.config.active_stt_service == "whisper":
from lollms.services.whisper.lollms_whisper import LollmsWhisper from lollms.services.whisper.lollms_whisper import LollmsWhisper
self.whisper = LollmsWhisper(self, self.config.whisper_model, self.lollms_paths.personal_outputs_path) self.stt = LollmsWhisper(self, self.config.whisper_model)
except Exception as ex:
trace_exception(ex)
ASCIIColors.blue("Loading local TTS services") ASCIIColors.execute_with_animation("Loading loacal STT services", start_stt, ASCIIColors.color_blue)
if self.config.active_tts_service == "xtts": print("OK")
ASCIIColors.yellow("Loading XTTS")
try: def start_tts(*args, **kwargs):
from lollms.services.xtts.lollms_xtts import LollmsXTTS if self.config.active_tts_service == "xtts":
voice=self.config.xtts_current_voice ASCIIColors.yellow("Loading XTTS")
if voice!="main_voice": try:
voices_folder = self.lollms_paths.custom_voices_path from lollms.services.xtts.lollms_xtts import LollmsXTTS
voice=self.config.xtts_current_voice
if voice!="main_voice":
voices_folder = self.lollms_paths.custom_voices_path
else:
voices_folder = Path(__file__).parent.parent.parent/"services/xtts/voices"
self.xtts = LollmsXTTS(
self,
voices_folders=[voices_folder, self.lollms_paths.custom_voices_path],
freq=self.config.xtts_freq
)
except Exception as ex:
trace_exception(ex)
self.warning(f"Couldn't load XTTS")
if self.config.active_tts_service == "eleven_labs_tts":
from lollms.services.eleven_labs_tts.lollms_eleven_labs_tts import LollmsElevenLabsTTS
self.tts = LollmsElevenLabsTTS(self, self.config.elevenlabs_tts_model_id, self.config.elevenlabs_tts_voice_id, self.config.elevenlabs_tts_key, stability=self.config.elevenlabs_tts_voice_stability, similarity_boost=self.config.elevenlabs_tts_voice_boost)
elif self.config.active_tts_service == "openai_tts":
from lollms.services.open_ai_tts.lollms_openai_tts import LollmsOpenAITTS
self.tts = LollmsOpenAITTS(self, self.config.openai_tts_model, self.config.openai_tts_voice, self.config.openai_tts_key)
elif self.config.active_tts_service == "xtts" and self.xtts:
self.tts = self.xtts
ASCIIColors.execute_with_animation("Loading loacal TTS services", start_tts, ASCIIColors.color_blue)
print("OK")
def start_tti(*args, **kwargs):
if self.config.enable_sd_service:
try:
from lollms.services.sd.lollms_sd import LollmsSD
self.sd = LollmsSD(self, auto_sd_base_url=self.config.sd_base_url)
except:
self.warning(f"Couldn't load SD")
if self.config.enable_comfyui_service:
try:
from lollms.services.comfyui.lollms_comfyui import LollmsComfyUI
self.comfyui = LollmsComfyUI(self, comfyui_base_url=self.config.comfyui_base_url)
except:
self.warning(f"Couldn't load SD")
if self.config.enable_motion_ctrl_service:
try:
from lollms.services.motion_ctrl.lollms_motion_ctrl import Service
self.motion_ctrl = Service(self, base_url=self.config.motion_ctrl_base_url)
except Exception as ex:
trace_exception(ex)
self.warning(f"Couldn't load Motion control")
if self.config.active_tti_service == "diffusers":
from lollms.services.diffusers.lollms_diffusers import LollmsDiffusers
self.tti = LollmsDiffusers(self)
elif self.config.active_tti_service == "autosd":
if self.sd:
self.tti = self.sd
else: else:
voices_folder = Path(__file__).parent.parent.parent/"services/xtts/voices" from lollms.services.sd.lollms_sd import LollmsSD
self.tti = LollmsSD(self)
elif self.config.active_tti_service == "dall-e":
from lollms.services.dalle.lollms_dalle import LollmsDalle
self.tti = LollmsDalle(self, self.config.dall_e_key)
elif self.config.active_tti_service == "midjourney":
from lollms.services.midjourney.lollms_midjourney import LollmsMidjourney
self.tti = LollmsMidjourney(self, self.config.midjourney_key, self.config.midjourney_timeout, self.config.midjourney_retries)
elif self.config.active_tti_service == "comfyui" and (self.tti is None or self.tti.name!="comfyui"):
if self.comfyui:
self.tti = self.comfyui
else:
from lollms.services.comfyui.lollms_comfyui import LollmsComfyUI
self.tti = LollmsComfyUI(self, comfyui_base_url=self.config.comfyui_base_url)
self.xtts = LollmsXTTS( ASCIIColors.execute_with_animation("Loading loacal TTI services", start_tti, ASCIIColors.color_blue)
self, print("OK")
voices_folders=[voices_folder, self.lollms_paths.custom_voices_path],
freq=self.config.xtts_freq
)
except Exception as ex:
trace_exception(ex)
self.warning(f"Couldn't load XTTS")
ASCIIColors.blue("Loading local TTI services")
if self.config.enable_sd_service:
try:
from lollms.services.sd.lollms_sd import LollmsSD
self.sd = LollmsSD(self, auto_sd_base_url=self.config.sd_base_url)
except:
self.warning(f"Couldn't load SD")
if self.config.enable_comfyui_service:
try:
from lollms.services.comfyui.lollms_comfyui import LollmsComfyUI
self.comfyui = LollmsComfyUI(self, comfyui_base_url=self.config.comfyui_base_url)
except:
self.warning(f"Couldn't load SD")
if self.config.enable_motion_ctrl_service:
try:
from lollms.services.motion_ctrl.lollms_motion_ctrl import Service
self.motion_ctrl = Service(self, base_url=self.config.motion_ctrl_base_url)
except Exception as ex:
trace_exception(ex)
self.warning(f"Couldn't load Motion control")
ASCIIColors.blue("Activating TTI service")
if self.config.active_tti_service == "diffusers":
from lollms.services.diffusers.lollms_diffusers import LollmsDiffusers
self.tti = LollmsDiffusers(self)
elif self.config.active_tti_service == "autosd":
if self.sd:
self.tti = self.sd
else:
from lollms.services.sd.lollms_sd import LollmsSD
self.tti = LollmsSD(self)
elif self.config.active_tti_service == "dall-e":
from lollms.services.dalle.lollms_dalle import LollmsDalle
self.tti = LollmsDalle(self, self.config.dall_e_key)
elif self.config.active_tti_service == "midjourney":
from lollms.services.midjourney.lollms_midjourney import LollmsMidjourney
self.tti = LollmsMidjourney(self, self.config.midjourney_key, self.config.midjourney_timeout, self.config.midjourney_retries)
elif self.config.active_tti_service == "comfyui" and (self.tti is None or self.tti.name!="comfyui"):
if self.comfyui:
self.tti = self.comfyui
else:
from lollms.services.comfyui.lollms_comfyui import LollmsComfyUI
self.tti = LollmsComfyUI(self, comfyui_base_url=self.config.comfyui_base_url)
ASCIIColors.blue("Activating TTS services")
if self.config.active_tts_service == "eleven_labs_tts":
from lollms.services.eleven_labs_tts.lollms_eleven_labs_tts import LollmsElevenLabsTTS
self.tts = LollmsElevenLabsTTS(self, self.config.elevenlabs_tts_model_id, self.config.elevenlabs_tts_voice_id, self.config.elevenlabs_tts_key, stability=self.config.elevenlabs_tts_voice_stability, similarity_boost=self.config.elevenlabs_tts_voice_boost)
elif self.config.active_tts_service == "openai_tts":
from lollms.services.open_ai_tts.lollms_openai_tts import LollmsOpenAITTS
self.tts = LollmsOpenAITTS(self, self.config.openai_tts_model, self.config.openai_tts_voice, self.config.openai_tts_key)
elif self.config.active_tts_service == "xtts" and self.xtts:
self.tts = self.xtts
ASCIIColors.blue("Loading STT services")
if self.config.active_stt_service == "openai_whisper":
from lollms.services.openai_whisper.lollms_openai_whisper import LollmsOpenAIWhisper
self.stt = LollmsOpenAIWhisper(self, self.config.openai_whisper_model, self.config.openai_whisper_key)
elif self.config.active_stt_service == "whisper":
from lollms.services.whisper.lollms_whisper import LollmsWhisper
self.stt = LollmsWhisper(self, self.config.whisper_model)
def verify_servers(self, reload_all=False): def verify_servers(self, reload_all=False):
@ -1345,7 +1353,7 @@ class LollmsApplication(LoLLMsCom):
# Check if the message content is not empty and visible to the AI # Check if the message content is not empty and visible to the AI
if message.content != '' and ( if message.content != '' and (
message.message_type <= MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER.value and message.message_type != MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value): message.message_type <= MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER.value and message.message_type != MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI.value):
# Tokenize the message content # Tokenize the message content
if self.config.use_model_name_in_discussions: if self.config.use_model_name_in_discussions:
@ -1377,7 +1385,7 @@ class LollmsApplication(LoLLMsCom):
# Check if the message content is not empty and visible to the AI # Check if the message content is not empty and visible to the AI
if message.content != '' and ( if message.content != '' and (
message.message_type <= MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER.value and message.message_type != MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value): message.message_type <= MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER.value and message.message_type != MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI.value):
if self.config.use_model_name_in_discussions: if self.config.use_model_name_in_discussions:
if message.model: if message.model:

View File

@ -4,7 +4,7 @@ from lollms.paths import LollmsPaths
from threading import Thread from threading import Thread
class Client: class Client:
def __init__(self, lollms_paths:LollmsPaths, client_id, discussion:Discussion, db:DiscussionsDB): def __init__(self, lollms_paths:LollmsPaths, client_id:str, discussion:Discussion, db:DiscussionsDB):
self.client_id = client_id self.client_id = client_id
self.discussion = discussion self.discussion = discussion
self.lollms_paths = lollms_paths self.lollms_paths = lollms_paths

View File

@ -1,8 +1,9 @@
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
from lollms.types import MSG_TYPE, SENDER_TYPES from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES, MSG_TYPE
from typing import Callable from typing import Callable
import socketio import socketio
from enum import Enum from enum import Enum
from lollms.types import MSG_OPERATION_TYPE
class NotificationType(Enum): class NotificationType(Enum):
"""Notification types.""" """Notification types."""
@ -155,12 +156,12 @@ class LoLLMsCom:
parameters=None, parameters=None,
metadata=None, metadata=None,
ui=None, ui=None,
message_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_FULL, message_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CONTENT,
sender_type:SENDER_TYPES=SENDER_TYPES.SENDER_TYPES_AI, sender_type:SENDER_TYPES=SENDER_TYPES.SENDER_TYPES_AI,
open=False open=False
): ):
pass pass
def full(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end """This sends full text to front end
Args: Args:

View File

@ -3,7 +3,7 @@ import sqlite3
from pathlib import Path from pathlib import Path
from datetime import datetime from datetime import datetime
from ascii_colors import ASCIIColors, trace_exception from ascii_colors import ASCIIColors, trace_exception
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.types import BindingType from lollms.types import BindingType
from lollms.utilities import PackageManager, discussion_path_to_url from lollms.utilities import PackageManager, discussion_path_to_url
from lollms.paths import LollmsPaths from lollms.paths import LollmsPaths
@ -18,7 +18,7 @@ import json
import shutil import shutil
from lollms.tasks import TasksLibrary from lollms.tasks import TasksLibrary
import json import json
from typing import Dict, Any from typing import Dict, Any, List
__author__ = "parisneo" __author__ = "parisneo"
__github__ = "https://github.com/ParisNeo/lollms-webui" __github__ = "https://github.com/ParisNeo/lollms-webui"
@ -40,7 +40,7 @@ class DiscussionsDB:
self.discussion_db_file_path = self.discussion_db_path/"database.db" self.discussion_db_file_path = self.discussion_db_path/"database.db"
def create_tables(self): def create_tables(self):
db_version = 13 db_version = 14
with sqlite3.connect(self.discussion_db_file_path) as conn: with sqlite3.connect(self.discussion_db_file_path) as conn:
cursor = conn.cursor() cursor = conn.cursor()
@ -77,6 +77,7 @@ class DiscussionsDB:
finished_generating_at TIMESTAMP, finished_generating_at TIMESTAMP,
nb_tokens INT, nb_tokens INT,
discussion_id INTEGER NOT NULL, discussion_id INTEGER NOT NULL,
steps TEXT,
metadata TEXT, metadata TEXT,
ui TEXT, ui TEXT,
FOREIGN KEY (discussion_id) REFERENCES discussion(id), FOREIGN KEY (discussion_id) REFERENCES discussion(id),
@ -119,6 +120,7 @@ class DiscussionsDB:
'created_at', 'created_at',
'metadata', 'metadata',
'ui', 'ui',
'steps',
'started_generating_at', 'started_generating_at',
'finished_generating_at', 'finished_generating_at',
'nb_tokens', 'nb_tokens',
@ -138,6 +140,8 @@ class DiscussionsDB:
cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TIMESTAMP") cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TIMESTAMP")
elif column=='metadata': elif column=='metadata':
cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TEXT") cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TEXT")
elif column=='steps':
cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TEXT")
elif column=='message_type': elif column=='message_type':
cursor.execute(f"ALTER TABLE {table} RENAME COLUMN type TO {column}") cursor.execute(f"ALTER TABLE {table} RENAME COLUMN type TO {column}")
elif column=='sender_type': elif column=='sender_type':
@ -497,11 +501,12 @@ class Message:
def __init__( def __init__(
self, self,
discussion_id, discussion_id,
discussions_db, discussions_db: DiscussionsDB,
message_type, message_type,
sender_type, sender_type,
sender, sender,
content, content,
steps:list = [],
metadata = None, metadata = None,
ui = None, ui = None,
rank = 0, rank = 0,
@ -517,29 +522,33 @@ class Message:
insert_into_db = False insert_into_db = False
): ):
self.discussion_id = discussion_id self.discussion_id = discussion_id
self.discussions_db = discussions_db self.discussions_db = discussions_db
self.self = self self.self = self
self.sender = sender self.sender = sender
self.sender_type = sender_type self.sender_type = sender_type
self.content = content self.content = content
self.message_type = message_type try:
self.rank = rank self.steps = steps if type(steps)==list else json.loads(steps)
self.parent_message_id = parent_message_id except:
self.binding = binding self.steps = []
self.model = model self.message_type = message_type
self.metadata = json.dumps(metadata, indent=4) if metadata is not None and type(metadata)== dict else metadata self.rank = rank
self.ui = ui self.parent_message_id = parent_message_id
self.personality = personality self.binding = binding
self.created_at = created_at self.model = model
self.started_generating_at = started_generating_at self.metadata = json.dumps(metadata, indent=4) if metadata is not None and type(metadata)== dict else metadata
self.ui = ui
self.personality = personality
self.created_at = created_at
self.started_generating_at = started_generating_at
self.finished_generating_at = finished_generating_at self.finished_generating_at = finished_generating_at
self.nb_tokens = nb_tokens self.nb_tokens = nb_tokens
if insert_into_db: if insert_into_db:
self.id = self.discussions_db.insert( self.id = self.discussions_db.insert(
"INSERT INTO message (sender, message_type, sender_type, sender, content, metadata, ui, rank, parent_message_id, binding, model, personality, created_at, started_generating_at, finished_generating_at, nb_tokens, discussion_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "INSERT INTO message (sender, message_type, sender_type, sender, content, steps, metadata, ui, rank, parent_message_id, binding, model, personality, created_at, started_generating_at, finished_generating_at, nb_tokens, discussion_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(sender, message_type, sender_type, sender, content, metadata, ui, rank, parent_message_id, binding, model, personality, created_at, started_generating_at, finished_generating_at, nb_tokens, discussion_id) (sender, message_type, sender_type, sender, content, str(steps), metadata, ui, rank, parent_message_id, binding, model, personality, created_at, started_generating_at, finished_generating_at, nb_tokens, discussion_id)
) )
else: else:
self.id = id self.id = id
@ -554,6 +563,7 @@ class Message:
"sender", "sender",
"content", "content",
"metadata", "metadata",
"steps",
"ui", "ui",
"rank", "rank",
"parent_message_id", "parent_message_id",
@ -607,7 +617,7 @@ class Message:
params = [new_content] params = [new_content]
if new_metadata is not None: if new_metadata is not None:
text+=", metadata = ?" text+=", metadata = ?"
params.append(new_metadata) params.append(new_metadata if type(new_metadata)==str else json.dumps(new_metadata) if type(new_metadata)==dict else None)
self.metadata=new_metadata self.metadata=new_metadata
if new_ui is not None: if new_ui is not None:
text+=", ui = ?" text+=", ui = ?"
@ -632,14 +642,105 @@ class Message:
text, tuple(params) text, tuple(params)
) )
def update_content(self, new_content, started_generating_at=None, nb_tokens=None, commit=True):
    """Persist a new content value for this message row.

    Always refreshes ``finished_generating_at`` with the current time.
    When provided, ``started_generating_at`` and ``nb_tokens`` are stored
    as well (both in the database and on the instance).

    Args:
        new_content (str): The new message content.
        started_generating_at: Optional generation start timestamp.
        nb_tokens (int): Optional token count of the content.
        commit (bool): Unused; kept for interface compatibility.
    """
    self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    assignments = ["content = ?"]
    values = [new_content]
    if started_generating_at is not None:
        self.started_generating_at = started_generating_at
        assignments.append("started_generating_at = ?")
        values.append(started_generating_at)
    if nb_tokens is not None:
        self.nb_tokens = nb_tokens
        assignments.append("nb_tokens = ?")
        values.append(nb_tokens)
    assignments.append("finished_generating_at = ?")
    values.append(self.finished_generating_at)
    values.append(self.id)
    query = "UPDATE message SET " + ", ".join(assignments) + " WHERE id = ?"
    self.discussions_db.update(query, tuple(values))
def update_steps(self, steps:list, step_type:str, status:bool):
    """Replace this message's steps list and persist it as JSON.

    Also refreshes ``finished_generating_at`` on the instance (the
    timestamp itself is not written to the database by this method).

    Args:
        steps (list): The new list of step dictionaries.
        step_type (str): Unused; kept for interface compatibility.
        status (bool): Unused; kept for interface compatibility.
    """
    self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    self.steps = steps
    query = "UPDATE message SET steps = ? WHERE id = ?"
    self.discussions_db.update(query, (json.dumps(self.steps), self.id))
def update_metadata(self, new_metadata):
    """Persist new metadata for this message, refreshing the finish timestamp.

    Args:
        new_metadata: Metadata as a str (stored verbatim), as a
            JSON-serializable object (serialized with json.dumps),
            or None (stored as NULL).
    """
    self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    if new_metadata is None:
        serialized = None
    elif type(new_metadata) is str:
        serialized = new_metadata
    else:
        serialized = json.dumps(new_metadata)
    self.discussions_db.update(
        "UPDATE message SET metadata = ?, finished_generating_at = ? WHERE id = ?",
        (serialized, self.finished_generating_at, self.id),
    )
def update_ui(self, new_ui):
    """Persist new UI content for this message, refreshing the finish timestamp.

    Args:
        new_ui: The UI payload; coerced to str before storage,
            or stored as NULL when None.
    """
    self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    serialized = None if new_ui is None else str(new_ui)
    self.discussions_db.update(
        "UPDATE message SET ui = ?, finished_generating_at = ? WHERE id = ?",
        (serialized, self.finished_generating_at, self.id),
    )
def add_step(self, step: str, step_type: str, status: bool, done: bool):
    """Register a step on this message, then persist the steps list as JSON.

    If a step with the same text already exists it is updated in place;
    otherwise a new step entry is appended with a sequential id.

    Args:
        step (str): The step text (used as the lookup key).
        step_type (str): The kind of step.
        status (bool): Current status of the step.
        done (bool): Whether the step has finished.
    """
    self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # Reuse an existing entry with the same text when present.
    existing = next((s for s in self.steps if s['text'] == step), None)
    if existing is not None:
        existing['step_type'] = step_type
        existing['status'] = status
        existing['done'] = done
    else:
        self.steps.append({
            "id": len(self.steps),
            "text": step,
            "step_type": step_type,
            "status": status,
            "done": done,
        })
    # Persist the whole list in one write.
    self.discussions_db.update(
        "UPDATE message SET steps = ? WHERE id = ?",
        (json.dumps(self.steps), self.id),
    )
def to_json(self): def to_json(self):
attributes = Message.get_fields() attributes = Message.get_fields()
msgJson = {} msgJson = {}
for attribute_name in attributes: for attribute_name in attributes:
attribute_value = getattr(self, attribute_name, None) attribute_value = getattr(self, attribute_name, None)
if attribute_name=="metadata": if attribute_name in ["metadata","steps"]:
if type(attribute_value) == str: if type(attribute_value) == str:
msgJson[attribute_name] = json.loads(attribute_value) try:
msgJson[attribute_name] = json.loads(attribute_value.replace("'", '"'))
except Exception as ex:
trace_exception(ex)
msgJson[attribute_name] = None
else: else:
msgJson[attribute_name] = attribute_value msgJson[attribute_name] = attribute_value
else: else:
@ -647,7 +748,7 @@ class Message:
return msgJson return msgJson
class Discussion: class Discussion:
def __init__(self, lollms:LoLLMsCom, discussion_id, discussions_db:DiscussionsDB): def __init__(self, lollms:LoLLMsCom, discussion_id:int, discussions_db:DiscussionsDB):
self.lollms = lollms self.lollms = lollms
self.current_message = None self.current_message = None
self.discussion_id = discussion_id self.discussion_id = discussion_id
@ -725,7 +826,7 @@ class Discussion:
try: try:
self.vectorizer.remove_document(fn) self.vectorizer.remove_document(fn)
if callback is not None: if callback is not None:
callback("File removed successfully",MSG_TYPE.MSG_TYPE_INFO) callback("File removed successfully",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
return True return True
except ValueError as ve: except ValueError as ve:
ASCIIColors.error(f"Couldn't remove the file") ASCIIColors.error(f"Couldn't remove the file")
@ -778,7 +879,7 @@ class Discussion:
if path.suffix in [".wav",".mp3"]: if path.suffix in [".wav",".mp3"]:
self.audio_files.append(path) self.audio_files.append(path)
if process: if process:
self.lollms.new_message(client.client_id if client is not None else 0, content = "", message_type = MSG_TYPE.MSG_TYPE_FULL) self.lollms.new_message(client.client_id if client is not None else 0, content = "", message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
if self.lollms.stt is None: if self.lollms.stt is None:
self.lollms.info("Please select an stt engine in the services settings first") self.lollms.info("Please select an stt engine in the services settings first")
self.lollms.info(f"Transcribing ... ") self.lollms.info(f"Transcribing ... ")
@ -799,7 +900,7 @@ class Discussion:
pth = str(view_file).replace("\\","/").split('/') pth = str(view_file).replace("\\","/").split('/')
if "discussion_databases" in pth: if "discussion_databases" in pth:
pth = discussion_path_to_url(view_file) pth = discussion_path_to_url(view_file)
self.lollms.new_message(client.client_id if client is not None else 0, content = "", message_type = MSG_TYPE.MSG_TYPE_FULL) self.lollms.new_message(client.client_id if client is not None else 0, content = "", message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
output = f'<img src="{pth}" width="800">\n\n' output = f'<img src="{pth}" width="800">\n\n'
self.lollms.full(output, client_id=client.client_id) self.lollms.full(output, client_id=client.client_id)
self.lollms.close_message(client.client_id if client is not None else 0) self.lollms.close_message(client.client_id if client is not None else 0)
@ -827,7 +928,7 @@ class Discussion:
ASCIIColors.error("Couldn't create new message") ASCIIColors.error("Couldn't create new message")
ASCIIColors.info("Received image file") ASCIIColors.info("Received image file")
if callback is not None: if callback is not None:
callback("Image file added successfully", MSG_TYPE.MSG_TYPE_INFO) callback("Image file added successfully", MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
else: else:
try: try:
# self.ShowBlockingMessage("Adding file to vector store.\nPlease stand by") # self.ShowBlockingMessage("Adding file to vector store.\nPlease stand by")
@ -845,7 +946,7 @@ class Discussion:
self.vectorizer.add_document(path.stem, data, path, True) self.vectorizer.add_document(path.stem, data, path, True)
self.vectorizer.build_index() self.vectorizer.build_index()
if callback is not None: if callback is not None:
callback("File added successfully",MSG_TYPE.MSG_TYPE_INFO) callback("File added successfully",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
self.lollms.HideBlockingMessage(client.client_id) self.lollms.HideBlockingMessage(client.client_id)
return True return True
except Exception as e: except Exception as e:
@ -866,15 +967,16 @@ class Discussion:
self, self,
message_type, message_type,
sender_type, sender_type,
sender, sender:str,
content, content:str,
steps:list,
metadata=None, metadata=None,
ui=None, ui:str|None=None,
rank=0, rank:int=0,
parent_message_id=0, parent_message_id=0,
binding="", binding:str="",
model ="", model:str ="",
personality="", personality:str="",
created_at=None, created_at=None,
started_generating_at=None, started_generating_at=None,
finished_generating_at=None, finished_generating_at=None,
@ -908,6 +1010,7 @@ class Discussion:
sender_type, sender_type,
sender, sender,
content, content,
steps,
metadata, metadata,
ui, ui,
rank, rank,
@ -1017,7 +1120,7 @@ class Discussion:
f"DELETE FROM discussion WHERE id={self.discussion_id}" f"DELETE FROM discussion WHERE id={self.discussion_id}"
) )
def get_messages(self): def get_messages(self)->List[Message]:
"""Gets a list of messages information """Gets a list of messages information
Returns: Returns:
@ -1062,6 +1165,44 @@ class Discussion:
""" """
self.current_message.update(new_content, new_metadata, new_ui, started_generating_at, nb_tokens) self.current_message.update(new_content, new_metadata, new_ui, started_generating_at, nb_tokens)
def update_message_content(self, new_content, started_generating_at=None, nb_tokens=None):
    """Update the content of the currently active message.

    Args:
        new_content (str): The new message content.
        started_generating_at: Optional generation start timestamp.
        nb_tokens (int): Optional token count of the content.
    """
    self.current_message.update_content(
        new_content,
        started_generating_at,
        nb_tokens,
    )
def update_message_steps(self, steps):
    """Update the steps list of the currently active message.

    Args:
        steps (list): The new list of step dictionaries to store.
    """
    # Bug fix: the original body referenced undefined names
    # (new_content, started_generating_at, nb_tokens), which raised a
    # NameError on every call. Forward the actual `steps` argument.
    # Message.update_steps takes (steps, step_type, status) but ignores
    # the last two, so neutral placeholders are passed.
    self.current_message.update_steps(steps, "", True)
def update_message_metadata(self, new_metadata):
    """Update the metadata of the currently active message.

    Args:
        new_metadata: The new metadata (str, JSON-serializable object,
            or None).
    """
    self.current_message.update_metadata(new_metadata)
def update_message_ui(self, new_ui):
    """Update the UI content of the currently active message.

    Args:
        new_ui: The new UI payload to store (or None to clear it).
    """
    self.current_message.update_ui(new_ui)
def edit_message(self, message_id, new_content, new_metadata=None, new_ui=None): def edit_message(self, message_id, new_content, new_metadata=None, new_ui=None):
"""Edits the content of a message """Edits the content of a message

View File

@ -9,7 +9,7 @@ License: Apache 2.0
from lollms.utilities import PackageManager from lollms.utilities import PackageManager
from lollms.com import LoLLMsCom from lollms.com import LoLLMsCom
from lollms.utilities import trace_exception, run_async, install_conda_package from lollms.utilities import trace_exception, run_async, install_conda_package
from lollms.types import MSG_TYPE, SENDER_TYPES from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
from lollms.client_session import Session from lollms.client_session import Session
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
import platform import platform

View File

@ -33,7 +33,7 @@ import subprocess
import yaml import yaml
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
import time import time
from lollms.types import MSG_TYPE, SUMMARY_MODE from lollms.types import MSG_OPERATION_TYPE, SUMMARY_MODE
import json import json
from typing import Any, List, Optional, Type, Callable, Dict, Any, Union from typing import Any, List, Optional, Type, Callable, Dict, Any, Union
import json import json
@ -118,7 +118,7 @@ class AIPersonality:
ignore_discussion_documents_rag=False, ignore_discussion_documents_rag=False,
is_relative_path=True, is_relative_path=True,
installation_option:InstallOption=InstallOption.INSTALL_IF_NECESSARY, installation_option:InstallOption=InstallOption.INSTALL_IF_NECESSARY,
callback: Callable[[str, MSG_TYPE, dict, list], bool]=None callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None
): ):
""" """
Initialize an AIPersonality instance. Initialize an AIPersonality instance.
@ -298,7 +298,7 @@ class AIPersonality:
def new_message(self, message_text:str, message_type:MSG_TYPE= MSG_TYPE.MSG_TYPE_FULL, metadata=[], callback: Callable[[str, int, dict, list, Any], bool]=None): def new_message(self, message_text:str, message_type:MSG_OPERATION_TYPE= MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, metadata=[], callback: Callable[[str, int, dict, list, Any], bool]=None):
"""This sends step rogress to front end """This sends step rogress to front end
Args: Args:
@ -309,9 +309,9 @@ class AIPersonality:
callback = self.callback callback = self.callback
if callback: if callback:
callback(message_text, MSG_TYPE.MSG_TYPE_NEW_MESSAGE, parameters={'type':message_type.value,'metadata':metadata}, personality=self) callback(message_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_NEW_MESSAGE, parameters={'type':message_type.value,'metadata':metadata}, personality=self)
def full(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end """This sends full text to front end
Args: Args:
@ -322,9 +322,9 @@ class AIPersonality:
callback = self.callback callback = self.callback
if callback: if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL) callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
def ui(self, ui_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def ui(self, ui_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends ui text to front end """This sends ui text to front end
Args: Args:
@ -335,10 +335,10 @@ class AIPersonality:
callback = self.callback callback = self.callback
if callback: if callback:
callback(ui_text, MSG_TYPE.MSG_TYPE_UI) callback(ui_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI)
def full_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def set_message_content_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end (INVISIBLE to AI) """This sends full text to front end (INVISIBLE to AI)
Args: Args:
@ -349,9 +349,9 @@ class AIPersonality:
callback = self.callback callback = self.callback
if callback: if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI) callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI)
def full_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def set_message_content_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end (INVISIBLE to user) """This sends full text to front end (INVISIBLE to user)
Args: Args:
@ -362,7 +362,7 @@ class AIPersonality:
callback = self.callback callback = self.callback
if callback: if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER) callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER)
def build_prompt(self, prompt_parts:List[str], sacrifice_id:int=-1, context_size:int=None, minimum_spare_context_size:int=None): def build_prompt(self, prompt_parts:List[str], sacrifice_id:int=-1, context_size:int=None, minimum_spare_context_size:int=None):
@ -533,7 +533,7 @@ class AIPersonality:
ASCIIColors.red("Model failed to rank inputs") ASCIIColors.red("Model failed to rank inputs")
return None return None
def step_start(self, step_text, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def step_start(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This triggers a step start """This triggers a step start
Args: Args:
@ -544,7 +544,7 @@ class AIPersonality:
callback = self.callback callback = self.callback
if callback: if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_START) callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_START)
def step_end(self, step_text, status=True, callback: Callable[[str, int, dict, list], bool]=None): def step_end(self, step_text, status=True, callback: Callable[[str, int, dict, list], bool]=None):
"""This triggers a step end """This triggers a step end
@ -557,9 +557,9 @@ class AIPersonality:
callback = self.callback callback = self.callback
if callback: if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_END, {'status':status}) callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
def step(self, step_text, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def step(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This triggers a step information """This triggers a step information
Args: Args:
@ -575,7 +575,7 @@ class AIPersonality:
callback = self.callback callback = self.callback
if callback: if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP) callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP)
def print_prompt(self, title, prompt): def print_prompt(self, title, prompt):
ASCIIColors.red("*-*-*-*-*-*-*-* ", end="") ASCIIColors.red("*-*-*-*-*-*-*-* ", end="")
@ -696,14 +696,14 @@ class AIPersonality:
def process(self, text:str, message_type:MSG_TYPE, callback=None, show_progress=False): def process(self, text:str, message_type:MSG_OPERATION_TYPE, callback=None, show_progress=False):
if callback is None: if callback is None:
callback = self.callback callback = self.callback
if text is None: if text is None:
return True return True
if message_type==MSG_TYPE.MSG_TYPE_CHUNK: if message_type==MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK:
bot_says = self.bot_says + text bot_says = self.bot_says + text
elif message_type==MSG_TYPE.MSG_TYPE_FULL: elif message_type==MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT:
bot_says = text bot_says = text
if show_progress: if show_progress:
@ -773,7 +773,7 @@ class AIPersonality:
return self.bot_says return self.bot_says
def setCallback(self, callback: Callable[[str, MSG_TYPE, dict, list], bool]): def setCallback(self, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]):
self.callback = callback self.callback = callback
if self._processor: if self._processor:
self._processor.callback = callback self._processor.callback = callback
@ -967,7 +967,7 @@ class AIPersonality:
try: try:
self.vectorizer.remove_document(fn) self.vectorizer.remove_document(fn)
if callback is not None: if callback is not None:
callback("File removed successfully",MSG_TYPE.MSG_TYPE_INFO) callback("File removed successfully",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
return True return True
except ValueError as ve: except ValueError as ve:
ASCIIColors.error(f"Couldn't remove the file") ASCIIColors.error(f"Couldn't remove the file")
@ -1018,7 +1018,7 @@ class AIPersonality:
f.write(text) f.write(text)
self.info(f"File saved to {transcription_fn}") self.info(f"File saved to {transcription_fn}")
self.full(text) self.set_message_content(text)
elif path.suffix in [".png",".jpg",".jpeg",".gif",".bmp",".svg",".webp"]: elif path.suffix in [".png",".jpg",".jpeg",".gif",".bmp",".svg",".webp"]:
self.image_files.append(path) self.image_files.append(path)
if process: if process:
@ -1027,9 +1027,9 @@ class AIPersonality:
pth = str(path).replace("\\","/").split('/') pth = str(path).replace("\\","/").split('/')
if "discussion_databases" in pth: if "discussion_databases" in pth:
pth = discussion_path_to_url(path) pth = discussion_path_to_url(path)
self.new_message("",MSG_TYPE.MSG_TYPE_FULL) self.new_message("",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
output = f'<img src="{pth}" width="800">\n\n' output = f'<img src="{pth}" width="800">\n\n'
self.full(output) self.set_message_content(output)
self.app.close_message(client.client_id if client is not None else 0) self.app.close_message(client.client_id if client is not None else 0)
if self.model.binding_type not in [BindingType.TEXT_IMAGE, BindingType.TEXT_IMAGE_VIDEO]: if self.model.binding_type not in [BindingType.TEXT_IMAGE, BindingType.TEXT_IMAGE_VIDEO]:
@ -1040,7 +1040,7 @@ class AIPersonality:
img = img.convert("RGB") img = img.convert("RGB")
output += "## image description :\n"+ self.model.interrogate_blip([img])[0] output += "## image description :\n"+ self.model.interrogate_blip([img])[0]
# output += "## image description :\n"+ self.model.qna_blip([img],"q:Describe this photo with as much details as possible.\na:")[0] # output += "## image description :\n"+ self.model.qna_blip([img],"q:Describe this photo with as much details as possible.\na:")[0]
self.full(output) self.set_message_content(output)
self.app.close_message(client.client_id if client is not None else 0) self.app.close_message(client.client_id if client is not None else 0)
self.HideBlockingMessage("Understanding image (please wait)") self.HideBlockingMessage("Understanding image (please wait)")
if self.config.debug: if self.config.debug:
@ -1055,7 +1055,7 @@ class AIPersonality:
ASCIIColors.error("Couldn't create new message") ASCIIColors.error("Couldn't create new message")
ASCIIColors.info("Received image file") ASCIIColors.info("Received image file")
if callback is not None: if callback is not None:
callback("Image file added successfully", MSG_TYPE.MSG_TYPE_INFO) callback("Image file added successfully", MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
else: else:
try: try:
# self.ShowBlockingMessage("Adding file to vector store.\nPlease stand by") # self.ShowBlockingMessage("Adding file to vector store.\nPlease stand by")
@ -1075,7 +1075,7 @@ class AIPersonality:
self.vectorizer.add_document(path.stem, data, path, True) self.vectorizer.add_document(path.stem, data, path, True)
self.vectorizer.build_index() self.vectorizer.build_index()
if callback is not None: if callback is not None:
callback("File added successfully",MSG_TYPE.MSG_TYPE_INFO) callback("File added successfully",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
self.HideBlockingMessage(client.client_id) self.HideBlockingMessage(client.client_id)
return True return True
except Exception as e: except Exception as e:
@ -2030,7 +2030,7 @@ class StateMachine:
def process_state(self, command, full_context, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None, context_state:dict=None, client:Client=None): def process_state(self, command, full_context, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None, context_state:dict=None, client:Client=None):
""" """
Process the given command based on the current state. Process the given command based on the current state.
@ -2322,7 +2322,7 @@ class APScript(StateMachine):
def add_file(self, path, client:Client, callback=None, process=True): def add_file(self, path, client:Client, callback=None, process=True):
self.personality.add_file(path, client=client,callback=callback, process=process) self.personality.add_file(path, client=client,callback=callback, process=process)
if callback is not None: if callback is not None:
callback("File added successfully",MSG_TYPE.MSG_TYPE_INFO) callback("File added successfully",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
return True return True
def remove_file(self, path): def remove_file(self, path):
@ -2385,7 +2385,7 @@ class APScript(StateMachine):
return self.personality.generate(prompt, max_size, temperature, top_k, top_p, repeat_penalty, repeat_last_n, callback, debug=debug) return self.personality.generate(prompt, max_size, temperature, top_k, top_p, repeat_penalty, repeat_last_n, callback, debug=debug)
def run_workflow(self, prompt:str, previous_discussion_text:str="", callback: Callable[[str, MSG_TYPE, dict, list], bool]=None, context_details:dict=None, client:Client=None): def run_workflow(self, prompt:str, previous_discussion_text:str="", callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None, context_details:dict=None, client:Client=None):
""" """
This function generates code based on the given parameters. This function generates code based on the given parameters.
@ -2865,29 +2865,29 @@ class APScript(StateMachine):
prompt_parts[sacrifice_id] = sacrifice_text prompt_parts[sacrifice_id] = sacrifice_text
return self.separator_template.join([s for s in prompt_parts if s!=""]) return self.separator_template.join([s for s in prompt_parts if s!=""])
# ================================================= Sending commands to ui =========================================== # ================================================= Sending commands to ui ===========================================
def add_collapsible_entry(self, title, content, subtitle=""): def add_collapsible_entry(self, title, content, subtitle="", open_by_default=False):
return "\n".join( open_attr = 'open' if open_by_default else ''
[ return "\n".join([
f'<details class="flex w-full rounded-xl border border-gray-200 bg-white shadow-sm dark:border-gray-800 dark:bg-gray-900 mb-3.5 max-w-full svelte-1escu1z" open="">', f'<details class="w-full rounded-xl border border-gray-200 bg-white shadow-sm dark:border-gray-700 dark:bg-gray-800 mb-4 transition-all duration-300 ease-in-out hover:shadow-md focus-within:ring-2 focus-within:ring-blue-500 dark:focus-within:ring-blue-400" {open_attr}>',
f' <summary class="grid w-full select-none grid-cols-[40px,1fr] items-center gap-2.5 p-2 svelte-1escu1z">', f' <summary class="flex items-center justify-between p-4 cursor-pointer select-none transition-all duration-300 ease-in-out">',
f' <dl class="leading-4">', f' <div class="flex items-center space-x-3">',
f' <dd class="text-sm"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-arrow-right">', f' <div class="flex-shrink-0">',
f' <line x1="5" y1="12" x2="19" y2="12"></line>', f' <svg class="w-5 h-5 text-gray-500 dark:text-gray-400 transition-transform duration-300 transform group-open:rotate-90" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="currentColor">',
f' <polyline points="12 5 19 12 12 19"></polyline>', f' <path fill-rule="evenodd" d="M7.293 14.707a1 1 0 010-1.414L10.586 10 7.293 6.707a1 1 0 011.414-1.414l4 4a1 1 0 010 1.414l-4 4a1 1 0 01-1.414 0z" clip-rule="evenodd" />',
f' </svg>', f' </svg>',
f' </dd>', f' </div>',
f' </dl>', f' <div>',
f' <dl class="leading-4">', f' <h3 class="text-lg font-semibold text-gray-900 dark:text-white">{title}</h3>',
f' <dd class="text-sm"><h3>{title}</h3></dd>', f' <p class="text-sm text-gray-500 dark:text-gray-400">{subtitle}</p>',
f' <dt class="flex items-center gap-1 truncate whitespace-nowrap text-[.82rem] text-gray-400">{subtitle}</dt>', f' </div>',
f' </dl>', f' </div>',
f' </summary>', f' </summary>',
f' <div class="content px-5 pb-5 pt-4">', f' <div class="px-4 pb-4 pt-2 text-gray-700 dark:text-gray-300 transition-all duration-300 ease-in-out max-h-0 overflow-hidden group-open:max-h-40">',
content, content,
f' </div>', f' </div>',
f' </details>\n' f'</details>\n'
] ])
)
def internet_search_with_vectorization(self, query, quick_search:bool=False ): def internet_search_with_vectorization(self, query, quick_search:bool=False ):
@ -2918,7 +2918,7 @@ class APScript(StateMachine):
return chunks return chunks
def step_start(self, step_text, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def step_start(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This triggers a step start """This triggers a step start
Args: Args:
@ -2929,7 +2929,7 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_START) callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_START)
def step_end(self, step_text, status=True, callback: Callable[[str, int, dict, list], bool]=None): def step_end(self, step_text, status=True, callback: Callable[[str, int, dict, list], bool]=None):
"""This triggers a step end """This triggers a step end
@ -2942,9 +2942,9 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_END, {'status':status}) callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS if status else MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_FAILURE)
def step(self, step_text, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def step(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This triggers a step information """This triggers a step information
Args: Args:
@ -2960,9 +2960,9 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP) callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP)
def exception(self, ex, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def exception(self, ex, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends exception to the client """This sends exception to the client
Args: Args:
@ -2978,9 +2978,9 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(str(ex), MSG_TYPE.MSG_TYPE_EXCEPTION) callback(str(ex), MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_EXCEPTION)
def warning(self, warning:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def warning(self, warning:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends exception to the client """This sends exception to the client
Args: Args:
@ -2996,7 +2996,7 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(warning, MSG_TYPE.MSG_TYPE_EXCEPTION) callback(warning, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_EXCEPTION)
def json(self, title:str, json_infos:dict, callback: Callable[[str, int, dict, list], bool]=None, indent=4): def json(self, title:str, json_infos:dict, callback: Callable[[str, int, dict, list], bool]=None, indent=4):
@ -3015,9 +3015,9 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback("", MSG_TYPE.MSG_TYPE_JSON_INFOS, metadata = [{"title":title, "content":json.dumps(json_infos, indent=indent)}]) callback("", MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_JSON_INFOS, metadata = [{"title":title, "content":json.dumps(json_infos, indent=indent)}])
def ui(self, html_ui:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def ui(self, html_ui:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends ui elements to front end """This sends ui elements to front end
Args: Args:
@ -3033,10 +3033,10 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(html_ui, MSG_TYPE.MSG_TYPE_UI) callback(html_ui, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI)
def ui_in_iframe(self, html_ui:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def ui_in_iframe(self, html_ui:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends ui elements to front end inside an iframe """This sends ui elements to front end inside an iframe
Args: Args:
@ -3053,13 +3053,13 @@ class APScript(StateMachine):
if callback: if callback:
iframe_html = f'<iframe class="w-full" srcdoc="{html_ui}" style="width:100%; height:100%; border:none;"></iframe>' iframe_html = f'<iframe class="w-full" srcdoc="{html_ui}" style="width:100%; height:100%; border:none;"></iframe>'
callback(iframe_html, MSG_TYPE.MSG_TYPE_UI) callback(iframe_html, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI)
def code(self, code:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def code(self, code:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends code to front end """This sends code to front end
Args: Args:
@ -3075,9 +3075,9 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(code, MSG_TYPE.MSG_TYPE_CODE) callback(code, MSG_OPERATION_TYPE.MSG_TYPE_CODE)
def chunk(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def chunk(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end """This sends full text to front end
Args: Args:
@ -3088,10 +3088,10 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_CHUNK) callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK)
def full(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None, msg_type:MSG_TYPE = MSG_TYPE.MSG_TYPE_FULL): def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None, msg_type:MSG_OPERATION_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT):
"""This sends full text to front end """This sends full text to front end
Args: Args:
@ -3104,7 +3104,7 @@ class APScript(StateMachine):
if callback: if callback:
callback(full_text, msg_type) callback(full_text, msg_type)
def full_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def set_message_content_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end (INVISIBLE to AI) """This sends full text to front end (INVISIBLE to AI)
Args: Args:
@ -3115,9 +3115,9 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI) callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI)
def full_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def set_message_content_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end (INVISIBLE to user) """This sends full text to front end (INVISIBLE to user)
Args: Args:
@ -3128,7 +3128,7 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER) callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER)
@ -3752,7 +3752,7 @@ class APScript(StateMachine):
verbose=verbose verbose=verbose
) )
def info(self, info_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def info(self, info_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends info text to front end """This sends info text to front end
Args: Args:
@ -3763,9 +3763,9 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(info_text, MSG_TYPE.MSG_TYPE_FULL) callback(info_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
def step_progress(self, step_text:str, progress:float, callback: Callable[[str, MSG_TYPE, dict, list, AIPersonality], bool]=None): def step_progress(self, step_text:str, progress:float, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list, AIPersonality], bool]=None):
"""This sends step rogress to front end """This sends step rogress to front end
Args: Args:
@ -3776,9 +3776,9 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_PROGRESS, {'progress':progress}) callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_PROGRESS, {'progress':progress})
def new_message(self, message_text:str, message_type:MSG_TYPE= MSG_TYPE.MSG_TYPE_FULL, metadata=[], callback: Callable[[str, int, dict, list, AIPersonality], bool]=None): def new_message(self, message_text:str, message_type:MSG_OPERATION_TYPE= MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, metadata=[], callback: Callable[[str, int, dict, list, AIPersonality], bool]=None):
"""This sends step rogress to front end """This sends step rogress to front end
Args: Args:
@ -3789,9 +3789,9 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(message_text, MSG_TYPE.MSG_TYPE_NEW_MESSAGE, parameters={'type':message_type.value,'metadata':metadata},personality = self.personality) callback(message_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_NEW_MESSAGE, parameters={'type':message_type.value,'metadata':metadata},personality = self.personality)
def finished_message(self, message_text:str="", callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def finished_message(self, message_text:str="", callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends step rogress to front end """This sends step rogress to front end
Args: Args:
@ -3802,7 +3802,7 @@ class APScript(StateMachine):
callback = self.callback callback = self.callback
if callback: if callback:
callback(message_text, MSG_TYPE.MSG_TYPE_FINISHED_MESSAGE) callback(message_text, MSG_OPERATION_TYPE.MSG_TYPE_FINISHED_MESSAGE)
def print_prompt(self, title, prompt): def print_prompt(self, title, prompt):
ASCIIColors.red("*-*-*-*-*-*-*-* ", end="") ASCIIColors.red("*-*-*-*-*-*-*-* ", end="")
@ -3921,7 +3921,7 @@ class APScript(StateMachine):
if context_details["is_continue"]: if context_details["is_continue"]:
out = context_details["previous_chunk"] + out out = context_details["previous_chunk"] + out
if send_full: if send_full:
self.full(out) self.set_message_content(out)
return out return out
def generate_with_function_calls(self, context_details: dict, functions: List[Dict[str, Any]], max_answer_length: Optional[int] = None, callback = None) -> List[Dict[str, Any]]: def generate_with_function_calls(self, context_details: dict, functions: List[Dict[str, Any]], max_answer_length: Optional[int] = None, callback = None) -> List[Dict[str, Any]]:
@ -4179,7 +4179,7 @@ class APScript(StateMachine):
nested_function_calls += 1 nested_function_calls += 1
self.chunk("\n") self.chunk("\n")
if hide_function_call: if hide_function_call:
self.full("") #Hide function self.set_message_content("") #Hide function
if self.config.debug: if self.config.debug:
self.print_prompt("Function calls", json.dumps(function_calls, indent=4)) self.print_prompt("Function calls", json.dumps(function_calls, indent=4))
@ -4189,7 +4189,7 @@ class APScript(StateMachine):
out += f"{self.separator_template}"+ self.system_custom_header('function calls results') + final_output + "\n" out += f"{self.separator_template}"+ self.system_custom_header('function calls results') + final_output + "\n"
if prompt_after_execution: if prompt_after_execution:
if separate_output: if separate_output:
self.full(final_output) self.set_message_content(final_output)
self.new_message("") self.new_message("")
context_details["discussion_messages"] +=out context_details["discussion_messages"] +=out
if len(self.personality.image_files)>0: if len(self.personality.image_files)>0:

View File

@ -12,7 +12,7 @@ from pydantic import BaseModel, Field
from lollms_webui import LOLLMSWebUI from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel from pydantic import BaseModel
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from lollms.security import check_access from lollms.security import check_access
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager

View File

@ -10,7 +10,7 @@ from fastapi import APIRouter, Request, HTTPException, Depends, Header
from lollms_webui import LOLLMSWebUI from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from lollms.security import sanitize_path, check_access from lollms.security import sanitize_path, check_access
from ascii_colors import ASCIColors from ascii_colors import ASCIColors

View File

@ -13,7 +13,7 @@ from lollms.server.elf_server import LOLLMSElfServer
from pydantic import BaseModel from pydantic import BaseModel
from lollms.security import check_access from lollms.security import check_access
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path from pathlib import Path

View File

@ -10,7 +10,7 @@ from fastapi import APIRouter, Request
from lollms.server.elf_server import LOLLMSElfServer from lollms.server.elf_server import LOLLMSElfServer
from pydantic import BaseModel from pydantic import BaseModel
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from lollms.security import check_access from lollms.security import check_access

View File

@ -11,7 +11,7 @@ from fastapi import APIRouter, Request
from lollms.server.elf_server import LOLLMSElfServer from lollms.server.elf_server import LOLLMSElfServer
from pydantic import BaseModel from pydantic import BaseModel
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from lollms.security import sanitize_path, check_access from lollms.security import sanitize_path, check_access
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors

View File

@ -8,16 +8,17 @@ description:
""" """
from fastapi import APIRouter, Request, Body, Response from fastapi import APIRouter, HTTPException
from fastapi.responses import PlainTextResponse from fastapi.responses import PlainTextResponse
from lollms.server.elf_server import LOLLMSElfServer from lollms.server.elf_server import LOLLMSElfServer
from pydantic import BaseModel from pydantic import BaseModel
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from lollms.generation import RECEPTION_MANAGER, ROLE_CHANGE_DECISION, ROLE_CHANGE_OURTPUT from lollms.generation import RECEPTION_MANAGER, ROLE_CHANGE_DECISION, ROLE_CHANGE_OURTPUT
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
import time import time
import re
import threading import threading
from typing import List, Optional, Union from typing import List, Optional, Union
import random import random
@ -127,7 +128,7 @@ async def lollms_generate(request: LollmsGenerateRequest):
ASCIIColors.yellow(prompt) ASCIIColors.yellow(prompt)
tokens = elf_server.model.tokenize(prompt) tokens = elf_server.model.tokenize(prompt)
n_tokens = len(tokens) n_tokens = len(tokens)
ASCIIColors.yellow(f"Prompt input size {n_tokens}") ASCIIColors.info(f"Prompt input size {n_tokens}")
n_predict = min(min(elf_server.config.ctx_size-n_tokens-1,elf_server.config.max_n_predict), request.n_predict) if request.n_predict>0 else min(elf_server.config.ctx_size-n_tokens-1,elf_server.config.max_n_predict) n_predict = min(min(elf_server.config.ctx_size-n_tokens-1,elf_server.config.max_n_predict), request.n_predict) if request.n_predict>0 else min(elf_server.config.ctx_size-n_tokens-1,elf_server.config.max_n_predict)
stream = request.stream stream = request.stream
if elf_server.binding is not None: if elf_server.binding is not None:
@ -136,7 +137,7 @@ async def lollms_generate(request: LollmsGenerateRequest):
async def generate_chunks(): async def generate_chunks():
lk = threading.Lock() lk = threading.Lock()
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
if elf_server.cancel_gen: if elf_server.cancel_gen:
return False return False
@ -189,7 +190,7 @@ async def lollms_generate(request: LollmsGenerateRequest):
elf_server.cancel_gen = False elf_server.cancel_gen = False
return StreamingResponse(generate_chunks(), media_type="text/plain", headers=headers) return StreamingResponse(generate_chunks(), media_type="text/plain", headers=headers)
else: else:
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data # Yield each chunk of data
if chunk is None: if chunk is None:
return True return True
@ -276,20 +277,35 @@ async def lollms_generate_with_images(request: LollmsGenerateRequest):
stream = request.stream stream = request.stream
prompt_tokens = len(elf_server.binding.tokenize(prompt)) prompt_tokens = len(elf_server.binding.tokenize(prompt))
if elf_server.binding is not None: if elf_server.binding is not None:
def add_padding(encoded_image):
missing_padding = len(encoded_image) % 4
if missing_padding:
encoded_image += '=' * (4 - missing_padding)
return encoded_image
def sanitize_base64(encoded_image):
# Remove any characters that are not valid base64 characters
return re.sub(r'[^A-Za-z0-9+/=]', '', encoded_image)
image_files = [] image_files = []
images_path = elf_server.lollms_paths.personal_outputs_path / "tmp_images" images_path = elf_server.lollms_paths.personal_outputs_path / "tmp_images"
images_path.mkdir(parents=True, exist_ok=True) images_path.mkdir(parents=True, exist_ok=True)
for i, encoded_image in enumerate(encoded_images): for i, encoded_image in enumerate(encoded_images):
# Remove the data URL prefix
if encoded_image.startswith('data:image/png;base64,'):
encoded_image = encoded_image.split(',')[1] # Get the base64 part only
sanitized_image = sanitize_base64(encoded_image)
padded_image = add_padding(sanitized_image)
image_path = images_path/ f'image_{i}.png' image_path = images_path/ f'image_{i}.png'
with open(image_path, 'wb') as image_file: with open(image_path, 'wb') as image_file:
image_file.write(base64.b64decode(encoded_image)) image_file.write(base64.b64decode(padded_image))
image_files.append(image_path) image_files.append(image_path)
if stream: if stream:
new_output={"new_values":[]} new_output={"new_values":[]}
async def generate_chunks(): async def generate_chunks():
lk = threading.Lock() lk = threading.Lock()
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
if elf_server.cancel_gen: if elf_server.cancel_gen:
return False return False
@ -343,7 +359,7 @@ async def lollms_generate_with_images(request: LollmsGenerateRequest):
elf_server.cancel_gen = False elf_server.cancel_gen = False
return StreamingResponse(generate_chunks(), media_type="text/plain", headers=headers) return StreamingResponse(generate_chunks(), media_type="text/plain", headers=headers)
else: else:
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data # Yield each chunk of data
if chunk is None: if chunk is None:
return True return True
@ -373,7 +389,7 @@ async def lollms_generate_with_images(request: LollmsGenerateRequest):
except Exception as ex: except Exception as ex:
trace_exception(ex) trace_exception(ex)
elf_server.error(ex) elf_server.error(ex)
return {"status":False,"error":str(ex)} raise HTTPException(400, f"Error : {ex}")
# ----------------------- Open AI ---------------------------------------- # ----------------------- Open AI ----------------------------------------
@ -493,7 +509,7 @@ async def v1_chat_completions(request: ChatGenerationRequest):
async def generate_chunks(): async def generate_chunks():
lk = threading.Lock() lk = threading.Lock()
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
if elf_server.cancel_gen: if elf_server.cancel_gen:
return False return False
@ -551,7 +567,7 @@ async def v1_chat_completions(request: ChatGenerationRequest):
elf_server.cancel_gen = False elf_server.cancel_gen = False
return StreamingResponse(generate_chunks(), media_type="application/json") return StreamingResponse(generate_chunks(), media_type="application/json")
else: else:
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data # Yield each chunk of data
if chunk is None: if chunk is None:
return True return True
@ -580,7 +596,7 @@ async def v1_chat_completions(request: ChatGenerationRequest):
except Exception as ex: except Exception as ex:
trace_exception(ex) trace_exception(ex)
elf_server.error(ex) elf_server.error(ex)
return {"status":False,"error":str(ex)} raise HTTPException(400, f"Error : {ex}")
class OllamaModelResponse(BaseModel): class OllamaModelResponse(BaseModel):
id: str id: str
@ -635,7 +651,7 @@ async def ollama_chat_completion(request: ChatGenerationRequest):
async def generate_chunks(): async def generate_chunks():
lk = threading.Lock() lk = threading.Lock()
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
if elf_server.cancel_gen: if elf_server.cancel_gen:
return False return False
@ -693,7 +709,7 @@ async def ollama_chat_completion(request: ChatGenerationRequest):
elf_server.cancel_gen = False elf_server.cancel_gen = False
return StreamingResponse(generate_chunks(), media_type="application/json") return StreamingResponse(generate_chunks(), media_type="application/json")
else: else:
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data # Yield each chunk of data
if chunk is None: if chunk is None:
return True return True
@ -789,7 +805,7 @@ async def ollama_generate(request: CompletionGenerationRequest):
if stream: if stream:
output = {"text":""} output = {"text":""}
def generate_chunks(): def generate_chunks():
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data # Yield each chunk of data
output["text"] += chunk output["text"] += chunk
antiprompt = detect_antiprompt(output["text"], [start_header_id_template, end_header_id_template]) antiprompt = detect_antiprompt(output["text"], [start_header_id_template, end_header_id_template])
@ -810,7 +826,7 @@ async def ollama_generate(request: CompletionGenerationRequest):
return StreamingResponse(generate_chunks()) return StreamingResponse(generate_chunks())
else: else:
output = {"text":""} output = {"text":""}
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
if chunk is None: if chunk is None:
return return
# Yield each chunk of data # Yield each chunk of data
@ -875,7 +891,7 @@ async def ollama_completion(request: CompletionGenerationRequest):
async def generate_chunks(): async def generate_chunks():
lk = threading.Lock() lk = threading.Lock()
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
if elf_server.cancel_gen: if elf_server.cancel_gen:
return False return False
@ -928,7 +944,7 @@ async def ollama_completion(request: CompletionGenerationRequest):
elf_server.cancel_gen = False elf_server.cancel_gen = False
return StreamingResponse(generate_chunks(), media_type="text/plain") return StreamingResponse(generate_chunks(), media_type="text/plain")
else: else:
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data # Yield each chunk of data
if chunk is None: if chunk is None:
return True return True
@ -979,7 +995,7 @@ async def v1_completion(request: CompletionGenerationRequest):
if stream: if stream:
output = {"text":""} output = {"text":""}
def generate_chunks(): def generate_chunks():
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data # Yield each chunk of data
output["text"] += chunk output["text"] += chunk
antiprompt = detect_antiprompt(output["text"]) antiprompt = detect_antiprompt(output["text"])
@ -1000,7 +1016,7 @@ async def v1_completion(request: CompletionGenerationRequest):
return StreamingResponse(generate_chunks()) return StreamingResponse(generate_chunks())
else: else:
output = {"text":""} output = {"text":""}
def callback(chunk, chunk_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_CHUNK): def callback(chunk, chunk_type:MSG_TYPE_CONTENT=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK):
# Yield each chunk of data # Yield each chunk of data
output["text"] += chunk output["text"] += chunk
antiprompt = detect_antiprompt(output["text"]) antiprompt = detect_antiprompt(output["text"])

View File

@ -13,7 +13,7 @@ from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel from pydantic import BaseModel
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.security import check_access from lollms.security import check_access
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path from pathlib import Path

View File

@ -13,7 +13,7 @@ from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel from pydantic import BaseModel
from lollms.security import check_access from lollms.security import check_access
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path from pathlib import Path

View File

@ -12,7 +12,7 @@ from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel from pydantic import BaseModel
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.security import check_access from lollms.security import check_access
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path from pathlib import Path

View File

@ -27,7 +27,7 @@ from fastapi import APIRouter, Request, HTTPException, Depends, Header
from lollms_webui import LOLLMSWebUI from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from lollms.security import sanitize_path, check_access from lollms.security import sanitize_path, check_access
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors

View File

@ -11,7 +11,7 @@ from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel from pydantic import BaseModel
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from lollms.security import check_access from lollms.security import check_access

View File

@ -10,7 +10,7 @@ from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel from pydantic import BaseModel
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from lollms.security import sanitize_path from lollms.security import sanitize_path
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors

View File

@ -12,7 +12,7 @@ from fastapi.responses import PlainTextResponse
from lollms_webui import LOLLMSWebUI from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel from pydantic import BaseModel
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from lollms.utilities import find_next_available_filename, output_file_path_to_url, detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager from lollms.utilities import find_next_available_filename, output_file_path_to_url, detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from lollms.security import sanitize_path, validate_path, check_access from lollms.security import sanitize_path, validate_path, check_access

View File

@ -11,7 +11,7 @@ from fastapi import APIRouter, HTTPException
from lollms_webui import LOLLMSWebUI from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel from pydantic import BaseModel
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string from lollms.utilities import detect_antiprompt, remove_text_from_string
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors

View File

@ -12,7 +12,7 @@ from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel from pydantic import BaseModel
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.security import check_access from lollms.security import check_access
from lollms.types import MSG_TYPE from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path from pathlib import Path

View File

@ -8,17 +8,6 @@ description:
""" """
from fastapi import APIRouter, Request from fastapi import APIRouter, Request
from fastapi import HTTPException
from pydantic import BaseModel
import pkg_resources
from lollms.server.elf_server import LOLLMSElfServer
from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.utilities import load_config, trace_exception, gc
from lollms.utilities import find_first_available_file_index, convert_language_name, run_async
from lollms_webui import LOLLMSWebUI from lollms_webui import LOLLMSWebUI
from pathlib import Path from pathlib import Path
from typing import List from typing import List

View File

@ -14,7 +14,7 @@ from lollms.server.elf_server import LOLLMSElfServer
from fastapi.responses import FileResponse from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality from lollms.personality import AIPersonality
from lollms.types import SENDER_TYPES from lollms.types import SENDER_TYPES
from lollms.utilities import load_config, trace_exception, gc, terminate_thread, run_async from lollms.utilities import load_config, trace_exception, gc, terminate_thread, run_async
from pathlib import Path from pathlib import Path
@ -95,11 +95,11 @@ def add_events(sio:socketio):
# Raw text generation # Raw text generation
lollmsElfServer.answer = {"full_text":""} lollmsElfServer.answer = {"full_text":""}
def callback(text, message_type: MSG_TYPE, metadata:dict={}): def callback(text, message_type: MSG_TYPE, metadata:dict={}):
if message_type == MSG_TYPE.MSG_TYPE_CHUNK: if message_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK:
ASCIIColors.success(f"generated: {len(lollmsElfServer.answer['full_text'].split())} words", end='\r') ASCIIColors.success(f"generated: {len(lollmsElfServer.answer['full_text'].split())} words", end='\r')
if text is not None: if text is not None:
lollmsElfServer.answer["full_text"] = lollmsElfServer.answer["full_text"] + text lollmsElfServer.answer["full_text"] = lollmsElfServer.answer["full_text"] + text
run_async(partial(lollmsElfServer.sio.emit,'text_chunk', {'chunk': text, 'type':MSG_TYPE.MSG_TYPE_CHUNK.value}, to=client_id)) run_async(partial(lollmsElfServer.sio.emit,'text_chunk', {'chunk': text, 'type':MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK.value}, to=client_id))
if client_id in lollmsElfServer.session.clients.keys():# Client disconnected if client_id in lollmsElfServer.session.clients.keys():# Client disconnected
if client.requested_stop: if client.requested_stop:
return False return False
@ -169,7 +169,7 @@ def add_events(sio:socketio):
full_discussion = personality.personality_conditioning + ''.join(full_discussion_blocks) full_discussion = personality.personality_conditioning + ''.join(full_discussion_blocks)
def callback(text, message_type: MSG_TYPE, metadata:dict={}): def callback(text, message_type: MSG_TYPE, metadata:dict={}):
if message_type == MSG_TYPE.MSG_TYPE_CHUNK: if message_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK:
lollmsElfServer.answer["full_text"] = lollmsElfServer.answer["full_text"] + text lollmsElfServer.answer["full_text"] = lollmsElfServer.answer["full_text"] + text
run_async(partial(lollmsElfServer.sio.emit,'text_chunk', {'chunk': text}, to=client_id)) run_async(partial(lollmsElfServer.sio.emit,'text_chunk', {'chunk': text}, to=client_id))
try: try:
@ -256,7 +256,7 @@ def add_events(sio:socketio):
nb_tokens = None nb_tokens = None
created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S') created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
message = lollmsElfServer.session.get_client(client_id).discussion.add_message( message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value, message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value, sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""), sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
content=prompt, content=prompt,

View File

@ -16,7 +16,7 @@ from lollms.binding import BindingBuilder, InstallOption
from lollms.security import sanitize_path from lollms.security import sanitize_path
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality from lollms.personality import AIPersonality
from lollms.utilities import load_config, trace_exception, gc, terminate_thread, run_async from lollms.utilities import load_config, trace_exception, gc, terminate_thread, run_async
from pathlib import Path from pathlib import Path
from typing import List from typing import List

View File

@ -15,7 +15,7 @@ from lollms.types import SENDER_TYPES
from fastapi.responses import FileResponse from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality from lollms.personality import AIPersonality
from lollms.utilities import load_config, trace_exception, gc, terminate_thread, run_async from lollms.utilities import load_config, trace_exception, gc, terminate_thread, run_async
from pathlib import Path from pathlib import Path
from typing import List from typing import List
@ -145,7 +145,7 @@ def add_events(sio:socketio):
ump = lollmsElfServer.config.discussion_prompt_separator + lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix ump = lollmsElfServer.config.discussion_prompt_separator + lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix
created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S') created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
message = client.discussion.add_message( message = client.discussion.add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value, message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value, sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""), sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
content="", content="",

View File

@ -14,7 +14,7 @@ from lollms.server.elf_server import LOLLMSElfServer
from fastapi.responses import FileResponse from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality from lollms.personality import AIPersonality
from lollms.utilities import load_config, trace_exception, gc, terminate_thread, run_async from lollms.utilities import load_config, trace_exception, gc, terminate_thread, run_async
from pathlib import Path from pathlib import Path
from typing import List from typing import List

View File

@ -4,7 +4,7 @@ from typing import Callable, List, Dict, Any, Optional
from functools import partial from functools import partial
from datetime import datetime from datetime import datetime
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
from lollms.types import MSG_TYPE, SUMMARY_MODE from lollms.types import MSG_OPERATION_TYPE, SUMMARY_MODE
from lollms.com import LoLLMsCom from lollms.com import LoLLMsCom
from lollms.utilities import PromptReshaper, remove_text_from_string, process_ai_output from lollms.utilities import PromptReshaper, remove_text_from_string, process_ai_output
from lollmsvectordb.text_chunker import TextChunker from lollmsvectordb.text_chunker import TextChunker
@ -13,7 +13,7 @@ from lollmsvectordb.directory_binding import DirectoryBinding
import hashlib import hashlib
import json import json
class TasksLibrary: class TasksLibrary:
def __init__(self, lollms:LoLLMsCom, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None) -> None: def __init__(self, lollms:LoLLMsCom, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None) -> None:
self.lollms = lollms self.lollms = lollms
self.config = lollms.config self.config = lollms.config
self.callback = callback self.callback = callback
@ -46,14 +46,14 @@ class TasksLibrary:
return prompt.lower() return prompt.lower()
return None return None
def process(self, text:str, message_type:MSG_TYPE, callback=None, show_progress=False): def process(self, text:str, message_type:MSG_OPERATION_TYPE, callback=None, show_progress=False):
if callback is None: if callback is None:
callback = self.callback callback = self.callback
if text is None: if text is None:
return True return True
if message_type==MSG_TYPE.MSG_TYPE_CHUNK: if message_type==MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK:
bot_says = self.bot_says + text bot_says = self.bot_says + text
elif message_type==MSG_TYPE.MSG_TYPE_FULL: elif message_type==MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT:
bot_says = text bot_says = text
if show_progress: if show_progress:
@ -226,7 +226,7 @@ class TasksLibrary:
return gen return gen
# Communications with the user # Communications with the user
def step_start(self, step_text, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def step_start(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This triggers a step start """This triggers a step start
Args: Args:
@ -237,7 +237,7 @@ class TasksLibrary:
callback = self.callback callback = self.callback
if callback: if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_START) callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_START)
def step_end(self, step_text, status=True, callback: Callable[[str, int, dict, list], bool]=None): def step_end(self, step_text, status=True, callback: Callable[[str, int, dict, list], bool]=None):
"""This triggers a step end """This triggers a step end
@ -250,9 +250,9 @@ class TasksLibrary:
callback = self.callback callback = self.callback
if callback: if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP_END, {'status':status}) callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS if status else MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_FAILURE)
def step(self, step_text, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def step(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This triggers a step information """This triggers a step information
Args: Args:
@ -268,9 +268,9 @@ class TasksLibrary:
callback = self.callback callback = self.callback
if callback: if callback:
callback(step_text, MSG_TYPE.MSG_TYPE_STEP) callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP)
def exception(self, ex, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def exception(self, ex, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends exception to the client """This sends exception to the client
Args: Args:
@ -286,9 +286,9 @@ class TasksLibrary:
callback = self.callback callback = self.callback
if callback: if callback:
callback(str(ex), MSG_TYPE.MSG_TYPE_EXCEPTION) callback(str(ex), MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_EXCEPTION)
def warning(self, warning:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def warning(self, warning:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends exception to the client """This sends exception to the client
Args: Args:
@ -304,9 +304,9 @@ class TasksLibrary:
callback = self.callback callback = self.callback
if callback: if callback:
callback(warning, MSG_TYPE.MSG_TYPE_EXCEPTION) callback(warning, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_EXCEPTION)
def info(self, info:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def info(self, info:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends exception to the client """This sends exception to the client
Args: Args:
@ -322,7 +322,7 @@ class TasksLibrary:
callback = self.callback callback = self.callback
if callback: if callback:
callback(info, MSG_TYPE.MSG_TYPE_INFO) callback(info, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO)
def json(self, title:str, json_infos:dict, callback: Callable[[str, int, dict, list], bool]=None, indent=4): def json(self, title:str, json_infos:dict, callback: Callable[[str, int, dict, list], bool]=None, indent=4):
"""This sends json data to front end """This sends json data to front end
@ -340,9 +340,9 @@ class TasksLibrary:
callback = self.callback callback = self.callback
if callback: if callback:
callback("", MSG_TYPE.MSG_TYPE_JSON_INFOS, metadata = [{"title":title, "content":json.dumps(json_infos, indent=indent)}]) callback("", MSG_OPERATION_TYPE.MSG_TYPE_JSON_INFOS, metadata = [{"title":title, "content":json.dumps(json_infos, indent=indent)}])
def ui(self, html_ui:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def ui(self, html_ui:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends ui elements to front end """This sends ui elements to front end
Args: Args:
@ -358,9 +358,9 @@ class TasksLibrary:
callback = self.callback callback = self.callback
if callback: if callback:
callback(html_ui, MSG_TYPE.MSG_TYPE_UI) callback(html_ui, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI)
def code(self, code:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def code(self, code:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends code to front end """This sends code to front end
Args: Args:
@ -376,9 +376,9 @@ class TasksLibrary:
callback = self.callback callback = self.callback
if callback: if callback:
callback(code, MSG_TYPE.MSG_TYPE_CODE) callback(code, MSG_OPERATION_TYPE.MSG_TYPE_CODE)
def chunk(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def chunk(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end """This sends full text to front end
Args: Args:
@ -389,10 +389,10 @@ class TasksLibrary:
callback = self.callback callback = self.callback
if callback: if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_CHUNK) callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK)
def full(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None, msg_type:MSG_TYPE = MSG_TYPE.MSG_TYPE_FULL): def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None, msg_type:MSG_OPERATION_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT):
"""This sends full text to front end """This sends full text to front end
Args: Args:
@ -405,7 +405,7 @@ class TasksLibrary:
if callback: if callback:
callback(full_text, msg_type) callback(full_text, msg_type)
def full_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def set_message_content_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end (INVISIBLE to AI) """This sends full text to front end (INVISIBLE to AI)
Args: Args:
@ -416,9 +416,9 @@ class TasksLibrary:
callback = self.callback callback = self.callback
if callback: if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI) callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI)
def full_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None): def set_message_content_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
"""This sends full text to front end (INVISIBLE to user) """This sends full text to front end (INVISIBLE to user)
Args: Args:
@ -429,7 +429,7 @@ class TasksLibrary:
callback = self.callback callback = self.callback
if callback: if callback:
callback(full_text, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER) callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER)

View File

@ -1,34 +1,39 @@
from enum import Enum from enum import Enum
class MSG_TYPE(Enum): class MSG_TYPE(Enum):
# Messaging # Messaging
MSG_TYPE_CHUNK = 0 # A chunk of a message (used for classical chat) MSG_TYPE_CONTENT = 1 # A full message (for some personality the answer is sent in bulk)
MSG_TYPE_FULL = 1 # A full message (for some personality the answer is sent in bulk) MSG_TYPE_CONTENT_INVISIBLE_TO_AI = 2 # A full message (for some personality the answer is sent in bulk)
MSG_TYPE_FULL_INVISIBLE_TO_AI = 2 # A full message (for some personality the answer is sent in bulk) MSG_TYPE_CONTENT_INVISIBLE_TO_USER = 3 # A full message (for some personality the answer is sent in bulk)
MSG_TYPE_FULL_INVISIBLE_TO_USER = 3 # A full message (for some personality the answer is sent in bulk)
class MSG_OPERATION_TYPE(Enum):
# Conditionning # Conditionning
MSG_OPERATION_TYPE_ADD_CHUNK = 0 # Add a chunk to the current message
MSG_OPERATION_TYPE_SET_CONTENT = 1 # sets the content of current message
MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI = 2 # sets the content of current message as invisible to ai
MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER = 3 # sets the content of current message as invisible to user
# Informations # Informations
MSG_TYPE_EXCEPTION = 4 # An exception occured MSG_OPERATION_TYPE_EXCEPTION = 4 # An exception occured
MSG_TYPE_WARNING = 5 # A warning occured MSG_OPERATION_TYPE_WARNING = 5 # A warning occured
MSG_TYPE_INFO = 6 # An information to be shown to user MSG_OPERATION_TYPE_INFO = 6 # An information to be shown to user
# Steps # Steps
MSG_TYPE_STEP = 7 # An instant step (a step that doesn't need time to be executed) MSG_OPERATION_TYPE_STEP = 7 # An instant step (a step that doesn't need time to be executed)
MSG_TYPE_STEP_START = 8 # A step has started (the text contains an explanation of the step done by he personality) MSG_OPERATION_TYPE_STEP_START = 8 # A step has started (the text contains an explanation of the step done by he personality)
MSG_TYPE_STEP_PROGRESS = 9 # The progress value (the text contains a percentage and can be parsed by the reception) MSG_OPERATION_TYPE_STEP_PROGRESS = 9 # The progress value (the text contains a percentage and can be parsed by the reception)
MSG_TYPE_STEP_END = 10# A step has been done (the text contains an explanation of the step done by he personality) MSG_OPERATION_TYPE_STEP_END_SUCCESS = 10# A step has been done (the text contains an explanation of the step done by he personality)
MSG_OPERATION_TYPE_STEP_END_FAILURE = 11# A step has been done (the text contains an explanation of the step done by he personality)
#Extra #Extra
MSG_TYPE_JSON_INFOS = 11# A JSON output that is useful for summarizing the process of generation used by personalities like chain of thoughts and tree of thooughts MSG_OPERATION_TYPE_JSON_INFOS = 12# A JSON output that is useful for summarizing the process of generation used by personalities like chain of thoughts and tree of thooughts
MSG_TYPE_REF = 12# References (in form of [text](path)) MSG_OPERATION_TYPE_REF = 13# References (in form of [text](path))
MSG_TYPE_CODE = 13# A javascript code to execute MSG_OPERATION_TYPE_CODE = 14# A javascript code to execute
MSG_TYPE_UI = 14# A vue.js component to show (we need to build some and parse the text to show it) MSG_OPERATION_TYPE_UI = 15# A vue.js component to show (we need to build some and parse the text to show it)
#Commands #Commands
MSG_TYPE_NEW_MESSAGE = 15# A new message MSG_OPERATION_TYPE_NEW_MESSAGE = 16# A new message
MSG_TYPE_FINISHED_MESSAGE = 17# End of current message MSG_OPERATION_TYPE_FINISHED_MESSAGE = 17# End of current message
CONTENT_OPERATION_TYPES = [MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER]
class SENDER_TYPES(Enum): class SENDER_TYPES(Enum):
SENDER_TYPES_USER = 0 # Sent by user SENDER_TYPES_USER = 0 # Sent by user

View File

@ -26,7 +26,7 @@ def get_all_files(path):
setuptools.setup( setuptools.setup(
name="lollms", name="lollms",
version="9.5.1", version="10.0.0",
author="Saifeddine ALOUI (ParisNeo)", author="Saifeddine ALOUI (ParisNeo)",
author_email="parisneo_ai@gmail.com", author_email="parisneo_ai@gmail.com",
description="A python library for AI personality definition", description="A python library for AI personality definition",