moved to new mode

This commit is contained in:
Saifeddine ALOUI 2024-08-15 23:50:08 +02:00
parent df4052e096
commit e8a141b8bd
8 changed files with 60 additions and 59 deletions

View File

@ -16,7 +16,7 @@ from lollms.tasks import TasksLibrary
from lollmsvectordb.database_elements.chunk import Chunk
from lollmsvectordb.vector_database import VectorDatabase
from typing import Callable
from typing import Callable, Any
from pathlib import Path
from datetime import datetime
from functools import partial

View File

@ -9,7 +9,7 @@
from fastapi import Request
from typing import Dict, Any
from pathlib import Path
from typing import Callable
from typing import Callable, Any
from lollms.paths import LollmsPaths
from ascii_colors import ASCIIColors
from urllib import request

View File

@ -1,9 +1,10 @@
from ascii_colors import ASCIIColors
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES, MSG_TYPE
from typing import Callable
from typing import Callable, Any
import socketio
from enum import Enum
from lollms.types import MSG_OPERATION_TYPE
from typing import Any, List
class NotificationType(Enum):
"""Notification types."""
@ -161,7 +162,7 @@ class LoLLMsCom:
open=False
):
pass
def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def set_message_content(self, full_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends full text to front end
Args:

View File

@ -1,7 +1,7 @@
from pathlib import Path
from lollms.personality import APScript
from lollmsvectordb.text_document_loader import TextDocumentsLoader
from safe_store.text_vectorizer import TextVectorizer
from lollmsvectordb import VectorDatabase
import json
import re
def remove_indexing_from_markdown(markdown_text):
@ -34,7 +34,7 @@ def find_available_file(folder_path):
i += 1
def buildKnowledgeDB(llm:APScript, data_store:TextVectorizer, data_folder_path:str, output_folder:str, questions_gen_size:int, answer_gen_size:int):
def buildKnowledgeDB(llm:APScript, data_store:VectorDatabase, data_folder_path:str, output_folder:str, questions_gen_size:int, answer_gen_size:int):
output_folder = Path(output_folder)
output_folder.mkdir(parents=True, exist_ok=True)
# Verify if the data_folder_path exists
@ -46,26 +46,27 @@ def buildKnowledgeDB(llm:APScript, data_store:TextVectorizer, data_folder_path:s
for file_path in document_files:
if file_path.suffix in ['.pdf',".txt",".c",".cpp",".h",".py",".msg",".docx",".pptx",".md"]:
print(file_path)
document_text = GenericDataLoader.read_file(file_path)
document_text = TextDocumentsLoader.read_file(file_path)
data_store.add_document(file_path, document_text, chunk_size=512, overlap_size=128)
llm.step_end(f"Loading files")
# Index the vector store
llm.step_start(f"Indexing files")
data_store.index()
data_store.build_index()
llm.step_end(f"Indexing files")
db_name = find_available_file(output_folder)
output = "### Building questions:\n"
llm.full(output)
llm.set_message_content(output)
# Iterate over all documents in data_folder_path
processed_chunks = 0
# Iterate over all chunks and extract text
questions_vector = []
total_chunks = len(data_store.chunks.items())
for chunk_name, chunk in data_store.chunks.items():
chunk_text = chunk["chunk_text"]
chunks = data_store.get_all_chunks()
total_chunks = len(chunks)
for chunk in chunks:
chunk_text = chunk.text
processed_chunks += 1
llm.step_start(f"Processing chunk {chunk_name}: {processed_chunks}/{total_chunks}")
llm.step_start(f"Processing chunk {chunk.chunk_id}: {processed_chunks}/{total_chunks}")
# Build the prompt text with placeholders
prompt_text = f"{llm.config.start_header_id_template}instruction: Generate questions or tasks that delve into the specific details and information presented in the text chunks. Please do not ask questions about the form of the text, and do not mention the text itself in your questions. Make sure you format the output using Markdown with each question or task placed in a separate paragraph starting with __P__.\n{llm.config.separator_template}{llm.config.start_header_id_template}chunk {{chunk_name}}: {{chunk}}{llm.config.separator_template}{llm.config.start_header_id_template}Here are some questions and tasks to further explore the contents of the given text chunks:\n__P__"
# Ask AI to generate questions
@ -75,9 +76,9 @@ def buildKnowledgeDB(llm:APScript, data_store:TextVectorizer, data_folder_path:s
generated_lines = [q.replace("__P__","") for q in generated_lines]
generated_lines = [remove_indexing_from_markdown(q) for q in generated_lines]
questions_vector.extend(generated_lines)
llm.step_end(f"Processing chunk {chunk_name}: {processed_chunks}/{total_chunks}")
output += "\n<".join(generated_lines) + "\n"
llm.full(output)
llm.step_end(f"Processing chunk {chunk.chunk_id}: {processed_chunks}/{total_chunks}")
output += "\n".join(generated_lines) + "\n"
llm.set_message_content(output)
llm.step_start(f"Saving questions for future use")
with open(output_folder/f"{db_name.split('.')[0]}_q.json", 'w') as file:
@ -85,11 +86,11 @@ def buildKnowledgeDB(llm:APScript, data_store:TextVectorizer, data_folder_path:s
llm.step_end(f"Saving questions for future use")
output += "### Building answers:\n"
llm.full(output)
llm.set_message_content(output)
qna_list=[]
# Perform further processing with questions_vector
for index, question in enumerate(questions_vector):
docs, sorted_similarities, document_ids = data_store.recover_text(question, top_k=int(llm.personality_config.data_vectorization_nb_chunks))
docs, sorted_similarities, document_ids = data_store.search(question, n_results=int(llm.personality_config.data_vectorization_nb_chunks))
if llm.personality_config.use_enhanced_mode:
llm.step_start(f"Verifying RAG data_{index}")
prompt_text = """{llm.config.start_header_id_template}chunk: {{chunk}}
@ -127,7 +128,7 @@ Be precise and helpful.
"id":0
})
output += f"q:{question}\na:{answer}\n"
llm.full(output)
llm.set_message_content(output)
llm.step_end(f"Asking question {index}/{len(questions_vector)}")
with open(output_folder/db_name, 'w') as file:
json.dump(qna_list, file)

View File

@ -32,7 +32,7 @@ def read_text_from_file(file_path: Union[Path, str], tts_module:LollmsTTS, llm:A
# Generate audio from the text
audio_file_path = tts_module.tts_audio(text,use_threading=True)
llm.full(text)
llm.set_message_content(text)
# Return the path to the generated audio file
return str(audio_file_path)

View File

@ -118,7 +118,7 @@ class AIPersonality:
ignore_discussion_documents_rag=False,
is_relative_path=True,
installation_option:InstallOption=InstallOption.INSTALL_IF_NECESSARY,
callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None
callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None
):
"""
Initialize an AIPersonality instance.
@ -311,7 +311,7 @@ class AIPersonality:
if callback:
callback(message_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_NEW_MESSAGE, parameters={'type':message_type.value,'metadata':metadata}, personality=self)
def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def set_message_content(self, full_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends full text to front end
Args:
@ -324,7 +324,7 @@ class AIPersonality:
if callback:
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
def ui(self, ui_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def ui(self, ui_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends ui text to front end
Args:
@ -338,7 +338,7 @@ class AIPersonality:
callback(ui_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI)
def set_message_content_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def set_message_content_invisible_to_ai(self, full_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends full text to front end (INVISIBLE to AI)
Args:
@ -351,7 +351,7 @@ class AIPersonality:
if callback:
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI)
def set_message_content_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def set_message_content_invisible_to_user(self, full_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends full text to front end (INVISIBLE to user)
Args:
@ -533,7 +533,7 @@ class AIPersonality:
ASCIIColors.red("Model failed to rank inputs")
return None
def step_start(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def step_start(self, step_text, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This triggers a step start
Args:
@ -559,7 +559,7 @@ class AIPersonality:
if callback:
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
def step(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def step(self, step_text, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This triggers a step information
Args:
@ -773,7 +773,7 @@ class AIPersonality:
return self.bot_says
def setCallback(self, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]):
def setCallback(self, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]):
self.callback = callback
if self._processor:
self._processor.callback = callback
@ -2030,7 +2030,7 @@ class StateMachine:
def process_state(self, command, full_context, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None, context_state:dict=None, client:Client=None):
def process_state(self, command, full_context, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None, context_state:dict=None, client:Client=None):
"""
Process the given command based on the current state.
@ -2385,7 +2385,7 @@ class APScript(StateMachine):
return self.personality.generate(prompt, max_size, temperature, top_k, top_p, repeat_penalty, repeat_last_n, callback, debug=debug)
def run_workflow(self, prompt:str, previous_discussion_text:str="", callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None, context_details:dict=None, client:Client=None):
def run_workflow(self, prompt:str, previous_discussion_text:str="", callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, AIPersonality| None], bool]=None, context_details:dict=None, client:Client=None):
"""
This function generates code based on the given parameters.
@ -2918,7 +2918,7 @@ class APScript(StateMachine):
return chunks
def step_start(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def step_start(self, step_text, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This triggers a step start
Args:
@ -2944,7 +2944,7 @@ class APScript(StateMachine):
if callback:
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS if status else MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_FAILURE)
def step(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def step(self, step_text, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This triggers a step information
Args:
@ -2962,7 +2962,7 @@ class APScript(StateMachine):
if callback:
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP)
def exception(self, ex, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def exception(self, ex, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends exception to the client
Args:
@ -2980,7 +2980,7 @@ class APScript(StateMachine):
if callback:
callback(str(ex), MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_EXCEPTION)
def warning(self, warning:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def warning(self, warning:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends exception to the client
Args:
@ -3017,7 +3017,7 @@ class APScript(StateMachine):
if callback:
callback([{"title":title, "content":json.dumps(json_infos, indent=indent)}], MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_JSON_INFOS)
def ui(self, html_ui:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]|None=None):
def ui(self, html_ui:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]|None=None):
"""This sends ui elements to front end
Args:
@ -3036,7 +3036,7 @@ class APScript(StateMachine):
callback(html_ui, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI)
def ui_in_iframe(self, html_ui:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def ui_in_iframe(self, html_ui:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends ui elements to front end inside an iframe
Args:
@ -3059,7 +3059,7 @@ class APScript(StateMachine):
def code(self, code:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def code(self, code:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends code to front end
Args:
@ -3077,7 +3077,7 @@ class APScript(StateMachine):
if callback:
callback(code, MSG_OPERATION_TYPE.MSG_TYPE_CODE)
def chunk(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def chunk(self, full_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends full text to front end
Args:
@ -3091,7 +3091,7 @@ class APScript(StateMachine):
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK)
def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None, msg_type:MSG_OPERATION_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT):
def set_message_content(self, full_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None, msg_type:MSG_OPERATION_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT):
"""This sends full text to front end
Args:
@ -3104,7 +3104,7 @@ class APScript(StateMachine):
if callback:
callback(full_text, msg_type)
def set_message_content_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def set_message_content_invisible_to_ai(self, full_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends full text to front end (INVISIBLE to AI)
Args:
@ -3117,7 +3117,7 @@ class APScript(StateMachine):
if callback:
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI)
def set_message_content_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def set_message_content_invisible_to_user(self, full_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends full text to front end (INVISIBLE to user)
Args:
@ -3760,7 +3760,7 @@ class APScript(StateMachine):
verbose=verbose
)
def info(self, info_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def info(self, info_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends info text to front end
Args:
@ -3773,7 +3773,7 @@ class APScript(StateMachine):
if callback:
callback(info_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
def step_progress(self, step_text:str, progress:float, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list, AIPersonality], bool]=None):
def step_progress(self, step_text:str, progress:float, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends step progress to front end
Args:
@ -3797,9 +3797,9 @@ class APScript(StateMachine):
callback = self.callback
if callback:
callback(message_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_NEW_MESSAGE, metadata , personality = self.personality)
callback(message_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_NEW_MESSAGE, personality = self.personality)
def finished_message(self, message_text:str="", callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def finished_message(self, message_text:str="", callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends step progress to front end
Args:

View File

@ -18,7 +18,6 @@ from lollms.databases.discussions_database import DiscussionsDB, Discussion
from lollms.security import check_access
from typing import List
from safe_store.text_vectorizer import TextVectorizer, VectorizationMethod, VisualizationMethod
import tqdm
from pathlib import Path
# ----------------------- Defining router and main class ------------------------------

View File

@ -1,6 +1,6 @@
import sys
from typing import Callable, List, Dict, Any, Optional
from typing import Callable, Any, List, Dict, Any, Optional
from functools import partial
from datetime import datetime
from ascii_colors import ASCIIColors
@ -13,7 +13,7 @@ from lollmsvectordb.directory_binding import DirectoryBinding
import hashlib
import json
class TasksLibrary:
def __init__(self, lollms:LoLLMsCom, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None) -> None:
def __init__(self, lollms:LoLLMsCom, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None) -> None:
self.lollms = lollms
self.config = lollms.config
self.callback = callback
@ -226,7 +226,7 @@ class TasksLibrary:
return gen
# Communications with the user
def step_start(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def step_start(self, step_text, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This triggers a step start
Args:
@ -252,7 +252,7 @@ class TasksLibrary:
if callback:
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS if status else MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_FAILURE)
def step(self, step_text, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def step(self, step_text, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This triggers a step information
Args:
@ -270,7 +270,7 @@ class TasksLibrary:
if callback:
callback(step_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP)
def exception(self, ex, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def exception(self, ex, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends exception to the client
Args:
@ -288,7 +288,7 @@ class TasksLibrary:
if callback:
callback(str(ex), MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_EXCEPTION)
def warning(self, warning:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def warning(self, warning:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends exception to the client
Args:
@ -306,7 +306,7 @@ class TasksLibrary:
if callback:
callback(warning, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_EXCEPTION)
def info(self, info:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def info(self, info:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends exception to the client
Args:
@ -342,7 +342,7 @@ class TasksLibrary:
if callback:
callback("", MSG_OPERATION_TYPE.MSG_TYPE_JSON_INFOS, metadata = [{"title":title, "content":json.dumps(json_infos, indent=indent)}])
def ui(self, html_ui:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def ui(self, html_ui:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends ui elements to front end
Args:
@ -360,7 +360,7 @@ class TasksLibrary:
if callback:
callback(html_ui, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI)
def code(self, code:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def code(self, code:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends code to front end
Args:
@ -378,7 +378,7 @@ class TasksLibrary:
if callback:
callback(code, MSG_OPERATION_TYPE.MSG_TYPE_CODE)
def chunk(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def chunk(self, full_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends full text to front end
Args:
@ -392,7 +392,7 @@ class TasksLibrary:
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK)
def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None, msg_type:MSG_OPERATION_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT):
def set_message_content(self, full_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None, msg_type:MSG_OPERATION_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT):
"""This sends full text to front end
Args:
@ -405,7 +405,7 @@ class TasksLibrary:
if callback:
callback(full_text, msg_type)
def set_message_content_invisible_to_ai(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def set_message_content_invisible_to_ai(self, full_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends full text to front end (INVISIBLE to AI)
Args:
@ -418,7 +418,7 @@ class TasksLibrary:
if callback:
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI)
def set_message_content_invisible_to_user(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]=None):
def set_message_content_invisible_to_user(self, full_text:str, callback: Callable[[str | list | None, MSG_OPERATION_TYPE, str, Any | None], bool]=None):
"""This sends full text to front end (INVISIBLE to user)
Args: