Merge branch 'main' of https://github.com/ParisNeo/lollms into main

This commit is contained in:
saloui 2023-07-11 14:57:46 +02:00
commit dc795e1935
13 changed files with 120 additions and 30 deletions

View File

@ -1,5 +1,5 @@
from lollms.console import Conversation from lollms.console import Conversation
import sys
class MyConversation(Conversation): class MyConversation(Conversation):
def __init__(self, cfg=None): def __init__(self, cfg=None):
super().__init__(cfg, show_welcome_message=False) super().__init__(cfg, show_welcome_message=False)
@ -15,7 +15,9 @@ class MyConversation(Conversation):
full_discussion += self.personality.user_message_prefix+prompt+self.personality.link_text full_discussion += self.personality.user_message_prefix+prompt+self.personality.link_text
full_discussion += self.personality.ai_message_prefix full_discussion += self.personality.ai_message_prefix
def callback(text, type=None): def callback(text, type=None):
print(text, end="", flush=True) print(text, end="")
sys.stdout = sys.__stdout__
sys.stdout.flush()
return True return True
print(self.personality.name+": ",end="",flush=True) print(self.personality.name+": ",end="",flush=True)
output = self.safe_generate(full_discussion, callback=callback) output = self.safe_generate(full_discussion, callback=callback)

View File

@ -1,4 +1,5 @@
from lollms.console import Conversation from lollms.console import Conversation
import sys
class MyConversation(Conversation): class MyConversation(Conversation):
def __init__(self, cfg=None): def __init__(self, cfg=None):
@ -7,9 +8,14 @@ class MyConversation(Conversation):
def start_conversation(self): def start_conversation(self):
prompt = "Once apon a time" prompt = "Once apon a time"
def callback(text, type=None): def callback(text, type=None):
print(text, end="", flush=True) print(text, end="")
sys.stdout = sys.__stdout__
sys.stdout.flush()
return True return True
print(prompt, end="", flush=True) print(prompt, end="")
sys.stdout = sys.__stdout__
sys.stdout.flush()
output = self.safe_generate(prompt, callback=callback) output = self.safe_generate(prompt, callback=callback)
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -4,7 +4,7 @@ from lollms.paths import LollmsPaths
from lollms.personality import PersonalityBuilder from lollms.personality import PersonalityBuilder
from lollms.binding import LLMBinding, BindingBuilder, ModelBuilder from lollms.binding import LLMBinding, BindingBuilder, ModelBuilder
from lollms.config import InstallOption from lollms.config import InstallOption
import traceback from lollms.helpers import trace_exception
from lollms.terminal import MainMenu from lollms.terminal import MainMenu
class LollmsApplication: class LollmsApplication:
@ -29,6 +29,7 @@ class LollmsApplication:
self.binding = self.load_binding() self.binding = self.load_binding()
except Exception as ex: except Exception as ex:
ASCIIColors.error(f"Failed to load binding.\nReturned exception: {ex}") ASCIIColors.error(f"Failed to load binding.\nReturned exception: {ex}")
trace_exception(ex)
if self.binding is not None: if self.binding is not None:
ASCIIColors.success(f"Binding {self.config.binding_name} loaded successfully.") ASCIIColors.success(f"Binding {self.config.binding_name} loaded successfully.")
@ -41,6 +42,7 @@ class LollmsApplication:
self.model = self.load_model() self.model = self.load_model()
except Exception as ex: except Exception as ex:
ASCIIColors.error(f"Failed to load model.\nReturned exception: {ex}") ASCIIColors.error(f"Failed to load model.\nReturned exception: {ex}")
trace_exception(ex)
self.model = None self.model = None
else: else:
self.model = None self.model = None
@ -53,15 +55,6 @@ class LollmsApplication:
self.model = None self.model = None
self.mount_personalities() self.mount_personalities()
def trace_exception(self, ex):
# Catch the exception and get the traceback as a list of strings
traceback_lines = traceback.format_exception(type(ex), ex, ex.__traceback__)
# Join the traceback lines into a single string
traceback_text = ''.join(traceback_lines)
ASCIIColors.error(traceback_text)
def load_binding(self): def load_binding(self):
try: try:
binding = BindingBuilder().build_binding(self.config, self.lollms_paths) binding = BindingBuilder().build_binding(self.config, self.lollms_paths)
@ -69,7 +62,12 @@ class LollmsApplication:
print(ex) print(ex)
print(f"Couldn't find binding. Please verify your configuration file at {self.configuration_path} or use the next menu to select a valid binding") print(f"Couldn't find binding. Please verify your configuration file at {self.configuration_path} or use the next menu to select a valid binding")
print(f"Trying to reinstall binding") print(f"Trying to reinstall binding")
try:
binding = BindingBuilder().build_binding(self.config, self.lollms_paths,installation_option=InstallOption.FORCE_INSTALL) binding = BindingBuilder().build_binding(self.config, self.lollms_paths,installation_option=InstallOption.FORCE_INSTALL)
except Exception as ex:
ASCIIColors.error("Couldn't reinstall model")
trace_exception(ex)
return binding return binding
def load_model(self): def load_model(self):
@ -81,6 +79,7 @@ class LollmsApplication:
except Exception as ex: except Exception as ex:
ASCIIColors.error(f"Couldn't load model. Please verify your configuration file at {self.lollms_paths.personal_configuration_path} or use the next menu to select a valid model") ASCIIColors.error(f"Couldn't load model. Please verify your configuration file at {self.lollms_paths.personal_configuration_path} or use the next menu to select a valid model")
ASCIIColors.error(f"Binding returned this exception : {ex}") ASCIIColors.error(f"Binding returned this exception : {ex}")
trace_exception(ex)
ASCIIColors.error(f"{self.config.get_model_path_infos()}") ASCIIColors.error(f"{self.config.get_model_path_infos()}")
print("Please select a valid model or install a new one from a url") print("Please select a valid model or install a new one from a url")
model = None model = None
@ -102,7 +101,7 @@ class LollmsApplication:
except Exception as ex: except Exception as ex:
ASCIIColors.error(f"Couldn't load personality. Please verify your configuration file at {self.lollms_paths.personal_configuration_path} or use the next menu to select a valid personality") ASCIIColors.error(f"Couldn't load personality. Please verify your configuration file at {self.lollms_paths.personal_configuration_path} or use the next menu to select a valid personality")
ASCIIColors.error(f"Binding returned this exception : {ex}") ASCIIColors.error(f"Binding returned this exception : {ex}")
self.trace_exception(ex) trace_exception(ex)
ASCIIColors.error(f"{self.config.get_personality_path_infos()}") ASCIIColors.error(f"{self.config.get_personality_path_infos()}")
self.personality = None self.personality = None
self.mounted_personalities.append(self.personality) self.mounted_personalities.append(self.personality)

View File

@ -22,6 +22,7 @@ import importlib
import subprocess import subprocess
from lollms.config import TypedConfig, InstallOption from lollms.config import TypedConfig, InstallOption
from lollms.main_config import LOLLMSConfig from lollms.main_config import LOLLMSConfig
import traceback
import urllib import urllib
__author__ = "parisneo" __author__ = "parisneo"
@ -92,7 +93,6 @@ class LLMBinding:
print('An error occurred during installation:', str(e)) print('An error occurred during installation:', str(e))
shutil.rmtree(temp_dir) shutil.rmtree(temp_dir)
def get_file_size(self, url): def get_file_size(self, url):
# Send a HEAD request to retrieve file metadata # Send a HEAD request to retrieve file metadata
response = urllib.request.urlopen(url) response = urllib.request.urlopen(url)

View File

@ -241,7 +241,7 @@ Participating personalities:
antiprompt = self.personality.detect_antiprompt(bot_says) antiprompt = self.personality.detect_antiprompt(bot_says)
if antiprompt: if antiprompt:
self.bot_says = self.remove_text_from_string(bot_says,antiprompt) self.bot_says = self.remove_text_from_string(bot_says,antiprompt)
print("Detected hallucination") ASCIIColors.warning(f"Detected hallucination with antiprompt {antiprompt}")
return False return False
else: else:
self.bot_says = bot_says self.bot_says = bot_says

View File

@ -1,4 +1,15 @@
import traceback
def trace_exception(ex):
    """
    Print the full traceback of an exception in the error color.

    Useful as a one-call debug helper inside ``except`` blocks.

    Args:
        ex: The caught exception instance to trace.
    """
    # Render the complete traceback (type, message and stack) as one string,
    # then route it through ASCIIColors' error channel for visibility.
    tb_lines = traceback.format_exception(type(ex), ex, ex.__traceback__)
    ASCIIColors.error("".join(tb_lines))
class ASCIIColors: class ASCIIColors:
# Reset # Reset

View File

@ -836,9 +836,73 @@ Date: {{date}}
class StateMachine:
    """
    Minimal finite-state machine over a list of state descriptors.

    Each entry of ``states_dict`` is a dict of the form::

        {
            "name": <state name>,
            "commands": [ # list of commands
                "command": function
            ],
            "default": <default function>
        }

    The machine starts in the state at index 0.
    """

    def __init__(self, states_dict):
        # Keep the raw descriptor list; the active state is tracked by index.
        self.states_dict = states_dict
        self.current_state_id = 0

    def goto_state(self, state):
        """
        Transition to the state with the given name or index.

        Args:
            state (str or int): The name or index of the state to transition to.

        Raises:
            ValueError: If no state is found with the given name or index.
        """
        if isinstance(state, str):
            # Linear scan for a descriptor whose "name" matches.
            for idx, descriptor in enumerate(self.states_dict):
                if descriptor["name"] == state:
                    self.current_state_id = idx
                    return
        elif isinstance(state, int) and 0 <= state < len(self.states_dict):
            # Direct index transition, bounds-checked.
            self.current_state_id = state
            return
        # Unknown name, out-of-range index, or unsupported type.
        raise ValueError(f"No state found with name or index: {state}")
class APScript:
def process_state(self, command):
    """
    Process the given command based on the current state.

    Args:
        command: The command to process.

    Raises:
        ValueError: If the current state doesn't have the command and no default function is defined.
    """
    # Active state descriptor, selected by the index maintained by StateMachine.
    current_state = self.states_dict[self.current_state_id]
    commands = current_state["commands"]
    # NOTE(review): this unpacking assumes "commands" is an iterable of
    # (command, callable) pairs. The StateMachine docstring sketches a
    # dict-like mapping instead; if it is a dict, this should iterate
    # commands.items() — TODO confirm against the states_dict schema.
    for cmd, func in commands:
        if cmd == command:
            # First match wins; handler is invoked with no arguments.
            func()
            return
    # No handler matched: fall back to the state's optional default handler.
    default_func = current_state.get("default")
    if default_func is not None:
        default_func()
    else:
        raise ValueError(f"Command '{command}' not found in current state and no default function defined.")
class APScript(StateMachine):
""" """
Template class for implementing personality processor classes in the APScript framework. Template class for implementing personality processor classes in the APScript framework.
@ -848,9 +912,10 @@ class APScript:
def __init__( def __init__(
self, self,
personality :AIPersonality, personality :AIPersonality,
personality_config :TypedConfig personality_config :TypedConfig,
states_dict :dict = {}
) -> None: ) -> None:
super().__init__(states_dict)
self.files=[] self.files=[]
self.personality = personality self.personality = personality
self.personality_config = personality_config self.personality_config = personality_config
@ -982,7 +1047,7 @@ class APScript:
antiprompt = self.personality.detect_antiprompt(bot_says) antiprompt = self.personality.detect_antiprompt(bot_says)
if antiprompt: if antiprompt:
self.bot_says = self.remove_text_from_string(bot_says,antiprompt) self.bot_says = self.remove_text_from_string(bot_says,antiprompt)
ASCIIColors.warning("Detected hallucination") ASCIIColors.warning(f"Detected hallucination with antiprompt: {antiprompt}")
return False return False
else: else:
self.bot_says = bot_says self.bot_says = bot_says

View File

@ -462,10 +462,10 @@ class LoLLMsServer(LollmsApplication):
fd = personality.model.detokenize(tk[-min(self.config.ctx_size-n_cond_tk-personality.model_n_predicts,n_tokens):]) fd = personality.model.detokenize(tk[-min(self.config.ctx_size-n_cond_tk-personality.model_n_predicts,n_tokens):])
if personality.processor is not None and personality.processor_cfg["custom_workflow"]: if personality.processor is not None and personality.processor_cfg["custom_workflow"]:
print("processing...", end="", flush=True) ASCIIColors.info("processing...")
generated_text = personality.processor.run_workflow(prompt, previous_discussion_text=personality.personality_conditioning+fd, callback=callback) generated_text = personality.processor.run_workflow(prompt, previous_discussion_text=personality.personality_conditioning+fd, callback=callback)
else: else:
ASCIIColors.info("generating...", end="", flush=True) ASCIIColors.info("generating...")
generated_text = personality.model.generate( generated_text = personality.model.generate(
personality.personality_conditioning+fd, personality.personality_conditioning+fd,
n_predict=personality.model_n_predicts, n_predict=personality.model_n_predicts,
@ -475,7 +475,7 @@ class LoLLMsServer(LollmsApplication):
generated_text = personality.processor.process_model_output(generated_text) generated_text = personality.processor.process_model_output(generated_text)
full_discussion_blocks.append(generated_text.strip()) full_discussion_blocks.append(generated_text.strip())
ASCIIColors.success("\ndone", end="", flush=True) ASCIIColors.success("\ndone")
# Emit the generated text to the client # Emit the generated text to the client
self.socketio.emit('text_generated', {'text': generated_text}, room=client_id) self.socketio.emit('text_generated', {'text': generated_text}, room=client_id)

View File

@ -7,6 +7,7 @@ if TYPE_CHECKING:
from lollms.binding import BindingBuilder from lollms.binding import BindingBuilder
from lollms.config import InstallOption from lollms.config import InstallOption
from lollms.personality import PersonalityBuilder from lollms.personality import PersonalityBuilder
from lollms.helpers import trace_exception
from tqdm import tqdm from tqdm import tqdm
import pkg_resources import pkg_resources
@ -277,6 +278,7 @@ class MainMenu:
except Exception as ex: except Exception as ex:
ASCIIColors.error(f"Couldn't load personality. Please verify your configuration file at {lollms_app.configuration_path} or use the next menu to select a valid personality") ASCIIColors.error(f"Couldn't load personality. Please verify your configuration file at {lollms_app.configuration_path} or use the next menu to select a valid personality")
ASCIIColors.error(f"Binding returned this exception : {ex}") ASCIIColors.error(f"Binding returned this exception : {ex}")
trace_exception(ex)
ASCIIColors.error(f"{lollms_app.config.get_personality_path_infos()}") ASCIIColors.error(f"{lollms_app.config.get_personality_path_infos()}")
print("Please select a valid model or install a new one from a url") print("Please select a valid model or install a new one from a url")
self.select_model() self.select_model()

View File

@ -8,3 +8,4 @@ simple-websocket
eventlet eventlet
wget wget
setuptools setuptools
requests

View File

@ -7,3 +7,4 @@ flask-cors
simple-websocket simple-websocket
wget wget
setuptools setuptools
requests

View File

@ -26,7 +26,7 @@ def get_all_files(path):
setuptools.setup( setuptools.setup(
name="lollms", name="lollms",
version="2.1.32", version="2.1.35",
author="Saifeddine ALOUI", author="Saifeddine ALOUI",
author_email="aloui.saifeddine@gmail.com", author_email="aloui.saifeddine@gmail.com",
description="A python library for AI personality definition", description="A python library for AI personality definition",

View File

@ -3,7 +3,7 @@ import socketio
from pathlib import Path from pathlib import Path
from lollms import MSG_TYPE from lollms import MSG_TYPE
import time import time
import sys
# Connect to the Socket.IO server # Connect to the Socket.IO server
sio = socketio.Client() sio = socketio.Client()
@ -35,7 +35,10 @@ def test_generate_text(host, port, text_file):
@sio.event @sio.event
def text_chunk(data): def text_chunk(data):
print(data["chunk"],end="",flush=True) print(data["chunk"],end="")
sys.stdout = sys.__stdout__
sys.stdout.flush()
@sio.event @sio.event
def text_generated(data): def text_generated(data):