Version 2.0

This commit is contained in:
Saifeddine ALOUI 2023-06-21 13:01:43 +02:00
parent fd043bae32
commit 6ba81cb8a8
12 changed files with 688 additions and 584 deletions

View File

@ -180,7 +180,7 @@ Events generated:
- Creates a deep copy of the configuration (`self.config`) and assigns it to `self.cp_config`.
- Sets the `"model_name"` property of `self.cp_config` to the selected model name.
- Tries to create an instance of the binding class (`self.binding`) with `self.cp_config`.
- If successful, assigns the created binding instance to `self.current_model`.
- If successful, assigns the created binding instance to `self.active_model`.
- Emits a `'select_model'` event with a success response, indicating that the model selection was successful.
- Returns and exits the function.
- If an exception occurs during model creation, prints the exception and emits a `'select_model'` event with a failure response, indicating that a binding needs to be selected first.

View File

@ -5,76 +5,3 @@ __github__ = "https://github.com/ParisNeo/lollms"
__copyright__ = "Copyright 2023, "
__license__ = "Apache 2.0"
from lollms.binding import LLMBinding, LOLLMSConfig
from lollms.personality import AIPersonality, MSG_TYPE
from lollms.helpers import ASCIIColors
from lollms.paths import LollmsPaths
#from lollms.binding import LLMBinding
import importlib
from pathlib import Path
def reset_all_installs():
    """Force reinstallation of all bindings by deleting their ``.installed`` markers.

    Recursively scans the package directory for ``*.installed`` files and removes
    each one, so the next load of a binding triggers a fresh install.
    """
    # Fixed typo in the log message: "Removeing" -> "Removing".
    ASCIIColors.info("Removing .install files to force reinstall")
    folder_path = Path(__file__).parent
    path = Path(folder_path)
    ASCIIColors.info(f"Searching files from {path}")
    for file_path in path.rglob("*.installed"):
        file_path.unlink()
        ASCIIColors.info(f"Deleted file: {file_path}")
class BindingBuilder:
    """Factory that loads a binding package from disk and instantiates its class."""

    def build_binding(self, bindings_path: Path, cfg: LOLLMSConfig, force_reinstall=False) -> LLMBinding:
        """Load the binding module named in ``cfg["binding_name"]`` and build it.

        Args:
            bindings_path: Root folder containing all binding packages.
            cfg: Global configuration; ``binding_name`` selects the package.
            force_reinstall: Forwarded to the binding constructor.

        Returns:
            An instance of the binding class exposed by the package.
        """
        package_dir = (Path(bindings_path) / cfg["binding_name"]).resolve()
        # The module name is simply the binding package's folder name.
        loader = importlib.machinery.SourceFileLoader(package_dir.stem, str(package_dir / "__init__.py"))
        module = loader.load_module()
        # Each binding package exposes its concrete class name via ``binding_name``.
        binding_class: LLMBinding = getattr(module, module.binding_name)
        return binding_class(cfg, force_reinstall=force_reinstall)
class ModelBuilder:
    """Convenience wrapper that asks a binding to build its model eagerly."""

    def __init__(self, binding: LLMBinding):
        """Store the binding and immediately build its model."""
        self.binding = binding
        self.model = None
        self.build_model(binding.config)

    def build_model(self, cfg: LOLLMSConfig):
        """(Re)build the model.

        ``cfg`` is accepted for interface compatibility but currently unused:
        the binding builds from its own configuration.
        """
        self.model = self.binding.build_model()

    def get_model(self):
        """Return the model built at construction time."""
        return self.model
class PersonalityBuilder:
    """Builds the AIPersonality currently selected in the configuration."""

    def __init__(self, lollms_paths: LollmsPaths, config: LOLLMSConfig, model: LLMBinding):
        self.config = config
        self.lollms_paths = lollms_paths
        self.model = model
        # Set by build_personality(); None until then so get_personality()
        # does not raise AttributeError when called early.
        self.personality = None

    def build_personality(self, force_reinstall=False):
        """Instantiate the personality pointed to by ``active_personality_id``.

        Out-of-range ids — including negative ones, which previously indexed the
        list from the end — are reset to 0 with a warning. A ``category/author/name``
        triple is resolved inside the personalities zoo; anything else is treated
        as a non-relative path.

        Args:
            force_reinstall: Forwarded to the AIPersonality constructor.

        Returns:
            The built AIPersonality instance.
        """
        active_id = self.config["active_personality_id"]
        if active_id >= len(self.config["personalities"]) or active_id < 0:
            ASCIIColors.warning("Personality ID was out of range. Resetting to 0.")
            self.config["active_personality_id"] = 0
        selected = self.config["personalities"][self.config["active_personality_id"]]
        if len(selected.split("/")) == 3:
            self.personality = AIPersonality(self.lollms_paths, self.lollms_paths.personalities_zoo_path / selected, self.model, force_reinstall=force_reinstall)
        else:
            self.personality = AIPersonality(self.lollms_paths, selected, self.model, is_relative_path=False, force_reinstall=force_reinstall)
        return self.personality

    def get_personality(self):
        """Return the last built personality, or None if none was built yet."""
        return self.personality

View File

@ -16,7 +16,7 @@ import yaml
from tqdm import tqdm
import importlib
import subprocess
from lollms.config import TypedConfig
from lollms.config import TypedConfig, InstallOption
from lollms.main_config import LOLLMSConfig
@ -26,36 +26,17 @@ __copyright__ = "Copyright 2023, "
__license__ = "Apache 2.0"
import yaml
class BindingInstaller:
    """Base helper for binding installers, providing shared install utilities."""

    def __init__(self, config: LOLLMSConfig) -> None:
        self.config = config

    def reinstall_pytorch_with_cuda(self):
        """Reinstall PyTorch with CUDA 11.7 wheels, falling back to CPU wheels on failure."""
        result = subprocess.run(["pip", "install", "--upgrade", "torch", "torchvision", "torchaudio", "--no-cache-dir", "--index-url", "https://download.pytorch.org/whl/cu117"])
        if result.returncode != 0:
            ASCIIColors.warning("Couldn't find Cuda build tools on your PC. Reverting to CPU.")
            result = subprocess.run(["pip", "install", "--upgrade", "torch", "torchvision", "torchaudio", "--no-cache-dir"])
            if result.returncode != 0:
                ASCIIColors.error("Couldn't install pytorch !!")
            else:
                # Fixed: the success message was emitted through ASCIIColors.error;
                # report it through the info channel instead.
                ASCIIColors.info("Pytorch installed successfully!!")
class LLMBinding:
file_extension='*.bin'
binding_path = Path(__file__).parent
def __init__(
self,
binding_dir:Path,
lollms_paths:LollmsPaths,
config:LOLLMSConfig,
binding_config:TypedConfig,
force_install:bool=False
installation_option:InstallOption=InstallOption.INSTALL_IF_NECESSARY
) -> None:
self.binding_dir = binding_dir
self.binding_folder_name = binding_dir.stem
@ -67,7 +48,9 @@ class LLMBinding:
self.configuration_file_path = lollms_paths.personal_configuration_path/f"binding_{self.binding_folder_name}.yaml"
self.binding_config.config.file_path = self.configuration_file_path
if not self.configuration_file_path.exists() or force_install:
# Installation
if (not self.configuration_file_path.exists() or installation_option==InstallOption.FORCE_INSTALL) and installation_option!=InstallOption.NEVER_INSTALL:
self.install()
self.binding_config.config.save_config()
else:
@ -126,8 +109,11 @@ class LLMBinding:
Returns:
dict: A dictionary containing the loaded data from the local_config.yaml file.
"""
self.binding_config.config.load_config()
"""
try:
self.binding_config.config.load_config()
except:
self.binding_config.config.save_config()
self.binding_config.sync()
def save_config_file(self, path):
@ -188,19 +174,64 @@ class LLMBinding:
return " ".join(tokens_list)
@staticmethod
def install_binding(binding_path, config: LOLLMSConfig):
    """Run a binding's ``install.py`` script, if present.

    Args:
        binding_path (Path): Folder of the binding package; searched for ``install.py``.
        config (LOLLMSConfig): Global configuration handed to the script's ``Install`` class.
    """
    install_file_name = "install.py"
    install_script_path = binding_path / install_file_name
    if install_script_path.exists():
        module_name = install_file_name[:-3]  # Remove the ".py" extension
        module_spec = importlib.util.spec_from_file_location(module_name, str(install_script_path))
        module = importlib.util.module_from_spec(module_spec)
        module_spec.loader.exec_module(module)
        # The install script is expected to expose an ``Install`` class whose
        # constructor performs the actual installation work.
        if hasattr(module, "Install"):
            module.Install(config)
# To implement by children
# @staticmethod
# def get_available_models():
# ===============================
class BindingInstaller:
    """Base helper for binding installers, providing shared install utilities."""

    def __init__(self, config: LOLLMSConfig) -> None:
        self.config = config

    def reinstall_pytorch_with_cuda(self):
        """Reinstall PyTorch with CUDA 11.7 wheels, falling back to CPU wheels on failure."""
        result = subprocess.run(["pip", "install", "--upgrade", "torch", "torchvision", "torchaudio", "--no-cache-dir", "--index-url", "https://download.pytorch.org/whl/cu117"])
        if result.returncode != 0:
            ASCIIColors.warning("Couldn't find Cuda build tools on your PC. Reverting to CPU.")
            result = subprocess.run(["pip", "install", "--upgrade", "torch", "torchvision", "torchaudio", "--no-cache-dir"])
            if result.returncode != 0:
                ASCIIColors.error("Couldn't install pytorch !!")
            else:
                # Fixed: the success message was emitted through ASCIIColors.error;
                # report it through the info channel instead.
                ASCIIColors.info("Pytorch installed successfully!!")
class BindingBuilder:
    """Factory that locates, loads and instantiates the configured binding."""

    def build_binding(
        self,
        config: LOLLMSConfig,
        lollms_paths: LollmsPaths,
        installation_option: InstallOption = InstallOption.INSTALL_IF_NECESSARY
    ) -> LLMBinding:
        """Resolve the binding package named by ``config.binding_name`` and build it.

        Args:
            config: Global configuration; ``binding_name`` selects the package.
            lollms_paths: Path registry used to locate the bindings zoo.
            installation_option: Install policy forwarded to the binding constructor.

        Returns:
            An instance of the binding class exposed by the package.
        """
        # A name containing a path separator is treated as an explicit path;
        # otherwise the binding is looked up inside the bindings zoo.
        if len(str(config.binding_name).split("/")) > 1:
            package_dir = Path(config.binding_name)
        else:
            package_dir = lollms_paths.bindings_zoo_path / config["binding_name"]
        resolved = package_dir.resolve()
        # The module name is simply the binding package's folder name.
        loader = importlib.machinery.SourceFileLoader(package_dir.stem, str(resolved / "__init__.py"))
        module = loader.load_module()
        # Each binding package exposes its concrete class name via ``binding_name``.
        binding_class: LLMBinding = getattr(module, module.binding_name)
        return binding_class(
            config,
            lollms_paths=lollms_paths,
            installation_option=installation_option
        )
class ModelBuilder:
    """Convenience wrapper that asks a binding to build its model eagerly."""

    def __init__(self, binding: LLMBinding):
        """Store the binding and immediately build its model."""
        self.binding = binding
        self.model = None
        self.build_model()

    def build_model(self):
        """(Re)build the model from the binding's own configuration."""
        self.model = self.binding.build_model()

    def get_model(self):
        """Return the model built at construction time."""
        return self.model

@ -1 +1 @@
Subproject commit dc6ac41b445ee20678e115f51818968c869e3cb8
Subproject commit 721fdce27c00950364db64677bffc621907b791a

View File

@ -1,201 +1,20 @@
from pathlib import Path
from lollms.helpers import ASCIIColors
import yaml
class BaseConfig:
"""
A base class for managing configuration data.
The `BaseConfig` class provides basic functionality to load, save, and access configuration data.
Attributes:
exceptional_keys (list): A list of exceptional keys that can be accessed directly as attributes.
config (dict): The configuration data stored as a dictionary.
Methods:
to_dict():
Returns the configuration data as a dictionary.
__getitem__(key):
Retrieves the configuration value associated with the specified key.
__getattr__(key):
Retrieves the configuration value associated with the specified key as an attribute.
__setattr__(key, value):
Sets the value of the configuration key.
__setitem__(key, value):
Sets the value of the configuration key.
__contains__(item):
Checks if the configuration contains the specified key.
load_config(file_path):
Loads the configuration from a YAML file.
save_config(file_path):
Saves the configuration to a YAML file.
"""
def __init__(self, exceptional_keys: list = [], config: dict = None, file_path:Path|str=None):
"""
Initializes a new instance of the `BaseConfig` class.
Args:
exceptional_keys (list, optional): A list of exceptional keys that can be accessed directly as attributes.
Defaults to an empty list.
config (dict, optional): The configuration data stored as a dictionary. Defaults to None.
"""
self.exceptional_keys = exceptional_keys
self.config = config
self.file_path = file_path
def to_dict(self):
"""
Returns the configuration data as a dictionary.
Returns:
dict: The configuration data as a dictionary.
"""
return self.config
def __getitem__(self, key):
"""
Retrieves the configuration value associated with the specified key.
Args:
key (Any): The key to retrieve the configuration value.
Returns:
Any: The configuration value associated with the key.
Raises:
ValueError: If no configuration is loaded.
KeyError: If the specified key is not found in the configuration.
"""
if self.config is None:
raise ValueError("No configuration loaded.")
return self.config[key]
def __getattr__(self, key):
"""
Retrieves the configuration value associated with the specified key as an attribute.
Args:
key (str): The key to retrieve the configuration value.
Returns:
Any: The configuration value associated with the key.
Raises:
ValueError: If no configuration is loaded.
AttributeError: If the specified key is not found in the configuration.
"""
if key == "exceptional_keys":
return super().__getattribute__(key)
if key in self.exceptional_keys + ["config","file_path"] or key.startswith("__"):
return super().__getattribute__(key)
else:
if self.config is None:
raise ValueError("No configuration loaded.")
return self.config[key]
def __setattr__(self, key, value):
"""
Sets the value of the configuration key.
Args:
key (str): The key of the configuration.
value (Any): The new value for the configuration key.
Raises:
ValueError: If no configuration is loaded.
"""
if key == "exceptional_keys":
return super().__setattr__(key, value)
if key in self.exceptional_keys + ["config","file_path"] or key.startswith("__"):
super().__setattr__(key, value)
else:
if self.config is None:
raise ValueError("No configuration loaded.")
self.config[key] = value
def __setitem__(self, key, value):
"""
Sets the value of the configuration key.
Args:
key (str): The key of the configuration.
value (Any): The new value for the configuration key.
Raises:
ValueError: If no configuration is loaded.
"""
if self.config is None:
raise ValueError("No configuration loaded.")
self.config[key] = value
def __contains__(self, item):
"""
Checks if the configuration contains the specified key.
Args:
item (str): The key to check.
Returns:
bool: True if the key is present in the configuration, False otherwise.
Raises:
ValueError: If no configuration is loaded.
"""
if self.config is None:
raise ValueError("No configuration loaded.")
return item in self.config
def load_config(self, file_path: Path | str = None):
"""
Loads the configuration from a YAML file.
Args:
file_path (str or Path, optional): The path to the YAML file. If not provided, uses the previously set file path.
Raises:
ValueError: If no configuration file path is specified.
FileNotFoundError: If the specified file path does not exist.
yaml.YAMLError: If there is an error parsing the YAML file.
"""
if file_path is None:
if self.file_path is None:
raise ValueError("No configuration file path specified.")
file_path = self.file_path
file_path = Path(file_path)
if not file_path.exists():
raise FileNotFoundError(f"Configuration file not found: {file_path}")
with open(file_path, 'r', encoding='utf-8') as stream:
self.config = yaml.safe_load(stream)
def save_config(self, file_path=None):
"""
Saves the configuration to a YAML file.
Args:
file_path (str or Path, optional): The path to the YAML file. If not provided, uses the previously set file path.
Raises:
ValueError: If no configuration is loaded.
ValueError: If no configuration file path is specified.
PermissionError: If the user does not have permission to write to the specified file path.
yaml.YAMLError: If there is an error serializing the configuration to YAML.
"""
if file_path is None:
if self.file_path is None:
raise ValueError("No configuration file path specified.")
file_path = self.file_path
if self.config is None:
raise ValueError("No configuration loaded.")
file_path = Path(file_path)
with open(file_path, "w") as f:
yaml.dump(self.config, f)
from enum import Enum


class InstallOption(Enum):
    """How aggressively an installation should be performed."""

    # Do not install under any circumstances.
    NEVER_INSTALL = 1
    # Install when required, but never force a reinstall.
    INSTALL_IF_NECESSARY = 2
    # Always (re)install, regardless of the current state.
    FORCE_INSTALL = 3
class ConfigTemplate:
@ -376,6 +195,211 @@ class ConfigTemplate:
return False
class BaseConfig:
    """
    A base class for managing configuration data.

    Exposes an underlying configuration dictionary both through item access
    (``cfg["key"]``) and attribute access (``cfg.key``), and supports loading
    from / saving to YAML files.

    Attributes:
        exceptional_keys (list): Keys stored as real attributes instead of being
            routed into the configuration dictionary.
        config (dict): The configuration data stored as a dictionary.
        file_path (Path | str): Default path used by `load_config`/`save_config`.
    """

    def __init__(self, exceptional_keys: list = None, config: dict = None, file_path: Path | str = None):
        """
        Initializes a new instance of the `BaseConfig` class.

        Args:
            exceptional_keys (list, optional): Keys that can be accessed directly
                as attributes. Defaults to an empty list. Fixed: a ``None`` sentinel
                replaces the shared mutable default argument ``[]``.
            config (dict, optional): The configuration data. Defaults to None.
            file_path (Path | str, optional): Default YAML file path. Defaults to None.
        """
        self.exceptional_keys = [] if exceptional_keys is None else exceptional_keys
        self.config = config
        self.file_path = file_path

    @staticmethod
    def from_template(template: ConfigTemplate, exceptional_keys: list = None):
        """
        Builds a BaseConfig whose entries are the template's name/value pairs.

        Args:
            template (ConfigTemplate): Template whose entries supply names and values.
            exceptional_keys (list, optional): Forwarded to the constructor.
                Fixed: ``None`` sentinel instead of a shared mutable default.
        """
        config = {entry["name"]: entry["value"] for entry in template.template}
        return BaseConfig(exceptional_keys, config)

    def to_dict(self):
        """
        Returns the configuration data as a dictionary.

        Returns:
            dict: The configuration data (the live dict, not a copy).
        """
        return self.config

    def __getitem__(self, key):
        """
        Retrieves the configuration value associated with the specified key.

        Raises:
            ValueError: If no configuration is loaded.
            KeyError: If the specified key is not found in the configuration.
        """
        if self.config is None:
            raise ValueError("No configuration loaded.")
        return self.config[key]

    def __getattr__(self, key):
        """
        Retrieves the configuration value associated with the specified key as an attribute.

        Raises:
            ValueError: If no configuration is loaded.
            KeyError: If the specified key is not found in the configuration.
                NOTE(review): missing keys surface as KeyError rather than
                AttributeError, so ``hasattr`` does not work for config keys —
                confirm callers rely on this before changing it.
        """
        # Internal attributes and dunders must be resolved through the normal
        # mechanism to avoid infinite recursion.
        if key == "exceptional_keys":
            return super().__getattribute__(key)
        if key in self.exceptional_keys + ["config", "file_path"] or key.startswith("__"):
            return super().__getattribute__(key)
        else:
            if self.config is None:
                raise ValueError("No configuration loaded.")
            return self.config[key]

    def __setattr__(self, key, value):
        """
        Sets the value of the configuration key (or a real attribute for
        exceptional keys and internals).

        Raises:
            ValueError: If no configuration is loaded.
        """
        if key == "exceptional_keys":
            return super().__setattr__(key, value)
        if key in self.exceptional_keys + ["config", "file_path"] or key.startswith("__"):
            super().__setattr__(key, value)
        else:
            if self.config is None:
                raise ValueError("No configuration loaded.")
            self.config[key] = value

    def __setitem__(self, key, value):
        """
        Sets the value of the configuration key.

        Raises:
            ValueError: If no configuration is loaded.
        """
        if self.config is None:
            raise ValueError("No configuration loaded.")
        self.config[key] = value

    def __contains__(self, item):
        """
        Checks if the configuration contains the specified key.

        Raises:
            ValueError: If no configuration is loaded.
        """
        if self.config is None:
            raise ValueError("No configuration loaded.")
        return item in self.config

    def load_config(self, file_path: Path | str = None):
        """
        Loads the configuration from a YAML file.

        Args:
            file_path (str or Path, optional): YAML file path; falls back to the
                stored ``file_path`` when omitted.

        Raises:
            ValueError: If no configuration file path is specified.
            FileNotFoundError: If the specified file path does not exist.
            yaml.YAMLError: If there is an error parsing the YAML file.
        """
        if file_path is None:
            if self.file_path is None:
                raise ValueError("No configuration file path specified.")
            file_path = self.file_path
        file_path = Path(file_path)
        if not file_path.exists():
            raise FileNotFoundError(f"Configuration file not found: {file_path}")
        with open(file_path, 'r', encoding='utf-8') as stream:
            self.config = yaml.safe_load(stream)

    def save_config(self, file_path=None):
        """
        Saves the configuration to a YAML file.

        Args:
            file_path (str or Path, optional): YAML file path; falls back to the
                stored ``file_path`` when omitted.

        Raises:
            ValueError: If no configuration is loaded or no file path is specified.
            PermissionError: If the user cannot write to the specified file path.
            yaml.YAMLError: If there is an error serializing the configuration.
        """
        if file_path is None:
            if self.file_path is None:
                raise ValueError("No configuration file path specified.")
            file_path = self.file_path
        if self.config is None:
            raise ValueError("No configuration loaded.")
        file_path = Path(file_path)
        # Fixed: write with an explicit UTF-8 encoding, matching load_config,
        # instead of depending on the platform default.
        with open(file_path, "w", encoding="utf-8") as f:
            yaml.dump(self.config, f)
class TypedConfig:
"""
This type of configuration contains a template of descriptions for the fields of the configuration.
@ -396,6 +420,55 @@ class TypedConfig:
# Fill the template values from the config values
self.sync()
def get(self, key, default_value=None):
    """Return the configuration value for ``key``, or ``default_value`` when absent.

    Raises:
        ValueError: If no configuration has been loaded yet.
    """
    if self.config is None:
        raise ValueError("No configuration loaded.")
    return self.config[key] if key in self.config else default_value
def __getattr__(self, key):
    """
    Retrieves the configuration entry with the specified key as an attribute.

    Args:
        key (str): The name of the configuration entry.

    Returns:
        dict: The configuration entry with the specified key, or None if not found.

    Raises:
        ValueError: If no configuration is loaded.
    """
    # Internal attributes and dunders must be resolved through the normal
    # attribute mechanism to avoid infinite recursion through __getattr__.
    if key == "exceptional_keys":
        return super().__getattribute__(key)
    if key in ["config","config_template"] or key.startswith("__"):
        return super().__getattribute__(key)
    else:
        if self.config is None:
            raise ValueError("No configuration loaded.")
        # NOTE(review): a missing key raises KeyError here, not AttributeError,
        # so hasattr() does not work for config keys — confirm callers expect that.
        return self.config[key]
def __getitem__(self, key):
    """Return the configuration entry stored under ``key``.

    Raises:
        ValueError: If no configuration is loaded.
        KeyError: If ``key`` is not present in the configuration.
    """
    config = self.config
    if config is None:
        raise ValueError("No configuration loaded.")
    return config[key]
def sync(self):
"""
Fills the template values from the config values.

View File

@ -1,19 +1,33 @@
from lollms.personality import MSG_TYPE
from lollms.config import InstallOption
from lollms.binding import BindingBuilder, ModelBuilder
from lollms.personality import MSG_TYPE, PersonalityBuilder
from lollms.main_config import LOLLMSConfig
from lollms.helpers import ASCIIColors
from lollms.paths import LollmsPaths
from lollms import reset_all_installs
import yaml
from pathlib import Path
import sys
from tqdm import tqdm
import pkg_resources
import argparse
from tqdm import tqdm
from lollms import BindingBuilder, ModelBuilder, PersonalityBuilder
import yaml
import sys
class LollmsApplication:
    """Minimal application context bundling the configuration and the lollms paths."""

    def __init__(self, config: LOLLMSConfig, lollms_paths: LollmsPaths) -> None:
        """Keep references so menus and subclasses can reach the shared state."""
        self.config = config
        self.lollms_paths = lollms_paths
def reset_all_installs(lollms_paths: LollmsPaths):
    """Delete per-component YAML configuration files to force reinstallation.

    Every ``*.yaml`` file in the personal configuration directory is removed,
    except the main ``local_config.yaml``.

    Args:
        lollms_paths: Path registry providing ``personal_configuration_path``.
    """
    # Fixed typo in the log message: "Removeing" -> "Removing".
    ASCIIColors.info("Removing all configuration files to force reinstall")
    ASCIIColors.info(f"Searching files from {lollms_paths.personal_configuration_path}")
    for file_path in lollms_paths.personal_configuration_path.iterdir():
        if file_path.name != "local_config.yaml" and file_path.suffix.lower() == ".yaml":
            file_path.unlink()
            ASCIIColors.info(f"Deleted file: {file_path}")
class MainMenu:
def __init__(self, lollms_app):
def __init__(self, lollms_app:LollmsApplication):
self.binding_infs = []
self.lollms_app = lollms_app
@ -76,7 +90,7 @@ class MainMenu:
with open(p/"models.yaml", "r") as f:
models = yaml.safe_load(f)
is_installed = (self.lollms_app.lollms_paths.personal_configuration_path/f"binding_{p.name}.yaml").exists()
entry=f"{ASCIIColors.color_green if is_installed else ''}{card['name']} (by {card['author']})"
entry=f"{ASCIIColors.color_green if is_installed else ''}{'*' if self.lollms_app.config['binding_name']==card['name'] else ''} {card['name']} (by {card['author']})"
bindings_list.append(entry)
entry={
"name":p.name,
@ -190,7 +204,7 @@ class MainMenu:
def reinstall_binding(self):
lollms_app = self.lollms_app
try:
lollms_app.binding = BindingBuilder().build_binding(lollms_app.lollms_paths.bindings_zoo_path, lollms_app.config, force_reinstall=True)
lollms_app.binding = BindingBuilder().build_binding(lollms_app.config, lollms_app.lollms_paths,InstallOption.FORCE_INSTALL)
except Exception as ex:
print(ex)
print(f"Couldn't find binding. Please verify your configuration file at {lollms_app.config.file_path} or use the next menu to select a valid binding")
@ -199,7 +213,7 @@ class MainMenu:
def reinstall_personality(self):
lollms_app = self.lollms_app
try:
lollms_app.personality = PersonalityBuilder(lollms_app.lollms_paths, lollms_app.config, lollms_app.model).build_personality(force_reinstall=True)
lollms_app.personality = PersonalityBuilder(lollms_app.lollms_paths, lollms_app.config, lollms_app.model, installation_option=InstallOption.FORCE_INSTALL).build_personality()
except Exception as ex:
ASCIIColors.error(f"Couldn't load personality. Please verify your configuration file at {lollms_app.configuration_path} or use the next menu to select a valid personality")
ASCIIColors.error(f"Binding returned this exception : {ex}")
@ -244,7 +258,7 @@ class MainMenu:
else:
print("Invalid choice! Try again.")
class Conversation:
class Conversation(LollmsApplication):
def __init__(
self,
configuration_path:str|Path=None,
@ -262,14 +276,17 @@ class Conversation:
self.bot_says = ""
# get paths
self.lollms_paths = LollmsPaths.find_paths(force_local=False)
lollms_paths = LollmsPaths.find_paths(force_local=False)
# Configuration loading part
config = LOLLMSConfig.autoload(lollms_paths, configuration_path)
super().__init__(config, lollms_paths=lollms_paths)
# Build menu
self.menu = MainMenu(self)
# Configuration loading part
self.config = LOLLMSConfig.autoload(self.lollms_paths, configuration_path)
if self.config.model_name is None:
self.menu.select_model()
@ -382,12 +399,12 @@ Participating personalities:
# cfg.download_model(url)
else:
try:
self.binding = BindingBuilder().build_binding(self.lollms_paths.bindings_zoo_path, self.config)
self.binding = BindingBuilder().build_binding(self.config, self.lollms_paths)
except Exception as ex:
print(ex)
print(f"Couldn't find binding. Please verify your configuration file at {self.configuration_path} or use the next menu to select a valid binding")
print(f"Trying to reinstall binding")
self.binding = BindingBuilder().build_binding(self.lollms_paths.bindings_zoo_path, self.config,force_reinstall=True)
self.binding = BindingBuilder().build_binding(self.config, self.lollms_paths,installation_option=InstallOption.FORCE_INSTALL)
self.menu.select_binding()
def load_model(self):

@ -1 +1 @@
Subproject commit cc16d94f2ebaee7736891cf507ca5cc0026d58d6
Subproject commit 25f8e84618c5a46c2bb7cb6fbb6817b53ef86c2d

View File

@ -1,5 +1,7 @@
from datetime import datetime
from pathlib import Path
from lollms.config import InstallOption, TypedConfig, BaseConfig
from lollms.main_config import LOLLMSConfig
from lollms.paths import LollmsPaths
from lollms.binding import LLMBinding
@ -13,227 +15,8 @@ import importlib
import shutil
import subprocess
import yaml
from enum import Enum
from lollms.helpers import ASCIIColors
class MSG_TYPE(Enum):
    """Enumeration of message payload types (semantics implied by member names)."""

    MSG_TYPE_CHUNK = 0
    MSG_TYPE_FULL = 1
    MSG_TYPE_META = 2
    MSG_TYPE_REF = 3
    MSG_TYPE_CODE = 4
    MSG_TYPE_UI = 5
class APScript:
"""
Template class for implementing personality processor classes in the APScript framework.
This class provides a basic structure and placeholder methods for processing model inputs and outputs.
Personality-specific processor classes should inherit from this class and override the necessary methods.
Methods:
__init__():
Initializes the APScript object.
run_workflow(generate_fn, prompt):
Runs the workflow for processing the model input and output.
process_model_input(text):
Process the model input.
process_model_output(text):
Process the model output.
Attributes:
None
Usage:
```
# Create a personality-specific processor class that inherits from APScript
class MyPersonalityProcessor(APScript):
def __init__(self):
super().__init__()
def process_model_input(self, text):
# Implement the desired behavior for processing the model input
# and return the processed model input
def process_model_output(self, text):
# Implement the desired behavior for processing the model output
# and return the processed model output
# Create an instance of the personality processor
my_processor = MyPersonalityProcessor()
# Define the generate function and prompt
def generate_fn(prompt):
# Implement the logic to generate model output based on the prompt
# and return the generated text
prompt = "Enter your input: "
# Run the workflow
my_processor.run_workflow(generate_fn, prompt)
```
"""
def __init__(self, personality=None) -> None:
self.files=[]
self.personality = personality
def install_personality(self, personality_path, force_reinstall=False):
install_file_name = "install.py"
install_script_path = personality_path/ "scripts" / install_file_name
if install_script_path.exists():
module_name = install_file_name[:-3] # Remove the ".py" extension
module_spec = importlib.util.spec_from_file_location(module_name, str(install_script_path))
module = importlib.util.module_from_spec(module_spec)
module_spec.loader.exec_module(module)
if hasattr(module, "Install"):
module.Install(self.personality,force_reinstall=force_reinstall)
def add_file(self, path):
self.files.append(path)
return True
def remove_file(self, path):
self.files.remove(path)
def load_config_file(self, path, default_config=None):
"""
Load the content of local_config.yaml file.
The function reads the content of the local_config.yaml file and returns it as a Python dictionary.
If a default_config is provided, it fills any missing entries in the loaded dictionary.
If at least one field from default configuration was not present in the loaded configuration, the updated
configuration is saved.
Args:
path (str): The path to the local_config.yaml file.
default_config (dict, optional): A dictionary with default values to fill missing entries.
Returns:
dict: A dictionary containing the loaded data from the local_config.yaml file, with missing entries filled
by default_config if provided.
"""
with open(path, 'r') as file:
data = yaml.safe_load(file)
if default_config:
updated = False
for key, value in default_config.items():
if key not in data:
data[key] = value
updated = True
if updated:
self.save_config_file(path, data)
return data
def save_config_file(self, path, data):
"""
Save the configuration data to a local_config.yaml file.
Args:
path (str): The path to save the local_config.yaml file.
data (dict): The configuration data to be saved.
Returns:
None
"""
with open(path, 'w') as file:
yaml.dump(data, file)
def remove_text_from_string(self, string, text_to_find):
"""
Removes everything from the first occurrence of the specified text in the string (case-insensitive).
Parameters:
string (str): The original string.
text_to_find (str): The text to find in the string.
Returns:
str: The updated string.
"""
index = string.lower().find(text_to_find.lower())
if index != -1:
string = string[:index]
return string
def process(self, text:str, message_type:MSG_TYPE):
bot_says = self.bot_says + text
antiprompt = self.personality.detect_antiprompt(bot_says)
if antiprompt:
self.bot_says = self.remove_text_from_string(bot_says,antiprompt)
ASCIIColors.warning("Detected hallucination")
return False
else:
self.bot_says = bot_says
return True
def generate(self, prompt, max_size):
self.bot_says = ""
return self.personality.model.generate(
prompt,
max_size,
self.process,
temperature=self.personality.model_temperature,
top_k=self.personality.model_top_k,
top_p=self.personality.model_top_p,
repeat_penalty=self.personality.model_repeat_penalty,
).strip()
def run_workflow(self, prompt:str, previous_discussion_text:str="", callback=None):
"""
Runs the workflow for processing the model input and output.
This method should be called to execute the processing workflow.
Args:
generate_fn (function): A function that generates model output based on the input prompt.
The function should take a single argument (prompt) and return the generated text.
prompt (str): The input prompt for the model.
previous_discussion_text (str, optional): The text of the previous discussion. Default is an empty string.
Returns:
None
"""
return None
def process_model_input(self, text:str):
"""
Process the model input.
This method should be overridden in the personality-specific processor class to define
the desired behavior for processing the model input.
Args:
text (str): The model input text.
Returns:
Any: The processed model input.
"""
return None
def process_model_output(self, text:str):
    """
    Hook for transforming the model output after generation.

    Placeholder: override in a personality-specific processor to clean up,
    reformat, or post-process the generated text.

    Args:
        text (str): The model output text.

    Returns:
        Any: The processed output, or None (base implementation) to mean
        "no custom processing".
    """
    return None
from lollms.types import MSG_TYPE
@ -269,12 +52,13 @@ class AIPersonality:
def __init__(
self,
personality_package_path: str|Path,
lollms_paths:LollmsPaths,
personality_package_path: str|Path = None,
config:LOLLMSConfig,
model:LLMBinding=None,
run_scripts=True,
is_relative_path=True,
force_reinstall=False
installation_option:InstallOption=InstallOption.INSTALL_IF_NECESSARY
):
"""
Initialize an AIPersonality instance.
@ -287,10 +71,11 @@ class AIPersonality:
"""
self.lollms_paths = lollms_paths
self.model = model
self.config = config
self.files = []
self.force_reinstall = force_reinstall
self.installation_option = installation_option
# First setup a default personality
# Version
@ -366,6 +151,8 @@ Date: {{date}}
if not self.personality_package_path.is_dir():
raise ValueError("The provided path is not a folder.")
self.personality_folder_name = self.personality_package_path.stem
# Open and store the personality
self.load_personality(personality_package_path)
@ -456,24 +243,6 @@ Date: {{date}}
if self.run_scripts:
#If it has an install script then execute it.
install_file_name = "install.py"
self.install_script_path = self.scripts_path / install_file_name
if self.install_script_path.exists():
module_name = install_file_name[:-3] # Remove the ".py" extension
module_spec = importlib.util.spec_from_file_location(module_name, str(self.install_script_path))
module = importlib.util.module_from_spec(module_spec)
module_spec.loader.exec_module(module)
if hasattr(module, "Install"):
self._install = module.Install(self, force_reinstall=self.force_reinstall)
else:
self._install = None
#Install requirements
for entry in self._dependencies:
if not is_package_installed(entry):
install_package(entry)
# Search for any processor code
processor_file_name = "processor.py"
self.processor_script_path = self.scripts_path / processor_file_name
@ -992,12 +761,12 @@ Date: {{date}}
self._assets_list = value
@property
def processor(self) -> APScript:
def processor(self) -> 'APScript':
"""Get the number of words to consider for repeat penalty."""
return self._processor
@processor.setter
def processor(self, value: APScript):
def processor(self, value: 'APScript'):
"""Set the number of words to consider for repeat penalty.
Args:
@ -1065,6 +834,255 @@ Date: {{date}}
output_string = re.sub(pattern, replace, input_string)
return output_string
class APScript:
    """
    Template class for implementing personality processor classes in the APScript framework.

    This class provides a basic structure and placeholder methods for processing model
    inputs and outputs. Personality-specific processor classes should inherit from this
    class and override the necessary methods (run_workflow, process_model_input,
    process_model_output).
    """
    def __init__(
            self,
            personality: AIPersonality,
            personality_config: TypedConfig
    ) -> None:
        """
        Initialize the processor, then install it or load its saved configuration.

        Args:
            personality (AIPersonality): The personality this processor is attached to.
            personality_config (TypedConfig): Typed configuration template for this processor.
        """
        self.files = []  # files attached to the personality at runtime
        self.personality = personality
        self.personality_config = personality_config
        self.installation_option = personality.installation_option
        # Per-personality config file kept in the user's personal configuration folder
        self.configuration_file_path = self.personality.lollms_paths.personal_configuration_path/f"personality_{self.personality.personality_folder_name}.yaml"
        self.personality_config.config.file_path = self.configuration_file_path

        # Install when no config file exists yet or a forced reinstall was requested,
        # unless installation is explicitly disabled.
        if (not self.configuration_file_path.exists() or self.installation_option == InstallOption.FORCE_INSTALL) and self.installation_option != InstallOption.NEVER_INSTALL:
            self.install()
            self.personality_config.config.save_config()
        else:
            self.load_personality_config()

        # Folder where this personality may store its own models
        self.models_folder = self.personality.lollms_paths.personal_models_path / self.personality.personality_folder_name
        self.models_folder.mkdir(parents=True, exist_ok=True)

    def load_personality_config(self):
        """
        Load this processor's configuration from its yaml file.

        If loading fails (missing or unreadable file), a fresh configuration
        file is written from the current defaults instead of propagating the
        error, then the typed config is synchronized.

        Returns:
            None
        """
        try:
            self.personality_config.config.load_config()
        except Exception:
            # Narrowed from a bare `except:` which would also swallow
            # KeyboardInterrupt/SystemExit; any load failure falls back to
            # rewriting the file from defaults.
            self.personality_config.config.save_config()
        self.personality_config.sync()

    def install(self):
        """
        Installation procedure (to be overridden by subclasses).

        The base implementation only announces the installation in the console.
        """
        ASCIIColors.blue("*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*")
        ASCIIColors.red(f"Installing {self.personality.personality_folder_name}")
        ASCIIColors.blue("*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*")

    def add_file(self, path):
        """
        Attach a file to this personality.

        Args:
            path: Path of the file to attach.

        Returns:
            bool: Always True (kept for caller compatibility).
        """
        self.files.append(path)
        return True

    def remove_file(self, path):
        """
        Detach a previously attached file.

        Args:
            path: Path of the file to remove.

        Raises:
            ValueError: If the file was never attached (list.remove semantics).
        """
        self.files.remove(path)

    def load_config_file(self, path, default_config=None):
        """
        Load the content of a local_config.yaml file.

        Reads the yaml file and returns it as a Python dictionary. If a
        default_config is provided, any missing keys are filled from it, and
        when at least one key had to be filled the updated configuration is
        written back to disk.

        Args:
            path (str): The path to the local_config.yaml file.
            default_config (dict, optional): Default values to fill missing entries.

        Returns:
            dict: The loaded data, with missing entries filled from default_config
            if provided.
        """
        with open(path, 'r') as file:
            data = yaml.safe_load(file)
        if default_config:
            updated = False
            for key, value in default_config.items():
                if key not in data:
                    data[key] = value
                    updated = True
            if updated:
                self.save_config_file(path, data)
        return data

    def save_config_file(self, path, data):
        """
        Save configuration data to a local_config.yaml file.

        Args:
            path (str): The path to save the local_config.yaml file.
            data (dict): The configuration data to be saved.

        Returns:
            None
        """
        with open(path, 'w') as file:
            yaml.dump(data, file)

    def remove_text_from_string(self, string, text_to_find):
        """
        Remove everything from the first occurrence of the specified text
        in the string (case-insensitive).

        Parameters:
            string (str): The original string.
            text_to_find (str): The text to find in the string.

        Returns:
            str: The updated string.
        """
        index = string.lower().find(text_to_find.lower())
        if index != -1:
            string = string[:index]
        return string

    def process(self, text: str, message_type: MSG_TYPE):
        """
        Streaming callback: accumulate generated text and stop on antiprompts.

        Args:
            text (str): Newly generated chunk of text.
            message_type (MSG_TYPE): Kind of streamed message.

        Returns:
            bool: True to continue generation, False to stop it.
        """
        bot_says = self.bot_says + text
        antiprompt = self.personality.detect_antiprompt(bot_says)
        if antiprompt:
            # The model started hallucinating a new turn: truncate and stop.
            self.bot_says = self.remove_text_from_string(bot_says, antiprompt)
            ASCIIColors.warning("Detected hallucination")
            return False
        else:
            self.bot_says = bot_says
            return True

    def generate(self, prompt, max_size):
        """
        Generate a reply for the prompt using the personality's model,
        streaming through self.process for antiprompt detection.

        Args:
            prompt (str): The input prompt.
            max_size: Maximum size of the generated output.

        Returns:
            str: The generated text, stripped of surrounding whitespace.
        """
        self.bot_says = ""
        return self.personality.model.generate(
            prompt,
            max_size,
            self.process,
            temperature=self.personality.model_temperature,
            top_k=self.personality.model_top_k,
            top_p=self.personality.model_top_p,
            repeat_penalty=self.personality.model_repeat_penalty,
        ).strip()

    def run_workflow(self, prompt: str, previous_discussion_text: str = "", callback=None):
        """
        Run the personality's custom processing workflow.

        Placeholder: subclasses override this to control how model input is
        built and output produced.

        Args:
            prompt (str): The input prompt for the model.
            previous_discussion_text (str, optional): Text of the previous discussion.
            callback (callable, optional): Streaming callback for generated chunks.

        Returns:
            None: The base implementation does nothing.
        """
        return None

    def process_model_input(self, text: str):
        """
        Hook for transforming the model input before generation.

        Override in a personality-specific processor class to define the
        desired behavior.

        Args:
            text (str): The model input text.

        Returns:
            Any: The processed model input (None in the base implementation).
        """
        return None

    def process_model_output(self, text: str):
        """
        Hook for transforming the model output after generation.

        Override in a personality-specific processor class to define the
        desired behavior.

        Args:
            text (str): The model output text.

        Returns:
            Any: The processed model output (None in the base implementation).
        """
        return None
# ===========================================================
class AIPersonalityInstaller:
    """Base class for personality installers; holds the personality being installed."""
    def __init__(self, personality:AIPersonality) -> None:
        """
        Args:
            personality (AIPersonality): The personality this installer operates on.
        """
        self.personality = personality
class PersonalityBuilder:
    """
    Builds an AIPersonality from the active entry of a LOLLMS configuration.
    """
    def __init__(
        self,
        lollms_paths: LollmsPaths,
        config: LOLLMSConfig,
        model: LLMBinding,
        installation_option: InstallOption = InstallOption.INSTALL_IF_NECESSARY
    ):
        """
        Args:
            lollms_paths (LollmsPaths): Application paths (zoo, personal folders, ...).
            config (LOLLMSConfig): Global configuration holding the personalities list.
            model (LLMBinding): Model to bind the built personality to.
            installation_option (InstallOption): How aggressively to (re)install.
        """
        self.config = config
        self.lollms_paths = lollms_paths
        self.model = model
        self.installation_option = installation_option

    def build_personality(self):
        """
        Build and return the personality selected by config["active_personality_id"].

        An out-of-range id is reset to 0 with a warning. Entries with exactly
        three "/"-separated segments are resolved inside the personalities zoo;
        any other entry is used as-is with is_relative_path=False.

        Returns:
            AIPersonality: The freshly built personality (also kept on self.personality).
        """
        if self.config["active_personality_id"] >= len(self.config["personalities"]):
            ASCIIColors.warning("Personality ID was out of range. Resetting to 0.")
            self.config["active_personality_id"] = 0
        # NOTE(review): a negative id passes this check and indexes from the end —
        # confirm whether that is intended.
        personality_ref = self.config["personalities"][self.config["active_personality_id"]]
        # Deduplicated: both branches previously repeated the full constructor call.
        if len(personality_ref.split("/")) == 3:
            package_path = self.lollms_paths.personalities_zoo_path / personality_ref
            relative = True
        else:
            package_path = personality_ref
            relative = False
        self.personality = AIPersonality(
            package_path,
            self.lollms_paths,
            self.config,
            self.model,
            is_relative_path=relative,
            installation_option=self.installation_option
        )
        return self.personality

    def get_personality(self):
        """Return the last personality built by build_personality().

        Raises AttributeError if build_personality() was never called.
        """
        return self.personality

View File

@ -1,15 +1,16 @@
from lollms.config import InstallOption
from flask import Flask, request
from flask_socketio import SocketIO, emit
from flask_cors import CORS
from lollms.personality import AIPersonality, MSG_TYPE
from lollms.types import MSG_TYPE
from lollms.personality import AIPersonality
from lollms.main_config import LOLLMSConfig
from lollms.binding import LLMBinding
from lollms.binding import LLMBinding, BindingBuilder, ModelBuilder
from lollms.personality import PersonalityBuilder
from lollms.helpers import ASCIIColors
from lollms.console import MainMenu
from lollms.paths import LollmsPaths
from lollms.console import MainMenu
from lollms import BindingBuilder, ModelBuilder, PersonalityBuilder
from lollms import reset_all_installs
from typing import List, Tuple
import importlib
from pathlib import Path
@ -18,13 +19,22 @@ import logging
import yaml
import copy
def reset_all_installs(lollms_paths:LollmsPaths):
    """
    Force reinstallation of all components by deleting their configuration files.

    Removes every .yaml file in the personal configuration folder except
    local_config.yaml (the main configuration), so components reinstall on
    next load.

    Args:
        lollms_paths (LollmsPaths): Paths object exposing personal_configuration_path.

    Returns:
        None
    """
    # Fixed typo in the user-facing message: "Removeing" -> "Removing"
    ASCIIColors.info("Removing all configuration files to force reinstall")
    ASCIIColors.info(f"Searching files from {lollms_paths.personal_configuration_path}")
    for file_path in lollms_paths.personal_configuration_path.iterdir():
        if file_path.name!="local_config.yaml" and file_path.suffix.lower()==".yaml":
            file_path.unlink()
            ASCIIColors.info(f"Deleted file: {file_path}")
class LoLLMsServer:
def __init__(self):
host = "localhost"
port = "9601"
self.clients = {}
self.current_binding = None
self.current_model = None
self.active_model = None
self.personalities = []
self.answer = ['']
self.is_ready = True
@ -95,14 +105,19 @@ class LoLLMsServer:
self.menu.select_model()
else:
try:
self.current_model = self.binding(self.config)
self.active_model = self.binding(self.config)
except Exception as ex:
print(f"{ASCIIColors.color_red}Couldn't load model Please select a valid model{ASCIIColors.color_reset}")
print(f"{ASCIIColors.color_red}{ex}{ASCIIColors.color_reset}")
self.menu.select_model()
for p in self.config.personalities:
personality = AIPersonality(self.lollms_paths, self.config.lollms_paths.personalities_zoo_path/p, self.current_model)
personality = AIPersonality(
self.config.lollms_paths.personalities_zoo_path/p,
self.lollms_paths,
self.config,
self.active_model
)
self.personalities.append(personality)
if self.config.active_personality_id>len(self.personalities):
@ -352,7 +367,7 @@ class LoLLMsServer:
self.cp_config = copy.deepcopy(self.config)
self.cp_config["model_name"] = data['model_name']
try:
self.current_model = self.binding(self.cp_config)
self.active_model = self.binding(self.cp_config)
emit('select_model', {'success':True, 'model_name': model_name}, room=request.sid)
except Exception as ex:
print(ex)
@ -362,7 +377,12 @@ class LoLLMsServer:
def handle_add_personality(data):
personality_path = data['path']
try:
personality = AIPersonality(self.lollms_paths, personality_path)
personality = AIPersonality(
personality_path,
self.lollms_paths,
self.config,
self.active_model
)
self.personalities.append(personality)
self.config["personalities"].append(personality_path)
emit('personality_added', {'success':True, 'name': personality.name, 'id':len(self.personalities)-1}, room=request.sid)
@ -391,13 +411,13 @@ class LoLLMsServer:
@self.socketio.on('tokenize')
def tokenize(data):
prompt = data['prompt']
tk = self.current_model.tokenize(prompt)
tk = self.active_model.tokenize(prompt)
emit("tokenized", {"tokens":tk})
@self.socketio.on('detokenize')
def detokenize(data):
prompt = data['prompt']
txt = self.current_model.detokenize(prompt)
txt = self.active_model.detokenize(prompt)
emit("detokenized", {"text":txt})
@self.socketio.on('cancel_generation')
@ -420,7 +440,7 @@ class LoLLMsServer:
return
def generate_text():
self.is_ready = False
model = self.current_model
model = self.active_model
self.clients[client_id]["is_generating"]=True
self.clients[client_id]["requested_stop"]=False
prompt = data['prompt']

View File

@ -1,15 +1,24 @@
from lollms.config import InstallOption
from lollms.main_config import LOLLMSConfig
from lollms.helpers import ASCIIColors
from lollms.paths import LollmsPaths
from lollms import reset_all_installs
from lollms.binding import BindingBuilder, ModelBuilder
import shutil
from pathlib import Path
import argparse
from tqdm import tqdm
from lollms import BindingBuilder, ModelBuilder, PersonalityBuilder
from lollms.personality import PersonalityBuilder
from lollms.console import MainMenu
def reset_all_installs(lollms_paths:LollmsPaths):
    """
    Force reinstallation of all components by deleting their configuration files.

    Removes every .yaml file in the personal configuration folder except
    local_config.yaml (the main configuration), so components reinstall on
    next load.

    Args:
        lollms_paths (LollmsPaths): Paths object exposing personal_configuration_path.

    Returns:
        None
    """
    # Fixed typo in the user-facing message: "Removeing" -> "Removing"
    ASCIIColors.info("Removing all configuration files to force reinstall")
    ASCIIColors.info(f"Searching files from {lollms_paths.personal_configuration_path}")
    for file_path in lollms_paths.personal_configuration_path.iterdir():
        if file_path.name!="local_config.yaml" and file_path.suffix.lower()==".yaml":
            file_path.unlink()
            ASCIIColors.info(f"Deleted file: {file_path}")
class Settings:
def __init__(
self,
@ -143,12 +152,12 @@ Participating personalities:
# cfg.download_model(url)
else:
try:
self.binding = BindingBuilder().build_binding(self.lollms_paths.bindings_zoo_path, self.config)
self.binding = BindingBuilder().build_binding(self.config, self.lollms_paths)
except Exception as ex:
print(ex)
print(f"Couldn't find binding. Please verify your configuration file at {self.cfg_path} or use the next menu to select a valid binding")
print(f"Trying to reinstall binding")
self.binding = BindingBuilder().build_binding(self.lollms_paths.bindings_zoo_path, self.config,force_reinstall=True)
self.binding = BindingBuilder().build_binding(self.config, self.lollms_paths,installation_option=InstallOption.FORCE_INSTALL)
self.menu.select_binding()
def load_model(self):

9
lollms/types.py Normal file
View File

@ -0,0 +1,9 @@
from enum import Enum
class MSG_TYPE(Enum):
    """
    Kinds of messages a model/personality can emit to callbacks and the UI.

    NOTE(review): member meanings below are inferred from their names only —
    confirm against the emitting code.
    """
    MSG_TYPE_CHUNK=0  # presumably a partial streamed piece of the answer
    MSG_TYPE_FULL=1   # presumably the complete answer text
    MSG_TYPE_META=2   # presumably metadata about the message
    MSG_TYPE_REF=3    # presumably a reference/citation payload
    MSG_TYPE_CODE=4   # presumably a code payload
    MSG_TYPE_UI=5     # presumably UI-specific content

View File

@ -26,7 +26,7 @@ def get_all_files(path):
setuptools.setup(
name="lollms",
version="1.2.12",
version="2.0.0",
author="Saifeddine ALOUI",
author_email="aloui.saifeddine@gmail.com",
description="A python library for AI personality definition",