This commit is contained in:
Saifeddine ALOUI 2023-07-19 18:42:29 +02:00
commit 02cccec253
17 changed files with 8052 additions and 15 deletions

View File

@ -1,3 +0,0 @@
{
"ros.distro": "noetic"
}

View File

@ -1,8 +1,8 @@
# Lord Of Large Language Models Socket.io Endpoints Documentation
<img src="https://github.com/ParisNeo/lollms/blob/main/lollms/assets/logo.png" alt="Logo" width="200" height="200">
<img src="https://github.com/ParisNeo/lollms/blob/main/lollms/assets/logo.png?raw=true" alt="Logo" width="200" height="200">
The server provides several Socket.io endpoints that clients can use to interact with the server. The default URL for the server is `http://localhost:9600`, but it can be changed using the configuration file or launch parameters.
The server provides several Socket.io endpoints that clients can use to interact with the server. The default URL for the server is `http://localhost:9601`, but it can be changed using the configuration file or launch parameters.
## Endpoints

View File

@ -174,7 +174,7 @@ Participating personalities:
full_discussion = self.reset_context()
while True:
try:
ump = "!@>"+self.config.user_name+": " if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
ump = self.config.discussion_prompt_separator +self.config.user_name+": " if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
if self.config.use_user_name_in_discussions:
prompt = input(f"{ASCIIColors.color_green}{self.config.user_name}: {ASCIIColors.color_reset}")
else:

View File

@ -0,0 +1,19 @@
from flask import Flask, send_from_directory
import os

# Flask application serving the playground's pre-built static frontend.
app = Flask(__name__, static_folder='static/')


@app.route('/')
def index():
    """Serve the playground landing page (static index.html)."""
    return app.send_static_file('index.html')


@app.route('/<path:filename>')
def serve_file(filename):
    """Serve any other frontend asset from the static directory.

    send_from_directory performs the safe path join, rejecting
    traversal outside the static root.
    """
    static_root = "static/"
    return send_from_directory(static_root, filename)


def main():
    """Entry point: run the Flask development server on its defaults."""
    app.run()


if __name__ == '__main__':
    main()

View File

Before

Width:  |  Height:  |  Size: 459 KiB

After

Width:  |  Height:  |  Size: 459 KiB

File diff suppressed because it is too large Load Diff

View File

@ -19,7 +19,7 @@ import argparse
import logging
import yaml
import copy
import gc
def reset_all_installs(lollms_paths:LollmsPaths):
ASCIIColors.info("Removeing all configuration files to force reinstall")
ASCIIColors.info(f"Searching files from {lollms_paths.personal_configuration_path}")
@ -265,8 +265,17 @@ class LoLLMsServer(LollmsApplication):
self.cp_config = copy.deepcopy(self.config)
self.cp_config["binding_name"] = data['binding_name']
try:
del self.model
del self.binding
self.model = None
self.binding = None
gc.collect()
for personality in self.mount_personalities:
personality.model = None
self.binding = self.build_binding(self.bindings_path, self.cp_config)
self.config = self.cp_config
self.mount_personalities()
gc.collect()
emit('select_binding', {'success':True, 'binding_name': self.cp_config["binding_name"]}, room=request.sid)
except Exception as ex:
print(ex)
@ -281,7 +290,14 @@ class LoLLMsServer(LollmsApplication):
self.cp_config = copy.deepcopy(self.config)
self.cp_config["model_name"] = data['model_name']
try:
del self.model
self.model = None
gc.collect()
for personality in self.mount_personalities:
personality.model = None
self.model = self.binding.build_model()
self.mount_personalities()
gc.collect()
emit('select_model', {'success':True, 'model_name': model_name}, room=request.sid)
except Exception as ex:
print(ex)
@ -421,7 +437,7 @@ class LoLLMsServer(LollmsApplication):
else:
try:
personality: AIPersonality = self.personalities[personality_id]
ump = "!@>"+self.config.user_name+": " if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
ump = self.config.discussion_prompt_separator +self.config.user_name+": " if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
personality.model = model
cond_tk = personality.model.tokenize(personality.personality_conditioning)
n_cond_tk = len(cond_tk)

View File

@ -1,13 +1,17 @@
# =================== Lord Of Large Language Models Configuration file ===========================
version: 9
version: 11
binding_name: null
model_name: null
# Enables gpu usage
enable_gpu: true
# Host information
host: localhost
port: 9600
# Generation parameters
discussion_prompt_separator: "!@>"
seed: -1
n_predict: 1024
ctx_size: 2048

View File

@ -104,10 +104,10 @@ Date: {{date}}
"""
self._welcome_message: str = "Welcome! I am lollms (Lord of LLMs) A free and open assistant built by ParisNeo. What can I do for you today?"
self._include_welcome_message_in_disucssion: bool = True
self._user_message_prefix: str = "## Human: "
self._user_message_prefix: str = "!@> Human: "
self._link_text: str = "\n"
self._ai_message_prefix: str = "## lollms:"
self._anti_prompts:list = ["## Human","## lollms","##Human","##Assistant","##lollms"]
self._ai_message_prefix: str = "!@> lollms:"
self._anti_prompts:list = [self.config.discussion_prompt_separator]
# Extra
self._dependencies: List[str] = []
@ -206,7 +206,7 @@ Date: {{date}}
self._user_message_prefix = config.get("user_message_prefix", self._user_message_prefix)
self._link_text = config.get("link_text", self._link_text)
self._ai_message_prefix = config.get("ai_message_prefix", self._ai_message_prefix)
self._anti_prompts = config.get("anti_prompts", self._anti_prompts)
self._anti_prompts = [self.config.discussion_prompt_separator]+config.get("anti_prompts", self._anti_prompts)
self._dependencies = config.get("dependencies", self._dependencies)
self._disclaimer = config.get("disclaimer", self._disclaimer)
self._help = config.get("help", self._help)
@ -1139,6 +1139,26 @@ class APScript(StateMachine):
if callback:
callback(str(ex), MSG_TYPE.MSG_TYPE_EXCEPTION)
def warning(self, warning:str, callback=None):
    """Send a warning message to the client via the provided callback.

    Args:
        warning (str): The warning text to forward.
        callback (callable, optional): A callable with this signature (str, MSG_TYPE)
            used to deliver the message. Defaults to None; when None, nothing is sent.
    """
    if callback:
        # NOTE(review): warnings are tagged MSG_TYPE_EXCEPTION — confirm no
        # dedicated warning message type exists in MSG_TYPE.
        callback(warning, MSG_TYPE.MSG_TYPE_EXCEPTION)
def info(self, info:str, callback=None):
    """Send an informational message to the client via the provided callback.

    Args:
        info (str): The information text to be sent.
        callback (callable, optional): A callable with this signature (str, MSG_TYPE)
            used to deliver the message. Defaults to None; when None, nothing is sent.
    """
    if callback:
        callback(info, MSG_TYPE.MSG_TYPE_INFO)
def json(self, json_infos:dict, callback=None):
"""This sends json data to front end

View File

@ -248,7 +248,7 @@ class MainMenu(Menu):
models_dir:Path = (self.lollms_app.lollms_paths.personal_models_path/self.lollms_app.config['binding_name'])
models_dir.mkdir(parents=True, exist_ok=True)
models_list = [v for v in self.lollms_app.binding.list_models(self.lollms_app.config)] + ["Install model", "Change binding", "Back"]
models_list = self.lollms_app.binding.list_models(self.lollms_app.config) + ["Install model", "Change binding", "Back"]
choice = self.show_menu(models_list)
if 1 <= choice <= len(models_list)-3:
print(f"You selected model: {ASCIIColors.color_green}{models_list[choice - 1]}{ASCIIColors.color_reset}")

View File

@ -26,7 +26,7 @@ def get_all_files(path):
setuptools.setup(
name="lollms",
version="2.1.44",
version="2.1.45",
author="Saifeddine ALOUI",
author_email="aloui.saifeddine@gmail.com",
description="A python library for AI personality definition",
@ -41,6 +41,7 @@ setuptools.setup(
'lollms-server = lollms.apps.server:main',
'lollms-console = lollms.apps.console:main',
'lollms-settings = lollms.apps.settings:main',
'lollms-playground = lollms.apps.playground:main'
],
},
extras_require={"dev": requirements_dev},