Merge remote-tracking branch 'origin/main' into lollms-patchy2

andzejsp 2023-06-23 09:06:09 +03:00
commit 9b22662e75
6 changed files with 137 additions and 61 deletions


@@ -14,8 +14,9 @@ from api.helpers import compare_lists
from pathlib import Path
import importlib
from lollms.config import InstallOption
from lollms.personality import AIPersonality, MSG_TYPE
from lollms.binding import LOLLMSConfig, BindingBuilder, LLMBinding
from lollms.types import MSG_TYPE
from lollms.personality import AIPersonality, PersonalityBuilder
from lollms.binding import LOLLMSConfig, BindingBuilder, LLMBinding, ModelBuilder
from lollms.paths import LollmsPaths
from lollms.helpers import ASCIIColors
import multiprocessing as mp
@@ -152,6 +153,12 @@ class LoLLMsAPPI():
        def disconnect():
            ASCIIColors.error(f'Client {request.sid} disconnected')

        @socketio.on('cancel_generation')
        def cancel_generation():
            self.cancel_gen = True
            ASCIIColors.error(f'Client {request.sid} canceled generation')

        @socketio.on('install_model')
        def install_model(data):
            room_id = request.sid
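
The new 'cancel_generation' event only flips self.cancel_gen; the generation loop is expected to poll that flag and stop. A minimal sketch of triggering it from a standalone python-socketio client, assuming the server is reachable at http://localhost:9600 (the address is an assumption, not part of this diff):

# Minimal sketch: ask the server to cancel an in-flight generation.
# Assumes lollms-webui is reachable at http://localhost:9600 (hypothetical address).
import socketio

sio = socketio.Client()
sio.connect("http://localhost:9600")
sio.emit("cancel_generation", {})  # the handler above sets self.cancel_gen = True
sio.disconnect()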
@@ -181,8 +188,7 @@ class LoLLMsAPPI():
                socketio.emit('install_progress',{'status': True, 'error': ''}, room=room_id)

            tpe = threading.Thread(target=install_model_, args=())
            tpe.start()

        @socketio.on('uninstall_model')
        def uninstall_model(data):
            model_path = data['path']
@@ -195,6 +201,27 @@ class LoLLMsAPPI():
            installation_path.unlink()
            socketio.emit('install_progress',{'status': True, 'error': ''}, room=request.sid)

        @socketio.on('upload_file')
        def upload_file(data):
            file = data['file']
            filename = file.filename
            save_path = self.lollms_paths.personal_uploads_path/filename # Specify the desired folder path

            try:
                if not self.personality.processor is None:
                    self.personality.processor.add_file(save_path)
                    file.save(save_path)
                    # File saved successfully
                    socketio.emit('progress', {'status':True, 'progress': 100})
                else:
                    # Personality doesn't support file sending
                    socketio.emit('progress', {'status':False, 'error': "Personality doesn't support file sending"})
            except Exception as e:
                # Error occurred while saving the file
                socketio.emit('progress', {'status':False, 'error': str(e)})
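
The upload handler only saves the file and hands the path to the active personality's processor through add_file; the processor interface itself is not shown in this diff. A hypothetical minimal processor that would satisfy that call could look like this (everything except the add_file(path) signature is an assumption):

# Hypothetical personality processor able to receive uploaded files.
# Only the add_file(path) call is taken from the diff; the rest is illustrative.
from pathlib import Path

class ExampleProcessor:
    def __init__(self):
        self.files = []  # paths handed over by the upload_file handler

    def add_file(self, path):
        # Remember the uploaded file so later generations can use it.
        self.files.append(Path(path))
        return True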
@@ -219,7 +246,7 @@ class LoLLMsAPPI():
                ASCIIColors.green("Starting message generation by " + self.personality.name)
                task = self.socketio.start_background_task(self.start_message_generation, message, message_id)
                ASCIIColors.info("Started generation task")
                #tpe = threading.Thread(target=self.start_message_generation, args=(message, message_id))
                #tpe.start()
            else:
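
Generation now runs through Flask-SocketIO's start_background_task instead of a raw threading.Thread (the old call is kept commented out above); start_background_task spawns a worker compatible with whatever async mode (threading, eventlet, gevent) the server was started with. A standalone sketch of the pattern, assuming a plain Flask-SocketIO app; the event and function names here are illustrative, not taken from the repository:

# Standalone sketch of socketio.start_background_task with Flask-SocketIO.
from flask import Flask
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app)

def long_generation(message, message_id):
    # ... produce tokens here and emit progress events to the client ...
    socketio.emit("message", {"id": message_id, "text": "echo: " + message})

@socketio.on("generate_example")
def handle_generate(data):
    # Launch the heavy work without blocking the Socket.IO event handler.
    socketio.start_background_task(long_generation, data["prompt"], 0)

if __name__ == "__main__":
    socketio.run(app)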
@@ -302,6 +329,51 @@ class LoLLMsAPPI():
        return mounted_personalities

    # ================================== LOLLMSApp

    def load_binding(self):
        if self.config.binding_name is None:
            print(f"No binding selected")
            print("Please select a valid model or install a new one from a url")
            self.menu.select_binding()
            # cfg.download_model(url)
        else:
            try:
                self.binding = BindingBuilder().build_binding(self.config, self.lollms_paths)
            except Exception as ex:
                print(ex)
                print(f"Couldn't find binding. Please verify your configuration file at {self.config.file_path} or use the next menu to select a valid binding")
                print(f"Trying to reinstall binding")
                self.binding = BindingBuilder().build_binding(self.config, self.lollms_paths, InstallOption.FORCE_INSTALL)
                self.menu.select_binding()

    def load_model(self):
        try:
            self.active_model = ModelBuilder(self.binding).get_model()
            ASCIIColors.success("Model loaded successfully")
        except Exception as ex:
            ASCIIColors.error(f"Couldn't load model.")
            ASCIIColors.error(f"Binding returned this exception : {ex}")
            ASCIIColors.error(f"{self.config.get_model_path_infos()}")
            print("Please select a valid model or install a new one from a url")
            self.menu.select_model()

    def load_personality(self):
        try:
            self.personality = PersonalityBuilder(self.lollms_paths, self.config, self.model).build_personality()
        except Exception as ex:
            ASCIIColors.error(f"Couldn't load personality.")
            ASCIIColors.error(f"Binding returned this exception : {ex}")
            ASCIIColors.error(f"{self.config.get_personality_path_infos()}")
            print("Please select a valid model or install a new one from a url")
            self.menu.select_model()
        self.cond_tk = self.personality.model.tokenize(self.personality.personality_conditioning)
        self.n_cond_tk = len(self.cond_tk)

    # properties
    @property
    def message_id(self):
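
load_binding, load_model and load_personality split startup into three recoverable steps, each falling back to the interactive menu when its builder raises. A hedged sketch of the assumed call order in an application built on this class (the wrapper function is an assumption; only the three method names come from the diff):

# Assumed startup order; only the three load_* method names appear in the diff.
def start_app(app):
    app.load_binding()      # resolve the binding, reinstalling it if needed
    app.load_model()        # build the model through ModelBuilder(binding)
    app.load_personality()  # build the personality and tokenize its conditioning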

app.py

@@ -76,8 +76,6 @@ class LoLLMsWebUI(LoLLMsAPPI):
        self.app = _app
        self.cancel_gen = False

        self.binding_changed = False
        self.model_changed = False

        app.template_folder = "web/dist"
        if config["active_personality_id"]>=len(config["personalities"]):
@@ -317,24 +315,26 @@ class LoLLMsWebUI(LoLLMsAPPI):
        personalities = {}
        for language_folder in personalities_folder.iterdir():
            lang = language_folder.stem
            if language_folder.is_dir():
            if language_folder.is_dir() and not language_folder.stem.startswith('.'):
                personalities[language_folder.name] = {}
                for category_folder in language_folder.iterdir():
                    cat = category_folder.stem
                    if category_folder.is_dir():
                    if category_folder.is_dir() and not category_folder.stem.startswith('.'):
                        personalities[language_folder.name][category_folder.name] = []
                        for personality_folder in category_folder.iterdir():
                            pers = personality_folder.stem
                            if personality_folder.is_dir():
                            if personality_folder.is_dir() and not personality_folder.stem.startswith('.'):
                                personality_info = {"folder":personality_folder.stem}
                                config_path = personality_folder / 'config.yaml'
                                if not config_path.exists():
                                    """
                                    try:
                                        shutil.rmtree(str(config_path.parent))
                                        ASCIIColors.warning(f"Deleted useless personality: {config_path.parent}")
                                    except Exception as ex:
                                        ASCIIColors.warning(f"Couldn't delete personality ({ex})")
                                    continue
                                    """
                                    continue
                                try:
                                    with open(config_path) as config_file:
                                        config_data = yaml.load(config_file, Loader=yaml.FullLoader)
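
All three scanning loops now skip directories whose names start with a dot, so hidden folders (a .git checkout, editor metadata, and so on) no longer show up as languages, categories or personalities. The same filter, pulled into a small standalone helper for clarity (the helper itself is illustrative, not part of the repository):

# Illustrative helper: list visible sub-directories, skipping dot-prefixed ones.
from pathlib import Path

def visible_dirs(folder: Path):
    return [child for child in folder.iterdir()
            if child.is_dir() and not child.stem.startswith('.')]

# e.g. language_folders = visible_dirs(personalities_folder)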
@@ -483,14 +483,30 @@ class LoLLMsWebUI(LoLLMsAPPI):
        elif setting_name== "model_name":
            self.config["model_name"]=data['setting_value']
            self.model_changed = True
            try:
                self.binding.build_model()
            except Exception as ex:
                print(f"Couldn't load model: [{ex}]")
                return jsonify({ "status":False, 'error':str(ex)})
            print("update_settings : New model selected")

        elif setting_name== "binding_name":
            if self.config['binding_name']!= data['setting_value']:
                print(f"New binding selected : {data['setting_value']}")
                self.config["binding_name"]=data['setting_value']
                self.binding_changed = True
                try:
                    self.binding = BindingBuilder().build_binding(self.config, self.lollms_paths)
                    try:
                        self.binding.build_model()
                    except Exception as ex:
                        print(f"Couldn't load model: [{ex}]")
                        return jsonify({ "status":False, 'error':str(ex)})
                except Exception as ex:
                    print(f"Couldn't build binding: [{ex}]")
                    return jsonify({"status":False, 'error':str(ex)})
        else:
            if self.config["debug"]:
                print(f"Configuration {data['setting_name']} set to {data['setting_value']}")
@@ -512,25 +528,6 @@ class LoLLMsWebUI(LoLLMsAPPI):
    def apply_settings(self):
        ASCIIColors.success("OK")
        if self.binding_changed:
            try:
                self.binding = BindingBuilder().build_binding(self.config, self.lollms_paths)
                try:
                    self.binding.build_model()
                except Exception as ex:
                    print(f"Couldn't load model: [{ex}]")
                    return jsonify({ "status":False, 'error':str(ex)})
            except Exception as ex:
                print(f"Couldn't build binding: [{ex}]")
                return jsonify({"status":False, 'error':str(ex)})
        else:
            if self.model_changed:
                try:
                    self.binding.build_model()
                except Exception as ex:
                    print(f"Couldn't load model: [{ex}]")
                    return jsonify({ "status":False, 'error':str(ex)})
        self.rebuild_personalities()
        return jsonify({"status":True})

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

web/dist/index.html vendored

@@ -6,8 +6,8 @@
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>LoLLMS WebUI - Welcome</title>
    <script type="module" crossorigin src="/assets/index-067e1b5a.js"></script>
    <link rel="stylesheet" href="/assets/index-4a05d245.css">
    <script type="module" crossorigin src="/assets/index-21ad23f2.js"></script>
    <link rel="stylesheet" href="/assets/index-97886d16.css">
  </head>
  <body>
    <div id="app"></div>


@@ -373,8 +373,11 @@ export default {
        return {}
      }
    },
    async stop_gen() {
      try {
        socket.emit('cancel_generation', {});
        const res = await axios.get('/stop_gen')
        if (res) {