added support for bindings loading

This commit is contained in:
Saifeddine ALOUI 2023-07-18 01:04:48 +02:00
parent 89a52963fc
commit be86fc9613
2 changed files with 18 additions and 10 deletions

View File

@@ -477,7 +477,7 @@ class LoLLMsAPPI(LollmsApplication):
ASCIIColors.green(f"{self.lollms_paths.personal_path}")
def rebuild_personalities(self):
def rebuild_personalities(self, reload_all=False):
loaded = self.mounted_personalities
loaded_names = [f"{p.language}/{p.category}/{p.personality_folder_name}" for p in loaded]
mounted_personalities=[]
@@ -491,7 +491,7 @@ class LoLLMsAPPI(LollmsApplication):
ASCIIColors.green(f" {personality}")
else:
ASCIIColors.yellow(f" {personality}")
if personality in loaded_names:
if personality in loaded_names and not reload_all:
mounted_personalities.append(loaded[loaded_names.index(personality)])
else:
personality_path = self.lollms_paths.personalities_zoo_path/f"{personality}"
@@ -660,10 +660,12 @@ class LoLLMsAPPI(LollmsApplication):
composed_messages = link_text.join(self.full_message_list)
t = self.model.tokenize(composed_messages)
cond_tk = self.model.tokenize(self.personality.personality_conditioning)
n_t = len(t)
n_cond_tk = len(cond_tk)
max_prompt_stx_size = 3*int(self.config.ctx_size/4)
if self.n_cond_tk+n_t>max_prompt_stx_size:
nb_tk = max_prompt_stx_size-self.n_cond_tk
if n_cond_tk+n_t>max_prompt_stx_size:
nb_tk = max_prompt_stx_size-n_cond_tk
composed_messages = self.model.detokenize(t[-nb_tk:])
ASCIIColors.warning(f"Cropping discussion to fit context [using {nb_tk} tokens/{self.config.ctx_size}]")
discussion_messages = self.personality.personality_conditioning+ composed_messages

18
app.py
View File

@@ -526,7 +526,7 @@ class LoLLMsWebUI(LoLLMsAPPI):
if self.config["model_name"] is not None:
try:
self.model = self.binding.build_model()
self.rebuild_personalities()
self.rebuild_personalities(reload_all=True)
except Exception as ex:
# Catch the exception and get the traceback as a list of strings
traceback_lines = traceback.format_exception(type(ex), ex, ex.__traceback__)
@@ -545,6 +545,9 @@ class LoLLMsWebUI(LoLLMsAPPI):
print(f"New binding selected : {data['setting_value']}")
self.config["binding_name"]=data['setting_value']
try:
if self.binding:
self.binding.destroy_model()
self.binding = None
self.model = None
for per in self.mounted_personalities:
@@ -809,7 +812,7 @@ class LoLLMsWebUI(LoLLMsAPPI):
root_dir = self.lollms_paths.personal_path / "uploads"
root_dir.mkdir(exist_ok=True, parents=True)
path = str(root_dir+"/".join(filename.split("/")[:-1]))
path = str(root_dir/"/".join(filename.split("/")[:-1]))
fn = filename.split("/")[-1]
return send_from_directory(path, fn)
@@ -948,7 +951,7 @@ class LoLLMsWebUI(LoLLMsAPPI):
print(f"Couldn't build model: [{ex}]")
trace_exception(ex)
try:
self.rebuild_personalities()
self.rebuild_personalities(reload_all=True)
except Exception as ex:
print(f"Couldn't reload personalities: [{ex}]")
return jsonify({"status": True})
@@ -1069,7 +1072,7 @@ class LoLLMsWebUI(LoLLMsAPPI):
print(f"Couldn't build model: [{ex}]")
trace_exception(ex)
try:
self.rebuild_personalities()
self.rebuild_personalities(reload_all=True)
except Exception as ex:
print(f"Couldn't reload personalities: [{ex}]")
return jsonify({"status": True})
@@ -1282,9 +1285,12 @@ class LoLLMsWebUI(LoLLMsAPPI):
try:
ASCIIColors.info("Recovering file from front end")
file = request.files['file']
file.save(self.lollms_paths.personal_uploads_path / file.filename)
path:Path = self.lollms_paths.personal_uploads_path / self.personality.personality_folder_name
path.mkdir(parents=True, exist_ok=True)
file_path = path / file.filename
file.save( file_path )
if self.personality.processor:
self.personality.processor.add_file(self.lollms_paths.personal_uploads_path / file.filename)
self.personality.processor.add_file(file_path)
return jsonify({"status": True})
except Exception as ex:
ASCIIColors.error(ex)