diff --git a/app.py b/app.py
index 4acbcc20..205ec83c 100644
--- a/app.py
+++ b/app.py
@@ -51,9 +51,8 @@ app.config['SECRET_KEY'] = 'secret!'
# Set the logging level to WARNING or higher
logging.getLogger('socketio').setLevel(logging.WARNING)
logging.getLogger('engineio').setLevel(logging.WARNING)
-# Suppress Flask's default console output
-log = logging.getLogger('werkzeug')
-log.setLevel(logging.ERROR)
+logging.getLogger('werkzeug').setLevel(logging.ERROR)
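+# Default the root logger to WARNING as well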
+logging.basicConfig(level=logging.WARNING)
import time
from gpt4all_api.config import load_config, save_config
@@ -192,6 +191,10 @@ class Gpt4AllWebUI(GPT4AllAPI):
self.add_endpoint(
"/update_setting", "update_setting", self.update_setting, methods=["POST"]
)
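+        # New endpoint that applies the whole configuration in the inference process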
+ self.add_endpoint(
+ "/apply_settings", "apply_settings", self.apply_settings, methods=["POST"]
+ )
+
self.add_endpoint(
"/save_settings", "save_settings", self.save_settings, methods=["POST"]
@@ -293,29 +296,16 @@ class Gpt4AllWebUI(GPT4AllAPI):
elif setting_name== "model":
self.config["model"]=data['setting_value']
print("update_settings : New model selected")
- # Build chatbot
- self.process.set_config(self.config)
elif setting_name== "backend":
- print("New backend selected")
if self.config['backend']!= data['setting_value']:
print("New backend selected")
self.config["backend"]=data['setting_value']
-
- backend_ =self.process.rebuild_backend(self.config)
- models = backend_.list_models(self.config)
- if len(models)>0:
- self.backend = backend_
- self.config['model'] = models[0]
- # Build chatbot
- self.process.set_config(self.config)
- if self.config["debug"]:
- print(f"Configuration {data['setting_name']} set to {data['setting_value']}")
- return jsonify({'setting_name': data['setting_name'], "status":True})
- else:
- if self.config["debug"]:
- print(f"Configuration {data['setting_name']} couldn't be set to {data['setting_value']}")
- return jsonify({'setting_name': data['setting_name'], "status":False})
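+                # Load the newly selected backend; report any failure back to the caller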
+ try:
+ self.backend = self.process.load_backend(self.config["backend"])
+ except Exception as ex:
+ print("Couldn't build backend")
+ return jsonify({'setting_name': data['setting_name'], "status":False, 'error':str(ex)})
else:
if self.config["debug"]:
print(f"Configuration {data['setting_name']} set to {data['setting_value']}")
@@ -330,10 +320,13 @@ class Gpt4AllWebUI(GPT4AllAPI):
print(f"Configuration {data['setting_name']} set to {data['setting_value']}")
print("Configuration updated")
- self.process.set_config(self.config)
# Tell that the setting was changed
return jsonify({'setting_name': data['setting_name'], "status":True})
+
+ def apply_settings(self):
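+        # Push the current configuration to the inference process and return its status report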
+ return jsonify(self.process.set_config(self.config))
+
def list_backends(self):
        backends_dir = Path('./backends') # path to the backends folder
backends = [f.stem for f in backends_dir.iterdir() if f.is_dir() and f.stem!="__pycache__"]
@@ -527,7 +520,7 @@ class Gpt4AllWebUI(GPT4AllAPI):
print("New backend selected")
self.config['backend'] = backend
- backend_ =self.process.load_backend(Path("backends")/config["backend"])
+ backend_ =self.process.load_backend(config["backend"])
models = backend_.list_models(self.config)
if len(models)>0:
self.backend = backend_
@@ -619,15 +612,18 @@ class Gpt4AllWebUI(GPT4AllAPI):
Returns:
_type_: _description_
"""
-
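+        # No backend loaded yet, so there are no models to list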
+ if self.backend is None:
+ return jsonify([])
model_list = self.backend.get_available_models()
models = []
for model in model_list:
try:
- filename = model['filename']
- server = model['server']
- filesize = model['filesize']
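+                # Tolerate missing metadata fields by falling back to defaults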
+ filename = model.get('filename',"")
+ server = model.get('server',"")
+ image_url = model.get("image_url", '/icons/default.png')
+ filesize = int(model.get('filesize',0))
+ description = model.get('description',"")
if server.endswith("/"):
path = f'{server}{filename}'
else:
@@ -635,14 +631,17 @@ class Gpt4AllWebUI(GPT4AllAPI):
local_path = Path(f'./models/{self.config["backend"]}/{filename}')
is_installed = local_path.exists()
models.append({
- 'title': model['filename'],
- 'icon': '/icons/default.png', # Replace with the path to the model icon
- 'description': model['description'],
+ 'title': filename,
+                    'icon': image_url, # icon from the model metadata, or the default icon
+ 'description': description,
'isInstalled': is_installed,
'path': path,
'filesize': filesize,
})
- except:
+ except Exception as ex:
+ print("#################################")
+ print(ex)
+ print("#################################")
print(f"Problem with model : {model}")
return jsonify(models)
diff --git a/gpt4all_api/api.py b/gpt4all_api/api.py
index 15d178e3..25884421 100644
--- a/gpt4all_api/api.py
+++ b/gpt4all_api/api.py
@@ -88,13 +88,26 @@ class ModelProcess:
self.cancel_queue = mp.Queue(maxsize=1)
self.clear_queue_queue = mp.Queue(maxsize=1)
self.set_config_queue = mp.Queue(maxsize=1)
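+        # Carries the outcome of set_config back from the inference process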
+ self.set_config_result_queue = mp.Queue(maxsize=1)
self.started_queue = mp.Queue()
self.process = None
self.is_generating = mp.Value('i', 0)
self.model_ready = mp.Value('i', 0)
self.ready = False
-
- def load_backend(self, backend_path:Path):
+
+ self.reset_config_result()
+
+ def reset_config_result(self):
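+        # Fresh status report for the next set_config call; failures accumulate in 'errors'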
+ self._set_config_result = {
+ 'status': 'succeeded',
+ 'backend_status':'ok',
+ 'model_status':'ok',
+ 'personality_status':'ok',
+ 'errors':[]
+ }
+
+ def load_backend(self, backend_name:str):
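+        # Backends are now referenced by name; resolve the folder under ./backends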
+ backend_path = Path("backends")/backend_name
# first find out if there is a requirements.txt file
requirements_file = backend_path/"requirements.txt"
if requirements_file.exists():
@@ -131,7 +144,10 @@ class ModelProcess:
def set_config(self, config):
self.set_config_queue.put(config)
-
+        # Wait for the inference process to consume it and return the result
+ while self.set_config_result_queue.empty():
+ time.sleep(0.5)
+ return self.set_config_result_queue.get()
def generate(self, prompt, id, n_predict):
self.generate_queue.put((prompt, id, n_predict))
@@ -144,19 +160,20 @@ class ModelProcess:
def rebuild_backend(self, config):
try:
-
- backend = self.load_backend(Path("backends")/config["backend"])
+ backend = self.load_backend(config["backend"])
print("Backend loaded successfully")
except Exception as ex:
print("Couldn't build backend")
print(ex)
backend = None
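+            # Record the failure so it is included in the set_config status report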
+ self._set_config_result['backend_status'] ='failed'
+ self._set_config_result['errors'].append(f"couldn't build backend:{ex}")
return backend
def _rebuild_model(self):
try:
print("Rebuilding model")
- self.backend = self.load_backend(Path("backends")/self.config["backend"])
+ self.backend = self.load_backend(self.config["backend"])
print("Backend loaded successfully")
try:
model_file = Path("models")/self.config["backend"]/self.config["model"]
@@ -168,6 +185,8 @@ class ModelProcess:
print("Couldn't build model")
print(ex)
self.model = None
+ self._set_config_result['model_status'] ='failed'
+ self._set_config_result['errors'].append(f"couldn't build model:{ex}")
except Exception as ex:
print("Couldn't build backend")
print(ex)
@@ -183,6 +202,7 @@ class ModelProcess:
if self.config["debug"]:
print(ex)
personality = AIPersonality()
+
return personality
def _rebuild_personality(self):
@@ -194,6 +214,8 @@ class ModelProcess:
if self.config["debug"]:
print(ex)
self.personality = AIPersonality()
+ self._set_config_result['personality_status'] ='failed'
+ self._set_config_result['errors'].append(f"couldn't load personality:{ex}")
def _run(self):
self._rebuild_model()
@@ -297,7 +319,9 @@ class ModelProcess:
config = self.set_config_queue.get()
if config is not None:
print("Inference process : Setting configuration")
+ self.reset_config_result()
self._set_config(config)
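+                # Send the collected status back so set_config can return it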
+ self.set_config_result_queue.put(self._set_config_result)
def _cancel_generation(self):
self.is_generating.value = 0
@@ -502,7 +526,8 @@ class GPT4AllAPI():
- def load_backend(self, backend_path):
+ def load_backend(self, backend_name):
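+        # Same convention as ModelProcess.load_backend: callers pass a backend name, not a path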
+ backend_path = Path("backends")/backend_name
# define the full absolute path to the module
absolute_path = backend_path.resolve()
diff --git a/models/gptj/.keep b/models/gptj/.keep
deleted file mode 100644
index e69de29b..00000000
diff --git a/web/src/views/DiscussionsView.vue b/web/src/views/DiscussionsView.vue
index b65aaef3..7d1d386a 100644
--- a/web/src/views/DiscussionsView.vue
+++ b/web/src/views/DiscussionsView.vue
@@ -119,7 +119,7 @@