Mirror of https://github.com/ParisNeo/lollms-webui.git (synced 2024-12-20 04:47:55 +00:00)
Upgraded backend management
commit a7c6014adc (parent cf2316fcd0)

app.py (9 changes)
@@ -206,8 +206,7 @@ class Gpt4AllWebUI(GPT4AllAPI):
 
 
     def list_models(self):
-        models_dir = Path('./models')/self.config["backend"] # replace with the actual path to the models folder
-        models = [f.name for f in models_dir.glob(self.backend.file_extension)]
+        models = self.backend.list_models(self.config)
         return jsonify(models)
 
 
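Model discovery is now delegated to the backend instead of being hard-coded in the web UI. A minimal sketch of the idea, assuming self.backend holds a GPTBackend subclass as defined at the end of this commit:

    # Before: the UI globbed ./models/<backend> itself.
    # After: the backend class owns the listing logic, so each backend
    # can decide how its model files are named and discovered.
    models = self.backend.list_models(self.config)
    return jsonify(models)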
@@ -419,11 +418,11 @@ class Gpt4AllWebUI(GPT4AllAPI):
             print("New backend selected")
             
             self.config['backend'] = backend
-            models_dir = Path('./models')/self.config["backend"] # replace with the actual path to the models folder
-            models = [f.name for f in models_dir.glob(self.backend.file_extension)]
+            backend_ =self.load_backend(self.BACKENDS_LIST[self.config["backend"]])
+            models = backend_.list_models(self.config)
             if len(models)>0:
+                self.backend = backend_
                 self.config['model'] = models[0]
-                self.load_backend(self.BACKENDS_LIST[self.config["backend"]])
                 # Build chatbot
                 self.chatbot_bindings = self.create_chatbot()
                 return jsonify({"status": "ok"})
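The practical effect of this hunk: the candidate backend is loaded and asked for its models first, and self.backend is only replaced once at least one model is found. A hypothetical call sequence, assuming a backend folder named "llama_cpp" exists under backends/ (the name is illustrative, not from the commit):

    backend_ = self.load_backend(self.BACKENDS_LIST["llama_cpp"])  # returns the backend class
    models = backend_.list_models(self.config)                     # backend decides how to enumerate models
    if len(models) > 0:
        self.backend = backend_              # commit the switch only when usable models exist
        self.config['model'] = models[0]
        self.chatbot_bindings = self.create_chatbot()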
@@ -45,7 +45,7 @@ class GPT4AllAPI():
         # Select backend
         self.BACKENDS_LIST = {f.stem:f for f in Path("backends").iterdir() if f.is_dir() and f.stem!="__pycache__"}
         
-        self.load_backend(self.BACKENDS_LIST[self.config["backend"]])
+        self.backend =self.load_backend(self.BACKENDS_LIST[self.config["backend"]])
         
         # Build chatbot
         self.chatbot_bindings = self.create_chatbot()
@@ -65,7 +65,7 @@ class GPT4AllAPI():
         loader = importlib.machinery.SourceFileLoader(module_name, str(absolute_path/"__init__.py"))
         backend_module = loader.load_module()
         backend_class = getattr(backend_module, backend_module.backend_name)
-        self.backend = backend_class
+        return backend_class
         
     def create_chatbot(self):
         return self.backend(self.config)
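With this change load_backend() becomes side-effect free: it dynamically imports the backend package and returns its class, and the caller decides when to assign it. A hedged sketch of the resulting call-site pattern (the path and backend name are illustrative, not from the commit):

    backend_class = self.load_backend(Path("backends") / "llama_cpp")  # dynamic import, returns the class
    self.backend = backend_class                         # the caller now owns the assignment
    self.chatbot_bindings = self.backend(self.config)    # same instantiation create_chatbot() performs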
@@ -39,3 +39,9 @@ class GPTBackend:
             verbose (bool, optional): If true, the code will spit many informations about the generation process. Defaults to False.
         """
         pass
+    @staticmethod
+    def list_models(config:dict):
+        """Lists the models for this backend
+        """
+        models_dir = Path('./models')/config["backend"] # replace with the actual path to the models folder
+        return [f.name for f in models_dir.glob(GPTBackend.file_extension)]
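The base class now ships a default model lister. As written, the static method reads GPTBackend.file_extension explicitly rather than a subclass attribute, so a backend whose models use a different extension would re-implement list_models itself. A minimal sketch of such a hypothetical backend (the class name and extension are assumptions, not part of the commit; GPTBackend and Path come from the backend module shown above):

    from pathlib import Path

    class CustomBackend(GPTBackend):
        file_extension = "*.gguf"   # assumed extension, for illustration only

        @staticmethod
        def list_models(config: dict):
            # Re-implemented so this subclass's extension is used instead of GPTBackend's.
            models_dir = Path('./models') / config["backend"]
            return [f.name for f in models_dir.glob(CustomBackend.file_extension)]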