From b76474c5f4becedac38745e95bd7279482cc70b3 Mon Sep 17 00:00:00 2001
From: saloui
Date: Sun, 14 May 2023 10:32:55 +0200
Subject: [PATCH] updated doc

---
 README.md |  9 +++++----
 app.py    | 37 ++++++++++++++++++++-----------------
 2 files changed, 25 insertions(+), 21 deletions(-)

diff --git a/README.md b/README.md
index b178ac1a..23bed2a6 100644
--- a/README.md
+++ b/README.md
@@ -97,10 +97,11 @@ Now you're ready to work!
 # Supported backends
 Two backends are now supported:
-1 - [The llama_cpp backend by Abdeladim](https://github.com/abdeladim-s/pyllamacpp)
-2 - [The GPT-j backend by Abdeladim](https://github.com/abdeladim-s/pygptj)
-3 - [The GPT-j backend by marella](https://github.com/marella/gpt4all-j)
-4 - Hugging face's Transformers (under construction)
+
+1- [The llama_cpp backend by Abdeladim](https://github.com/abdeladim-s/pyllamacpp)
+2- [The GPT-j backend by Abdeladim](https://github.com/abdeladim-s/pygptj)
+3- [The GPT-j backend by marella](https://github.com/marella/gpt4all-j)
+4- Hugging face's Transformers (under construction)
 
 # Supported models
 You can also refuse to download the model during the install procedure and download it manually.
 
diff --git a/app.py b/app.py
index aad4f9a8..6c39d96e 100644
--- a/app.py
+++ b/app.py
@@ -623,23 +623,26 @@ class Gpt4AllWebUI(GPT4AllAPI):
 
         models = []
         for model in model_list:
-            filename = model['filename']
-            server = model['server']
-            filesize = model['filesize']
-            if server.endswith("/"):
-                path = f'{server}{filename}'
-            else:
-                path = f'{server}/{filename}'
-            local_path = Path(f'./models/{self.config["backend"]}/{filename}')
-            is_installed = local_path.exists()
-            models.append({
-                'title': model['filename'],
-                'icon': '/icons/default.png',  # Replace with the path to the model icon
-                'description': model['description'],
-                'isInstalled': is_installed,
-                'path': path,
-                'filesize': filesize,
-            })
+            try:
+                filename = model['filename']
+                server = model['server']
+                filesize = model['filesize']
+                if server.endswith("/"):
+                    path = f'{server}{filename}'
+                else:
+                    path = f'{server}/{filename}'
+                local_path = Path(f'./models/{self.config["backend"]}/{filename}')
+                is_installed = local_path.exists()
+                models.append({
+                    'title': model['filename'],
+                    'icon': '/icons/default.png',  # Replace with the path to the model icon
+                    'description': model['description'],
+                    'isInstalled': is_installed,
+                    'path': path,
+                    'filesize': filesize,
+                })
+            except:
+                print(f"Problem with model : {model}")
 
         return jsonify(models)
 
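
For context, a minimal standalone sketch of the per-model error handling this patch introduces in app.py. The sample model_list, the backend value, and the `except Exception` form are assumptions for illustration only (the real hunk runs inside the Flask handler, reads self.config["backend"], and uses a bare `except:`); the point is that a malformed catalog entry is now reported and skipped instead of aborting the whole listing.

    from pathlib import Path

    # Hypothetical sample data; the second entry is malformed (no 'filename' key).
    model_list = [
        {"filename": "model-a.bin", "server": "https://example.com/models/",
         "filesize": "4.2 GB", "description": "Sample model A"},
        {"server": "https://example.com/models/",
         "filesize": "3.6 GB", "description": "Entry without a filename"},
    ]

    backend = "llama_cpp"  # stands in for self.config["backend"] in app.py
    models = []
    for model in model_list:
        try:
            filename = model["filename"]
            server = model["server"]
            path = f"{server}{filename}" if server.endswith("/") else f"{server}/{filename}"
            models.append({
                "title": filename,
                "description": model["description"],
                "isInstalled": Path(f"./models/{backend}/{filename}").exists(),
                "path": path,
                "filesize": model["filesize"],
            })
        except Exception as ex:  # the patch uses a bare `except:`; Exception is used here to avoid trapping KeyboardInterrupt
            print(f"Problem with model : {model} ({ex})")

    print(models)  # only the well-formed entry is listed; the broken one is reported and skipped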