updated doc

saloui 2023-05-14 10:32:55 +02:00
parent b676c29f58
commit b76474c5f4
2 changed files with 25 additions and 21 deletions


@@ -97,10 +97,11 @@ Now you're ready to work!
 # Supported backends
 Two backends are now supported:
-1 - [The llama_cpp backend by Abdeladim](https://github.com/abdeladim-s/pyllamacpp)
-2 - [The GPT-j backend by Abdeladim](https://github.com/abdeladim-s/pygptj)
-3 - [The GPT-j backend by marella](https://github.com/marella/gpt4all-j)
-4 - Hugging face's Transformers (under construction)
+1- [The llama_cpp backend by Abdeladim](https://github.com/abdeladim-s/pyllamacpp)
+2- [The GPT-j backend by Abdeladim](https://github.com/abdeladim-s/pygptj)
+3- [The GPT-j backend by marella](https://github.com/marella/gpt4all-j)
+4- Hugging Face's Transformers (under construction)
 # Supported models
 You can also refuse to download the model during the install procedure and download it manually.
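Since the doc mentions downloading a model manually, it helps to know where the server looks for installed models: judging from the app.py change below, that is `./models/<backend>/<filename>`. A minimal sketch of a manual download, assuming that layout; the backend name, server URL, and filename here are placeholders, not values from this repository:

```python
from pathlib import Path
import urllib.request

backend = "llama_cpp"                           # placeholder; mirrors config["backend"] in app.py
filename = "example-model.bin"                  # placeholder model file
url = f"https://example.com/models/{filename}"  # placeholder server

# app.py checks for installed models at ./models/<backend>/<filename>
local_path = Path(f"./models/{backend}/{filename}")
local_path.parent.mkdir(parents=True, exist_ok=True)

if not local_path.exists():
    urllib.request.urlretrieve(url, str(local_path))
    print(f"Downloaded {filename} to {local_path}")
```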

app.py

@@ -623,23 +623,26 @@ class Gpt4AllWebUI(GPT4AllAPI):
         models = []
         for model in model_list:
-            filename = model['filename']
-            server = model['server']
-            filesize = model['filesize']
-            if server.endswith("/"):
-                path = f'{server}{filename}'
-            else:
-                path = f'{server}/{filename}'
-            local_path = Path(f'./models/{self.config["backend"]}/{filename}')
-            is_installed = local_path.exists()
-            models.append({
-                'title': model['filename'],
-                'icon': '/icons/default.png',  # Replace with the path to the model icon
-                'description': model['description'],
-                'isInstalled': is_installed,
-                'path': path,
-                'filesize': filesize,
-            })
+            try:
+                filename = model['filename']
+                server = model['server']
+                filesize = model['filesize']
+                if server.endswith("/"):
+                    path = f'{server}{filename}'
+                else:
+                    path = f'{server}/{filename}'
+                local_path = Path(f'./models/{self.config["backend"]}/{filename}')
+                is_installed = local_path.exists()
+                models.append({
+                    'title': model['filename'],
+                    'icon': '/icons/default.png',  # Replace with the path to the model icon
+                    'description': model['description'],
+                    'isInstalled': is_installed,
+                    'path': path,
+                    'filesize': filesize,
+                })
+            except Exception as ex:
+                print(f"Problem with model: {model} ({ex})")
         return jsonify(models)
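The change above wraps each entry's field lookups in its own try/except, so a single malformed entry (say, one missing a `filesize` or `description` key) is reported and skipped instead of aborting the whole model listing. A standalone sketch of the same pattern, using a hypothetical `model_list` and a hard-coded backend in place of `self.config["backend"]`, with no Flask required:

```python
from pathlib import Path

# Hypothetical entries; the second one deliberately lacks 'filesize'.
model_list = [
    {"filename": "good.bin", "server": "https://example.com/",
     "filesize": 123, "description": "well-formed entry"},
    {"filename": "broken.bin", "server": "https://example.com"},
]

backend = "llama_cpp"  # stands in for self.config["backend"]
models = []
for model in model_list:
    try:
        filename = model["filename"]
        server = model["server"]
        filesize = model["filesize"]
        path = f"{server}{filename}" if server.endswith("/") else f"{server}/{filename}"
        models.append({
            "title": filename,
            "path": path,
            "filesize": filesize,
            "isInstalled": Path(f"./models/{backend}/{filename}").exists(),
        })
    except Exception as ex:
        # The bad entry is reported and skipped; the loop keeps going.
        print(f"Problem with model: {model} ({ex})")

print(models)  # contains only the well-formed entry
```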