mirror of https://github.com/ParisNeo/lollms.git (synced 2025-02-21 17:56:38 +00:00)

commit 0025a36775 ("upgraded"), parent 6db95c8c2d
@@ -1,7 +1,9 @@
 # =================== Lord Of Large Language Multimodal Systems Configuration file ===========================
-version: 71
+version: 72
 binding_name: null
 model_name: null
+model_variant: null
+model_type: null
 
 show_news_panel: True
 
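The configuration schema moves from version 71 to 72 and gains two new keys, model_variant and model_type, both defaulting to null. A minimal sketch of how the updated section parses, assuming PyYAML is available (the snippet only mirrors the shipped defaults; no real values are implied):

# Sketch: parse the new config keys added in version 72 (defaults only).
import yaml  # assumes PyYAML is installed

cfg_text = """
version: 72
binding_name: null
model_name: null
model_variant: null
model_type: null
"""
cfg = yaml.safe_load(cfg_text)
assert cfg["version"] == 72
assert cfg["model_variant"] is None and cfg["model_type"] is None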
@@ -25,6 +25,7 @@ from lollms.config import TypedConfig, InstallOption
 from lollms.main_config import LOLLMSConfig
 from lollms.com import NotificationType, NotificationDisplayType, LoLLMsCom
 from lollms.security import sanitize_path
+from lollms.utilities import show_message_dialog
 
 import urllib
 import inspect
@@ -151,6 +152,12 @@ class LLMBinding:
         print("Install model triggered")
         sanitize_path(model_path)
         model_path = model_path.replace("\\","/")
+        parts = model_path.split("/")
+        if parts[2]=="huggingface.co":
+            ASCIIColors.cyan("Hugging face model detected")
+            model_name = parts[4]
+        else:
+            model_name = variant_name
 
         if model_type.lower() in model_path.lower():
             model_type:str=model_type
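The new block derives model_name from the forward-slash-normalized download URL: index 2 of the split is the host and index 4 is the Hugging Face repository name; anything else falls back to variant_name. A standalone sketch of the same indexing, using a made-up URL and variant (not taken from the commit):

# Sketch of the parts[...] indexing above; the URL and variant are hypothetical.
model_path = "https://huggingface.co/SomeUser/SomeModel-GGUF/resolve/main/somemodel.Q4_K_M.gguf"
variant_name = "somemodel.Q4_K_M.gguf"

parts = model_path.replace("\\", "/").split("/")
# parts -> ['https:', '', 'huggingface.co', 'SomeUser', 'SomeModel-GGUF', 'resolve', 'main', 'somemodel.Q4_K_M.gguf']
if parts[2] == "huggingface.co":
    model_name = parts[4]        # repository name, here 'SomeModel-GGUF'
else:
    model_name = variant_name    # fall back to the requested variant
print(model_name)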
@@ -174,15 +181,21 @@ class LLMBinding:
             else:
                 filename = parts[4]
                 installation_path = installation_dir / filename
 
         elif model_type=="gpt4all":
             filename = variant_name
             model_path = "http://gpt4all.io/models/gguf/"+filename
-            installation_path = installation_dir / filename
+            installation_root_dir = installation_dir / model_name
+            installation_root_dir.mkdir(parents=True, exist_ok=True)
+            installation_path = installation_root_dir / filename
         else:
             filename = Path(model_path).name
-            installation_path = installation_dir / filename
+            installation_root_dir = installation_dir / model_name
+            installation_root_dir.mkdir(parents=True, exist_ok=True)
+            installation_path = installation_root_dir / filename
         print("Model install requested")
         print(f"Model path : {model_path}")
+        print(f"Installation Path : {installation_path}")
 
         model_name = filename
         binding_folder = self.config["binding_name"]
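Both the gpt4all and generic download branches now install into a per-model subfolder (installation_dir / model_name) instead of dropping the file directly into installation_dir. A minimal pathlib sketch of the new layout, with made-up names:

# Sketch of the per-model installation layout; all names are hypothetical.
from pathlib import Path

installation_dir = Path("models/gguf")
model_name = "SomeModel-GGUF"
filename = "somemodel.Q4_K_M.gguf"

installation_root_dir = installation_dir / model_name
installation_root_dir.mkdir(parents=True, exist_ok=True)   # models/gguf/SomeModel-GGUF
installation_path = installation_root_dir / filename       # models/gguf/SomeModel-GGUF/somemodel.Q4_K_M.gguf
print(installation_path)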
@@ -705,7 +718,7 @@ class LLMBinding:
         models = []
         for models_folder in self.models_folders:
             if models_folder.name in ["ggml","gguf","gpt4all"]:
-                models+=[f.name for f in models_folder.iterdir() if f.suffix in self.supported_file_extensions or f.suffix==".reference"]
+                models+=[f.name for f in models_folder.iterdir() if f.is_dir() and not f.stem.startswith(".") or f.suffix==".reference"]
             else:
                 models+=[f.name for f in models_folder.iterdir() if f.is_dir() and not f.stem.startswith(".") or f.suffix==".reference"]
         return models
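Because models now live in per-model folders, list_models stops matching file extensions for the ggml/gguf/gpt4all folders and lists subdirectories instead, keeping .reference entries. Note that, as written, the `and` binds tighter than the `or`, so hidden directories are skipped but any *.reference path passes. A standalone sketch of the same predicate over a hypothetical folder:

# Sketch of the directory-based listing; the folder path is hypothetical.
from pathlib import Path

models_folder = Path("models/gguf")
models = []
if models_folder.exists():
    models += [f.name for f in models_folder.iterdir()
               if f.is_dir() and not f.stem.startswith(".") or f.suffix == ".reference"]
print(models)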
@@ -1,7 +1,9 @@
 # =================== Lord Of Large Language Multimodal Systems Configuration file ===========================
-version: 71
+version: 72
 binding_name: null
 model_name: null
+model_variant: null
+model_type: null
 
 show_news_panel: True
 
@@ -71,6 +71,25 @@ def show_yes_no_dialog(title, text):
 
     return result
 
+def show_message_dialog(title, text):
+    import tkinter as tk
+    from tkinter import messagebox
+    # Create a new Tkinter root window and hide it
+    root = tk.Tk()
+    root.withdraw()
+
+    # Make the window appear on top
+    root.attributes('-topmost', True)
+
+    # Show the dialog box
+    result = messagebox.askquestion(title, text)
+
+    # Destroy the root window
+    root.destroy()
+
+    return result
+
+
 def is_linux():
     return sys.platform.startswith("linux")
 
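A usage sketch for the new helper (the title and text below are illustrative). Since it is built on messagebox.askquestion, the return value is the answer string ("yes" or "no") rather than a simple acknowledgement, and a display is required for Tkinter:

# Hypothetical usage of the helper added to lollms.utilities in this commit.
from lollms.utilities import show_message_dialog

answer = show_message_dialog("Model installation", "Proceed with the download?")
print(answer)  # "yes" or "no", as returned by messagebox.askquestion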