mirror of https://github.com/ParisNeo/lollms-webui.git (synced 2025-01-18 02:39:47 +00:00)

Merge branch 'lollms' of https://github.com/nomic-ai/gpt4all-ui into lollms

commit 6a297ec066
@@ -281,9 +281,8 @@ class ModelProcess:
personality = AIPersonality(self.lollms_paths, personality_path, run_scripts=True, model=self.model)
self.mounted_personalities.append(personality)
except Exception as ex:
print(f"Personality file not found or is corrupted ({personality_path}).\nPlease verify that the personality you have selected exists or select another personality. Some updates may lead to change in personality name or category, so check the personality selection in settings to be sure.")
if self.config["debug"]:
print(ex)
ASCIIColors.error(f"Personality file not found or is corrupted ({personality_path}).\nPlease verify that the personality you have selected exists or select another personality. Some updates may lead to change in personality name or category, so check the personality selection in settings to be sure.")
ASCIIColors.error(f"Exception received is: {ex}")
personality = AIPersonality(self.lollms_paths, model=self.model)
failed_personalities.append(personality_path)
self._set_config_result['errors'].append(f"couldn't build personalities:{ex}")

@@ -295,12 +294,13 @@ class ModelProcess:
elif len(failed_personalities)>0:
self._set_config_result['status'] ='semi_failed'
self._set_config_result['personalities_status'] ='semi_failed'

if self.config['active_personality_id']<len(self.mounted_personalities):
self.personality = self.mounted_personalities[self.config['active_personality_id']]
ASCIIColors.success("Personality set successfully")
else:
self.personality = None
ASCIIColors.error("Personality set failed")

ASCIIColors.error("Failed to set personality. Please select a valid one")

def _run(self):
self._rebuild_model()
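The hunk above swaps the plain print() diagnostics for ASCIIColors.error logging while keeping the fallback to the default personality when a mount fails. A minimal sketch of that mounting pattern, assuming the lollms package exposes AIPersonality and ASCIIColors as used in the surrounding code; the mount_personalities helper name and the import paths are illustrative, only the constructor and logging calls mirror the diff:

# Minimal sketch of the personality-mounting fallback shown in the hunk above.
from lollms.personality import AIPersonality   # assumed module path
from lollms.helpers import ASCIIColors         # assumed module path

def mount_personalities(lollms_paths, personality_paths, model):
    mounted, failed = [], []
    for personality_path in personality_paths:
        try:
            # Build the personality from its folder, as in the diff.
            personality = AIPersonality(lollms_paths, personality_path, run_scripts=True, model=model)
        except Exception as ex:
            # Log the failure and fall back to the default personality.
            ASCIIColors.error(f"Personality file not found or is corrupted ({personality_path}).")
            ASCIIColors.error(f"Exception received is: {ex}")
            personality = AIPersonality(lollms_paths, model=model)
            failed.append(personality_path)
        mounted.append(personality)
    return mounted, failed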
app.py (30 lines changed)
@@ -565,8 +565,7 @@ class LoLLMsWebUI(LoLLMsAPPI):
personalities = [f.stem for f in personalities_dir.iterdir() if f.is_dir()]
except Exception as ex:
personalities=[]
if self.config["debug"]:
print(f"No personalities found. Using default one {ex}")
ASCIIColors.error(f"No personalities found. Using default one {ex}")
return jsonify(personalities)

def list_languages(self):
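The change above routes the "No personalities found" message through ASCIIColors.error instead of a debug-only print. A small, self-contained sketch of the same endpoint behaviour using plain Flask; the route path, the personalities_dir value, and the use of print instead of ASCIIColors are placeholders, while the iterdir/jsonify logic mirrors the hunk:

# Hypothetical minimal reproduction of the list_personalities endpoint.
from pathlib import Path
from flask import Flask, jsonify

app = Flask(__name__)
personalities_dir = Path("personalities/english/generic")   # placeholder path

@app.route("/list_personalities")                            # assumed route
def list_personalities():
    try:
        # One entry per personality folder, as in the diff.
        personalities = [f.stem for f in personalities_dir.iterdir() if f.is_dir()]
    except Exception as ex:
        personalities = []
        # The committed code reports this through ASCIIColors.error.
        print(f"No personalities found. Using default one {ex}")
    return jsonify(personalities)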
@@ -1110,8 +1109,7 @@ def sync_cfg(default_config, config):
return config, added_entries, removed_entries

if __name__ == "__main__":

lollms_paths = LollmsPaths.find_paths(force_local=True)
lollms_paths = LollmsPaths.find_paths(force_local=True, custom_default_cfg_path="configs/config.yaml")
db_folder = lollms_paths.personal_path/"databases"
db_folder.mkdir(parents=True, exist_ok=True)
parser = argparse.ArgumentParser(description="Start the chatbot Flask app.")

@@ -1182,26 +1180,8 @@ if __name__ == "__main__":
)
args = parser.parse_args()

# The default configuration must be kept unchanged as it is committed to the repository,
# so we have to make a copy that is not comitted
default_config = load_config("configs/config.yaml")

if args.config!="local_config":
args.config = "local_config"
if not lollms_paths.personal_configuration_path/f"local_config.yaml".exists():
print("No local configuration file found. Building from scratch")
shutil.copy(default_config, lollms_paths.personal_configuration_path/f"local_config.yaml")

config_file_path = lollms_paths.personal_configuration_path/f"local_config.yaml"
config = LOLLMSConfig(config_file_path)


if "version" not in config or int(config["version"])<int(default_config["version"]):
#Upgrade old configuration files to new format
ASCIIColors.error("Configuration file is very old.\nReplacing with default configuration")
config, added, removed = sync_cfg(default_config, config)
print(f"Added entries : {added}, removed entries:{removed}")
config.save_config(config_file_path)
# Configuration loading part
config = LOLLMSConfig.autoload(lollms_paths)

# Override values in config with command-line arguments
for arg_name, arg_value in vars(args).items():
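The large deletion above replaces the manual local_config.yaml copy and version-upgrade logic with a single LOLLMSConfig.autoload call. A minimal sketch of the resulting startup flow; the import paths, the argparse options, and the override-loop body are assumptions, while find_paths and autoload are taken from the hunk:

# Sketch of the simplified startup flow after this commit.
import argparse
from lollms.paths import LollmsPaths     # assumed module path
from lollms.config import LOLLMSConfig   # assumed module path

lollms_paths = LollmsPaths.find_paths(force_local=True, custom_default_cfg_path="configs/config.yaml")
config = LOLLMSConfig.autoload(lollms_paths)

parser = argparse.ArgumentParser(description="Start the chatbot Flask app.")
parser.add_argument("--host", type=str, default=None, help="hostname to listen on")   # illustrative option
parser.add_argument("--port", type=int, default=None, help="port to listen on")       # illustrative option
args = parser.parse_args()

# Override values in config with command-line arguments that were actually given.
for arg_name, arg_value in vars(args).items():
    if arg_value is not None:
        config[arg_name] = arg_value   # assumes LOLLMSConfig supports item assignment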
@@ -1210,7 +1190,7 @@ if __name__ == "__main__":

# executor = ThreadPoolExecutor(max_workers=1)
# app.config['executor'] = executor
bot = LoLLMsWebUI(app, socketio, config, config_file_path, lollms_paths)
bot = LoLLMsWebUI(app, socketio, config, config.file_path, lollms_paths)

# chong Define custom WebSocketHandler with error handling
class CustomWebSocketHandler(WebSocketHandler):
@@ -1,7 +1,7 @@
# =================== Lord Of Large Language Models Configuration file ===========================
version: 7
binding_name: llama_cpp_official
model_name: Wizard-Vicuna-7B-Uncensored.ggmlv3.q4_0.bin
model_name: airoboros-13b-gpt4.ggmlv3.q4_0.bin

# Host information
host: localhost
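This hunk changes the default model in the shipped configuration from Wizard-Vicuna-7B to airoboros-13b. An illustrative example of reading a file with this layout using PyYAML (the repository's own load_config helper, seen earlier in the diff, plays the same role for configs/config.yaml):

# Illustrative only: inspect the shipped default configuration with PyYAML.
import yaml

with open("configs/config.yaml", "r", encoding="utf-8") as f:
    cfg = yaml.safe_load(f)

print(cfg["version"])              # 7
print(cfg["binding_name"])         # llama_cpp_official
print(cfg["model_name"])           # airoboros-13b-gpt4.ggmlv3.q4_0.bin
print(cfg.get("host", "localhost"))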
web/dist/assets/index-29d93ec2.css (vendored, new file, 1 line): file diff suppressed because one or more lines are too long
web/dist/assets/index-ce2e3117.js (vendored, new file, 88 lines): file diff suppressed because one or more lines are too long
web/dist/assets/index-f7a8e21d.js (vendored, 16 lines changed): file diff suppressed because one or more lines are too long
web/dist/index.html (vendored, 4 lines changed)
@@ -6,8 +6,8 @@

<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>GPT4All - WEBUI</title>
<script type="module" crossorigin src="/assets/index-f7a8e21d.js"></script>
<link rel="stylesheet" href="/assets/index-8e9b89f5.css">
<script type="module" crossorigin src="/assets/index-ce2e3117.js"></script>
<link rel="stylesheet" href="/assets/index-29d93ec2.css">
</head>
<body>
<div id="app"></div>
web/src/clip (new submodule, 1 line)
@@ -0,0 +1 @@
Subproject commit a9b1bf5920416aaeaec965c25dd9e8f98c864f16
@@ -934,7 +934,6 @@ export default {
this.api_get_req("list_models").then(response => { this.modelsArr = response })
//this.api_get_req("list_personalities_languages").then(response => { this.persLangArr = response })
this.api_get_req("list_personalities_categories").then(response => { this.persCatgArr = response })
this.api_get_req("list_personalities").then(response => { this.persArr = response })
//this.api_get_req("list_languages").then(response => { this.langArr = response })
this.api_get_req("get_config").then(response => {
console.log("Received config")

@@ -957,6 +956,10 @@ export default {
this.configFile.personality_folder = response["personality_name"]
console.log("received infos")
});
this.api_get_req("list_personalities").then(response => {
this.persArr = response
console.log(`Listed personalities:\n${response}`)
})
this.api_get_req("disk_usage").then(response => {
this.diskUsage = response
})
@@ -1,5 +1,6 @@
@echo off

echo \u001b[34m
echo HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH
echo HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH
echo HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH

@@ -34,7 +35,7 @@ echo HHHHHHHHHHHH.HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH
echo HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH
echo HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH
echo HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH

echo \u001b[0m
echo Checking internet connection

ping google.com -n 1 >nul 2>&1
webui.sh (2 lines changed)
@@ -1,4 +1,5 @@
#!/usr/bin/env bash
echo "\u001b[34m"
echo "HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH"
echo "HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH"
echo "HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH"

@@ -33,6 +34,7 @@ echo "HHHHHHHHHHHH.HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH
echo "HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH"
echo "HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH"
echo "HHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHHH"
echo "\u001b[0m"

if ping -q -c 1 google.com >/dev/null 2>&1; then
echo -e "\e[32mInternet Connection working fine\e[0m"