lollms-webui/app.py

######
# Project : GPT4ALL-UI
# Author : ParisNeo with the help of the community
# Supported by Nomic-AI
# Licence : Apache 2.0
# Description :
# A front end Flask application for llamacpp models.
# The official GPT4All web UI
# Made by the community for the community
######
__author__ = "parisneo"
__github__ = "https://github.com/nomic-ai/gpt4all-ui"
__copyright__ = "Copyright 2023, "
__license__ = "Apache 2.0"
import os
import logging
import argparse
import json
import re
import traceback
import threading
import sys
from tqdm import tqdm
from pyaipersonality import AIPersonality
from pyGpt4All.db import DiscussionsDB, Discussion
from flask import (
Flask,
Response,
jsonify,
render_template,
request,
stream_with_context,
send_from_directory
)
from flask_socketio import SocketIO, emit
from pathlib import Path
import gc
from geventwebsocket.handler import WebSocketHandler
from gevent.pywsgi import WSGIServer
import requests
from concurrent.futures import ThreadPoolExecutor, as_completed
app = Flask("GPT4All-WebUI", static_url_path="/static", static_folder="static")
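# Socket.IO server used to push generation output and model-installation progress to the UI.
# It runs on the gevent async backend with a 30 s ping timeout and a 15 s ping interval.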
socketio = SocketIO(app, cors_allowed_origins="*", async_mode='gevent', ping_timeout=30, ping_interval=15)
app.config['SECRET_KEY'] = 'secret!'
# Set the logging level to WARNING or higher
logging.getLogger('socketio').setLevel(logging.WARNING)
logging.getLogger('engineio').setLevel(logging.WARNING)
# Suppress Flask's default console output
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
import time
from pyGpt4All.config import load_config, save_config
from pyGpt4All.api import GPT4AllAPI
import shutil
import markdown
class Gpt4AllWebUI(GPT4AllAPI):
def __init__(self, _app, _socketio, config:dict, personality:dict, config_file_path) -> None:
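        """Wire the Flask app and the Socket.IO server together, register the HTTP endpoints
        and declare the Socket.IO event handlers used by the frontend."""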
super().__init__(config, personality, config_file_path)
self.app = _app
self.cancel_gen = False
self.socketio = _socketio
if "use_new_ui" in self.config:
if self.config["use_new_ui"]:
app.template_folder = "web/dist"
# =========================================================================================
# Endpoints
# =========================================================================================
self.add_endpoint(
"/list_backends", "list_backends", self.list_backends, methods=["GET"]
)
self.add_endpoint(
"/list_models", "list_models", self.list_models, methods=["GET"]
)
self.add_endpoint(
"/list_personalities_languages", "list_personalities_languages", self.list_personalities_languages, methods=["GET"]
)
self.add_endpoint(
"/list_personalities_categories", "list_personalities_categories", self.list_personalities_categories, methods=["GET"]
)
self.add_endpoint(
"/list_personalities", "list_personalities", self.list_personalities, methods=["GET"]
)
self.add_endpoint(
"/list_languages", "list_languages", self.list_languages, methods=["GET"]
)
self.add_endpoint(
"/list_discussions", "list_discussions", self.list_discussions, methods=["GET"]
)
self.add_endpoint("/set_personality_language", "set_personality_language", self.set_personality_language, methods=["GET"])
self.add_endpoint("/set_personality_category", "set_personality_category", self.set_personality_category, methods=["GET"])
self.add_endpoint("/", "", self.index, methods=["GET"])
self.add_endpoint("/<path:filename>", "serve_static", self.serve_static, methods=["GET"])
self.add_endpoint("/personalities/<path:filename>", "serve_personalities", self.serve_personalities, methods=["GET"])
self.add_endpoint("/export_discussion", "export_discussion", self.export_discussion, methods=["GET"])
self.add_endpoint("/export", "export", self.export, methods=["GET"])
self.add_endpoint(
"/new_discussion", "new_discussion", self.new_discussion, methods=["GET"]
)
self.add_endpoint("/stop_gen", "stop_gen", self.stop_gen, methods=["GET"])
self.add_endpoint("/rename", "rename", self.rename, methods=["POST"])
self.add_endpoint("/edit_title", "edit_title", self.edit_title, methods=["POST"])
self.add_endpoint(
"/load_discussion", "load_discussion", self.load_discussion, methods=["POST"]
)
self.add_endpoint(
"/delete_discussion",
"delete_discussion",
self.delete_discussion,
methods=["POST"],
)
self.add_endpoint(
"/update_message", "update_message", self.update_message, methods=["GET"]
)
self.add_endpoint(
"/message_rank_up", "message_rank_up", self.message_rank_up, methods=["GET"]
)
self.add_endpoint(
"/message_rank_down", "message_rank_down", self.message_rank_down, methods=["GET"]
)
self.add_endpoint(
"/delete_message", "delete_message", self.delete_message, methods=["GET"]
)
self.add_endpoint(
"/set_backend", "set_backend", self.set_backend, methods=["POST"]
)
self.add_endpoint(
"/set_model", "set_model", self.set_model, methods=["POST"]
)
self.add_endpoint(
"/update_model_params", "update_model_params", self.update_model_params, methods=["POST"]
)
self.add_endpoint(
"/get_config", "get_config", self.get_config, methods=["GET"]
)
self.add_endpoint(
"/get_available_models", "get_available_models", self.get_available_models, methods=["GET"]
)
self.add_endpoint(
"/extensions", "extensions", self.extensions, methods=["GET"]
)
self.add_endpoint(
"/training", "training", self.training, methods=["GET"]
)
self.add_endpoint(
"/main", "main", self.main, methods=["GET"]
)
self.add_endpoint(
"/settings", "settings", self.settings, methods=["GET"]
)
self.add_endpoint(
"/help", "help", self.help, methods=["GET"]
)
self.add_endpoint(
"/get_generation_status", "get_generation_status", self.get_generation_status, methods=["GET"]
)
self.add_endpoint(
"/update_setting", "update_setting", self.update_setting, methods=["POST"]
)
self.add_endpoint(
"/save_settings", "save_settings", self.save_settings, methods=["POST"]
)
self.add_endpoint(
"/get_current_personality", "get_current_personality", self.get_current_personality, methods=["GET"]
)
# =========================================================================================
# Socket IO stuff
# =========================================================================================
@socketio.on('connect')
def connect():
print('Client connected')
@socketio.on('disconnect')
def disconnect():
print('Client disconnected')
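        # Model installation: the client sends {'path': <download url>}; the file is fetched with
        # parallel HTTP range requests and progress is reported back through 'install_progress'
        # events ('progress', 'failed' or 'succeeded').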
@socketio.on('install_model')
def install_model(data):
model_path = data["path"]
progress = 0
installation_dir = Path(f'./models/{self.config["backend"]}/')
filename = Path(model_path).name
installation_path = installation_dir / filename
print("Model install requested")
print(f"Model path : {model_path}")
            if installation_path.exists():
                print("Error: Model already exists")
                socketio.emit('install_progress',{'status': 'failed', 'error': 'model already exists'})
                return
socketio.emit('install_progress',{'status': 'progress', 'progress': progress})
response = requests.get(model_path, stream=True)
file_size = int(response.headers.get('Content-Length'))
downloaded_size = 0
CHUNK_SIZE = 8192
            # A single file object is shared by all download threads, so the seek/write pair
            # is serialized to keep every ranged chunk at its own offset.
            write_lock = threading.Lock()
            def download_chunk(url, start_byte, end_byte, fileobj):
                headers = {'Range': f'bytes={start_byte}-{end_byte}'}
                response = requests.get(url, headers=headers, stream=True)
                downloaded_bytes = 0
                for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
                    if chunk:
                        with write_lock:
                            fileobj.seek(start_byte)
                            fileobj.write(chunk)
                        downloaded_bytes += len(chunk)
                        start_byte += len(chunk)
                return downloaded_bytes
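            # Split the file into one contiguous byte range per worker thread and download the
            # ranges concurrently, reporting cumulative progress as each range completes.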
def download_file(url, file_path, num_threads=4):
response = requests.head(url)
file_size = int(response.headers.get('Content-Length'))
chunk_size = file_size // num_threads
progress = 0
with open(file_path, 'wb') as fileobj:
with tqdm(total=file_size, unit='B', unit_scale=True, unit_divisor=1024) as pbar:
with ThreadPoolExecutor(max_workers=num_threads) as executor:
futures = []
for i in range(num_threads):
start_byte = i * chunk_size
end_byte = start_byte + chunk_size - 1 if i < num_threads - 1 else file_size - 1
futures.append(executor.submit(download_chunk, url, start_byte, end_byte, fileobj))
for future in tqdm(as_completed(futures), total=num_threads):
downloaded_bytes = future.result()
progress += downloaded_bytes
pbar.update(downloaded_bytes)
socketio.emit('install_progress', {'status': 'progress', 'progress': progress})
            # Download the model file with 4 parallel range requests
download_file(model_path, installation_path, num_threads=4)
socketio.emit('install_progress',{'status': 'succeeded', 'error': ''})
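        # Model removal: delete the previously downloaded file for the current backend and report
        # the outcome through the same 'install_progress' event.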
@socketio.on('uninstall_model')
def uninstall_model(data):
model_path = data['path']
installation_dir = Path(f'./models/{self.config["backend"]}/')
filename = Path(model_path).name
installation_path = installation_dir / filename
            if not installation_path.exists():
                socketio.emit('install_progress',{'status': 'failed', 'error': 'model not found'})
                return
            installation_path.unlink()
socketio.emit('install_progress',{'status': 'succeeded', 'error': ''})
@socketio.on('generate_msg')
def generate_msg(data):
if self.current_discussion is None:
if self.db.does_last_discussion_have_messages():
self.current_discussion = self.db.create_discussion()
else:
self.current_discussion = self.db.load_last_discussion()
message = data["prompt"]
message_id = self.current_discussion.add_message(
"user", message, parent=self.message_id
)
self.current_user_message_id = message_id
tpe = threading.Thread(target=self.start_message_generation, args=(message, message_id))
tpe.start()
@socketio.on('generate_msg_from')
        def generate_msg_from(data):
message_id = int(data['id'])
message = data["prompt"]
self.current_user_message_id = message_id
tpe = threading.Thread(target=self.start_message_generation, args=(message, message_id))
tpe.start()
def save_settings(self):
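        """Persist the current configuration to its file and notify connected clients that it was saved."""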
save_config(self.config, self.config_file_path)
if self.config["debug"]:
print("Configuration saved")
# Tell that the setting was changed
self.socketio.emit('save_settings', {"status":True})
return jsonify({"status":True})
def get_current_personality(self):
return jsonify({"personality":self.personality.as_dict()})
# Settings (data: {"setting_name":<the setting name>,"setting_value":<the setting value>})
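    # Example (illustrative): a POST body of {"setting_name": "temperature", "setting_value": 0.9}
    # updates the sampling temperature; unknown setting names fall through to the final error response.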
def update_setting(self):
data = request.get_json()
setting_name = data['setting_name']
if setting_name== "temperature":
self.config["temperature"]=float(data['setting_value'])
elif setting_name== "n_predict":
self.config["n_predict"]=int(data['setting_value'])
elif setting_name== "top_k":
self.config["top_k"]=int(data['setting_value'])
elif setting_name== "top_p":
self.config["top_p"]=float(data['setting_value'])
elif setting_name== "repeat_penalty":
self.config["repeat_penalty"]=float(data['setting_value'])
elif setting_name== "repeat_last_n":
self.config["repeat_last_n"]=int(data['setting_value'])
elif setting_name== "n_threads":
self.config["n_threads"]=int(data['setting_value'])
elif setting_name== "ctx_size":
self.config["ctx_size"]=int(data['setting_value'])
elif setting_name== "language":
self.config["language"]=data['setting_value']
elif setting_name== "personality_language":
back_language = self.config["personality_language"]
if self.config["personality_language"]!=data['setting_value']:
self.config["personality_language"]=data['setting_value']
                cats = json.loads(self.list_personalities_categories().data.decode("utf8"))
if len(cats)>0:
back_category = self.config["personality_category"]
self.config["personality_category"]=cats[0]
pers = json.loads(self.list_personalities().data.decode("utf8"))
if len(pers)>0:
self.config["personality"]=pers[0]
personality_fn = f"personalities/{self.config['personality_language']}/{self.config['personality_category']}/{self.config['personality']}"
self.personality.load_personality(personality_fn)
else:
self.config["personality_language"]=back_language
self.config["personality_category"]=back_category
return jsonify({'setting_name': data['setting_name'], "status":False})
else:
self.config["personality_language"]=back_language
return jsonify({'setting_name': data['setting_name'], "status":False})
elif setting_name== "personality_category":
back_category = self.config["personality_category"]
if self.config["personality_category"]!=data['setting_value']:
self.config["personality_category"]=data['setting_value']
pers = json.loads(self.list_personalities().data.decode("utf8"))
if len(pers)>0:
self.config["personality"]=pers[0]
personality_fn = f"personalities/{self.config['personality_language']}/{self.config['personality_category']}/{self.config['personality']}"
self.personality.load_personality(personality_fn)
if self.config["debug"]:
print(self.personality)
else:
self.config["personality_category"]=back_category
return jsonify({'setting_name': data['setting_name'], "status":False})
elif setting_name== "personality":
self.config["personality"]=data['setting_value']
personality_fn = f"personalities/{self.config['personality_language']}/{self.config['personality_category']}/{self.config['personality']}"
self.personality.load_personality(personality_fn)
elif setting_name== "override_personality_model_parameters":
self.config["override_personality_model_parameters"]=bool(data['setting_value'])
elif setting_name== "model":
self.config["model"]=data['setting_value']
print("New model selected")
# Build chatbot
self.chatbot_bindings = self.create_chatbot()
        elif setting_name== "backend":
            if self.config['backend']!= data['setting_value']:
                print("New backend selected")
self.config["backend"]=data['setting_value']
backend_ =self.load_backend(self.BACKENDS_LIST[self.config["backend"]])
models = backend_.list_models(self.config)
if len(models)>0:
self.backend = backend_
self.config['model'] = models[0]
# Build chatbot
self.chatbot_bindings = self.create_chatbot()
if self.config["debug"]:
print(f"Configuration {data['setting_name']} set to {data['setting_value']}")
return jsonify({'setting_name': data['setting_name'], "status":True})
else:
if self.config["debug"]:
print(f"Configuration {data['setting_name']} couldn't be set to {data['setting_value']}")
return jsonify({'setting_name': data['setting_name'], "status":False})
else:
if self.config["debug"]:
print(f"Configuration {data['setting_name']} set to {data['setting_value']}")
return jsonify({'setting_name': data['setting_name'], "status":True})
else:
if self.config["debug"]:
print(f"Configuration {data['setting_name']} couldn't be set to {data['setting_value']}")
return jsonify({'setting_name': data['setting_name'], "status":False})
if self.config["debug"]:
print(f"Configuration {data['setting_name']} set to {data['setting_value']}")
# Tell that the setting was changed
return jsonify({'setting_name': data['setting_name'], "status":True})
def list_backends(self):
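        """List the available backends (one sub-folder per backend under ./backends)."""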
        backends_dir = Path('./backends')  # directory holding one sub-folder per backend
backends = [f.stem for f in backends_dir.iterdir() if f.is_dir() and f.stem!="__pycache__"]
return jsonify(backends)
def list_models(self):
if self.backend is not None:
models = self.backend.list_models(self.config)
return jsonify(models)
else:
return jsonify([])
def list_personalities_languages(self):
        personalities_languages_dir = Path('./personalities')  # one sub-folder per personality language
personalities_languages = [f.stem for f in personalities_languages_dir.iterdir() if f.is_dir()]
return jsonify(personalities_languages)
def list_personalities_categories(self):
        personalities_categories_dir = Path(f'./personalities/{self.config["personality_language"]}')  # categories for the selected language
personalities_categories = [f.stem for f in personalities_categories_dir.iterdir() if f.is_dir()]
return jsonify(personalities_categories)
def list_personalities(self):
try:
            personalities_dir = Path(f'./personalities/{self.config["personality_language"]}/{self.config["personality_category"]}')  # personalities in the selected category
personalities = [f.stem for f in personalities_dir.iterdir() if f.is_dir()]
except Exception as ex:
personalities=[]
if self.config["debug"]:
print(f"No personalities found. Using default one {ex}")
return jsonify(personalities)
def list_languages(self):
        languages = [
{ "value": "en-US", "label": "English" },
{ "value": "fr-FR", "label": "Français" },
{ "value": "ar-AR", "label": "العربية" },
{ "value": "it-IT", "label": "Italiano" },
{ "value": "de-DE", "label": "Deutsch" },
{ "value": "nl-XX", "label": "Dutch" },
{ "value": "zh-CN", "label": "中國人" }
]
        return jsonify(languages)
def list_discussions(self):
discussions = self.db.get_discussions()
return jsonify(discussions)
def set_personality_language(self):
lang = request.args.get('language')
self.config['personality_language'] = lang
return jsonify({'success':True})
def set_personality_category(self):
category = request.args.get('category')
self.config['personality_category'] = category
return jsonify({'success':True})
def add_endpoint(
self,
endpoint=None,
endpoint_name=None,
handler=None,
methods=["GET"],
*args,
**kwargs,
):
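        """Thin wrapper around Flask's add_url_rule so routes can be registered in a single call."""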
self.app.add_url_rule(
endpoint, endpoint_name, handler, methods=methods, *args, **kwargs
)
def index(self):
return render_template("index.html")
def serve_static(self, filename):
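        """Serve frontend assets: from web/dist when the new UI is enabled, otherwise from static."""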
root_dir = os.getcwd()
if "use_new_ui" in self.config:
if self.config["use_new_ui"]:
path = os.path.join(root_dir, 'web/dist/')+"/".join(filename.split("/")[:-1])
else:
path = os.path.join(root_dir, 'static/')+"/".join(filename.split("/")[:-1])
else:
path = os.path.join(root_dir, 'static/')+"/".join(filename.split("/")[:-1])
fn = filename.split("/")[-1]
return send_from_directory(path, fn)
def serve_personalities(self, filename):
root_dir = os.getcwd()
path = os.path.join(root_dir, 'personalities/')+"/".join(filename.split("/")[:-1])
fn = filename.split("/")[-1]
return send_from_directory(path, fn)
def export(self):
return jsonify(self.db.export_to_json())
def export_discussion(self):
return jsonify({"discussion_text":self.get_discussion_to()})
def start_message_generation(self, message, message_id):
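        """Generate the model's answer to `message`: create the empty AI message entry, stream the
        reply to the client over Socket.IO and store the final text in the current discussion."""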
bot_says = ""
# send the message to the bot
print(f"Received message : {message}")
if self.current_discussion:
# First we need to send the new message ID to the client
self.current_ai_message_id = self.current_discussion.add_message(
self.personality.name, "", parent = self.current_user_message_id
2023-04-27 23:39:57 +00:00
) # first the content is empty, but we'll fill it at the end
socketio.emit('infos',
{
"type": "input_message_infos",
"bot": self.personality.name,
"user": self.personality.user_name,
"message":message,#markdown.markdown(message),
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
}
)
# prepare query and reception
self.discussion_messages = self.prepare_query(message_id)
self.prepare_reception()
self.generating = True
# app.config['executor'] = ThreadPoolExecutor(max_workers=1)
# app.config['executor'].submit(self.generate_message)
print("## Generating message ##")
self.generate_message()
print()
print("## Done ##")
print()
# Send final message
self.socketio.emit('final', {
'data': self.bot_says,
'ai_message_id':self.current_ai_message_id,
'parent':self.current_user_message_id, 'discussion_id':self.current_discussion.discussion_id
}
)
self.current_discussion.update_message(self.current_ai_message_id, self.bot_says)
self.full_message_list.append(self.bot_says)
self.cancel_gen = False
return bot_says
else:
#No discussion available
print("No discussion selected!!!")
print("## Done ##")
print()
self.cancel_gen = False
return ""
def get_generation_status(self):
return jsonify({"status":self.generating})
def stop_gen(self):
self.cancel_gen = True
print("Stop generation received")
return jsonify({"status": "ok"})
def rename(self):
data = request.get_json()
title = data["title"]
self.current_discussion.rename(title)
return "renamed successfully"
def edit_title(self):
data = request.get_json()
title = data["title"]
discussion_id = data["id"]
self.current_discussion = Discussion(discussion_id, self.db)
self.current_discussion.rename(title)
return "title renamed successfully"
def load_discussion(self):
data = request.get_json()
if "id" in data:
discussion_id = data["id"]
self.current_discussion = Discussion(discussion_id, self.db)
else:
if self.current_discussion is not None:
discussion_id = self.current_discussion.discussion_id
self.current_discussion = Discussion(discussion_id, self.db)
else:
self.current_discussion = self.db.create_discussion()
messages = self.current_discussion.get_messages()
#for message in messages:
# message["content"] = markdown.markdown(message["content"])
return jsonify(messages), {'Content-Type': 'application/json; charset=utf-8'}
def delete_discussion(self):
data = request.get_json()
discussion_id = data["id"]
self.current_discussion = Discussion(discussion_id, self.db)
self.current_discussion.delete_discussion()
self.current_discussion = None
return jsonify({})
    def update_message(self):
        message_id = request.args.get("id")
        new_message = request.args.get("message")
        self.current_discussion.update_message(message_id, new_message)
        return jsonify({"status": "ok"})

    def message_rank_up(self):
        message_id = request.args.get("id")
        new_rank = self.current_discussion.message_rank_up(message_id)
        return jsonify({"new_rank": new_rank})

    def message_rank_down(self):
        message_id = request.args.get("id")
        new_rank = self.current_discussion.message_rank_down(message_id)
        return jsonify({"new_rank": new_rank})
    def delete_message(self):
        message_id = request.args.get("id")
        if self.current_discussion is None:
            return jsonify({"status": False,"message":"No discussion is selected"})
        else:
            new_rank = self.current_discussion.delete_message(message_id)
            return jsonify({"status":True,"new_rank": new_rank})
def new_discussion(self):
title = request.args.get("title")
timestamp = self.create_new_discussion(title)
# app.config['executor'] = ThreadPoolExecutor(max_workers=1)
# app.config['executor'].submit(self.create_chatbot)
# target=self.create_chatbot()
# Return a success response
return json.dumps({"id": self.current_discussion.discussion_id, "time": timestamp, "welcome_message":self.personality.welcome_message, "sender":self.personality.name})
def set_backend(self):
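        """Switch to the backend requested by the client and select its first available model."""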
data = request.get_json()
backend = str(data["backend"])
if self.config['backend']!= backend:
print("New backend selected")
self.config['backend'] = backend
backend_ =self.load_backend(self.BACKENDS_LIST[self.config["backend"]])
models = backend_.list_models(self.config)
if len(models)>0:
self.backend = backend_
self.config['model'] = models[0]
# Build chatbot
self.chatbot_bindings = self.create_chatbot()
return jsonify({"status": "ok"})
else:
return jsonify({"status": "no_models_found"})
return jsonify({"status": "error"})
def set_model(self):
data = request.get_json()
model = str(data["model"])
if self.config['model']!= model:
print("New model selected")
self.config['model'] = model
# Build chatbot
self.chatbot_bindings = self.create_chatbot()
return jsonify({"status": "ok"})
return jsonify({"status": "error"})
def update_model_params(self):
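        """Apply the backend/model/personality and sampling parameters sent by the settings page,
        then persist them to the configuration file."""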
data = request.get_json()
backend = str(data["backend"])
model = str(data["model"])
personality_language = str(data["personality_language"])
personality_category = str(data["personality_category"])
personality = str(data["personality"])
if self.config['backend']!=backend or self.config['model'] != model:
print("New model selected")
self.config['backend'] = backend
self.config['model'] = model
self.create_chatbot()
self.config['personality_language'] = personality_language
self.config['personality_category'] = personality_category
self.config['personality'] = personality
personality_fn = f"personalities/{self.config['personality_language']}/{self.config['personality_category']}/{self.config['personality']}"
print(f"Loading personality : {personality_fn}")
self.personality = AIPersonality(personality_fn)
self.config['n_predict'] = int(data["nPredict"])
self.config['seed'] = int(data["seed"])
self.config['model'] = str(data["model"])
self.config['voice'] = str(data["voice"])
self.config['language'] = str(data["language"])
self.config['temperature'] = float(data["temperature"])
self.config['top_k'] = int(data["topK"])
self.config['top_p'] = float(data["topP"])
self.config['repeat_penalty'] = float(data["repeatPenalty"])
self.config['repeat_last_n'] = int(data["repeatLastN"])
save_config(self.config, self.config_file_path)
print("==============================================")
print("Parameters changed to:")
print(f"\tBackend:{self.config['backend']}")
print(f"\tModel:{self.config['model']}")
print(f"\tPersonality language:{self.config['personality_language']}")
print(f"\tPersonality category:{self.config['personality_category']}")
print(f"\tPersonality:{self.config['personality']}")
print(f"\tLanguage:{self.config['language']}")
print(f"\tVoice:{self.config['voice']}")
print(f"\tTemperature:{self.config['temperature']}")
print(f"\tNPredict:{self.config['n_predict']}")
print(f"\tSeed:{self.config['seed']}")
print(f"\top_k:{self.config['top_k']}")
print(f"\top_p:{self.config['top_p']}")
print(f"\trepeat_penalty:{self.config['repeat_penalty']}")
print(f"\trepeat_last_n:{self.config['repeat_last_n']}")
print("==============================================")
return jsonify({"status":"ok"})
def get_available_models(self):
"""Get the available models
Returns:
_type_: _description_
"""
model_list = self.backend.get_available_models()
models = []
for model in model_list:
filename = model['filename']
filesize = model['filesize']
path = f'https://gpt4all.io/models/{filename}'
local_path = Path(f'./models/{self.config["backend"]}/{filename}')
is_installed = local_path.exists()
models.append({
'title': model['filename'],
'icon': '/icons/default.png', # Replace with the path to the model icon
'description': model['description'],
'isInstalled': is_installed,
'path': path,
'filesize': filesize,
})
return jsonify(models)
def get_config(self):
return jsonify(self.config)
def main(self):
return render_template("main.html")
def settings(self):
return render_template("settings.html")
def help(self):
return render_template("help.html")
def training(self):
return render_template("training.html")
def extensions(self):
return render_template("extensions.html")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Start the chatbot Flask app.")
parser.add_argument(
"-c", "--config", type=str, default="default", help="Sets the configuration file to be used."
)
parser.add_argument(
"-p", "--personality", type=str, default=None, help="Selects the personality to be using."
)
parser.add_argument(
"-s", "--seed", type=int, default=None, help="Force using a specific seed value."
)
parser.add_argument(
"-m", "--model", type=str, default=None, help="Force using a specific model."
)
parser.add_argument(
"--temp", type=float, default=None, help="Temperature parameter for the model."
)
parser.add_argument(
"--n_predict",
type=int,
default=None,
help="Number of tokens to predict at each step.",
)
parser.add_argument(
"--n_threads",
type=int,
default=None,
help="Number of threads to use.",
)
parser.add_argument(
"--top_k", type=int, default=None, help="Value for the top-k sampling."
)
parser.add_argument(
"--top_p", type=float, default=None, help="Value for the top-p sampling."
)
parser.add_argument(
"--repeat_penalty", type=float, default=None, help="Penalty for repeated tokens."
)
parser.add_argument(
"--repeat_last_n",
type=int,
default=None,
help="Number of previous tokens to consider for the repeat penalty.",
)
parser.add_argument(
"--ctx_size",
type=int,
default=None,#2048,
help="Size of the context window for the model.",
)
parser.add_argument(
"--debug",
dest="debug",
action="store_true",
default=None,
help="launch Flask server in debug mode",
)
parser.add_argument(
"--host", type=str, default=None, help="the hostname to listen on"
)
parser.add_argument("--port", type=int, default=None, help="the port to listen on")
parser.add_argument(
"--db_path", type=str, default=None, help="Database path"
)
args = parser.parse_args()
# The default configuration must be kept unchanged as it is committed to the repository,
    # so we have to make a copy that is not committed
default_config = load_config(f"configs/default.yaml")
if args.config=="default":
args.config = "local_default"
if not Path(f"configs/local_default.yaml").exists():
print("No local configuration file found. Building from scratch")
shutil.copy(f"configs/default.yaml", f"configs/local_default.yaml")
config_file_path = f"configs/{args.config}.yaml"
config = load_config(config_file_path)
if "version" not in config or int(config["version"])<int(default_config["version"]):
#Upgrade old configuration files to new format
print("Configuration file is very old. Replacing with default configuration")
for key, value in default_config.items():
if key not in config:
config[key] = value
config["version"]=int(default_config["version"])
save_config(config, config_file_path)
# Override values in config with command-line arguments
for arg_name, arg_value in vars(args).items():
if arg_value is not None:
config[arg_name] = arg_value
try:
personality_path = f"personalities/{config['personality_language']}/{config['personality_category']}/{config['personality']}"
personality = AIPersonality(personality_path)
except Exception as ex:
print("Personality file not found. Please verify that the personality you have selected exists or select another personality. Some updates may lead to change in personality name or category, so check the personality selection in settings to be sure.")
if config["debug"]:
print(ex)
personality = AIPersonality()
# executor = ThreadPoolExecutor(max_workers=1)
# app.config['executor'] = executor
bot = Gpt4AllWebUI(app, socketio, config, personality, config_file_path)
# chong Define custom WebSocketHandler with error handling
class CustomWebSocketHandler(WebSocketHandler):
def handle_error(self, environ, start_response, e):
# Handle the error here
print("WebSocket error:", e)
super().handle_error(environ, start_response, e)
url = f'http://{config["host"]}:{config["port"]}'
print(f"Please open your browser and go to {url} to view the ui")
# chong -add socket server
app.config['debug'] = config["debug"]
if config["debug"]:
print("debug mode:true")
else:
print("debug mode:false")
    http_server = WSGIServer((config["host"], config["port"]), app, handler_class=CustomWebSocketHandler)
http_server.serve_forever()
#if config["debug"]:
# app.run(debug=True, host=config["host"], port=config["port"])
#else:
# app.run(host=config["host"], port=config["port"])