V4.0 Ready

This commit is contained in:
Saifeddine ALOUI 2023-08-03 01:07:29 +02:00
parent bc19cf4b0b
commit 7964244dcd
10 changed files with 821 additions and 791 deletions


@ -9,12 +9,12 @@
######
from flask import request
from datetime import datetime
from api.db import DiscussionsDB
from api.db import DiscussionsDB, Discussion
from api.helpers import compare_lists
from pathlib import Path
import importlib
from lollms.config import InstallOption
from lollms.types import MSG_TYPE
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.personality import AIPersonality, PersonalityBuilder
from lollms.binding import LOLLMSConfig, BindingBuilder, LLMBinding, ModelBuilder
from lollms.paths import LollmsPaths
@ -33,6 +33,7 @@ import urllib
import gc
import ctypes
from functools import partial
import json
def terminate_thread(thread):
if thread:
@ -120,7 +121,6 @@ class LoLLMsAPPI(LollmsApplication):
self.cancel_gen = False
# Keeping track of current discussion and message
self.current_discussion = None
self._current_user_message_id = 0
self._current_ai_message_id = 0
self._message_id = 0
@ -140,7 +140,6 @@ class LoLLMsAPPI(LollmsApplication):
ASCIIColors.success("ok")
# This is used to keep track of messages
self.full_message_list = []
self.download_infos={}
self.connections = {0:{
@ -148,10 +147,6 @@ class LoLLMsAPPI(LollmsApplication):
"generated_text":"",
"cancel_generation": False,
"generation_thread": None,
"current_discussion":None,
"current_message_id":0,
"current_ai_message_id":0,
"current_user_message_id":0,
"processing":False,
"schedule_for_deletion":False
}
@ -168,10 +163,6 @@ class LoLLMsAPPI(LollmsApplication):
"generated_text":"",
"cancel_generation": False,
"generation_thread": None,
"current_discussion":None,
"current_message_id":0,
"current_ai_message_id":0,
"current_user_message_id":0,
"processing":False,
"schedule_for_deletion":False
}
@ -392,6 +383,63 @@ class LoLLMsAPPI(LollmsApplication):
'binding_folder' : binding_folder
}, room=request.sid)
@socketio.on('new_discussion')
def new_discussion(data):
client_id = request.sid
title = data["title"]
self.connections[client_id]["current_discussion"] = self.db.create_discussion(title)
# Get the current timestamp
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
# Return a success response
if self.connections[client_id]["current_discussion"] is None:
self.connections[client_id]["current_discussion"] = self.db.load_last_discussion()
if self.personality.welcome_message!="":
message = self.connections[client_id]["current_discussion"].add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value if self.personality.include_welcome_message_in_disucssion else MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value,
sender_type = SENDER_TYPES.SENDER_TYPES_AI.value,
sender = self.personality.name,
content = self.personality.welcome_message,
metadata = None,
rank = 0,
parent_message_id = -1,
binding = self.config.binding_name,
model = self.config.model_name,
personality = self.config.personalities[self.config.active_personality_id],
created_at=None,
finished_generating_at=None
)
self.socketio.emit('discussion_created',
{'id':self.connections[client_id]["current_discussion"].discussion_id},
room=client_id
)
else:
self.socketio.emit('discussion_created',
{'id':0},
room=client_id
)
@socketio.on('load_discussion')
def load_discussion(data):
client_id = request.sid
if "id" in data:
discussion_id = data["id"]
self.connections[client_id]["current_discussion"] = Discussion(discussion_id, self.db)
else:
if self.connections[client_id]["current_discussion"] is not None:
discussion_id = self.connections[client_id]["current_discussion"].discussion_id
self.connections[client_id]["current_discussion"] = Discussion(discussion_id, self.db)
else:
self.connections[client_id]["current_discussion"] = self.db.create_discussion()
messages = self.connections[client_id]["current_discussion"].get_messages()
self.socketio.emit('discussion',
[m.to_json() for m in messages],
room=client_id
)
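For reference, a minimal client-side sketch of driving these two handlers with python-socketio; the host, port and title are assumptions, while the event names and payload keys come from the handlers above.

import socketio

sio = socketio.Client()

@sio.on('discussion_created')
def on_discussion_created(data):
    # the server replies with the id of the newly created discussion
    print("created discussion", data["id"])
    sio.emit('load_discussion', {"id": data["id"]})

@sio.on('discussion')
def on_discussion(messages):
    # list of message dicts (Message.to_json()) for the loaded discussion
    print("loaded", len(messages), "messages")

sio.connect('http://localhost:9600')          # assumed host/port
sio.emit('new_discussion', {"title": "Demo discussion"})
sio.wait()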
@socketio.on('upload_file')
def upload_file(data):
@ -462,33 +510,29 @@ class LoLLMsAPPI(LollmsApplication):
self.connections[client_id]["cancel_generation"]=False
if not self.model:
self.socketio.emit('model_not_selected',
{
"status":False,
"error":"Model not selected. Please select a model"
}, room=client_id
)
self.notify("Model not selected. Please select a model", False, client_id)
return
if self.is_ready:
if self.current_discussion is None:
if self.connections[client_id]["current_discussion"] is None:
if self.db.does_last_discussion_have_messages():
self.current_discussion = self.db.create_discussion()
self.connections[client_id]["current_discussion"] = self.db.create_discussion()
else:
self.current_discussion = self.db.load_last_discussion()
self.connections[client_id]["current_discussion"] = self.db.load_last_discussion()
message = data["prompt"]
prompt = data["prompt"]
ump = self.config.discussion_prompt_separator +self.config.user_name+": " if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
message_id = self.current_discussion.add_message(
ump.replace(self.config.discussion_prompt_separator,"").replace(":",""),
message,
message_type=MSG_TYPE.MSG_TYPE_FULL.value,
parent=self.message_id
message = self.connections[client_id]["current_discussion"].add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value,
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
sender = ump.replace(self.config.discussion_prompt_separator,"").replace(":",""),
content=prompt,
metadata=None,
parent_message_id=self.message_id
)
self.current_user_message_id = message_id
ASCIIColors.green("Starting message generation by"+self.personality.name)
self.connections[client_id]['generation_thread'] = threading.Thread(target=self.start_message_generation, args=(message, message_id, client_id))
self.connections[client_id]['generation_thread'] = threading.Thread(target=self.start_message_generation, args=(message, message.id, client_id))
self.connections[client_id]['generation_thread'].start()
self.socketio.sleep(0.01)
@ -496,41 +540,19 @@ class LoLLMsAPPI(LollmsApplication):
#tpe = threading.Thread(target=self.start_message_generation, args=(message, message_id, client_id))
#tpe.start()
else:
self.socketio.emit("buzzy", {"message":"I am buzzy. Come back later."}, room=client_id)
self.socketio.sleep(0.01)
ASCIIColors.warning(f"OOps request {client_id} refused!! Server buzy")
self.socketio.emit('infos',
{
"status":'model_not_ready',
"type": "input_message_infos",
'logo': "",
"bot": self.personality.name,
"user": self.personality.user_name,
"message":"",
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
'binding': self.current_discussion.current_message_binding,
'model': self.current_discussion.current_message_model,
'personality': self.current_discussion.current_message_personality,
'created_at': self.current_discussion.current_message_created_at,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
}, room=client_id
)
self.socketio.sleep(0.01)
self.notify("I am buzzy. Come back later.", False, client_id)
@socketio.on('generate_msg_from')
def handle_connection(data):
def generate_msg_from(data):
client_id = request.sid
if self.connections[client_id]["current_discussion"] is None:
self.notify("Please select a discussion", False, client_id)
return
id_ = data['id']
message_id = int(id_)
if message_id==-1:
self.message_id = message_id
message = ""
else:
message = data["prompt"]
self.current_user_message_id = message_id
self.connections[client_id]['generation_thread'] = threading.Thread(target=self.start_message_generation, args=(message, message_id, client_id))
message = self.connections[client_id]["current_discussion"].select_message(id_)
if message is None:
return
self.connections[client_id]['generation_thread'] = threading.Thread(target=self.start_message_generation, args=(message, message.id, client_id))
self.connections[client_id]['generation_thread'].start()
# generation status
@ -542,10 +564,13 @@ class LoLLMsAPPI(LollmsApplication):
@socketio.on('continue_generate_msg_from')
def handle_connection(data):
client_id = request.sid
message_id = int(data['id'])
message = data["prompt"]
self.current_user_message_id = message_id
self.connections[client_id]['generation_thread'] = threading.Thread(target=self.start_message_generation, args=(message, message_id, client_id, True))
if self.connections[client_id]["current_discussion"] is None:
self.notify("Please select a discussion", False, client_id)
return
id_ = data['id']
message = self.connections[client_id]["current_discussion"].select_message(id_)
self.connections[client_id]['generation_thread'] = threading.Thread(target=self.start_message_generation, args=(message, message.id, client_id, True))
self.connections[client_id]['generation_thread'].start()
# generation status
@ -687,58 +712,30 @@ class LoLLMsAPPI(LollmsApplication):
def condition_chatbot(self):
if self.current_discussion is None:
self.current_discussion = self.db.load_last_discussion()
if self.personality.welcome_message!="":
message_type = MSG_TYPE.MSG_TYPE_FULL.value# if self.personality.include_welcome_message_in_disucssion else MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value
message_id = self.current_discussion.add_message(
self.personality.name, self.personality.welcome_message,
message_type,
0,
-1,
binding= self.config["binding_name"],
model = self.config["model_name"],
personality=self.config["personalities"][self.config["active_personality_id"]]
)
self.current_ai_message_id = message_id
else:
message_id = 0
return message_id
def prepare_reception(self, client_id):
self.connections[client_id]["generated_text"] = ""
self.nb_received_tokens = 0
def create_new_discussion(self, title):
self.current_discussion = self.db.create_discussion(title)
# Get the current timestamp
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
# Chatbot conditionning
self.condition_chatbot()
return timestamp
def prepare_query(self, message_id=-1, is_continue=False):
messages = self.current_discussion.get_messages()
self.full_message_list = []
def prepare_query(self, client_id, message_id=-1, is_continue=False):
messages = self.connections[client_id]["current_discussion"].get_messages()
full_message_list = []
for i, message in enumerate(messages):
if message["id"]< message_id or (message_id==-1 and i<len(messages)-1):
if message["type"]<=MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER.value and message["type"]!=MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value:
self.full_message_list.append("\n"+self.config.discussion_prompt_separator+message["sender"]+": "+message["content"].strip())
if message.id< message_id or (message_id==-1 and i<len(messages)-1):
if message.message_type<=MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER.value and message.message_type!=MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value:
full_message_list.append("\n"+self.config.discussion_prompt_separator+message.sender+": "+message.content.strip())
else:
break
link_text = "\n" #self.personality.link_text
if not is_continue:
self.full_message_list.append("\n"+self.config.discussion_prompt_separator +message["sender"].replace(":","")+": "+message["content"].strip()+link_text+self.personality.ai_message_prefix)
full_message_list.append("\n"+self.config.discussion_prompt_separator +message.sender.replace(":","")+": "+message.content.strip()+link_text+self.personality.ai_message_prefix)
else:
self.full_message_list.append("\n"+self.config.discussion_prompt_separator +message["sender"].replace(":","")+": "+message["content"].strip())
full_message_list.append("\n"+self.config.discussion_prompt_separator +message.sender.replace(":","")+": "+message.content.strip())
composed_messages = link_text.join(self.full_message_list)
composed_messages = link_text.join(full_message_list)
t = self.model.tokenize(composed_messages)
cond_tk = self.model.tokenize(self.personality.personality_conditioning)
n_t = len(t)
@ -755,27 +752,27 @@ class LoLLMsAPPI(LollmsApplication):
ASCIIColors.yellow(discussion_messages)
ASCIIColors.yellow(f"prompt size:{len(tokens)} tokens")
return discussion_messages, message["content"], tokens
return discussion_messages, message.content, tokens
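prepare_query builds the prompt by prefixing every visible message with the discussion separator and its sender, joining the entries with the link text and appending the AI prefix so the model answers as the assistant; a reduced sketch of that composition with placeholder values:

separator = "!@>"                      # placeholder for config.discussion_prompt_separator
link_text = "\n"
history = [("user", "Hello"), ("lollms", "Hi! How can I help?"), ("user", "Tell me a joke")]

full_message_list = ["\n" + separator + sender + ": " + content.strip()
                     for sender, content in history]
ai_message_prefix = "\n" + separator + "lollms: "   # placeholder for personality.ai_message_prefix
prompt = link_text.join(full_message_list) + ai_message_prefix
print(prompt)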
def get_discussion_to(self, message_id=-1):
messages = self.current_discussion.get_messages()
self.full_message_list = []
def get_discussion_to(self, client_id, message_id=-1):
messages = self.connections[client_id]["current_discussion"].get_messages()
full_message_list = []
ump = self.config.discussion_prompt_separator +self.config.user_name+": " if self.config.use_user_name_in_discussions else self.personality.user_message_prefix
for message in messages:
if message["id"]<= message_id or message_id==-1:
if message["type"]!=MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER:
if message["sender"]==self.personality.name:
self.full_message_list.append(self.personality.ai_message_prefix+message["content"])
full_message_list.append(self.personality.ai_message_prefix+message["content"])
else:
self.full_message_list.append(ump + message["content"])
full_message_list.append(ump + message["content"])
link_text = "\n"# self.personality.link_text
if len(self.full_message_list) > self.config["nb_messages_to_remember"]:
discussion_messages = self.personality.personality_conditioning+ link_text.join(self.full_message_list[-self.config["nb_messages_to_remember"]:])
if len(full_message_list) > self.config["nb_messages_to_remember"]:
discussion_messages = self.personality.personality_conditioning+ link_text.join(full_message_list[-self.config["nb_messages_to_remember"]:])
else:
discussion_messages = self.personality.personality_conditioning+ link_text.join(self.full_message_list)
discussion_messages = self.personality.personality_conditioning+ link_text.join(full_message_list)
return discussion_messages # Removes the last return
@ -797,6 +794,86 @@ class LoLLMsAPPI(LollmsApplication):
return string
def notify(self, content, status, client_id):
self.socketio.emit('notification', {
'content': content,# self.connections[client_id]["generated_text"],
'status': status
}, room=client_id
)
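Clients can surface these notifications with a plain listener; a short sketch reusing the sio client from the earlier sketch:

@sio.on('notification')
def on_notification(data):
    # status True is informational, False signals an error
    level = "INFO" if data["status"] else "ERROR"
    print(f"[{level}] {data['content']}")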
def new_message(self,
client_id,
sender,
content,
metadata=None,
message_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_FULL,
sender_type:SENDER_TYPES=SENDER_TYPES.SENDER_TYPES_AI
):
msg = self.connections[client_id]["current_discussion"].add_message(
message_type = message_type.value,
sender_type = sender_type.value,
sender = sender,
content = content,
metadata = json.dumps(metadata, indent=4) if metadata is not None and type(metadata)== dict else metadata,
rank = 0,
parent_message_id = self.connections[client_id]["current_discussion"].current_message.id,
binding = self.config["binding_name"],
model = self.config["model_name"],
personality = self.config["personalities"][self.config["active_personality_id"]],
) # first the content is empty, but we'll fill it at the end
self.socketio.emit('new_message',
{
"sender": self.personality.name,
"message_type": message_type.value,
"sender_type": SENDER_TYPES.SENDER_TYPES_AI.value,
"content": content,
"metadata": json.dumps(metadata, indent=4) if metadata is not None and type(metadata)== dict else metadata,
"id": msg.id,
"parent_message_id": msg.parent_message_id,
'binding': self.config["binding_name"],
'model' : self.config["model_name"],
'personality': self.config["personalities"][self.config["active_personality_id"]],
'created_at': self.connections[client_id]["current_discussion"].current_message.created_at,
'finished_generating_at': self.connections[client_id]["current_discussion"].current_message.finished_generating_at,
}, room=client_id
)
def update_message(self, client_id, chunk, metadata, msg_type:MSG_TYPE=None):
self.connections[client_id]["current_discussion"].current_message.finished_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.socketio.emit('update_message', {
"sender": self.personality.name,
'id':self.connections[client_id]["current_discussion"].current_message.id,
'content': chunk,# self.connections[client_id]["generated_text"],
'discussion_id':self.connections[client_id]["current_discussion"].discussion_id,
'message_type': msg_type.value if msg_type is not None else MSG_TYPE.MSG_TYPE_CHUNK.value if self.nb_received_tokens>1 else MSG_TYPE.MSG_TYPE_FULL.value,
'finished_generating_at': self.connections[client_id]["current_discussion"].current_message.finished_generating_at,
'metadata':json.dumps(metadata, indent=4) if metadata is not None and type(metadata)== dict else metadata
}, room=client_id
)
self.socketio.sleep(0.01)
self.connections[client_id]["current_discussion"].update_message(self.connections[client_id]["generated_text"])
def close_message(self, client_id):
# Send final message
self.connections[client_id]["current_discussion"].current_message.finished_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.socketio.emit('close_message', {
"sender": self.personality.name,
"id": self.connections[client_id]["current_discussion"].current_message.id,
"content":self.connections[client_id]["generated_text"],
'binding': self.config["binding_name"],
'model' : self.config["model_name"],
'personality':self.config["personalities"][self.config["active_personality_id"]],
'created_at': self.connections[client_id]["current_discussion"].current_message.created_at,
'finished_generating_at': self.connections[client_id]["current_discussion"].current_message.finished_generating_at,
}, room=client_id
)
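Each generation now yields one 'new_message' event, a stream of 'update_message' events and a final 'close_message'; a hedged listener sketch (chunk-versus-full handling simplified), again reusing the sio client from above:

buffers = {}

@sio.on('new_message')
def on_new_message(data):
    buffers[data["id"]] = data["content"]          # placeholder text sent at creation

@sio.on('update_message')
def on_update_message(data):
    # content carries the latest chunk; a real client would branch on message_type
    buffers[data["id"]] = buffers.get(data["id"], "") + data["content"]

@sio.on('close_message')
def on_close_message(data):
    print(data["sender"], ":", data["content"])    # final, full text from the server

sio.emit('generate_msg', {"prompt": "Hello there"})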
def process_chunk(self, chunk, message_type:MSG_TYPE, metadata:dict={}, client_id:int=0):
"""
0 : a regular message
@ -814,88 +891,16 @@ class LoLLMsAPPI(LollmsApplication):
else:
ASCIIColors.error("--> Step ended:"+chunk)
if message_type == MSG_TYPE.MSG_TYPE_EXCEPTION:
self.notify(chunk,False, client_id)
ASCIIColors.error("--> Exception from personality:"+chunk)
if message_type == MSG_TYPE.MSG_TYPE_NEW_MESSAGE:
if client_id==0:
self.nb_received_tokens = 0
self.current_ai_message_id = self.current_discussion.add_message(
self.personality.name,
"",
message_type = MSG_TYPE.MSG_TYPE_FULL.value,
parent = self.current_user_message_id,
binding = self.config["binding_name"],
model = self.config["model_name"],
personality = self.config["personalities"][self.config["active_personality_id"]]
) # first the content is empty, but we'll fill it at the end
self.socketio.emit('infos',
{
"status":'generation_started',
"type": "input_message_infos",
"bot": self.personality.name,
"user": self.personality.user_name,
"message":chunk,
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
"content":chunk,
self.nb_received_tokens = 0
self.new_message(client_id, self.personality.name, chunk, metadata = metadata["metadata"], message_type= MSG_TYPE(metadata["type"]))
'binding': self.current_discussion.current_message_binding,
'model': self.current_discussion.current_message_model,
'personality': self.current_discussion.current_message_personality,
'created_at': self.current_discussion.current_message_created_at,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
}, room=client_id
)
else:
self.current_ai_message_id = self.current_discussion.add_message(
self.personality.name,
"",
message_type = MSG_TYPE.MSG_TYPE_FULL.value,
parent = self.current_user_message_id,
binding = self.config["binding_name"],
model = self.config["model_name"],
personality = self.config["personalities"][self.config["active_personality_id"]]
) # first the content is empty, but we'll fill it at the end
self.socketio.emit('infos',
{
"status":'generation_started',
"type": "input_message_infos",
"bot": self.personality.name,
"user": self.personality.user_name,
"message":chunk,
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
"content":chunk,
'binding': self.current_discussion.current_message_binding,
'model': self.current_discussion.current_message_model,
'personality': self.current_discussion.current_message_personality,
'created_at': self.current_discussion.current_message_created_at,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
}, room=client_id
)
elif message_type == MSG_TYPE.MSG_TYPE_FINISHED_MESSAGE:
self.socketio.emit('final', {
'data': self.connections[client_id]["generated_text"],
'ai_message_id':self.current_ai_message_id,
'parent':self.current_user_message_id, 'discussion_id':self.current_discussion.discussion_id,
"status":'model_not_ready',
"type": "input_message_infos",
'logo': "",
"bot": self.personality.name,
"user": self.personality.user_name,
"message":self.connections[client_id]["generated_text"],
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
self.close_message(client_id)
'binding': self.current_discussion.current_message_binding,
'model': self.current_discussion.current_message_model,
'personality': self.current_discussion.current_message_personality,
'created_at': self.current_discussion.current_message_created_at,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
}, room=client_id
)
elif message_type == MSG_TYPE.MSG_TYPE_CHUNK:
self.connections[client_id]["generated_text"] += chunk
self.nb_received_tokens += 1
@ -906,34 +911,10 @@ class LoLLMsAPPI(LollmsApplication):
if antiprompt:
ASCIIColors.warning(f"\nDetected hallucination with antiprompt: {antiprompt}")
self.connections[client_id]["generated_text"] = self.remove_text_from_string(self.connections[client_id]["generated_text"],antiprompt)
self.socketio.emit('message', {
'data': self.connections[client_id]["generated_text"],
'user_message_id':self.current_user_message_id,
'ai_message_id':self.current_ai_message_id,
'discussion_id':self.current_discussion.discussion_id,
'message_type': MSG_TYPE.MSG_TYPE_FULL.value,
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
'metadata':metadata
}, room=client_id
)
self.update_message(client_id, self.connections[client_id]["generated_text"], metadata,MSG_TYPE.MSG_TYPE_FULL)
return False
else:
self.socketio.emit('message', {
'data': chunk,# self.connections[client_id]["generated_text"],
'user_message_id':self.current_user_message_id,
'ai_message_id':self.current_ai_message_id,
'discussion_id':self.current_discussion.discussion_id,
'message_type': MSG_TYPE.MSG_TYPE_CHUNK.value if self.nb_received_tokens>1 else MSG_TYPE.MSG_TYPE_FULL.value,
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
'metadata':metadata
}, room=client_id
)
self.socketio.sleep(0.01)
self.current_discussion.update_message(self.current_ai_message_id, self.connections[client_id]["generated_text"])
self.update_message(client_id, chunk, metadata)
# if stop generation is detected then stop
if not self.cancel_gen:
return True
@ -947,32 +928,11 @@ class LoLLMsAPPI(LollmsApplication):
self.connections[client_id]["generated_text"] = chunk
self.nb_received_tokens += 1
ASCIIColors.green(f"Received {self.nb_received_tokens} tokens",end="\r",flush=True)
self.socketio.emit('message', {
'data': self.connections[client_id]["generated_text"],
'user_message_id':self.current_user_message_id,
'ai_message_id':self.current_ai_message_id,
'discussion_id':self.current_discussion.discussion_id,
'message_type': message_type.value,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
'metadata':metadata
}, room=client_id
)
self.socketio.sleep(0.01)
self.update_message(client_id, chunk, metadata)
return True
# Stream the generated text to the frontend
else:
self.socketio.emit('message', {
'data': chunk,
'user_message_id':self.current_user_message_id,
'ai_message_id':self.current_ai_message_id,
'discussion_id':self.current_discussion.discussion_id,
'message_type': message_type.value,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
'metadata':metadata
}, room=client_id
)
self.socketio.sleep(0.01)
self.update_message(client_id, chunk, metadata, message_type)
return True
@ -1038,81 +998,21 @@ class LoLLMsAPPI(LollmsApplication):
ASCIIColors.info(f"Text generation requested by client: {client_id}")
# send the message to the bot
print(f"Received message : {message}")
if self.current_discussion:
print(f"Received message : {message.content}")
if self.connections[client_id]["current_discussion"]:
if not self.model:
self.socketio.emit('message', {
'data': "No model selected. Please make sure you select a model before starting generation",
'user_message_id':self.current_user_message_id,
'ai_message_id':self.current_ai_message_id,
'discussion_id':self.current_discussion.discussion_id,
'message_type': MSG_TYPE.MSG_TYPE_EXCEPTION.value,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
'metadata':{}
}, room=client_id
)
# Send final message
self.socketio.emit('final', {
'data': self.connections[client_id]["generated_text"],
'ai_message_id':self.current_ai_message_id,
'parent':self.current_user_message_id, 'discussion_id':self.current_discussion.discussion_id,
"status":'model_not_ready',
"type": "input_message_infos",
'logo': "",
"bot": self.personality.name,
"user": self.personality.user_name,
"message":self.connections[client_id]["generated_text"],
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
'binding': self.current_discussion.current_message_binding,
'model': self.current_discussion.current_message_model,
'personality': self.current_discussion.current_message_personality,
'created_at': self.current_discussion.current_message_created_at,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
}, room=client_id
)
self.notify("No model selected. Please make sure you select a model before starting generation", False, client_id)
return
# First we need to send the new message ID to the client
if is_continue:
self.current_ai_message_id = message_id
self.current_discussion.load_message(message_id)
self.connections[client_id]["generated_text"] = self.current_discussion.content
self.connections[client_id]["current_discussion"].load_message(message_id)
self.connections[client_id]["generated_text"] = self.connections[client_id]["current_discussion"].content
else:
self.connections[client_id]["generated_text"] = ""
self.current_ai_message_id = self.current_discussion.add_message(
self.personality.name,
"",
message_type = MSG_TYPE.MSG_TYPE_FULL.value,
parent = self.current_user_message_id,
binding = self.config["binding_name"],
model = self.config["model_name"],
personality = self.config["personalities"][self.config["active_personality_id"]]
) # first the content is empty, but we'll fill it at the end
self.socketio.emit('infos',
{
"status":'generation_started',
"type": "input_message_infos",
"bot": self.personality.name,
"user": self.personality.user_name,
"message":message,#markdown.markdown(message),
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
"content":"✍ please stand by ...",
'binding': self.current_discussion.current_message_binding,
'model': self.current_discussion.current_message_model,
'personality': self.current_discussion.current_message_personality,
'created_at': self.current_discussion.current_message_created_at,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
}, room=client_id
)
self.new_message(client_id, self.personality.name, "✍ please stand by ...")
self.socketio.sleep(0.01)
# prepare query and reception
self.discussion_messages, self.current_message, tokens = self.prepare_query(message_id, is_continue)
self.discussion_messages, self.current_message, tokens = self.prepare_query(client_id, message_id, is_continue)
self.prepare_reception(client_id)
self.generating = True
self.connections[client_id]["processing"]=True
@ -1126,33 +1026,10 @@ class LoLLMsAPPI(LollmsApplication):
print()
print("## Done Generation ##")
print()
self.current_discussion.update_message(self.current_ai_message_id, self.connections[client_id]["generated_text"].strip())
self.full_message_list.append(self.connections[client_id]["generated_text"])
self.cancel_gen = False
# Send final message
self.socketio.emit('final', {
'data': self.connections[client_id]["generated_text"],
'ai_message_id':self.current_ai_message_id,
'parent':self.current_user_message_id, 'discussion_id':self.current_discussion.discussion_id,
"status":'model_not_ready',
"type": "input_message_infos",
'logo': "",
"bot": self.personality.name,
"user": self.personality.user_name,
"message":self.connections[client_id]["generated_text"],
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
'binding': self.current_discussion.current_message_binding,
'model': self.current_discussion.current_message_model,
'personality': self.current_discussion.current_message_personality,
'created_at': self.current_discussion.current_message_created_at,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
}, room=client_id
)
self.close_message(client_id)
self.socketio.sleep(0.01)
self.connections[client_id]["processing"]=False
if self.connections[client_id]["schedule_for_deletion"]:
@ -1167,36 +1044,9 @@ class LoLLMsAPPI(LollmsApplication):
self.cancel_gen = False
#No discussion available
ASCIIColors.warning("No discussion selected!!!")
self.socketio.emit('message', {
'data': "No discussion selected!!!",
'user_message_id':ump.replace(self.config.discussion_prompt_separator,"").replace(":",""),
'ai_message_id':self.current_ai_message_id,
'discussion_id':0,
'message_type': MSG_TYPE.MSG_TYPE_EXCEPTION.value
}, room=client_id
)
self.socketio.emit('final', {
'data': "No discussion selected",
'ai_message_id':self.current_ai_message_id,
'parent':self.current_user_message_id,
'discussion_id':0,
"status":'model_not_ready',
"type": "input_message_infos",
'logo': "",
"bot": self.personality.name,
"user": self.personality.user_name,
"message":self.connections[client_id]["generated_text"],
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
'binding': "",
'model': "",
'personality': "",
'created_at': "",
'finished_generating_at': "",
}, room=client_id
)
self.notify("No discussion selected!!!",False, client_id)
print()
return ""

292
api/db.py

@ -3,6 +3,7 @@ import sqlite3
from pathlib import Path
from datetime import datetime
from lollms.helpers import ASCIIColors
import json
__author__ = "parisneo"
__github__ = "https://github.com/ParisNeo/lollms-webui"
@ -19,7 +20,7 @@ class DiscussionsDB:
def create_tables(self):
db_version = 7
db_version = 8
with sqlite3.connect(self.db_path) as conn:
cursor = conn.cursor()
@ -47,14 +48,15 @@ class DiscussionsDB:
sender TEXT NOT NULL,
content TEXT NOT NULL,
type INT NOT NULL,
rank INT NOT NULL,
parent INT,
sender_type INT DEFAULT 0,
rank INT NOT NULL DEFAULT 0,
parent_message_id INT,
created_at TIMESTAMP,
finished_generating_at TIMESTAMP,
discussion_id INTEGER NOT NULL,
metadata JSON,
metadata TEXT,
FOREIGN KEY (discussion_id) REFERENCES discussion(id),
FOREIGN KEY (parent) REFERENCES message(id)
FOREIGN KEY (parent_message_id) REFERENCES message(id)
)
""")
@ -85,9 +87,10 @@ class DiscussionsDB:
'personality',
'sender',
'content',
'type',
'message_type',
'sender_type',
'rank',
'parent',
'parent_message_id',
'created_at',
'metadata',
'finished_generating_at',
@ -106,7 +109,13 @@ class DiscussionsDB:
elif column.endswith('_at'):
cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TIMESTAMP")
elif column=='metadata':
cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} JSON")
cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TEXT")
elif column=='message_type':
cursor.execute(f"ALTER TABLE {table} RENAME COLUMN type TO {column}")
elif column=='sender_type':
cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} INT DEFAULT 0")
elif column=='parent_message_id':
cursor.execute(f"ALTER TABLE {table} RENAME COLUMN parent TO {column}")
else:
cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} TEXT")
ASCIIColors.yellow(f"Added column :{column}")
@ -220,13 +229,13 @@ class DiscussionsDB:
discussion_id = row[0]
discussion_title = row[1]
discussion = {"id": discussion_id, "title":discussion_title, "messages": []}
rows = self.select(f"SELECT sender, content, type, rank, parent, binding, model, personality, created_at, finished_generating_at FROM message WHERE discussion_id=?",(discussion_id,))
rows = self.select(f"SELECT sender, content, type, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at FROM message WHERE discussion_id=?",(discussion_id,))
for message_row in rows:
sender = message_row[1]
content = message_row[2]
content_type = message_row[3]
rank = message_row[4]
parent = message_row[5]
parent_message_id = message_row[5]
binding = message_row[6]
model = message_row[7]
personality = message_row[8]
@ -234,7 +243,7 @@ class DiscussionsDB:
finished_generating_at = message_row[10]
discussion["messages"].append(
{"sender": sender, "content": content, "type": content_type, "rank": rank, "parent": parent, "binding": binding, "model":model, "personality":personality, "created_at":created_at, "finished_generating_at":finished_generating_at}
{"sender": sender, "content": content, "type": content_type, "rank": rank, "parent_message_id": parent_message_id, "binding": binding, "model":model, "personality":personality, "created_at":created_at, "finished_generating_at":finished_generating_at}
)
discussions.append(discussion)
return discussions
@ -252,13 +261,13 @@ class DiscussionsDB:
discussion_id = row[0]
discussion_title = row[1]
discussion = {"id": discussion_id, "title":discussion_title, "messages": []}
rows = self.select(f"SELECT sender, content, type, rank, parent, binding, model, personality, created_at, finished_generating_at FROM message WHERE discussion_id=?",(discussion_id,))
rows = self.select(f"SELECT sender, content, type, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at FROM message WHERE discussion_id=?",(discussion_id,))
for message_row in rows:
sender = message_row[0]
content = message_row[1]
content_type = message_row[2]
rank = message_row[3]
parent = message_row[4]
parent_message_id = message_row[4]
binding = message_row[5]
model = message_row[6]
personality = message_row[7]
@ -266,7 +275,7 @@ class DiscussionsDB:
finished_generating_at = message_row[9]
discussion["messages"].append(
{"sender": sender, "content": content, "type": content_type, "rank": rank, "parent": parent, "binding": binding, "model":model, "personality":personality, "created_at":created_at, "finished_generating_at": finished_generating_at}
{"sender": sender, "content": content, "type": content_type, "rank": rank, "parent_message_id": parent_message_id, "binding": binding, "model":model, "personality":personality, "created_at":created_at, "finished_generating_at": finished_generating_at}
)
discussions.append(discussion)
return discussions
@ -288,7 +297,7 @@ class DiscussionsDB:
content = message_data.get("content")
content_type = message_data.get("type")
rank = message_data.get("rank")
parent = message_data.get("parent")
parent_message_id = message_data.get("parent_message_id")
binding = message_data.get("binding","")
model = message_data.get("model","")
personality = message_data.get("personality","")
@ -299,23 +308,133 @@ class DiscussionsDB:
)
# Insert message into the database
self.insert("INSERT INTO message (sender, content, type, rank, parent, binding, model, personality, created_at, finished_generating_at, discussion_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(sender, content, content_type, rank, parent, binding, model, personality, created_at, finished_generating_at, discussion_id))
self.insert("INSERT INTO message (sender, content, type, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at, discussion_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(sender, content, content_type, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at, discussion_id))
discussions.append(discussion)
return discussions
class Message:
def __init__(
self,
discussion_id,
discussions_db,
message_type,
sender_type,
sender,
content,
metadata = None,
rank = 0,
parent_message_id = 0,
binding = "",
model = "",
personality = "",
created_at = None,
finished_generating_at = None,
id = None,
insert_into_db = False
):
self.discussion_id = discussion_id
self.discussions_db = discussions_db
self.self = self
self.sender = sender
self.sender_type = sender_type
self.content = content
self.message_type = message_type
self.rank = rank
self.parent_message_id = parent_message_id
self.binding = binding
self.model = model
self.metadata = json.dumps(metadata, indent=4) if metadata is not None and type(metadata)== dict else metadata
self.personality = personality
self.created_at = created_at
self.finished_generating_at = finished_generating_at
if insert_into_db:
self.id = self.discussions_db.insert(
"INSERT INTO message (sender, message_type, sender_type, sender, content, metadata, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at, discussion_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(sender, message_type, sender_type, sender, content, metadata, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at, discussion_id)
)
else:
self.id = id
@staticmethod
def get_fields():
return [
"id",
"message_type",
"sender_type",
"sender",
"content",
"metadata",
"rank",
"parent_message_id",
"binding",
"model",
"personality",
"created_at",
"finished_generating_at",
"discussion_id"
]
@staticmethod
def from_db(discussions_db, message_id):
columns = Message.get_fields()
rows = discussions_db.select(
f"SELECT {','.join(columns)} FROM message WHERE id=?", (message_id,)
)
data_dict={
col:rows[0][i]
for i,col in enumerate(columns)
}
data_dict["discussions_db"]=discussions_db
return Message(
**data_dict
)
@staticmethod
def from_dict(discussions_db,data_dict):
data_dict["discussions_db"]=discussions_db
return Message(
**data_dict
)
def insert_into_db(self):
self.id = self.discussions_db.insert(
"INSERT INTO message (sender, content, message_type, rank, parent_message_id, binding, model, personality, created_at, finished_generating_at, discussion_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(self.sender, self.content, self.message_type, self.rank, self.parent_message_id, self.binding, self.model, self.personality, self.created_at, self.finished_generating_at, self.discussion_id)
)
def update_db(self):
self.discussions_db.update(
"UPDATE message SET sender = ?, content = ?, message_type = ?, rank = ?, parent_message_id = ?, binding = ?, model = ?, personality = ?, created_at = ?, finished_generating_at = ? WHERE id = ?",
(self.sender, self.content, self.message_type, self.rank, self.parent_message_id, self.binding, self.model, self.personality, self.created_at, self.finished_generating_at, self.id)
)
def update(self, new_content, commit=True):
self.finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
# print(f"{current_date_time}")
self.discussions_db.update(
f"UPDATE message SET content = ?, finished_generating_at = ? WHERE id = ?",(new_content, self.finished_generating_at,self.id)
)
def to_json(self):
attributes = Message.get_fields()
msgJson = {}
for attribute_name in attributes:
attribute_value = getattr(self, attribute_name, None)
msgJson[attribute_name] = attribute_value
return msgJson
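A minimal sketch of the new Message wrapper, assuming DiscussionsDB is constructed with the database path and passing the enum values as raw ints:

db = DiscussionsDB("discussions.db")               # assumed constructor argument
msg = Message.from_dict(db, {
    "discussion_id": 1,
    "message_type": 0,                             # raw MSG_TYPE value
    "sender_type": 0,                              # 0 = user, per the frontend senderTypes enum
    "sender": "user",
    "content": "Hello",
})
print(msg.to_json()["content"])                    # -> "Hello"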
class Discussion:
def __init__(self, discussion_id, discussions_db:DiscussionsDB):
self.discussion_id = discussion_id
self.discussions_db = discussions_db
self.current_message_binding = ""
self.current_message_model = ""
self.content = ""
self.current_message_personality = ""
self.current_message_created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.current_message_finished_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.messages = []
def load_message(self, id):
"""Gets a list of messages information
@ -323,37 +442,24 @@ class Discussion:
Returns:
Message: The loaded message
"""
rows = self.discussions_db.select(
"SELECT id, sender, content, type, rank, parent, binding, model, personality, created_at, finished_generating_at FROM message WHERE id=?", (id,)
)
if len(rows)>0:
row = rows[0]
self.created_at = row[9]
self.current_message_binding = row[6]
self.current_message_model = row[7]
self.current_message_personality = row[8]
self.content = row[2]
self.current_message_created_at = row[9]
self.current_message_finished_generating_at = row[10]
return {
"id": row[0],
"sender": row[1],
"content": row[2],
"type": row[3],
"rank": row[4],
"parent": row[5],
"binding":row[6],
"model": row[7],
"personality": row[8],
"created_at": row[9],
"finished_generating_at": row[10]
}
self.current_message = Message.from_db(self.discussions_db, id)
return self.current_message
def add_message(self, sender, content, message_type=0, rank=0, parent=0, binding="", model ="", personality="", created_at=None, finished_generating_at=None):
def add_message(
self,
message_type,
sender_type,
sender,
content,
metadata=None,
rank=0,
parent_message_id=0,
binding="",
model ="",
personality="",
created_at=None,
finished_generating_at=None
):
"""Adds a new message to the discussion
Args:
@ -368,19 +474,28 @@ class Discussion:
if finished_generating_at is None:
finished_generating_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.current_message_binding = binding
self.current_message_model = model
self.current_message_personality = personality
self.content = content
self.current_message_created_at = created_at
self.current_message_finished_generating_at = finished_generating_at
message_id = self.discussions_db.insert(
"INSERT INTO message (sender, content, type, rank, parent, binding, model, personality, created_at, finished_generating_at, discussion_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(sender, content, message_type, rank, parent, binding, model, personality, created_at, finished_generating_at, self.discussion_id)
self.current_message = Message(
self.discussion_id,
self.discussions_db,
message_type,
sender_type,
sender,
content,
metadata,
rank,
parent_message_id,
binding,
model,
personality,
created_at,
finished_generating_at,
insert_into_db=True
)
return message_id
self.messages.append(self.current_message)
return self.current_message
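End to end, a discussion and its first message can be created through the same path the socket handlers use; a sketch reusing db from the sketch above:

discussion = db.create_discussion("Demo")
message = discussion.add_message(
    message_type=0,                                # raw MSG_TYPE value
    sender_type=0,                                 # 0 = user, per the frontend senderTypes enum
    sender="user",
    content="Hello",
)
print(message.id, message.discussion_id)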
def rename(self, new_title):
"""Renames the discussion
@ -408,45 +523,36 @@ class Discussion:
Returns:
list: The list of Message objects in this discussion
"""
columns = Message.get_fields()
rows = self.discussions_db.select(
"SELECT id, sender, content, type, rank, parent, binding, model, personality, created_at, finished_generating_at FROM message WHERE discussion_id=?", (self.discussion_id,)
f"SELECT {','.join(columns)} FROM message WHERE discussion_id=?", (self.discussion_id,)
)
msg_dict = [{ c:row[i] for i,c in enumerate(columns)} for row in rows]
self.messages=[]
for msg in msg_dict:
self.messages.append(Message.from_dict(self.discussions_db, msg))
return [{
"id": row[0],
"sender": row[1],
"content": row[2],
"type": row[3],
"rank": row[4],
"parent": row[5],
"binding":row[6],
"model": row[7],
"personality": row[8],
"created_at": row[9],
"finished_generating_at": row[10]
} for row in rows]
if len(self.messages)>0:
self.current_message = self.messages[-1]
def update_message(self, message_id, new_content):
return self.messages
def select_message(self, message_id):
for message in self.messages:
if message.id == message_id:
self.current_message = message
return message
return None
def update_message(self, new_content):
"""Updates the content of a message
Args:
message_id (int): The id of the message to be changed
new_content (str): The nex message content
"""
current_date_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.current_message_finished_generating_at = current_date_time
# print(f"{current_date_time}")
self.discussions_db.update(
f"UPDATE message SET content = ?, finished_generating_at = ? WHERE id = ?",(new_content, current_date_time,message_id)
)
"""
stuff = self.discussions_db.select(
f"Select finished_generating_at from message WHERE id = ?",(message_id,)
)
print(stuff)
"""
self.current_message.update(new_content)
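Editing an existing message follows the select-then-update flow; a sketch continuing from the previous one:

discussion.get_messages()                          # refreshes discussion.messages from the db
selected = discussion.select_message(message.id)   # becomes the current message
if selected is not None:
    discussion.update_message("Hello (edited)")    # rewrites content and finished_generating_at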
def message_rank_up(self, message_id):
"""Increments the rank of the message

94
app.py

@ -14,6 +14,8 @@ __github__ = "https://github.com/ParisNeo/lollms-webui"
__copyright__ = "Copyright 2023, "
__license__ = "Apache 2.0"
__version__ ="4.0"
main_repo = "https://github.com/ParisNeo/lollms-webui.git"
import os
import sys
@ -189,7 +191,8 @@ class LoLLMsWebUI(LoLLMsAPPI):
# Endpoints
# =========================================================================================
self.add_endpoint("/get_lollms_version", "get_lollms_version", self.get_lollms_version, methods=["POST"])
self.add_endpoint("/get_lollms_version", "get_lollms_version", self.get_lollms_version, methods=["GET"])
self.add_endpoint("/get_lollms_webui_version", "get_lollms_webui_version", self.get_lollms_webui_version, methods=["GET"])
self.add_endpoint("/reload_binding", "reload_binding", self.reload_binding, methods=["POST"])
@ -293,16 +296,12 @@ class LoLLMsWebUI(LoLLMsAPPI):
self.add_endpoint("/export_discussion", "export_discussion", self.export_discussion, methods=["GET"])
self.add_endpoint("/export", "export", self.export, methods=["GET"])
self.add_endpoint(
"/new_discussion", "new_discussion", self.new_discussion, methods=["GET"]
)
self.add_endpoint("/stop_gen", "stop_gen", self.stop_gen, methods=["GET"])
self.add_endpoint("/rename", "rename", self.rename, methods=["POST"])
self.add_endpoint("/edit_title", "edit_title", self.edit_title, methods=["POST"])
self.add_endpoint(
"/load_discussion", "load_discussion", self.load_discussion, methods=["POST"]
)
self.add_endpoint(
"/delete_discussion",
"delete_discussion",
@ -311,7 +310,7 @@ class LoLLMsWebUI(LoLLMsAPPI):
)
self.add_endpoint(
"/update_message", "update_message", self.update_message, methods=["GET"]
"/edit_message", "edit_message", self.edit_message, methods=["GET"]
)
self.add_endpoint(
"/message_rank_up", "message_rank_up", self.message_rank_up, methods=["GET"]
@ -1146,6 +1145,12 @@ class LoLLMsWebUI(LoLLMsAPPI):
version = pkg_resources.get_distribution('lollms').version
ASCIIColors.yellow("Lollms version : "+ version)
return jsonify({"version":version})
def get_lollms_webui_version(self):
version = __version__
ASCIIColors.yellow("Lollms webui version : "+ version)
return jsonify({"version":version})
def reload_binding(self):
try:
@ -1411,84 +1416,69 @@ class LoLLMsWebUI(LoLLMsAPPI):
def rename(self):
data = request.get_json()
client_id = data["client_id"]
title = data["title"]
self.current_discussion.rename(title)
self.connections[client_id]["current_discussion"].rename(title)
return "renamed successfully"
def edit_title(self):
data = request.get_json()
title = data["title"]
discussion_id = data["id"]
self.current_discussion = Discussion(discussion_id, self.db)
self.current_discussion.rename(title)
return "title renamed successfully"
data = request.get_json()
client_id = data["client_id"]
title = data["title"]
discussion_id = data["id"]
self.connections[client_id]["current_discussion"] = Discussion(discussion_id, self.db)
self.connections[client_id]["current_discussion"].rename(title)
return jsonify({'status':True})
def load_discussion(self):
data = request.get_json()
if "id" in data:
discussion_id = data["id"]
self.current_discussion = Discussion(discussion_id, self.db)
else:
if self.current_discussion is not None:
discussion_id = self.current_discussion.discussion_id
self.current_discussion = Discussion(discussion_id, self.db)
else:
self.current_discussion = self.db.create_discussion()
messages = self.current_discussion.get_messages()
return jsonify(messages), {'Content-Type': 'application/json; charset=utf-8'}
def delete_discussion(self):
data = request.get_json()
discussion_id = data["id"]
self.current_discussion = Discussion(discussion_id, self.db)
self.current_discussion.delete_discussion()
self.current_discussion = None
return jsonify({})
data = request.get_json()
client_id = data["client_id"]
discussion_id = data["id"]
self.connections[client_id]["current_discussion"] = Discussion(discussion_id, self.db)
self.connections[client_id]["current_discussion"].delete_discussion()
self.connections[client_id]["current_discussion"] = None
return jsonify({'status':True})
def update_message(self):
discussion_id = request.args.get("id")
new_message = request.args.get("message")
def edit_message(self):
client_id = request.args.get("client_id")
discussion_id = request.args.get("id")
new_message = request.args.get("message")
try:
self.current_discussion.update_message(discussion_id, new_message)
self.connections[client_id]["current_discussion"].edit_message(discussion_id, new_message)
return jsonify({"status": True})
except Exception as ex:
return jsonify({"status": False, "error":str(ex)})
def message_rank_up(self):
discussion_id = request.args.get("id")
client_id = request.args.get("client_id")
discussion_id = request.args.get("id")
try:
new_rank = self.current_discussion.message_rank_up(discussion_id)
new_rank = self.connections[client_id]["current_discussion"].message_rank_up(discussion_id)
return jsonify({"status": True, "new_rank": new_rank})
except Exception as ex:
return jsonify({"status": False, "error":str(ex)})
def message_rank_down(self):
client_id = request.args.get("client_id")
discussion_id = request.args.get("id")
try:
new_rank = self.current_discussion.message_rank_down(discussion_id)
new_rank = self.connections[client_id]["current_discussion"].message_rank_down(discussion_id)
return jsonify({"status": True, "new_rank": new_rank})
except Exception as ex:
return jsonify({"status": False, "error":str(ex)})
def delete_message(self):
client_id = request.args.get("client_id")
discussion_id = request.args.get("id")
if self.current_discussion is None:
if self.connections[client_id]["current_discussion"] is None:
return jsonify({"status": False,"message":"No discussion is selected"})
else:
new_rank = self.current_discussion.delete_message(discussion_id)
new_rank = self.connections[client_id]["current_discussion"].delete_message(discussion_id)
ASCIIColors.yellow("Message deleted")
return jsonify({"status":True,"new_rank": new_rank})
def new_discussion(self):
title = request.args.get("title")
timestamp = self.create_new_discussion(title)
# Return a success response
return json.dumps({"id": self.current_discussion.discussion_id, "time": timestamp, "welcome_message":self.personality.welcome_message, "sender":self.personality.name})
def set_binding(self):
data = request.get_json()

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

8
web/dist/assets/index-aa4e8aed.css vendored Normal file

File diff suppressed because one or more lines are too long

4
web/dist/index.html vendored

@ -6,8 +6,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LoLLMS WebUI - Welcome</title>
<script type="module" crossorigin src="/assets/index-88f48bb9.js"></script>
<link rel="stylesheet" href="/assets/index-87e6454b.css">
<script type="module" crossorigin src="/assets/index-69b520c3.js"></script>
<link rel="stylesheet" href="/assets/index-aa4e8aed.css">
</head>
<body>
<div id="app"></div>


@ -1,66 +1,115 @@
<template>
<pre class="json-viewer">
<template v-if="isObject">
<span @click="toggleCollapsed" class="toggle-icon">
<i v-if="collapsed" class="fas fa-plus-circle"></i>
<i v-else class="fas fa-minus-circle"></i>
</span>
</template>
{{ formattedJson }}
</pre>
</template>
<script>
export default {
props: {
data: {
type: [Object, Array],
required: true,
},
<div v-if="isContentPresent">
<div class="collapsible-section cursor-pointer mb-4 font-bold hover:text-gray-900" @click="toggleCollapsible">
<span class="toggle-icon mr-1">
<i v-if="collapsed" class="fas fa-plus-circle text-gray-600"></i>
<i v-else class="fas fa-minus-circle text-gray-600"></i>
</span>
{{ jsonFormText }}
</div>
<div v-show="!collapsed">
<div class="json-viewer max-h-64 overflow-auto p-4 bg-gray-100 border border-gray-300 rounded dark:bg-gray-600">
<template v-if="isObject">
<span @click="toggleCollapsed" class="toggle-icon cursor-pointer mr-1">
<i v-if="collapsed" class="fas fa-plus-circle text-gray-600"></i>
<i v-else class="fas fa-minus-circle text-gray-600"></i>
</span>
</template>
<pre v-html="formattedJson"></pre>
</div>
</div>
</div>
</template>
<script>
export default {
props: {
jsonData: {
type: [Object, Array, String],
default: null,
},
data() {
return {
collapsed: false,
};
jsonFormText: {
type: String,
default: "JSON Form",
},
computed: {
formattedJson() {
return this.collapsed ? '{}' : JSON.stringify(this.data, null, 2);
},
isObject() {
return typeof this.data === 'object' && this.data !== null;
},
},
data() {
return {
collapsed: true,
};
},
computed: {
formattedJson() {
return this.jsonData.replace(/\n/g, '<br>');
},
methods: {
toggleCollapsed() {
isObject() {
return typeof this.jsonData === 'object' && this.jsonData !== null;
},
isContentPresent() {
return (
this.jsonData !== null &&
(typeof this.jsonData !== 'string' || this.jsonData.trim() !== '')
);
},
},
methods: {
toggleCollapsed() {
this.collapsed = !this.collapsed;
},
toggleCollapsible() {
if (!this.isObject) {
this.collapsed = !this.collapsed;
},
}
},
};
</script>
<style>
.json-viewer {
max-height: 300px;
overflow-y: auto;
padding: 10px;
background-color: #f1f1f1;
border: 1px solid #ccc;
border-radius: 4px;
}
.toggle-icon {
cursor: pointer;
margin-right: 0.25rem;
}
.toggle-icon i {
color: #4a5568;
}
.toggle-icon i:hover {
color: #1a202c;
}
</style>
},
};
</script>
<style>
.collapsible-section {
cursor: pointer;
margin-bottom: 10px;
font-weight: bold;
}
.collapsible-section:hover {
color: #1a202c;
}
.collapsible-section .toggle-icon {
margin-right: 0.25rem;
}
.collapsible-section .toggle-icon i {
color: #4a5568;
}
.collapsible-section .toggle-icon i:hover {
color: #1a202c;
}
.json-viewer {
max-height: 300px;
max-width: 700px;
flex: auto;
overflow-y: auto;
padding: 10px;
background-color: #f1f1f1;
border: 1px solid #ccc;
border-radius: 4px;
}
.json-viewer .toggle-icon {
cursor: pointer;
margin-right: 0.25rem;
}
.json-viewer .toggle-icon i {
color: #4a5568;
}
.json-viewer .toggle-icon i:hover {
color: #1a202c;
}
</style>


@ -135,13 +135,15 @@
</div>
</div>
<MarkdownRenderer ref="mdRender" v-if="!editMsgMode" :markdown-text="message.content">
<MarkdownRenderer ref="mdRender" v-if="!editMsgMode && !message.metadata" :markdown-text="message.content">
</MarkdownRenderer>
<textarea v-if="editMsgMode" ref="mdTextarea" :rows="4"
<textarea v-if="editMsgMode && !message.metadata" ref="mdTextarea" :rows="4"
class="block p-2.5 w-full text-sm text-gray-900 bg-gray-50 rounded-lg border border-gray-300 focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-blue-500 dark:focus:border-blue-500"
:style="{ minHeight: mdRenderHeight + `px` }" placeholder="Enter message here..."
v-model="this.message.content"></textarea>
<JsonViewer :jsonFormText="message.content" :jsonData="message.metadata" />
</div>
<!-- FOOTER -->
<div class="text-sm text-gray-400 mt-2">
@ -183,13 +185,16 @@ import { nextTick } from 'vue'
import feather from 'feather-icons'
import MarkdownRenderer from './MarkdownRenderer.vue';
import Step from './Step.vue';
import JsonViewer from "./JsonViewer.vue"
export default {
// eslint-disable-next-line vue/multi-word-component-names
name: 'Message',
emits: ['copy', 'delete', 'rankUp', 'rankDown', 'updateMessage', 'resendMessage', 'continueMessage'],
components: {
MarkdownRenderer,
Step
Step,
JsonViewer,
},
props: {
message: Object,


@ -10,7 +10,7 @@
<img class="w-24 animate-bounce" title="LoLLMS WebUI" src="@/assets/logo.png" alt="Logo">
<div class="flex flex-col items-start">
<p class="text-2xl ">Lord of Large Language Models</p>
<p class="text-2xl ">Lord of Large Language Models v {{ version }} </p>
<p class="text-gray-400 text-base">One tool to rule them all</p>
</div>
@ -371,13 +371,24 @@ export default {
MSG_TYPE_JSON_INFOS : 11,// A JSON output that is useful for summarizing the process of generation used by personalities like chain of thoughts and tree of thoughts
MSG_TYPE_REF : 12,// References (in form of [text](path))
MSG_TYPE_CODE : 13,// A javascript code to execute
MSG_TYPE_UI : 14 // A vue.js component to show (we need to build some and parse the text to show it)
MSG_TYPE_UI : 14,// A vue.js component to show (we need to build some and parse the text to show it)
MSG_TYPE_NEW_MESSAGE : 15,// A new message
MSG_TYPE_FINISHED_MESSAGE : 17 // End of current message
},
list: [], // Discussion list
tempList: [], // Copy of Discussion list (used for keeping the original list during filtering discussions/searching action)
currentDiscussion: {}, // Current/selected discussion id
discussionArr: [],
// Sender types
senderTypes: {
SENDER_TYPES_USER : 0, // Sent by user
SENDER_TYPES_AI : 1, // Sent by the AI
SENDER_TYPES_SYSTEM : 2, // Sent by the system
},
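// Illustrative use (editor's addition, not part of this commit): sender_type lets the
// UI tell user, AI and system messages apart, e.g.
//   const isUser = message.sender_type === this.senderTypes.SENDER_TYPES_USER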
version : "4.0",
list : [], // Discussion list
tempList : [], // Copy of Discussion list (used for keeping the original list during filtering discussions/searching action)
currentDiscussion : {}, // Current/selected discussion id
discussionArr : [],
loading: false,
filterTitle: '',
filterInProgress: false,
@@ -460,42 +471,64 @@ export default {
return []
}
},
async load_discussion(id) {
try {
if (id) {
console.log("Loading discussion", id)
this.loading = true
this.discussionArr=[]
this.setDiscussionLoading(id, this.loading)
const res = await axios.post('/load_discussion', {
id: id
})
load_discussion(id, next) {
if (id) {
console.log("Loading discussion", id)
this.loading = true
this.discussionArr=[]
this.setDiscussionLoading(id, this.loading)
socket.on('discussion', (data)=>{
this.loading = false
this.setDiscussionLoading(id, this.loading)
if (res) {
if (data) {
console.log("received discussion")
console.log(data)
// Filter out the user and bot entries
this.discussionArr = res.data.filter((item) =>
item.type == this.msgTypes.MSG_TYPE_FULL ||
item.type == this.msgTypes.MSG_TYPE_FULL_INVISIBLE_TO_AI
this.discussionArr = data.filter((item) =>
item.message_type == this.msgTypes.MSG_TYPE_CHUNK ||
item.message_type == this.msgTypes.MSG_TYPE_FULL ||
item.message_type == this.msgTypes.MSG_TYPE_FULL_INVISIBLE_TO_AI ||
item.message_type == this.msgTypes.MSG_TYPE_CODE ||
item.message_type == this.msgTypes.MSG_TYPE_JSON_INFOS ||
item.message_type == this.msgTypes.MSG_TYPE_UI
)
console.log("this.discussionArr")
console.log(this.discussionArr)
if(next){
next()
}
}
socket.off('discussion')
})
socket.emit('load_discussion',{"id":id});
}
} catch (error) {
console.log(error.message, 'load_discussion')
this.loading = false
this.setDiscussionLoading(id, this.loading)
}
},
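// Editor's note (not part of this commit): load_discussion and new_discussion both
// follow the same one-shot socket round trip: register a listener, emit the request,
// then remove the listener inside the handler. A hedged helper sketch that could
// factor this out (assumes the socket.io-client on/off/emit API already used in this file):
socketRequest(requestEvent, payload, responseEvent) {
return new Promise((resolve) => {
socket.on(responseEvent, (data) => {
socket.off(responseEvent) // one-shot listener
resolve(data)
})
socket.emit(requestEvent, payload)
})
},
// usage sketch: this.socketRequest('load_discussion', { id }, 'discussion').then((data) => { /* filter as above */ })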
async new_discussion(title) {
new_discussion(title) {
try {
const res = await axios.get('/new_discussion', { params: { title: title } })
if (res) {
return res.data
}
this.loading = true
socket.on('discussion_created',(data)=>{
socket.off('discussion_created')
this.list_discussions().then(()=>{
const index = this.list.findIndex((x) => x.id == data.id)
const discussionItem = this.list[index]
this.selectDiscussion(discussionItem)
this.load_discussion(data.id,()=>{
this.loading = false
nextTick(() => {
const selectedDisElement = document.getElementById('dis-' + data.id)
this.scrollToElement(selectedDisElement)
console.log("Scrolling tp "+selectedDisElement)
})
})
});
});
console.log("new_discussion ", title)
socket.emit('new_discussion', {title:title});
} catch (error) {
console.log("Error: Could not create new discussion", error.message)
return {}
@@ -507,6 +540,7 @@ export default {
this.loading = true
this.setDiscussionLoading(id, this.loading)
await axios.post('/delete_discussion', {
client_id: this.client_id,
id: id
})
this.loading = false
@@ -524,6 +558,7 @@ export default {
this.loading = true
this.setDiscussionLoading(id, this.loading)
const res = await axios.post('/edit_title', {
client_id: this.client_id,
id: id,
title: new_title
})
@@ -544,7 +579,7 @@ export default {
},
async delete_message(id) {
try {
const res = await axios.get('/delete_message', { params: { id: id } })
const res = await axios.get('/delete_message', { params: { client_id: this.client_id, id: id } })
if (res) {
return res.data
@@ -571,7 +606,7 @@ export default {
},
async message_rank_up(id) {
try {
const res = await axios.get('/message_rank_up', { params: { id: id } })
const res = await axios.get('/message_rank_up', { params: { client_id: this.client_id, id: id } })
if (res) {
return res.data
@@ -583,7 +618,7 @@ export default {
},
async message_rank_down(id) {
try {
const res = await axios.get('/message_rank_down', { params: { id: id } })
const res = await axios.get('/message_rank_down', { params: { client_id: this.client_id, id: id } })
if (res) {
return res.data
@@ -593,9 +628,9 @@ export default {
return {}
}
},
async update_message(id, message) {
async edit_message(id, message) {
try {
const res = await axios.get('/update_message', { params: { id: id, message: message } })
const res = await axios.get('/edit_message', { params: { client_id: this.client_id, id: id, message: message } })
if (res) {
return res.data
@@ -668,13 +703,13 @@ export default {
localStorage.setItem('selected_discussion', this.currentDiscussion.id)
await this.load_discussion(item.id)
if (this.discussionArr.length > 1) {
this.load_discussion(item.id, ()=>{
if (this.discussionArr.length > 1) {
if (this.currentDiscussion.title === '' || this.currentDiscussion.title === null) {
this.changeTitleUsingUserMSG(this.currentDiscussion.id, this.discussionArr[1].content)
}
}
})
}
else{
@@ -686,13 +721,14 @@ export default {
localStorage.setItem('selected_discussion', this.currentDiscussion.id)
await this.load_discussion(item.id)
if (this.discussionArr.length > 1) {
if (this.currentDiscussion.title === '' || this.currentDiscussion.title === null) {
this.changeTitleUsingUserMSG(this.currentDiscussion.id, this.discussionArr[1].content)
this.load_discussion(item.id, ()=>{
if (this.discussionArr.length > 1) {
if (this.currentDiscussion.title === '' || this.currentDiscussion.title === null) {
this.changeTitleUsingUserMSG(this.currentDiscussion.id, this.discussionArr[1].content)
}
}
}
});
}
}
@@ -774,12 +810,11 @@ export default {
let usrMessage = {
content: msgObj.message,
id: msgObj.id,
//parent: 10,
rank: 0,
sender: msgObj.user,
created_at: msgObj.created_at,
steps: []
//type: 0
}
this.discussionArr.push(usrMessage)
nextTick(() => {
@@ -793,19 +828,19 @@ export default {
// const lastMsg = this.discussionArr[this.discussionArr.length - 1]
// lastMsg.content = msgObj.message
// lastMsg.id = msgObj.user_message_id
// lastMsg.id = msgObj.user_id
// // lastMsg.parent=msgObj.parent
// lastMsg.rank = msgObj.rank
// lastMsg.sender = msgObj.user
// // lastMsg.type=msgObj.type
const index = this.discussionArr.indexOf(item => item.id = msgObj.user_message_id)
const index = this.discussionArr.findIndex(item => item.id == msgObj.user_id)
const newMessage ={
binding: msgObj.binding,
content: msgObj.message,
created_at: msgObj.created_at,
type: msgObj.type,
finished_generating_at: msgObj.finished_generating_at,
id: msgObj.user_message_id,
id: msgObj.user_id,
model: msgObj.model,
personality: msgObj.personality,
sender: msgObj.user,
@@ -823,88 +858,57 @@ export default {
this.$store.dispatch('setIsConnected',true);
return true
},
socketIODisonnected() {
socketIODisconnected() {
console.log("socketIOConnected")
this.$store.dispatch('setIsConnected',false);
return true
},
createBotMsg(msgObj) {
// Update previous message with response user data
//
// msgObj
// "status": "if the model is not ready this will inform the user that he can't promt the model"
// "type": "input_message_infos",
// "bot": self.personality.name,
// "user": self.personality.user_name,
// "message":message,#markdown.markdown(message),
// "user_message_id": self.current_user_message_id,
// "ai_message_id": self.current_ai_message_id,
new_message(msgObj) {
console.log('create bot', msgObj);
if (msgObj["status"] == "generation_started") {
this.updateLastUserMsg(msgObj)
// Create response message
let responseMessage = {
//content:msgObj.data,
content: "✍ please stand by ...",
created_at:msgObj.created_at,
binding:msgObj.binding,
model:msgObj.model,
id: msgObj.ai_message_id,
parent: msgObj.user_message_id,
personality:msgObj.personality,
rank: 0,
sender: msgObj.bot,
type:msgObj.type,
steps: []
let responseMessage = {
sender: msgObj.sender,
message_type: msgObj.message_type,
sender_type: msgObj.sender_type,
content: msgObj.content,//" please stand by ...",
id: msgObj.id,
parent_id: msgObj.parent_id,
}
this.discussionArr.push(responseMessage)
// nextTick(() => {
// const msgList = document.getElementById('messages-list')
binding: msgObj.binding,
model: msgObj.model,
personality: msgObj.personality,
// this.scrollBottom(msgList)
created_at: msgObj.created_at,
finished_generating_at: msgObj.finished_generating_at,
rank: 0,
// })
steps : [],
metadata : msgObj.metadata
}
console.log(responseMessage)
this.discussionArr.push(responseMessage)
// nextTick(() => {
// const msgList = document.getElementById('messages-list')
if (this.currentDiscussion.title === '' || this.currentDiscussion.title === null) {
if (msgObj.type == "input_message_infos") {
// This is a user input
this.changeTitleUsingUserMSG(this.currentDiscussion.id, msgObj.message)
}
}
console.log("infos", msgObj)
// this.scrollBottom(msgList)
// })
if (this.currentDiscussion.title === '' || this.currentDiscussion.title === null) {
this.changeTitleUsingUserMSG(this.currentDiscussion.id, msgObj.message)
}
console.log("infos", msgObj)
/*
}
else {
this.$refs.toast.showToast("It seems that no model has been loaded. Please download and install a model first, then try again.", 4, false)
this.isGenerating = false
this.setDiscussionLoading(this.currentDiscussion.id, this.isGenerating)
this.chime.play()
}
}*/
},
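// Editor's summary (added for clarity, based on the socket handlers wired in created() below):
// the streaming lifecycle introduced by this commit is
//   'new_message'    -> new_message(msgObj)          pushes a placeholder entry into discussionArr
//   'update_message' -> streamMessageContent(msgObj) fills it with chunks or full content
//   'close_message'  -> finalMsgEvent(msgObj)        writes the final content and finished_generating_at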
talk(pers){
this.isGenerating = true;
this.setDiscussionLoading(this.currentDiscussion.id, this.isGenerating);
axios.get('/get_generation_status', {}).then((res) => {
if (res) {
//console.log(res.data.status);
if (!res.data.status) {
console.log('Generating message from ',res.data.status);
socket.emit('generate_msg_from', { id: -1 });
// Temp data
let lastmsgid =0
if(this.discussionArr.length>0){
lastmsgid= Number(this.discussionArr[this.discussionArr.length - 1].id) + 1
}
}
else {
console.log("Already generating");
}
}
}).catch((error) => {
console.log("Error: Could not get generation status", error);
});
},
sendMsg(msg) {
@@ -936,6 +940,25 @@ export default {
user: this.$store.state.config.user_name,
created_at: new Date().toLocaleString(),
sender: this.$store.state.config.user_name,
message_type: this.msgTypes.MSG_TYPE_FULL,
sender_type: this.senderTypes.SENDER_TYPES_USER,
content: msg,
id: lastmsgid,
parent_id: lastmsgid,
binding: "",
model: "",
personality: "",
created_at: new Date().toLocaleString(),
finished_generating_at: new Date().toLocaleString(),
rank: 0,
steps: [],
metadata: {}
};
this.createUserMsg(usrMessage);
@@ -948,38 +971,55 @@ export default {
console.log("Error: Could not get generation status", error);
});
},
notify(notif){
this.isGenerating = false
this.setDiscussionLoading(this.currentDiscussion.id, this.isGenerating);
nextTick(() => {
const msgList = document.getElementById('messages-list')
this.scrollBottom(msgList)
})
this.$refs.toast.showToast(notif.content, 5, notif.status)
this.chime.play()
},
streamMessageContent(msgObj) {
// Streams response message content from binding
//console.log("Received message",msgObj)
const parent = msgObj.user_message_id
const discussion_id = msgObj.discussion_id
this.setDiscussionLoading(discussion_id, true);
if (this.currentDiscussion.id == discussion_id) {
this.isGenerating = true;
const index = this.discussionArr.findIndex((x) => x.parent == parent && x.id == msgObj.ai_message_id)
const index = this.discussionArr.findIndex((x) => x.id == msgObj.id)
const messageItem = this.discussionArr[index]
if (
messageItem && msgObj.message_type==this.msgTypes.MSG_TYPE_FULL ||
messageItem && msgObj.message_type==this.msgTypes.MSG_TYPE_FULL_INVISIBLE_TO_AI
messageItem && (msgObj.message_type==this.msgTypes.MSG_TYPE_FULL ||
msgObj.message_type==this.msgTypes.MSG_TYPE_FULL_INVISIBLE_TO_AI)
) {
messageItem.content = msgObj.data
messageItem.content = msgObj.content
messageItem.finished_generating_at = msgObj.finished_generating_at
}
else if(messageItem && msgObj.message_type==this.msgTypes.MSG_TYPE_CHUNK){
messageItem.content += msgObj.data
messageItem.content += msgObj.content
} else if (msgObj.message_type == this.msgTypes.MSG_TYPE_STEP_START){
console.log(msgObj.metadata)
messageItem.steps.push({"message":msgObj.data,"done":false, "status":true })
messageItem.steps.push({"message":msgObj.content,"done":false, "status":true })
} else if (msgObj.message_type == this.msgTypes.MSG_TYPE_STEP_END) {
// Find the step with the matching message and update its 'done' property to true
const matchingStep = messageItem.steps.find(step => step.message === msgObj.data);
const matchingStep = messageItem.steps.find(step => step.message === msgObj.content);
if (matchingStep) {
matchingStep.done = true;
matchingStep.status=msgObj.metadata.status
try {
const metadata = JSON.parse(msgObj.metadata);
matchingStep.status=metadata.status
console.log(metadata);
} catch (error) {
console.error('Error parsing JSON:', error.message);
}
}
} else if (msgObj.message_type == this.msgTypes.MSG_TYPE_JSON_INFOS) {
console.log("JSON message")
messageItem.metadata = msgObj.metadata
} else if (msgObj.message_type == this.msgTypes.MSG_TYPE_EXCEPTION) {
this.$refs.toast.showToast(msgObj.data, 5, false)
this.$refs.toast.showToast(msgObj.content, 5, false)
}
// // Disables as per request
// nextTick(() => {
@@ -1011,17 +1051,7 @@ export default {
// gets new discussion list, selects
// newly created discussion,
// scrolls to the discussion
this.loading = true
const res = await this.new_discussion()
this.loading = false
await this.list_discussions()
const index = this.list.findIndex((x) => x.id == res.id)
const discussionItem = this.list[index]
this.selectDiscussion(discussionItem)
nextTick(() => {
const selectedDisElement = document.getElementById('dis-' + res.id)
this.scrollToElement(selectedDisElement)
})
this.new_discussion(null)
},
loadLastUsedDiscussion() {
// Checks local storage for last selected discussion
@@ -1180,7 +1210,7 @@ export default {
},
async updateMessage(msgId, msg) {
await this.update_message(msgId, msg).then(() => {
await this.edit_message(msgId, msg).then(() => {
const message = this.discussionArr[this.discussionArr.findIndex(item => item.id == msgId)]
message.content = msg
@@ -1202,7 +1232,8 @@ export default {
this.setDiscussionLoading(this.currentDiscussion.id, this.isGenerating);
axios.get('/get_generation_status', {}).then((res) => {
if (res) {
console.log(res);
console.log("--------------------")
console.log(msgId);
if (!res.data.status) {
socket.emit('generate_msg_from', { prompt: msg, id: msgId });
}
@@ -1252,29 +1283,16 @@ export default {
console.log("final", msgObj)
// Last message contains hallucination suppression so we need to update the message content too
const parent = msgObj.parent
const parent_id = msgObj.parent_id
const discussion_id = msgObj.discussion_id
if (this.currentDiscussion.id == discussion_id) {
const index = this.discussionArr.findIndex((x) => x.parent == parent && x.id == msgObj.ai_message_id)
const finalMessage = {
binding:msgObj.binding,
content:msgObj.data,
created_at:msgObj.created_at,
finished_generating_at:msgObj.finished_generating_at,
id: msgObj.ai_message_id,
model:msgObj.model,
parent: msgObj.user_message_id,
personality:msgObj.personality,
rank:0,
steps:msgObj.steps,
sender:msgObj.bot,
type:msgObj.type
}
this.discussionArr[index]=finalMessage
const index = this.discussionArr.findIndex((x) => x.id == msgObj.id)
this.discussionArr[index].content = msgObj.content
this.discussionArr[index].finished_generating_at = msgObj.finished_generating_at
// const messageItem = this.discussionArr[index]
// if (messageItem) {
// messageItem.content = msgObj.data
// messageItem.content = msgObj.content
// }
}
nextTick(() => {
@@ -1503,6 +1521,13 @@ export default {
},
async created() {
axios.get('/get_lollms_webui_version', {}).then((res) => {
if (res) {
this.version = res.data.version
}
}).catch((error) => {
console.log("Error: Could not get generation status", error);
});
this.$nextTick(() => {
feather.replace();
});
@@ -1521,9 +1546,11 @@ export default {
// socket responses
socket.on('infos', this.createBotMsg)
socket.on('message', this.streamMessageContent)
socket.on('final', this.finalMsgEvent)
socket.on('notification', this.notify)
socket.on('new_message', this.new_message)
socket.on('update_message', this.streamMessageContent)
socket.on('close_message', this.finalMsgEvent)
socket.on('connected',this.socketIOConnected)
socket.on('disconnected',this.socketIODisconnected)
console.log("Added events")
@@ -1593,6 +1620,9 @@ export default {
},
computed: {
client_id() {
return socket.id
},
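// Editor's note (assumption, not part of this commit): socket.id is only populated once the
// socket has connected, so a request sent before the 'connected' event fires would carry
// client_id === undefined. A possible guard using socket.io-client's once():
//   if (!socket.id) { await new Promise((resolve) => socket.once('connect', resolve)) }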
isReady(){
console.log("verify ready", this.isCreated)
return this.isCreated