diff --git a/api/__init__.py b/api/__init__.py
index 59b39e73..71e1e12b 100644
--- a/api/__init__.py
+++ b/api/__init__.py
@@ -445,6 +445,19 @@ class LoLLMsAPPI():
             ASCIIColors.green(f"{self.lollms_paths.personal_path}")

+        @socketio.on('continue_generate_msg_from')
+        def handle_connection(data):
+            message_id = int(data['id'])
+            message = data["prompt"]
+            self.current_user_message_id = message_id
+            tpe = threading.Thread(target=self.start_message_generation, args=(message, message_id, True))
+            tpe.start()
+
+        # generation status
+        self.generating=False
+        ASCIIColors.blue(f"Your personal data is stored here :",end="")
+        ASCIIColors.green(f"{self.lollms_paths.personal_path}")
+

     def rebuild_personalities(self):
         loaded = self.mounted_personalities
         loaded_names = [f"{p.language}/{p.category}/{p.personality_folder_name}" for p in loaded]
@@ -646,7 +659,7 @@ class LoLLMsAPPI():
             self.condition_chatbot()
         return timestamp

-    def prepare_query(self, message_id=-1):
+    def prepare_query(self, message_id=-1, is_continue=False):
         messages = self.current_discussion.get_messages()
         self.full_message_list = []
         for message in messages:
@@ -659,17 +672,20 @@ class LoLLMsAPPI():
             else:
                 break

-        if self.personality.processor is not None:
-            preprocessed_prompt = self.personality.processor.process_model_input(message["content"])
-        else:
-            preprocessed_prompt = message["content"]
-        if preprocessed_prompt is not None:
-            self.full_message_list.append(self.personality.user_message_prefix+preprocessed_prompt+self.personality.link_text+self.personality.ai_message_prefix)
-        else:
-            self.full_message_list.append(self.personality.user_message_prefix+message["content"]+self.personality.link_text+self.personality.ai_message_prefix)
-
-        link_text = self.personality.link_text
+        if not is_continue:
+            if self.personality.processor is not None:
+                preprocessed_prompt = self.personality.processor.process_model_input(message["content"])
+            else:
+                preprocessed_prompt = message["content"]
+
+            if preprocessed_prompt is not None:
+                self.full_message_list.append(self.personality.user_message_prefix+preprocessed_prompt+self.personality.link_text+self.personality.ai_message_prefix)
+            else:
+                self.full_message_list.append(self.personality.user_message_prefix+message["content"]+self.personality.link_text+self.personality.ai_message_prefix)
+        else:
+            self.full_message_list.append(self.personality.ai_message_prefix+message["content"])
+        link_text = self.personality.link_text

         discussion_messages = self.personality.personality_conditioning+ link_text.join(self.full_message_list)
@@ -851,41 +867,44 @@ class LoLLMsAPPI():
             output = ""
         return output

-    def start_message_generation(self, message, message_id):
+    def start_message_generation(self, message, message_id, is_continue=False):
         ASCIIColors.info(f"Text generation requested by client: {self.current_room_id}")
         # send the message to the bot
         print(f"Received message : {message}")
         if self.current_discussion:
             # First we need to send the new message ID to the client
-            self.current_ai_message_id = self.current_discussion.add_message(
-                self.personality.name,
-                "",
-                parent = self.current_user_message_id,
-                binding = self.config["binding_name"],
-                model = self.config["model_name"],
-                personality = self.config["personalities"][self.config["active_personality_id"]]
-            ) # first the content is empty, but we'll fill it at the end
-            self.socketio.emit('infos',
-                    {
-                        "status":'generation_started',
-                        "type": "input_message_infos",
-                        "bot": self.personality.name,
-                        "user": self.personality.user_name,
-                        "message":message,#markdown.markdown(message),
-                        "user_message_id": self.current_user_message_id,
-                        "ai_message_id": self.current_ai_message_id,
+            if is_continue:
+                self.current_ai_message_id = message_id
+            else:
+                self.current_ai_message_id = self.current_discussion.add_message(
+                    self.personality.name,
+                    "",
+                    parent = self.current_user_message_id,
+                    binding = self.config["binding_name"],
+                    model = self.config["model_name"],
+                    personality = self.config["personalities"][self.config["active_personality_id"]]
+                ) # first the content is empty, but we'll fill it at the end
+            self.socketio.emit('infos',
+                    {
+                        "status":'generation_started',
+                        "type": "input_message_infos",
+                        "bot": self.personality.name,
+                        "user": self.personality.user_name,
+                        "message":message,#markdown.markdown(message),
+                        "user_message_id": self.current_user_message_id,
+                        "ai_message_id": self.current_ai_message_id,

-                        'binding': self.current_discussion.current_message_binding,
-                        'model': self.current_discussion.current_message_model,
-                        'personality': self.current_discussion.current_message_personality,
-                        'created_at': self.current_discussion.current_message_created_at,
-                        'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
-                    }, room=self.current_room_id
+                        'binding': self.current_discussion.current_message_binding,
+                        'model': self.current_discussion.current_message_model,
+                        'personality': self.current_discussion.current_message_personality,
+                        'created_at': self.current_discussion.current_message_created_at,
+                        'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
+                    }, room=self.current_room_id
             )
             self.socketio.sleep(0)

             # prepare query and reception
-            self.discussion_messages, self.current_message = self.prepare_query(message_id)
+            self.discussion_messages, self.current_message = self.prepare_query(message_id, is_continue)
             self.prepare_reception()
             self.generating = True
             self.generate(self.discussion_messages, self.current_message, n_predict = self.config['n_predict'], callback=self.process_chunk)
@@ -921,14 +940,21 @@ class LoLLMsAPPI():
             )
             self.socketio.sleep(0)

-            print()
-            print("## Done ##")
-            print()
+            ASCIIColors.success(f" ╔══════════════════════════════════════════════════╗ ")
+            ASCIIColors.success(f" ║                        Done                        ║ ")
+            ASCIIColors.success(f" ╚══════════════════════════════════════════════════╝ ")
         else:
-            #No discussion available
-            print("No discussion selected!!!")
-            print("## Done ##")
-            print()
             self.cancel_gen = False
+            #No discussion available
+            ASCIIColors.warning("No discussion selected!!!")
+            self.socketio.emit('message', {
+                'data': "No discussion selected!!!",
+                'user_message_id':self.current_user_message_id,
+                'ai_message_id':self.current_ai_message_id,
+                'discussion_id':0,
+                'message_type': MSG_TYPE.MSG_TYPE_EXCEPTION.value
+            }, room=self.current_room_id
+            )
+            print()
         return ""
\ No newline at end of file