From b126999ed2683464c2980fb1a3468658be2d475c Mon Sep 17 00:00:00 2001
From: Saifeddine ALOUI <aloui.saifeddine@gmail.com>
Date: Sat, 8 Jul 2023 00:22:52 +0200
Subject: [PATCH] updated

---
 examples/chat_forever/console.py                    |  6 ++++--
 examples/simple_story/console.py                    | 10 ++++++++--
 lollms/server.py                                    |  6 +++---
 setup.py                                            |  2 +-
 tests/endoints_unit_tests/python/test_generation.py |  7 +++++--
 5 files changed, 21 insertions(+), 10 deletions(-)

diff --git a/examples/chat_forever/console.py b/examples/chat_forever/console.py
index d9a808d..573f36e 100644
--- a/examples/chat_forever/console.py
+++ b/examples/chat_forever/console.py
@@ -1,5 +1,5 @@
 from lollms.console import Conversation
-
+import sys
 class MyConversation(Conversation):
     def __init__(self, cfg=None):
         super().__init__(cfg, show_welcome_message=False)
@@ -15,7 +15,9 @@ class MyConversation(Conversation):
             full_discussion += self.personality.user_message_prefix+prompt+self.personality.link_text
             full_discussion += self.personality.ai_message_prefix
             def callback(text, type=None):
-                print(text, end="", flush=True)
+                print(text, end="")
+                sys.stdout = sys.__stdout__
+                sys.stdout.flush()
                 return True
             print(self.personality.name+": ",end="",flush=True)
             output = self.safe_generate(full_discussion, callback=callback)
diff --git a/examples/simple_story/console.py b/examples/simple_story/console.py
index 389f775..461fdd3 100644
--- a/examples/simple_story/console.py
+++ b/examples/simple_story/console.py
@@ -1,4 +1,5 @@
 from lollms.console import Conversation
+import sys
 
 class MyConversation(Conversation):
     def __init__(self, cfg=None):
@@ -7,9 +8,14 @@ class MyConversation(Conversation):
     def start_conversation(self):
         prompt = "Once apon a time"
         def callback(text, type=None):
-            print(text, end="", flush=True)
+            print(text, end="")
+            sys.stdout = sys.__stdout__
+            sys.stdout.flush()
+
             return True
-        print(prompt, end="", flush=True)
+        print(prompt, end="")
+        sys.stdout = sys.__stdout__
+        sys.stdout.flush()
         output = self.safe_generate(prompt, callback=callback)
 
 if __name__ == '__main__':
diff --git a/lollms/server.py b/lollms/server.py
index fc30300..6001774 100644
--- a/lollms/server.py
+++ b/lollms/server.py
@@ -462,10 +462,10 @@ class LoLLMsServer(LollmsApplication):
                     fd = personality.model.detokenize(tk[-min(self.config.ctx_size-n_cond_tk-personality.model_n_predicts,n_tokens):])
 
                 if personality.processor is not None and personality.processor_cfg["custom_workflow"]:
-                    print("processing...", end="", flush=True)
+                    ASCIIColors.info("processing...")
                     generated_text = personality.processor.run_workflow(prompt, previous_discussion_text=personality.personality_conditioning+fd, callback=callback)
                 else:
-                    ASCIIColors.info("generating...", end="", flush=True)
+                    ASCIIColors.info("generating...")
                     generated_text = personality.model.generate(
                         personality.personality_conditioning+fd,
                         n_predict=personality.model_n_predicts,
@@ -475,7 +475,7 @@ class LoLLMsServer(LollmsApplication):
                     generated_text = personality.processor.process_model_output(generated_text)
                 full_discussion_blocks.append(generated_text.strip())
 
-                ASCIIColors.success("\ndone", end="", flush=True)
+                ASCIIColors.success("\ndone")
 
                 # Emit the generated text to the client
                 self.socketio.emit('text_generated', {'text': generated_text}, room=client_id)
diff --git a/setup.py b/setup.py
index 2234769..05940d1 100644
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,7 @@ def get_all_files(path):
 
 setuptools.setup(
     name="lollms",
-    version="2.1.32",
+    version="2.1.33",
     author="Saifeddine ALOUI",
     author_email="aloui.saifeddine@gmail.com",
     description="A python library for AI personality definition",
diff --git a/tests/endoints_unit_tests/python/test_generation.py b/tests/endoints_unit_tests/python/test_generation.py
index ad770be..bd059f9 100644
--- a/tests/endoints_unit_tests/python/test_generation.py
+++ b/tests/endoints_unit_tests/python/test_generation.py
@@ -3,7 +3,7 @@
 import socketio
 from pathlib import Path
 from lollms import MSG_TYPE
 import time
-
+import sys
 # Connect to the Socket.IO server
 sio = socketio.Client()
@@ -35,7 +35,10 @@ def test_generate_text(host, port, text_file):
 
     @sio.event
     def text_chunk(data):
-        print(data["chunk"],end="",flush=True)
+        print(data["chunk"],end="")
+        sys.stdout = sys.__stdout__
+        sys.stdout.flush()
+
 
     @sio.event
    def text_generated(data):
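
Note on the recurring change: in every touched callback, print(..., flush=True) is replaced by a plain print followed by resetting sys.stdout to sys.__stdout__ and an explicit flush(), so streamed chunks reach the real console even if stdout has been wrapped or redirected. Below is a minimal, self-contained sketch of that pattern for trying it outside the repo; fake_stream is a hypothetical stand-in for lollms' safe_generate/model.generate, and only the flush idiom is taken from the patch.

import sys
import time

def fake_stream(prompt, callback):
    # Hypothetical stand-in for safe_generate: emits the prompt word by word.
    for word in prompt.split():
        if not callback(word + " "):
            break  # returning False from the callback stops generation
        time.sleep(0.05)

def callback(text, type=None):
    print(text, end="")
    sys.stdout = sys.__stdout__  # drop any redirection wrapper, as in the patch
    sys.stdout.flush()           # push the chunk to the console immediately
    return True

if __name__ == "__main__":
    fake_stream("Once upon a time", callback)
    print()

Reassigning sys.stdout is a blunt instrument, since it permanently discards whatever wrapper was installed, but it guarantees the subsequent flush() hits the underlying terminal.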