Saifeddine ALOUI 2023-07-08 00:22:52 +02:00
parent a7dc7b8014
commit b126999ed2
5 changed files with 21 additions and 10 deletions

View File

@@ -1,5 +1,5 @@
 from lollms.console import Conversation
-
+import sys
 class MyConversation(Conversation):
     def __init__(self, cfg=None):
         super().__init__(cfg, show_welcome_message=False)
@@ -15,7 +15,9 @@ class MyConversation(Conversation):
         full_discussion += self.personality.user_message_prefix+prompt+self.personality.link_text
         full_discussion += self.personality.ai_message_prefix
         def callback(text, type=None):
-            print(text, end="", flush=True)
+            print(text, end="")
+            sys.stdout = sys.__stdout__
+            sys.stdout.flush()
             return True
         print(self.personality.name+": ",end="",flush=True)
         output = self.safe_generate(full_discussion, callback=callback)
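The hunk above replaces print's flush=True keyword with an explicit reset of sys.stdout followed by a manual flush. A minimal, self-contained sketch of that streaming-callback pattern; the token loop is simulated here, whereas in lollms the callback is driven by safe_generate:

import sys
import time

def callback(text, type=None):
    print(text, end="")
    sys.stdout = sys.__stdout__   # restore the original stream if it was redirected
    sys.stdout.flush()            # force the chunk out immediately
    return True                   # returning True lets generation continue

# Simulated token stream standing in for safe_generate's callbacks.
for token in ["Once ", "upon ", "a ", "time"]:
    callback(token)
    time.sleep(0.1)
print()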

View File

@@ -1,4 +1,5 @@
 from lollms.console import Conversation
+import sys
 
 class MyConversation(Conversation):
     def __init__(self, cfg=None):
@@ -7,9 +8,14 @@ class MyConversation(Conversation):
     def start_conversation(self):
         prompt = "Once apon a time"
         def callback(text, type=None):
-            print(text, end="", flush=True)
+            print(text, end="")
+            sys.stdout = sys.__stdout__
+            sys.stdout.flush()
             return True
-        print(prompt, end="", flush=True)
+        print(prompt, end="")
+        sys.stdout = sys.__stdout__
+        sys.stdout.flush()
         output = self.safe_generate(prompt, callback=callback)
 
 if __name__ == '__main__':
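Why reassign sys.stdout at all? sys.__stdout__ keeps a reference to the interpreter's original standard output, so the assignment undoes any redirection another component may have installed before flushing. A small sketch of that effect, with the redirection simulated via io.StringIO:

import io
import sys

sys.stdout = io.StringIO()    # simulate a library hijacking stdout
print("captured, not shown")

sys.stdout = sys.__stdout__   # point back at the interpreter's original stream
sys.stdout.flush()
print("visible on the terminal again")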

View File

@@ -462,10 +462,10 @@ class LoLLMsServer(LollmsApplication):
                fd = personality.model.detokenize(tk[-min(self.config.ctx_size-n_cond_tk-personality.model_n_predicts,n_tokens):])
                if personality.processor is not None and personality.processor_cfg["custom_workflow"]:
-                   print("processing...", end="", flush=True)
+                   ASCIIColors.info("processing...")
                    generated_text = personality.processor.run_workflow(prompt, previous_discussion_text=personality.personality_conditioning+fd, callback=callback)
                else:
-                   ASCIIColors.info("generating...", end="", flush=True)
+                   ASCIIColors.info("generating...")
                    generated_text = personality.model.generate(
                        personality.personality_conditioning+fd,
                        n_predict=personality.model_n_predicts,
@@ -475,7 +475,7 @@ class LoLLMsServer(LollmsApplication):
                generated_text = personality.processor.process_model_output(generated_text)
                full_discussion_blocks.append(generated_text.strip())
-               ASCIIColors.success("\ndone", end="", flush=True)
+               ASCIIColors.success("\ndone")
                # Emit the generated text to the client
                self.socketio.emit('text_generated', {'text': generated_text}, room=client_id)
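The server-side hunks drop the end= and flush= keywords from the ASCIIColors calls. Presumably those helpers accept only a message (unlike built-in print), so the extra keywords would raise a TypeError; that is an inference, and the Colors class below is a hypothetical stand-in, not the real ASCIIColors API:

class Colors:
    BLUE = "\033[94m"
    GREEN = "\033[92m"
    RESET = "\033[0m"

    @staticmethod
    def info(text):
        print(f"{Colors.BLUE}{text}{Colors.RESET}", flush=True)

    @staticmethod
    def success(text):
        print(f"{Colors.GREEN}{text}{Colors.RESET}", flush=True)

Colors.info("generating...")                       # works
# Colors.info("generating...", end="", flush=True)  # TypeError: unexpected keyword argument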

View File

@@ -26,7 +26,7 @@ def get_all_files(path):
 setuptools.setup(
     name="lollms",
-    version="2.1.32",
+    version="2.1.33",
     author="Saifeddine ALOUI",
     author_email="aloui.saifeddine@gmail.com",
     description="A python library for AI personality definition",

View File

@@ -3,7 +3,7 @@ import socketio
 from pathlib import Path
 from lollms import MSG_TYPE
 import time
-
+import sys
 
 # Connect to the Socket.IO server
 sio = socketio.Client()
@@ -35,7 +35,10 @@ def test_generate_text(host, port, text_file):
     @sio.event
     def text_chunk(data):
-        print(data["chunk"],end="",flush=True)
+        print(data["chunk"],end="")
+        sys.stdout = sys.__stdout__
+        sys.stdout.flush()
 
     @sio.event
     def text_generated(data):
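For context, the handlers above belong to a python-socketio test client. A condensed, runnable sketch of such a client, assuming a server that emits 'text_chunk' events carrying a "chunk" field and a final 'text_generated' event carrying a "text" field (as in the server hunk above); the endpoint URL is a placeholder, not a value from this commit:

import sys
import socketio

sio = socketio.Client()

@sio.event
def text_chunk(data):
    # Stream each chunk as it arrives, flushing so it shows immediately.
    print(data["chunk"], end="")
    sys.stdout = sys.__stdout__
    sys.stdout.flush()

@sio.event
def text_generated(data):
    # The complete text arrives once generation finishes.
    print("\n--- full text ---")
    print(data["text"])
    sio.disconnect()

sio.connect("http://localhost:9600")  # placeholder endpoint
sio.wait()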