From e49b5bfc493b27898e6c2c589880a85ee82cccfd Mon Sep 17 00:00:00 2001
From: Saifeddine ALOUI
Date: Tue, 14 Nov 2023 23:20:13 +0100
Subject: [PATCH] enhanced

---
 lollms/apps/elf/__init__.py | 23 +++++++++++++++++++++--
 1 file changed, 21 insertions(+), 2 deletions(-)

diff --git a/lollms/apps/elf/__init__.py b/lollms/apps/elf/__init__.py
index e98434e..22ea1ad 100644
--- a/lollms/apps/elf/__init__.py
+++ b/lollms/apps/elf/__init__.py
@@ -266,13 +266,32 @@ def chat_completions():
         temperature=temperature,
         top_p=top_p,
         n_predict=max_tokens,
-        # callback=callback
+        callback=callback
     )

     def stream():
         nonlocal response
         for token in response:
-            stream_callback(token, None)
+            completion_timestamp = int(time.time())
+            completion_id = ''.join(random.choices(
+                'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', k=28))
+
+            completion_data = {
+                'id': f'chatcmpl-{completion_id}',
+                'object': 'chat.completion.chunk',
+                'created': completion_timestamp,
+                'choices': [
+                    {
+                        'delta': {
+                            'content': token
+                        },
+                        'index': 0,
+                        'finish_reason': None
+                    }
+                ]
+            }
+            yield 'data: %s\n\n' % json.dumps(completion_data, separators=(',', ':'))
+            time.sleep(0.02)

     return app.response_class(
         stream(),
         mimetype='text/event-stream'
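
Reviewer note: the new stream() generator emits OpenAI-style chat.completion.chunk objects as server-sent events, one "data: {...}" line per token, paced by a 20 ms sleep. Below is a minimal client sketch for consuming that stream. It is not part of the patch: the base URL, port 9600, and the /v1/chat/completions route are assumptions used only for illustration; adjust them to match how the elf app actually registers chat_completions() in your deployment.

# Hypothetical client sketch for the streaming endpoint added by this patch.
# Assumptions (not taken from the patch): server at http://localhost:9600,
# route /v1/chat/completions, request body with "messages" and "stream".
import json
import requests

resp = requests.post(
    "http://localhost:9600/v1/chat/completions",
    json={
        "messages": [{"role": "user", "content": "Hello"}],
        "stream": True,
    },
    stream=True,
)

for line in resp.iter_lines(decode_unicode=True):
    # Each event arrives as a single line of the form: data: {...compact json...}
    if not line or not line.startswith("data: "):
        continue
    chunk = json.loads(line[len("data: "):])
    delta = chunk["choices"][0]["delta"]
    print(delta.get("content", ""), end="", flush=True)

The compact separators passed to json.dumps keep each SSE frame small, and the short sleep between chunks simply throttles emission; clients should rely on the "data: " framing rather than on timing.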