From 513770b73eee24680307b3843bbf955c1184e9b8 Mon Sep 17 00:00:00 2001
From: Saifeddine ALOUI
Date: Sun, 21 Apr 2024 03:59:32 +0200
Subject: [PATCH] fixed context size depletion problem

---
 lollms/app.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/lollms/app.py b/lollms/app.py
index 7bc0468..e8750fe 100644
--- a/lollms/app.py
+++ b/lollms/app.py
@@ -812,9 +812,11 @@ class LollmsApplication(LoLLMsCom):
                     "\n" + self.config.discussion_prompt_separator + message.sender + ": " + message.content.strip())
 
             # Check if adding the message will exceed the available space
-            if tokens_accumulated + len(message_tokenized) > available_space:
-                message_tokenized[:-(available_space-tokens_accumulated)]
-                full_message_list.insert(0, message_tokenized)
+            if tokens_accumulated + len(message_tokenized) > available_space-n_tokens:
+                # Update the cumulative number of tokens
+                msg = message_tokenized[-(available_space-tokens_accumulated-n_tokens):]
+                tokens_accumulated += available_space-tokens_accumulated-n_tokens
+                full_message_list.insert(0, msg)
                 break
 
             # Add the tokenized message to the full_message_list