From 42bbf653c071fe07cb56124ebaf7fd51640263f1 Mon Sep 17 00:00:00 2001
From: Saifeddine ALOUI <aloui.seifeddine@gmail.com>
Date: Tue, 9 Apr 2024 22:40:46 +0200
Subject: [PATCH] fix summarization progress reporting and missing step_end in summerize_chunks

---
 lollms/personality.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)
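
Review note: the first hunk touches the loop that repeatedly chunks and re-summarizes the text until it fits under max_summary_size, reporting a step at each depth; the second hunk makes summerize_chunks close its per-chunk step. Below is a minimal, self-contained sketch of that recursive-compression pattern for reference. The names tokenize, chunk_text, summarize_chunk and compress are hypothetical stand-ins, not the lollms DocumentDecomposer/fast_gen/step API.

    # Minimal sketch of the recursive compression loop modified by this patch.
    # tokenize() and summarize_chunk() are hypothetical stand-ins for the model
    # tokenizer and the LLM summary call; they are NOT the real lollms API.
    from typing import List

    def tokenize(text: str) -> List[str]:
        # Stand-in tokenizer: one token per whitespace-separated word.
        return text.split()

    def chunk_text(text: str, chunk_size: int) -> List[str]:
        # Stand-in for document decomposition: fixed-size word chunks.
        words = tokenize(text)
        return [" ".join(words[i:i + chunk_size]) for i in range(0, len(words), chunk_size)]

    def summarize_chunk(chunk: str) -> str:
        # Stand-in for the LLM summary: keep roughly the first half of the words.
        words = tokenize(chunk)
        return " ".join(words[: max(1, len(words) // 2)])

    def compress(text: str, max_summary_size: int, chunk_size: int = 64) -> str:
        """Repeatedly chunk and summarize until the text fits under max_summary_size tokens."""
        depth = 0
        tokens = tokenize(text)
        while len(tokens) > max_summary_size:
            depth += 1
            chunks = chunk_text(text, chunk_size)
            # Summarize each chunk and stitch the partial summaries back together,
            # mirroring summerize_chunks() joining the per-chunk results with "\n".
            text = "\n".join(summarize_chunk(c) for c in chunks)
            tokens = tokenize(text)
            print(f"Compressing... [depth {depth}] -> {len(tokens)} tokens "
                  f"(target {max_summary_size})")
        return text

    if __name__ == "__main__":
        sample = "word " * 1000
        result = compress(sample, max_summary_size=100)
        print(f"Final size: {len(tokenize(result))} tokens")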

diff --git a/lollms/personality.py b/lollms/personality.py
index ef19468..2eb0009 100644
--- a/lollms/personality.py
+++ b/lollms/personality.py
@@ -2222,13 +2222,13 @@ class APScript(StateMachine):
         depth=0
         tk = self.personality.model.tokenize(text)
         while len(tk)>max_summary_size:
-            self.step_start(f"Comprerssing.. [depth {depth}]")
+            self.step_start(f"Comprerssing... [depth {depth+1}]")
             chunk_size = int(self.personality.config.ctx_size*0.6)
             document_chunks = DocumentDecomposer.decompose_document(text, chunk_size, 0, self.personality.model.tokenize, self.personality.model.detokenize, True)
-            text = self.summerize_chunks(document_chunks, 
-            data_extraction_instruction, doc_name, answer_start, max_generation_size, callback)
+            text = self.summerize_chunks(document_chunks, data_extraction_instruction, doc_name, answer_start, max_generation_size, callback)
             tk = self.personality.model.tokenize(text)
-            self.step_end(f"Comprerssing.. [depth {depth}]")
+            self.step(f"Current text size : {tk}, max summary size : {max_summary_size}")
+            self.step_end(f"Comprerssing... [depth {depth+1}]")
         self.step_start(f"Rewriting..")
         text = self.summerize_chunks([text], 
         final_task_instruction, doc_name, answer_start, max_generation_size, callback)
@@ -2239,7 +2239,7 @@ class APScript(StateMachine):
     def summerize_chunks(self, chunks, summary_instruction="summerize", doc_name="chunk", answer_start="", max_generation_size=3000, callback=None):
         summeries = []
         for i, chunk in enumerate(chunks):
-            self.step_start(f"Processing chunk : {i+1}/{len(chunks)}")
+            self.step_start(f" Summary of {doc_name} - Processing chunk : {i+1}/{len(chunks)}")
             summary = f"{answer_start}"+ self.fast_gen(
                         "\n".join([
                             f"!@>Document_chunk: {doc_name}:",
@@ -2252,6 +2252,7 @@ class APScript(StateMachine):
                             max_generation_size=max_generation_size,
                             callback=callback)
             summeries.append(summary)
+            self.step_end(f" Summary of {doc_name} - Processing chunk : {i+1}/{len(chunks)}")
         return "\n".join(summeries)