From 5fc9e91eb4dd3c742ac048c0e2b23009e582b6d4 Mon Sep 17 00:00:00 2001
From: Saifeddine ALOUI
Date: Fri, 5 Jul 2024 07:02:43 +0200
Subject: [PATCH] best

---
 lollms/personality.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/lollms/personality.py b/lollms/personality.py
index 4a74970..d3f4572 100644
--- a/lollms/personality.py
+++ b/lollms/personality.py
@@ -2817,8 +2817,8 @@ class APScript(StateMachine):
         prompt = self.build_prompt(full_context, sacrifice_id)

         if self.config.debug:
-            nb_prompt_tokens = self.personality.model.tokenize(prompt)
-            nb_tokens = min(self.config.ctx_size - len(nb_prompt_tokens), self.config.max_n_predict)
+            nb_prompt_tokens = len(self.personality.model.tokenize(prompt))
+            nb_tokens = min(self.config.ctx_size - nb_prompt_tokens, self.config.max_n_predict)
             ASCIIColors.info(f"Prompt size : {nb_prompt_tokens}")
             ASCIIColors.info(f"Requested generation max size : {nb_tokens}")
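
Note: a minimal standalone sketch of the corrected debug accounting follows, assuming model.tokenize() returns a list of token ids (which is what the patched code relies on). DummyModel, its whitespace tokenizer, and remaining_generation_budget are hypothetical stand-ins for illustration only, not part of lollms.

# Before the patch, nb_prompt_tokens held the token list itself, so the debug
# log printed the whole list instead of its length. After the patch it holds
# the count, which is what both the budget arithmetic and the log expect.

class DummyModel:
    def tokenize(self, text: str):
        # Stand-in tokenizer: one "token" per whitespace-separated word.
        return text.split()

def remaining_generation_budget(model, prompt: str, ctx_size: int, max_n_predict: int) -> int:
    # Count prompt tokens, then cap the generation budget by the remaining
    # context window and the configured maximum prediction length.
    nb_prompt_tokens = len(model.tokenize(prompt))
    nb_tokens = min(ctx_size - nb_prompt_tokens, max_n_predict)
    print(f"Prompt size : {nb_prompt_tokens}")
    print(f"Requested generation max size : {nb_tokens}")
    return nb_tokens

if __name__ == "__main__":
    remaining_generation_budget(DummyModel(), "hello world from lollms", ctx_size=4096, max_n_predict=1024)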