From dbc2120b3a725d241ad84b5a9b8ca6e23a3ce6f4 Mon Sep 17 00:00:00 2001
From: Saifeddine ALOUI
Date: Mon, 29 Jul 2024 09:48:05 +0200
Subject: [PATCH] Update personality.py

---
 lollms/personality.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/lollms/personality.py b/lollms/personality.py
index 51610d8..252648e 100644
--- a/lollms/personality.py
+++ b/lollms/personality.py
@@ -696,10 +696,6 @@ class AIPersonality:
         if debug == False:
             debug = self.config.debug
 
-        if max_generation_size is None:
-            prompt_size = self.model.tokenize(prompt)
-            max_generation_size = min(self.model.config.ctx_size - len(prompt_size),self.config.max_n_predict)
-
         pr = PromptReshaper(prompt)
         prompt = pr.build(placeholders,
                           self.model.tokenize,
@@ -707,8 +703,6 @@ class AIPersonality:
                           max_generation_size,
                           sacrifice
                           )
-        ntk = len(self.model.tokenize(prompt))
-        max_generation_size = min(min(self.model.config.ctx_size - ntk, max_generation_size), self.config.max_n_predict)
         # TODO : add show progress
         gen = self.generate(prompt, max_generation_size, temperature = temperature, top_k = top_k, top_p=top_p, repeat_penalty=repeat_penalty, repeat_last_n=repeat_last_n, callback=callback, show_progress=show_progress).strip().replace("", "").replace("", "")
 