From d1f6f079423a433768cb6978b7988e60619896bf Mon Sep 17 00:00:00 2001
From: Saifeddine ALOUI
Date: Fri, 19 Apr 2024 22:30:45 +0200
Subject: [PATCH] upgraded

---
 lollms/app.py                               | 2 +-
 lollms/personality.py                       | 2 +-
 lollms/server/endpoints/lollms_generator.py | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/lollms/app.py b/lollms/app.py
index f7000ad..eb345aa 100644
--- a/lollms/app.py
+++ b/lollms/app.py
@@ -303,7 +303,7 @@ class LollmsApplication(LoLLMsCom):
                     generation_infos["generated_text"] += chunk
                     antiprompt = self.personality.detect_antiprompt(generation_infos["generated_text"])
                     if antiprompt:
-                        ASCIIColors.warning(f"\nDetected hallucination with antiprompt: {antiprompt}")
+                        ASCIIColors.warning(f"\n{antiprompt} detected. Stopping generation")
                         generation_infos["generated_text"] = self.remove_text_from_string(generation_infos["generated_text"],antiprompt)
                         return False
                     else:
diff --git a/lollms/personality.py b/lollms/personality.py
index 5a1cc14..1fd9f12 100644
--- a/lollms/personality.py
+++ b/lollms/personality.py
@@ -699,7 +699,7 @@ class AIPersonality:
             antiprompt = self.detect_antiprompt(bot_says)
             if antiprompt:
                 self.bot_says = self.remove_text_from_string(bot_says,antiprompt)
-                ASCIIColors.warning(f"\nDetected hallucination with antiprompt: {antiprompt}")
+                ASCIIColors.warning(f"\n{antiprompt} detected. Stopping generation")
                 return False
             else:
                 if callback:
diff --git a/lollms/server/endpoints/lollms_generator.py b/lollms/server/endpoints/lollms_generator.py
index 638d734..a1c7a1f 100644
--- a/lollms/server/endpoints/lollms_generator.py
+++ b/lollms/server/endpoints/lollms_generator.py
@@ -441,7 +441,7 @@ async def v1_completion(request: CompletionGenerationRequest):
                 output["text"] += chunk
                 antiprompt = detect_antiprompt(output["text"])
                 if antiprompt:
-                    ASCIIColors.warning(f"\nDetected hallucination with antiprompt: {antiprompt}")
+                    ASCIIColors.warning(f"\n{antiprompt} detected. Stopping generation")
                     output["text"] = remove_text_from_string(output["text"],antiprompt)
                     return False
                 else:
@@ -462,7 +462,7 @@ async def v1_completion(request: CompletionGenerationRequest):
                 output["text"] += chunk
                 antiprompt = detect_antiprompt(output["text"])
                 if antiprompt:
-                    ASCIIColors.warning(f"\nDetected hallucination with antiprompt: {antiprompt}")
+                    ASCIIColors.warning(f"\n{antiprompt} detected. Stopping generation")
                     output["text"] = remove_text_from_string(output["text"],antiprompt)
                     return False
                 else:
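
Note: the callbacks patched above rely on two helpers, detect_antiprompt() and remove_text_from_string(), whose implementations are not part of this patch. Below is a minimal sketch of how such helpers could behave, assuming a case-insensitive substring match and truncation at the marker; the antiprompt list, the exact signatures, and the truncation rule are illustrative assumptions, not the lollms implementation.

# Hedged sketch, not the lollms implementation: plausible shapes for the two
# helpers called by the patched callbacks. The antiprompt list, the exact
# signatures, and the truncation behaviour are assumptions for illustration.
from typing import Optional, Sequence

def detect_antiprompt(text: str, anti_prompts: Sequence[str] = ("!@>",)) -> Optional[str]:
    # Return the first antiprompt marker found in the generated text, or None.
    lowered = text.lower()
    for marker in anti_prompts:
        if marker.lower() in lowered:
            return marker
    return None

def remove_text_from_string(text: str, marker: str) -> str:
    # Truncate the text at the first (case-insensitive) occurrence of the marker.
    index = text.lower().find(marker.lower())
    return text if index == -1 else text[:index]

# Usage mirroring the patched callbacks: accumulate streamed chunks, stop as
# soon as an antiprompt appears, and strip it from the accumulated output.
output = {"text": ""}
for chunk in ("Here is the answer. ", "!@> user: and now..."):
    output["text"] += chunk
    antiprompt = detect_antiprompt(output["text"])
    if antiprompt:
        print(f"\n{antiprompt} detected. Stopping generation")
        output["text"] = remove_text_from_string(output["text"], antiprompt)
        break
print(output["text"])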