Enhanced prompt management

This commit is contained in:
Saifeddine ALOUI 2025-01-22 21:38:22 +01:00
parent e566760cdd
commit fab35e47c5
2 changed files with 10 additions and 5 deletions

View File

@@ -1321,7 +1321,10 @@ Answer directly with the reformulation of the last prompt.
else:
msg = self.ai_custom_header("assistant") + message.content.strip()
else:
msg = self.user_full_header + message.content.strip()
if self.config.use_user_name_in_discussions:
msg = self.user_full_header + message.content.strip()
else:
msg = self.user_custom_header("user") + message.content.strip()
msg += self.separator_template
message_tokenized = self.model.tokenize(msg)

View File

@@ -257,8 +257,7 @@ class AIPersonality:
def build_context(self, context_details, is_continue=False, return_tokens=False):
# Build the final prompt by concatenating the conditionning and discussion messages
prompt_data = self.separator_template.join(
[
elements = [
context_details["conditionning"],
context_details["internet_search_results"],
context_details["documentation"],
@@ -267,9 +266,12 @@
context_details["positive_boost"],
context_details["negative_boost"],
context_details["fun_mode"],
self.ai_full_header if not is_continue else '' if not self.config.use_continue_message else "CONTINUE FROM HERE And do not open a new markdown code tag." + self.separator_template + self.ai_full_header
self.ai_full_header if not is_continue else '' if not self.config.use_continue_message \
else "CONTINUE FROM HERE And do not open a new markdown code tag." + self.separator_template + self.ai_full_header
]
)
# Filter out empty elements and join with separator
prompt_data = self.separator_template.join(element for element in elements if element)
tokens = self.model.tokenize(prompt_data)
if return_tokens:
return prompt_data, tokens