mirror of https://github.com/ParisNeo/lollms.git
commit 0d9b7ad5e0 (parent 9e4159c110)
@@ -837,7 +837,7 @@ class LollmsApplication(LoLLMsCom):
             discussion += "\n" + self.config.discussion_prompt_separator + msg.sender + ": " + msg.content.strip()
         return discussion
     # -------------------------------------- Prompt preparing
-    def prepare_query(self, client_id: str, message_id: int = -1, is_continue: bool = False, n_tokens: int = 0, generation_type = None, force_using_internet=False) -> Tuple[str, str, List[str]]:
+    def prepare_query(self, client_id: str, message_id: int = -1, is_continue: bool = False, n_tokens: int = 0, generation_type = None, force_using_internet=False, previous_chunk="") -> Tuple[str, str, List[str]]:
         """
         Prepares the query for the model.

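The only change in this hunk is the new `previous_chunk` keyword argument, which lets a continuation request hand the text generated so far back to `prepare_query`. A minimal caller-side sketch; `app` and `partial_reply` are illustrative names, only the `previous_chunk` parameter comes from this commit:

    # Hypothetical caller resuming an interrupted generation; the names here
    # are illustrative, only the previous_chunk parameter is from the diff.
    # Per the annotation, prepare_query returns a Tuple[str, str, List[str]].
    result = app.prepare_query(
        client_id,
        message_id=-1,
        is_continue=True,               # resume instead of starting a new reply
        previous_chunk=partial_reply,   # text the model already produced
    )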
@@ -1374,7 +1374,8 @@ class LollmsApplication(LoLLMsCom):
             "extra":"",
             "available_space":available_space,
             "skills":skills,
-            "is_continue":is_continue
+            "is_continue":is_continue,
+            "previous_chunk":previous_chunk
         }
         if self.config.debug:
             ASCIIColors.highlight(documentation,"source_document_title", ASCIIColors.color_yellow, ASCIIColors.color_red, False)
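This hunk threads `previous_chunk` into the context dictionary that `prepare_query` hands to personality scripts. A sketch of the resulting payload shape; only the keys visible in the hunk are confirmed by the diff, the rest of the dictionary is elided:

    # Shape of the context payload after this commit (values are placeholders;
    # only the keys shown in the hunk above are confirmed by the diff).
    context_details = {
        # ... other fields elided ...
        "extra": "",
        "available_space": available_space,   # token budget left for the answer
        "skills": skills,
        "is_continue": is_continue,           # True when resuming a reply
        "previous_chunk": previous_chunk,     # text already generated (new key)
    }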
@@ -777,9 +777,11 @@ class AIPersonality:
         self.bot_says = ""
         if debug:
             self.print_prompt("gen",prompt)
+        ntokens = len(self.model.tokenize(prompt))

         self.model.generate(
             prompt,
-            max_size if max_size else min(self.config.ctx_size-len(self.model.tokenize(prompt)), self.config.max_n_predict),
+            max_size if max_size else min(self.config.ctx_size-ntokens, self.config.max_n_predict),
             partial(self.process, callback=callback, show_progress=show_progress),
             temperature=self.model_temperature if temperature is None else temperature,
             top_k=self.model_top_k if top_k is None else top_k,
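This hunk hoists the prompt tokenization out of the `generate` call so the prompt is tokenized once and the count reused (note `tokenize` returns a token list, hence the `len(...)`, as the replaced line shows). A worked example of the budget it computes, with illustrative numbers:

    # Illustrative numbers, not from the repository configuration.
    ctx_size = 4096          # total context window
    max_n_predict = 2048     # hard cap on generated tokens
    ntokens = 1000           # tokens consumed by the prompt
    budget = min(ctx_size - ntokens, max_n_predict)   # -> 2048
    # With a 3000-token prompt the window dominates: min(1096, 2048) -> 1096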
@@ -3831,8 +3833,21 @@ class APScript(StateMachine):

         return rounds_info

+    def answer(self, context_details, callback=None, send_full=True):
+        if context_details["is_continue"]:
+            full_prompt = self.build_prompt_from_context_details(context_details, suppress=["ai_prefix"])
+        else:
+            full_prompt = self.build_prompt_from_context_details(context_details)
+
+        out = self.fast_gen(full_prompt, callback=callback)
+        nb_tokens = len(self.personality.model.tokenize(out))
+        if nb_tokens >= self.config.max_n_predict - 1:
+            out = out + self.fast_gen(full_prompt + out, callback=callback)
+        if context_details["is_continue"]:
+            out = context_details["previous_chunk"] + out
+        if send_full:
+            self.full(out)
+        return out
+
     def generate_with_function_calls(self, context_details: dict, functions: List[Dict[str, Any]], max_answer_length: Optional[int] = None, callback = None) -> List[Dict[str, Any]]:
         """
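The new `answer` helper packages the common generate-then-continue flow: build the prompt (dropping the AI prefix when resuming), generate, generate again if the first pass filled `max_n_predict`, prepend `previous_chunk` on continuations, and optionally push the result to the UI. A minimal usage sketch from a personality script; the `run_workflow` signature is illustrative and may not match the repository exactly, only `answer` itself comes from this commit:

    # Hypothetical personality script using the new helper; the run_workflow
    # signature shown is an assumption, not taken from this diff.
    def run_workflow(self, prompt, previous_discussion_text="", callback=None, context_details=None):
        # Delegates prompt building, generation, overflow continuation, and
        # the UI update (send_full=True by default) to the new answer() helper.
        return self.answer(context_details, callback=callback)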