Mirror of https://github.com/ParisNeo/lollms.git (synced 2024-12-24 06:46:40 +00:00)

Commit 310cc400d1 ("update"), parent 48cbd7b9c7.
@@ -1092,18 +1092,20 @@ class LollmsApplication(LoLLMsCom):
             self.personality.step_start("Building vector store query")
             q = f"{self.separator_template}".join([
                 "make a RAG vector database query from the last user prompt given this discussion.",
-                f"{self.system_custom_header('discussion')}",
-                "---",
+                "--- discussion --",
                 f"{discussion[-2048:]}",
                 "---",
             ])
             template = """{
-            "query": "[the rag query deduced from the last user prompt]"
+            "query": "[the rag query deduced from the last messgae in the discussion]"
             }
             """
             query = self.personality.generate_code(q, self.personality.image_files, template, callback=self.personality.sink)
-            query = json.loads(query)
-            query = query["query"]
+            if query is None:
+                query = current_message.content
+            else:
+                query = json.loads(query)
+                query = query["query"]
             self.personality.step_end("Building vector store query")
             ASCIIColors.magenta(f"Query: {query}")
             self.personality.step(f"Query: {query}")
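
A note on the hunk above: the old code called json.loads() directly on the output of generate_code(), which raises a TypeError whenever the model fails and generate_code() returns None. The commit adds a guard that falls back to the raw content of the last message instead. A minimal sketch of that fallback follows; the function and argument names (build_rag_query, last_message_content) are illustrative, not part of the lollms API.

```python
import json

def build_rag_query(generated, last_message_content):
    """Sketch of the fallback introduced by this hunk (names are illustrative)."""
    if generated is None:
        # Generation failed: use the last user message verbatim as the RAG query.
        return last_message_content
    # Otherwise parse the JSON the model produced and pull out the "query" field.
    return json.loads(generated)["query"]

# The old path would crash on json.loads(None); the new one degrades gracefully.
print(build_rag_query(None, "what does the license say about redistribution?"))
print(build_rag_query('{"query": "license redistribution terms"}', "ignored"))
```
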
@@ -811,6 +811,7 @@ Make sure only a single code tag is generated at each dialogue turn.
 {template}
 ```
 {"Make sure you fill all fields and to use the exact same keys as the template." if language in ["json","yaml","xml"] else ""}
+The code tag is mandatory.
 Don't forget encapsulate the code inside a markdown code tag. This is mandatory.
 """
         elif code_tag_format=="html":
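
The single added line here reinforces the markdown-format instruction block that generate_code sends to the model. As a rough illustration of where that sentence sits in the assembled instructions (this helper is a sketch, not the actual lollms prompt builder):

```python
def markdown_code_instructions(language, template):
    """Illustrative only: approximate layout of the markdown-format instructions,
    including the 'The code tag is mandatory.' line added by this commit."""
    fence = "`" * 3  # markdown code fence
    structured = language in ["json", "yaml", "xml"]
    lines = [
        f"{fence}{language}",
        template,
        fence,
        "Make sure you fill all fields and to use the exact same keys as the template." if structured else "",
        "The code tag is mandatory.",  # line added by this commit
        "Don't forget encapsulate the code inside a markdown code tag. This is mandatory.",
    ]
    return "\n".join(lines)

print(markdown_code_instructions("json", '{\n    "query": "[the rag query]"\n}'))
```
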
@@ -819,10 +820,15 @@ Don't forget encapsulate the code inside a markdown code tag. This is mandatory.
 {template}
 </code>
 {"Make sure you fill all fields and to use the exact same keys as the template." if language in ["json","yaml","xml"] else ""}
+The code tag is mandatory.
 Don't forget encapsulate the code inside a html code tag. This is mandatory.
 """
 
         full_prompt += self.ai_custom_header("assistant")
+        if debug:
+            ASCIIColors.yellow("Prompt")
+            ASCIIColors.yellow(full_prompt)
+
         if len(self.image_files)>0:
             response = self.generate_with_images(full_prompt, self.image_files, max_size, temperature, top_k, top_p, repeat_penalty, repeat_last_n, callback, debug=debug)
         elif len(images)>0:
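
Besides adding the same mandatory-tag sentence to the html-format instructions, this hunk introduces an optional dump of the full prompt before the model call when debug is set. The pattern is sketched below; ASCIIColors is assumed to be the ascii_colors helper already used throughout lollms, while generate() is purely a stand-in for the real backend call.

```python
from ascii_colors import ASCIIColors  # colored console logging used across lollms

def generate(prompt):
    return "stub response"  # placeholder for the actual model backend call

def run(prompt, debug=False):
    if debug:
        # New in this commit: show exactly what is sent to the model.
        ASCIIColors.yellow("Prompt")
        ASCIIColors.yellow(prompt)
    return generate(prompt)

run("Build a JSON query for ...", debug=True)
```
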
@@ -830,6 +836,9 @@ Don't forget encapsulate the code inside a html code tag. This is mandatory.
         else:
             response = self.generate(full_prompt, max_size, temperature, top_k, top_p, repeat_penalty, repeat_last_n, callback, debug=debug)
         response_full += response
+        if debug:
+            ASCIIColors.green("Response")
+            ASCIIColors.green(response_full)
         codes = self.extract_code_blocks(response)
         if len(codes)==0 and accept_all_if_no_code_tags_is_present:
             if return_full_generated_code:
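
The last hunk adds the matching debug dump of the raw response after generation. The surrounding context also shows the existing fallback: the response goes through extract_code_blocks, and if no code tag is found while accept_all_if_no_code_tags_is_present is set, the raw response can be accepted instead. A rough sketch of that kind of fallback, where extract_code_blocks_sketch is a simplified stand-in (the real lollms helper returns richer metadata):

```python
import re

def extract_code_blocks_sketch(text):
    """Simplified stand-in for lollms' extract_code_blocks: return the bodies
    of fenced markdown blocks found in the model response."""
    return re.findall(r"```[^\n]*\n(.*?)```", text, flags=re.DOTALL)

def pick_code(response, accept_all_if_no_code_tags_is_present=True):
    codes = extract_code_blocks_sketch(response)
    if len(codes) == 0 and accept_all_if_no_code_tags_is_present:
        # No code tag found: accept the full response as-is.
        return response
    return codes[0] if codes else None

print(pick_code('here it is:\n```json\n{"query": "x"}\n```'))  # fenced block body
print(pick_code('{"query": "x"}'))  # no fence, whole response accepted
```
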