Mirror of https://github.com/ParisNeo/lollms.git (synced 2024-12-18 20:27:58 +00:00)
commit 48cbd7b9c7 (parent 65f3fcf70b)

    fix
````
@@ -342,6 +342,9 @@ class LollmsApplication(LoLLMsCom):
        elif self.config.rag_vectorizer=="openai":
            from lollmsvectordb.lollms_vectorizers.openai_vectorizer import OpenAIVectorizer
            vectorizer = OpenAIVectorizer(self.config.rag_vectorizer_model, self.config.rag_vectorizer_openai_key)
        elif self.config.rag_vectorizer=="ollama":
            from lollmsvectordb.lollms_vectorizers.ollama_vectorizer import OllamaVectorizer
            vectorizer = OllamaVectorizer(self.config.rag_vectorizer_model, self.config.rag_service_url)

        vdb = VectorDatabase(Path(parts[1])/f"{db_name}.sqlite", vectorizer, None if self.config.rag_vectorizer=="semantic" else self.model if self.model else TikTokenTokenizer(), n_neighbors=self.config.rag_n_chunks)
        self.active_rag_dbs.append({"name":parts[0],"path":parts[1],"vectorizer":vdb})
````
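For orientation, the hunk above extends a name-based dispatch over `self.config.rag_vectorizer`. Below is a minimal sketch of that selection logic: the constructor signatures, import paths, and config attribute names are taken directly from the diff, while the `build_vectorizer` helper itself and the error branch are illustrative and not part of lollms.

````python
# Illustrative sketch of the vectorizer selection shown in the hunk above.
# Import paths, constructor signatures and config attribute names mirror the diff;
# the helper function and the ValueError fallback are assumptions, not lollms code.
def build_vectorizer(config):
    if config.rag_vectorizer == "openai":
        from lollmsvectordb.lollms_vectorizers.openai_vectorizer import OpenAIVectorizer
        return OpenAIVectorizer(config.rag_vectorizer_model, config.rag_vectorizer_openai_key)
    elif config.rag_vectorizer == "ollama":
        from lollmsvectordb.lollms_vectorizers.ollama_vectorizer import OllamaVectorizer
        return OllamaVectorizer(config.rag_vectorizer_model, config.rag_service_url)
    raise ValueError(f"Unsupported rag_vectorizer: {config.rag_vectorizer}")
````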
````
@@ -1088,13 +1091,19 @@ class LollmsApplication(LoLLMsCom):
        if self.config.rag_build_keys_words:
            self.personality.step_start("Building vector store query")
            q = f"{self.separator_template}".join([
                f"{self.system_custom_header('instruction')}Read the entire discussion and rewrite the last prompt for someone who hasn't read the discussion.",
                "Do not answer the prompt. Do not provide any explanations.",
                "make a RAG vector database query from the last user prompt given this discussion.",
                f"{self.system_custom_header('discussion')}",
                "---",
                f"{discussion[-2048:]}",
                f"{self.ai_custom_header('enhanced_query')}"
                "---",
            ])
            query = self.personality.fast_gen(q, max_generation_size=256, show_progress=True, callback=self.personality.sink)
            template = """{
    "query": "[the rag query deduced from the last user prompt]"
}
"""
            query = self.personality.generate_code(q, self.personality.image_files, template, callback=self.personality.sink)
            query = json.loads(query)
            query = query["query"]
            self.personality.step_end("Building vector store query")
            ASCIIColors.magenta(f"Query: {query}")
            self.personality.step(f"Query: {query}")
````
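The reworked query builder above asks the model for a JSON object matching `template` and then runs the reply through `json.loads` before pulling out the `"query"` field. A minimal sketch of that parsing step follows, assuming the generator returns a string shaped like the template; the fallback behaviour is an illustrative assumption, not part of the commit, which parses the reply directly.

````python
import json

def extract_rag_query(raw_reply: str, fallback: str) -> str:
    # Expected reply shape, per the template in the hunk: {"query": "..."}
    # The fallback path is an illustrative assumption; the commit itself
    # calls json.loads on the generated reply without a guard.
    try:
        data = json.loads(raw_reply)
    except (json.JSONDecodeError, TypeError):
        return fallback
    if isinstance(data, dict) and isinstance(data.get("query"), str):
        return data["query"]
    return fallback
````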
````
@@ -799,8 +799,7 @@ The generated code must be placed inside the html code tag.
        if debug is None:
            debug = self.config.debug
        response_full = ""
        full_prompt = f"""{self.system_full_header}Act as a code generation assistant who answers with a single code tag content.
{self.system_custom_header("user")}
        full_prompt = f"""{self.system_full_header}Act as a code generation assistant who answers with a single code tag content.
{prompt}
Make sure only a single code tag is generated at each dialogue turn.
"""
````
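The hunk above drops the duplicated `full_prompt` assignment and the redundant user header, leaving a single system-framed instruction followed by the user prompt. A rough sketch of the resulting prompt skeleton, with a placeholder header argument standing in for `self.system_full_header`:

````python
# Rough sketch of the prompt layout after the fix. The header string is a
# placeholder argument; in lollms it comes from self.system_full_header.
def build_code_gen_prompt(system_full_header: str, prompt: str) -> str:
    return (
        f"{system_full_header}Act as a code generation assistant who answers with a single code tag content.\n"
        f"{prompt}\n"
        "Make sure only a single code tag is generated at each dialogue turn.\n"
    )
````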
````
@@ -811,16 +810,16 @@ Make sure only a single code tag is generated at each dialogue turn.
```{language}
{template}
```
{"Make sure you fill all fields and tyo use the exact same keys as the template." if language in ["json","yaml","xml"] else ""}
Don't forget to close the markdown code tag.
{"Make sure you fill all fields and to use the exact same keys as the template." if language in ["json","yaml","xml"] else ""}
Don't forget encapsulate the code inside a markdown code tag. This is mandatory.
"""
        elif code_tag_format=="html":
            full_prompt +=f"""You must answer with the code placed inside the html code tag like this:
<code language="{language}">
{template}
</code>
{"Make sure you fill all fields and tyo use the exact same keys as the template." if language in ["json","yaml","xml"] else ""}
Don't forget to close the html code tag
{"Make sure you fill all fields and to use the exact same keys as the template." if language in ["json","yaml","xml"] else ""}
Don't forget encapsulate the code inside a html code tag. This is mandatory.
"""

        full_prompt += self.ai_custom_header("assistant")
````
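Finally, the hunk above appends format-specific instructions (a markdown fenced block or an html code tag wrapping the template) before adding the assistant header. A condensed sketch of that branching is shown below; `code_tag_format`, `language`, and `template` come from the diff, while the helper name and the exact instruction wording are paraphrased for brevity.

````python
# Condensed, paraphrased sketch of the format-specific instruction block above.
# code_tag_format, language and template mirror the diff; wording is simplified
# and the helper itself is hypothetical.
def format_instructions(code_tag_format: str, language: str, template: str) -> str:
    keys_note = (
        "Make sure you fill all fields and use the exact same keys as the template.\n"
        if language in ["json", "yaml", "xml"] else ""
    )
    if code_tag_format == "markdown":
        return (
            f"```{language}\n{template}\n```\n"
            f"{keys_note}"
            "Don't forget to encapsulate the code inside a markdown code tag. This is mandatory.\n"
        )
    elif code_tag_format == "html":
        return (
            f'<code language="{language}">\n{template}\n</code>\n'
            f"{keys_note}"
            "Don't forget to encapsulate the code inside an html code tag. This is mandatory.\n"
        )
    return ""
````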