fixed personalities system

This commit is contained in:
Saifeddine ALOUI 2024-05-29 02:53:18 +02:00
parent dbee5586e9
commit f6757dcd5b
21 changed files with 310 additions and 241 deletions

8
app.py
View File

@ -5,14 +5,6 @@ Description: Singleton class for the LoLLMS web UI.
This file is the entry point to the webui.
"""
from lollms.utilities import PackageManager
if PackageManager.check_package_installed("pipmaster"):
PackageManager.install_package("pipmaster")
PackageManager.install_or_update("ascii_colors")
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.responses import HTMLResponse

View File

@ -1,5 +1,5 @@
# =================== Lord Of Large Language Multimodal Systems Configuration file ===========================
version: 104
version: 106
binding_name: null
model_name: null
model_variant: null
@ -29,6 +29,11 @@ app_custom_logo: ""
# Generation parameters
discussion_prompt_separator: "!@>"
start_header_id_template: "!@>"
end_header_id_template: ": "
separator_template: "\n"
system_message_template: "system"
seed: -1
ctx_size: 4084
max_n_predict: 4096

@ -1 +1 @@
Subproject commit 7468f4414c81c9a880fcc2fd7542dbf17d80a0a2
Subproject commit 2dde56275a20e93539037411a00389d3359d9a46

View File

@ -634,10 +634,15 @@ class LOLLMSWebUI(LOLLMSElfServer):
"""
Builds a title for a discussion
"""
discussion_prompt_separator = self.config.discussion_prompt_separator
start_header_id_template = self.config.start_header_id_template
end_header_id_template = self.config.end_header_id_template
separator_template = self.config.separator_template
system_message_template = self.config.system_message_template
# Get the list of messages
messages = discussion.get_messages()
discussion_messages = "!@>instruction: Create a short title to this discussion\nYour response should only contain the title without any comments.\n"
discussion_title = "\n!@>Discussion title:"
discussion_messages = f"{start_header_id_template}instruction{end_header_id_template}Create a short title to this discussion\nYour response should only contain the title without any comments.\n"
discussion_title = f"\n{start_header_id_template}Discussion title{end_header_id_template}"
available_space = self.config.ctx_size - 150 - len(self.model.tokenize(discussion_messages))- len(self.model.tokenize(discussion_title))
# Initialize a list to store the full messages
@ -708,16 +713,16 @@ class LOLLMSWebUI(LOLLMSElfServer):
if message["id"]<= message_id or message_id==-1:
if message["type"]!=MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER:
if message["sender"]==self.personality.name:
full_message_list.append(self.personality.ai_message_prefix+message["content"])
full_message_list.append(self.config.discussion_prompt_separator+self.personality.ai_message_prefix+message["content"])
else:
full_message_list.append(ump + message["content"])
link_text = "\n"# self.personality.link_text
if len(full_message_list) > self.config["nb_messages_to_remember"]:
discussion_messages = self.personality.personality_conditioning+ link_text.join(full_message_list[-self.config["nb_messages_to_remember"]:])
discussion_messages = self.config.discussion_prompt_separator + self.personality.personality_conditioning+ link_text.join(full_message_list[-self.config["nb_messages_to_remember"]:])
else:
discussion_messages = self.personality.personality_conditioning+ link_text.join(full_message_list)
discussion_messages = self.config.discussion_prompt_separator + self.personality.personality_conditioning+ link_text.join(full_message_list)
return discussion_messages # Removes the last return
@ -938,11 +943,16 @@ class LOLLMSWebUI(LOLLMSElfServer):
def close_message(self, client_id):
discussion_prompt_separator = self.config.discussion_prompt_separator
start_header_id_template = self.config.start_header_id_template
end_header_id_template = self.config.end_header_id_template
separator_template = self.config.separator_template
system_message_template = self.config.system_message_template
client = self.session.get_client(client_id)
if not client.discussion:
return
#fix hallucination
client.generated_text=client.generated_text.split("!@>")[0]
client.generated_text=client.generated_text.split(f"{start_header_id_template}")[0]
# Send final message
client.discussion.current_message.finished_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
try:
@ -1118,6 +1128,12 @@ class LOLLMSWebUI(LOLLMSElfServer):
return txt
def _generate(self, prompt, n_predict, client_id, callback=None):
discussion_prompt_separator = self.config.discussion_prompt_separator
start_header_id_template = self.config.start_header_id_template
end_header_id_template = self.config.end_header_id_template
separator_template = self.config.separator_template
system_message_template = self.config.system_message_template
client = self.session.get_client(client_id)
self.nb_received_tokens = 0
self.start_time = datetime.now()
@ -1140,7 +1156,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
)
else:
prompt = "\n".join([
"!@>system: I am an AI assistant that can converse and analyze images. When asked to locate something in an image you send, I will reply with:",
f"{start_header_id_template}{system_message_template}{end_header_id_template}I am an AI assistant that can converse and analyze images. When asked to locate something in an image you send, I will reply with:",
"boundingbox(image_index, label, left, top, width, height)",
"Where:",
"image_index: 0-based index of the image",
@ -1155,7 +1171,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
prompt,
client.discussion.image_files,
callback=callback,
n_predict=min(n_predict,self.personality.model_n_predicts),
n_predict=n_predict,
temperature=self.personality.model_temperature,
top_k=self.personality.model_top_k,
top_p=self.personality.model_top_p,
@ -1189,7 +1205,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
output = self.model.generate(
prompt,
callback=callback,
n_predict=min(n_predict,self.personality.model_n_predicts),
n_predict=n_predict,
temperature=self.personality.model_temperature,
top_k=self.personality.model_top_k,
top_p=self.personality.model_top_p,
@ -1207,6 +1223,12 @@ class LOLLMSWebUI(LOLLMSElfServer):
return output
def start_message_generation(self, message, message_id, client_id, is_continue=False, generation_type=None, force_using_internet=False):
discussion_prompt_separator = self.config.discussion_prompt_separator
start_header_id_template = self.config.start_header_id_template
end_header_id_template = self.config.end_header_id_template
separator_template = self.config.separator_template
system_message_template = self.config.system_message_template
client = self.session.get_client(client_id)
if self.personality is None:
self.warning("Select a personality")
@ -1304,7 +1326,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
f'</a>',
])
sources_text += '</div>'
client.generated_text=client.generated_text.split("!@>")[0] + "\n" + sources_text
client.generated_text=client.generated_text.split(f"{start_header_id_template}")[0] + "\n" + sources_text
self.personality.full(client.generated_text)
except Exception as ex:
trace_exception(ex)

View File

@ -1,6 +1,6 @@
name: Build a Latex Book
content: |
@<Add some context information to give the AI some context about the book or leave blank if you have no specific idea>@
@<Add some context information to give the AI some context about the book or leave blank if you have no specific idea>@
```latex
\documentclass[12pt]{book}
\usepackage{url}
@ -17,8 +17,7 @@ content: |
snippets:
- Import graphics(required to add graphics to the page) :
\usepackage[demo]{graphic}
- Add graph: |
\includegraphics[width=4in]{@<Image name>@}% Replace with your own image path
- Add graph: | \includegraphics[width=4in]{@<Image name>@}% Replace with your own image path
help: |
Builds a latex code for a book
Builds a latex code for a book

View File

@ -1,5 +1,5 @@
name: Build a Markdown Book
content: |
@<Add some context information to give the AI some context about the book or leave blank if you have no specific idea>@
@<Add some context information to give the AI some context about the book or leave blank if you have no specific idea>@
# @<book title>@

View File

@ -1,4 +1,4 @@
content: '### System:
content: '###
Act as a text misspelling fixer assistant. Assist the user to fix his text

View File

@ -1,4 +1,4 @@
name: Simple Book writing
content: |
# @<Title of the book:The adventures of Gandalf and Darth Vader>@
# @<Title of the book:The adventures of Gandalf and Darth Vader>@
@<Start the story:Once upon a time in middle earth>@@<generation_placeholder>@

View File

@ -1,6 +1,6 @@
name: StableBulga2 Instruct
content: |
### System:
###
@<Put your system prompt here>@
### User:

View File

@ -7,8 +7,7 @@ content: |
parameter_1: this is parameter 1
parameter_2: this is parameter 2
parameter_3: 25
parameter_4: |
This is a multi
parameter_4: | This is a multi
line parameter
```
Translation:
@ -17,8 +16,7 @@ content: |
parameter_1: ceci est le paramètre 1
parameter_2: ceci est le paramètre 2
parameter_3: 25
parameter_4: |
Ceci est un paramètre
parameter_4: | Ceci est un paramètre
multiligne
```
Session 2:

View File

@ -11,7 +11,7 @@ setuptools
psutil
pytest
GitPython
ascii-colors>=0.3.0
ascii-colors>=0.3.2
beautifulsoup4
packaging

@ -1 +1 @@
Subproject commit 4b82338d679f61e0d2761fb8f76db9bb56258a4b
Subproject commit 28f48c5e0ba1f2c92922f7474d7ce3d72764cba1

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

4
web/dist/index.html vendored
View File

@ -6,8 +6,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LoLLMS WebUI - Welcome</title>
<script type="module" crossorigin src="/assets/index-8f4b9f56.js"></script>
<link rel="stylesheet" href="/assets/index-18773a3b.css">
<script type="module" crossorigin src="/assets/index-9cc63556.js"></script>
<link rel="stylesheet" href="/assets/index-dd02af76.css">
</head>
<body>
<div id="app"></div>

View File

@ -220,6 +220,7 @@ export default {
pers=pers.personality;
// Make a POST request to the '/get_personality_config' endpoint using Axios
axios.post('/get_personality_config', {
client_id:this.$store.state.client_id,
category: pers.category,
name: pers.folder,
})

View File

@ -43,14 +43,6 @@
<tr>
<td><label for="modelTemperature">Model Temperature:</label></td><td><input type="number" id="modelTemperature" v-model="config.model_temperature"></td>
</tr>
<tr>
<td><label for="modelNPredicts">Model N Predicts:</label></td>
<td><input class="dark:bg-black dark:text-primary w-full" type="number" id="modelNPredicts" v-model="config.model_n_predicts"></td>
</tr>
<tr>
<td><label for="modelNPredicts">Model N Predicts:</label></td>
<td><input class="dark:bg-black dark:text-primary w-full" type="number" id="modelNPredicts" v-model="config.model_n_predicts"></td>
</tr>
<tr>
<td><label for="modelTopK">Model Top K:</label></td>
<td><input class="dark:bg-black dark:text-primary w-full" type="number" id="modelTopK" v-model="config.model_top_k"></td>
@ -138,6 +130,7 @@ export default {
},
submitForm() {
axios.post('/set_personality_config', {
client_id: this.$store.state.client_id,
category: this.personality.category,
name: this.personality.folder,
config: this.config

View File

@ -887,6 +887,7 @@ export default {
addPreset() {
let title = prompt('Enter the title of the preset:');
this.presets[title] = {
client_id:this.$store.state.client_id,
name:title,
content:this.text
}

View File

@ -241,7 +241,65 @@
</td>
</tr>
<tr>
<td style="min-width: 200px;">
<label for="start_header_id_template" class="text-sm font-bold" style="margin-right: 1rem;">Start header id template:</label>
</td>
<td>
<input
type="text"
id="start_header_id_template"
required
v-model="configFile.start_header_id_template"
@change="settingsChanged=true"
class="w-full w-full mt-1 px-2 py-1 border border-gray-300 rounded dark:bg-gray-600 dark:bg-gray-600"
>
</td>
</tr>
<tr>
<td style="min-width: 200px;">
<label for="end_header_id_template" class="text-sm font-bold" style="margin-right: 1rem;">End header id template:</label>
</td>
<td>
<input
type="text"
id="end_header_id_template"
required
v-model="configFile.end_header_id_template"
@change="settingsChanged=true"
class="w-full w-full mt-1 px-2 py-1 border border-gray-300 rounded dark:bg-gray-600 dark:bg-gray-600"
>
</td>
</tr>
<tr>
<td style="min-width: 200px;">
<label for="separator_template" class="text-sm font-bold" style="margin-right: 1rem;">Separator template:</label>
</td>
<td>
<textarea
id="separator_template"
required
v-model="configFile.separator_template"
@change="settingsChanged=true"
class="min-h-[500px] w-full mt-1 px-2 py-1 border border-gray-300 rounded dark:bg-gray-600"
></textarea>
</td>
</tr>
<tr>
<td style="min-width: 200px;">
<label for="system_message_template" class="text-sm font-bold" style="margin-right: 1rem;">System template:</label>
</td>
<td>
<input
type="text"
id="system_message_template"
required
v-model="configFile.system_message_template"
@change="settingsChanged=true"
class="w-full w-full mt-1 px-2 py-1 border border-gray-300 rounded dark:bg-gray-600 dark:bg-gray-600"
>
</td>
</tr>
<tr>
<td style="min-width: 200px;">
<label for="hardware_mode" class="text-sm font-bold" style="margin-right: 1rem;">Hardware mode:</label>

@ -1 +1 @@
Subproject commit 2d01c59c4fd508355ef6c4299fc1f00f80350c4e
Subproject commit 2e17c0d61d64a0158651b868c9a7aed2c5c99219

@ -1 +1 @@
Subproject commit 0f07561baf8c268f24f69049806a4187707c6f08
Subproject commit d62e6c7d0a6ee252d3d5f5c66cce1adc4dcb57b4