mirror of
https://github.com/ParisNeo/lollms-webui.git
synced 2024-12-18 20:17:50 +00:00
Added user information
This commit is contained in:
parent
16d712abff
commit
9fb89ad44a
@ -1244,7 +1244,7 @@ class LoLLMsAPPI(LollmsApplication):
|
||||
documentation = ""
|
||||
if len(self.personality.text_files) > 0 and self.personality.vectorizer:
|
||||
if documentation=="":
|
||||
documentation="Documentation:\n"
|
||||
documentation="!@>Documentation:\n"
|
||||
docs, sorted_similarities = self.personality.vectorizer.recover_text(current_message.content, top_k=self.config.data_vectorization_nb_chunks)
|
||||
for doc, infos in zip(docs, sorted_similarities):
|
||||
documentation += f"document chunk:\nchunk path: {infos[0]}\nchunk content:{doc}"
|
||||
@ -1253,11 +1253,17 @@ class LoLLMsAPPI(LollmsApplication):
|
||||
history = ""
|
||||
if self.config.use_discussions_history and self.discussions_store is not None:
|
||||
if history=="":
|
||||
documentation="History:\n"
|
||||
documentation="!@>History:\n"
|
||||
docs, sorted_similarities = self.discussions_store.recover_text(current_message.content, top_k=self.config.data_vectorization_nb_chunks)
|
||||
for doc, infos in zip(docs, sorted_similarities):
|
||||
history += f"discussion chunk:\ndiscussion title: {infos[0]}\nchunk content:{doc}"
|
||||
|
||||
# Add information about the user
|
||||
user_description=""
|
||||
if self.config.use_user_name_in_discussions:
|
||||
user_description="!@>User description:\n"+self.config.user_description
|
||||
|
||||
|
||||
# Tokenize the conditionning text and calculate its number of tokens
|
||||
tokens_conditionning = self.model.tokenize(conditionning)
|
||||
n_cond_tk = len(tokens_conditionning)
|
||||
@ -1278,8 +1284,18 @@ class LoLLMsAPPI(LollmsApplication):
|
||||
tokens_history = []
|
||||
n_history_tk = 0
|
||||
|
||||
|
||||
# Tokenize user description
|
||||
if len(user_description)>0:
|
||||
tokens_user_description = self.model.tokenize(user_description)
|
||||
n_user_description_tk = len(tokens_user_description)
|
||||
else:
|
||||
tokens_user_description = []
|
||||
n_user_description_tk = 0
|
||||
|
||||
|
||||
# Calculate the total number of tokens between conditionning, documentation, and history
|
||||
total_tokens = n_cond_tk + n_doc_tk + n_history_tk
|
||||
total_tokens = n_cond_tk + n_doc_tk + n_history_tk + n_user_description_tk
|
||||
|
||||
# Calculate the available space for the messages
|
||||
available_space = self.config.ctx_size - n_tokens - total_tokens
|
||||
@ -1332,7 +1348,7 @@ class LoLLMsAPPI(LollmsApplication):
|
||||
discussion_messages += self.model.detokenize(message_tokens)
|
||||
|
||||
# Build the final prompt by concatenating the conditionning and discussion messages
|
||||
prompt_data = conditionning + documentation + history + discussion_messages
|
||||
prompt_data = conditionning + documentation + history + user_description + discussion_messages
|
||||
|
||||
# Tokenize the prompt data
|
||||
tokens = self.model.tokenize(prompt_data)
|
||||
|
29
docs/youtube/lord_of_internet.md
Normal file
29
docs/youtube/lord_of_internet.md
Normal file
@ -0,0 +1,29 @@
|
||||
Hi there,
|
||||
|
||||
Let's take a look at Lord of Internet.
|
||||
|
||||
This agent does internet research about the question you are asking before answering you.
|
||||
|
||||
Let's ask it about the new announcements of OpenAI.
|
||||
|
||||
Here the AI is searching for information on the internet, vectorizing it, extracting relevant data, then formulating answers and giving you its sources.
|
||||
|
||||
We can use the audio output button to read the output. You can select one of the available voices on your PC in the main configuration settings in the subsection audio.
|
||||
|
||||
OpenAI made several new announcements at their recent developer conference. They introduced a platform for creating and discovering custom versions of ChatGPT, which will be accessible through the GPT Store [1]. The GPT Store will allow users to share and sell their custom GPT bots, and OpenAI plans to compensate creators based on the usage of their GPTs [1]. OpenAI also announced improvements to their large language models, GPT-4 and GPT-3.5, including updated knowledge bases and a longer context window [2]. They introduced GPT-4 Turbo, which has been trained with information dating up to April 2023 and will be released in the next few weeks [2]. Additionally, OpenAI mentioned that they are working on transparently compensating creators and are exploring different payout models [1].
|
||||
|
||||
We can check the sources for more details.
|
||||
|
||||
We can configure the search parameters by pressing the icon of the agent.
|
||||
|
||||
For example, we can activate craft_search_query, which makes the AI craft a search query instead of using your question directly as the query. We can also control data vectorization parameters like chunk size and overlap, as well as the number of chunks to keep and the max summary size.
|
||||
|
||||
As you can see, here we have a slightly different answer but with more sources. The AI will always mention its sources when answering you to help you find the information. Also, it gives you the index of the chunk used to answer the question, which allows you to know where in the website the information was recovered.
|
||||
|
||||
Please consider testing Lord of Internet and give me some feedback on my lollms Discord channel.
|
||||
|
||||
Don't forget to comment, like, and subscribe for more videos about lollms.
|
||||
|
||||
Thanks for watching
|
||||
|
||||
See ya
|
@ -35,19 +35,7 @@ def run_git_pull():
|
||||
ASCIIColors.info("Lollms_core found in the app space.\nPulling last lollms_core")
|
||||
subprocess.run(["git", "-C", str(execution_path/"lollms_core"), "pull"])
|
||||
subprocess.run(["git", "-C", str(execution_path/"utilities/safe_store"), "pull"])
|
||||
# Pull the repository if it already exists
|
||||
|
||||
ASCIIColors.info("Bindings zoo found in your personal space.\nPulling last personalities zoo")
|
||||
subprocess.run(["git", "-C", self.lollms_paths.bindings_zoo_path, "pull"])
|
||||
# Pull the repository if it already exists
|
||||
ASCIIColors.info("Personalities zoo found in your personal space.\nPulling last personalities zoo")
|
||||
subprocess.run(["git", "-C", self.lollms_paths.personalities_zoo_path, "pull"])
|
||||
# Pull the repository if it already exists
|
||||
ASCIIColors.info("Extensions zoo found in your personal space.\nPulling last Extensions zoo")
|
||||
subprocess.run(["git", "-C", self.lollms_paths.extensions_zoo_path, "pull"])
|
||||
# Pull the repository if it already exists
|
||||
ASCIIColors.info("Models zoo found in your personal space.\nPulling last Models zoo")
|
||||
subprocess.run(["git", "-C", self.lollms_paths.models_zoo_path, "pull"])
|
||||
|
||||
except Exception as ex:
|
||||
print("Couldn't update submodules")
|
||||
|
@ -1 +1 @@
|
||||
Subproject commit 41bf1bf955cd92b885c4ddbc04febf35177ad8d8
|
||||
Subproject commit 0b4349d69b78f35abeefc520e9891e263964d0b2
|
@ -1 +1 @@
|
||||
Subproject commit 8f16d4f306de3d463ab2f59a448a24f7d7948414
|
||||
Subproject commit 4a9288feeba2e66aa8bab31cc52247c48cff721f
|
Loading…
Reference in New Issue
Block a user