saloui 2023-07-21 17:01:08 +02:00
parent d2ae6c8949
commit f9cc998285
6 changed files with 86 additions and 74 deletions


@@ -759,36 +759,10 @@ class LoLLMsAPPI(LollmsApplication):
ASCIIColors.green(f"Received {self.nb_received_tokens} tokens",end="\r")
sys.stdout = sys.__stdout__
sys.stdout.flush()
detected_anti_prompt = False
anti_prompt_to_remove=""
for anti_prompt in self.personality.anti_prompts:
if anti_prompt.lower() in self.connections[client_id]["generated_text"].lower():
detected_anti_prompt=True
anti_prompt_to_remove = anti_prompt.lower()
if not detected_anti_prompt:
self.socketio.emit('message', {
'data': chunk,# self.connections[client_id]["generated_text"],
'user_message_id':self.current_user_message_id,
'ai_message_id':self.current_ai_message_id,
'discussion_id':self.current_discussion.discussion_id,
'message_type': MSG_TYPE.MSG_TYPE_CHUNK.value if self.nb_received_tokens>1 else MSG_TYPE.MSG_TYPE_FULL.value,
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
}, room=client_id
)
self.socketio.sleep(0.01)
self.current_discussion.update_message(self.current_ai_message_id, self.connections[client_id]["generated_text"])
# if stop generation is detected then stop
if not self.cancel_gen:
return True
else:
self.cancel_gen = False
ASCIIColors.warning("Generation canceled")
return False
else:
self.connections[client_id]["generated_text"] = self.remove_text_from_string(self.connections[client_id]["generated_text"], anti_prompt_to_remove)
antiprompt = self.personality.detect_antiprompt(self.connections[client_id]["generated_text"])
if antiprompt:
ASCIIColors.warning(f"Detected hallucination with antiprompt: {antiprompt}")
self.connections[client_id]["generated_text"] = self.remove_text_from_string(self.connections[client_id]["generated_text"],antiprompt)
self.socketio.emit('message', {
'data': self.connections[client_id]["generated_text"],
'user_message_id':self.current_user_message_id,
@@ -799,10 +773,30 @@ class LoLLMsAPPI(LollmsApplication):
"ai_message_id": self.current_ai_message_id,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
}, room=client_id
)
ASCIIColors.warning("The model is halucinating")
)
return False
else:
self.socketio.emit('message', {
'data': chunk,# self.connections[client_id]["generated_text"],
'user_message_id':self.current_user_message_id,
'ai_message_id':self.current_ai_message_id,
'discussion_id':self.current_discussion.discussion_id,
'message_type': MSG_TYPE.MSG_TYPE_CHUNK.value if self.nb_received_tokens>1 else MSG_TYPE.MSG_TYPE_FULL.value,
"user_message_id": self.current_user_message_id,
"ai_message_id": self.current_ai_message_id,
'finished_generating_at': self.current_discussion.current_message_finished_generating_at,
}, room=client_id
)
self.socketio.sleep(0.01)
self.current_discussion.update_message(self.current_ai_message_id, self.connections[client_id]["generated_text"])
# if stop generation is detected then stop
if not self.cancel_gen:
return True
else:
self.cancel_gen = False
ASCIIColors.warning("Generation canceled")
return False
# Stream the generated text to the main process
elif message_type == MSG_TYPE.MSG_TYPE_FULL:
self.connections[client_id]["generated_text"] = chunk
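The first hunk above replaces the inline anti-prompt scan, which lower-cased the accumulated text and tested every entry of personality.anti_prompts, with a single call to self.personality.detect_antiprompt() followed by remove_text_from_string(). The sketch below is a minimal, self-contained approximation of that behaviour, reconstructed from the removed loop; the names mirror the diff, but the function bodies are assumptions rather than the repository's actual implementation, including the guess that remove_text_from_string truncates at the marker instead of only deleting it.

```python
# Sketch of the anti-prompt handling this commit factors into the personality
# class. Bodies are reconstructed from the removed inline loop (assumptions).

def detect_antiprompt(generated_text, anti_prompts):
    """Return the first anti-prompt found in generated_text (case-insensitive), else None."""
    lowered = generated_text.lower()
    for anti_prompt in anti_prompts:
        if anti_prompt.lower() in lowered:
            return anti_prompt
    return None

def remove_text_from_string(text, marker):
    """Assumed behaviour: cut text at the first case-insensitive occurrence of marker."""
    index = text.lower().find(marker.lower())
    return text if index == -1 else text[:index]

generated = "Sure, here is the answer.\n### Human: now write the next user turn"
found = detect_antiprompt(generated, ["### Human:", "### Assistant:"])
if found:
    generated = remove_text_from_string(generated, found)
print(generated)  # -> "Sure, here is the answer." (hallucinated turn removed)
```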

app.py

@@ -119,13 +119,31 @@ def check_update(branch_name="main"):
def run_restart_script(args):
restart_script = "restart_script.py"
# Convert Namespace object to a dictionary
args_dict = vars(args)
# Filter out any key-value pairs where the value is None
valid_args = {key: value for key, value in args_dict.items() if value is not None}
# Save the arguments to a temporary file
temp_file = "temp_args.txt"
with open(temp_file, "w") as file:
file.write(" ".join(args))
# Convert the valid_args dictionary to a string in the format "key1 value1 key2 value2 ..."
arg_string = " ".join([f"--{key} {value}" for key, value in valid_args.items()])
file.write(arg_string)
os.system(f"python {restart_script} {temp_file}")
sys.exit()
# Reload the main script with the original arguments
if os.path.exists(temp_file):
with open(temp_file, "r") as file:
args = file.read().split()
main_script = "app.py" # Replace with the actual name of your main script
os.system(f"python {main_script} {' '.join(args)}")
os.remove(temp_file)
else:
print("Error: Temporary arguments file not found.")
sys.exit(1)
def run_update_script(args):
update_script = "update_script.py"
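In app.py, run_restart_script now converts the parsed Namespace to a dictionary, drops unset (None) options, flattens the rest to "--key value" pairs, writes them to temp_args.txt, and hands control to restart_script.py with that file as its argument. The sketch below isolates the flattening step; the option names (--host, --port, --config) are illustrative placeholders, not necessarily app.py's real flags, and only the filtering and formatting logic mirrors the diff.

```python
# Sketch of the argument flattening performed by the new run_restart_script.
# Option names are illustrative; only the filter/format logic follows the diff.
import argparse

def serialize_args(args):
    # Drop options whose value is None, then flatten to "--key value" pairs.
    valid_args = {key: value for key, value in vars(args).items() if value is not None}
    return " ".join(f"--{key} {value}" for key, value in valid_args.items())

parser = argparse.ArgumentParser()
parser.add_argument("--host", default="localhost")
parser.add_argument("--port", type=int, default=9600)
parser.add_argument("--config", default=None)  # left unset, so it is filtered out

ns = parser.parse_args([])
arg_string = serialize_args(ns)
print(arg_string)  # -> "--host localhost --port 9600"

with open("temp_args.txt", "w") as file:
    file.write(arg_string)
# run_restart_script then runs: python restart_script.py temp_args.txt
```

As written, this flattening only round-trips plain key/value options; store_true flags or values containing spaces would need extra quoting or handling.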

File diff suppressed because one or more lines are too long


@@ -1,4 +1,4 @@
.dot{width:10px;height:10px;border-radius:50%}.dot-green{background-color:green}.dot-red{background-color:red}.active-tab{font-weight:700}.scrollbar[data-v-3cb88319]{scrollbar-width:thin;scrollbar-color:var(--scrollbar-thumb-color) var(--scrollbar-track-color);white-space:pre-wrap;overflow-wrap:break-word}.scrollbar[data-v-3cb88319]::-webkit-scrollbar{width:8px}.scrollbar[data-v-3cb88319]::-webkit-scrollbar-track{background-color:var(--scrollbar-track-color)}.scrollbar[data-v-3cb88319]::-webkit-scrollbar-thumb{background-color:var(--scrollbar-thumb-color);border-radius:4px}.scrollbar[data-v-3cb88319]::-webkit-scrollbar-thumb:hover{background-color:var(--scrollbar-thumb-hover-color)}.toastItem-enter-active[data-v-3ffdabf3],.toastItem-leave-active[data-v-3ffdabf3]{transition:all .5s ease}.toastItem-enter-from[data-v-3ffdabf3],.toastItem-leave-to[data-v-3ffdabf3]{opacity:0;transform:translate(-30px)}.selected-choice{background-color:#bde4ff}.list-move[data-v-f293bffd],.list-enter-active[data-v-f293bffd],.list-leave-active[data-v-f293bffd]{transition:all .5s ease}.list-enter-from[data-v-f293bffd]{transform:translatey(-30px)}.list-leave-to[data-v-f293bffd]{opacity:0;transform:translatey(30px)}.list-leave-active[data-v-f293bffd]{position:absolute}.bounce-enter-active[data-v-f293bffd]{animation:bounce-in-f293bffd .5s}.bounce-leave-active[data-v-f293bffd]{animation:bounce-in-f293bffd .5s reverse}@keyframes bounce-in-f293bffd{0%{transform:scale(0)}50%{transform:scale(1.25)}to{transform:scale(1)}}.bg-primary-light[data-v-f293bffd]{background-color:#0ff}.hover[data-v-f293bffd]:bg-primary-light:hover{background-color:#7fffd4}.font-bold[data-v-f293bffd]{font-weight:700}.hljs-comment,.hljs-quote{color:#7285b7}.hljs-deletion,.hljs-name,.hljs-regexp,.hljs-selector-class,.hljs-selector-id,.hljs-tag,.hljs-template-variable,.hljs-variable{color:#ff9da4}.hljs-built_in,.hljs-link,.hljs-literal,.hljs-meta,.hljs-number,.hljs-params,.hljs-type{color:#ffc58f}.hljs-attribute{color:#ffeead}.hljs-addition,.hljs-bullet,.hljs-string,.hljs-symbol{color:#d1f1a9}.hljs-section,.hljs-title{color:#bbdaff}.hljs-keyword,.hljs-selector-tag{color:#ebbbff}.hljs{background:#002451;color:#fff}pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px}/*!
.dot{width:10px;height:10px;border-radius:50%}.dot-green{background-color:green}.dot-red{background-color:red}.active-tab{font-weight:700}.scrollbar[data-v-3cb88319]{scrollbar-width:thin;scrollbar-color:var(--scrollbar-thumb-color) var(--scrollbar-track-color);white-space:pre-wrap;overflow-wrap:break-word}.scrollbar[data-v-3cb88319]::-webkit-scrollbar{width:8px}.scrollbar[data-v-3cb88319]::-webkit-scrollbar-track{background-color:var(--scrollbar-track-color)}.scrollbar[data-v-3cb88319]::-webkit-scrollbar-thumb{background-color:var(--scrollbar-thumb-color);border-radius:4px}.scrollbar[data-v-3cb88319]::-webkit-scrollbar-thumb:hover{background-color:var(--scrollbar-thumb-hover-color)}.toastItem-enter-active[data-v-3ffdabf3],.toastItem-leave-active[data-v-3ffdabf3]{transition:all .5s ease}.toastItem-enter-from[data-v-3ffdabf3],.toastItem-leave-to[data-v-3ffdabf3]{opacity:0;transform:translate(-30px)}.selected-choice{background-color:#bde4ff}.list-move[data-v-80e0d28b],.list-enter-active[data-v-80e0d28b],.list-leave-active[data-v-80e0d28b]{transition:all .5s ease}.list-enter-from[data-v-80e0d28b]{transform:translatey(-30px)}.list-leave-to[data-v-80e0d28b]{opacity:0;transform:translatey(30px)}.list-leave-active[data-v-80e0d28b]{position:absolute}.bounce-enter-active[data-v-80e0d28b]{animation:bounce-in-80e0d28b .5s}.bounce-leave-active[data-v-80e0d28b]{animation:bounce-in-80e0d28b .5s reverse}@keyframes bounce-in-80e0d28b{0%{transform:scale(0)}50%{transform:scale(1.25)}to{transform:scale(1)}}.bg-primary-light[data-v-80e0d28b]{background-color:#0ff}.hover[data-v-80e0d28b]:bg-primary-light:hover{background-color:#7fffd4}.font-bold[data-v-80e0d28b]{font-weight:700}.hljs-comment,.hljs-quote{color:#7285b7}.hljs-deletion,.hljs-name,.hljs-regexp,.hljs-selector-class,.hljs-selector-id,.hljs-tag,.hljs-template-variable,.hljs-variable{color:#ff9da4}.hljs-built_in,.hljs-link,.hljs-literal,.hljs-meta,.hljs-number,.hljs-params,.hljs-type{color:#ffc58f}.hljs-attribute{color:#ffeead}.hljs-addition,.hljs-bullet,.hljs-string,.hljs-symbol{color:#d1f1a9}.hljs-section,.hljs-title{color:#bbdaff}.hljs-keyword,.hljs-selector-tag{color:#ebbbff}.hljs{background:#002451;color:#fff}pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px}/*!
Theme: Tokyo-night-Dark
origin: https://github.com/enkia/tokyo-night-vscode-theme
Description: Original highlight.js style

web/dist/index.html vendored

@@ -6,8 +6,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LoLLMS WebUI - Welcome</title>
<script type="module" crossorigin src="/assets/index-657d4196.js"></script>
<link rel="stylesheet" href="/assets/index-2cdce675.css">
<script type="module" crossorigin src="/assets/index-2d32dbfc.js"></script>
<link rel="stylesheet" href="/assets/index-b25d2ad1.css">
</head>
<body>
<div id="app"></div>


@@ -1526,6 +1526,7 @@ export default {
data() {
return {
has_updates:false,
// Variant selection
variant_choices:[],
variantSelectionDialogVisible:false,
@@ -1585,11 +1586,16 @@ export default {
},
async created() {
socket.on('loading_text',this.on_loading_text);
this.updateHasUpdates();
//await socket.on('install_progress', this.progressListener);
},
methods: {
async updateHasUpdates() {
let res = await this.api_get_req("check_update");
this.has_updates = res["update_availability"];
console.log("has_updates", this.has_updates);
},
onVariantChoiceSelected(choice){
this.selected_variant = choice
},
@@ -2872,12 +2878,6 @@ export default {
}
},
computed: {
has_updates:{
async get(){
res = await this.api_get_req("check_update")
return res["update_availability"]
}
},
configFile: {
get() {
return this.$store.state.config;