Upgraded UI

Saifeddine ALOUI 2023-10-30 01:32:24 +01:00
parent 46012e316d
commit b8bbd1764d
13 changed files with 164 additions and 118 deletions


@@ -426,7 +426,7 @@ class LoLLMsAPPI(LollmsApplication):
def uninstall_model(data):
model_path = data['path']
model_type:str=data.get("type","ggml")
installation_dir = self.binding.searchModelParentFolder()
installation_dir = self.binding.searchModelParentFolder(model_path)
binding_folder = self.config["binding_name"]
if model_type=="gptq":
@@ -700,18 +700,30 @@ class LoLLMsAPPI(LollmsApplication):
@self.socketio.on('create_empty_message')
def create_empty_message(data):
client_id = request.sid
if self.personality is None:
self.notify("Select a personality",False,None)
return
ASCIIColors.info(f"Text generation requested by client: {client_id}")
# send the message to the bot
print(f"Creating an empty message for AI answer orientation")
if self.connections[client_id]["current_discussion"]:
if not self.model:
self.notify("No model selected. Please make sure you select a model before starting generation", False, client_id)
return
self.new_message(client_id, self.personality.name, "<edit this to put your ai answer start>")
self.socketio.sleep(0.01)
type = data.get("type",0)
if type==0:
ASCIIColors.info(f"Building empty User message requested by : {client_id}")
# send the message to the bot
print(f"Creating an empty message for AI answer orientation")
if self.connections[client_id]["current_discussion"]:
if not self.model:
self.notify("No model selected. Please make sure you select a model before starting generation", False, client_id)
return
self.new_message(client_id, self.config.user_name, "", sender_type=SENDER_TYPES.SENDER_TYPES_USER)
self.socketio.sleep(0.01)
else:
if self.personality is None:
self.notify("Select a personality",False,None)
return
ASCIIColors.info(f"Building empty AI message requested by : {client_id}")
# send the message to the bot
print(f"Creating an empty message for AI answer orientation")
if self.connections[client_id]["current_discussion"]:
if not self.model:
self.notify("No model selected. Please make sure you select a model before starting generation", False, client_id)
return
self.new_message(client_id, self.personality.name, "[edit this to put your ai answer start]")
self.socketio.sleep(0.01)
# A copy of the original lollms-server generation code needed for playground
@self.socketio.on('generate_text')
@@ -1302,8 +1314,8 @@ class LoLLMsAPPI(LollmsApplication):
def new_message(self,
client_id,
sender,
content,
sender=None,
content="",
parameters=None,
metadata=None,
ui=None,
@@ -1312,7 +1324,8 @@ class LoLLMsAPPI(LollmsApplication):
):
mtdt = metadata if metadata is None or type(metadata) == str else json.dumps(metadata, indent=4)
if sender==None:
sender= self.personality.name
msg = self.connections[client_id]["current_discussion"].add_message(
message_type = message_type.value,
sender_type = sender_type.value,
@@ -1329,7 +1342,7 @@ class LoLLMsAPPI(LollmsApplication):
self.socketio.emit('new_message',
{
"sender": self.personality.name,
"sender": sender,
"message_type": message_type.value,
"sender_type": SENDER_TYPES.SENDER_TYPES_AI.value,
"content": content,

app.py (9 changed lines)

@@ -931,7 +931,6 @@ class LoLLMsWebUI(LoLLMsAPPI):
try:
if self.binding:
self.binding.destroy_model()
self.binding = None
self.model = None
for per in self.mounted_personalities:
@@ -2129,13 +2128,7 @@ class LoLLMsWebUI(LoLLMsAPPI):
entry["value"] = [entry["value"]]
self.binding.binding_config.update_template(data)
self.binding.binding_config.config.save_config()
self.binding = None
self.model = None
for per in self.mounted_personalities:
per.model = None
gc.collect()
self.binding= BindingBuilder().build_binding(self.config, self.lollms_paths)
self.model = self.binding.build_model()
self.binding.settings_updated()
if self.config.auto_save:
ASCIIColors.info("Saving configuration")
self.config.save_config()

@@ -1 +1 @@
Subproject commit 239955db8b9dc812d12668a32a26375fa0326298
Subproject commit e08bb1f052cb95ec8b849ba7c1e7de202f5e19c8

@@ -1 +1 @@
Subproject commit 333807a01e4b847167b32013431fbcf30f582ccd
Subproject commit d8ed9802bec56efa9f1cf45a60584d462723567e

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

web/dist/index.html (vendored, 4 changed lines)

@@ -6,8 +6,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LoLLMS WebUI - Welcome</title>
<script type="module" crossorigin src="/assets/index-aa1d2d0e.js"></script>
<link rel="stylesheet" href="/assets/index-8f8ea2a2.css">
<script type="module" crossorigin src="/assets/index-ddd279d2.js"></script>
<link rel="stylesheet" href="/assets/index-575b8ec5.css">
</head>
<body>
<div id="app"></div>


@@ -136,7 +136,7 @@
<span class="sr-only">Selecting model...</span>
</div>
</div>
<div class="w-fit group relative">
<div class="w-fit group relative" v-if="!loading" >
<!-- :onShowPersList="onShowPersListFun" -->
<div class= "group w-full inline-flex absolute opacity-0 group-hover:opacity-100 transform group-hover:-translate-y-10 group-hover:translate-x-15 transition-all duration-300">
<div class="w-full"
@@ -160,7 +160,7 @@
</div>
</div>
<div class="w-fit group relative">
<div class="w-fit group relative" v-if="!loading" >
<!-- :onShowPersList="onShowPersListFun" -->
<div class= "group w-full inline-flex absolute opacity-0 group-hover:opacity-100 transform group-hover:-translate-y-10 group-hover:translate-x-15 transition-all duration-300">
<div class="w-full"
@@ -199,7 +199,7 @@
</div>
<div class="w-fit">
<div class="w-fit" v-if="!loading" >
<PersonalitiesCommands
v-if="personalities_ready && this.$store.state.mountedPersArr[this.$store.state.config.active_personality_id].commands!=''"
:commandsList="this.$store.state.mountedPersArr[this.$store.state.config.active_personality_id].commands"
@@ -209,7 +209,7 @@
></PersonalitiesCommands>
</div>
<div class="relative grow">
<div class="relative grow" v-if="!loading" >
<textarea id="chat" rows="1" v-model="message" title="Hold SHIFT + ENTER to add new line"
class="inline-block no-scrollbar p-2.5 w-full text-sm text-gray-900 bg-bg-light rounded-lg border border-gray-300 focus:ring-blue-500 focus:border-blue-500 dark:bg-bg-dark dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-blue-500 dark:focus:border-blue-500"
placeholder="Send message..." @keydown.enter.exact="submitOnEnter($event)">
@@ -226,7 +226,7 @@
</div>
<!-- BUTTONS -->
<div class="inline-flex justify-center rounded-full ">
<button
<button v-if="!loading"
type="button"
@click="startSpeechRecognition"
:class="{ 'text-red-500': isLesteningToVoice }"
@@ -234,9 +234,16 @@
>
<i data-feather="mic"></i>
</button>
<button v-if="!loading" type="button" @click="makeAnEmptyUserMessage" title="New empty user message"
class=" w-6 text-blue-400 hover:text-secondary duration-75 active:scale-90">
<i data-feather="message-square"></i>
<span class="sr-only">New empty User message</span>
</button>
<button v-if="!loading" type="button" @click="makeAnEmptyMessage"
class=" w-6 hover:text-secondary duration-75 active:scale-90">
<button v-if="!loading" type="button" @click="makeAnEmptyAIMessage" title="New empty ai message"
class=" w-6 text-red-400 hover:text-secondary duration-75 active:scale-90">
<i data-feather="message-square"></i>
@@ -319,7 +326,7 @@ console.log("modelImgPlaceholder:",modelImgPlaceholder)
const bUrl = import.meta.env.VITE_LOLLMS_API_BASEURL
export default {
name: 'ChatBox',
emits: ["messageSentEvent", "stopGenerating", "loaded", "createEmptyMessage"],
emits: ["messageSentEvent", "stopGenerating", "loaded", "createEmptyUserMessage", "createEmptyAIMessage"],
props: {
onTalk: Function,
discussionList: Array,
@@ -385,7 +392,7 @@ export default {
},
async unmountPersonality(pers) {
this.isLoading = true
this.loading = true
if (!pers) { return }
const res = await this.unmount_personality(pers.personality || pers)
@@ -412,7 +419,7 @@ export default {
this.$refs.toast.showToast("Could not unmount personality\nError: " + res.error, 4, false)
}
this.isLoading = false
this.loading = false
},
async unmount_personality(pers) {
if (!pers) { return { 'status': false, 'error': 'no personality - unmount_personality' } }
@@ -616,7 +623,8 @@ export default {
console.log('File sent successfully');
this.isFileSentList[this.filesList.length-1]=true;
console.log(this.isFileSentList)
this.onShowToastMessage("File uploaded successfully",4,true);
this.onShowToastMessage("File uploaded successfully",4,true);
this.loading = false
next();
}
};
@@ -627,8 +635,11 @@ export default {
console.log('Uploading file');
readNextChunk();
},
makeAnEmptyMessage() {
this.$emit('createEmptyMessage')
makeAnEmptyUserMessage() {
this.$emit('createEmptyUserMessage')
},
makeAnEmptyAIMessage() {
this.$emit('createEmptyAIMessage')
},
startSpeechRecognition() {
if ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window) {
@@ -738,24 +749,24 @@ export default {
this.$emit('stopGenerating')
},
addFiles(event) {
console.log("Adding files");
const newFiles = [...event.target.files];
let index = 0;
const sendNextFile = () => {
if (index >= newFiles.length) {
console.log(`Files_list: ${this.filesList}`);
return;
}
const file = newFiles[index];
this.filesList.push(file);
this.isFileSentList.push(false);
this.send_file(file, () => {
index++;
console.log("Adding files");
const newFiles = [...event.target.files];
let index = 0;
const sendNextFile = () => {
if (index >= newFiles.length) {
console.log(`Files_list: ${this.filesList}`);
return;
}
const file = newFiles[index];
this.filesList.push(file);
this.isFileSentList.push(false);
this.send_file(file, () => {
index++;
sendNextFile();
}
);
};
sendNextFile();
}
);
};
sendNextFile();
}
},
watch: {


@@ -218,6 +218,9 @@
</div>
</div>
</div>
<div class="absolute bottom-0 left-0 w-full bg-blue-200 dark:bg-blue-800 text-white py-2 cursor-pointer hover:text-green-500" @click="showDatabaseSelector">
<p class="ml-2">Current database: {{ formatted_database_name }}</p>
</div>
</div>
</transition>
<div v-if="isReady" class="relative flex flex-col flex-grow " @dragover.stop.prevent="setDropZoneChat()">
@@ -253,7 +256,8 @@
<div class=" bottom-0 container flex flex-row items-center justify-center " v-if="currentDiscussion.id">
<ChatBox ref="chatBox"
@messageSentEvent="sendMsg"
@createEmptyMessage="createEmptyMessage"
@createEmptyUserMessage="createEmptyUserMessage"
@createEmptyAIMessage="createEmptyAIMessage"
:loading="isGenerating"
:discussionList="discussionArr"
@stopGenerating="stopGenerating"
@@ -427,6 +431,9 @@ export default {
}
},
methods: {
showDatabaseSelector() {
this.database_selectorDialogVisible = true;
},
async ondatabase_selectorDialogSelected(choice){
console.log("Selected:",choice)
},
@@ -1016,8 +1023,11 @@ export default {
console.log("Error: Could not get generation status", error);
});
},
createEmptyMessage(){
socket.emit('create_empty_message', {});
createEmptyUserMessage(){
socket.emit('create_empty_message', {"type":0}); // 0 for user and 1 for AI
},
createEmptyAIMessage(){
socket.emit('create_empty_message', {"type":1}); // 0 for user and 1 for AI
},
sendMsg(msg) {
// Sends message to binding
@@ -1755,6 +1765,11 @@ export default {
},
computed: {
formatted_database_name() {
const db_name = this.$store.state.config.db_path;
const trimmed_name = db_name.slice(0, db_name.length - 3);
return trimmed_name;
},
UseDiscussionHistory() {
return this.$store.state.config.use_discussions_history;
},


@@ -2988,6 +2988,9 @@ export default {
update_binding(value) {
// eslint-disable-next-line no-unused-vars
this.isLoading = true
this.$store.state.modelsZoo=[]
this.configFile.model_name = null
this.$store.state.config.model_name = null
console.log("updating binding_name")
this.update_setting('binding_name', value, async (res) => {
console.log("updated binding_name")
@@ -3002,10 +3005,16 @@
this.settingsChanged = true
this.isLoading = false
nextTick(() => {
feather.replace()
})
console.log("updating model")
// If binding changes then reset model
this.update_model(null).then(()=>{
});
@@ -3014,6 +3023,11 @@
})
})
nextTick(() => {
feather.replace()
})
},
async update_model(value) {

@@ -1 +1 @@
Subproject commit 002376c68bb41ac07a363e8d37db557c52e057a1
Subproject commit 66ea83025bb17fb4bf9e8047f3f6fe1cc6a5b246

@@ -1 +1 @@
Subproject commit ddd8c60e32cc21d4a6a1eec599b29377fbcc5ed5
Subproject commit d19610e0fba71785e4518ff566af31d48eea5eae

@@ -1 +1 @@
Subproject commit 0dcce63c78983198630f7c14725845b7959bf80d
Subproject commit 2c2c172ef7e4f5c4dc8dffafc56b27e7ed559a06