This commit is contained in:
saloui 2023-09-29 19:31:20 +02:00
parent 2c15c559a1
commit 09a345a14e
7 changed files with 81 additions and 45 deletions

View File

@ -161,6 +161,8 @@ class LoLLMsAPPI(LollmsApplication):
self.connections[request.sid] = {
"current_discussion":self.db.load_last_discussion(),
"generated_text":"",
"continuing": False,
"first_chunk": True,
"cancel_generation": False,
"generation_thread": None,
"processing":False,
@ -788,6 +790,10 @@ class LoLLMsAPPI(LollmsApplication):
client_id = request.sid
self.connections[client_id]["generated_text"]=""
self.connections[client_id]["cancel_generation"]=False
self.connections[client_id]["continuing"]=False
self.connections[client_id]["first_chunk"]=True
if not self.model:
ASCIIColors.error("Model not selected. Please select a model")
@ -827,6 +833,9 @@ class LoLLMsAPPI(LollmsApplication):
@socketio.on('generate_msg_from')
def generate_msg_from(data):
client_id = request.sid
self.connections[client_id]["continuing"]=False
self.connections[client_id]["first_chunk"]=True
if self.connections[client_id]["current_discussion"] is None:
ASCIIColors.warning("Please select a discussion")
self.notify("Please select a discussion first", False, client_id)
@ -835,7 +844,7 @@ class LoLLMsAPPI(LollmsApplication):
if id_==-1:
message = self.connections[client_id]["current_discussion"].current_message
else:
message = self.connections[client_id]["current_discussion"].get_message(id_)
message = self.connections[client_id]["current_discussion"].load_message(id_)
if message is None:
return
self.connections[client_id]['generation_thread'] = threading.Thread(target=self.start_message_generation, args=(message, message.id, client_id))
@ -844,6 +853,9 @@ class LoLLMsAPPI(LollmsApplication):
@socketio.on('continue_generate_msg_from')
def handle_connection(data):
client_id = request.sid
self.connections[client_id]["continuing"]=True
self.connections[client_id]["first_chunk"]=True
if self.connections[client_id]["current_discussion"] is None:
ASCIIColors.yellow("Please select a discussion")
self.notify("Please select a discussion", False, client_id)
@ -852,7 +864,7 @@ class LoLLMsAPPI(LollmsApplication):
if id_==-1:
message = self.connections[client_id]["current_discussion"].current_message
else:
message = self.connections[client_id]["current_discussion"].get_message(id_)
message = self.connections[client_id]["current_discussion"].load_message(id_)
self.connections[client_id]["generated_text"]=message.content
self.connections[client_id]['generation_thread'] = threading.Thread(target=self.start_message_generation, args=(message, message.id, client_id, True))
@ -923,7 +935,10 @@ class LoLLMsAPPI(LollmsApplication):
installation_option=InstallOption.FORCE_INSTALL)
mounted_personalities.append(personality)
ASCIIColors.info("Reverted to default personality")
print(f'selected : {self.config["active_personality_id"]}')
if self.config["active_personality_id"]>=0 and self.config["active_personality_id"]<len(self.config["personalities"]):
ASCIIColors.success(f'selected model : {self.config["personalities"][self.config["active_personality_id"]]}')
else:
ASCIIColors.warning('An error was encountered while trying to mount personality')
ASCIIColors.success(f" ╔══════════════════════════════════════════════════╗ ")
ASCIIColors.success(f" ║ Done ║ ")
ASCIIColors.success(f" ╚══════════════════════════════════════════════════╝ ")
@ -1004,7 +1019,11 @@ class LoLLMsAPPI(LollmsApplication):
def prepare_reception(self, client_id):
self.connections[client_id]["generated_text"] = ""
if not self.connections[client_id]["continuing"]:
self.connections[client_id]["generated_text"] = ""
self.connections[client_id]["first_chunk"]=True
self.nb_received_tokens = 0
self.start_time = datetime.now()
@ -1292,7 +1311,11 @@ class LoLLMsAPPI(LollmsApplication):
return False
else:
self.nb_received_tokens += 1
self.update_message(client_id, chunk, parameters, metadata)
if self.connections[client_id]["continuing"] and self.connections[client_id]["first_chunk"]:
self.update_message(client_id, self.connections[client_id]["generated_text"], parameters, metadata)
else:
self.update_message(client_id, chunk, parameters, metadata)
self.connections[client_id]["first_chunk"]=False
# if stop generation is detected then stop
if not self.cancel_gen:
return True

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

4
web/dist/index.html vendored
View File

@ -6,8 +6,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LoLLMS WebUI - Welcome</title>
<script type="module" crossorigin src="/assets/index-a5d4fd66.js"></script>
<link rel="stylesheet" href="/assets/index-5e4931f7.css">
<script type="module" crossorigin src="/assets/index-fa3b8c0d.js"></script>
<link rel="stylesheet" href="/assets/index-c6ea07c5.css">
</head>
<body>
<div id="app"></div>

View File

@ -41,7 +41,7 @@
</div>
<div v-if="personality.language" class="flex items-center">
<i data-feather="globe" class="w-5 m-1"></i>
<b>Languages:&nbsp;</b>
<b>Language:&nbsp;</b>
{{ personality.language }}
</div>
<div class="flex items-center">

View File

@ -9,6 +9,16 @@ import './assets/tailwind.css'
const app = createApp(App)
console.log("Loaded main.js")
/**
 * Shallow-copies an object's own enumerable properties into a new plain object.
 *
 * Fix: the previous implementation called `obj.hasOwnProperty(key)` directly,
 * which throws a TypeError for null-prototype objects (e.g. created via
 * `Object.create(null)`). Object spread has the same own-enumerable-property
 * semantics but is safe for any object, and also handles `null`/`undefined`
 * inputs by returning an empty object — matching the old `for...in` behavior.
 *
 * Note: the copy is shallow — nested objects/arrays are shared with the source.
 *
 * @param {Object} obj - source object (may be null/undefined)
 * @returns {Object} a new plain object with the same own enumerable properties
 */
function copyObject(obj) {
  return { ...obj };
}
// Create a new store instance.
export const store = createStore({
state () {
@ -146,7 +156,7 @@ export const store = createStore({
// Handle error
}
},
async refreshPersonalitiesArr({ commit }) {
async refreshPersonalitiesZoo({ commit }) {
let personalities = []
const catdictionary = await api_get_req("get_all_personalities")
const catkeys = Object.keys(catdictionary); // returns categories
@ -200,11 +210,13 @@ export const store = createStore({
// console.log('perrs listo',this.state.personalities)
for (let i = 0; i < this.state.config.personalities.length; i++) {
const full_path_item = this.state.config.personalities[i]
const index = this.state.personalities.findIndex(item => item.full_path == full_path_item || item.full_path+':'+item.language == full_path_item)
const parts = full_path_item.split(':')
const index = this.state.personalities.findIndex(item => item.full_path == full_path_item || item.full_path == parts[0])
if(index>=0){
const pers = this.state.personalities[index]
console.log("Found personality : ", JSON.stringify(pers))
let pers = copyObject(this.state.personalities[index])
if(parts.length>0){
pers.language = parts[1]
}
// console.log(`Personality : ${JSON.stringify(pers)}`)
if (pers) {
mountedPersArr.push(pers)
@ -217,6 +229,7 @@ export const store = createStore({
console.log("Couldn't load personality : ",full_path_item)
}
}
console.log("Mounted personalities : ", mountedPersArr)
commit('setMountedPersArr', mountedPersArr);
this.state.mountedPers = this.state.personalities[this.state.personalities.findIndex(item => item.full_path == this.state.config.personalities[this.state.config.active_personality_id] || item.full_path+':'+item.language ==this.state.config.personalities[this.state.config.active_personality_id])]
@ -436,7 +449,7 @@ app.mixin({
this.$store.dispatch('refreshExtensionsZoo');
this.$store.dispatch('refreshModels');
await this.$store.dispatch('refreshPersonalitiesArr')
await this.$store.dispatch('refreshPersonalitiesZoo')
this.$store.dispatch('refreshMountedPersonalities');
this.$store.state.ready = true
@ -470,4 +483,4 @@ app.use(router)
app.use(store)
app.mount('#app')
export{logObjectProperties}
export{logObjectProperties, copyObject}

View File

@ -3168,7 +3168,7 @@ export default {
if (res) {
this.$store.dispatch('refreshConfig').then(() => {
this.$store.dispatch('refreshPersonalitiesArr').then(() => {
this.$store.dispatch('refreshPersonalitiesZoo').then(() => {
this.$store.dispatch('refreshMountedPersonalities');
});
});