upgraded code

Saifeddine ALOUI 2023-05-20 19:48:38 +02:00
parent 2a67b06968
commit ca9b616d2f
3 changed files with 25 additions and 11 deletions

.gitignore

@@ -166,4 +166,9 @@ web/.env.development
 node_modules/
 # Google chrome files
-*.crdownload
+*.crdownload
+# outputs folder
+outputs
+# junk stuff
+./src

app.py

@@ -518,18 +518,28 @@ class Gpt4AllWebUI(GPT4AllAPI):
     def update_message(self):
         discussion_id = request.args.get("id")
         new_message = request.args.get("message")
-        self.current_discussion.update_message(discussion_id, new_message)
-        return jsonify({"status": "ok"})
+        try:
+            self.current_discussion.update_message(discussion_id, new_message)
+            return jsonify({"status": "ok"})
+        except Exception as ex:
+            return jsonify({"status": "nok", "error":str(ex)})
     def message_rank_up(self):
         discussion_id = request.args.get("id")
-        new_rank = self.current_discussion.message_rank_up(discussion_id)
-        return jsonify({"new_rank": new_rank})
+        try:
+            new_rank = self.current_discussion.message_rank_up(discussion_id)
+            return jsonify({"status": "ok", "new_rank": new_rank})
+        except Exception as ex:
+            return jsonify({"status": "nok", "error":str(ex)})
     def message_rank_down(self):
         discussion_id = request.args.get("id")
-        new_rank = self.current_discussion.message_rank_down(discussion_id)
-        return jsonify({"new_rank": new_rank})
+        try:
+            new_rank = self.current_discussion.message_rank_down(discussion_id)
+            return jsonify({"status": "ok", "new_rank": new_rank})
+        except Exception as ex:
+            return jsonify({"status": "nok", "error":str(ex)})
     def delete_message(self):
         discussion_id = request.args.get("id")
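
The edits above give all three message endpoints the same JSON envelope: {"status": "ok", ...} on success and {"status": "nok", "error": ...} when the underlying discussion call raises. Below is a minimal standalone sketch of that envelope, assuming a plain Flask route and a hypothetical in-memory store in place of self.current_discussion; the names are illustrative, not taken from the repository.

from flask import Flask, request, jsonify

app = Flask(__name__)
messages = {}  # hypothetical store: {message_id: message_text}

@app.route("/update_message")
def update_message():
    discussion_id = request.args.get("id")
    new_message = request.args.get("message")
    try:
        if discussion_id not in messages:
            raise KeyError(f"no message with id {discussion_id}")
        messages[discussion_id] = new_message
        return jsonify({"status": "ok"})
    except Exception as ex:
        # Failures are reported in-band instead of surfacing as a 500 response.
        return jsonify({"status": "nok", "error": str(ex)})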


@@ -222,6 +222,7 @@ class ModelProcess:
     def step_callback(self, text, message_type):
         self.generation_queue.put((text,self.id, message_type))
     def _run(self):
+        self._rebuild_model()
         self._rebuild_personality()
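
This hunk adds a _rebuild_model() call at the top of _run, just before the existing _rebuild_personality() call. A rough sketch of that shape, assuming _run is the body of a child process that reports back through generation_queue; the class and the rebuild bodies below are placeholders, not the project's code.

import multiprocessing as mp

class GenerationWorker:
    def __init__(self):
        self.generation_queue = mp.Queue()
        self.id = 0

    def _rebuild_model(self):
        self.model = "model"              # placeholder for real model loading

    def _rebuild_personality(self):
        self.personality = "personality"  # placeholder for personality loading

    def step_callback(self, text, message_type):
        # Forward generated chunks back to the parent process.
        self.generation_queue.put((text, self.id, message_type))

    def _run(self):
        # Heavy resources are built inside the child process at startup.
        self._rebuild_model()
        self._rebuild_personality()
        self.step_callback("ready", 0)

if __name__ == "__main__":
    worker = GenerationWorker()
    child = mp.Process(target=worker._run)
    child.start()
    print(worker.generation_queue.get())  # ('ready', 0, 0)
    child.join()
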
@@ -308,8 +309,7 @@ class ModelProcess:
     def _callback(self, text):
         if not self.ready:
-            print(".",end="")
-            sys.stdout.flush()
+            print(".",end="", flush=True)
             return True
         else:
             # Stream the generated text to the main process
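
This hunk collapses a print(".") followed by sys.stdout.flush() into a single print(".", end="", flush=True); the same substitution appears again in process_chunk below. Both forms flush stdout at the same point, so progress output appears immediately instead of sitting in the buffer. A tiny illustration:

import sys
import time

for _ in range(3):
    print(".", end="")                  # old form: print, then flush explicitly
    sys.stdout.flush()
    time.sleep(0.2)

for _ in range(3):
    print(".", end="", flush=True)      # new form: same effect in one call
    time.sleep(0.2)
print()
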
@@ -667,8 +667,7 @@ class GPT4AllAPI():
         return string
     def process_chunk(self, chunk):
-        print(chunk,end="")
-        sys.stdout.flush()
+        print(chunk,end="", flush=True)
         self.bot_says += chunk
         if not self.personality.detect_antiprompt(self.bot_says):
             self.socketio.emit('message', {
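
For context, process_chunk is the streaming loop around that print: each chunk is echoed to the console, appended to bot_says, and forwarded to the web client over Socket.IO unless an antiprompt is detected in the accumulated text. A self-contained sketch of that accumulate-and-forward logic, with a plain callable in place of self.socketio.emit and a made-up antiprompt marker standing in for personality.detect_antiprompt:

def make_chunk_processor(emit, antiprompts=("### Human:",)):
    # `emit` is any callable that forwards text to the client; `antiprompts`
    # is a hypothetical list of stop markers.
    pieces = []

    def process_chunk(chunk):
        print(chunk, end="", flush=True)
        pieces.append(chunk)
        bot_says = "".join(pieces)
        if not any(marker in bot_says for marker in antiprompts):
            emit(bot_says)   # keep streaming the text built so far
            return True
        return False         # antiprompt detected: stop forwarding

    return process_chunk

# Example: collect what would be emitted to the client.
sent = []
process = make_chunk_processor(sent.append)
for piece in ["Hello", ", ", "world"]:
    process(piece)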