Mirror of https://github.com/ParisNeo/lollms-webui.git, synced 2025-01-18 02:39:47 +00:00

Commit 79a0637610 (parent 2a6f70bd87): speed
@@ -18,6 +18,7 @@ import time
import requests
from tqdm import tqdm
import traceback
import sys

__author__ = "parisneo"
__github__ = "https://github.com/ParisNeo/gpt4all-ui"
@@ -368,10 +369,6 @@ class ModelProcess:
        return output

    def _callback(self, text, text_type=0):
        try:
            print(str(text), end="", flush=True)
        except Exception as ex:
            print(".")
        self.curent_text += text
        detected_anti_prompt = False
        anti_prompt_to_remove=""
@@ -382,7 +379,6 @@ class ModelProcess:

        if not detected_anti_prompt:
            if not self.ready:
                print(".",end="", flush=True)
                return True
            else:
                # Stream the generated text to the main process
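The two ModelProcess hunks above show a streaming callback that prints each chunk, accumulates it, and checks the accumulated text for anti-prompts before forwarding it to the main process. The following is a minimal, hypothetical sketch of that idea, not the repository's implementation: the names ANTI_PROMPTS, StreamAccumulator and on_token are invented for illustration.

ANTI_PROMPTS = ["### Human:", "### Assistant:"]  # assumed stop markers

class StreamAccumulator:
    """Accumulates streamed chunks and stops when an anti-prompt appears."""

    def __init__(self):
        self.current_text = ""

    def on_token(self, text: str) -> bool:
        """Append a streamed chunk; return False to ask the backend to stop."""
        print(text, end="", flush=True)
        self.current_text += text
        lowered = self.current_text.lower()
        for anti_prompt in ANTI_PROMPTS:
            pos = lowered.find(anti_prompt.lower())
            if pos != -1:
                # The anti-prompt marks the start of a new turn: trim it off
                # and signal the generator to stop producing tokens.
                self.current_text = self.current_text[:pos]
                return False
        return True  # keep generating

A callback with this True/False contract is typically checked by the binding after every emitted token; returning True while self.ready is still false, as the hunk does, keeps generation running during warm-up while only printing progress dots instead of streaming the text.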
@@ -768,6 +764,7 @@ class GPT4AllAPI():
        while(not self.process.completion_signal.is_set() or not self.process.generation_queue.empty()): # Simulating other commands being issued
            try:
                chunk, tok, message_type = self.process.generation_queue.get(False, 2)
                print(chunk, end="")
                if chunk!="":
                    self.process_chunk(chunk, message_type)
            except Exception as ex:
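The GPT4AllAPI hunk drains a generation queue that a worker process fills, looping until a completion signal is set and the queue is empty. The sketch below is a self-contained illustration of that pattern under assumed names (the worker function and sample chunks are made up), not the project's code, and it uses a thread instead of a separate process to stay short. One detail worth noting: generation_queue.get(False, 2) in the diff is a non-blocking get, so the second argument is ignored (a timeout only applies when block=True) and the call raises queue.Empty immediately when nothing is available.

import queue
import threading

generation_queue = queue.Queue()      # carries (chunk, token, message_type) tuples
completion_signal = threading.Event()

def worker() -> None:
    # Stand-in for the model process pushing generated chunks.
    for chunk in ["Hello", ", ", "world", "!"]:
        generation_queue.put((chunk, 0, 0))
    completion_signal.set()           # generation finished

threading.Thread(target=worker).start()

# Keep draining until the worker is done AND everything queued has been read.
while not completion_signal.is_set() or not generation_queue.empty():
    try:
        chunk, tok, message_type = generation_queue.get(False)
        print(chunk, end="", flush=True)
    except queue.Empty:
        continue
print()

Setting the event only after the last put is what makes the "signal is set and queue is empty" exit condition safe: once the signal is observed, every queued chunk is already visible to the consumer.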
@@ -1,3 +1,7 @@
Hi there, welcome to a snippet about bindings and models selection.

In this short video we will look at bindings and models
In this short video we will look at bindings and models, how to select them and how to use them.

First, to select a binding, go to the settings tab then Bindings zoo.

You will find a certain number of