Mirror of https://github.com/ParisNeo/lollms.git
Commit message: upgraded
Commit: c68b687e7c (parent: ddcf439ffd)
@@ -123,6 +123,8 @@ async def lollms_generate(request: LollmsGenerateRequest):
    headers = {'Content-Type': 'text/event-stream', 'Cache-Control': 'no-cache', 'Connection': 'keep-alive'}
    reception_manager = RECEPTION_MANAGER()
    prompt = request.prompt
    if elf_server.config.debug:
        ASCIIColors.yellow(prompt)
    n_predict = request.n_predict if request.n_predict > 0 else 1024
    stream = request.stream
    prompt_tokens = len(elf_server.binding.tokenize(prompt))
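For context, here is a minimal, self-contained sketch (not the lollms code itself) of how event-stream headers like the ones above are typically attached to a FastAPI streaming endpoint via StreamingResponse; the app instance, route path and event_stream generator below are hypothetical placeholders for illustration only.

# Minimal sketch, assuming a FastAPI HTTP layer; names below are placeholders.
from fastapi import FastAPI
from fastapi.responses import StreamingResponse

app = FastAPI()

@app.post("/sketch_generate")
async def sketch_generate():
    headers = {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
    }

    async def event_stream():
        # A real endpoint would yield chunks coming from the model binding here.
        for token in ["Hello", " ", "world"]:
            yield token

    return StreamingResponse(event_stream(), headers=headers)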
@@ -189,15 +191,12 @@ async def lollms_generate(request: LollmsGenerateRequest):
        # Yield each chunk of data
        if chunk is None:
            return True

        rx = reception_manager.new_chunk(chunk)
        if rx.status != ROLE_CHANGE_DECISION.MOVE_ON:
            if rx.status == ROLE_CHANGE_DECISION.PROGRESSING:
                return True
            elif rx.status == ROLE_CHANGE_DECISION.ROLE_CHANGED:
                return False
            else:
                chunk = chunk + rx.value
        reception_manager.reception_buffer += chunk
        antiprompt = elf_server.personality.detect_antiprompt(reception_manager.reception_buffer)
        if antiprompt:
            ASCIIColors.warning(f"\n{antiprompt} detected. Stopping generation")
            reception_manager.reception_buffer = elf_server.remove_text_from_string(reception_manager.reception_buffer, antiprompt)
            return False

        return True
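For readers unfamiliar with how this callback is consumed: it returns True to let generation continue and False to stop it. Below is a self-contained sketch of that contract; RoleChange and SimpleReceptionManager are hypothetical stand-ins for lollms's ROLE_CHANGE_DECISION and RECEPTION_MANAGER, and the "### user" antiprompt is only an example value.

# Sketch of the chunk-callback contract shown above: return True to keep
# generating, False to stop. All names here are illustrative stand-ins.
from enum import Enum
from types import SimpleNamespace


class RoleChange(Enum):
    MOVE_ON = 0        # chunk is plain text, keep it
    PROGRESSING = 1    # a role-change marker may be starting, wait for more text
    ROLE_CHANGED = 2   # another role took over, stop generation


class SimpleReceptionManager:
    def __init__(self):
        self.reception_buffer = ""

    def new_chunk(self, chunk):
        # A real manager inspects the chunk for role-change markers; this stub
        # always reports MOVE_ON with nothing held back.
        return SimpleNamespace(status=RoleChange.MOVE_ON, value="")


def make_callback(manager, antiprompts=("### user",)):
    def callback(chunk):
        if chunk is None:
            return True
        rx = manager.new_chunk(chunk)
        if rx.status != RoleChange.MOVE_ON:
            if rx.status == RoleChange.PROGRESSING:
                return True                  # still deciding, don't emit yet
            if rx.status == RoleChange.ROLE_CHANGED:
                return False                 # role changed: stop generation
            chunk = chunk + rx.value         # held-back text turned out to be plain
        manager.reception_buffer += chunk
        # Stop as soon as an antiprompt shows up in the accumulated text.
        for ap in antiprompts:
            if ap in manager.reception_buffer:
                manager.reception_buffer = manager.reception_buffer.split(ap)[0]
                return False
        return True
    return callback


if __name__ == "__main__":
    manager = SimpleReceptionManager()
    callback = make_callback(manager)
    for piece in ["Hello", " there", "### user says hi"]:
        if not callback(piece):
            break
    print(manager.reception_buffer)  # -> "Hello there"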