This commit is contained in:
Saifeddine ALOUI 2024-07-30 00:02:16 +02:00
parent a8ae73d500
commit ddcf439ffd

View File

@ -49,6 +49,8 @@ def get_generation_status():
# ----------------------------------- Generation ----------------------------------------- # ----------------------------------- Generation -----------------------------------------
class LollmsTokenizeRequest(BaseModel): class LollmsTokenizeRequest(BaseModel):
prompt: str prompt: str
class LollmsDeTokenizeRequest(BaseModel):
    """Request body for /lollms_detokenize: the token ids to decode back to text."""
    tokens: List[int]
@router.post("/lollms_tokenize") @router.post("/lollms_tokenize")
async def lollms_tokenize(request: LollmsTokenizeRequest): async def lollms_tokenize(request: LollmsTokenizeRequest):
@ -63,6 +65,19 @@ async def lollms_tokenize(request: LollmsTokenizeRequest):
except Exception as ex: except Exception as ex:
return {"status":False,"error":str(ex)} return {"status":False,"error":str(ex)}
@router.post("/lollms_detokenize")
async def lollms_detokenize(request: LollmsDeTokenizeRequest):
    """Decode a list of token ids back into text.

    Returns a dict with:
      - status: True on success, False on failure (with "error" message)
      - text: the full string decoded from all tokens at once
      - named_tokens: [decoded_piece, token_id] pairs, one per input token,
        so clients can map each id to its text fragment
      - raw_tokens: the input token ids, echoed back
    """
    try:
        text = elf_server.model.detokenize(request.tokens)
        # Decode each token individually to expose the id -> text mapping.
        named_tokens = []
        for token in request.tokens:
            detoken = elf_server.model.detokenize([token])
            named_tokens.append([detoken, token])
        # Bug fix: the original accessed request.prompt here, but
        # LollmsDeTokenizeRequest has no such field, so every call raised
        # AttributeError and returned status False. Echo the input ids instead.
        return {"status": True, "raw_tokens": request.tokens, "named_tokens": named_tokens, "text": text}
    except Exception as ex:
        return {"status": False, "error": str(ex)}
class LollmsGenerateRequest(BaseModel): class LollmsGenerateRequest(BaseModel):
prompt: str prompt: str
model_name: Optional[str] = None model_name: Optional[str] = None