fixed vllm

Saifeddine ALOUI 2024-02-20 23:41:20 +01:00
parent e3b7e7ff5d
commit a7c7495f4a
6 changed files with 32 additions and 5 deletions

lollms/security.py (new file, 2 lines added)

@@ -0,0 +1,2 @@
def detect_LFI():
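
The second added line (the body of detect_LFI) is not shown in this hunk. Purely as an illustration of what a local-file-inclusion guard of this kind usually does, a hypothetical sketch follows; only the function name comes from the diff, every parameter and path in it is an assumption.

# Hypothetical sketch; the actual body of detect_LFI is not shown in the diff.
from pathlib import Path

def detect_LFI(user_path: str, allowed_root: str = "personal_data") -> bool:
    """Return True if user_path tries to escape allowed_root (a possible LFI attempt)."""
    root = Path(allowed_root).resolve()
    candidate = (root / user_path).resolve()
    # Anything that does not stay under the allowed root is flagged
    return not str(candidate).startswith(str(root))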

@@ -43,6 +43,31 @@ def install_ollama():
            return {"status":True}
        else:
            return {"status":False, 'error':str(ex)}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}

@router.get("/start_vllm")
def start_vllm():
    try:
        if hasattr(lollmsElfServer,"vllm") and lollmsElfServer.vllm is not None:
            return {"status":False, 'error':"Service is already on"}
        if not hasattr(lollmsElfServer,"vllm") or lollmsElfServer.vllm is None:
            lollmsElfServer.ShowBlockingMessage("Loading vllm server\nPlease stand by")
            from lollms.services.vllm.lollms_vllm import get_vllm
            server = get_vllm(lollmsElfServer)
            if server:
                lollmsElfServer.vllm = server(lollmsElfServer, lollmsElfServer.config.vllm_url)
                lollmsElfServer.HideBlockingMessage()
                return {"status":True}
            else:
                return {"status":False, 'error':"Could not load the vllm server"}
        else:
            return {"status":False, 'error':'Service already running'}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}
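
Once the server is running, the new endpoint can be exercised with a plain HTTP GET. A minimal sketch, assuming a local lollms server on port 9600 (host and port are assumptions, not part of the diff):

# Assumes a lollms server is listening locally; adjust host/port to your setup.
import requests

resp = requests.get("http://localhost:9600/start_vllm", timeout=600)
print(resp.json())   # expected: {"status": True} once the vllm service is up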

@@ -113,13 +113,13 @@ async def text2Audio(request: LollmsText2AudioRequest):
        if voice!="main_voice":
            voices_folder = lollmsElfServer.lollms_paths.custom_voices_path
        else:
            voices_folder = Path(__file__).parent.parent/"voices"
            voices_folder = Path(__file__).parent.parent.parent/"services/xtts/voices"
        lollmsElfServer.tts.set_speaker_folder(voices_folder)
        url = f"audio/{output_fn}"
        preprocessed_text= add_period(request.text)
        lollmsElfServer.tts.tts_to_file(preprocessed_text, f"{voice}.wav", f"{output_fn}", language=language)
        lollmsElfServer.info("Voice file ready")
        lollmsElfServer.info(f"Voice file ready at {url}")
        return {"url": url}
    except:
        return {"url": None}

@@ -122,8 +122,8 @@ def add_events(sio:socketio):
                # Emit the generated text to the client
                run_async(partial(lollmsElfServer.sio.emit,'text_generated', {'text': generated_text}, to=client_id))
            except Exception as ex:
                run_async(partial(lollmsElfServer.sio.emit,'generation_error', {'error': str(ex)}, to=client_id))
                ASCIIColors.error(f"\ndone")
                lollmsElfServer.error(str(ex))
                trace_exception(ex)
                lollmsElfServer.busy = False
        else:
            try:
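
On the receiving side, the 'text_generated' and 'generation_error' events emitted above can be consumed with a standard python-socketio client. A minimal sketch, assuming the server listens locally on port 9600:

# Listens for the events emitted by the handler above; URL and port are assumptions.
import socketio

sio = socketio.Client()

@sio.on('text_generated')
def on_text_generated(data):
    print("generated:", data['text'])

@sio.on('generation_error')
def on_generation_error(data):
    print("generation failed:", data['error'])

sio.connect("http://localhost:9600")
sio.wait()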

@@ -114,7 +114,7 @@ class Service:
        self.wait_for_service(max_retries=wait_max_retries)

    def wait_for_service(self, max_retries = 150, show_warning=True):
        url = f"{self.base_url}"
        url = f"{self.base_url}" if "0.0.0.0" not in self.base_url else self.base_url.replace("0.0.0.0","http://localhost")
        # Adjust this value as needed
        retries = 0
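
The changed line rewrites a wildcard bind address into something the health check can actually reach. A small illustration of the substitution, assuming base_url is stored without a scheme (for example "0.0.0.0:8000"); the normalize helper is hypothetical, not part of the diff:

# Hypothetical helper illustrating the substitution used in wait_for_service.
def normalize(base_url: str) -> str:
    # "0.0.0.0" is a bind-all address, not a reachable host, so point the
    # health check at localhost instead. Assumes base_url carries no scheme.
    return base_url if "0.0.0.0" not in base_url else base_url.replace("0.0.0.0", "http://localhost")

print(normalize("0.0.0.0:8000"))        # -> http://localhost:8000
print(normalize("http://myhost:8000"))  # unchanged

Note that a base_url that already includes a scheme (for example http://0.0.0.0:8000) would end up with a doubled scheme after the replacement, so the no-scheme assumption matters here.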