mirror of https://github.com/ParisNeo/lollms-webui.git
synced 2025-02-21 01:31:20 +00:00

commit 7f6948824c
parent b39dc6ae6e

added endpoints
app.py | 8
@@ -82,6 +82,9 @@ if __name__ == "__main__":
    from endpoints.lollms_xtts import router as lollms_xtts_add_router
    from endpoints.lollms_sd import router as lollms_sd_router
    from endpoints.lollms_ollama import router as lollms_ollama_router
    from endpoints.lollms_petals import router as lollms_petals_router
    from endpoints.lollms_vllm import router as lollms_vllm_router

    from endpoints.lollms_playground import router as lollms_playground_router
@@ -121,7 +124,10 @@ if __name__ == "__main__":
    app.include_router(lollms_xtts_add_router)

    app.include_router(lollms_sd_router)
    app.include_router(lollms_ollama_router)
    app.include_router(lollms_ollama_router)
    app.include_router(lollms_petals_router)
    app.include_router(lollms_vllm_router)

    app.include_router(lollms_playground_router)
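For context, each `endpoints/lollms_*.py` module imported above exposes a module-level FastAPI `APIRouter` that `app.py` mounts with `include_router`. A minimal sketch of that wiring, using a hypothetical `endpoints/lollms_example.py` (the module name and route are illustrative, not part of this commit):

# endpoints/lollms_example.py -- hypothetical module following the same pattern
from fastapi import APIRouter

router = APIRouter()

@router.get("/example")
def example():
    return {"status": True}

# app.py -- mount the router, as the hunks above do for each service
from fastapi import FastAPI
from endpoints.lollms_example import router as lollms_example_router

app = FastAPI()
app.include_router(lollms_example_router)  # the module's routes are now served by `app`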
endpoints/lollms_petals.py

@@ -29,9 +29,9 @@ lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
 @router.get("/install_petals")
 def install_petals():
     try:
-        lollmsElfServer.ShowBlockingMessage("Installing ollama server\nPlease stand by")
-        from lollms.services.ollama.lollms_ollama import install_ollama
-        if install_ollama(lollmsElfServer):
+        lollmsElfServer.ShowBlockingMessage("Installing petals server\nPlease stand by")
+        from lollms.services.petals.lollms_petals import install_petals
+        if install_petals(lollmsElfServer):
             lollmsElfServer.HideBlockingMessage()
             return {"status":True}
         else:
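Once the router is mounted, the endpoint can be exercised over plain HTTP. A minimal sketch; the host and port are assumptions (9600 is the usual lollms-webui default, adjust to your configuration):

import requests  # third-party HTTP client

# host/port are assumptions; use whatever address your lollms-webui binds to
resp = requests.get("http://localhost:9600/install_petals")
print(resp.json())  # {"status": True} on success, {"status": False, "error": ...} otherwise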
endpoints/lollms_vllm.py | 42 (new file)
@@ -0,0 +1,42 @@
"""
project: lollms_webui
file: lollms_vllm.py
author: ParisNeo
description:
    This module contains a set of FastAPI routes for the vllm service

"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform

# ----------------------- Defining router and main class ------------------------------

router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()


# ----------------------- vllm ------------------------------

@router.get("/install_vllm")
def install_vllm():
    try:
        # show a blocking overlay in the UI while the installer runs
        lollmsElfServer.ShowBlockingMessage("Installing vllm server\nPlease stand by")
        from lollms.services.vllm.lollms_vllm import install_vllm as install_vllm_service
        if install_vllm_service(lollmsElfServer):
            lollmsElfServer.HideBlockingMessage()
            return {"status":True}
        else:
            # installer reported failure; clear the overlay and report it
            lollmsElfServer.HideBlockingMessage()
            return {"status":False, 'error':"vllm installation failed"}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}
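The new file follows the same blocking-message pattern as the petals endpoint: show a blocking UI message, run the installer, hide the message, and report status as JSON. A self-contained sketch of that pattern, testable without a running webui; `FakeServer` and `fake_install` are hypothetical stand-ins for `LOLLMSWebUI` and the service installer:

# Minimal, self-contained sketch of the install-endpoint pattern used above.
from fastapi import FastAPI, APIRouter
from fastapi.testclient import TestClient

router = APIRouter()

class FakeServer:
    """Hypothetical stub for LOLLMSWebUI's blocking-message API."""
    def ShowBlockingMessage(self, msg: str): print(msg)
    def HideBlockingMessage(self): pass

server = FakeServer()

def fake_install(srv) -> bool:
    return True  # pretend the installation succeeded

@router.get("/install_vllm")
def install_vllm():
    try:
        server.ShowBlockingMessage("Installing vllm server\nPlease stand by")
        ok = fake_install(server)
        server.HideBlockingMessage()
        return {"status": ok}
    except Exception as ex:
        server.HideBlockingMessage()
        return {"status": False, "error": str(ex)}

app = FastAPI()
app.include_router(router)
client = TestClient(app)
assert client.get("/install_vllm").json() == {"status": True}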