mirror of https://github.com/ParisNeo/lollms-webui.git (synced 2024-12-26 23:41:03 +00:00)
43 lines · 1.5 KiB · Python
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
    This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
    application. These routes allow users to install the Ollama server from the web UI.
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform

# ----------------------- Defining router and main class ------------------------------

router = APIRouter()
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()

# ----------------------- ollama installation ------------------------------

@router.get("/install_ollama")
def install_ollama():
    """Installs the Ollama server and returns a status/error dictionary."""
    try:
        # Show a blocking message in the UI while the installation runs
        lollmsElfServer.ShowBlockingMessage("Installing ollama server\nPlease stand by")
        from lollms.services.ollama.lollms_ollama import install_ollama
        if install_ollama(lollmsElfServer):
            lollmsElfServer.HideBlockingMessage()
            return {"status": True}
        else:
            # Installation reported failure without raising an exception
            lollmsElfServer.HideBlockingMessage()
            return {"status": False, "error": "Ollama installation failed"}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status": False, "error": str(ex)}
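For reference, here is a minimal sketch of how this router could be mounted and the endpoint exercised. The standalone FastAPI `app` and the use of FastAPI's TestClient are illustrative assumptions for this sketch; in lollms-webui the router is registered by the main server application rather than like this.

# Illustrative sketch only (assumption, not part of the repository): mounting
# the router defined above in a bare FastAPI app and calling /install_ollama.
# Note that actually running this would trigger an Ollama installation attempt.
from fastapi import FastAPI
from fastapi.testclient import TestClient

app = FastAPI()
app.include_router(router)  # `router` is the APIRouter defined above

client = TestClient(app)
response = client.get("/install_ollama")
print(response.json())  # e.g. {"status": True} or {"status": False, "error": "..."}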