removed useless stuff

This commit is contained in:
Saifeddine ALOUI 2025-04-02 15:34:13 +02:00
parent 35d223c415
commit c5710a7cc4
14 changed files with 7 additions and 846 deletions

View File

@@ -42,7 +42,7 @@ def main():
config.port=args.port
LOLLMSElfServer.build_instance(config=config, lollms_paths=lollms_paths, socketio=sio)
from lollms.server.endpoints.lollms_binding_files_server import router as lollms_binding_files_server_router
from lollms.server.endpoints.lollms_files_server import router as lollms_binding_files_server_router
from lollms.server.endpoints.lollms_infos import router as lollms_infos_router
from lollms.server.endpoints.lollms_hardware_infos import router as lollms_hardware_infos_router
from lollms.server.endpoints.lollms_binding_infos import router as lollms_binding_infos_router

View File

@@ -1,164 +0,0 @@
"""
project: lollms_webui
file: lollms_asr.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to
"""
from fastapi import APIRouter, Request, UploadFile, File, HTTPException
from pydantic import BaseModel, Field
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.security import check_access
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from lollms.security import sanitize_path, validate_path
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
class ClientAuthentication(BaseModel):
client_id: str = Field(...)
class LollmsAudio2TextRequest(BaseModel):
client_id: str
text: str
voice: str = None
fn:str = None
# ----------------------- voice ------------------------------
@router.post("/asr/audio2test")
async def text2Audio(request: LollmsAudio2TextRequest):
"""
Executes Python code and returns the output.
:param request: The HTTP request object.
:return: A JSON response with the status of the operation.
"""
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Code execution is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Code execution is blocked when the server is exposed outside for very obvious reasons!"}
if request.fn:
request.fn = os.path.realpath(str((lollmsElfServer.lollms_paths.personal_outputs_path/"audio_out")/request.fn))
validate_path(request.fn,[str(lollmsElfServer.lollms_paths.personal_outputs_path/"audio_out")])
try:
# Get the JSON data from the POST request.
try:
from lollms.services.asr.lollms_asr import LollmsASR
voice=lollmsElfServer.config.asr_current_voice
if lollmsElfServer.asr is None:
voice=lollmsElfServer.config.asr_current_voice
if voice!="main_voice":
voices_folder = lollmsElfServer.lollms_paths.custom_voices_path
else:
voices_folder = Path(__file__).parent.parent.parent/"services/asr/voices"
lollmsElfServer.asr = LollmsASR(
lollmsElfServer,
voices_folder=voices_folder,
voice_samples_path=Path(__file__).parent/"voices",
asr_base_url= lollmsElfServer.config.asr_base_url
)
except Exception as ex:
return {"url": None, "error":f"{ex}"}
voice=lollmsElfServer.config.asr_current_voice if request.voice is None else request.voice
index = find_first_available_file_index(lollmsElfServer.asr.output_folder, "voice_sample_",".wav")
output_fn=f"voice_sample_{index}.wav" if request.fn is None else request.fn
if voice is None:
voice = "main_voice"
lollmsElfServer.info("Starting to build voice")
try:
from lollms.services.asr.lollms_asr import LollmsASR
# If the personality has a voice, then use it
personality_audio:Path = lollmsElfServer.personality.personality_package_path/"audio"
if personality_audio.exists() and len([v for v in personality_audio.iterdir()])>0:
voices_folder = personality_audio
elif voice!="main_voice":
voices_folder = lollmsElfServer.lollms_paths.custom_voices_path
else:
voices_folder = Path(__file__).parent.parent.parent/"services/asr/voices"
if lollmsElfServer.asr is None:
lollmsElfServer.asr = LollmsASR(
lollmsElfServer,
voices_folder=voices_folder,
voice_samples_path=Path(__file__).parent/"voices",
asr_base_url= lollmsElfServer.config.asr_base_url,
)
if lollmsElfServer.asr.ready:
language = lollmsElfServer.config.asr_current_language# convert_language_name()
lollmsElfServer.asr.set_speaker_folder(voices_folder)
preprocessed_text= add_period(request.text)
voice_file = [v for v in voices_folder.iterdir() if v.stem==voice and v.suffix==".wav"]
if len(voice_file)==0:
return {"status":False,"error":"Voice not found"}
lollmsElfServer.asr.tts_audio(preprocessed_text, voice_file[0].name, f"{output_fn}", language=language)
else:
lollmsElfServer.InfoMessage("asr is not up yet.\nPlease wait for it to load then try again. This may take some time.")
return {"status":False, "error":"Service not ready yet"}
except Exception as ex:
trace_exception(ex)
return {"url": None}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.error(ex)
return {"status":False,"error":str(ex)}
@router.post("/install_asr")
def install_asr(request: ClientAuthentication):
check_access(lollmsElfServer, request.client_id)
try:
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Service installation is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}
from lollms.services.asr.lollms_asr import install_asr
lollmsElfServer.ShowBlockingMessage("Installing ASR api server\nPlease stand by")
install_asr(lollmsElfServer)
lollmsElfServer.HideBlockingMessage()
return {"status":True}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.HideBlockingMessage()
return {"status":False, 'error':str(ex)}
@router.post("/start_asr")
def start_asr(request: ClientAuthentication):
check_access(lollmsElfServer, request.client_id)
try:
lollmsElfServer.ShowBlockingMessage("Starting ASR api server\nPlease stand by")
from lollms.services.asr.lollms_asr import LollmsASR
if lollmsElfServer.asr is None:
lollmsElfServer.asr = LollmsASR(
lollmsElfServer,
voice_samples_path=Path(__file__).parent/"voices",
asr_base_url= lollmsElfServer.config.asr_base_url,
)
lollmsElfServer.HideBlockingMessage()
except Exception as ex:
trace_exception(ex)
lollmsElfServer.HideBlockingMessage()
return {"url": None, "error":f"{ex}"}
@router.get("/asr_is_ready")
def asr_is_ready():
if hasattr(lollmsElfServer,'sd') and lollmsElfServer.sd is not None:
if lollmsElfServer.sd.ready:
return {"status":True}
return {"status":False}

View File

@@ -1,112 +0,0 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to
"""
from fastapi import APIRouter, Request
from pydantic import BaseModel, Field
from lollms.server.elf_server import LOLLMSElfServer
from pydantic import BaseModel
from lollms.security import check_access
from starlette.responses import StreamingResponse
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSElfServer = LOLLMSElfServer.get_instance()
# ----------------------- voice ------------------------------
class ClientAuthentication(BaseModel):
client_id: str = Field(...)
@router.post("/install_comfyui")
def install_comfyui(request: ClientAuthentication):
check_access(lollmsElfServer, request.client_id)
try:
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Service installation is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}
lollmsElfServer.ShowBlockingMessage("Installing comfyui server\nPlease stand by")
from lollms.services.tti.comfyui.lollms_comfyui import install_comfyui
install_comfyui(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
return {"status":True}
except Exception as ex:
lollmsElfServer.HideBlockingMessage()
lollmsElfServer.InfoMessage(f"It looks like I could not install Comfyui because of this error:\n{ex}\nThis is commonly caused by a previous version that I couldn't delete. PLease remove {lollmsElfServer.lollms_paths.personal_path}/shared/comfyui manually then try again")
return {"status":False, 'error':str(ex)}
@router.post("/upgrade_comfyui")
def upgrade_comfyui(request: ClientAuthentication):
check_access(lollmsElfServer, request.client_id)
try:
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Service upgrade is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Service upgrade is blocked when the server is exposed outside for very obvious reasons!"}
lollmsElfServer.ShowBlockingMessage("Upgrading comfyui server\nPlease stand by")
from lollms.services.tti.comfyui.lollms_comfyui import upgrade_comfyui
upgrade_comfyui(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
return {"status":True}
except Exception as ex:
lollmsElfServer.HideBlockingMessage()
lollmsElfServer.InfoMessage(f"It looks like I could not install Comfyui because of this error:\n{ex}\nThis is commonly caused by a previous version that I couldn't delete. PLease remove {lollmsElfServer.lollms_paths.personal_path}/shared/comfyui manually then try again")
return {"status":False, 'error':str(ex)}
@router.post("/start_comfyui")
def start_comfyui(request: ClientAuthentication):
check_access(lollmsElfServer, request.client_id)
try:
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Service installation is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}
lollmsElfServer.ShowBlockingMessage("Starting Comfyui\nPlease stand by")
from lollms.services.tti.comfyui.lollms_comfyui import get_comfyui
lollmsElfServer.comfyui = get_comfyui(lollmsElfServer.lollms_paths)(lollmsElfServer, lollmsElfServer.personality.name if lollmsElfServer.personality is not None else "Artbot")
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
return {"status":True}
except Exception as ex:
lollmsElfServer.HideBlockingMessage()
lollmsElfServer.InfoMessage(f"It looks like I could not install comfyui because of this error:\n{ex}\nThis is commonly caused by a previous version that I couldn't delete. PLease remove {lollmsElfServer.lollms_paths.personal_path}/shared/comfyui manually then try again")
return {"status":False, 'error':str(ex)}
@router.post("/show_comfyui")
def show_comfyui(request: ClientAuthentication):
check_access(lollmsElfServer, request.client_id)
import webbrowser
webbrowser.open(lollmsElfServer.config.comfyui_base_url)
return {"status":True}
@router.get("/list_comfyui_models")
def list_comfyui_models():
from lollms.services.tti.comfyui.lollms_comfyui import LollmsComfyUI
return {"status":True, "models":LollmsComfyUI.get_models_list(lollmsElfServer)}

View File

@@ -1,84 +0,0 @@
"""
project: lollms_webui
file: lollms_diffusers.py
author: ParisNeo
description:
This module is for diffusers installation and management
"""
from fastapi import APIRouter, Request
from lollms.server.elf_server import LOLLMSElfServer
from pydantic import BaseModel, ConfigDict
from starlette.responses import StreamingResponse
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from lollms.security import check_access
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSElfServer = LOLLMSElfServer.get_instance()
class Identification(BaseModel):
client_id: str
class ModelPost(BaseModel):
model_config = ConfigDict(protected_namespaces=())
client_id: str
model_url: str
# ----------------------- voice ------------------------------
@router.post("/install_diffusers")
# async def your_endpoint(request: Request):
# request_data = await request.json()
# print(request_data) # Use proper logging in real applications
def install_diffusers(data: Identification):
check_access(lollmsElfServer, data.client_id)
try:
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Service installation is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}
lollmsElfServer.ShowBlockingMessage("Installing Diffusers library\nPlease stand by")
from lollms.services.tti.diffusers.lollms_diffusers import install_diffusers
install_diffusers(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
return {"status":True}
except Exception as ex:
lollmsElfServer.HideBlockingMessage()
lollmsElfServer.InfoMessage(f"It looks like I could not install SD because of this error:\n{ex}\nThis is commonly caused by a previous version that I couldn't delete. PLease remove {lollmsElfServer.lollms_paths.personal_path}/shared/auto_sd manually then try again")
return {"status":False, 'error':str(ex)}
@router.post("/upgrade_diffusers")
def upgrade_sd(data: Identification):
check_access(lollmsElfServer, data.client_id)
try:
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Service installation is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}
lollmsElfServer.ShowBlockingMessage("Upgrading Diffusers library\nPlease stand by")
from lollms.services.tti.diffusers.lollms_diffusers import upgrade_diffusers
upgrade_diffusers(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
return {"status":True}
except Exception as ex:
lollmsElfServer.HideBlockingMessage()
lollmsElfServer.InfoMessage(f"It looks like I could not install SD because of this error:\n{ex}\nThis is commonly caused by a previous version that I couldn't delete. PLease remove {lollmsElfServer.lollms_paths.personal_path}/shared/auto_sd manually then try again")
return {"status":False, 'error':str(ex)}
@router.post("/install_diffusers_model")
def install_model(data: ModelPost):
check_access(lollmsElfServer, data.client_id)

View File

@@ -246,5 +246,9 @@ def remove_discussion_file(data:RemoveFileData):
if lollmsElfServer.personality is None:
return {"state":False, "error":"No personality selected"}
client.discussion.remove_file(data.name)
try:
client.discussion.remove_file(data.name)
except Exception as ex:
trace_exception(ex)
return {"state":False, "error": ex}
return {"state":True}

View File

@@ -1,51 +0,0 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to
"""
from fastapi import APIRouter, Request
from pydantic import BaseModel, Field
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.security import check_access
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
class ClientAuthentication(BaseModel):
client_id: str = Field(...)
# ----------------------- voice ------------------------------
@router.post("/install_motion_ctrl")
def install_motion_ctrl(request: ClientAuthentication):
check_access(lollmsElfServer, request.client_id)
try:
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Service installation is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}
lollmsElfServer.ShowBlockingMessage("Installing Motion Ctrl api server\nPlease stand by")
from lollms.services.motion_ctrl.lollms_motion_ctrl import install_motion_ctrl
install_motion_ctrl(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
return {"status":True}
except Exception as ex:
lollmsElfServer.HideBlockingMessage()
return {"status":False, 'error':str(ex)}

View File

@@ -1,79 +0,0 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to
"""
from fastapi import APIRouter, Request
from pydantic import BaseModel, Field
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from lollms.security import check_access
from starlette.responses import StreamingResponse
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
class ClientAuthentication(BaseModel):
client_id: str = Field(...)
# ----------------------- voice ------------------------------
@router.post("/install_ollama")
def install_ollama(request: ClientAuthentication):
check_access(lollmsElfServer, request.client_id)
try:
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Service installation is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}
lollmsElfServer.ShowBlockingMessage("Installing ollama server\nPlease stand by")
from lollms.services.ttt.ollama.lollms_ollama import install_ollama
if install_ollama(lollmsElfServer):
lollmsElfServer.HideBlockingMessage()
return {"status":True}
else:
return {"status":False, 'error':str(ex)}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.HideBlockingMessage()
return {"status":False, 'error':str(ex)}
@router.post("/start_ollama")
def start_ollama(request: ClientAuthentication):
check_access(lollmsElfServer, request.client_id)
try:
if hasattr(lollmsElfServer,"vllm") and lollmsElfServer.vllm is not None:
return {"status":False, 'error':"Service is already on"}
if not hasattr(lollmsElfServer,"vllm") or lollmsElfServer.vllm is None:
lollmsElfServer.ShowBlockingMessage("Loading vllm server\nPlease stand by")
from lollms.services.ttt.vllm.lollms_vllm import get_vllm
server = get_vllm(lollmsElfServer)
if server:
lollmsElfServer.vllm = server(lollmsElfServer, lollmsElfServer.config.vllm_url)
lollmsElfServer.HideBlockingMessage()
return {"status":True}
else:
return {"status":False, 'error':str(ex)}
else:
return {"status":False, 'error':'Service already running'}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.HideBlockingMessage()
return {"status":False, 'error':str(ex)}

View File

@@ -1,47 +0,0 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that concerns petals service
"""
from fastapi import APIRouter, Request
from pydantic import BaseModel, Field
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.security import check_access
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
class ClientAuthentication(BaseModel):
client_id: str = Field(...)
# ----------------------- voice ------------------------------
@router.post("/install_petals")
def install_petals(request: ClientAuthentication):
check_access(lollmsElfServer, request.client_id)
try:
lollmsElfServer.ShowBlockingMessage("Installing petals server\nPlease stand by")
from lollms.services.petals.lollms_petals import install_petals
if install_petals(lollmsElfServer):
lollmsElfServer.HideBlockingMessage()
return {"status":True}
else:
return {"status":False, 'error':str(ex)}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.HideBlockingMessage()
return {"status":False, 'error':str(ex)}

View File

@@ -1,124 +0,0 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel, ConfigDict
from starlette.responses import StreamingResponse
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from lollms.security import check_access
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
class Identification(BaseModel):
client_id: str
class ModelPost(BaseModel):
model_config = ConfigDict(protected_namespaces=())
client_id: str
model_url: str
# ----------------------- voice ------------------------------
@router.post("/install_sd")
# async def your_endpoint(request: Request):
# request_data = await request.json()
# print(request_data) # Use proper logging in real applications
def install_sd(data: Identification):
check_access(lollmsElfServer, data.client_id)
try:
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Service installation is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}
lollmsElfServer.ShowBlockingMessage("Installing SD api server\nPlease stand by")
from lollms.services.tti.sd.lollms_sd import install_sd
install_sd(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
return {"status":True}
except Exception as ex:
lollmsElfServer.HideBlockingMessage()
lollmsElfServer.InfoMessage(f"It looks like I could not install SD because of this error:\n{ex}\nThis is commonly caused by a previous version that I couldn't delete. PLease remove {lollmsElfServer.lollms_paths.personal_path}/shared/auto_sd manually then try again")
return {"status":False, 'error':str(ex)}
@router.post("/upgrade_sd")
def upgrade_sd(data: Identification):
check_access(lollmsElfServer, data.client_id)
try:
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Service installation is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}
lollmsElfServer.ShowBlockingMessage("Installing SD api server\nPlease stand by")
from lollms.services.tti.sd.lollms_sd import upgrade_sd
upgrade_sd(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
return {"status":True}
except Exception as ex:
lollmsElfServer.HideBlockingMessage()
lollmsElfServer.InfoMessage(f"It looks like I could not install SD because of this error:\n{ex}\nThis is commonly caused by a previous version that I couldn't delete. PLease remove {lollmsElfServer.lollms_paths.personal_path}/shared/auto_sd manually then try again")
return {"status":False, 'error':str(ex)}
@router.post("/start_sd")
def start_sd(data: Identification):
check_access(lollmsElfServer, data.client_id)
try:
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Service installation is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}
lollmsElfServer.ShowBlockingMessage("Starting SD api server\nPlease stand by")
from lollms.services.tti.sd.lollms_sd import LollmsSD
lollmsElfServer.sd = LollmsSD.get(lollmsElfServer)(lollmsElfServer, lollmsElfServer.personality.name if lollmsElfServer.personality is not None else "Artbot")
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
return {"status":True}
except Exception as ex:
lollmsElfServer.HideBlockingMessage()
lollmsElfServer.InfoMessage(f"It looks like I could not install SD because of this error:\n{ex}\nThis is commonly caused by a previous version that I couldn't delete. PLease remove {lollmsElfServer.lollms_paths.personal_path}/shared/auto_sd manually then try again")
return {"status":False, 'error':str(ex)}
@router.post("/show_sd")
def show_sd(data: Identification):
check_access(lollmsElfServer, data.client_id)
import webbrowser
webbrowser.open(lollmsElfServer.config.sd_base_url)
return {"status":True}
@router.post("/install_model")
def install_model(data: ModelPost):
check_access(lollmsElfServer, data.client_id)
@router.post("/sd_is_ready")
def show_sd(data: Identification):
check_access(lollmsElfServer, data.client_id)
if hasattr(lollmsElfServer,'sd') and lollmsElfServer.sd is not None:
if lollmsElfServer.sd.ready:
return {"status":True}
return {"status":False}

View File

@@ -1,80 +0,0 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that concerns petals service
"""
from fastapi import APIRouter, Request
from pydantic import BaseModel, Field
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.security import check_access
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
class ClientAuthentication(BaseModel):
client_id: str = Field(...)
# ----------------------- voice ------------------------------
@router.post("/install_vllm")
def install_vllm(request: ClientAuthentication):
check_access(lollmsElfServer, request.client_id)
try:
if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Service installation is blocked when in headless mode for obvious security reasons!"}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}
lollmsElfServer.ShowBlockingMessage("Installing vllm server\nPlease stand by")
from lollms.services.ttt.vllm.lollms_vllm import install_vllm
if install_vllm(lollmsElfServer):
lollmsElfServer.HideBlockingMessage()
return {"status":True}
else:
return {"status":False, 'error':str(ex)}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.HideBlockingMessage()
return {"status":False, 'error':str(ex)}
@router.post("/start_vllm")
def start_vllm(request: ClientAuthentication):
check_access(lollmsElfServer, request.client_id)
try:
if hasattr(lollmsElfServer,"vllm") and lollmsElfServer.vllm is not None:
return {"status":False, 'error':"Service is already on"}
if not hasattr(lollmsElfServer,"vllm") or lollmsElfServer.vllm is None:
lollmsElfServer.ShowBlockingMessage("Loading vllm server\nPlease stand by")
from lollms.services.ttt.vllm.lollms_vllm import get_vllm
server = get_vllm(lollmsElfServer)
if server:
lollmsElfServer.vllm = server(lollmsElfServer, lollmsElfServer.config.vllm_url)
lollmsElfServer.HideBlockingMessage()
return {"status":True}
else:
return {"status":False, 'error':str(ex)}
else:
return {"status":False, 'error':'Service already running'}
except Exception as ex:
trace_exception(ex)
lollmsElfServer.HideBlockingMessage()
return {"status":False, 'error':str(ex)}

View File

@@ -1,51 +0,0 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to
"""
from fastapi import APIRouter, Request, UploadFile, File, HTTPException
from fastapi.responses import PlainTextResponse
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import find_next_available_filename, output_file_path_to_url, detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from lollms.security import sanitize_path, validate_path, check_access
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
class Identification(BaseModel):
    """Request payload identifying the caller for access-control checks."""
    client_id: str  # client identifier validated via check_access
# ----------------------- voice ------------------------------
@router.post("/install_whisper")
def install_whisper(data: Identification):
    """Install the whisper speech-to-text library.

    Installation is refused in headless mode or when the server is bound to a
    non-local host, since remotely triggered installs are a security risk.

    Args:
        data: Carries the client_id used for the access-control check.

    Returns:
        dict: {"status": True} on success, or {"status": False, "error": str}.
    """
    check_access(lollmsElfServer, data.client_id)
    try:
        if lollmsElfServer.config.headless_server_mode:
            return {"status": False, "error": "Service installation is blocked when in headless mode for obvious security reasons!"}
        if lollmsElfServer.config.host not in ("localhost", "127.0.0.1"):
            return {"status": False, "error": "Service installation is blocked when the server is exposed outside for very obvious reasons!"}
        lollmsElfServer.ShowBlockingMessage("Installing whisper library\nPlease stand by")
        # NOTE(review): only the import is performed — presumably importing
        # LollmsWhisper pulls in/validates the dependency; no explicit installer
        # is called (unlike install_xtts). Confirm this is intended.
        from lollms.services.stt.whisper.lollms_whisper import LollmsWhisper
        ASCIIColors.success("Done")
        lollmsElfServer.HideBlockingMessage()
        return {"status": True}
    except Exception as ex:
        trace_exception(ex)  # consistency: sibling endpoints all log the traceback
        lollmsElfServer.HideBlockingMessage()
        lollmsElfServer.InfoMessage(f"It looks like I could not install whisper because of this error:\n{ex}")
        return {"status": False, 'error': str(ex)}

View File

@ -1,52 +0,0 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to
"""
from fastapi import APIRouter, Request, UploadFile, File, HTTPException
from fastapi.responses import PlainTextResponse
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import find_next_available_filename, output_file_path_to_url, detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from lollms.security import sanitize_path, validate_path, check_access
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
class Identification(BaseModel):
    """Request payload identifying the caller for access-control checks."""
    client_id: str  # client identifier validated via check_access
# ----------------------- voice ------------------------------
@router.post("/install_xtts")
def install_xtts(data: Identification):
    """Install the XTTS text-to-speech library.

    Installation is refused in headless mode or when the server is bound to a
    non-local host, since remotely triggered installs are a security risk.

    Args:
        data: Carries the client_id used for the access-control check.

    Returns:
        dict: {"status": True} on success, or {"status": False, "error": str}.
    """
    check_access(lollmsElfServer, data.client_id)
    try:
        if lollmsElfServer.config.headless_server_mode:
            return {"status": False, "error": "Service installation is blocked when in headless mode for obvious security reasons!"}
        if lollmsElfServer.config.host not in ("localhost", "127.0.0.1"):
            return {"status": False, "error": "Service installation is blocked when the server is exposed outside for very obvious reasons!"}
        lollmsElfServer.ShowBlockingMessage("Installing XTTS library\nPlease stand by")
        from lollms.services.tts.xtts.lollms_xtts import xtts_install
        xtts_install(lollmsElfServer)
        ASCIIColors.success("Done")
        lollmsElfServer.HideBlockingMessage()
        return {"status": True}
    except Exception as ex:
        trace_exception(ex)  # consistency: sibling endpoints all log the traceback
        lollmsElfServer.HideBlockingMessage()
        # fixed typo in the user-facing message: "XTT" -> "XTTS"
        lollmsElfServer.InfoMessage(f"It looks like I could not install XTTS because of this error:\n{ex}")
        return {"status": False, 'error': str(ex)}

View File

@ -65,6 +65,7 @@ def add_events(sio:socketio):
@sio.on('send_file_chunk')
def send_file_chunk(sid, data):
ASCIIColors.yellow("Receiving file")
client_id = sid
client = lollmsElfServer.session.get_client(client_id)