mirror of https://github.com/ParisNeo/lollms.git
synced 2024-12-19 20:57:58 +00:00

New structure
parent 32fa39583f
commit 9d19668e9a
@@ -1,5 +1,5 @@
# =================== Lord Of Large Language Multimodal Systems Configuration file ===========================
-version: 133
+version: 134
binding_name: null
model_name: null
model_variant: null
@@ -105,6 +105,7 @@ active_tts_service: "None" # xtts (offline), openai_tts (API key required), elev
active_tti_service: "None" # autosd (offline), dall-e (online)
active_stt_service: "None" # whisper (offline), asr (offline or online), openai_whiosper (API key required)
active_ttm_service: "None" # musicgen (offline)
+active_ttv_service: "None" # cog_video_x (offline)
# -------------------- Services --------------------------

# ***************** STT *****************
@@ -204,6 +205,10 @@ comfyui_model: v1-5-pruned-emaonly.ckpt
enable_motion_ctrl_service: false
motion_ctrl_base_url: http://localhost:7861

+
+# ***************** TTV *****************
+cog_video_x_model: "THUDM/CogVideoX-5b"
+
# ***************** TTT *****************

# ollama service
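The hunks above bump config.yaml to version 134 and introduce the new TTV (text-to-video) settings; the hunks below move each service module under a category package (ttt, tts, stt, tti, ttv). For external code that imports these modules, a minimal compatibility sketch, assuming lollms is installed; both module paths are taken from this diff, and the shim itself is not part of the commit:

    # Prefer the new categorized path from version 134, fall back to the old flat path.
    try:
        from lollms.services.tti.sd.lollms_sd import LollmsSD   # new layout (this commit)
    except ImportError:
        from lollms.services.sd.lollms_sd import LollmsSD       # pre-134 layout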
@@ -339,7 +339,7 @@ class LollmsApplication(LoLLMsCom):
def start_ttt(*args, **kwargs):
if self.config.enable_ollama_service:
try:
-from lollms.services.ollama.lollms_ollama import Service
+from lollms.services.ttt.ollama.lollms_ollama import Service
self.ollama = Service(self, base_url=self.config.ollama_base_url)
tts_services.append("ollama")

@@ -349,7 +349,7 @@ class LollmsApplication(LoLLMsCom):

if self.config.enable_vllm_service:
try:
-from lollms.services.vllm.lollms_vllm import Service
+from lollms.services.ttt.vllm.lollms_vllm import Service
self.vllm = Service(self, base_url=self.config.vllm_url)
tts_services.append("vllm")
except Exception as ex:
@@ -360,16 +360,16 @@ class LollmsApplication(LoLLMsCom):
def start_stt(*args, **kwargs):
if self.config.whisper_activate or self.config.active_stt_service == "whisper":
try:
-from lollms.services.whisper.lollms_whisper import LollmsWhisper
+from lollms.services.stt.whisper.lollms_whisper import LollmsWhisper
self.whisper = LollmsWhisper(self, self.config.whisper_model, self.lollms_paths.personal_outputs_path)
stt_services.append("whisper")
except Exception as ex:
trace_exception(ex)
if self.config.active_stt_service == "openai_whisper":
-from lollms.services.openai_whisper.lollms_openai_whisper import LollmsOpenAIWhisper
+from lollms.services.stt.openai_whisper.lollms_openai_whisper import LollmsOpenAIWhisper
self.stt = LollmsOpenAIWhisper(self, self.config.openai_whisper_model, self.config.openai_whisper_key)
elif self.config.active_stt_service == "whisper":
-from lollms.services.whisper.lollms_whisper import LollmsWhisper
+from lollms.services.stt.whisper.lollms_whisper import LollmsWhisper
self.stt = LollmsWhisper(self, self.config.whisper_model)

ASCIIColors.execute_with_animation("Loading loacal STT services", start_stt, ASCIIColors.color_blue)
@@ -379,7 +379,7 @@ class LollmsApplication(LoLLMsCom):
if self.config.active_tts_service == "xtts":
ASCIIColors.yellow("Loading XTTS")
try:
-from lollms.services.xtts.lollms_xtts import LollmsXTTS
+from lollms.services.tts.xtts.lollms_xtts import LollmsXTTS
voice=self.config.xtts_current_voice
if voice!="main_voice":
voices_folder = self.lollms_paths.custom_voices_path
@@ -395,10 +395,10 @@ class LollmsApplication(LoLLMsCom):
trace_exception(ex)
self.warning(f"Couldn't load XTTS")
if self.config.active_tts_service == "eleven_labs_tts":
-from lollms.services.eleven_labs_tts.lollms_eleven_labs_tts import LollmsElevenLabsTTS
+from lollms.services.tts.eleven_labs_tts.lollms_eleven_labs_tts import LollmsElevenLabsTTS
self.tts = LollmsElevenLabsTTS(self, self.config.elevenlabs_tts_model_id, self.config.elevenlabs_tts_voice_id, self.config.elevenlabs_tts_key, stability=self.config.elevenlabs_tts_voice_stability, similarity_boost=self.config.elevenlabs_tts_voice_boost)
elif self.config.active_tts_service == "openai_tts":
-from lollms.services.open_ai_tts.lollms_openai_tts import LollmsOpenAITTS
+from lollms.services.tts.open_ai_tts.lollms_openai_tts import LollmsOpenAITTS
self.tts = LollmsOpenAITTS(self, self.config.openai_tts_model, self.config.openai_tts_voice, self.config.openai_tts_key)
elif self.config.active_tts_service == "xtts" and self.xtts:
self.tts = self.xtts
@@ -409,46 +409,38 @@ class LollmsApplication(LoLLMsCom):
def start_tti(*args, **kwargs):
if self.config.enable_sd_service:
try:
-from lollms.services.sd.lollms_sd import LollmsSD
+from lollms.services.tti.sd.lollms_sd import LollmsSD
self.sd = LollmsSD(self, auto_sd_base_url=self.config.sd_base_url)
except:
self.warning(f"Couldn't load SD")

if self.config.enable_comfyui_service:
try:
-from lollms.services.comfyui.lollms_comfyui import LollmsComfyUI
+from lollms.services.tti.comfyui.lollms_comfyui import LollmsComfyUI
self.comfyui = LollmsComfyUI(self, comfyui_base_url=self.config.comfyui_base_url)
except:
self.warning(f"Couldn't load SD")

-if self.config.enable_motion_ctrl_service:
-try:
-from lollms.services.motion_ctrl.lollms_motion_ctrl import Service
-self.motion_ctrl = Service(self, base_url=self.config.motion_ctrl_base_url)
-except Exception as ex:
-trace_exception(ex)
-self.warning(f"Couldn't load Motion control")
-
if self.config.active_tti_service == "diffusers":
-from lollms.services.diffusers.lollms_diffusers import LollmsDiffusers
+from lollms.services.tti.diffusers.lollms_diffusers import LollmsDiffusers
self.tti = LollmsDiffusers(self)
elif self.config.active_tti_service == "autosd":
if self.sd:
self.tti = self.sd
else:
-from lollms.services.sd.lollms_sd import LollmsSD
+from lollms.services.tti.sd.lollms_sd import LollmsSD
self.tti = LollmsSD(self)
elif self.config.active_tti_service == "dall-e":
-from lollms.services.dalle.lollms_dalle import LollmsDalle
+from lollms.services.tti.dalle.lollms_dalle import LollmsDalle
self.tti = LollmsDalle(self, self.config.dall_e_key)
elif self.config.active_tti_service == "midjourney":
-from lollms.services.midjourney.lollms_midjourney import LollmsMidjourney
+from lollms.services.tti.midjourney.lollms_midjourney import LollmsMidjourney
self.tti = LollmsMidjourney(self, self.config.midjourney_key, self.config.midjourney_timeout, self.config.midjourney_retries)
elif self.config.active_tti_service == "comfyui" and (self.tti is None or self.tti.name!="comfyui"):
if self.comfyui:
self.tti = self.comfyui
else:
-from lollms.services.comfyui.lollms_comfyui import LollmsComfyUI
+from lollms.services.tti.comfyui.lollms_comfyui import LollmsComfyUI
self.tti = LollmsComfyUI(self, comfyui_base_url=self.config.comfyui_base_url)

ASCIIColors.execute_with_animation("Loading loacal TTI services", start_tti, ASCIIColors.color_blue)
@@ -463,7 +455,7 @@ class LollmsApplication(LoLLMsCom):

if self.config.enable_ollama_service and self.ollama is None:
try:
-from lollms.services.ollama.lollms_ollama import Service
+from lollms.services.ttt.ollama.lollms_ollama import Service
self.ollama = Service(self, base_url=self.config.ollama_base_url)
except Exception as ex:
trace_exception(ex)
@@ -471,7 +463,7 @@ class LollmsApplication(LoLLMsCom):

if self.config.enable_vllm_service and self.vllm is None:
try:
-from lollms.services.vllm.lollms_vllm import Service
+from lollms.services.ttt.vllm.lollms_vllm import Service
self.vllm = Service(self, base_url=self.config.vllm_url)
except Exception as ex:
trace_exception(ex)
@@ -481,7 +473,7 @@ class LollmsApplication(LoLLMsCom):

if self.config.whisper_activate and self.whisper is None:
try:
-from lollms.services.whisper.lollms_whisper import LollmsWhisper
+from lollms.services.stt.whisper.lollms_whisper import LollmsWhisper
self.whisper = LollmsWhisper(self, self.config.whisper_model, self.lollms_paths.personal_outputs_path)
except Exception as ex:
trace_exception(ex)
@@ -490,7 +482,7 @@ class LollmsApplication(LoLLMsCom):
if self.config.active_tts_service == "xtts" and self.xtts is None:
ASCIIColors.yellow("Loading XTTS")
try:
-from lollms.services.xtts.lollms_xtts import LollmsXTTS
+from lollms.services.tts.xtts.lollms_xtts import LollmsXTTS
voice=self.config.xtts_current_voice
if voice!="main_voice":
voices_folder = self.lollms_paths.custom_voices_path
@@ -509,14 +501,14 @@ class LollmsApplication(LoLLMsCom):
ASCIIColors.blue("Loading local TTI services")
if self.config.enable_sd_service and self.sd is None:
try:
-from lollms.services.sd.lollms_sd import LollmsSD
+from lollms.services.tti.sd.lollms_sd import LollmsSD
self.sd = LollmsSD(self, auto_sd_base_url=self.config.sd_base_url)
except:
self.warning(f"Couldn't load SD")

if self.config.enable_comfyui_service and self.comfyui is None:
try:
-from lollms.services.comfyui.lollms_comfyui import LollmsComfyUI
+from lollms.services.tti.comfyui.lollms_comfyui import LollmsComfyUI
self.comfyui = LollmsComfyUI(self, comfyui_base_url=self.config.comfyui_base_url)
except:
self.warning(f"Couldn't load Comfyui")
@@ -532,43 +524,43 @@ class LollmsApplication(LoLLMsCom):

ASCIIColors.blue("Activating TTI service")
if self.config.active_tti_service == "diffusers" and (self.tti is None or self.tti.name!="diffusers"):
-from lollms.services.diffusers.lollms_diffusers import LollmsDiffusers
+from lollms.services.tti.diffusers.lollms_diffusers import LollmsDiffusers
self.tti = LollmsDiffusers(self)
elif self.config.active_tti_service == "autosd" and (self.tti is None or self.tti.name!="stable_diffusion"):
if self.sd:
self.tti = self.sd
else:
-from lollms.services.sd.lollms_sd import LollmsSD
+from lollms.services.tti.sd.lollms_sd import LollmsSD
self.tti = LollmsSD(self)
elif self.config.active_tti_service == "dall-e" and (self.tti is None or self.tti.name!="dall-e-2" or type(self.tti.name)!="dall-e-3"):
-from lollms.services.dalle.lollms_dalle import LollmsDalle
+from lollms.services.tti.dalle.lollms_dalle import LollmsDalle
self.tti = LollmsDalle(self, self.config.dall_e_key)
elif self.config.active_tti_service == "midjourney" and (self.tti is None or self.tti.name!="midjourney"):
-from lollms.services.midjourney.lollms_midjourney import LollmsMidjourney
+from lollms.services.tti.midjourney.lollms_midjourney import LollmsMidjourney
self.tti = LollmsMidjourney(self, self.config.midjourney_key, self.config.midjourney_timeout, self.config.midjourney_retries)
elif self.config.active_tti_service == "comfyui" and (self.tti is None or self.tti.name!="comfyui"):
if self.comfyui:
self.tti = self.comfyui
else:
-from lollms.services.comfyui.lollms_comfyui import LollmsComfyUI
+from lollms.services.tti.comfyui.lollms_comfyui import LollmsComfyUI
self.tti = LollmsComfyUI(self, comfyui_base_url=self.config.comfyui_base_url)

ASCIIColors.blue("Activating TTS service")
if self.config.active_tts_service == "eleven_labs_tts":
-from lollms.services.eleven_labs_tts.lollms_eleven_labs_tts import LollmsElevenLabsTTS
+from lollms.services.tts.eleven_labs_tts.lollms_eleven_labs_tts import LollmsElevenLabsTTS
self.tts = LollmsElevenLabsTTS(self, self.config.elevenlabs_tts_model_id, self.config.elevenlabs_tts_voice_id, self.config.elevenlabs_tts_key, stability=self.config.elevenlabs_tts_voice_stability, similarity_boost=self.config.elevenlabs_tts_voice_boost)
elif self.config.active_tts_service == "openai_tts" and (self.tts is None or self.tts.name!="openai_tts"):
-from lollms.services.open_ai_tts.lollms_openai_tts import LollmsOpenAITTS
+from lollms.services.tts.open_ai_tts.lollms_openai_tts import LollmsOpenAITTS
self.tts = LollmsOpenAITTS(self, self.config.openai_tts_model, self.config.openai_tts_voice, self.config.openai_tts_key)
elif self.config.active_tts_service == "xtts" and self.xtts:
self.tts = self.xtts

ASCIIColors.blue("Activating STT service")
if self.config.active_stt_service == "openai_whisper" and (self.tts is None or self.tts.name!="openai_whisper"):
-from lollms.services.openai_whisper.lollms_openai_whisper import LollmsOpenAIWhisper
+from lollms.services.stt.openai_whisper.lollms_openai_whisper import LollmsOpenAIWhisper
self.stt = LollmsOpenAIWhisper(self, self.config.openai_whisper_model, self.config.openai_whisper_key)
elif self.config.active_stt_service == "whisper" and (self.tts is None or self.tts.name!="whisper") :
-from lollms.services.whisper.lollms_whisper import LollmsWhisper
+from lollms.services.stt.whisper.lollms_whisper import LollmsWhisper
self.stt = LollmsWhisper(self, self.config.whisper_model)

except Exception as ex:
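Every start_* helper in the hunks above follows the same shape: check a flag or active_* value from the config, import the backend only then, trap failures, and register the loaded service. A self-contained sketch of that pattern using importlib rather than lollms' own classes; the config key, base URL default, and module path mirror the diff, while the SimpleNamespace stand-ins and the print fallback for trace_exception are illustrative only:

    import importlib
    from types import SimpleNamespace

    # Illustrative stand-ins for lollms' config and application objects.
    config = SimpleNamespace(enable_ollama_service=False,
                             ollama_base_url="http://localhost:11434")
    app = SimpleNamespace(ollama=None)
    ttt_services = []

    def start_ttt():
        """Lazily start text-to-text backends, mirroring LollmsApplication.start_ttt."""
        if config.enable_ollama_service:
            try:
                # New categorized path introduced by this commit.
                module = importlib.import_module("lollms.services.ttt.ollama.lollms_ollama")
                app.ollama = module.Service(app, base_url=config.ollama_base_url)
                ttt_services.append("ollama")
            except Exception as ex:
                print(f"Couldn't load ollama: {ex}")  # lollms calls trace_exception(ex) here

    start_ttt()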
@@ -1,5 +1,5 @@
# =================== Lord Of Large Language Multimodal Systems Configuration file ===========================
-version: 133
+version: 134
binding_name: null
model_name: null
model_variant: null
@@ -105,6 +105,7 @@ active_tts_service: "None" # xtts (offline), openai_tts (API key required), elev
active_tti_service: "None" # autosd (offline), dall-e (online)
active_stt_service: "None" # whisper (offline), asr (offline or online), openai_whiosper (API key required)
active_ttm_service: "None" # musicgen (offline)
+active_ttv_service: "None" # cog_video_x (offline)
# -------------------- Services --------------------------

# ***************** STT *****************
@@ -204,6 +205,10 @@ comfyui_model: v1-5-pruned-emaonly.ckpt
enable_motion_ctrl_service: false
motion_ctrl_base_url: http://localhost:7861

+
+# ***************** TTV *****************
+cog_video_x_model: "THUDM/CogVideoX-5b"
+
# ***************** TTT *****************

# ollama service
@@ -251,7 +251,7 @@ def build_meme_image_with_text_overlay(prompt, negative_prompt, width, height, t
try:
if processor.personality.config.active_tti_service == "diffusers":
if not processor.personality.app.tti:
-from lollms.services.diffusers.lollms_diffusers import LollmsDiffusers
+from lollms.services.tti.diffusers.lollms_diffusers import LollmsDiffusers
processor.step_start("Loading ParisNeo's fork of AUTOMATIC1111's stable diffusion service")
processor.personality.app.tti = LollmsDiffusers(processor.personality.app, processor.personality.name)
processor.personality.app.sd = processor.personality.app.tti
@@ -265,7 +265,7 @@ def build_meme_image_with_text_overlay(prompt, negative_prompt, width, height, t
)
elif processor.personality.config.active_tti_service == "autosd":
if not processor.personality.app.tti:
-from lollms.services.sd.lollms_sd import LollmsSD
+from lollms.services.tti.sd.lollms_sd import LollmsSD
processor.step_start("Loading ParisNeo's fork of AUTOMATIC1111's stable diffusion service")
processor.personality.app.tti = LollmsSD(processor.personality.app, processor.personality.name, max_retries=-1, auto_sd_base_url=processor.personality.config.sd_base_url)
processor.personality.app.sd = processor.personality.app.tti
@@ -279,7 +279,7 @@ def build_meme_image_with_text_overlay(prompt, negative_prompt, width, height, t
)
elif processor.personality.config.active_tti_service == "dall-e":
if not processor.personality.app.tti:
-from lollms.services.dalle.lollms_dalle import LollmsDalle
+from lollms.services.tti.dalle.lollms_dalle import LollmsDalle
processor.step_start("Loading dalle service")
processor.personality.app.tti = LollmsDalle(processor.personality.app, processor.personality.config.dall_e_key, processor.personality.config.dall_e_generation_engine)
processor.personality.app.dalle = processor.personality.app.tti
@@ -295,7 +295,7 @@ def build_meme_image_with_text_overlay(prompt, negative_prompt, width, height, t
processor.step_end("Painting")
elif processor.personality.config.active_tti_service == "comfyui":
if not processor.personality.app.tti:
-from lollms.services.comfyui.lollms_comfyui import LollmsComfyUI
+from lollms.services.tti.comfyui.lollms_comfyui import LollmsComfyUI
processor.step_start("Loading comfyui service")
processor.personality.app.tti = LollmsComfyUI(
processor.personality.app,
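The personality code above creates the TTI backend lazily the first time it is needed and caches it on the app object. A condensed sketch of that dispatch, assuming lollms is installed; the import paths and constructor calls are the ones visible in these diffs, while the helper name ensure_tti is illustrative:

    def ensure_tti(app, config):
        """Create and cache the active TTI backend on first use."""
        if getattr(app, "tti", None):
            return app.tti
        if config.active_tti_service == "diffusers":
            from lollms.services.tti.diffusers.lollms_diffusers import LollmsDiffusers
            app.tti = LollmsDiffusers(app)
        elif config.active_tti_service == "autosd":
            from lollms.services.tti.sd.lollms_sd import LollmsSD
            app.tti = LollmsSD(app)
        elif config.active_tti_service == "dall-e":
            from lollms.services.tti.dalle.lollms_dalle import LollmsDalle
            app.tti = LollmsDalle(app, config.dall_e_key)
        return app.tti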
@@ -42,7 +42,7 @@ def install_comfyui(request: ClientAuthentication):
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}

lollmsElfServer.ShowBlockingMessage("Installing comfyui server\nPlease stand by")
-from lollms.services.comfyui.lollms_comfyui import install_comfyui
+from lollms.services.tti.comfyui.lollms_comfyui import install_comfyui
install_comfyui(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
@@ -63,7 +63,7 @@ def upgrade_comfyui(request: ClientAuthentication):
return {"status":False,"error":"Service upgrade is blocked when the server is exposed outside for very obvious reasons!"}

lollmsElfServer.ShowBlockingMessage("Upgrading comfyui server\nPlease stand by")
-from lollms.services.comfyui.lollms_comfyui import upgrade_comfyui
+from lollms.services.tti.comfyui.lollms_comfyui import upgrade_comfyui
upgrade_comfyui(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
@@ -88,7 +88,7 @@ def start_comfyui(request: ClientAuthentication):
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}

lollmsElfServer.ShowBlockingMessage("Starting Comfyui\nPlease stand by")
-from lollms.services.comfyui.lollms_comfyui import get_comfyui
+from lollms.services.tti.comfyui.lollms_comfyui import get_comfyui
lollmsElfServer.comfyui = get_comfyui(lollmsElfServer.lollms_paths)(lollmsElfServer, lollmsElfServer.personality.name if lollmsElfServer.personality is not None else "Artbot")
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
@@ -108,5 +108,5 @@ def show_comfyui(request: ClientAuthentication):

@router.get("/list_comfyui_models")
def list_comfyui_models():
-from lollms.services.comfyui.lollms_comfyui import LollmsComfyUI
+from lollms.services.tti.comfyui.lollms_comfyui import LollmsComfyUI
return {"status":True, "models":LollmsComfyUI.get_models_list(lollmsElfServer)}
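These endpoints all defer the heavy service import until a request arrives and surround the work with a blocking message on the server object. A stripped-down sketch of that shape, assuming FastAPI; the route name below is illustrative, and the real handlers also take ClientAuthentication and refuse to run when the server is exposed externally:

    from fastapi import APIRouter

    router = APIRouter()

    @router.get("/install_comfyui_sketch")  # illustrative route name
    def install_comfyui_sketch():
        # Import only when the endpoint is hit, using the new tti.* path from this commit.
        from lollms.services.tti.comfyui.lollms_comfyui import install_comfyui
        # In lollms the call receives the lollmsElfServer instance and is wrapped in
        # ShowBlockingMessage(...) / HideBlockingMessage():
        # install_comfyui(lollmsElfServer)
        return {"status": True}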
@@ -47,7 +47,7 @@ def install_diffusers(data: Identification):
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}

lollmsElfServer.ShowBlockingMessage("Installing Diffusers library\nPlease stand by")
-from lollms.services.diffusers.lollms_diffusers import install_diffusers
+from lollms.services.tti.diffusers.lollms_diffusers import install_diffusers
install_diffusers(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
@@ -69,7 +69,7 @@ def upgrade_sd(data: Identification):
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}

lollmsElfServer.ShowBlockingMessage("Upgrading Diffusers library\nPlease stand by")
-from lollms.services.diffusers.lollms_diffusers import upgrade_diffusers
+from lollms.services.tti.diffusers.lollms_diffusers import upgrade_diffusers
upgrade_diffusers(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
@@ -41,7 +41,7 @@ def install_ollama(request: ClientAuthentication):
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}

lollmsElfServer.ShowBlockingMessage("Installing ollama server\nPlease stand by")
-from lollms.services.ollama.lollms_ollama import install_ollama
+from lollms.services.ttt.ollama.lollms_ollama import install_ollama
if install_ollama(lollmsElfServer):
lollmsElfServer.HideBlockingMessage()
return {"status":True}
@@ -61,7 +61,7 @@ def start_ollama(request: ClientAuthentication):

if not hasattr(lollmsElfServer,"vllm") or lollmsElfServer.vllm is None:
lollmsElfServer.ShowBlockingMessage("Loading vllm server\nPlease stand by")
-from lollms.services.vllm.lollms_vllm import get_vllm
+from lollms.services.ttt.vllm.lollms_vllm import get_vllm
server = get_vllm(lollmsElfServer)

if server:
@@ -48,7 +48,7 @@ def install_sd(data: Identification):
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}

lollmsElfServer.ShowBlockingMessage("Installing SD api server\nPlease stand by")
-from lollms.services.sd.lollms_sd import install_sd
+from lollms.services.tti.sd.lollms_sd import install_sd
install_sd(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
@@ -70,7 +70,7 @@ def upgrade_sd(data: Identification):
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}

lollmsElfServer.ShowBlockingMessage("Installing SD api server\nPlease stand by")
-from lollms.services.sd.lollms_sd import upgrade_sd
+from lollms.services.tti.sd.lollms_sd import upgrade_sd
upgrade_sd(lollmsElfServer)
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
@@ -94,7 +94,7 @@ def start_sd(data: Identification):
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}

lollmsElfServer.ShowBlockingMessage("Starting SD api server\nPlease stand by")
-from lollms.services.sd.lollms_sd import LollmsSD
+from lollms.services.tti.sd.lollms_sd import LollmsSD
lollmsElfServer.sd = LollmsSD.get(lollmsElfServer)(lollmsElfServer, lollmsElfServer.personality.name if lollmsElfServer.personality is not None else "Artbot")
ASCIIColors.success("Done")
lollmsElfServer.HideBlockingMessage()
@@ -42,7 +42,7 @@ def install_vllm(request: ClientAuthentication):
return {"status":False,"error":"Service installation is blocked when the server is exposed outside for very obvious reasons!"}

lollmsElfServer.ShowBlockingMessage("Installing vllm server\nPlease stand by")
-from lollms.services.vllm.lollms_vllm import install_vllm
+from lollms.services.ttt.vllm.lollms_vllm import install_vllm
if install_vllm(lollmsElfServer):
lollmsElfServer.HideBlockingMessage()
return {"status":True}
@@ -62,7 +62,7 @@ def start_vllm(request: ClientAuthentication):

if not hasattr(lollmsElfServer,"vllm") or lollmsElfServer.vllm is None:
lollmsElfServer.ShowBlockingMessage("Loading vllm server\nPlease stand by")
-from lollms.services.vllm.lollms_vllm import get_vllm
+from lollms.services.ttt.vllm.lollms_vllm import get_vllm
server = get_vllm(lollmsElfServer)

if server:
@@ -148,7 +148,7 @@ def get_comfyui(lollms_paths:LollmsPaths):
ASCIIColors.success("comfyui found.")
ASCIIColors.success("Loading source file...",end="")
# use importlib to load the module from the file path
-from lollms.services.comfyui.lollms_comfyui import LollmsComfyUI
+from lollms.services.tti.comfyui.lollms_comfyui import LollmsComfyUI
ASCIIColors.success("ok")
return LollmsComfyUI

@@ -193,7 +193,7 @@ class LollmsDiffusers(LollmsTTI):
ASCIIColors.success("lollms_diffusers found.")
ASCIIColors.success("Loading source file...",end="")
# use importlib to load the module from the file path
-from lollms.services.diffusers.lollms_diffusers import LollmsDiffusers
+from lollms.services.tti.diffusers.lollms_diffusers import LollmsDiffusers
ASCIIColors.success("ok")
return LollmsDiffusers

@@ -349,7 +349,7 @@ class LollmsSD(LollmsTTI):
ASCIIColors.success("lollms_sd found.")
ASCIIColors.success("Loading source file...",end="")
# use importlib to load the module from the file path
-from lollms.services.sd.lollms_sd import LollmsSD
+from lollms.services.tti.sd.lollms_sd import LollmsSD
ASCIIColors.success("ok")
return LollmsSD

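The three service modules above expose small resolver helpers (get_comfyui, and the get helpers used as LollmsSD.get / on LollmsDiffusers) that import and return the class only when it is asked for. A generic sketch of that idea using only the standard library; the function name and the example path passed to it are illustrative:

    import importlib

    def get_service_class(module_path: str, class_name: str):
        """Resolve a service class by dotted path at call time (the lollms helpers
        also print ASCIIColors status messages around this)."""
        module = importlib.import_module(module_path)
        return getattr(module, class_name)

    # With the new layout from this commit (requires lollms to be installed):
    # LollmsComfyUI = get_service_class("lollms.services.tti.comfyui.lollms_comfyui", "LollmsComfyUI")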