This commit is contained in:
Saifeddine ALOUI 2024-01-30 00:09:04 +01:00
parent dda12c411a
commit 4c5a6f2560
15 changed files with 123 additions and 428 deletions

13
app.py
View File

@ -78,17 +78,18 @@ if __name__ == "__main__":
from lollms.server.endpoints.lollms_generator import router as lollms_generator_router
from lollms.server.endpoints.lollms_configuration_infos import router as lollms_configuration_infos_router
from lollms.server.endpoints.lollms_user import router as lollms_user_router
from lollms.server.endpoints.lollms_xtts import router as lollms_xtts_add_router
from lollms.server.endpoints.lollms_sd import router as lollms_sd_router
from lollms.server.endpoints.lollms_ollama import router as lollms_ollama_router
from lollms.server.endpoints.lollms_vllm import router as lollms_vllm_router
from endpoints.lollms_webui_infos import router as lollms_webui_infos_router
from endpoints.lollms_discussion import router as lollms_discussion_router
from endpoints.lollms_message import router as lollms_message_router
from endpoints.lollms_user import router as lollms_user_router
from endpoints.lollms_advanced import router as lollms_advanced_router
from endpoints.chat_bar import router as chat_bar_router
from endpoints.lollms_xtts import router as lollms_xtts_add_router
from endpoints.lollms_sd import router as lollms_sd_router
from endpoints.lollms_ollama import router as lollms_ollama_router
from endpoints.lollms_petals import router as lollms_petals_router
from endpoints.lollms_vllm import router as lollms_vllm_router
from lollms.server.endpoints.lollms_petals import router as lollms_petals_router
from endpoints.lollms_playground import router as lollms_playground_router

View File

@ -1,43 +0,0 @@
"""
project: lollms_webui
file: lollms_ollama.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that expose the installation
of the ollama service for the LoLLMs Web UI.
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()  # all endpoints below attach to this module-level router
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()  # shared web-UI singleton
# ----------------------- ollama ------------------------------
@router.get("/install_ollama")
def install_ollama():
    """Install the ollama server on the host.

    Shows a blocking UI message while the installer runs, then returns
    ``{"status": True}`` on success or ``{"status": False, "error": ...}``
    on failure.
    """
    try:
        lollmsElfServer.ShowBlockingMessage("Installing ollama server\nPlease stand by")
        from lollms.services.ollama.lollms_ollama import install_ollama
        if install_ollama(lollmsElfServer):
            lollmsElfServer.HideBlockingMessage()
            return {"status":True}
        else:
            # Bug fix: the original referenced an undefined `ex` here (NameError)
            # and left the blocking message visible on failure.
            lollmsElfServer.HideBlockingMessage()
            return {"status":False, 'error':"ollama installation failed"}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}

View File

@ -1,42 +0,0 @@
"""
project: lollms_webui
file: lollms_petals.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that concerns petals service
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()  # all endpoints below attach to this module-level router
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()  # shared web-UI singleton
# ----------------------- petals ------------------------------
@router.get("/install_petals")
def install_petals():
    """Install the petals server on the host.

    Shows a blocking UI message while the installer runs, then returns
    ``{"status": True}`` on success or ``{"status": False, "error": ...}``
    on failure.
    """
    try:
        lollmsElfServer.ShowBlockingMessage("Installing petals server\nPlease stand by")
        from lollms.services.petals.lollms_petals import install_petals
        if install_petals(lollmsElfServer):
            lollmsElfServer.HideBlockingMessage()
            return {"status":True}
        else:
            # Bug fix: the original referenced an undefined `ex` here (NameError)
            # and left the blocking message visible on failure.
            lollmsElfServer.HideBlockingMessage()
            return {"status":False, 'error':"petals installation failed"}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}

View File

@ -1,41 +0,0 @@
"""
project: lollms_webui
file: lollms_sd.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that expose the installation
of the Stable Diffusion (SD) service for the LoLLMs Web UI.
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()  # all endpoints below attach to this module-level router
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()  # shared web-UI singleton
# ----------------------- stable diffusion ------------------------------
@router.get("/install_sd")
def install_sd():
    """Install the Stable Diffusion (SD) API server on the host.

    Returns ``{"status": True}`` on success or
    ``{"status": False, "error": ...}`` on failure.
    """
    try:
        lollmsElfServer.ShowBlockingMessage("Installing SD api server\nPlease stand by")
        from lollms.services.sd.lollms_sd import install_sd
        install_sd(lollmsElfServer)
        ASCIIColors.success("Done")
        lollmsElfServer.HideBlockingMessage()
        return {"status":True}
    except Exception as ex:
        # Consistency fix: log the traceback like the other service installers do.
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}

View File

@ -1,51 +0,0 @@
"""
project: lollms_user
file: lollms_user.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to manipulate user information.
"""
from fastapi import APIRouter
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string
from ascii_colors import ASCIIColors
from api.db import DiscussionsDB
from pathlib import Path
from safe_store.text_vectorizer import TextVectorizer, VectorizationMethod, VisualizationMethod
import tqdm
from fastapi import FastAPI, UploadFile, File
import shutil
class PersonalPathParameters(BaseModel):
    """Request body carrying the new personal data folder path."""
    # Filesystem path to the personal data folder the user wants to switch to.
    path:str
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()  # all endpoints below attach to this module-level router
lollmsElfServer = LOLLMSWebUI.get_instance()  # shared web-UI singleton
@router.get("/switch_personal_path")
def switch_personal_path(data:PersonalPathParameters):
    """Point the global configuration at a new personal data folder.

    NOTE(review): a GET endpoint taking a request body is unusual; consider
    switching this route to POST (kept as GET for caller compatibility).

    :param data: carries the new personal folder path.
    :return: ``{"status": True}`` on success, ``{"status": False, "error": ...}``
        otherwise.
    """
    path = data.path
    global_paths_cfg = Path("./global_paths_cfg.yaml")
    if not global_paths_cfg.exists():
        # Bug fix: the original fell through and implicitly returned None
        # when the config file was missing.
        return {"status": False, 'error': "global_paths_cfg.yaml not found"}
    try:
        cfg = BaseConfig()
        cfg.load_config(global_paths_cfg)
        cfg.lollms_personal_path = path
        cfg.save_config(global_paths_cfg)
        return {"status": True}
    except Exception as ex:
        print(ex)
        return {"status": False, 'error':f"Couldn't switch path: {ex}"}
@router.post("/upload_avatar")
def upload_avatar(avatar: UploadFile = File(...)):
    """Save an uploaded avatar image into the user's personal infos folder.

    :param avatar: multipart file upload from the client.
    :return: ``{"status": True, "fileName": <saved name>}``.
    """
    # Security fix: the client-supplied filename is untrusted; strip any
    # directory components to prevent path traversal outside the folder.
    safe_name = Path(avatar.filename).name
    with open(lollmsElfServer.lollms_paths.personal_user_infos_path/safe_name, "wb") as buffer:
        shutil.copyfileobj(avatar.file, buffer)
    return {"status": True,"fileName":safe_name}

View File

@ -1,42 +0,0 @@
"""
project: lollms_webui
file: lollms_vllm.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that concerns petals service
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()  # all endpoints below attach to this module-level router
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()  # shared web-UI singleton
# ----------------------- vllm ------------------------------
@router.get("/install_vllm")
def install_petals():
    # NOTE(review): function is misnamed (it installs vllm, not petals);
    # name kept unchanged in case it is imported elsewhere — the route
    # path "/install_vllm" is the real public interface.
    """Install the vllm server on the host.

    Returns ``{"status": True}`` on success or
    ``{"status": False, "error": ...}`` on failure.
    """
    try:
        # Bug fix: the original message said "ollama"; this route installs vllm.
        lollmsElfServer.ShowBlockingMessage("Installing vllm server\nPlease stand by")
        from lollms.services.vllm.lollms_vllm import install_vllm
        if install_vllm(lollmsElfServer):
            lollmsElfServer.HideBlockingMessage()
            return {"status":True}
        else:
            # Bug fix: the original referenced an undefined `ex` here (NameError)
            # and left the blocking message visible on failure.
            lollmsElfServer.HideBlockingMessage()
            return {"status":False, 'error':"vllm installation failed"}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}

View File

@ -1,116 +0,0 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide text-to-speech (XTTS)
services. These routes allow users to list and select voices and synthesize audio.
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()  # all endpoints below attach to this module-level router
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()  # shared web-UI singleton
# ----------------------- voice ------------------------------
@router.get("/list_voices")
def list_voices():
    """Return the names of all available voices: the built-in main voice plus
    every custom .wav sample found in the user's custom voices folder."""
    ASCIIColors.yellow("Listing voices")
    custom_dir: Path = lollmsElfServer.lollms_paths.custom_voices_path
    available = ["main_voice"] + [f.stem for f in custom_dir.iterdir() if f.suffix == ".wav"]
    return {"voices": available}
@router.post("/set_voice")
async def set_voice(request: Request):
    """
    Changes current voice

    :param request: The HTTP request object.
    :return: A JSON response with the status of the operation.
    """
    try:
        payload = await request.json()
        # Persist the selected voice in the server configuration.
        lollmsElfServer.config.current_voice = payload["voice"]
        if lollmsElfServer.config.auto_save:
            lollmsElfServer.config.save_config()
        return {"status": True}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.error(ex)
        return {"status": False, "error": str(ex)}
@router.post("/text2Audio")
async def text2Audio(request: Request):
    """
    Synthesizes the posted text to a wav file and returns its URL.

    Expected JSON body: ``{"text": str, "voice": optional str, "fn": optional str}``.
    :param request: The HTTP request object.
    :return: ``{"url": <audio url>}`` on success, ``{"url": None}`` when the
        XTTS service is unavailable, or ``{"status": False, "error": ...}``
        on unexpected failure.
    """
    try:
        data = (await request.json())
        # Initialize the XTTS service exactly once — the original duplicated
        # this import/initialization block twice inside the same handler.
        try:
            from lollms.services.xtts.lollms_xtts import LollmsXTTS
            if lollmsElfServer.tts is None:
                lollmsElfServer.tts = LollmsXTTS(lollmsElfServer, voice_samples_path=Path(__file__).parent/"voices", xtts_base_url= lollmsElfServer.config.xtts_base_url)
        except:
            return {"url": None}
        voice=data.get("voice",lollmsElfServer.config.current_voice)
        index = find_first_available_file_index(lollmsElfServer.tts.output_folder, "voice_sample_",".wav")
        output_fn=data.get("fn",f"voice_sample_{index}.wav")
        if voice is None:
            voice = "main_voice"
        lollmsElfServer.info("Starting to build voice")
        try:
            language = lollmsElfServer.config.current_language# convert_language_name()
            # Custom voices live in the user folder; the stock voice ships with the app.
            if voice!="main_voice":
                voices_folder = lollmsElfServer.lollms_paths.custom_voices_path
            else:
                voices_folder = Path(__file__).parent.parent/"voices"
            lollmsElfServer.tts.set_speaker_folder(voices_folder)
            url = f"audio/{output_fn}"
            preprocessed_text= add_period(data['text'])
            lollmsElfServer.tts.tts_to_file(preprocessed_text, f"{voice}.wav", f"{output_fn}", language=language)
            lollmsElfServer.info("Voice file ready")
            return {"url": url}
        except:
            return {"url": None}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.error(ex)
        return {"status":False,"error":str(ex)}
@router.get("/install_xtts")
def install_xtts():
    """Install the xTTS API server on the host.

    Returns ``{"status": True}`` on success or
    ``{"status": False, "error": ...}`` on failure.
    """
    try:
        from lollms.services.xtts.lollms_xtts import install_xtts
        lollmsElfServer.ShowBlockingMessage("Installing xTTS api server\nPlease stand by")
        install_xtts(lollmsElfServer)
        lollmsElfServer.HideBlockingMessage()
        return {"status":True}
    except Exception as ex:
        # Consistency fix: log the traceback like the other service installers do.
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status":False, 'error':str(ex)}

@ -1 +1 @@
Subproject commit b5de353df365b18250518bc486cc80eb2aefe82c
Subproject commit cd202fd74f7682c0d153fea9c84c9195f41b5a24

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

8
web/dist/assets/index-f6bc0a28.css vendored Normal file

File diff suppressed because one or more lines are too long

4
web/dist/index.html vendored
View File

@ -6,8 +6,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LoLLMS WebUI - Welcome</title>
<script type="module" crossorigin src="/assets/index-abe2c772.js"></script>
<link rel="stylesheet" href="/assets/index-ecb0c556.css">
<script type="module" crossorigin src="/assets/index-c61e02f3.js"></script>
<link rel="stylesheet" href="/assets/index-f6bc0a28.css">
</head>
<body>
<div id="app"></div>

View File

@ -15,6 +15,14 @@
>
<i data-feather="mic"></i>
</button>
<button
type="button"
@click="startRecording"
:class="{ 'text-green-500': isLesteningToVoice }"
class="w-6 hover:text-secondary duration-75 active:scale-90 cursor-pointer text-red-500"
>
<i data-feather="mic"></i>
</button>
<button
title="speak"
@click.stop="speak()"
@ -809,6 +817,16 @@ export default {
this.$refs.toast.showToast(`Error: ${ex}`,4,false)
});
},
startRecording(){
axios.get('./get_presets').then(response => {
console.log(response.data)
this.presets=response.data
this.selectedPreset = this.presets[0]
}).catch(ex=>{
this.$refs.toast.showToast(`Error: ${ex}`,4,false)
});
},
startSpeechRecognition() {
if ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window) {
this.recognition = new (window.SpeechRecognition || window.webkitSpeechRecognition)();

View File

@ -1095,11 +1095,12 @@
</tr>
<tr>
<td style="min-width: 200px;">
<label for="vllm_url" class="text-sm font-bold" style="margin-right: 1rem;">Install cLLM service:</label>
<label for="vllm_url" class="text-sm font-bold" style="margin-right: 1rem;">Install vLLM service:</label>
</td>
<td>
<div class="flex flex-row">
<button class="hover:text-primary bg-green-200 rounded-lg p-4 m-4 w-full text-center items-center" @click="reinstallvLLMService">install vLLM service</button>
<button class="hover:text-primary bg-green-200 rounded-lg p-4 m-4 w-full text-center items-center" @click="startvLLMService">start vLLM service</button>
</div>
</td>
</tr>
@ -2442,9 +2443,19 @@ export default {
console.error(error);
});
},
startvLLMService(){
axios.get('start_vllm')
.then(response => {
})
.catch(error => {
console.error(error);
});
},
reinstallPetalsService(){
axios.get('install_vllm')
axios.get('install_petals')
.then(response => {
})

@ -1 +1 @@
Subproject commit 4d5628f145a46ab57895f98b95653123e3f0c96d
Subproject commit 3ee3cb49fa4be5fe4bbf42cec8c8a9e698c92b6c