This commit is contained in:
Saifeddine ALOUI 2024-01-11 02:32:21 +01:00
parent 22c3b83608
commit 02f485da28
15 changed files with 4105 additions and 25 deletions

2
.gitignore vendored
View File

@ -211,3 +211,5 @@ outputs_*
output_* output_*
mPLUG-Owl mPLUG-Owl
xtts_models

8
app.py
View File

@ -393,6 +393,11 @@ try:
self.add_endpoint("/text2Audio", "text2Audio", self.text2Audio, methods=["POST"]) self.add_endpoint("/text2Audio", "text2Audio", self.text2Audio, methods=["POST"])
self.add_endpoint("/install_xtts", "install_xtts", self.install_xtts, methods=["GET"]) self.add_endpoint("/install_xtts", "install_xtts", self.install_xtts, methods=["GET"])
self.add_endpoint("/install_sd", "install_sd", self.install_sd, methods=["GET"])
# ---- # ----
@ -417,9 +422,6 @@ try:
"/execute_code", "execute_code", self.execute_code, methods=["POST"] "/execute_code", "execute_code", self.execute_code, methods=["POST"]
) )
self.add_endpoint(
"/install_sd", "install_sd", self.install_sd, methods=["GET"]
)
self.add_endpoint("/update_binding_settings", "update_binding_settings", self.update_binding_settings, methods=["GET"]) self.add_endpoint("/update_binding_settings", "update_binding_settings", self.update_binding_settings, methods=["GET"])

View File

@ -1,5 +1,5 @@
# =================== Lord Of Large Language Models Configuration file =========================== # =================== Lord Of Large Language Models Configuration file ===========================
version: 40 version: 41
binding_name: null binding_name: null
model_name: null model_name: null
@ -60,6 +60,10 @@ current_language: en
enable_sd_service: false enable_sd_service: false
sd_base_url: http://127.0.0.1:7860 sd_base_url: http://127.0.0.1:7860
# ollama service
enable_ollama_service: false
ollama_base_url: http://0.0.0.0:11434
# Audio # Audio
media_on: false media_on: false
audio_in_language: 'en-US' audio_in_language: 'en-US'

View File

@ -0,0 +1,43 @@
"""
project: lollms_webui
file: lollms_ollama.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
# ----------------------- voice ------------------------------
@router.get("/install_ollama")
def install_ollama():
    """Install the ollama server through the lollms service helper.

    Shows a blocking UI message while the installation runs, then returns a
    JSON-serializable status dict: ``{"status": True}`` on success, or
    ``{"status": False, "error": <message>}`` on failure.
    """
    try:
        lollmsElfServer.ShowBlockingMessage("Installing ollama server\nPlease stand by")
        # Imported lazily so the webui can start even when the service module
        # is unavailable; aliased to avoid shadowing this endpoint function.
        from lollms.services.ollama.lollms_ollama import install_ollama as do_install_ollama
        if do_install_ollama(lollmsElfServer):
            lollmsElfServer.HideBlockingMessage()
            return {"status": True}
        else:
            # Bug fix: the original referenced the undefined name `ex` here
            # (NameError) and left the blocking message displayed on failure.
            lollmsElfServer.HideBlockingMessage()
            return {"status": False, 'error': "ollama installation failed"}
    except Exception as ex:
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status": False, 'error': str(ex)}

View File

@ -0,0 +1,45 @@
"""
project: lollms_webui
file: lollms_xtts.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
import yaml
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
# ----------------------- voice ------------------------------
@router.get("/get_presets")
def get_presets():
    """Return the list of playground presets as parsed YAML dicts.

    Loads every ``*.yaml`` file from the application's ``presets`` folder
    (relative to the current working directory, as in the original
    ``Path("__file__").parent`` idiom — TODO confirm this is the intended
    base), then from the user's personal presets folder, which is created
    on demand.

    Bug fix: the route was ``"/install_ollama"`` — an obvious copy/paste
    mistake that collided with the real ollama installation endpoint.
    """
    def _load_presets_from(folder: Path) -> list:
        # Parse every *.yaml file in `folder`; yaml.safe_load returns None
        # for empty files, which are silently skipped.
        loaded = []
        for preset_path in folder.glob('*.yaml'):
            with open(preset_path, 'r', encoding='utf-8') as file:
                preset = yaml.safe_load(file)
                if preset is not None:
                    loaded.append(preset)
        return loaded

    # Path("__file__").parent == Path("."), so this is the cwd-relative
    # "presets" folder — behavior preserved from the original.
    presets = _load_presets_from(Path("presets"))
    user_presets_folder = lollmsElfServer.lollms_paths.personal_databases_path / "lollms_playground_presets"
    user_presets_folder.mkdir(exist_ok=True, parents=True)
    presets += _load_presets_from(user_presets_folder)
    return presets

40
endpoints/lollms_sd.py Normal file
View File

@ -0,0 +1,40 @@
"""
project: lollms_webui
file: lollms_sd.py
author: ParisNeo
description:
This module contains a set of FastAPI routes that provide information about the Lord of Large Language and Multimodal Systems (LoLLMs) Web UI
application. These routes allow users to
"""
from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path
from ascii_colors import ASCIIColors
import os
import platform
# ----------------------- Defining router and main class ------------------------------
router = APIRouter()
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
# ----------------------- voice ------------------------------
@router.get("/install_sd")
def install_sd():
    """Install the Stable Diffusion api server through the lollms service helper.

    Shows a blocking UI message while the installation runs, then returns a
    JSON-serializable status dict: ``{"status": True}`` on success, or
    ``{"status": False, "error": <message>}`` on failure.

    Bug fix: the route was ``"/install_xtts"`` — a copy/paste mistake that
    collided with the real xTTS endpoint; app.py registers ``/install_sd``.
    """
    try:
        lollmsElfServer.ShowBlockingMessage("Installing SD api server\nPlease stand by")
        # Imported lazily so the webui can start even when the service module
        # is unavailable; aliased to avoid shadowing this endpoint function.
        from lollms.services.sd.lollms_sd import install_sd as do_install_sd
        do_install_sd()
        lollmsElfServer.HideBlockingMessage()
        return {"status": True}
    except Exception as ex:
        # Consistency: the sibling ollama endpoint logs the traceback too.
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status": False, 'error': str(ex)}

View File

@ -13,7 +13,7 @@ from pydantic import BaseModel
from starlette.responses import StreamingResponse from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE from lollms.types import MSG_TYPE
from lollms.main_config import BaseConfig from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from pathlib import Path from pathlib import Path
from ascii_colors import ASCIIColors from ascii_colors import ASCIIColors
import os import os
@ -27,7 +27,7 @@ lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
# ----------------------- voice ------------------------------ # ----------------------- voice ------------------------------
@router.get("/set_voice") @router.get("/list_voices")
def list_voices(): def list_voices():
ASCIIColors.yellow("Listing voices") ASCIIColors.yellow("Listing voices")
voices=["main_voice"] voices=["main_voice"]
@ -103,3 +103,14 @@ async def text2Audio(request: Request):
trace_exception(ex) trace_exception(ex)
lollmsElfServer.error(ex) lollmsElfServer.error(ex)
return {"status":False,"error":str(ex)} return {"status":False,"error":str(ex)}
@router.get("/install_xtts")
def install_xtts():
    """Install the xTTS api server package via the package manager.

    Shows a blocking UI message while ``xtts-api-server`` is installed, then
    returns a JSON-serializable status dict: ``{"status": True}`` on success,
    or ``{"status": False, "error": <message>}`` on failure.
    """
    try:
        lollmsElfServer.ShowBlockingMessage("Installing xTTS api server\nPlease stand by")
        PackageManager.install_package("xtts-api-server")
        lollmsElfServer.HideBlockingMessage()
        return {"status": True}
    except Exception as ex:
        # Consistency: other handlers in this file log the traceback
        # via trace_exception before reporting the error.
        trace_exception(ex)
        lollmsElfServer.HideBlockingMessage()
        return {"status": False, 'error': str(ex)}

@ -1 +1 @@
Subproject commit d440b3db8a88cbd667e5f77ef24238d9f134f166 Subproject commit 84e25d271445f29abf3fbbd1d4d30569b9eabc79

View File

@ -70,6 +70,7 @@ if __name__ == "__main__":
from lollms.server.endpoints.lollms_extensions_infos import router as lollms_extensions_infos_router from lollms.server.endpoints.lollms_extensions_infos import router as lollms_extensions_infos_router
from lollms.server.endpoints.lollms_generator import router as lollms_generator_router from lollms.server.endpoints.lollms_generator import router as lollms_generator_router
from lollms.server.endpoints.lollms_configuration_infos import router as lollms_configuration_infos_router from lollms.server.endpoints.lollms_configuration_infos import router as lollms_configuration_infos_router
from endpoints.lollms_webui_infos import router as lollms_webui_infos_router from endpoints.lollms_webui_infos import router as lollms_webui_infos_router
from endpoints.lollms_discussion import router as lollms_discussion_router from endpoints.lollms_discussion import router as lollms_discussion_router
from endpoints.lollms_message import router as lollms_message_router from endpoints.lollms_message import router as lollms_message_router
@ -77,7 +78,8 @@ if __name__ == "__main__":
from endpoints.lollms_advanced import router as lollms_advanced_router from endpoints.lollms_advanced import router as lollms_advanced_router
from endpoints.chat_bar import router as chat_bar_router from endpoints.chat_bar import router as chat_bar_router
from endpoints.lollms_xtts import router as lollms_xtts_add_router from endpoints.lollms_xtts import router as lollms_xtts_add_router
from endpoints.lollms_sd import router as lollms_sd_router
from endpoints.lollms_ollama import router as lollms_ollama_router
from lollms.server.events.lollms_generation_events import add_events as lollms_generation_events_add from lollms.server.events.lollms_generation_events import add_events as lollms_generation_events_add
@ -100,6 +102,7 @@ if __name__ == "__main__":
app.include_router(lollms_webui_infos_router) app.include_router(lollms_webui_infos_router)
app.include_router(lollms_generator_router) app.include_router(lollms_generator_router)
app.include_router(lollms_discussion_router) app.include_router(lollms_discussion_router)
@ -109,6 +112,8 @@ if __name__ == "__main__":
app.include_router(chat_bar_router) app.include_router(chat_bar_router)
app.include_router(lollms_xtts_add_router) app.include_router(lollms_xtts_add_router)
app.include_router(lollms_sd_router)
app.include_router(lollms_ollama_router)

8
web/dist/assets/index-b4b20421.css vendored Normal file

File diff suppressed because one or more lines are too long

3852
web/dist/assets/index-ce5f22eb.js vendored Normal file

File diff suppressed because one or more lines are too long

4
web/dist/index.html vendored
View File

@ -6,8 +6,8 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LoLLMS WebUI - Welcome</title> <title>LoLLMS WebUI - Welcome</title>
<script type="module" crossorigin src="/assets/index-c7401936.js"></script> <script type="module" crossorigin src="/assets/index-ce5f22eb.js"></script>
<link rel="stylesheet" href="/assets/index-27c35171.css"> <link rel="stylesheet" href="/assets/index-b4b20421.css">
</head> </head>
<body> <body>
<div id="app"></div> <div id="app"></div>

View File

@ -2,7 +2,7 @@ import { defineNode, NodeInterface, TextInterface, SelectInterface } from "bakla
import axios from 'axios'; import axios from 'axios';
import { store } from '../main' import { store } from '../main'
export const AgentNode = defineNode({ export const PersonalityNode = defineNode({
type: "PersonalityNode", type: "PersonalityNode",
title: "Personality", title: "Personality",
inputs: { inputs: {
@ -13,14 +13,13 @@ export const AgentNode = defineNode({
), ),
}, },
outputs: { outputs: {
display: () => new TextInterface("Output", ""),
response: () => new NodeInterface("Response", "") response: () => new NodeInterface("Response", "")
}, },
async calculate({ request }) { async calculate({ request }) {
console.log(store.state.config.personalities) console.log(store.state.config.personalities)
let response = ''; let response = '';
try { try {
const result = await axios.get('/generate', { params: { text: request } }); const result = await axios.post('/generate', { params: { text: request } });
response = result.data; response = result.data;
} catch (error) { } catch (error) {
console.error(error); console.error(error);

View File

@ -17,9 +17,12 @@
//import "../css/classic.css"; //import "../css/classic.css";
import "@baklavajs/themes/dist/syrup-dark.css"; import "@baklavajs/themes/dist/syrup-dark.css";
import { AgentNode } from "../nodes/Personality"; import { PersonalityNode } from "../nodes/Personality";
import { RAGNode } from "../nodes/Rag"; import { RAGNode } from "../nodes/Rag";
import { TaskNode } from "../nodes/Task"; import { TaskNode } from "../nodes/Task";
import { TextDisplayNode } from "../nodes/TextDisplay";
import { LLMNode } from "../nodes/LLM";
import { MultichoiceNode } from "../nodes/Multichoice"
export default defineComponent({ export default defineComponent({
components: { components: {
@ -30,9 +33,15 @@
const engine = new DependencyEngine(baklava.editor); const engine = new DependencyEngine(baklava.editor);
baklava.editor.registerNodeType(AgentNode); baklava.editor.registerNodeType(PersonalityNode);
baklava.editor.registerNodeType(TaskNode); baklava.editor.registerNodeType(TaskNode);
baklava.editor.registerNodeType(RAGNode); baklava.editor.registerNodeType(RAGNode);
baklava.editor.registerNodeType(TextDisplayNode);
baklava.editor.registerNodeType(LLMNode);
baklava.editor.registerNodeType(MultichoiceNode);
const token = Symbol(); const token = Symbol();
engine.events.afterRun.subscribe(token, (result) => { engine.events.afterRun.subscribe(token, (result) => {
@ -51,12 +60,16 @@
return n; return n;
} }
const node1 = addNodeWithCoordinates(TaskNode, 300, 140); const node1 = addNodeWithCoordinates(TaskNode, 300, 140);
const node2 = addNodeWithCoordinates(AgentNode, 550, 140); const node2 = addNodeWithCoordinates(LLMNode, 550, 140);
const node3 = addNodeWithCoordinates(TextDisplayNode, 850, 140);
baklava.displayedGraph.addConnection( baklava.displayedGraph.addConnection(
node1.outputs.result, node1.outputs.prompt,
node2.inputs.value node2.inputs.request
);
baklava.displayedGraph.addConnection(
node2.outputs.response,
node3.inputs.text2display
); );
return { return {
baklava, baklava,
saveGraph: () => { saveGraph: () => {

View File

@ -955,6 +955,55 @@
</tr> </tr>
</table> </table>
</Card> </Card>
<Card title="Ollama service" :is_subcard="true" class="pb-2 m-2">
<table class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 dark:bg-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-blue-500 dark:focus:border-blue-500">
<tr>
<td style="min-width: 200px;">
<label for="enable_ollama_service" class="text-sm font-bold" style="margin-right: 1rem;">Enable ollama service:</label>
</td>
<td>
<div class="flex flex-row">
<input
type="checkbox"
id="enable_ollama_service"
required
v-model="configFile.enable_ollama_service"
@change="settingsChanged=true"
class="mt-1 px-2 py-1 border border-gray-300 rounded dark:bg-gray-600"
>
</div>
</td>
</tr>
<tr>
<td style="min-width: 200px;">
<label for="ollama_base_url" class="text-sm font-bold" style="margin-right: 1rem;">Reinstall Ollama service:</label>
</td>
<td>
<div class="flex flex-row">
<button class="hover:text-primary bg-green-200 rounded-lg p-4 m-4 w-full text-center items-center" @click="reinstallOLLAMAService">Reinstall ollama service</button>
</div>
</td>
</tr>
<tr>
<td style="min-width: 200px;">
<label for="ollama_base_url" class="text-sm font-bold" style="margin-right: 1rem;">ollama base url:</label>
</td>
<td>
<div class="flex flex-row">
<input
type="text"
id="ollama_base_url"
required
v-model="configFile.ollama_base_url"
@change="settingsChanged=true"
class="mt-1 px-2 py-1 border border-gray-300 rounded dark:bg-gray-600"
>
</div>
</td>
</tr>
</table>
</Card>
<Card title="XTTS service" :is_subcard="true" class="pb-2 m-2"> <Card title="XTTS service" :is_subcard="true" class="pb-2 m-2">
<table class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 dark:bg-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-blue-500 dark:focus:border-blue-500"> <table class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 dark:bg-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-blue-500 dark:focus:border-blue-500">
<tr> <tr>
@ -2177,6 +2226,16 @@ export default {
console.error(error); console.error(error);
}); });
},
reinstallOLLAMAService(){
axios.get('install_ollama')
.then(response => {
})
.catch(error => {
console.error(error);
});
}, },
reinstallAudioService(){ reinstallAudioService(){
axios.get('install_xtts') axios.get('install_xtts')
@ -3328,9 +3387,6 @@ export default {
break; break;
} }
} }
else{
}
} }
} }