Mirror of https://github.com/ParisNeo/lollms-webui.git (synced 2024-12-23 22:32:24 +00:00)

Commit 3669be503f: upgraded linting
Parent commit: 87b878fda9
.pre-commit-config.yaml (new file, 6 lines added)
@@ -0,0 +1,6 @@
repos:
  - repo: https://github.com/ParisNeo/parisneo-precommit-hooks
    rev: v0.1.0 # Use the latest version
    hooks:
      - id: parisneo-python-check
      - id: parisneo-js-check
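A minimal sketch, not taken from this commit, of how the new hook configuration would typically be activated in a local clone; it assumes the pre-commit package is already installed (for example via pip), and the exact developer workflow is not described here:

# Hypothetical helper: install the git hook, then run every configured hook
# once over the whole repository (assumes `pre-commit` is on the PATH).
import subprocess

subprocess.run(["pre-commit", "install"], check=True)
subprocess.run(["pre-commit", "run", "--all-files"], check=True)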
@@ -4,7 +4,7 @@
# Author : ParisNeo with the help of the community
# Supported by Nomic-AI
# license : Apache 2.0
# Description :
# Description :
# Configuration management tool
######
import yaml
@@ -14,8 +14,9 @@ __github__ = "https://github.com/ParisNeo/lollms-webui"
__copyright__ = "Copyright 2023, "
__license__ = "Apache 2.0"


def load_config(file_path):
    with open(file_path, 'r', encoding='utf-8') as stream:
    with open(file_path, "r", encoding="utf-8") as stream:
        config = yaml.safe_load(stream)

    return config
@@ -23,4 +24,4 @@ def load_config(file_path):

def save_config(config, filepath):
    with open(filepath, "w") as f:
        yaml.dump(config, f)
        yaml.dump(config, f)
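For reference, a self-contained sketch of the two YAML helpers touched by the hunks above; the round-trip at the bottom and the file path it uses are illustrative assumptions, not part of the commit:

import yaml

def load_config(file_path):
    # Read a YAML configuration file into a Python dict.
    with open(file_path, "r", encoding="utf-8") as stream:
        config = yaml.safe_load(stream)
    return config

def save_config(config, filepath):
    # Write the (possibly modified) configuration back to disk.
    with open(filepath, "w") as f:
        yaml.dump(config, f)

if __name__ == "__main__":
    cfg = load_config("configs/config.yaml")  # hypothetical path
    cfg["port"] = 9600                        # hypothetical edit
    save_config(cfg, "configs/config.yaml")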
@@ -1,9 +1,9 @@

__author__ = "parisneo"
__github__ = "https://github.com/ParisNeo/lollms-webui"
__copyright__ = "Copyright 2023, "
__license__ = "Apache 2.0"


def compare_lists(list1, list2):
    if len(list1) != len(list2):
        return False
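The hunk above is truncated after the length guard; a typical element-wise completion would look like the sketch below (the loop body is an assumption and may differ from the actual function in the repository):

def compare_lists(list1, list2):
    # Lists of different lengths can never be equal.
    if len(list1) != len(list2):
        return False
    # Compare element by element; any mismatch means the lists differ (assumed body).
    for a, b in zip(list1, list2):
        if a != b:
            return False
    return True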
app.py (450 changed lines)
@@ -5,68 +5,87 @@ Description: Singleton class for the LoLLMS web UI.
|
||||
|
||||
This file is the entry point to the webui.
|
||||
"""
|
||||
from lollms.utilities import PackageManager
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import sys
|
||||
from typing import List, Tuple
|
||||
import os
|
||||
os.environ["KMP_DUPLICATE_LIB_OK"]="TRUE"
|
||||
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from lollms.utilities import PackageManager
|
||||
|
||||
os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"
|
||||
|
||||
expected_ascii_colors_version = "0.4.2"
|
||||
print(f"Checking ascii_colors ({expected_ascii_colors_version}) ...", end="", flush=True)
|
||||
if not PackageManager.check_package_installed_with_version("ascii_colors", expected_ascii_colors_version):
|
||||
print(
|
||||
f"Checking ascii_colors ({expected_ascii_colors_version}) ...", end="", flush=True
|
||||
)
|
||||
if not PackageManager.check_package_installed_with_version(
|
||||
"ascii_colors", expected_ascii_colors_version
|
||||
):
|
||||
PackageManager.install_or_update("ascii_colors")
|
||||
from ascii_colors import ASCIIColors
|
||||
|
||||
ASCIIColors.success("OK")
|
||||
|
||||
expected_pipmaster_version = "0.3.2"
|
||||
ASCIIColors.yellow(f"Checking pipmaster ({expected_pipmaster_version}) ...", end="", flush=True)
|
||||
if not PackageManager.check_package_installed_with_version("pipmaster", expected_pipmaster_version):
|
||||
ASCIIColors.yellow(
|
||||
f"Checking pipmaster ({expected_pipmaster_version}) ...", end="", flush=True
|
||||
)
|
||||
if not PackageManager.check_package_installed_with_version(
|
||||
"pipmaster", expected_pipmaster_version
|
||||
):
|
||||
PackageManager.install_or_update("pipmaster")
|
||||
import pipmaster as pm
|
||||
ASCIIColors.success("OK")
|
||||
|
||||
ASCIIColors.success("OK")
|
||||
|
||||
|
||||
def animate(text: str, stop_event: threading.Event):
|
||||
animation = "⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏"
|
||||
idx = 0
|
||||
while not stop_event.is_set():
|
||||
ASCIIColors.yellow(f"\r{text} {animation[idx % len(animation)]}", end="", flush=True)
|
||||
ASCIIColors.yellow(
|
||||
f"\r{text} {animation[idx % len(animation)]}", end="", flush=True
|
||||
)
|
||||
idx += 1
|
||||
time.sleep(0.1)
|
||||
print("\r" + " " * 50, end="\r") # Clear the line
|
||||
|
||||
|
||||
def check_and_install_package(package: str, version: str):
|
||||
stop_event = threading.Event()
|
||||
animation_thread = threading.Thread(target=animate, args=(f"Checking {package} ({version})", stop_event))
|
||||
animation_thread = threading.Thread(
|
||||
target=animate, args=(f"Checking {package} ({version})", stop_event)
|
||||
)
|
||||
animation_thread.start()
|
||||
|
||||
try:
|
||||
installed = PackageManager.check_package_installed_with_version(package, version)
|
||||
|
||||
installed = PackageManager.check_package_installed_with_version(
|
||||
package, version
|
||||
)
|
||||
|
||||
if not installed:
|
||||
stop_event.set()
|
||||
animation_thread.join()
|
||||
print("\r" + " " * 50, end="\r") # Clear the line
|
||||
PackageManager.install_or_update(package)
|
||||
|
||||
|
||||
stop_event.set()
|
||||
animation_thread.join()
|
||||
|
||||
|
||||
print("\r" + " " * 50, end="\r") # Clear the line
|
||||
ASCIIColors.yellow(f"Checking {package} ({version}) ...", end="")
|
||||
ASCIIColors.success("OK")
|
||||
|
||||
|
||||
except Exception as e:
|
||||
stop_event.set()
|
||||
animation_thread.join()
|
||||
print("\r" + " " * 50, end="\r") # Clear the line
|
||||
ASCIIColors.red(f"Error checking/installing {package}: {str(e)}")
|
||||
|
||||
|
||||
packages: List[Tuple[str, str]] = [
|
||||
("freedom_search", "0.1.9"),
|
||||
("scrapemaster", "0.2.1"),
|
||||
@@ -74,11 +93,12 @@ packages: List[Tuple[str, str]] = [
|
||||
("lollmsvectordb", "1.2.8"),
|
||||
]
|
||||
|
||||
if pm.is_installed("einops"):
|
||||
if not pm.is_installed("einops"):
|
||||
pm.install("einops")
|
||||
if pm.is_installed("datasets"):
|
||||
if not pm.is_installed("datasets"):
|
||||
pm.install("datasets")
|
||||
#einops datasets
|
||||
# einops datasets
|
||||
|
||||
|
||||
def check_pn_libs():
|
||||
ASCIIColors.cyan("Checking ParisNeo libraries installation")
|
||||
@@ -90,64 +110,69 @@ def check_pn_libs():
|
||||
|
||||
ASCIIColors.green("All packages have been checked and are up to date!")
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from starlette.responses import FileResponse
|
||||
from lollms.paths import LollmsPaths
|
||||
from lollms.main_config import LOLLMSConfig
|
||||
from lollms.utilities import trace_exception
|
||||
from lollms.security import sanitize_path
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
import webbrowser
|
||||
from pathlib import Path
|
||||
from ascii_colors import ASCIIColors
|
||||
|
||||
import psutil
|
||||
import socketio
|
||||
import uvicorn
|
||||
import argparse
|
||||
from socketio import ASGIApp
|
||||
import webbrowser
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
from pydantic import ValidationError
|
||||
from fastapi.encoders import jsonable_encoder
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
import socket
|
||||
import psutil
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from lollms.main_config import LOLLMSConfig
|
||||
from lollms.paths import LollmsPaths
|
||||
from lollms.security import sanitize_path
|
||||
from lollms.utilities import trace_exception
|
||||
from pydantic import ValidationError
|
||||
from socketio import ASGIApp
|
||||
from starlette.responses import FileResponse
|
||||
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
|
||||
def get_ip_addresses():
|
||||
hostname = socket.gethostname()
|
||||
hostname = socket.gethostname()
|
||||
ip_addresses = [socket.gethostbyname(hostname)]
|
||||
|
||||
for interface_name, interface_addresses in psutil.net_if_addrs().items():
|
||||
for address in interface_addresses:
|
||||
if str(address.family) == 'AddressFamily.AF_INET':
|
||||
if str(address.family) == "AddressFamily.AF_INET":
|
||||
ip_addresses.append(address.address)
|
||||
|
||||
return ip_addresses
|
||||
|
||||
|
||||
app = FastAPI(title="LoLLMS", description="This is the LoLLMS-Webui API documentation")
|
||||
|
||||
|
||||
try:
|
||||
from lollms.security import MultipartBoundaryCheck
|
||||
|
||||
# Add the MultipartBoundaryCheck middleware
|
||||
app.add_middleware(MultipartBoundaryCheck)
|
||||
except:
|
||||
print("Couldn't activate MultipartBoundaryCheck")
|
||||
|
||||
#app.mount("/socket.io", StaticFiles(directory="path/to/socketio.js"))
|
||||
# app.mount("/socket.io", StaticFiles(directory="path/to/socketio.js"))
|
||||
|
||||
if __name__ == "__main__":
|
||||
desired_version = (3, 11)
|
||||
if not sys.version_info >= desired_version:
|
||||
ASCIIColors.error(f"Your Python version is {sys.version_info.major}.{sys.version_info.minor}, but version {desired_version[0]}.{desired_version[1]} or higher is required.")
|
||||
ASCIIColors.error(
|
||||
f"Your Python version is {sys.version_info.major}.{sys.version_info.minor}, but version {desired_version[0]}.{desired_version[1]} or higher is required."
|
||||
)
|
||||
sys.exit(1)
|
||||
# Parsong parameters
|
||||
parser = argparse.ArgumentParser(description="Start the chatbot FastAPI app.")
|
||||
|
||||
|
||||
parser.add_argument(
|
||||
"--host", type=str, default=None, help="the hostname to listen on"
|
||||
)
|
||||
@@ -155,43 +180,59 @@ if __name__ == "__main__":
|
||||
|
||||
args = parser.parse_args()
|
||||
root_path = Path(__file__).parent
|
||||
lollms_paths = LollmsPaths.find_paths(force_local=True, custom_default_cfg_path="configs/config.yaml")
|
||||
lollms_paths = LollmsPaths.find_paths(
|
||||
force_local=True, custom_default_cfg_path="configs/config.yaml"
|
||||
)
|
||||
config = LOLLMSConfig.autoload(lollms_paths)
|
||||
|
||||
if config.auto_update:
|
||||
check_pn_libs()
|
||||
|
||||
|
||||
if config.debug_log_file_path!="":
|
||||
if config.debug_log_file_path != "":
|
||||
ASCIIColors.log_path = config.debug_log_file_path
|
||||
if args.host:
|
||||
config.host=args.host
|
||||
config.host = args.host
|
||||
if args.port:
|
||||
config.port=args.port
|
||||
config.port = args.port
|
||||
|
||||
# Define the path to your custom CA bundle file
|
||||
ca_bundle_path = lollms_paths.personal_certificates/"truststore.pem"
|
||||
ca_bundle_path = lollms_paths.personal_certificates / "truststore.pem"
|
||||
|
||||
if ca_bundle_path.exists():
|
||||
# Set the environment variable
|
||||
os.environ['REQUESTS_CA_BUNDLE'] = str(ca_bundle_path)
|
||||
os.environ["REQUESTS_CA_BUNDLE"] = str(ca_bundle_path)
|
||||
|
||||
cert_file_path = lollms_paths.personal_certificates/"cert.pem"
|
||||
key_file_path = lollms_paths.personal_certificates/"key.pem"
|
||||
cert_file_path = lollms_paths.personal_certificates / "cert.pem"
|
||||
key_file_path = lollms_paths.personal_certificates / "key.pem"
|
||||
if os.path.exists(cert_file_path) and os.path.exists(key_file_path):
|
||||
is_https = True
|
||||
else:
|
||||
is_https = False
|
||||
is_https = False
|
||||
|
||||
# Create a Socket.IO server
|
||||
if config["host"]!="localhost":
|
||||
if config["host"]!="0.0.0.0":
|
||||
config.allowed_origins.append(f"https://{config['host']}:{config['port']}" if is_https else f"http://{config['host']}:{config['port']}")
|
||||
if config["host"] != "localhost":
|
||||
if config["host"] != "0.0.0.0":
|
||||
config.allowed_origins.append(
|
||||
f"https://{config['host']}:{config['port']}"
|
||||
if is_https
|
||||
else f"http://{config['host']}:{config['port']}"
|
||||
)
|
||||
else:
|
||||
config.allowed_origins += [f"https://{ip}:{config['port']}" if is_https else f"http://{ip}:{config['port']}" for ip in get_ip_addresses()]
|
||||
allowed_origins = config.allowed_origins+[f"https://localhost:{config['port']}" if is_https else f"http://localhost:{config['port']}"]
|
||||
|
||||
|
||||
config.allowed_origins += [
|
||||
(
|
||||
f"https://{ip}:{config['port']}"
|
||||
if is_https
|
||||
else f"http://{ip}:{config['port']}"
|
||||
)
|
||||
for ip in get_ip_addresses()
|
||||
]
|
||||
allowed_origins = config.allowed_origins + [
|
||||
(
|
||||
f"https://localhost:{config['port']}"
|
||||
if is_https
|
||||
else f"http://localhost:{config['port']}"
|
||||
)
|
||||
]
|
||||
|
||||
# class EndpointSpecificCORSMiddleware(BaseHTTPMiddleware):
|
||||
# async def dispatch(self, request: Request, call_next):
|
||||
@@ -225,128 +266,141 @@ if __name__ == "__main__":
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
|
||||
sio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins=allowed_origins, ping_timeout=1200, ping_interval=30) # Enable CORS for selected origins
|
||||
|
||||
sio = socketio.AsyncServer(
|
||||
async_mode="asgi",
|
||||
cors_allowed_origins=allowed_origins,
|
||||
ping_timeout=1200,
|
||||
ping_interval=30,
|
||||
) # Enable CORS for selected origins
|
||||
|
||||
# A simple fix for v 11.0 to 12 alpha
|
||||
if config.rag_vectorizer=="bert":
|
||||
config.rag_vectorizer="tfidf"
|
||||
if config.rag_vectorizer == "bert":
|
||||
config.rag_vectorizer = "tfidf"
|
||||
config.save_config()
|
||||
|
||||
LOLLMSWebUI.build_instance(config=config, lollms_paths=lollms_paths, args=args, sio=sio)
|
||||
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
LOLLMSWebUI.build_instance(
|
||||
config=config, lollms_paths=lollms_paths, args=args, sio=sio
|
||||
)
|
||||
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
lollmsElfServer.verbose = True
|
||||
|
||||
|
||||
# Import all endpoints
|
||||
from lollms.server.endpoints.lollms_binding_files_server import router as lollms_binding_files_server_router
|
||||
from lollms.server.endpoints.lollms_infos import router as lollms_infos_router
|
||||
from lollms.server.endpoints.lollms_hardware_infos import router as lollms_hardware_infos_router
|
||||
from lollms.server.endpoints.lollms_binding_infos import router as lollms_binding_infos_router
|
||||
from lollms.server.endpoints.lollms_models_infos import router as lollms_models_infos_router
|
||||
from lollms.server.endpoints.lollms_personalities_infos import router as lollms_personalities_infos_router
|
||||
from lollms.server.endpoints.lollms_generator import router as lollms_generator_router
|
||||
from lollms.server.endpoints.lollms_configuration_infos import router as lollms_configuration_infos_router
|
||||
from lollms.server.endpoints.lollms_skills_library import router as lollms_skills_library_router
|
||||
|
||||
from lollms.server.endpoints.lollms_binding_files_server import \
|
||||
router as lollms_binding_files_server_router
|
||||
from lollms.server.endpoints.lollms_binding_infos import \
|
||||
router as lollms_binding_infos_router
|
||||
from lollms.server.endpoints.lollms_comfyui import \
|
||||
router as lollms_comfyui_router
|
||||
from lollms.server.endpoints.lollms_configuration_infos import \
|
||||
router as lollms_configuration_infos_router
|
||||
from lollms.server.endpoints.lollms_diffusers import \
|
||||
router as lollms_diffusers_router
|
||||
from lollms.server.endpoints.lollms_discussion import \
|
||||
router as lollms_discussion_router
|
||||
from lollms.server.endpoints.lollms_file_system import \
|
||||
router as lollms_file_system_router
|
||||
from lollms.server.endpoints.lollms_generator import \
|
||||
router as lollms_generator_router
|
||||
from lollms.server.endpoints.lollms_hardware_infos import \
|
||||
router as lollms_hardware_infos_router
|
||||
from lollms.server.endpoints.lollms_infos import \
|
||||
router as lollms_infos_router
|
||||
from lollms.server.endpoints.lollms_models_infos import \
|
||||
router as lollms_models_infos_router
|
||||
from lollms.server.endpoints.lollms_motion_ctrl import \
|
||||
router as lollms_motion_ctrl_router
|
||||
from lollms.server.endpoints.lollms_ollama import \
|
||||
router as lollms_ollama_router
|
||||
from lollms.server.endpoints.lollms_personalities_infos import \
|
||||
router as lollms_personalities_infos_router
|
||||
from lollms.server.endpoints.lollms_petals import \
|
||||
router as lollms_petals_router
|
||||
from lollms.server.endpoints.lollms_rag import router as lollms_rag_router
|
||||
from lollms.server.endpoints.lollms_sd import router as lollms_sd_router
|
||||
from lollms.server.endpoints.lollms_skills_library import \
|
||||
router as lollms_skills_library_router
|
||||
from lollms.server.endpoints.lollms_tti import router as lollms_tti_router
|
||||
|
||||
|
||||
from lollms.server.endpoints.lollms_user import router as lollms_user_router
|
||||
from lollms.server.endpoints.lollms_tts import router as lollms_tts_add_router
|
||||
|
||||
from lollms.server.endpoints.lollms_xtts import router as lollms_xtts_add_router
|
||||
from lollms.server.endpoints.lollms_tts import \
|
||||
router as lollms_tts_add_router
|
||||
from lollms.server.endpoints.lollms_user import \
|
||||
router as lollms_user_router
|
||||
from lollms.server.endpoints.lollms_vllm import \
|
||||
router as lollms_vllm_router
|
||||
from lollms.server.endpoints.lollms_whisper import router as lollms_whisper
|
||||
from lollms.server.endpoints.lollms_xtts import \
|
||||
router as lollms_xtts_add_router
|
||||
from lollms.server.events.lollms_files_events import \
|
||||
add_events as lollms_files_events_add
|
||||
from lollms.server.events.lollms_generation_events import \
|
||||
add_events as lollms_generation_events_add
|
||||
from lollms.server.events.lollms_model_events import \
|
||||
add_events as lollms_model_events_add
|
||||
from lollms.server.events.lollms_personality_events import \
|
||||
add_events as lollms_personality_events_add
|
||||
|
||||
from lollms.server.endpoints.lollms_sd import router as lollms_sd_router
|
||||
from lollms.server.endpoints.lollms_diffusers import router as lollms_diffusers_router
|
||||
|
||||
from lollms.server.endpoints.lollms_comfyui import router as lollms_comfyui_router
|
||||
|
||||
from lollms.server.endpoints.lollms_ollama import router as lollms_ollama_router
|
||||
from lollms.server.endpoints.lollms_vllm import router as lollms_vllm_router
|
||||
from lollms.server.endpoints.lollms_motion_ctrl import router as lollms_motion_ctrl_router
|
||||
from lollms.server.endpoints.lollms_discussion import router as lollms_discussion_router
|
||||
from lollms.server.endpoints.lollms_petals import router as lollms_petals_router
|
||||
from lollms.server.endpoints.lollms_rag import router as lollms_rag_router
|
||||
|
||||
|
||||
from endpoints.lollms_webui_infos import router as lollms_webui_infos_router
|
||||
from endpoints.lollms_message import router as lollms_message_router
|
||||
from endpoints.chat_bar import router as chat_bar_router
|
||||
from endpoints.lollms_advanced import router as lollms_advanced_router
|
||||
from endpoints.lollms_apps import router as lollms_apps_router
|
||||
from endpoints.chat_bar import router as chat_bar_router
|
||||
|
||||
from endpoints.lollms_help import router as help_router
|
||||
|
||||
|
||||
from endpoints.lollms_playground import router as lollms_playground_router
|
||||
|
||||
from lollms.server.endpoints.lollms_file_system import router as lollms_file_system_router
|
||||
|
||||
from lollms.server.events.lollms_generation_events import add_events as lollms_generation_events_add
|
||||
from lollms.server.events.lollms_personality_events import add_events as lollms_personality_events_add
|
||||
from lollms.server.events.lollms_files_events import add_events as lollms_files_events_add
|
||||
from lollms.server.events.lollms_model_events import add_events as lollms_model_events_add
|
||||
#from lollms.server.events.lollms_rag_events import add_events as lollms_rag_events_add
|
||||
|
||||
|
||||
|
||||
from events.lollms_generation_events import add_events as lollms_webui_generation_events_add
|
||||
from events.lollms_discussion_events import add_events as lollms_webui_discussion_events_add
|
||||
from events.lollms_chatbox_events import add_events as lollms_chatbox_events_add
|
||||
from events.lollms_interactive_events import add_events as lollms_interactive_events_add
|
||||
|
||||
from endpoints.lollms_message import router as lollms_message_router
|
||||
from endpoints.lollms_playground import router as lollms_playground_router
|
||||
from endpoints.lollms_webui_infos import \
|
||||
router as lollms_webui_infos_router
|
||||
from events.lollms_chatbox_events import \
|
||||
add_events as lollms_chatbox_events_add
|
||||
from events.lollms_discussion_events import \
|
||||
add_events as lollms_webui_discussion_events_add
|
||||
# from lollms.server.events.lollms_rag_events import add_events as lollms_rag_events_add
|
||||
from events.lollms_generation_events import \
|
||||
add_events as lollms_webui_generation_events_add
|
||||
from events.lollms_interactive_events import \
|
||||
add_events as lollms_interactive_events_add
|
||||
|
||||
# endpoints for remote access
|
||||
app.include_router(lollms_generator_router)
|
||||
|
||||
# Endpoints reserved for local access
|
||||
if (not config.headless_server_mode) or config.force_accept_remote_access: # Be aware that forcing force_accept_remote_access can expose the server to attacks
|
||||
# Endpoints reserved for local access
|
||||
if (
|
||||
not config.headless_server_mode
|
||||
) or config.force_accept_remote_access: # Be aware that forcing force_accept_remote_access can expose the server to attacks
|
||||
app.include_router(lollms_infos_router)
|
||||
app.include_router(lollms_binding_files_server_router)
|
||||
app.include_router(lollms_hardware_infos_router)
|
||||
app.include_router(lollms_hardware_infos_router)
|
||||
app.include_router(lollms_binding_infos_router)
|
||||
app.include_router(lollms_models_infos_router)
|
||||
app.include_router(lollms_personalities_infos_router)
|
||||
app.include_router(lollms_skills_library_router)
|
||||
app.include_router(lollms_tti_router)
|
||||
|
||||
|
||||
app.include_router(lollms_models_infos_router)
|
||||
app.include_router(lollms_personalities_infos_router)
|
||||
app.include_router(lollms_skills_library_router)
|
||||
app.include_router(lollms_tti_router)
|
||||
|
||||
app.include_router(lollms_webui_infos_router)
|
||||
app.include_router(lollms_discussion_router)
|
||||
app.include_router(lollms_message_router)
|
||||
app.include_router(lollms_user_router)
|
||||
app.include_router(lollms_advanced_router)
|
||||
app.include_router(lollms_apps_router)
|
||||
|
||||
|
||||
app.include_router(chat_bar_router)
|
||||
app.include_router(help_router)
|
||||
|
||||
|
||||
app.include_router(lollms_tts_add_router)
|
||||
app.include_router(lollms_xtts_add_router)
|
||||
app.include_router(lollms_whisper)
|
||||
|
||||
app.include_router(lollms_sd_router)
|
||||
app.include_router(lollms_diffusers_router)
|
||||
app.include_router(lollms_comfyui_router)
|
||||
|
||||
app.include_router(lollms_ollama_router)
|
||||
app.include_router(lollms_petals_router)
|
||||
|
||||
app.include_router(lollms_rag_router)
|
||||
app.include_router(lollms_vllm_router)
|
||||
app.include_router(lollms_motion_ctrl_router)
|
||||
|
||||
app.include_router(lollms_file_system_router)
|
||||
|
||||
|
||||
app.include_router(lollms_playground_router)
|
||||
|
||||
app.include_router(lollms_sd_router)
|
||||
app.include_router(lollms_diffusers_router)
|
||||
app.include_router(lollms_comfyui_router)
|
||||
|
||||
app.include_router(lollms_ollama_router)
|
||||
app.include_router(lollms_petals_router)
|
||||
|
||||
app.include_router(lollms_rag_router)
|
||||
app.include_router(lollms_vllm_router)
|
||||
app.include_router(lollms_motion_ctrl_router)
|
||||
|
||||
app.include_router(lollms_file_system_router)
|
||||
|
||||
app.include_router(lollms_playground_router)
|
||||
app.include_router(lollms_configuration_infos_router)
|
||||
|
||||
|
||||
|
||||
|
||||
@sio.event
|
||||
async def disconnect(sid):
|
||||
@@ -357,59 +411,77 @@ if __name__ == "__main__":
|
||||
ASCIIColors.yellow(f"Message from {sid}: {data}")
|
||||
await sio.send(sid, "Message received!")
|
||||
|
||||
|
||||
lollms_generation_events_add(sio)
|
||||
|
||||
if (not config.headless_server_mode) or config.force_accept_remote_access: # Be aware that forcing force_accept_remote_access can expose the server to attacks
|
||||
if (
|
||||
not config.headless_server_mode
|
||||
) or config.force_accept_remote_access: # Be aware that forcing force_accept_remote_access can expose the server to attacks
|
||||
lollms_personality_events_add(sio)
|
||||
lollms_files_events_add(sio)
|
||||
lollms_model_events_add(sio)
|
||||
#lollms_rag_events_add(sio)
|
||||
# lollms_rag_events_add(sio)
|
||||
|
||||
lollms_webui_generation_events_add(sio)
|
||||
lollms_webui_discussion_events_add(sio)
|
||||
lollms_chatbox_events_add(sio)
|
||||
lollms_interactive_events_add(sio)
|
||||
|
||||
|
||||
app.mount("/extensions", StaticFiles(directory=Path(__file__).parent/"web"/"dist", html=True), name="extensions")
|
||||
app.mount("/playground", StaticFiles(directory=Path(__file__).parent/"web"/"dist", html=True), name="playground")
|
||||
app.mount("/settings", StaticFiles(directory=Path(__file__).parent/"web"/"dist", html=True), name="settings")
|
||||
app.mount(
|
||||
"/extensions",
|
||||
StaticFiles(directory=Path(__file__).parent / "web" / "dist", html=True),
|
||||
name="extensions",
|
||||
)
|
||||
app.mount(
|
||||
"/playground",
|
||||
StaticFiles(directory=Path(__file__).parent / "web" / "dist", html=True),
|
||||
name="playground",
|
||||
)
|
||||
app.mount(
|
||||
"/settings",
|
||||
StaticFiles(directory=Path(__file__).parent / "web" / "dist", html=True),
|
||||
name="settings",
|
||||
)
|
||||
|
||||
# Custom route to serve JavaScript files with the correct MIME type
|
||||
@app.get("/{path:path}")
|
||||
async def serve_js(path: str):
|
||||
sanitize_path(path)
|
||||
if path=="":
|
||||
return FileResponse(Path(__file__).parent / "web" / "dist" / "index.html", media_type="text/html")
|
||||
if path == "":
|
||||
return FileResponse(
|
||||
Path(__file__).parent / "web" / "dist" / "index.html",
|
||||
media_type="text/html",
|
||||
)
|
||||
file_path = Path(__file__).parent / "web" / "dist" / path
|
||||
if file_path.suffix == ".js":
|
||||
return FileResponse(file_path, media_type="application/javascript")
|
||||
return FileResponse(file_path)
|
||||
|
||||
|
||||
app.mount("/", StaticFiles(directory=Path(__file__).parent/"web"/"dist", html=True), name="static")
|
||||
|
||||
app.mount(
|
||||
"/",
|
||||
StaticFiles(directory=Path(__file__).parent / "web" / "dist", html=True),
|
||||
name="static",
|
||||
)
|
||||
|
||||
@app.exception_handler(ValidationError)
|
||||
async def validation_exception_handler(request: Request, exc: ValidationError):
|
||||
print(f"Error: {exc.errors()}") # Print the validation error details
|
||||
if (hasattr(exc,"body")):
|
||||
if hasattr(exc, "body"):
|
||||
return JSONResponse(
|
||||
status_code=422,
|
||||
content=jsonable_encoder({"detail": exc.errors(), "body": await exc.body}), # Send the error details and the original request body
|
||||
content=jsonable_encoder(
|
||||
{"detail": exc.errors(), "body": await exc.body}
|
||||
), # Send the error details and the original request body
|
||||
)
|
||||
else:
|
||||
return JSONResponse(
|
||||
status_code=422,
|
||||
content=jsonable_encoder({"detail": exc.errors(), "body": ""}), # Send the error details and the original request body
|
||||
content=jsonable_encoder(
|
||||
{"detail": exc.errors(), "body": ""}
|
||||
), # Send the error details and the original request body
|
||||
)
|
||||
|
||||
|
||||
app = ASGIApp(socketio_server=sio, other_asgi_app=app)
|
||||
|
||||
|
||||
|
||||
lollmsElfServer.app = app
|
||||
|
||||
try:
|
||||
@@ -422,30 +494,40 @@ if __name__ == "__main__":
|
||||
# host = ":".join(parts[0:2])
|
||||
# port = int(parts[2])
|
||||
# uvicorn.run(app, host=host, port=port)
|
||||
# New thread
|
||||
# New thread
|
||||
# thread = threading.Thread(target=run_lollms_server)
|
||||
|
||||
# start thread
|
||||
# start thread
|
||||
# thread.start()
|
||||
|
||||
# if autoshow
|
||||
|
||||
|
||||
if config.auto_show_browser and not config.headless_server_mode:
|
||||
if config['host']=="0.0.0.0":
|
||||
webbrowser.open(f"https://localhost:{config['port']}" if is_https else f"http://localhost:{config['port']}")
|
||||
#webbrowser.open(f"http://localhost:{6523}") # needed for debug (to be removed in production)
|
||||
if config["host"] == "0.0.0.0":
|
||||
webbrowser.open(
|
||||
f"https://localhost:{config['port']}"
|
||||
if is_https
|
||||
else f"http://localhost:{config['port']}"
|
||||
)
|
||||
# webbrowser.open(f"http://localhost:{6523}") # needed for debug (to be removed in production)
|
||||
else:
|
||||
webbrowser.open(f"https://{config['host']}:{config['port']}" if is_https else f"http://{config['host']}:{config['port']}")
|
||||
#webbrowser.open(f"http://{config['host']}:{6523}") # needed for debug (to be removed in production)
|
||||
|
||||
webbrowser.open(
|
||||
f"https://{config['host']}:{config['port']}"
|
||||
if is_https
|
||||
else f"http://{config['host']}:{config['port']}"
|
||||
)
|
||||
# webbrowser.open(f"http://{config['host']}:{6523}") # needed for debug (to be removed in production)
|
||||
|
||||
if is_https:
|
||||
uvicorn.run(app, host=config.host, port=config.port, ssl_certfile=cert_file_path, ssl_keyfile=key_file_path)
|
||||
uvicorn.run(
|
||||
app,
|
||||
host=config.host,
|
||||
port=config.port,
|
||||
ssl_certfile=cert_file_path,
|
||||
ssl_keyfile=key_file_path,
|
||||
)
|
||||
else:
|
||||
uvicorn.run(app, host=config.host, port=config.port)
|
||||
|
||||
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
|
||||
|
||||
|
||||
|
@@ -7,51 +7,54 @@ description:
|
||||
application. These routes are linked to lollms_webui chatbox
|
||||
|
||||
"""
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi import HTTPException
|
||||
from pydantic import BaseModel, Field
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
from pydantic import BaseModel
|
||||
from starlette.responses import StreamingResponse
|
||||
from lollms.types import MSG_OPERATION_TYPE
|
||||
from lollms.main_config import BaseConfig
|
||||
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index
|
||||
from ascii_colors import ASCIIColors
|
||||
from lollms.databases.discussions_database import DiscussionsDB
|
||||
from lollms.types import SENDER_TYPES
|
||||
from typing import List
|
||||
from pathlib import Path
|
||||
import tqdm
|
||||
from fastapi import FastAPI, UploadFile, File
|
||||
import shutil
|
||||
|
||||
import os
|
||||
import platform
|
||||
from urllib.parse import urlparse
|
||||
from functools import partial
|
||||
from datetime import datetime
|
||||
from utilities.execution_engines.python_execution_engine import execute_python
|
||||
from utilities.execution_engines.latex_execution_engine import execute_latex
|
||||
from utilities.execution_engines.shell_execution_engine import execute_bash
|
||||
from lollms.security import sanitize_path, forbid_remote_access
|
||||
from lollms.internet import scrape_and_save
|
||||
from urllib.parse import urlparse
|
||||
import shutil
|
||||
import threading
|
||||
from datetime import datetime
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import tqdm
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import (APIRouter, FastAPI, File, HTTPException, Request,
|
||||
UploadFile)
|
||||
from lollms.databases.discussions_database import DiscussionsDB
|
||||
from lollms.internet import scrape_and_save
|
||||
from lollms.main_config import BaseConfig
|
||||
from lollms.security import forbid_remote_access, sanitize_path
|
||||
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
|
||||
from lollms.utilities import (detect_antiprompt,
|
||||
find_first_available_file_index,
|
||||
remove_text_from_string, trace_exception)
|
||||
from pydantic import BaseModel, Field
|
||||
from starlette.responses import StreamingResponse
|
||||
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
from utilities.execution_engines.latex_execution_engine import execute_latex
|
||||
from utilities.execution_engines.python_execution_engine import execute_python
|
||||
from utilities.execution_engines.shell_execution_engine import execute_bash
|
||||
|
||||
# ----------------------- Defining router and main class ------------------------------
|
||||
|
||||
router = APIRouter()
|
||||
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
|
||||
|
||||
class AddWebPageRequest(BaseModel):
|
||||
client_id: str = Field(...)
|
||||
client_id: str = Field(...)
|
||||
url: str = Field(..., description="Url to be used")
|
||||
|
||||
|
||||
class CmdExecutionRequest(BaseModel):
|
||||
client_id: str = Field(...)
|
||||
client_id: str = Field(...)
|
||||
command: str = Field(..., description="Url to be used")
|
||||
parameters: List[str] = Field(..., description="Command parameters")
|
||||
|
||||
|
||||
|
||||
"""
|
||||
@router.post("/execute_personality_command")
|
||||
async def execute_personality_command(request: CmdExecutionRequest):
|
||||
@@ -110,28 +113,43 @@ async def execute_personality_command(request: CmdExecutionRequest):
|
||||
"""
|
||||
MAX_PAGE_SIZE = 10000000
|
||||
|
||||
|
||||
@router.post("/add_webpage")
|
||||
async def add_webpage(request: AddWebPageRequest):
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
client = lollmsElfServer.session.get_client(request.client_id)
|
||||
if client is None:
|
||||
raise HTTPException(status_code=400, detail="Unknown client. This service only accepts lollms webui requests")
|
||||
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Unknown client. This service only accepts lollms webui requests",
|
||||
)
|
||||
|
||||
def do_scraping():
|
||||
lollmsElfServer.ShowBlockingMessage("Scraping web page\nPlease wait...")
|
||||
ASCIIColors.yellow("Scaping web page")
|
||||
client = lollmsElfServer.session.get_client(request.client_id)
|
||||
url = request.url
|
||||
index = find_first_available_file_index(client.discussion.discussion_folder/"text_data","web_",".txt")
|
||||
index = find_first_available_file_index(
|
||||
client.discussion.discussion_folder / "text_data", "web_", ".txt"
|
||||
)
|
||||
try:
|
||||
file_path=sanitize_path(str(client.discussion.discussion_folder/"text_data"/f"web_{index}.txt"),True)
|
||||
file_path = sanitize_path(
|
||||
str(
|
||||
client.discussion.discussion_folder
|
||||
/ "text_data"
|
||||
/ f"web_{index}.txt"
|
||||
),
|
||||
True,
|
||||
)
|
||||
except Exception as ex:
|
||||
lollmsElfServer.HideBlockingMessage()
|
||||
raise ex
|
||||
try:
|
||||
result = urlparse(url)
|
||||
if all([result.scheme, result.netloc]): # valid URL
|
||||
if not scrape_and_save(url=url, file_path=file_path,max_size=MAX_PAGE_SIZE):
|
||||
if not scrape_and_save(
|
||||
url=url, file_path=file_path, max_size=MAX_PAGE_SIZE
|
||||
):
|
||||
lollmsElfServer.HideBlockingMessage()
|
||||
raise HTTPException(status_code=400, detail="Web page too large")
|
||||
else:
|
||||
@@ -141,18 +159,23 @@ async def add_webpage(request: AddWebPageRequest):
|
||||
trace_exception(e)
|
||||
lollmsElfServer.HideBlockingMessage()
|
||||
raise HTTPException(status_code=400, detail=f"Exception : {e}")
|
||||
|
||||
|
||||
try:
|
||||
client.discussion.add_file(file_path, client, partial(lollmsElfServer.process_data, client_id = request.client_id))
|
||||
# File saved successfully
|
||||
client.discussion.add_file(
|
||||
file_path,
|
||||
client,
|
||||
partial(lollmsElfServer.process_data, client_id=request.client_id),
|
||||
)
|
||||
# File saved successfully
|
||||
lollmsElfServer.HideBlockingMessage()
|
||||
lollmsElfServer.refresh_files()
|
||||
except Exception as e:
|
||||
# Error occurred while saving the file
|
||||
lollmsElfServer.HideBlockingMessage()
|
||||
lollmsElfServer.refresh_files()
|
||||
return {'status':False,"error":str(e)}
|
||||
return {"status": False, "error": str(e)}
|
||||
|
||||
client.generation_thread = threading.Thread(target=do_scraping)
|
||||
client.generation_thread.start()
|
||||
|
||||
return {'status':True}
|
||||
|
||||
return {"status": True}
|
||||
|
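A hedged sketch of how a local client could call the /add_webpage route shown above; the host, port, and payload values are assumptions (the route rejects callers whose client_id is not a registered lollms webui session):

import requests  # third-party HTTP client, assumed available

# Hypothetical values: a real client_id is issued by the webui session manager.
payload = {"client_id": "my_client_id", "url": "https://example.com/article"}
resp = requests.post("http://localhost:9600/add_webpage", json=payload)
print(resp.json())  # {"status": True} while scraping runs in a background thread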
@@ -6,32 +6,36 @@
|
||||
This module contains a set of FastAPI routes that provide access to advanced functionalities of lollms. These routes allow users to do advanced stuff like executing code.
|
||||
|
||||
"""
|
||||
from fastapi import APIRouter, Request
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
from pydantic import BaseModel, Field
|
||||
from starlette.responses import StreamingResponse
|
||||
from lollms.types import MSG_OPERATION_TYPE
|
||||
from lollms.main_config import BaseConfig
|
||||
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, show_yes_no_dialog, add_period
|
||||
from lollms.security import sanitize_path, forbid_remote_access, check_access, sanitize_svg
|
||||
from ascii_colors import ASCIIColors
|
||||
from lollms.databases.discussions_database import DiscussionsDB
|
||||
from lollms.client_session import Client
|
||||
from pathlib import Path
|
||||
import tqdm
|
||||
from fastapi import FastAPI, UploadFile, File
|
||||
import shutil
|
||||
|
||||
import io
|
||||
import os
|
||||
import platform
|
||||
import string
|
||||
import re
|
||||
import subprocess
|
||||
import shutil
|
||||
import string
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import FastAPI, Query, HTTPException
|
||||
import tqdm
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import (APIRouter, FastAPI, File, HTTPException, Query, Request,
|
||||
UploadFile)
|
||||
from fastapi.responses import FileResponse, StreamingResponse
|
||||
from pydantic import BaseModel
|
||||
import io
|
||||
from lollms.client_session import Client
|
||||
from lollms.databases.discussions_database import DiscussionsDB
|
||||
from lollms.main_config import BaseConfig
|
||||
from lollms.security import (check_access, forbid_remote_access, sanitize_path,
|
||||
sanitize_svg)
|
||||
from lollms.types import MSG_OPERATION_TYPE
|
||||
from lollms.utilities import (add_period, detect_antiprompt,
|
||||
remove_text_from_string, show_yes_no_dialog,
|
||||
trace_exception)
|
||||
from pydantic import BaseModel, Field
|
||||
from starlette.responses import StreamingResponse
|
||||
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
|
||||
def validate_file_path(path):
|
||||
try:
|
||||
@@ -41,39 +45,46 @@ def validate_file_path(path):
|
||||
print(f"Path validation error: {str(e)}")
|
||||
return False
|
||||
|
||||
from utilities.execution_engines.python_execution_engine import execute_python
|
||||
from utilities.execution_engines.latex_execution_engine import execute_latex
|
||||
from utilities.execution_engines.shell_execution_engine import execute_bash
|
||||
from utilities.execution_engines.javascript_execution_engine import execute_javascript
|
||||
from utilities.execution_engines.html_execution_engine import execute_html
|
||||
|
||||
from utilities.execution_engines.mermaid_execution_engine import execute_mermaid
|
||||
from utilities.execution_engines.graphviz_execution_engine import execute_graphviz
|
||||
from utilities.execution_engines.svg_execution_engine import execute_svg
|
||||
|
||||
from utilities.execution_engines.lilypond_execution_engine import execute_lilypond
|
||||
|
||||
|
||||
import os
|
||||
from fastapi import FastAPI, UploadFile, File
|
||||
from fastapi.responses import JSONResponse
|
||||
import tempfile
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
from fastapi import FastAPI, File, UploadFile
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from utilities.execution_engines.graphviz_execution_engine import \
|
||||
execute_graphviz
|
||||
from utilities.execution_engines.html_execution_engine import execute_html
|
||||
from utilities.execution_engines.javascript_execution_engine import \
|
||||
execute_javascript
|
||||
from utilities.execution_engines.latex_execution_engine import execute_latex
|
||||
from utilities.execution_engines.lilypond_execution_engine import \
|
||||
execute_lilypond
|
||||
from utilities.execution_engines.mermaid_execution_engine import \
|
||||
execute_mermaid
|
||||
from utilities.execution_engines.python_execution_engine import execute_python
|
||||
from utilities.execution_engines.shell_execution_engine import execute_bash
|
||||
from utilities.execution_engines.svg_execution_engine import execute_svg
|
||||
|
||||
# ----------------------- Defining router and main class ------------------------------
|
||||
|
||||
router = APIRouter()
|
||||
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
|
||||
|
||||
class Identification(BaseModel):
|
||||
client_id:str
|
||||
client_id: str
|
||||
|
||||
|
||||
class CodeRequest(BaseModel):
|
||||
client_id: str = Field(...)
|
||||
client_id: str = Field(...)
|
||||
code: str = Field(..., description="Code to be executed")
|
||||
discussion_id: int = Field(..., description="Discussion ID")
|
||||
message_id: int = Field(..., description="Message ID")
|
||||
language: str = Field(..., description="Programming language of the code")
|
||||
|
||||
|
||||
@router.post("/execute_code")
|
||||
async def execute_code(request: CodeRequest):
|
||||
"""
|
||||
@@ -84,15 +95,26 @@ async def execute_code(request: CodeRequest):
|
||||
"""
|
||||
client = check_access(lollmsElfServer, request.client_id)
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Code execution is blocked when in headless mode for obvious security reasons!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Code execution is blocked when in headless mode for obvious security reasons!",
|
||||
}
|
||||
|
||||
forbid_remote_access(lollmsElfServer, "Code execution is blocked when the server is exposed outside for very obvious reasons!")
|
||||
forbid_remote_access(
|
||||
lollmsElfServer,
|
||||
"Code execution is blocked when the server is exposed outside for very obvious reasons!",
|
||||
)
|
||||
if not lollmsElfServer.config.turn_on_code_execution:
|
||||
return {"status":False,"error":"Code execution is blocked by the configuration!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Code execution is blocked by the configuration!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.turn_on_code_validation:
|
||||
if not show_yes_no_dialog("Validation","Do you validate the execution of the code?"):
|
||||
return {"status":False,"error":"User refused the execution!"}
|
||||
if not show_yes_no_dialog(
|
||||
"Validation", "Do you validate the execution of the code?"
|
||||
):
|
||||
return {"status": False, "error": "User refused the execution!"}
|
||||
|
||||
try:
|
||||
code = request.code
|
||||
@@ -100,42 +122,48 @@ async def execute_code(request: CodeRequest):
|
||||
message_id = request.message_id
|
||||
language = request.language
|
||||
|
||||
if language=="function":
|
||||
if language == "function":
|
||||
ASCIIColors.info("Executing function call:")
|
||||
ASCIIColors.yellow(code)
|
||||
try:
|
||||
out = lollmsElfServer.personality.execute_function(code)
|
||||
return out if type(out)==str else out[0] if type(out) is tuple and type(out[0])==str else "Done"
|
||||
return (
|
||||
out
|
||||
if type(out) == str
|
||||
else (
|
||||
out[0] if type(out) is tuple and type(out[0]) == str else "Done"
|
||||
)
|
||||
)
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
return ex
|
||||
if language=="python":
|
||||
if language == "python":
|
||||
ASCIIColors.info("Executing python code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_python(code, client, message_id)
|
||||
if language=="svg":
|
||||
if language == "svg":
|
||||
ASCIIColors.info("Executing svg code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_svg(sanitize_svg(code), client, message_id)
|
||||
if language=="lilypond":
|
||||
if language == "lilypond":
|
||||
ASCIIColors.info("Executing svg code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_lilypond(code, client, message_id)
|
||||
|
||||
if language=="javascript":
|
||||
|
||||
if language == "javascript":
|
||||
ASCIIColors.info("Executing javascript code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_javascript(code, client, message_id)
|
||||
if language in ["html","html5","svg"]:
|
||||
if language in ["html", "html5", "svg"]:
|
||||
ASCIIColors.info("Executing javascript code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_html(code, client, message_id)
|
||||
|
||||
elif language=="latex":
|
||||
elif language == "latex":
|
||||
ASCIIColors.info("Executing latex code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_latex(code, client, message_id)
|
||||
elif language in ["bash","shell","cmd","powershell"]:
|
||||
elif language in ["bash", "shell", "cmd", "powershell"]:
|
||||
ASCIIColors.info("Executing shell code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_bash(code, client, message_id)
|
||||
@@ -143,7 +171,7 @@ async def execute_code(request: CodeRequest):
|
||||
ASCIIColors.info("Executing mermaid code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_mermaid(code, client, message_id)
|
||||
elif language in ["graphviz","dot"]:
|
||||
elif language in ["graphviz", "dot"]:
|
||||
ASCIIColors.info("Executing graphviz code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_graphviz(code, client, message_id)
|
||||
@@ -151,8 +179,9 @@ async def execute_code(request: CodeRequest):
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
lollmsElfServer.error(ex)
|
||||
return {"status":False,"error":str(ex)}
|
||||
|
||||
return {"status": False, "error": str(ex)}
|
||||
|
||||
|
||||
@router.post("/execute_code_in_new_tab")
|
||||
async def execute_code_in_new_tab(request: CodeRequest):
|
||||
"""
|
||||
@@ -163,15 +192,26 @@ async def execute_code_in_new_tab(request: CodeRequest):
|
||||
"""
|
||||
client = check_access(lollmsElfServer, request.client_id)
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Code execution is blocked when in headless mode for obvious security reasons!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Code execution is blocked when in headless mode for obvious security reasons!",
|
||||
}
|
||||
|
||||
forbid_remote_access(lollmsElfServer, "Code execution is blocked when the server is exposed outside for very obvious reasons!")
|
||||
forbid_remote_access(
|
||||
lollmsElfServer,
|
||||
"Code execution is blocked when the server is exposed outside for very obvious reasons!",
|
||||
)
|
||||
if not lollmsElfServer.config.turn_on_code_execution:
|
||||
return {"status":False,"error":"Code execution is blocked by the configuration!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Code execution is blocked by the configuration!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.turn_on_code_validation:
|
||||
if not show_yes_no_dialog("Validation","Do you validate the execution of the code?"):
|
||||
return {"status":False,"error":"User refused the execution!"}
|
||||
if not show_yes_no_dialog(
|
||||
"Validation", "Do you validate the execution of the code?"
|
||||
):
|
||||
return {"status": False, "error": "User refused the execution!"}
|
||||
|
||||
try:
|
||||
code = request.code
|
||||
@@ -179,24 +219,24 @@ async def execute_code_in_new_tab(request: CodeRequest):
|
||||
message_id = request.message_id
|
||||
language = request.language
|
||||
|
||||
if language=="python":
|
||||
if language == "python":
|
||||
ASCIIColors.info("Executing python code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_python(code, client, message_id, True)
|
||||
if language=="javascript":
|
||||
if language == "javascript":
|
||||
ASCIIColors.info("Executing javascript code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_javascript(code, client, message_id, True)
|
||||
if language in ["html","html5","svg"]:
|
||||
if language in ["html", "html5", "svg"]:
|
||||
ASCIIColors.info("Executing javascript code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_html(code, client, message_id, True)
|
||||
|
||||
elif language=="latex":
|
||||
elif language == "latex":
|
||||
ASCIIColors.info("Executing latex code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_latex(code, client, message_id, True)
|
||||
elif language in ["bash","shell","cmd","powershell"]:
|
||||
elif language in ["bash", "shell", "cmd", "powershell"]:
|
||||
ASCIIColors.info("Executing shell code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_bash(code, client)
|
||||
@@ -204,7 +244,7 @@ async def execute_code_in_new_tab(request: CodeRequest):
|
||||
ASCIIColors.info("Executing mermaid code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_mermaid(code, client, message_id, True)
|
||||
elif language in ["graphviz","dot"]:
|
||||
elif language in ["graphviz", "dot"]:
|
||||
ASCIIColors.info("Executing graphviz code:")
|
||||
ASCIIColors.yellow(code)
|
||||
return execute_graphviz(code, client, message_id, True)
|
||||
@@ -212,13 +252,14 @@ async def execute_code_in_new_tab(request: CodeRequest):
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
lollmsElfServer.error(ex)
|
||||
return {"status":False,"error":str(ex)}
|
||||
|
||||
return {"status": False, "error": str(ex)}
|
||||
|
||||
|
||||
class FilePath(BaseModel):
|
||||
client_id: str
|
||||
path: Optional[str] = Field(None, max_length=500)
|
||||
|
||||
|
||||
@router.post("/open_file")
|
||||
async def open_file(file_path: FilePath):
|
||||
"""
|
||||
@@ -228,37 +269,48 @@ async def open_file(file_path: FilePath):
|
||||
:return: A JSON response with the status of the operation.
|
||||
"""
|
||||
check_access(lollmsElfServer, client_id=file_path.client_id)
|
||||
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Open file is blocked when in headless mode for obvious security reasons!"}
|
||||
|
||||
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
|
||||
return {"status":False,"error":"Open file is blocked when the server is exposed outside for very obvious reasons!"}
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Open file is blocked when in headless mode for obvious security reasons!",
|
||||
}
|
||||
|
||||
if (
|
||||
lollmsElfServer.config.host != "localhost"
|
||||
and lollmsElfServer.config.host != "127.0.0.1"
|
||||
):
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Open file is blocked when the server is exposed outside for very obvious reasons!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.turn_on_open_file_validation:
|
||||
if not show_yes_no_dialog("Validation","Do you validate the opening of a file?"):
|
||||
return {"status":False,"error":"User refused the opeining file!"}
|
||||
if not show_yes_no_dialog(
|
||||
"Validation", "Do you validate the opening of a file?"
|
||||
):
|
||||
return {"status": False, "error": "User refused the opeining file!"}
|
||||
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
# Validate the 'path' parameter
|
||||
path = sanitize_path(file_path.path, allow_absolute_path=True)
|
||||
|
||||
try:
|
||||
try:
|
||||
if Path(path).exists():
|
||||
# Use subprocess.Popen to safely open the file
|
||||
ASCIIColors.yellow(f"Starting file : {path}")
|
||||
if os.name == "nt": # if the operating system is Windows
|
||||
subprocess.Popen(f'start {path}', shell=True)
|
||||
else: # for other operating systems
|
||||
if os.name == "nt": # if the operating system is Windows
|
||||
subprocess.Popen(f"start {path}", shell=True)
|
||||
else: # for other operating systems
|
||||
subprocess.Popen([path], shell=True)
|
||||
|
||||
|
||||
return {"status": True, "execution_time": 0}
|
||||
|
||||
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
lollmsElfServer.error(ex)
|
||||
return {"status":False,"error":str(ex)}
|
||||
|
||||
return {"status": False, "error": str(ex)}
|
||||
|
||||
|
||||
@router.post("/open_folder")
|
||||
async def open_folder(file_path: FilePath):
|
||||
@@ -268,14 +320,22 @@ async def open_folder(file_path: FilePath):
|
||||
:param file_path: The file path object.
|
||||
:return: A JSON response with the status of the operation.
|
||||
"""
|
||||
forbid_remote_access(lollmsElfServer, "Open file is blocked when the server is exposed outside for very obvious reasons!")
|
||||
forbid_remote_access(
|
||||
lollmsElfServer,
|
||||
"Open file is blocked when the server is exposed outside for very obvious reasons!",
|
||||
)
|
||||
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Open file is blocked when in headless mode for obvious security reasons!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Open file is blocked when in headless mode for obvious security reasons!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.turn_on_open_file_validation:
|
||||
if not show_yes_no_dialog("Validation","Do you validate the opening of a folder?"):
|
||||
return {"status":False,"error":"User refused the opening folder!"}
|
||||
if not show_yes_no_dialog(
|
||||
"Validation", "Do you validate the opening of a folder?"
|
||||
):
|
||||
return {"status": False, "error": "User refused the opening folder!"}
|
||||
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
try:
|
||||
@@ -284,21 +344,21 @@ async def open_folder(file_path: FilePath):
|
||||
ASCIIColors.yellow(f"Opening folder : {path}")
|
||||
if Path(path).exists():
|
||||
# Use subprocess.Popen to safely open the file
|
||||
if platform.system() == 'Windows':
|
||||
path = path.replace('/','\\')
|
||||
if platform.system() == "Windows":
|
||||
path = path.replace("/", "\\")
|
||||
subprocess.Popen(f'explorer "{path}"')
|
||||
elif platform.system() == 'Linux':
|
||||
subprocess.run(['xdg-open', str(path)], check=True, shell=True)
|
||||
elif platform.system() == 'Darwin':
|
||||
subprocess.run(['open', str(path)], check=True, shell=True)
|
||||
elif platform.system() == "Linux":
|
||||
subprocess.run(["xdg-open", str(path)], check=True, shell=True)
|
||||
elif platform.system() == "Darwin":
|
||||
subprocess.run(["open", str(path)], check=True, shell=True)
|
||||
|
||||
|
||||
return {"status": True, "execution_time": 0}
|
||||
|
||||
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
lollmsElfServer.error(ex)
|
||||
return {"status":False,"error":str(ex)}
|
||||
return {"status": False, "error": str(ex)}
|
||||
|
||||
|
||||
class OpenCodeFolderInVsCodeRequestModel(BaseModel):
|
||||
client_id: str = Field(...)
|
||||
@@ -306,47 +366,62 @@ class OpenCodeFolderInVsCodeRequestModel(BaseModel):
message_id: Optional[int] = Field(None, gt=0)
code: Optional[str]


@router.post("/open_discussion_folder_in_vs_code")
async def open_discussion_folder_in_vs_code(request: OpenCodeFolderInVsCodeRequestModel):
async def open_discussion_folder_in_vs_code(
request: OpenCodeFolderInVsCodeRequestModel,
):

client = check_access(lollmsElfServer, request.client_id)

if lollmsElfServer.config.headless_server_mode:
return {"status":False,"error":"Open code folder in vscode is blocked when in headless mode for obvious security reasons!"}
return {
"status": False,
"error": "Open code folder in vscode is blocked when in headless mode for obvious security reasons!",
}

if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
return {"status":False,"error":"Open code folder in vscode is blocked when the server is exposed outside for very obvious reasons!"}
if (
lollmsElfServer.config.host != "localhost"
and lollmsElfServer.config.host != "127.0.0.1"
):
return {
"status": False,
"error": "Open code folder in vscode is blocked when the server is exposed outside for very obvious reasons!",
}

if lollmsElfServer.config.turn_on_open_file_validation:
if not show_yes_no_dialog("Validation","Do you validate the opening of folder in vscode?"):
return {"status":False,"error":"User refused the execution!"}
if not show_yes_no_dialog(
"Validation", "Do you validate the opening of folder in vscode?"
):
return {"status": False, "error": "User refused the execution!"}

try:
|
||||
if request.discussion_id:
|
||||
if request.discussion_id:
|
||||
ASCIIColors.info("Opening folder:")
|
||||
root_folder = client.discussion.discussion_folder
|
||||
root_folder.mkdir(parents=True,exist_ok=True)
|
||||
tmp_file = root_folder/f"ai_code_{request.message_id}.py"
|
||||
with open(tmp_file,"w",encoding="utf-8", errors='ignore') as f:
|
||||
root_folder.mkdir(parents=True, exist_ok=True)
|
||||
tmp_file = root_folder / f"ai_code_{request.message_id}.py"
|
||||
with open(tmp_file, "w", encoding="utf-8", errors="ignore") as f:
|
||||
f.write(request.code)
|
||||
|
||||
if os.path.isdir(root_folder):
|
||||
path = '"'+str(root_folder)+'"'.replace("\\","/")
|
||||
subprocess.run(['code', '-n', path], shell=True)
|
||||
|
||||
if os.path.isdir(root_folder):
|
||||
path = '"' + str(root_folder) + '"'.replace("\\", "/")
|
||||
subprocess.run(["code", "-n", path], shell=True)
|
||||
|
||||
return {"status": True, "execution_time": 0}
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
lollmsElfServer.error(str(ex))
|
||||
return {"status":False,"error":"An error occurred during processing."}
|
||||
|
||||
return {"status": False, "error": "An error occurred during processing."}
|
||||
|
||||
|
||||
class VSCodeData(BaseModel):
|
||||
client_id: str = Field(...)
|
||||
discussion_id: Optional[int] = Field(None, ge=0)
|
||||
message_id: Optional[int] = Field(None, ge=0)
|
||||
code: str = Field(...)
|
||||
|
||||
|
||||
@router.post("/open_code_in_vs_code")
|
||||
async def open_code_in_vs_code(vs_code_data: VSCodeData):
|
||||
"""
|
||||
@ -358,14 +433,25 @@ async def open_code_in_vs_code(vs_code_data: VSCodeData):
|
||||
client = check_access(lollmsElfServer, vs_code_data.client_id)
|
||||
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Open code in vs code is blocked when in headless mode for obvious security reasons!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Open code in vs code is blocked when in headless mode for obvious security reasons!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
|
||||
return {"status":False,"error":"Open code in vs code is blocked when the server is exposed outside for very obvious reasons!"}
|
||||
if (
|
||||
lollmsElfServer.config.host != "localhost"
|
||||
and lollmsElfServer.config.host != "127.0.0.1"
|
||||
):
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Open code in vs code is blocked when the server is exposed outside for very obvious reasons!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.turn_on_open_file_validation:
|
||||
if not show_yes_no_dialog("Validation","Do you validate the opening of a code in vscode?"):
|
||||
return {"status":False,"error":"User refused the opeining file!"}
|
||||
if not show_yes_no_dialog(
|
||||
"Validation", "Do you validate the opening of a code in vscode?"
|
||||
):
|
||||
return {"status": False, "error": "User refused the opeining file!"}
|
||||
|
||||
try:
|
||||
discussion_id = vs_code_data.discussion_id
|
||||
@ -376,24 +462,26 @@ async def open_code_in_vs_code(vs_code_data: VSCodeData):
|
||||
# Create a temporary file.
|
||||
root_folder = client.discussion.discussion_folder
|
||||
|
||||
root_folder.mkdir(parents=True,exist_ok=True)
|
||||
tmp_file = root_folder/f"ai_code_{message_id}.py"
|
||||
with open(tmp_file,"w", encoding="utf-8", errors='ignore') as f:
|
||||
root_folder.mkdir(parents=True, exist_ok=True)
|
||||
tmp_file = root_folder / f"ai_code_{message_id}.py"
|
||||
with open(tmp_file, "w", encoding="utf-8", errors="ignore") as f:
|
||||
f.write(code)
|
||||
|
||||
|
||||
# Use subprocess.Popen to safely open the file
|
||||
os.system(f'code -n "{tmp_file}"')
|
||||
|
||||
|
||||
return {"status": True, "execution_time": 0}
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
lollmsElfServer.error(ex)
|
||||
return {"status":False,"error":str(ex)}
|
||||
|
||||
return {"status": False, "error": str(ex)}
|
||||
|
||||
|
||||
class DiscussionFolderRequest(BaseModel):
|
||||
client_id: str = Field(...)
|
||||
discussion_id: int = Field(...)
|
||||
|
||||
|
||||
@router.post("/open_discussion_folder")
|
||||
async def open_discussion_folder(request: DiscussionFolderRequest):
|
||||
"""
|
||||
@ -405,37 +493,53 @@ async def open_discussion_folder(request: DiscussionFolderRequest):
|
||||
client = check_access(lollmsElfServer, request.client_id)
|
||||
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Open code folder is blocked when in headless mode for obvious security reasons!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Open code folder is blocked when in headless mode for obvious security reasons!",
|
||||
}
|
||||
|
||||
if (
|
||||
lollmsElfServer.config.host != "localhost"
|
||||
and lollmsElfServer.config.host != "127.0.0.1"
|
||||
):
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Open code folder is blocked when the server is exposed outside for very obvious reasons!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
|
||||
return {"status":False,"error":"Open code folder is blocked when the server is exposed outside for very obvious reasons!"}
|
||||
|
||||
if lollmsElfServer.config.turn_on_open_file_validation:
|
||||
if not show_yes_no_dialog("Validation","Do you validate the opening of a folder?"):
|
||||
return {"status":False,"error":"User refused the opeining folder!"}
|
||||
if not show_yes_no_dialog(
|
||||
"Validation", "Do you validate the opening of a folder?"
|
||||
):
|
||||
return {"status": False, "error": "User refused the opeining folder!"}
|
||||
|
||||
try:
|
||||
ASCIIColors.info("Opening folder:")
|
||||
# Create a temporary file.
|
||||
root_folder = client.discussion.discussion_folder
|
||||
root_folder.mkdir(parents=True, exist_ok=True)
|
||||
if platform.system() == 'Windows':
|
||||
if platform.system() == "Windows":
|
||||
subprocess.Popen(f'explorer "{root_folder}"')
|
||||
elif platform.system() == 'Linux':
|
||||
subprocess.run(['xdg-open', str(root_folder)], check=True)
|
||||
elif platform.system() == 'Darwin':
|
||||
subprocess.run(['open', str(root_folder)], check=True)
|
||||
elif platform.system() == "Linux":
|
||||
subprocess.run(["xdg-open", str(root_folder)], check=True)
|
||||
elif platform.system() == "Darwin":
|
||||
subprocess.run(["open", str(root_folder)], check=True)
|
||||
return {"status": True, "execution_time": 0}
|
||||
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
lollmsElfServer.error(ex)
|
||||
return {"status": False, "error": "An error occurred while processing the request"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "An error occurred while processing the request",
|
||||
}
|
||||
|
||||
|
||||
class PersonalityFolderRequest(BaseModel):
|
||||
client_id: str = Field(...)
|
||||
personality_folder: str = Field(...)
|
||||
|
||||
|
||||
@router.post("/open_personality_folder")
|
||||
async def open_personality_folder(request: PersonalityFolderRequest):
|
||||
"""
|
||||
@ -448,80 +552,122 @@ async def open_personality_folder(request: PersonalityFolderRequest):
|
||||
personality_folder = sanitize_path(request.personality_folder)
|
||||
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Open code folder is blocked when in headless mode for obvious security reasons!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Open code folder is blocked when in headless mode for obvious security reasons!",
|
||||
}
|
||||
|
||||
if (
|
||||
lollmsElfServer.config.host != "localhost"
|
||||
and lollmsElfServer.config.host != "127.0.0.1"
|
||||
):
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Open code folder is blocked when the server is exposed outside for very obvious reasons!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
|
||||
return {"status":False,"error":"Open code folder is blocked when the server is exposed outside for very obvious reasons!"}
|
||||
|
||||
if lollmsElfServer.config.turn_on_open_file_validation:
|
||||
if not show_yes_no_dialog("Validation","Do you validate the opening of a folder?"):
|
||||
return {"status":False,"error":"User refused the opeining folder!"}
|
||||
if not show_yes_no_dialog(
|
||||
"Validation", "Do you validate the opening of a folder?"
|
||||
):
|
||||
return {"status": False, "error": "User refused the opeining folder!"}
|
||||
|
||||
try:
|
||||
ASCIIColors.info("Opening folder:")
|
||||
# Create a temporary file.
|
||||
root_folder = lollmsElfServer.lollms_paths.personalities_zoo_path/personality_folder
|
||||
root_folder = (
|
||||
lollmsElfServer.lollms_paths.personalities_zoo_path / personality_folder
|
||||
)
|
||||
root_folder.mkdir(parents=True, exist_ok=True)
|
||||
if platform.system() == 'Windows':
|
||||
if platform.system() == "Windows":
|
||||
subprocess.Popen(f'explorer "{root_folder}"')
|
||||
elif platform.system() == 'Linux':
|
||||
subprocess.run(['xdg-open', str(root_folder)], check=True)
|
||||
elif platform.system() == 'Darwin':
|
||||
subprocess.run(['open', str(root_folder)], check=True)
|
||||
elif platform.system() == "Linux":
|
||||
subprocess.run(["xdg-open", str(root_folder)], check=True)
|
||||
elif platform.system() == "Darwin":
|
||||
subprocess.run(["open", str(root_folder)], check=True)
|
||||
return {"status": True, "execution_time": 0}
|
||||
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
lollmsElfServer.error(ex)
|
||||
return {"status": False, "error": "An error occurred while processing the request"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "An error occurred while processing the request",
|
||||
}
|
||||
|
||||
|
||||
@router.get("/is_rt_on")
|
||||
def is_rt_on():
|
||||
return {"status": lollmsElfServer.rt_com is not None}
|
||||
|
||||
|
||||
@router.post("/start_recording")
|
||||
def start_recording(data:Identification):
|
||||
def start_recording(data: Identification):
|
||||
client = check_access(lollmsElfServer, data.client_id)
|
||||
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Start recording is blocked when in headless mode for obvious security reasons!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Start recording is blocked when in headless mode for obvious security reasons!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
|
||||
return {"status":False,"error":"Start recording is blocked when the server is exposed outside for very obvious reasons!"}
|
||||
if (
|
||||
lollmsElfServer.config.host != "localhost"
|
||||
and lollmsElfServer.config.host != "127.0.0.1"
|
||||
):
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Start recording is blocked when the server is exposed outside for very obvious reasons!",
|
||||
}
|
||||
|
||||
lollmsElfServer.info("Starting audio capture")
|
||||
if not lollmsElfServer.tts or not lollmsElfServer.stt:
|
||||
lollmsElfServer.InfoMessage("TTS or STT are not configured.\nPlease go to settings and configure them first")
|
||||
return {"status":False,"error":"TTS or STT not configured"}
|
||||
lollmsElfServer.InfoMessage(
|
||||
"TTS or STT are not configured.\nPlease go to settings and configure them first"
|
||||
)
|
||||
return {"status": False, "error": "TTS or STT not configured"}
|
||||
|
||||
if not lollmsElfServer.tts.ready or not lollmsElfServer.stt.ready:
|
||||
lollmsElfServer.InfoMessage("TTS is not ready yet.\nPlease wait")
|
||||
return {"status":False,"error":"TTS not ready"}
|
||||
return {"status": False, "error": "TTS not ready"}
|
||||
|
||||
lollmsElfServer.info("Starting audio capture")
|
||||
try:
|
||||
from lollms.media import AudioNinja
|
||||
lollmsElfServer.rec_output_folder = lollmsElfServer.lollms_paths.personal_outputs_path/"audio_rec"
|
||||
|
||||
lollmsElfServer.rec_output_folder = (
|
||||
lollmsElfServer.lollms_paths.personal_outputs_path / "audio_rec"
|
||||
)
|
||||
lollmsElfServer.rec_output_folder.mkdir(exist_ok=True, parents=True)
|
||||
lollmsElfServer.summoned = False
|
||||
lollmsElfServer.audioNinja = AudioNinja(
|
||||
lollmsElfServer,
|
||||
logs_folder=lollmsElfServer.rec_output_folder
|
||||
)
|
||||
lollmsElfServer, logs_folder=lollmsElfServer.rec_output_folder
|
||||
)
|
||||
lollmsElfServer.audioNinja.start_recording()
|
||||
except:
|
||||
lollmsElfServer.InfoMessage("Couldn't load media library.\nYou will not be able to perform any of the media linked operations. please verify the logs and install any required installations")
|
||||
lollmsElfServer.InfoMessage(
|
||||
"Couldn't load media library.\nYou will not be able to perform any of the media linked operations. please verify the logs and install any required installations"
|
||||
)
|
||||
|
||||
|
||||
@router.post("/stop_recording")
|
||||
def stop_recording(data:Identification):
|
||||
def stop_recording(data: Identification):
|
||||
client = check_access(lollmsElfServer, data.client_id)
|
||||
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Stop recording is blocked when in headless mode for obvious security reasons!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Stop recording is blocked when in headless mode for obvious security reasons!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
|
||||
return {"status":False,"error":"Stop recording is blocked when the server is exposed outside for very obvious reasons!"}
|
||||
if (
|
||||
lollmsElfServer.config.host != "localhost"
|
||||
and lollmsElfServer.config.host != "127.0.0.1"
|
||||
):
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Stop recording is blocked when the server is exposed outside for very obvious reasons!",
|
||||
}
|
||||
|
||||
lollmsElfServer.info("Stopping audio capture")
|
||||
fn = lollmsElfServer.audioNinja.stop_recording()
|
||||
@ -535,54 +681,56 @@ def stop_recording(data:Identification):
|
||||
|
||||
@router.post("/transcribe")
|
||||
async def transcribe_audio(file: UploadFile = File(...)):
|
||||
with tempfile.NamedTemporaryFile(delete=False, suffix='.wav') as temp_file:
|
||||
with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as temp_file:
|
||||
# Copy the contents of the uploaded file to the temporary file
|
||||
shutil.copyfileobj(file.file, temp_file)
|
||||
temp_file_path = temp_file.name
|
||||
|
||||
try:
|
||||
if hasattr(lollmsElfServer, 'stt') and lollmsElfServer.stt:
|
||||
if hasattr(lollmsElfServer, "stt") and lollmsElfServer.stt:
|
||||
text = lollmsElfServer.stt.transcribe(temp_file_path)
|
||||
return JSONResponse(content={"transcription": text})
|
||||
else:
|
||||
return JSONResponse(content={"error": "STT service not available"}, status_code=503)
|
||||
return JSONResponse(
|
||||
content={"error": "STT service not available"}, status_code=503
|
||||
)
|
||||
finally:
|
||||
# Clean up the temporary file
|
||||
os.unlink(temp_file_path)



class TTSRequest(BaseModel):
text: str
speaker: str = None
language: str = "en"


@router.post("/tts/file")
async def text_to_speech_file(request: TTSRequest):
try:
file_path = lollmsElfServer.tts.tts_file(
text=request.text,
file_name_or_path=lollmsElfServer.lollms_paths.personal_outputs_path/"output.wav",
file_name_or_path=lollmsElfServer.lollms_paths.personal_outputs_path
/ "output.wav",
speaker=request.speaker,
language=request.language
language=request.language,
)
return FileResponse(file_path, media_type="audio/wav", filename="speech.wav")
except Exception as e:
trace_exception(e)
raise HTTPException(status_code=500, detail=str(e))

@router.post("/tts/stream")
async def text_to_speech_stream(request: TTSRequest):
try:
audio_data = lollmsElfServer.tts.tts_audio(
text=request.text,
speaker=request.speaker,
language=request.language
text=request.text, speaker=request.speaker, language=request.language
)
return StreamingResponse(io.BytesIO(audio_data), media_type="audio/wav")
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))



@router.get("/tts/voices")
async def get_available_voices():
@ -590,4 +738,4 @@ async def get_available_voices():
voices = lollmsElfServer.tts.get_voices()
return JSONResponse(content={"voices": voices})
except Exception as e:
return JSONResponse(content={"error": str(e)}, status_code=500)
return JSONResponse(content={"error": str(e)}, status_code=500)
@ -1,72 +1,76 @@
from fastapi import FastAPI, File, UploadFile, HTTPException, APIRouter, Response
from fastapi.responses import JSONResponse, StreamingResponse, PlainTextResponse
from pydantic import BaseModel, Field
from fastapi.responses import FileResponse
from lollms_webui import LOLLMSWebUI
from packaging import version
from pathlib import Path
import shutil
import uuid
import os
import platform
import shutil
import subprocess
import sys
import uuid
from pathlib import Path

import pipmaster as pm
import requests
import yaml
from lollms.security import check_access, sanitize_path
import os
import subprocess
import uuid
import platform
from ascii_colors import ASCIIColors, trace_exception
import pipmaster as pm
import sys
from fastapi import (APIRouter, FastAPI, File, HTTPException, Response,
UploadFile)
from fastapi.responses import (FileResponse, JSONResponse, PlainTextResponse,
StreamingResponse)
from lollms.security import check_access, sanitize_path
from packaging import version
from pydantic import BaseModel, Field

from lollms_webui import LOLLMSWebUI

if not pm.is_installed("httpx"):
pm.install("httpx")
import httpx
from lollms.utilities import PackageManager


# Pull the repository if it already exists
def check_lollms_models_zoo():
if not PackageManager.check_package_installed("zipfile"):
PackageManager.install_or_update("zipfile36")


ASCIIColors.execute_with_animation("Checking zip library.", check_lollms_models_zoo)


from pydantic import BaseModel
from pathlib import Path
import zipfile
import io


import shutil
import zipfile
from pathlib import Path

from pydantic import BaseModel

router = APIRouter()
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()

class AuthRequest(BaseModel):
|
||||
client_id: str
|
||||
|
||||
|
||||
class AppInfo:
|
||||
def __init__(
|
||||
self,
|
||||
uid: str,
|
||||
name: str,
|
||||
folder_name: str,
|
||||
icon: str,
|
||||
category:str,
|
||||
description: str,
|
||||
author:str,
|
||||
version:str,
|
||||
creation_date:str,
|
||||
last_update_date:str,
|
||||
model_name:str,
|
||||
disclaimer:str,
|
||||
has_server:bool,
|
||||
has_readme:bool,
|
||||
is_public:bool,
|
||||
has_update:bool,
|
||||
installed: bool
|
||||
):
|
||||
self,
|
||||
uid: str,
|
||||
name: str,
|
||||
folder_name: str,
|
||||
icon: str,
|
||||
category: str,
|
||||
description: str,
|
||||
author: str,
|
||||
version: str,
|
||||
creation_date: str,
|
||||
last_update_date: str,
|
||||
model_name: str,
|
||||
disclaimer: str,
|
||||
has_server: bool,
|
||||
has_readme: bool,
|
||||
is_public: bool,
|
||||
has_update: bool,
|
||||
installed: bool,
|
||||
):
|
||||
self.uid = uid
|
||||
self.name = name
|
||||
self.folder_name = folder_name
|
||||
@ -85,11 +89,12 @@ class AppInfo:
|
||||
self.is_public = is_public
|
||||
self.installed = installed
|
||||
|
||||
|
||||
@router.get("/apps")
|
||||
async def list_apps():
|
||||
apps = []
|
||||
apps_zoo_path = lollmsElfServer.lollms_paths.apps_zoo_path
|
||||
REPO_DIR = lollmsElfServer.lollms_paths.personal_path/"apps_zoo_repo"
|
||||
REPO_DIR = lollmsElfServer.lollms_paths.personal_path / "apps_zoo_repo"
|
||||
if REPO_DIR.exists():
|
||||
remote_apps = [a.stem for a in REPO_DIR.iterdir()]
|
||||
else:
|
||||
@ -107,35 +112,43 @@ async def list_apps():
|
||||
has_server = False
|
||||
has_readme = False
|
||||
is_public = app_name.stem in remote_apps
|
||||
|
||||
|
||||
if description_path.exists():
|
||||
with open(description_path, 'r') as file:
|
||||
with open(description_path, "r") as file:
|
||||
data = yaml.safe_load(file)
|
||||
application_name = data.get('name', app_name.name)
|
||||
category = data.get('category', 'generic')
|
||||
description = data.get('description', '')
|
||||
author = data.get('author', '')
|
||||
current_version = data.get('version', '')
|
||||
creation_date = data.get('creation_date', 'unknown')
|
||||
last_update_date = data.get('last_update_date', '')
|
||||
current_version = data.get('version', '')
|
||||
model_name = data.get('model_name', '')
|
||||
disclaimer = data.get('disclaimer', 'No disclaimer provided.')
|
||||
has_server = data.get('has_server', (Path(app_name)/"server.py").exists())
|
||||
has_readme = data.get('has_readme', (Path(app_name)/"README.md").exists())
|
||||
application_name = data.get("name", app_name.name)
|
||||
category = data.get("category", "generic")
|
||||
description = data.get("description", "")
|
||||
author = data.get("author", "")
|
||||
current_version = data.get("version", "")
|
||||
creation_date = data.get("creation_date", "unknown")
|
||||
last_update_date = data.get("last_update_date", "")
|
||||
current_version = data.get("version", "")
|
||||
model_name = data.get("model_name", "")
|
||||
disclaimer = data.get("disclaimer", "No disclaimer provided.")
|
||||
has_server = data.get(
|
||||
"has_server", (Path(app_name) / "server.py").exists()
|
||||
)
|
||||
has_readme = data.get(
|
||||
"has_readme", (Path(app_name) / "README.md").exists()
|
||||
)
|
||||
installed = True
|
||||
else:
|
||||
installed = False
|
||||
|
||||
if is_public:
|
||||
try:
|
||||
with (REPO_DIR / app_name.stem / "description.yaml").open("r") as file:
|
||||
with (REPO_DIR / app_name.stem / "description.yaml").open(
|
||||
"r"
|
||||
) as file:
|
||||
# Parse the YAML content
|
||||
yaml_content = yaml.safe_load(file)
|
||||
repo_version = yaml_content.get("version", "0")
|
||||
|
||||
|
||||
# Compare versions using packaging.version
|
||||
has_update = version.parse(str(repo_version)) > version.parse(str(current_version))
|
||||
has_update = version.parse(
|
||||
str(repo_version)
|
||||
) > version.parse(str(current_version))
|
||||
except (yaml.YAMLError, FileNotFoundError) as e:
|
||||
print(f"Error reading or parsing YAML file: {e}")
|
||||
has_update = False
|
||||
@ -144,34 +157,37 @@ async def list_apps():
|
||||
|
||||
if icon_path.exists():
|
||||
uid = str(uuid.uuid4())
|
||||
apps.append(AppInfo(
|
||||
uid=uid,
|
||||
name=application_name,
|
||||
folder_name = app_name.name,
|
||||
icon=f"/apps/{app_name.name}/icon.png",
|
||||
category=category,
|
||||
description=description,
|
||||
author=author,
|
||||
version=current_version,
|
||||
creation_date=creation_date,
|
||||
last_update_date = last_update_date,
|
||||
model_name=model_name,
|
||||
disclaimer=disclaimer,
|
||||
has_server=has_server,
|
||||
has_readme=has_readme,
|
||||
is_public=is_public,
|
||||
has_update=has_update,
|
||||
installed=installed
|
||||
))
|
||||
apps.append(
|
||||
AppInfo(
|
||||
uid=uid,
|
||||
name=application_name,
|
||||
folder_name=app_name.name,
|
||||
icon=f"/apps/{app_name.name}/icon.png",
|
||||
category=category,
|
||||
description=description,
|
||||
author=author,
|
||||
version=current_version,
|
||||
creation_date=creation_date,
|
||||
last_update_date=last_update_date,
|
||||
model_name=model_name,
|
||||
disclaimer=disclaimer,
|
||||
has_server=has_server,
|
||||
has_readme=has_readme,
|
||||
is_public=is_public,
|
||||
has_update=has_update,
|
||||
installed=installed,
|
||||
)
|
||||
)
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
|
||||
|
||||
return apps
|
||||
|
||||
|
||||
class ShowAppsFolderRequest(BaseModel):
|
||||
client_id: str = Field(...)
|
||||
|
||||
|
||||
@router.post("/show_apps_folder")
|
||||
async def open_folder_in_vscode(request: ShowAppsFolderRequest):
|
||||
check_access(lollmsElfServer, request.client_id)
|
||||
@ -181,28 +197,30 @@ async def open_folder_in_vscode(request: ShowAppsFolderRequest):
|
||||
try:
|
||||
if current_os == "Windows":
|
||||
# For Windows
|
||||
subprocess.run(['explorer', lollmsElfServer.lollms_paths.apps_zoo_path])
|
||||
subprocess.run(["explorer", lollmsElfServer.lollms_paths.apps_zoo_path])
|
||||
elif current_os == "Darwin":
|
||||
# For macOS
|
||||
subprocess.run(['open', lollmsElfServer.lollms_paths.apps_zoo_path])
|
||||
subprocess.run(["open", lollmsElfServer.lollms_paths.apps_zoo_path])
|
||||
elif current_os == "Linux":
|
||||
# For Linux
|
||||
subprocess.run(['xdg-open', lollmsElfServer.lollms_paths.apps_zoo_path])
|
||||
subprocess.run(["xdg-open", lollmsElfServer.lollms_paths.apps_zoo_path])
|
||||
else:
|
||||
print("Unsupported operating system.")
|
||||
except Exception as e:
|
||||
print(f"An error occurred: {e}")
|
||||
|
||||
|
||||
class OpenFolderRequest(BaseModel):
|
||||
client_id: str = Field(...)
|
||||
app_name: str = Field(...)
|
||||
|
||||
|
||||
@router.post("/open_app_in_vscode")
|
||||
async def open_folder_in_vscode(request: OpenFolderRequest):
|
||||
check_access(lollmsElfServer, request.client_id)
|
||||
sanitize_path(request.app_name)
|
||||
# Construct the folder path
|
||||
folder_path = lollmsElfServer.lollms_paths.apps_zoo_path/ request.app_name
|
||||
folder_path = lollmsElfServer.lollms_paths.apps_zoo_path / request.app_name
|
||||
|
||||
# Check if the folder exists
|
||||
if not folder_path.exists():
|
||||
@ -218,18 +236,19 @@ async def open_folder_in_vscode(request: OpenFolderRequest):
|
||||
|
||||
@router.get("/apps/{app_name}/{file}")
|
||||
async def get_app_file(app_name: str, file: str):
|
||||
app_name=sanitize_path(app_name)
|
||||
file=sanitize_path(file)
|
||||
app_name = sanitize_path(app_name)
|
||||
file = sanitize_path(file)
|
||||
app_path = lollmsElfServer.lollms_paths.apps_zoo_path / app_name / file
|
||||
if not app_path.exists():
|
||||
raise HTTPException(status_code=404, detail="App file not found")
|
||||
return FileResponse(app_path)
|
||||
|
||||
|
||||
@router.get("/apps/{app_name}/{subfolder}/{file}")
|
||||
async def get_app_file(app_name: str, subfolder: str, file: str):
|
||||
app_name=sanitize_path(app_name)
|
||||
subfolder=sanitize_path(subfolder)
|
||||
file=sanitize_path(file)
|
||||
app_name = sanitize_path(app_name)
|
||||
subfolder = sanitize_path(subfolder)
|
||||
file = sanitize_path(file)
|
||||
app_path = lollmsElfServer.lollms_paths.apps_zoo_path / app_name / subfolder / file
|
||||
if not app_path.exists():
|
||||
raise HTTPException(status_code=404, detail="App file not found")
|
||||
@ -241,16 +260,13 @@ class AppNameInput(BaseModel):
|
||||
app_name: str
|
||||
|
||||
|
||||
import tempfile
|
||||
|
||||
|
||||
from fastapi.responses import FileResponse
|
||||
import tempfile
|
||||
import os
|
||||
|
||||
from fastapi.responses import Response
|
||||
from io import BytesIO
|
||||
import tempfile
|
||||
import zipfile
|
||||
from io import BytesIO
|
||||
|
||||
from fastapi.responses import FileResponse, Response
|
||||
|
||||
|
||||
@router.post("/download_app")
|
||||
async def download_app(input_data: AppNameInput):
|
||||
@ -265,9 +281,9 @@ async def download_app(input_data: AppNameInput):
|
||||
zip_buffer = BytesIO()
|
||||
|
||||
try:
|
||||
with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
|
||||
for file in app_path.rglob('*'):
|
||||
if file.is_file() and '.git' not in file.parts:
|
||||
with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
|
||||
for file in app_path.rglob("*"):
|
||||
if file.is_file() and ".git" not in file.parts:
|
||||
relative_path = file.relative_to(app_path)
|
||||
zip_file.write(file, arcname=str(relative_path))
|
||||
|
||||
@ -282,18 +298,20 @@ async def download_app(input_data: AppNameInput):
|
||||
"Content-Disposition": f"attachment; filename={app_name}.zip",
|
||||
"Cache-Control": "no-cache",
|
||||
"Pragma": "no-cache",
|
||||
"Expires": "0"
|
||||
}
|
||||
"Expires": "0",
|
||||
},
|
||||
)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error creating ZIP file: {str(e)}")
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Error creating ZIP file: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.post("/upload_app")
|
||||
async def upload_app(client_id: str, file: UploadFile = File(...)):
|
||||
check_access(lollmsElfServer, client_id)
|
||||
sanitize_path(file.filename)
|
||||
|
||||
|
||||
# Create a temporary directory to extract the zip file
|
||||
temp_dir = lollmsElfServer.lollms_paths.personal_path / "temp"
|
||||
os.makedirs(temp_dir, exist_ok=True)
|
||||
@ -305,33 +323,42 @@ async def upload_app(client_id: str, file: UploadFile = File(...)):
|
||||
shutil.copyfileobj(file.file, buffer)
|
||||
|
||||
# Extract the zip file
|
||||
with zipfile.ZipFile(temp_file, 'r') as zip_ref:
|
||||
with zipfile.ZipFile(temp_file, "r") as zip_ref:
|
||||
zip_ref.extractall(temp_dir)
|
||||
|
||||
# Check for required files
|
||||
required_files = ['index.html', 'description.yaml', 'icon.png']
|
||||
required_files = ["index.html", "description.yaml", "icon.png"]
|
||||
for required_file in required_files:
|
||||
if not os.path.exists(os.path.join(temp_dir, required_file)):
|
||||
raise HTTPException(status_code=400, detail=f"Missing required file: {required_file}")
|
||||
raise HTTPException(
|
||||
status_code=400, detail=f"Missing required file: {required_file}"
|
||||
)
|
||||
|
||||
# Read the description.yaml file
|
||||
with open(os.path.join(temp_dir, 'description.yaml'), 'r') as yaml_file:
|
||||
with open(os.path.join(temp_dir, "description.yaml"), "r") as yaml_file:
|
||||
description = yaml.safe_load(yaml_file)
|
||||
|
||||
# Get the app name from the description
|
||||
app_name = description.get('name')
|
||||
app_name = description.get("name")
|
||||
if not app_name:
|
||||
raise HTTPException(status_code=400, detail="App name not found in description.yaml")
|
||||
raise HTTPException(
|
||||
status_code=400, detail="App name not found in description.yaml"
|
||||
)
|
||||
|
||||
# Create the app directory
|
||||
app_dir = lollmsElfServer.lollms_paths.apps_zoo_path / app_name
|
||||
if os.path.exists(app_dir):
|
||||
raise HTTPException(status_code=400, detail="An app with this name already exists")
|
||||
raise HTTPException(
|
||||
status_code=400, detail="An app with this name already exists"
|
||||
)
|
||||
|
||||
# Move the extracted files to the app directory
|
||||
shutil.move(temp_dir, app_dir)
|
||||
|
||||
return JSONResponse(content={"message": f"App '{app_name}' uploaded successfully"}, status_code=200)
|
||||
return JSONResponse(
|
||||
content={"message": f"App '{app_name}' uploaded successfully"},
|
||||
status_code=200,
|
||||
)
|
||||
|
||||
except zipfile.BadZipFile:
|
||||
raise HTTPException(status_code=400, detail="Invalid zip file")
|
||||
@ -345,70 +372,76 @@ async def upload_app(client_id: str, file: UploadFile = File(...)):
|
||||
shutil.rmtree(temp_dir)
|
||||
|
||||
|
||||
import json
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
import json
|
||||
|
||||
|
||||
|
||||
@router.post("/install/{app_name}")
|
||||
async def install_app(app_name: str, auth: AuthRequest):
|
||||
check_access(lollmsElfServer, auth.client_id)
|
||||
app_name=sanitize_path(app_name)
|
||||
app_name = sanitize_path(app_name)
|
||||
|
||||
REPO_DIR = lollmsElfServer.lollms_paths.personal_path / "apps_zoo_repo"
|
||||
|
||||
REPO_DIR = lollmsElfServer.lollms_paths.personal_path/"apps_zoo_repo"
|
||||
|
||||
# Create the app directory
|
||||
app_path = lollmsElfServer.lollms_paths.apps_zoo_path/app_name
|
||||
app_path = lollmsElfServer.lollms_paths.apps_zoo_path / app_name
|
||||
os.makedirs(app_path, exist_ok=True)
|
||||
|
||||
source_dir = REPO_DIR/app_name
|
||||
|
||||
source_dir = REPO_DIR / app_name
|
||||
|
||||
if not source_dir.exists():
|
||||
raise HTTPException(status_code=404, detail=f"App {app_name} not found in the local repository")
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"App {app_name} not found in the local repository"
|
||||
)
|
||||
|
||||
# Define directories to exclude
|
||||
exclude_dirs = {'.vscode', '.git'}
|
||||
exclude_dirs = {".vscode", ".git"}
|
||||
|
||||
# Copy all files and directories, excluding the ones in exclude_dirs
|
||||
for item in source_dir.glob('*'):
|
||||
for item in source_dir.glob("*"):
|
||||
if item.is_dir():
|
||||
if item.name not in exclude_dirs:
|
||||
shutil.copytree(item, app_path/item.name, dirs_exist_ok=True)
|
||||
shutil.copytree(item, app_path / item.name, dirs_exist_ok=True)
|
||||
else:
|
||||
shutil.copy2(item, app_path)
|
||||
|
||||
try:
|
||||
description_path = app_path/"description.yaml"
|
||||
requirements = app_path/"requirements.txt"
|
||||
description_path = app_path / "description.yaml"
|
||||
requirements = app_path / "requirements.txt"
|
||||
|
||||
if description_path.exists() and requirements.exists():
|
||||
with open(description_path, 'r') as file:
|
||||
with open(description_path, "r") as file:
|
||||
description_data = yaml.safe_load(file)
|
||||
if description_data.get("has_server", (Path(app_path)/"server.py").exists()):
|
||||
pass
|
||||
if description_data.get(
|
||||
"has_server", (Path(app_path) / "server.py").exists()
|
||||
):
|
||||
pass
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
|
||||
return {"message": f"App {app_name} installed successfully."}
|
||||
|
||||
|
||||
@router.post("/uninstall/{app_name}")
|
||||
async def uninstall_app(app_name: str, auth: AuthRequest):
|
||||
check_access(lollmsElfServer, auth.client_id)
|
||||
app_name=sanitize_path(app_name)
|
||||
app_name = sanitize_path(app_name)
|
||||
app_path = lollmsElfServer.lollms_paths.apps_zoo_path / app_name
|
||||
if app_path.exists():
|
||||
shutil.rmtree(app_path)
|
||||
return {"message": f"App {app_name} uninstalled successfully."}
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="App not found")
|
||||
|
||||
|
||||
|
||||
REPO_URL = "https://github.com/ParisNeo/lollms_apps_zoo.git"
|
||||
|
||||
|
||||
class ProxyRequest(BaseModel):
|
||||
url: str
|
||||
|
||||
|
||||
@router.post("/api/proxy")
|
||||
async def proxy(request: ProxyRequest):
|
||||
try:
|
||||
@ -417,57 +450,74 @@ async def proxy(request: ProxyRequest):
|
||||
return {"content": response.text}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
|
||||
def clone_repo():
|
||||
REPO_DIR = Path(lollmsElfServer.lollms_paths.personal_path) / "apps_zoo_repo"
|
||||
|
||||
|
||||
# Check if the directory exists and if it is empty
|
||||
if REPO_DIR.exists():
|
||||
if any(REPO_DIR.iterdir()): # Check if the directory is not empty
|
||||
print(f"Directory {REPO_DIR} is not empty. Aborting clone.")
|
||||
return
|
||||
else:
|
||||
REPO_DIR.mkdir(parents=True, exist_ok=True) # Create the directory if it doesn't exist
|
||||
REPO_DIR.mkdir(
|
||||
parents=True, exist_ok=True
|
||||
) # Create the directory if it doesn't exist
|
||||
|
||||
# Clone the repository
|
||||
subprocess.run(["git", "clone", REPO_URL, str(REPO_DIR)], check=True)
|
||||
print(f"Repository cloned into {REPO_DIR}")
|
||||
|
||||
|
||||
def pull_repo():
|
||||
REPO_DIR = lollmsElfServer.lollms_paths.personal_path/"apps_zoo_repo"
|
||||
REPO_DIR = lollmsElfServer.lollms_paths.personal_path / "apps_zoo_repo"
|
||||
subprocess.run(["git", "-C", str(REPO_DIR), "pull"], check=True)
|
||||
|
||||
|
||||
def load_apps_data():
|
||||
apps = []
|
||||
REPO_DIR = lollmsElfServer.lollms_paths.personal_path/"apps_zoo_repo"
|
||||
REPO_DIR = lollmsElfServer.lollms_paths.personal_path / "apps_zoo_repo"
|
||||
for item in os.listdir(REPO_DIR):
|
||||
item_path = os.path.join(REPO_DIR, item)
|
||||
if os.path.isdir(item_path):
|
||||
description_path = os.path.join(item_path, "description.yaml")
|
||||
icon_url = f"https://github.com/ParisNeo/lollms_apps_zoo/blob/main/{item}/icon.png?raw=true"
|
||||
|
||||
|
||||
if os.path.exists(description_path):
|
||||
with open(description_path, 'r') as file:
|
||||
with open(description_path, "r") as file:
|
||||
description_data = yaml.safe_load(file)
|
||||
apps.append(AppInfo(
|
||||
uid=str(uuid.uuid4()),
|
||||
name=description_data.get("name",item),
|
||||
folder_name=item,
|
||||
icon=icon_url,
|
||||
category=description_data.get('category', 'generic'),
|
||||
description=description_data.get('description', ''),
|
||||
author=description_data.get('author', ''),
|
||||
version=description_data.get('version', ''),
|
||||
creation_date=description_data.get('creation_date', 'unknown'),
|
||||
last_update_date=description_data.get('last_update_date', 'unknown'),
|
||||
model_name=description_data.get('model_name', ''),
|
||||
disclaimer=description_data.get('disclaimer', 'No disclaimer provided.'),
|
||||
has_server=description_data.get('has_server', (Path(item_path)/"server.py").exists()),
|
||||
has_readme=description_data.get('has_readme', (Path(item_path)/"README.md").exists()),
|
||||
is_public=True,
|
||||
has_update=False,
|
||||
installed=True
|
||||
))
|
||||
apps.append(
|
||||
AppInfo(
|
||||
uid=str(uuid.uuid4()),
|
||||
name=description_data.get("name", item),
|
||||
folder_name=item,
|
||||
icon=icon_url,
|
||||
category=description_data.get("category", "generic"),
|
||||
description=description_data.get("description", ""),
|
||||
author=description_data.get("author", ""),
|
||||
version=description_data.get("version", ""),
|
||||
creation_date=description_data.get(
|
||||
"creation_date", "unknown"
|
||||
),
|
||||
last_update_date=description_data.get(
|
||||
"last_update_date", "unknown"
|
||||
),
|
||||
model_name=description_data.get("model_name", ""),
|
||||
disclaimer=description_data.get(
|
||||
"disclaimer", "No disclaimer provided."
|
||||
),
|
||||
has_server=description_data.get(
|
||||
"has_server", (Path(item_path) / "server.py").exists()
|
||||
),
|
||||
has_readme=description_data.get(
|
||||
"has_readme", (Path(item_path) / "README.md").exists()
|
||||
),
|
||||
is_public=True,
|
||||
has_update=False,
|
||||
installed=True,
|
||||
)
|
||||
)
|
||||
return apps
|
||||
|
||||
|
||||
@ -496,7 +546,9 @@ async def lollms_assets(asset_type: str, file_name: str):
|
||||
# Construct the full file path
|
||||
file_path = directory / f"{safe_file_name}{file_extension}"
|
||||
file_path_with_entension = directory / f"{safe_file_name}"
|
||||
if file_path_with_entension.is_file() and file_path_with_entension.is_relative_to(directory):
|
||||
if file_path_with_entension.is_file() and file_path_with_entension.is_relative_to(
|
||||
directory
|
||||
):
|
||||
file_path = file_path_with_entension
|
||||
|
||||
# Check if the file exists and is within the allowed directory
|
||||
@ -505,12 +557,13 @@ async def lollms_assets(asset_type: str, file_name: str):
|
||||
|
||||
# Read and return the file content with the appropriate content type
|
||||
try:
|
||||
with file_path.open('r') as file:
|
||||
with file_path.open("r") as file:
|
||||
content = file.read()
|
||||
return Response(content=content, media_type=content_type)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error reading file: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/template")
|
||||
async def lollms_js():
|
||||
return {
|
||||
@ -523,9 +576,10 @@ async def lollms_js():
|
||||
"start_ai_header_id_template": lollmsElfServer.config.start_ai_header_id_template,
|
||||
"end_ai_header_id_template": lollmsElfServer.config.end_ai_header_id_template,
|
||||
"end_ai_message_id_template": lollmsElfServer.config.end_ai_message_id_template,
|
||||
"system_message_template": lollmsElfServer.config.system_message_template
|
||||
"system_message_template": lollmsElfServer.config.system_message_template,
|
||||
}
|
||||
|
||||
|
||||
@router.get("/github/apps")
|
||||
async def fetch_github_apps():
|
||||
try:
|
||||
@ -533,7 +587,9 @@ async def fetch_github_apps():
|
||||
pull_repo()
|
||||
except:
|
||||
ASCIIColors.error("Couldn't interact with ")
|
||||
lollmsElfServer.error("Couldn't interact with github.\nPlease verify your internet connection")
|
||||
lollmsElfServer.error(
|
||||
"Couldn't interact with github.\nPlease verify your internet connection"
|
||||
)
|
||||
apps = load_apps_data()
|
||||
return {"apps": apps}
|
||||
|
||||
@ -542,13 +598,16 @@ def install_requirements(app_path: Path):
|
||||
requirements_file = app_path / "requirements.txt"
|
||||
if requirements_file.exists():
|
||||
try:
|
||||
subprocess.check_call([sys.executable, "-m", "pip", "install", "-r", str(requirements_file)])
|
||||
subprocess.check_call(
|
||||
[sys.executable, "-m", "pip", "install", "-r", str(requirements_file)]
|
||||
)
|
||||
print("Requirements installed successfully.")
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(f"Error installing requirements: {e}")
|
||||
raise
|
||||
|
||||
def run_server(app_path: Path):
|
||||
|
||||
def run_server(app_path: Path):
|
||||
server_script = app_path / "server.py"
|
||||
if server_script.exists():
|
||||
try:
|
||||
@ -558,11 +617,29 @@ def run_server(app_path: Path):
|
||||
# Determine the platform and open a terminal to execute the Python code.
|
||||
system = platform.system()
|
||||
if system == "Windows":
|
||||
process = subprocess.Popen(f"""start cmd /k "cd /d "{app_path}" && python "{server_script}" && pause" """, shell=True)
|
||||
process = subprocess.Popen(
|
||||
f"""start cmd /k "cd /d "{app_path}" && python "{server_script}" && pause" """,
|
||||
shell=True,
|
||||
)
|
||||
elif system == "Darwin": # macOS
|
||||
process = subprocess.Popen(["open", "-a", "Terminal", f'cd "{app_path}" && python "{server_script}"'], shell=True)
|
||||
process = subprocess.Popen(
|
||||
[
|
||||
"open",
|
||||
"-a",
|
||||
"Terminal",
|
||||
f'cd "{app_path}" && python "{server_script}"',
|
||||
],
|
||||
shell=True,
|
||||
)
|
||||
elif system == "Linux":
|
||||
process = subprocess.Popen(["x-terminal-emulator", "-e", f'bash -c "cd \\"{app_path}\\" && python \\"{server_script}\\"; exec bash"'], shell=True)
|
||||
process = subprocess.Popen(
|
||||
[
|
||||
"x-terminal-emulator",
|
||||
"-e",
|
||||
f'bash -c "cd \\"{app_path}\\" && python \\"{server_script}\\"; exec bash"',
|
||||
],
|
||||
shell=True,
|
||||
)
|
||||
else:
|
||||
raise Exception(f"Unsupported platform: {system}")
|
||||
|
||||
@ -572,20 +649,23 @@ def run_server(app_path: Path):
|
||||
else:
|
||||
ASCIIColors.error(f"Server script not found for app: {app_path.name}")
|
||||
|
||||
|
||||
@router.post("/apps/start_server")
|
||||
async def start_app_server(request: OpenFolderRequest):
|
||||
check_access(lollmsElfServer, request.client_id)
|
||||
app_name = sanitize_path(request.app_name)
|
||||
app_path = lollmsElfServer.lollms_paths.apps_zoo_path / app_name
|
||||
|
||||
|
||||
if not app_path.exists():
|
||||
raise HTTPException(status_code=404, detail="App not found")
|
||||
|
||||
|
||||
server_script = app_path / "server.py"
|
||||
if not server_script.exists():
|
||||
raise HTTPException(status_code=404, detail="Server script not found for this app")
|
||||
|
||||
raise HTTPException(
|
||||
status_code=404, detail="Server script not found for this app"
|
||||
)
|
||||
|
||||
# Start the server in the background
|
||||
run_server(app_path)
|
||||
|
||||
|
||||
return {"status": "success", "message": f"Server for {app_path} is starting"}
|
||||
|
@ -7,29 +7,36 @@ description:
|
||||
application. These routes allow users to do advanced stuff like executing code.
|
||||
|
||||
"""
|
||||
from fastapi import APIRouter, Request, HTTPException
|
||||
from fastapi.responses import FileResponse
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
from pydantic import BaseModel, Field
|
||||
from starlette.responses import StreamingResponse
|
||||
from lollms.types import MSG_OPERATION_TYPE
|
||||
from lollms.main_config import BaseConfig
|
||||
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, show_yes_no_dialog, add_period
|
||||
from lollms.security import sanitize_path, forbid_remote_access, check_access, sanitize_svg, sanitize_path_from_endpoint
|
||||
from ascii_colors import ASCIIColors
|
||||
from lollms.databases.discussions_database import DiscussionsDB
|
||||
from lollms.client_session import Client
|
||||
from pathlib import Path
|
||||
import tqdm
|
||||
from fastapi import FastAPI, UploadFile, File
|
||||
import shutil
|
||||
|
||||
import os
|
||||
import platform
|
||||
import string
|
||||
import re
|
||||
import subprocess
|
||||
import shutil
|
||||
import string
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import tqdm
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import (APIRouter, FastAPI, File, HTTPException, Request,
|
||||
UploadFile)
|
||||
from fastapi.responses import FileResponse
|
||||
from lollms.client_session import Client
|
||||
from lollms.databases.discussions_database import DiscussionsDB
|
||||
from lollms.main_config import BaseConfig
|
||||
from lollms.security import (check_access, forbid_remote_access, sanitize_path,
|
||||
sanitize_path_from_endpoint, sanitize_svg)
|
||||
from lollms.types import MSG_OPERATION_TYPE
|
||||
from lollms.utilities import (add_period, detect_antiprompt,
|
||||
remove_text_from_string, show_yes_no_dialog,
|
||||
trace_exception)
|
||||
from pydantic import BaseModel, Field
|
||||
from starlette.responses import StreamingResponse
|
||||
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
|
||||
def validate_file_path(path):
|
||||
try:
|
||||
sanitized_path = sanitize_path(path, allow_absolute_path=False)
|
||||
@ -39,8 +46,7 @@ def validate_file_path(path):
|
||||
return False
|
||||
|
||||
|
||||
|
||||
# ----------------------- Defining router and main class ------------------------------
|
||||
|
||||
router = APIRouter()
|
||||
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
|
@ -7,25 +7,28 @@ description:
|
||||
application. These routes allow users to manipulate the message elements.
|
||||
|
||||
"""
|
||||
from fastapi import APIRouter, Body, Request
|
||||
from pydantic import Field
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
from pydantic import BaseModel
|
||||
from starlette.responses import StreamingResponse
|
||||
from lollms.types import MSG_OPERATION_TYPE
|
||||
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
|
||||
|
||||
import json
|
||||
from typing import Any, Optional
|
||||
|
||||
import tqdm
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import APIRouter, Body, Request
|
||||
from lollms.databases.discussions_database import DiscussionsDB
|
||||
from lollms.security import forbid_remote_access
|
||||
import tqdm
|
||||
from typing import Any, Optional
|
||||
from pydantic import BaseModel, ValidationError
|
||||
import json
|
||||
from lollms.types import MSG_OPERATION_TYPE
|
||||
from lollms.utilities import (detect_antiprompt, remove_text_from_string,
|
||||
trace_exception)
|
||||
from pydantic import BaseModel, Field, ValidationError
|
||||
from starlette.responses import StreamingResponse
|
||||
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
# ----------------------- Defining router and main class ------------------------------
|
||||
|
||||
router = APIRouter()
|
||||
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
|
||||
|
||||
class EditMessageParameters(BaseModel):
|
||||
client_id: str = Field(..., min_length=1)
|
||||
@ -33,26 +36,29 @@ class EditMessageParameters(BaseModel):
|
||||
message: str = Field(...)
|
||||
metadata: list = Field(default=[])
|
||||
|
||||
|
||||
@router.post("/edit_message")
|
||||
async def edit_message(edit_params: EditMessageParameters):
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
client_id = edit_params.client_id
|
||||
message_id = edit_params.id
|
||||
new_message = edit_params.message
|
||||
metadata = json.dumps(edit_params.metadata,indent=4)
|
||||
metadata = json.dumps(edit_params.metadata, indent=4)
|
||||
try:
|
||||
lollmsElfServer.session.get_client(client_id).discussion.edit_message(message_id, new_message, new_metadata=metadata)
|
||||
lollmsElfServer.session.get_client(client_id).discussion.edit_message(
|
||||
message_id, new_message, new_metadata=metadata
|
||||
)
|
||||
return {"status": True}
|
||||
except Exception as ex:
|
||||
trace_exception(ex) # Assuming 'trace_exception' function logs the error
|
||||
return {"status": False, "error": "There was an error editing the message"}
|
||||
|
||||
|
||||
|
||||
class MessageRankParameters(BaseModel):
|
||||
client_id: str = Field(..., min_length=1)
|
||||
id: int = Field(...)
|
||||
|
||||
|
||||
@router.post("/message_rank_up")
|
||||
async def message_rank_up(rank_params: MessageRankParameters):
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
@ -60,7 +66,9 @@ async def message_rank_up(rank_params: MessageRankParameters):
|
||||
message_id = rank_params.id
|
||||
|
||||
try:
|
||||
new_rank = lollmsElfServer.session.get_client(client_id).discussion.message_rank_up(message_id)
|
||||
new_rank = lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.message_rank_up(message_id)
|
||||
return {"status": True, "new_rank": new_rank}
|
||||
except Exception as ex:
|
||||
trace_exception(ex) # Assuming 'trace_exception' function logs the error
|
||||
@ -73,15 +81,19 @@ def message_rank_down(rank_params: MessageRankParameters):
|
||||
client_id = rank_params.client_id
|
||||
message_id = rank_params.id
|
||||
try:
|
||||
new_rank = lollmsElfServer.session.get_client(client_id).discussion.message_rank_down(message_id)
|
||||
new_rank = lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.message_rank_down(message_id)
|
||||
return {"status": True, "new_rank": new_rank}
|
||||
except Exception as ex:
|
||||
return {"status": False, "error":str(ex)}
|
||||
return {"status": False, "error": str(ex)}
|
||||
|
||||
|
||||
class MessageDeleteParameters(BaseModel):
|
||||
client_id: str = Field(..., min_length=1)
|
||||
id: int = Field(...)
|
||||
|
||||
|
||||
@router.post("/delete_message")
|
||||
async def delete_message(delete_params: MessageDeleteParameters):
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
@ -89,14 +101,14 @@ async def delete_message(delete_params: MessageDeleteParameters):
|
||||
message_id = delete_params.id
|
||||
|
||||
if lollmsElfServer.session.get_client(client_id).discussion is None:
|
||||
return {"status": False,"message":"No discussion is selected"}
|
||||
return {"status": False, "message": "No discussion is selected"}
|
||||
else:
|
||||
try:
|
||||
new_rank = lollmsElfServer.session.get_client(client_id).discussion.delete_message(message_id)
|
||||
new_rank = lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.delete_message(message_id)
|
||||
ASCIIColors.yellow("Message deleted")
|
||||
return {"status":True,"new_rank": new_rank}
|
||||
return {"status": True, "new_rank": new_rank}
|
||||
except Exception as ex:
|
||||
trace_exception(ex) # Assuming 'trace_exception' function logs the error
|
||||
return {"status": False, "error": "There was an error deleting the message"}
|
||||
|
||||
|
||||
|
@ -7,33 +7,40 @@ description:
|
||||
application. These routes allow users to
|
||||
|
||||
"""
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi import HTTPException
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
from pydantic import BaseModel, Field
|
||||
from starlette.responses import StreamingResponse
|
||||
from lollms.types import MSG_OPERATION_TYPE
|
||||
from lollms.main_config import BaseConfig
|
||||
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
|
||||
from lollms.security import sanitize_path_from_endpoint, validate_path, forbid_remote_access, check_access
|
||||
from pathlib import Path
|
||||
from ascii_colors import ASCIIColors
|
||||
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import yaml, json
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import APIRouter, HTTPException, Request
|
||||
from lollms.main_config import BaseConfig
|
||||
from lollms.security import (check_access, forbid_remote_access,
|
||||
sanitize_path_from_endpoint, validate_path)
|
||||
from lollms.types import MSG_OPERATION_TYPE
|
||||
from lollms.utilities import (PackageManager, add_period, detect_antiprompt,
|
||||
find_first_available_file_index,
|
||||
remove_text_from_string, trace_exception)
|
||||
from pydantic import BaseModel, Field
|
||||
from starlette.responses import StreamingResponse
|
||||
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
# ----------------------- Defining router and main class ------------------------------
|
||||
|
||||
router = APIRouter()
|
||||
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
|
||||
|
||||
# ----------------------- voice ------------------------------
|
||||
@router.get("/get_presets")
|
||||
def get_presets():
|
||||
presets = []
|
||||
presets_folder = Path("__file__").parent/"presets"
|
||||
for filename in presets_folder.glob('*.yaml'):
|
||||
with open(filename, 'r', encoding='utf-8') as file:
|
||||
presets_folder = Path("__file__").parent / "presets"
|
||||
for filename in presets_folder.glob("*.yaml"):
|
||||
with open(filename, "r", encoding="utf-8") as file:
|
||||
try:
|
||||
print(filename)
|
||||
preset = yaml.safe_load(file)
|
||||
@ -41,19 +48,24 @@ def get_presets():
|
||||
presets.append(preset)
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
presets_folder = lollmsElfServer.lollms_paths.personal_discussions_path/"lollms_playground_presets"
|
||||
presets_folder = (
|
||||
lollmsElfServer.lollms_paths.personal_discussions_path
|
||||
/ "lollms_playground_presets"
|
||||
)
|
||||
presets_folder.mkdir(exist_ok=True, parents=True)
|
||||
for filename in presets_folder.glob('*.yaml'):
|
||||
with open(filename, 'r', encoding='utf-8') as file:
|
||||
for filename in presets_folder.glob("*.yaml"):
|
||||
with open(filename, "r", encoding="utf-8") as file:
|
||||
preset = yaml.safe_load(file)
|
||||
if preset is not None:
|
||||
presets.append(preset)
|
||||
return presets
|
||||
|
||||
|
||||
class PresetData(BaseModel):
|
||||
client_id: str
|
||||
name: str = Field(..., min_length=1)
|
||||
|
||||
|
||||
@router.post("/add_preset")
|
||||
async def add_preset(preset_data: PresetData):
|
||||
"""
|
||||
@ -66,22 +78,28 @@ async def add_preset(preset_data: PresetData):
|
||||
check_access(lollmsElfServer, preset_data.client_id)
|
||||
try:
|
||||
|
||||
presets_folder = lollmsElfServer.lollms_paths.personal_discussions_path/"lollms_playground_presets"
|
||||
presets_folder = (
|
||||
lollmsElfServer.lollms_paths.personal_discussions_path
|
||||
/ "lollms_playground_presets"
|
||||
)
|
||||
if not presets_folder.exists():
|
||||
presets_folder.mkdir(exist_ok=True, parents=True)
|
||||
|
||||
# Ensure the name doesn't contain any path manipulation characters
|
||||
sanitize_path_from_endpoint(preset_data.name,exception_text="Invalid preset name")
|
||||
sanitize_path_from_endpoint(
|
||||
preset_data.name, exception_text="Invalid preset name"
|
||||
)
|
||||
|
||||
fn = preset_data.name.lower().replace(" ","_")
|
||||
filename = presets_folder/f"{fn}.yaml"
|
||||
with open(filename, 'w', encoding='utf-8') as file:
|
||||
fn = preset_data.name.lower().replace(" ", "_")
|
||||
filename = presets_folder / f"{fn}.yaml"
|
||||
with open(filename, "w", encoding="utf-8") as file:
|
||||
yaml.dump(preset_data, file)
|
||||
return {"status": True}
|
||||
except Exception as ex:
|
||||
trace_exception(ex) # Assuming 'trace_exception' function logs the error
|
||||
return {"status": False, "error": "There was an error adding the preset"}
|
||||
|
||||
|
||||
@router.post("/del_preset")
|
||||
async def del_preset(preset_data: PresetData):
|
||||
"""
|
||||
@ -94,24 +112,31 @@ async def del_preset(preset_data: PresetData):
|
||||
check_access(lollmsElfServer, preset_data.client_id)
|
||||
# Get the JSON data from the POST request.
|
||||
if preset_data.name is None:
|
||||
raise HTTPException(status_code=400, detail="Preset name is missing in the request")
|
||||
|
||||
# Ensure the name doesn't contain any path manipulation characters
|
||||
sanitize_path_from_endpoint(preset_data.name,exception_text="Invalid preset name")
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Preset name is missing in the request"
|
||||
)
|
||||
|
||||
presets_file = lollmsElfServer.lollms_paths.personal_discussions_path/"lollms_playground_presets"/preset_data.name
|
||||
# Ensure the name doesn't contain any path manipulation characters
|
||||
sanitize_path_from_endpoint(preset_data.name, exception_text="Invalid preset name")
|
||||
|
||||
presets_file = (
|
||||
lollmsElfServer.lollms_paths.personal_discussions_path
|
||||
/ "lollms_playground_presets"
|
||||
/ preset_data.name
|
||||
)
|
||||
try:
|
||||
presets_file.unlink()
|
||||
return {"status":True}
|
||||
return {"status": True}
|
||||
except:
|
||||
return {"status":False}
|
||||
return {"status": False}
|
||||
|
||||
|
||||
class PresetDataWithValue(BaseModel):
|
||||
client_id: str
|
||||
name: str = Field(..., min_length=1)
|
||||
preset: str
|
||||
|
||||
|
||||
|
||||
@router.post("/save_presets")
|
||||
async def save_presets(preset_data: PresetDataWithValue):
|
||||
"""
|
||||
@ -124,15 +149,18 @@ async def save_presets(preset_data: PresetDataWithValue):
|
||||
check_access(lollmsElfServer, preset_data.client_id)
|
||||
# Get the JSON data from the POST request.
|
||||
if preset_data.preset is None:
|
||||
raise HTTPException(status_code=400, detail="Preset data is missing in the request")
|
||||
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Preset data is missing in the request"
|
||||
)
|
||||
|
||||
# Ensure the name doesn't contain any path manipulation characters
|
||||
sanitize_path_from_endpoint(preset_data.name,exception_text="Invalid preset name")
|
||||
sanitize_path_from_endpoint(preset_data.name, exception_text="Invalid preset name")
|
||||
|
||||
presets_file = lollmsElfServer.lollms_paths.personal_discussions_path/"presets.json"
|
||||
presets_file = (
|
||||
lollmsElfServer.lollms_paths.personal_discussions_path / "presets.json"
|
||||
)
|
||||
# Save the JSON data to a file.
|
||||
with open(presets_file, "w") as f:
|
||||
json.dump(preset_data.preset, f, indent=4)
|
||||
|
||||
return {"status":True,"message":"Preset saved successfully!"}
|
||||
return {"status": True, "message": "Preset saved successfully!"}
|
||||
|
@ -9,67 +9,72 @@ description:
|
||||
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Request
|
||||
from pydantic import BaseModel, Field
|
||||
import pkg_resources
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
from ascii_colors import ASCIIColors
|
||||
from lollms.utilities import load_config, run_async, show_yes_no_dialog
|
||||
from lollms.security import sanitize_path, forbid_remote_access, check_access
|
||||
import sys
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
import sys
|
||||
|
||||
import pkg_resources
|
||||
import socketio
|
||||
import time
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import APIRouter, Request
|
||||
from lollms.security import check_access, forbid_remote_access, sanitize_path
|
||||
from lollms.utilities import load_config, run_async, show_yes_no_dialog
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
# ----------------------- Defining router and main class ------------------------------
|
||||
|
||||
# Create an instance of the LoLLMSWebUI class
|
||||
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
class LastViewedVideoUrlRequest(BaseModel):
|
||||
client_id: str = Field(...)
|
||||
client_id: str = Field(...)
|
||||
last_viewed_video_url: str = Field(..., description="Last viewed model")
|
||||
|
||||
|
||||
@router.get("/get_versionID")
|
||||
async def get_lollms_version():
|
||||
"""Get the version of the LoLLMs Web UI application."""
|
||||
# Return the version string
|
||||
return {"id":9}
|
||||
"""Get the version of the LoLLMs Web UI application."""
|
||||
# Return the version string
|
||||
return {"id": 9}
|
||||
|
||||
|
||||
@router.get("/get_changeLog")
|
||||
async def get_lollms_version():
|
||||
"""Get the changelog."""
|
||||
# Return the version string
|
||||
with open("CHANGELOG.md","r",encoding="utf8") as f:
|
||||
infos = f.read()
|
||||
return infos
|
||||
"""Get the changelog."""
|
||||
# Return the version string
|
||||
with open("CHANGELOG.md", "r", encoding="utf8") as f:
|
||||
infos = f.read()
|
||||
return infos
|
||||
|
||||
|
||||
@router.get("/get_news")
|
||||
async def get_lollms_version():
|
||||
"""Get the changelog."""
|
||||
base_path = Path(__file__).parent
|
||||
infos = base_path/"news"/"current.html"
|
||||
infos = base_path / "news" / "current.html"
|
||||
return infos.read_text(encoding="utf8")
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
@router.get("/get_last_video_url")
|
||||
async def get_last_video_url():
|
||||
"""Get the URL and type of the last video."""
|
||||
base_path = Path(__file__).parent
|
||||
info_file = base_path / "news" / "latest_video.json"
|
||||
|
||||
|
||||
try:
|
||||
with open(info_file, 'r', encoding='utf-8') as file:
|
||||
with open(info_file, "r", encoding="utf-8") as file:
|
||||
video_info = json.load(file)
|
||||
|
||||
return {
|
||||
"url": video_info["url"],
|
||||
"type": video_info["type"]
|
||||
}
|
||||
|
||||
return {"url": video_info["url"], "type": video_info["type"]}
|
||||
except FileNotFoundError:
|
||||
return {"error": "Video information not found"}
|
||||
except json.JSONDecodeError:
|
||||
@ -86,50 +91,66 @@ async def get_last_video_url():
|
||||
|
||||
|
||||
@router.post("/set_last_viewed_video_url")
|
||||
async def set_last_video_url(req:LastViewedVideoUrlRequest):
|
||||
async def set_last_video_url(req: LastViewedVideoUrlRequest):
|
||||
"""Get the URL of the last video."""
|
||||
# This is a static URL for demonstration purposes
|
||||
check_access(lollmsElfServer,req.client_id)
|
||||
check_access(lollmsElfServer, req.client_id)
|
||||
lollmsElfServer.config.last_viewed_video = req.last_viewed_video_url
|
||||
lollmsElfServer.config.save_config()
|
||||
|
||||
|
||||
@router.get("/get_themes")
|
||||
async def get_themes():
|
||||
"""Get the list of available themes."""
|
||||
base_path = Path(__file__).parent.parent
|
||||
themes_path = base_path / "web" / "dist" / "themes"
|
||||
|
||||
|
||||
# Get all .css files in the themes directory
|
||||
theme_files = list(themes_path.glob('*.css'))
|
||||
|
||||
theme_files = list(themes_path.glob("*.css"))
|
||||
|
||||
# Remove the .css extension from each file name
|
||||
themes = [theme_file.stem for theme_file in theme_files]
|
||||
|
||||
|
||||
return themes
|
||||
|
||||
|
||||
@router.get("/get_lollms_webui_version")
|
||||
async def get_lollms_webui_version():
|
||||
"""Get the version of the LoLLMs Web UI application."""
|
||||
# Return the version string
|
||||
return lollmsElfServer.version
|
||||
"""Get the version of the LoLLMs Web UI application."""
|
||||
# Return the version string
|
||||
return lollmsElfServer.version
|
||||
|
||||
|
||||
class Identification(BaseModel):
|
||||
client_id:str
|
||||
client_id: str
|
||||
|
||||
|
||||
@router.post("/restart_program")
|
||||
async def restart_program(data:Identification):
|
||||
async def restart_program(data: Identification):
|
||||
check_access(lollmsElfServer, data.client_id)
|
||||
"""Restart the program."""
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Restarting app is blocked when in headless mode for obvious security reasons!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Restarting app is blocked when in headless mode for obvious security reasons!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
|
||||
return {"status":False,"error":"Restarting app is blocked when the server is exposed outside for very obvious reasons!"}
|
||||
if (
|
||||
lollmsElfServer.config.host != "localhost"
|
||||
and lollmsElfServer.config.host != "127.0.0.1"
|
||||
):
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Restarting app is blocked when the server is exposed outside for very obvious reasons!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.turn_on_setting_update_validation:
|
||||
if not show_yes_no_dialog("Validation","Reboot requested from client\nDo you validate rebooting the app?"):
|
||||
return {"status":False,"error":"User refused the execution!"}
|
||||
if not show_yes_no_dialog(
|
||||
"Validation",
|
||||
"Reboot requested from client\nDo you validate rebooting the app?",
|
||||
):
|
||||
return {"status": False, "error": "User refused the execution!"}
|
||||
|
||||
lollmsElfServer.ShowBlockingMessage("Restarting program.\nPlease stand by...")
|
||||
# Stop the socketIO server
|
||||
@ -148,22 +169,35 @@ async def restart_program(data:Identification):
|
||||
ASCIIColors.info("")
|
||||
ASCIIColors.info("")
|
||||
lollmsElfServer.run_restart_script(lollmsElfServer.args)
|
||||
|
||||
|
||||
|
||||
@router.post("/update_software")
|
||||
async def update_software(data:Identification):
|
||||
async def update_software(data: Identification):
|
||||
check_access(lollmsElfServer, data.client_id)
|
||||
"""Update the software."""
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Updating app is blocked when in headless mode for obvious security reasons!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Updating app is blocked when in headless mode for obvious security reasons!",
|
||||
}
|
||||
|
||||
if (
|
||||
lollmsElfServer.config.host != "localhost"
|
||||
and lollmsElfServer.config.host != "127.0.0.1"
|
||||
):
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Updating app is blocked when the server is exposed outside for very obvious reasons!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
|
||||
return {"status":False,"error":"Updating app is blocked when the server is exposed outside for very obvious reasons!"}
|
||||
|
||||
if lollmsElfServer.config.turn_on_setting_update_validation:
|
||||
if not show_yes_no_dialog("Validation","App upgrade requested from client\nDo you validate rebooting the app?"):
|
||||
return {"status":False,"error":"User refused the execution!"}
|
||||
|
||||
if not show_yes_no_dialog(
|
||||
"Validation",
|
||||
"App upgrade requested from client\nDo you validate rebooting the app?",
|
||||
):
|
||||
return {"status": False, "error": "User refused the execution!"}
|
||||
|
||||
# Display an informative message
|
||||
ASCIIColors.info("")
|
||||
ASCIIColors.info("")
|
||||
@ -177,7 +211,7 @@ async def update_software(data:Identification):
|
||||
# Stop the socketIO server
|
||||
await lollmsElfServer.sio.shutdown()
|
||||
# Sleep for 1 second before rebooting
|
||||
time.sleep(1)
|
||||
time.sleep(1)
|
||||
|
||||
# Run the update script using the provided arguments
|
||||
lollmsElfServer.run_update_script(lollmsElfServer.args)
|
||||
@ -190,14 +224,22 @@ def check_update():
|
||||
"""Checks if an update is available"""
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Checking updates is blocked when in headless mode for obvious security reasons!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Checking updates is blocked when in headless mode for obvious security reasons!",
|
||||
}
|
||||
|
||||
if (
|
||||
lollmsElfServer.config.host != "localhost"
|
||||
and lollmsElfServer.config.host != "127.0.0.1"
|
||||
):
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Checking updates is blocked when the server is exposed outside for very obvious reasons!",
|
||||
}
|
||||
|
||||
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
|
||||
return {"status":False,"error":"Checking updates is blocked when the server is exposed outside for very obvious reasons!"}
|
||||
|
||||
if lollmsElfServer.config.auto_update:
|
||||
res = lollmsElfServer.check_update_()
|
||||
return {'update_availability':res}
|
||||
return {"update_availability": res}
|
||||
else:
|
||||
return {'update_availability':False}
|
||||
|
||||
return {"update_availability": False}
|
||||
|
@ -7,50 +7,59 @@ description:
|
||||
application. These routes are specific to chatbox operation
|
||||
|
||||
"""
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi import HTTPException
|
||||
from pydantic import BaseModel
|
||||
import pkg_resources
|
||||
from lollms.server.elf_server import LOLLMSElfServer
|
||||
from fastapi.responses import FileResponse
|
||||
from lollms.binding import BindingBuilder, InstallOption
|
||||
from ascii_colors import ASCIIColors
|
||||
from lollms.personality import AIPersonality
|
||||
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
|
||||
from lollms.utilities import load_config, trace_exception, gc
|
||||
from lollms.utilities import find_first_available_file_index, convert_language_name, PackageManager, run_async
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
from functools import partial
|
||||
import socketio
|
||||
import threading
|
||||
import os
|
||||
import time
|
||||
|
||||
from lollms.internet import scrape_and_save
|
||||
import pkg_resources
|
||||
import socketio
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import APIRouter, HTTPException, Request
|
||||
from fastapi.responses import FileResponse
|
||||
from lollms.binding import BindingBuilder, InstallOption
|
||||
from lollms.databases.discussions_database import Discussion
|
||||
from lollms.internet import scrape_and_save
|
||||
from lollms.personality import AIPersonality
|
||||
from lollms.security import forbid_remote_access
|
||||
from datetime import datetime
|
||||
from lollms.server.elf_server import LOLLMSElfServer
|
||||
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
|
||||
from lollms.utilities import (PackageManager, convert_language_name,
|
||||
find_first_available_file_index, gc, load_config,
|
||||
run_async, trace_exception)
|
||||
from pydantic import BaseModel
|
||||
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
router = APIRouter()
|
||||
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
|
||||
|
||||
# ----------------------------------- events -----------------------------------------
|
||||
def add_events(sio:socketio):
|
||||
def add_events(sio: socketio):
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
@sio.on('create_empty_message')
|
||||
|
||||
@sio.on("create_empty_message")
|
||||
def create_empty_message(sid, data):
|
||||
client_id = sid
|
||||
type = int(data.get("type",0))
|
||||
message = data.get("message","")
|
||||
if type==0:
|
||||
type = int(data.get("type", 0))
|
||||
message = data.get("message", "")
|
||||
if type == 0:
|
||||
ASCIIColors.info(f"Building empty User message requested by : {client_id}")
|
||||
# send the message to the bot
|
||||
print(f"Creating an empty message for AI answer orientation")
|
||||
if lollmsElfServer.session.get_client(client_id).discussion:
|
||||
lollmsElfServer.new_message(client_id, lollmsElfServer.config.user_name, message, sender_type=SENDER_TYPES.SENDER_TYPES_USER, open=True)
|
||||
lollmsElfServer.new_message(
|
||||
client_id,
|
||||
lollmsElfServer.config.user_name,
|
||||
message,
|
||||
sender_type=SENDER_TYPES.SENDER_TYPES_USER,
|
||||
open=True,
|
||||
)
|
||||
else:
|
||||
if lollmsElfServer.personality is None:
|
||||
lollmsElfServer.warning("Select a personality")
|
||||
@ -59,34 +68,58 @@ def add_events(sio:socketio):
|
||||
# send the message to the bot
|
||||
print(f"Creating an empty message for AI answer orientation")
|
||||
if lollmsElfServer.session.get_client(client_id).discussion:
|
||||
lollmsElfServer.new_message(client_id, lollmsElfServer.personality.name, "[edit this to put your ai answer start]", open=True)
|
||||
lollmsElfServer.new_message(
|
||||
client_id,
|
||||
lollmsElfServer.personality.name,
|
||||
"[edit this to put your ai answer start]",
|
||||
open=True,
|
||||
)
|
||||
|
||||
|
||||
@sio.on('add_webpage')
|
||||
@sio.on("add_webpage")
|
||||
def add_webpage(sid, data):
|
||||
lollmsElfServer.ShowBlockingMessage("Scraping web page\nPlease wait...")
|
||||
ASCIIColors.yellow("Scaping web page")
|
||||
client = lollmsElfServer.session.get_client(sid)
|
||||
url = data['url']
|
||||
index = find_first_available_file_index(lollmsElfServer.lollms_paths.personal_uploads_path,"web_",".txt")
|
||||
file_path=lollmsElfServer.lollms_paths.personal_uploads_path/f"web_{index}.txt"
|
||||
url = data["url"]
|
||||
index = find_first_available_file_index(
|
||||
lollmsElfServer.lollms_paths.personal_uploads_path, "web_", ".txt"
|
||||
)
|
||||
file_path = (
|
||||
lollmsElfServer.lollms_paths.personal_uploads_path / f"web_{index}.txt"
|
||||
)
|
||||
scrape_and_save(url=url, file_path=file_path)
|
||||
try:
|
||||
if not lollmsElfServer.personality.processor is None:
|
||||
lollmsElfServer.personality.processor.add_file(file_path, client, partial(lollmsElfServer.process_data, client_id = sid))
|
||||
lollmsElfServer.personality.processor.add_file(
|
||||
file_path,
|
||||
client,
|
||||
partial(lollmsElfServer.process_data, client_id=sid),
|
||||
)
|
||||
# File saved successfully
|
||||
run_async(partial(sio.emit,'web_page_added', {'status':True,}))
|
||||
run_async(
|
||||
partial(
|
||||
sio.emit,
|
||||
"web_page_added",
|
||||
{
|
||||
"status": True,
|
||||
},
|
||||
)
|
||||
)
|
||||
else:
|
||||
lollmsElfServer.personality.add_file(file_path, client, partial(lollmsElfServer.process_data, client_id = sid))
|
||||
lollmsElfServer.personality.add_file(
|
||||
file_path,
|
||||
client,
|
||||
partial(lollmsElfServer.process_data, client_id=sid),
|
||||
)
|
||||
# File saved successfully
|
||||
run_async(partial(sio.emit,'web_page_added', {'status':True}))
|
||||
run_async(partial(sio.emit, "web_page_added", {"status": True}))
|
||||
lollmsElfServer.HideBlockingMessage()
|
||||
except Exception as e:
|
||||
# Error occurred while saving the file
|
||||
run_async(partial(sio.emit,'web_page_added', {'status':False}))
|
||||
run_async(partial(sio.emit, "web_page_added", {"status": False}))
|
||||
lollmsElfServer.HideBlockingMessage()
|
||||
|
||||
@sio.on('take_picture')
|
||||
@sio.on("take_picture")
|
||||
def take_picture(sid):
|
||||
try:
|
||||
client = lollmsElfServer.session.get_client(sid)
|
||||
@ -97,40 +130,66 @@ def add_events(sio:socketio):
|
||||
if not PackageManager.check_package_installed("cv2"):
|
||||
PackageManager.install_package("opencv-python")
|
||||
import cv2
|
||||
|
||||
cap = cv2.VideoCapture(0)
|
||||
n = time.time()
|
||||
lollmsElfServer.info("Stand by for taking a shot in 2s")
|
||||
while(time.time()-n<2):
|
||||
while time.time() - n < 2:
|
||||
_, frame = cap.read()
|
||||
_, frame = cap.read()
|
||||
cap.release()
|
||||
lollmsElfServer.info("Shot taken")
|
||||
cam_shot_path = client.discussion.discussion_images_folder
|
||||
cam_shot_path.mkdir(parents=True, exist_ok=True)
|
||||
filename = find_first_available_file_index(cam_shot_path, "cam_shot_", extension=".png")
|
||||
save_path = cam_shot_path/f"cam_shot_{filename}.png" # Specify the desired folder path
|
||||
filename = find_first_available_file_index(
|
||||
cam_shot_path, "cam_shot_", extension=".png"
|
||||
)
|
||||
save_path = (
|
||||
cam_shot_path / f"cam_shot_{filename}.png"
|
||||
) # Specify the desired folder path
|
||||
|
||||
try:
|
||||
cv2.imwrite(str(save_path), frame)
|
||||
if not lollmsElfServer.personality.processor is None:
|
||||
lollmsElfServer.info("Sending file to scripted persona")
|
||||
client.discussion.add_file(save_path, client, lollmsElfServer.tasks_library, partial(lollmsElfServer.process_data, client_id = sid))
|
||||
client.discussion.add_file(
|
||||
save_path,
|
||||
client,
|
||||
lollmsElfServer.tasks_library,
|
||||
partial(lollmsElfServer.process_data, client_id=sid),
|
||||
)
|
||||
# lollmsElfServer.personality.processor.add_file(save_path, client, partial(lollmsElfServer.process_data, client_id = sid))
|
||||
# File saved successfully
|
||||
run_async(partial(sio.emit,'picture_taken', {'status':True, 'progress': 100}))
|
||||
run_async(
|
||||
partial(
|
||||
sio.emit, "picture_taken", {"status": True, "progress": 100}
|
||||
)
|
||||
)
|
||||
lollmsElfServer.info("File sent to scripted persona")
|
||||
else:
|
||||
lollmsElfServer.info("Sending file to persona")
|
||||
client.discussion.add_file(save_path, client, lollmsElfServer.tasks_library, partial(lollmsElfServer.process_data, client_id = sid))
|
||||
#lollmsElfServer.personality.add_file(save_path, client, partial(lollmsElfServer.process_data, client_id = sid))
|
||||
client.discussion.add_file(
|
||||
save_path,
|
||||
client,
|
||||
lollmsElfServer.tasks_library,
|
||||
partial(lollmsElfServer.process_data, client_id=sid),
|
||||
)
|
||||
# lollmsElfServer.personality.add_file(save_path, client, partial(lollmsElfServer.process_data, client_id = sid))
|
||||
# File saved successfully
|
||||
run_async(partial(sio.emit,'picture_taken', {'status':True, 'progress': 100}))
|
||||
run_async(
|
||||
partial(
|
||||
sio.emit, "picture_taken", {"status": True, "progress": 100}
|
||||
)
|
||||
)
|
||||
lollmsElfServer.info("File sent to persona")
|
||||
except Exception as e:
|
||||
trace_exception(e)
|
||||
# Error occurred while saving the file
|
||||
run_async(partial(sio.emit,'picture_taken', {'status':False, 'error': str(e)}))
|
||||
|
||||
run_async(
|
||||
partial(
|
||||
sio.emit, "picture_taken", {"status": False, "error": str(e)}
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
|
@ -7,37 +7,42 @@ description:
|
||||
application. These routes are specific to discussion operation
|
||||
|
||||
"""
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi import HTTPException
|
||||
from pydantic import BaseModel
|
||||
import pkg_resources
|
||||
from lollms.server.elf_server import LOLLMSElfServer
|
||||
from fastapi.responses import FileResponse
|
||||
from lollms.binding import BindingBuilder, InstallOption
|
||||
from ascii_colors import ASCIIColors
|
||||
from lollms.personality import AIPersonality
|
||||
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
|
||||
from lollms.utilities import load_config, trace_exception, gc
|
||||
from lollms.utilities import find_first_available_file_index, convert_language_name, PackageManager
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import threading
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
import pkg_resources
|
||||
import socketio
|
||||
import threading
|
||||
import os
|
||||
import yaml
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import APIRouter, HTTPException, Request
|
||||
from fastapi.responses import FileResponse
|
||||
from lollms.binding import BindingBuilder, InstallOption
|
||||
from lollms.databases.discussions_database import Discussion
|
||||
from lollms.personality import AIPersonality
|
||||
from lollms.security import forbid_remote_access
|
||||
from datetime import datetime
|
||||
import shutil
|
||||
from lollms.server.elf_server import LOLLMSElfServer
|
||||
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
|
||||
from lollms.utilities import (PackageManager, convert_language_name,
|
||||
find_first_available_file_index, gc, load_config,
|
||||
trace_exception)
|
||||
from pydantic import BaseModel
|
||||
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
router = APIRouter()
|
||||
lollmsElfServer = LOLLMSWebUI.get_instance()
|
||||
|
||||
|
||||
# ----------------------------------- events -----------------------------------------
|
||||
def add_events(sio:socketio):
|
||||
def add_events(sio: socketio):
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
@sio.on('new_discussion')
|
||||
|
||||
@sio.on("new_discussion")
|
||||
async def new_discussion(sid, data):
|
||||
if lollmsElfServer.personality is None:
|
||||
lollmsElfServer.error("Please select a personality first")
|
||||
@ -49,61 +54,115 @@ def add_events(sio:socketio):
|
||||
client.discussion = lollmsElfServer.db.create_discussion(title)
|
||||
# Get the current timestamp
|
||||
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
|
||||
# Return a success response
|
||||
if lollmsElfServer.session.get_client(client_id).discussion is None:
|
||||
lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.load_last_discussion()
|
||||
|
||||
if lollmsElfServer.personality.welcome_message!="":
|
||||
lollmsElfServer.session.get_client(client_id).discussion = (
|
||||
lollmsElfServer.db.load_last_discussion()
|
||||
)
|
||||
|
||||
if lollmsElfServer.personality.welcome_message != "":
|
||||
if lollmsElfServer.personality.welcome_audio_path.exists():
|
||||
for voice in lollmsElfServer.personality.welcome_audio_path.iterdir():
|
||||
if voice.suffix.lower() in [".wav",".mp3"]:
|
||||
try:
|
||||
if not PackageManager.check_package_installed("pygame"):
|
||||
PackageManager.install_package("pygame")
|
||||
import pygame
|
||||
pygame.mixer.init()
|
||||
pygame.mixer.music.load(voice)
|
||||
pygame.mixer.music.play()
|
||||
except Exception as ex:
|
||||
pass
|
||||
if lollmsElfServer.personality.language:
|
||||
default_language = lollmsElfServer.personality.language.lower().strip().split()[0]
|
||||
else:
|
||||
default_language = 'english'
|
||||
|
||||
current_language = lollmsElfServer.config.current_language.lower().strip().split()[0]
|
||||
if voice.suffix.lower() in [".wav", ".mp3"]:
|
||||
try:
|
||||
if not PackageManager.check_package_installed("pygame"):
|
||||
PackageManager.install_package("pygame")
|
||||
import pygame
|
||||
|
||||
if lollmsElfServer.config.current_language and current_language!= default_language:
|
||||
language_path = lollmsElfServer.lollms_paths.personal_configuration_path/"personalities"/lollmsElfServer.personality.name/f"languages_{current_language}.yaml"
|
||||
pygame.mixer.init()
|
||||
pygame.mixer.music.load(voice)
|
||||
pygame.mixer.music.play()
|
||||
except Exception as ex:
|
||||
pass
|
||||
if lollmsElfServer.personality.language:
|
||||
default_language = (
|
||||
lollmsElfServer.personality.language.lower().strip().split()[0]
|
||||
)
|
||||
else:
|
||||
default_language = "english"
|
||||
|
||||
current_language = (
|
||||
lollmsElfServer.config.current_language.lower().strip().split()[0]
|
||||
)
|
||||
|
||||
if (
|
||||
lollmsElfServer.config.current_language
|
||||
and current_language != default_language
|
||||
):
|
||||
language_path = (
|
||||
lollmsElfServer.lollms_paths.personal_configuration_path
|
||||
/ "personalities"
|
||||
/ lollmsElfServer.personality.name
|
||||
/ f"languages_{current_language}.yaml"
|
||||
)
|
||||
if not language_path.exists():
|
||||
#checking if there is already a translation in the personality folder
|
||||
persona_language_path = lollmsElfServer.lollms_paths.personalities_zoo_path/lollmsElfServer.personality.category/lollmsElfServer.personality.name.replace(" ","_")/"languages"/f"{current_language}.yaml"
|
||||
# checking if there is already a translation in the personality folder
|
||||
persona_language_path = (
|
||||
lollmsElfServer.lollms_paths.personalities_zoo_path
|
||||
/ lollmsElfServer.personality.category
|
||||
/ lollmsElfServer.personality.name.replace(" ", "_")
|
||||
/ "languages"
|
||||
/ f"{current_language}.yaml"
|
||||
)
|
||||
if persona_language_path.exists():
|
||||
shutil.copy(persona_language_path, language_path)
|
||||
with open(language_path,"r",encoding="utf-8", errors="ignore") as f:
|
||||
with open(
|
||||
language_path, "r", encoding="utf-8", errors="ignore"
|
||||
) as f:
|
||||
language_pack = yaml.safe_load(f)
|
||||
conditionning = language_pack["personality_conditioning"]
|
||||
else:
|
||||
lollmsElfServer.ShowBlockingMessage(f"This is the first time this personality speaks {current_language}\nLollms is reconditionning the persona in that language.\nThis will be done just once. Next time, the personality will speak {current_language} out of the box")
|
||||
lollmsElfServer.ShowBlockingMessage(
|
||||
f"This is the first time this personality speaks {current_language}\nLollms is reconditionning the persona in that language.\nThis will be done just once. Next time, the personality will speak {current_language} out of the box"
|
||||
)
|
||||
language_path.parent.mkdir(exist_ok=True, parents=True)
|
||||
# Translating
|
||||
conditionning = lollmsElfServer.tasks_library.translate_conditionning(lollmsElfServer.personality._personality_conditioning, lollmsElfServer.personality.language, current_language)
|
||||
welcome_message = lollmsElfServer.tasks_library.translate_message(lollmsElfServer.personality.welcome_message, lollmsElfServer.personality.language, current_language)
|
||||
with open(language_path,"w",encoding="utf-8", errors="ignore") as f:
|
||||
yaml.safe_dump({"personality_conditioning":conditionning,"welcome_message":welcome_message}, f)
|
||||
conditionning = (
|
||||
lollmsElfServer.tasks_library.translate_conditionning(
|
||||
lollmsElfServer.personality._personality_conditioning,
|
||||
lollmsElfServer.personality.language,
|
||||
current_language,
|
||||
)
|
||||
)
|
||||
welcome_message = (
|
||||
lollmsElfServer.tasks_library.translate_message(
|
||||
lollmsElfServer.personality.welcome_message,
|
||||
lollmsElfServer.personality.language,
|
||||
current_language,
|
||||
)
|
||||
)
|
||||
with open(
|
||||
language_path, "w", encoding="utf-8", errors="ignore"
|
||||
) as f:
|
||||
yaml.safe_dump(
|
||||
{
|
||||
"personality_conditioning": conditionning,
|
||||
"welcome_message": welcome_message,
|
||||
},
|
||||
f,
|
||||
)
|
||||
lollmsElfServer.HideBlockingMessage()
|
||||
else:
|
||||
with open(language_path,"r",encoding="utf-8", errors="ignore") as f:
|
||||
with open(
|
||||
language_path, "r", encoding="utf-8", errors="ignore"
|
||||
) as f:
|
||||
language_pack = yaml.safe_load(f)
|
||||
welcome_message = language_pack.get("welcome_message", lollmsElfServer.personality.welcome_message)
|
||||
welcome_message = language_pack.get(
|
||||
"welcome_message",
|
||||
lollmsElfServer.personality.welcome_message,
|
||||
)
|
||||
else:
|
||||
welcome_message = lollmsElfServer.personality.welcome_message
|
||||
|
||||
if lollmsElfServer.personality.processor:
|
||||
lollmsElfServer.ShowBlockingMessage("Building custom welcome message.\nPlease standby.")
|
||||
lollmsElfServer.ShowBlockingMessage(
|
||||
"Building custom welcome message.\nPlease standby."
|
||||
)
|
||||
try:
|
||||
welcome_message = lollmsElfServer.personality.processor.get_welcome(welcome_message,client)
|
||||
welcome_message = lollmsElfServer.personality.processor.get_welcome(
|
||||
welcome_message, client
|
||||
)
|
||||
if welcome_message is None:
|
||||
welcome_message = lollmsElfServer.personality.welcome_message
|
||||
except Exception as ex:
|
||||
@ -114,51 +173,76 @@ def add_events(sio:socketio):
|
||||
nb_tokens = len(lollmsElfServer.model.tokenize(welcome_message))
|
||||
except:
|
||||
nb_tokens = None
|
||||
message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
|
||||
message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value if lollmsElfServer.personality.include_welcome_message_in_discussion else MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI.value,
|
||||
sender_type = SENDER_TYPES.SENDER_TYPES_AI.value,
|
||||
sender = lollmsElfServer.personality.name,
|
||||
content = welcome_message,
|
||||
steps = [],
|
||||
metadata = None,
|
||||
rank = 0,
|
||||
parent_message_id = -1,
|
||||
binding = lollmsElfServer.config.binding_name,
|
||||
model = lollmsElfServer.config.model_name,
|
||||
personality = lollmsElfServer.config.personalities[lollmsElfServer.config.active_personality_id],
|
||||
created_at = None,
|
||||
started_generating_at = None,
|
||||
finished_generating_at = None,
|
||||
nb_tokens = nb_tokens
|
||||
message = lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.add_message(
|
||||
message_type=(
|
||||
MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value
|
||||
if lollmsElfServer.personality.include_welcome_message_in_discussion
|
||||
else MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI.value
|
||||
),
|
||||
sender_type=SENDER_TYPES.SENDER_TYPES_AI.value,
|
||||
sender=lollmsElfServer.personality.name,
|
||||
content=welcome_message,
|
||||
steps=[],
|
||||
metadata=None,
|
||||
rank=0,
|
||||
parent_message_id=-1,
|
||||
binding=lollmsElfServer.config.binding_name,
|
||||
model=lollmsElfServer.config.model_name,
|
||||
personality=lollmsElfServer.config.personalities[
|
||||
lollmsElfServer.config.active_personality_id
|
||||
],
|
||||
created_at=None,
|
||||
started_generating_at=None,
|
||||
finished_generating_at=None,
|
||||
nb_tokens=nb_tokens,
|
||||
)
|
||||
|
||||
await lollmsElfServer.sio.emit('discussion_created',
|
||||
{'id':lollmsElfServer.session.get_client(client_id).discussion.discussion_id},
|
||||
to=client_id
|
||||
)
|
||||
await lollmsElfServer.sio.emit(
|
||||
"discussion_created",
|
||||
{
|
||||
"id": lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.discussion_id
|
||||
},
|
||||
to=client_id,
|
||||
)
|
||||
else:
|
||||
await lollmsElfServer.sio.emit('discussion_created',
|
||||
{'id':lollmsElfServer.session.get_client(client_id).discussion.discussion_id},
|
||||
to=client_id
|
||||
)
|
||||
await lollmsElfServer.sio.emit(
|
||||
"discussion_created",
|
||||
{
|
||||
"id": lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.discussion_id
|
||||
},
|
||||
to=client_id,
|
||||
)
|
||||
|
||||
@sio.on('load_discussion')
|
||||
async def load_discussion(sid, data):
|
||||
@sio.on("load_discussion")
|
||||
async def load_discussion(sid, data):
|
||||
client_id = sid
|
||||
ASCIIColors.yellow(f"Loading discussion for client {client_id} ... ", end="")
|
||||
if "id" in data:
|
||||
discussion_id = data["id"]
|
||||
lollmsElfServer.session.get_client(client_id).discussion = Discussion(lollmsElfServer, discussion_id, lollmsElfServer.db)
|
||||
lollmsElfServer.session.get_client(client_id).discussion = Discussion(
|
||||
lollmsElfServer, discussion_id, lollmsElfServer.db
|
||||
)
|
||||
else:
|
||||
if lollmsElfServer.session.get_client(client_id).discussion is not None:
|
||||
discussion_id = lollmsElfServer.session.get_client(client_id).discussion.discussion_id
|
||||
lollmsElfServer.session.get_client(client_id).discussion = Discussion(lollmsElfServer, discussion_id, lollmsElfServer.db)
|
||||
discussion_id = lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.discussion_id
|
||||
lollmsElfServer.session.get_client(client_id).discussion = Discussion(
|
||||
lollmsElfServer, discussion_id, lollmsElfServer.db
|
||||
)
|
||||
else:
|
||||
lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.create_discussion()
|
||||
messages = lollmsElfServer.session.get_client(client_id).discussion.get_messages()
|
||||
lollmsElfServer.session.get_client(client_id).discussion = (
|
||||
lollmsElfServer.db.create_discussion()
|
||||
)
|
||||
messages = lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.get_messages()
|
||||
jsons = [m.to_json() for m in messages]
|
||||
await lollmsElfServer.sio.emit('discussion',
|
||||
jsons,
|
||||
to=client_id
|
||||
)
|
||||
await lollmsElfServer.sio.emit("discussion", jsons, to=client_id)
|
||||
ASCIIColors.green(f"ok")
|
||||
|
@ -7,194 +7,254 @@ description:
|
||||
application. These routes are specific to text generation operation
|
||||
|
||||
"""
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi import HTTPException
|
||||
from pydantic import BaseModel
|
||||
import pkg_resources
|
||||
from lollms.server.elf_server import LOLLMSElfServer
|
||||
from fastapi.responses import FileResponse
|
||||
from lollms.binding import BindingBuilder, InstallOption
|
||||
from ascii_colors import ASCIIColors
|
||||
from lollms.personality import AIPersonality
|
||||
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
|
||||
from lollms.utilities import load_config, trace_exception, gc
|
||||
from lollms.utilities import find_first_available_file_index, convert_language_name
|
||||
from lollms.security import forbid_remote_access
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
import socketio
|
||||
|
||||
import os
|
||||
import threading
|
||||
from datetime import datetime
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
import pkg_resources
|
||||
import socketio
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import APIRouter, HTTPException, Request
|
||||
from fastapi.responses import FileResponse
|
||||
from lollms.binding import BindingBuilder, InstallOption
|
||||
from lollms.personality import AIPersonality
|
||||
from lollms.security import forbid_remote_access
|
||||
from lollms.server.elf_server import LOLLMSElfServer
|
||||
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
|
||||
from lollms.utilities import (convert_language_name,
|
||||
find_first_available_file_index, gc, load_config,
|
||||
trace_exception)
|
||||
from pydantic import BaseModel
|
||||
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
router = APIRouter()
|
||||
lollmsElfServer = LOLLMSWebUI.get_instance()
|
||||
|
||||
|
||||
# ----------------------------------- events -----------------------------------------
|
||||
def add_events(sio:socketio):
|
||||
def add_events(sio: socketio):
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
@sio.on('generate_msg')
|
||||
def handle_generate_msg(sid, data, use_threading=True):
|
||||
|
||||
@sio.on("generate_msg")
|
||||
def handle_generate_msg(sid, data, use_threading=True):
|
||||
client_id = sid
|
||||
lollmsElfServer.cancel_gen = False
|
||||
client = lollmsElfServer.session.get_client(client_id)
|
||||
|
||||
client.generated_text=""
|
||||
client.cancel_generation=False
|
||||
client.continuing=False
|
||||
client.first_chunk=True
|
||||
|
||||
|
||||
client.generated_text = ""
|
||||
client.cancel_generation = False
|
||||
client.continuing = False
|
||||
client.first_chunk = True
|
||||
|
||||
if not lollmsElfServer.model:
|
||||
ASCIIColors.error("Model not selected. Please select a model")
|
||||
lollmsElfServer.error("Model not selected. Please select a model", client_id=client_id)
|
||||
lollmsElfServer.error(
|
||||
"Model not selected. Please select a model", client_id=client_id
|
||||
)
|
||||
return
|
||||
|
||||
if not lollmsElfServer.busy:
|
||||
if lollmsElfServer.session.get_client(client_id).discussion is None:
|
||||
if lollmsElfServer.db.does_last_discussion_have_messages():
|
||||
lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.create_discussion()
|
||||
lollmsElfServer.session.get_client(client_id).discussion = (
|
||||
lollmsElfServer.db.create_discussion()
|
||||
)
|
||||
else:
|
||||
lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.load_last_discussion()
|
||||
lollmsElfServer.session.get_client(client_id).discussion = (
|
||||
lollmsElfServer.db.load_last_discussion()
|
||||
)
|
||||
|
||||
prompt = data["prompt"]
|
||||
try:
|
||||
nb_tokens = len(lollmsElfServer.model.tokenize(prompt))
|
||||
except:
|
||||
nb_tokens = None
|
||||
created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
||||
ump = lollmsElfServer.config.discussion_prompt_separator +lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix
|
||||
message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
|
||||
message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
|
||||
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
|
||||
sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
|
||||
created_at = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
ump = (
|
||||
lollmsElfServer.config.discussion_prompt_separator
|
||||
+ lollmsElfServer.config.user_name.strip()
|
||||
if lollmsElfServer.config.use_user_name_in_discussions
|
||||
else lollmsElfServer.personality.user_message_prefix
|
||||
)
|
||||
message = lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.add_message(
|
||||
message_type=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
|
||||
sender_type=SENDER_TYPES.SENDER_TYPES_USER.value,
|
||||
sender=ump.replace(
|
||||
lollmsElfServer.config.discussion_prompt_separator, ""
|
||||
).replace(":", ""),
|
||||
content=prompt,
|
||||
steps=[],
|
||||
metadata=None,
|
||||
parent_message_id=lollmsElfServer.message_id,
|
||||
created_at=created_at,
|
||||
nb_tokens=nb_tokens
|
||||
nb_tokens=nb_tokens,
|
||||
)
|
||||
|
||||
ASCIIColors.green("Starting message generation by "+lollmsElfServer.personality.name)
|
||||
ASCIIColors.green(
|
||||
"Starting message generation by " + lollmsElfServer.personality.name
|
||||
)
|
||||
if use_threading:
|
||||
client.generation_thread = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message.id, client_id))
|
||||
client.generation_thread = threading.Thread(
|
||||
target=lollmsElfServer.start_message_generation,
|
||||
args=(message, message.id, client_id),
|
||||
)
|
||||
client.generation_thread.start()
|
||||
else:
|
||||
lollmsElfServer.start_message_generation(message, message.id, client_id)
|
||||
|
||||
|
||||
# lollmsElfServer.sio.sleep(0.01)
|
||||
ASCIIColors.info("Started generation task")
|
||||
lollmsElfServer.busy=True
|
||||
#tpe = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message_id, client_id))
|
||||
#tpe.start()
|
||||
lollmsElfServer.busy = True
|
||||
# tpe = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message_id, client_id))
|
||||
# tpe.start()
|
||||
else:
|
||||
lollmsElfServer.error("I am busy. Come back later.", client_id=client_id)
|
||||
@sio.on('generate_msg_with_internet')
|
||||
def generate_msg_with_internet(sid, data):
|
||||
|
||||
@sio.on("generate_msg_with_internet")
|
||||
def generate_msg_with_internet(sid, data):
|
||||
client_id = sid
|
||||
lollmsElfServer.cancel_gen = False
|
||||
client = lollmsElfServer.session.get_client(client_id)
|
||||
|
||||
client.generated_text=""
|
||||
client.cancel_generation=False
|
||||
client.continuing=False
|
||||
client.first_chunk=True
|
||||
|
||||
client.generated_text = ""
|
||||
client.cancel_generation = False
|
||||
client.continuing = False
|
||||
client.first_chunk = True
|
||||
|
||||
|
||||
if not lollmsElfServer.model:
|
||||
ASCIIColors.error("Model not selected. Please select a model")
|
||||
lollmsElfServer.error("Model not selected. Please select a model", client_id=client_id)
|
||||
lollmsElfServer.error(
|
||||
"Model not selected. Please select a model", client_id=client_id
|
||||
)
|
||||
return
|
||||
|
||||
if not lollmsElfServer.busy:
|
||||
if lollmsElfServer.session.get_client(client_id).discussion is None:
|
||||
if lollmsElfServer.db.does_last_discussion_have_messages():
|
||||
lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.create_discussion()
|
||||
lollmsElfServer.session.get_client(client_id).discussion = (
|
||||
lollmsElfServer.db.create_discussion()
|
||||
)
|
||||
else:
|
||||
lollmsElfServer.session.get_client(client_id).discussion = lollmsElfServer.db.load_last_discussion()
|
||||
lollmsElfServer.session.get_client(client_id).discussion = (
|
||||
lollmsElfServer.db.load_last_discussion()
|
||||
)
|
||||
|
||||
prompt = data["prompt"]
|
||||
try:
|
||||
nb_tokens = len(lollmsElfServer.model.tokenize(prompt))
|
||||
except:
|
||||
nb_tokens = None
|
||||
created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
||||
ump = lollmsElfServer.config.discussion_prompt_separator +lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix
|
||||
message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
|
||||
message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
|
||||
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
|
||||
sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
|
||||
content = prompt,
|
||||
steps = [],
|
||||
metadata = None,
|
||||
created_at = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
ump = (
|
||||
lollmsElfServer.config.discussion_prompt_separator
|
||||
+ lollmsElfServer.config.user_name.strip()
|
||||
if lollmsElfServer.config.use_user_name_in_discussions
|
||||
else lollmsElfServer.personality.user_message_prefix
|
||||
)
|
||||
message = lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.add_message(
|
||||
message_type=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
|
||||
sender_type=SENDER_TYPES.SENDER_TYPES_USER.value,
|
||||
sender=ump.replace(
|
||||
lollmsElfServer.config.discussion_prompt_separator, ""
|
||||
).replace(":", ""),
|
||||
content=prompt,
|
||||
steps=[],
|
||||
metadata=None,
|
||||
parent_message_id=lollmsElfServer.message_id,
|
||||
created_at=created_at,
|
||||
nb_tokens=nb_tokens
|
||||
nb_tokens=nb_tokens,
|
||||
)
|
||||
|
||||
ASCIIColors.green("Starting message generation by "+lollmsElfServer.personality.name)
|
||||
|
||||
client.generation_thread = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message.id, client_id, False, None, True))
|
||||
ASCIIColors.green(
|
||||
"Starting message generation by " + lollmsElfServer.personality.name
|
||||
)
|
||||
|
||||
client.generation_thread = threading.Thread(
|
||||
target=lollmsElfServer.start_message_generation,
|
||||
args=(message, message.id, client_id, False, None, True),
|
||||
)
|
||||
client.generation_thread.start()
|
||||
|
||||
|
||||
# lollmsElfServer.sio.sleep(0.01)
|
||||
ASCIIColors.info("Started generation task")
|
||||
lollmsElfServer.busy=True
|
||||
#tpe = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message_id, client_id))
|
||||
#tpe.start()
|
||||
lollmsElfServer.busy = True
|
||||
# tpe = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message_id, client_id))
|
||||
# tpe.start()
|
||||
else:
|
||||
lollmsElfServer.error("I am busy. Come back later.", client_id=client_id)
|
||||
|
||||
@sio.on('generate_msg_from')
|
||||
@sio.on("generate_msg_from")
|
||||
def handle_generate_msg_from(sid, data):
|
||||
client_id = sid
|
||||
client = lollmsElfServer.session.get_client(client_id)
|
||||
lollmsElfServer.cancel_gen = False
|
||||
client.continuing=False
|
||||
client.first_chunk=True
|
||||
|
||||
client.continuing = False
|
||||
client.first_chunk = True
|
||||
|
||||
if lollmsElfServer.session.get_client(client_id).discussion is None:
|
||||
ASCIIColors.warning("Please select a discussion")
|
||||
lollmsElfServer.error("Please select a discussion first", client_id=client_id)
|
||||
lollmsElfServer.error(
|
||||
"Please select a discussion first", client_id=client_id
|
||||
)
|
||||
return
|
||||
id_ = data['id']
|
||||
generation_type = data.get('msg_type',None)
|
||||
if id_==-1:
|
||||
message = lollmsElfServer.session.get_client(client_id).discussion.current_message
|
||||
id_ = data["id"]
|
||||
generation_type = data.get("msg_type", None)
|
||||
if id_ == -1:
|
||||
message = lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.current_message
|
||||
else:
|
||||
message = lollmsElfServer.session.get_client(client_id).discussion.load_message(id_)
|
||||
message = lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.load_message(id_)
|
||||
if message is None:
|
||||
return
|
||||
client.generation_thread = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message.id, client_id, False, generation_type))
|
||||
return
|
||||
client.generation_thread = threading.Thread(
|
||||
target=lollmsElfServer.start_message_generation,
|
||||
args=(message, message.id, client_id, False, generation_type),
|
||||
)
|
||||
client.generation_thread.start()
|
||||
|
||||
@sio.on('continue_generate_msg_from')
|
||||
@sio.on("continue_generate_msg_from")
|
||||
def handle_continue_generate_msg_from(sid, data):
|
||||
client_id = sid
|
||||
client = lollmsElfServer.session.get_client(client_id)
|
||||
lollmsElfServer.cancel_gen = False
|
||||
client.continuing=True
|
||||
client.first_chunk=True
|
||||
|
||||
client.continuing = True
|
||||
client.first_chunk = True
|
||||
|
||||
if lollmsElfServer.session.get_client(client_id).discussion is None:
|
||||
ASCIIColors.yellow("Please select a discussion")
|
||||
lollmsElfServer.error("Please select a discussion", client_id=client_id)
|
||||
return
|
||||
id_ = data['id']
|
||||
if id_==-1:
|
||||
message = lollmsElfServer.session.get_client(client_id).discussion.current_message
|
||||
id_ = data["id"]
|
||||
if id_ == -1:
|
||||
message = lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.current_message
|
||||
else:
|
||||
message = lollmsElfServer.session.get_client(client_id).discussion.load_message(id_)
|
||||
message = lollmsElfServer.session.get_client(
|
||||
client_id
|
||||
).discussion.load_message(id_)
|
||||
|
||||
client.generated_text=message.content
|
||||
client.generation_thread = threading.Thread(target=lollmsElfServer.start_message_generation, args=(message, message.id, client_id, True))
|
||||
client.generated_text = message.content
|
||||
client.generation_thread = threading.Thread(
|
||||
target=lollmsElfServer.start_message_generation,
|
||||
args=(message, message.id, client_id, True),
|
||||
)
|
||||
client.generation_thread.start()
|
||||
|
||||
#add functions to lollm
|
||||
lollmsElfServer.handle_generate_msg = handle_generate_msg
|
||||
lollmsElfServer.generate_msg_with_internet = generate_msg_with_internet
|
||||
lollmsElfServer.handle_continue_generate_msg_from = handle_continue_generate_msg_from
|
||||
# add functions to lollm
|
||||
lollmsElfServer.handle_generate_msg = handle_generate_msg
|
||||
lollmsElfServer.generate_msg_with_internet = generate_msg_with_internet
|
||||
lollmsElfServer.handle_continue_generate_msg_from = (
|
||||
handle_continue_generate_msg_from
|
||||
)
|
||||
|
@ -7,118 +7,142 @@ description:
|
||||
application. These routes are specific to discussion operation
|
||||
|
||||
"""
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi import HTTPException
|
||||
from pydantic import BaseModel
|
||||
import pkg_resources
|
||||
from lollms.server.elf_server import LOLLMSElfServer
|
||||
from fastapi.responses import FileResponse
|
||||
from lollms.binding import BindingBuilder, InstallOption
|
||||
from ascii_colors import ASCIIColors
|
||||
from lollms.personality import AIPersonality
|
||||
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
|
||||
from lollms.utilities import load_config, trace_exception, gc
|
||||
from lollms.utilities import find_first_available_file_index, convert_language_name, PackageManager, run_async, add_period
|
||||
from lollms.security import forbid_remote_access, check_access
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
from functools import partial
|
||||
import socketio
|
||||
import threading
|
||||
import os
|
||||
import time
|
||||
|
||||
import pkg_resources
|
||||
import socketio
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import APIRouter, HTTPException, Request
|
||||
from fastapi.responses import FileResponse
|
||||
from lollms.binding import BindingBuilder, InstallOption
|
||||
from lollms.databases.discussions_database import Discussion
|
||||
from datetime import datetime
|
||||
from lollms.personality import AIPersonality
|
||||
from lollms.security import check_access, forbid_remote_access
|
||||
from lollms.server.elf_server import LOLLMSElfServer
|
||||
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
|
||||
from lollms.utilities import (PackageManager, add_period,
|
||||
convert_language_name,
|
||||
find_first_available_file_index, gc, load_config,
|
||||
run_async, trace_exception)
|
||||
from pydantic import BaseModel
|
||||
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
router = APIRouter()
|
||||
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
|
||||
|
||||
# ----------------------------------- events -----------------------------------------
|
||||
def add_events(sio:socketio):
|
||||
def add_events(sio: socketio):
|
||||
forbid_remote_access(lollmsElfServer)
|
||||
@sio.on('start_webcam_video_stream')
|
||||
|
||||
@sio.on("start_webcam_video_stream")
|
||||
def start_webcam_video_stream(sid):
|
||||
lollmsElfServer.info("Starting video capture")
|
||||
try:
|
||||
from lollms.media import WebcamImageSender
|
||||
lollmsElfServer.webcam = WebcamImageSender(sio,lollmsCom=lollmsElfServer)
|
||||
|
||||
lollmsElfServer.webcam = WebcamImageSender(sio, lollmsCom=lollmsElfServer)
|
||||
lollmsElfServer.webcam.start_capture()
|
||||
except:
|
||||
lollmsElfServer.InfoMessage("Couldn't load media library.\nYou will not be able to perform any of the media linked operations. please verify the logs and install any required installations")
|
||||
lollmsElfServer.InfoMessage(
|
||||
"Couldn't load media library.\nYou will not be able to perform any of the media linked operations. please verify the logs and install any required installations"
|
||||
)
|
||||
|
||||
@sio.on('stop_webcam_video_stream')
|
||||
@sio.on("stop_webcam_video_stream")
|
||||
def stop_webcam_video_stream(sid):
|
||||
lollmsElfServer.info("Stopping video capture")
|
||||
lollmsElfServer.webcam.stop_capture()
|
||||
|
||||
@sio.on('start_bidirectional_audio_stream')
|
||||
@sio.on("start_bidirectional_audio_stream")
|
||||
def start_bidirectional_audio_stream(sid):
|
||||
client = check_access(lollmsElfServer, sid)
|
||||
if lollmsElfServer.config.headless_server_mode:
|
||||
return {"status":False,"error":"Start recording is blocked when in headless mode for obvious security reasons!"}
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Start recording is blocked when in headless mode for obvious security reasons!",
|
||||
}
if lollmsElfServer.config.host!="localhost" and lollmsElfServer.config.host!="127.0.0.1":
|
||||
return {"status":False,"error":"Start recording is blocked when the server is exposed outside for very obvious reasons!"}
|
||||
if (
|
||||
lollmsElfServer.config.host != "localhost"
|
||||
and lollmsElfServer.config.host != "127.0.0.1"
|
||||
):
|
||||
return {
|
||||
"status": False,
|
||||
"error": "Start recording is blocked when the server is exposed outside for very obvious reasons!",
|
||||
}
lollmsElfServer.info("Starting audio capture")
|
||||
if not lollmsElfServer.tts or not lollmsElfServer.stt:
|
||||
lollmsElfServer.InfoMessage("TTS or STT are not configured.\nPlease go to settings and configure them first")
|
||||
return {"status":False,"error":"TTS or STT not configured"}
|
||||
lollmsElfServer.InfoMessage(
|
||||
"TTS or STT are not configured.\nPlease go to settings and configure them first"
|
||||
)
|
||||
return {"status": False, "error": "TTS or STT not configured"}
if not lollmsElfServer.tts.ready or not lollmsElfServer.stt.ready:
|
||||
lollmsElfServer.InfoMessage("TTS is not ready yet.\nPlease wait")
|
||||
return {"status":False,"error":"TTS not ready"}
|
||||
return {"status": False, "error": "TTS not ready"}
if lollmsElfServer.rt_com:
|
||||
lollmsElfServer.info("audio_mode is already on\nTurning it off")
|
||||
lollmsElfServer.info("Stopping audio capture")
|
||||
lollmsElfServer.rt_com.stop_recording()
|
||||
lollmsElfServer.rt_com = None
|
||||
lollmsElfServer.emit_socket_io_info("rtcom_status_changed",{"status":False}, client.client_id)
|
||||
return {"status":False,"error":"Already running"}
|
||||
lollmsElfServer.emit_socket_io_info(
|
||||
"rtcom_status_changed", {"status": False}, client.client_id
|
||||
)
|
||||
return {"status": False, "error": "Already running"}
try:
|
||||
from lollms.media import RTCom
|
||||
lollmsElfServer.rec_output_folder = lollmsElfServer.lollms_paths.personal_outputs_path/"audio_rec"
lollmsElfServer.rec_output_folder = (
|
||||
lollmsElfServer.lollms_paths.personal_outputs_path / "audio_rec"
|
||||
)
|
||||
lollmsElfServer.rec_output_folder.mkdir(exist_ok=True, parents=True)
|
||||
lollmsElfServer.summoned = False
|
||||
lollmsElfServer.rt_com = RTCom(
|
||||
lollmsElfServer,
|
||||
lollmsElfServer.sio,
|
||||
lollmsElfServer.personality,
|
||||
client=client,
|
||||
threshold=lollmsElfServer.config.stt_listening_threshold,
|
||||
silence_duration=lollmsElfServer.config.stt_silence_duration,
|
||||
sound_threshold_percentage=lollmsElfServer.config.stt_sound_threshold_percentage,
|
||||
gain=lollmsElfServer.config.stt_gain,
|
||||
rate=lollmsElfServer.config.stt_rate,
|
||||
channels=lollmsElfServer.config.stt_channels,
|
||||
buffer_size=lollmsElfServer.config.stt_buffer_size,
|
||||
snd_input_device=lollmsElfServer.config.stt_input_device,
|
||||
snd_output_device=lollmsElfServer.config.tts_output_device,
|
||||
logs_folder=lollmsElfServer.rec_output_folder,
|
||||
block_while_talking=True,
|
||||
use_keyword_audio=lollmsElfServer.config.stt_activate_word_detection,
|
||||
keyword_audio_path=lollmsElfServer.config.stt_word_detection_file
|
||||
)
|
||||
lollmsElfServer,
|
||||
lollmsElfServer.sio,
|
||||
lollmsElfServer.personality,
|
||||
client=client,
|
||||
threshold=lollmsElfServer.config.stt_listening_threshold,
|
||||
silence_duration=lollmsElfServer.config.stt_silence_duration,
|
||||
sound_threshold_percentage=lollmsElfServer.config.stt_sound_threshold_percentage,
|
||||
gain=lollmsElfServer.config.stt_gain,
|
||||
rate=lollmsElfServer.config.stt_rate,
|
||||
channels=lollmsElfServer.config.stt_channels,
|
||||
buffer_size=lollmsElfServer.config.stt_buffer_size,
|
||||
snd_input_device=lollmsElfServer.config.stt_input_device,
|
||||
snd_output_device=lollmsElfServer.config.tts_output_device,
|
||||
logs_folder=lollmsElfServer.rec_output_folder,
|
||||
block_while_talking=True,
|
||||
use_keyword_audio=lollmsElfServer.config.stt_activate_word_detection,
|
||||
keyword_audio_path=lollmsElfServer.config.stt_word_detection_file,
|
||||
)
|
||||
lollmsElfServer.rt_com.start_recording()
|
||||
lollmsElfServer.emit_socket_io_info("rtcom_status_changed",{"status":True}, client.client_id)
|
||||
lollmsElfServer.emit_socket_io_info(
|
||||
"rtcom_status_changed", {"status": True}, client.client_id
|
||||
)
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
lollmsElfServer.InfoMessage("Couldn't load media library.\nYou will not be able to perform any of the media linked operations. please verify the logs and install any required installations")
|
||||
lollmsElfServer.emit_socket_io_info("rtcom_status_changed",{"status":False}, client.client_id)
|
||||
lollmsElfServer.InfoMessage(
|
||||
"Couldn't load media library.\nYou will not be able to perform any of the media linked operations. please verify the logs and install any required installations"
|
||||
)
|
||||
lollmsElfServer.emit_socket_io_info(
|
||||
"rtcom_status_changed", {"status": False}, client.client_id
|
||||
)
@sio.on('stop_bidirectional_audio_stream')
|
||||
@sio.on("stop_bidirectional_audio_stream")
|
||||
def stop_bidirectional_audio_stream(sid):
|
||||
client = check_access(lollmsElfServer, sid)
|
||||
lollmsElfServer.info("Stopping audio capture")
|
||||
lollmsElfServer.rt_com.stop_recording()
|
||||
lollmsElfServer.rt_com = None
@ -1,67 +1,84 @@
|
||||
import json
|
||||
import argparse
|
||||
import json
|
||||
from datetime import datetime
def convert_discussions(input_data, flatten=False):
|
||||
discussions = []
|
||||
|
||||
for discussion in input_data:
|
||||
converted_discussion = {
|
||||
"id": discussion['id'],
|
||||
"id": discussion["id"],
|
||||
"messages": [],
|
||||
"title": discussion['title']
|
||||
"title": discussion["title"],
|
||||
}
|
||||
|
||||
mapping = discussion['mapping']
|
||||
mapping = discussion["mapping"]
|
||||
message_ids = list(mapping.keys())
|
||||
|
||||
messages = [mapping[message_id]['message'] for message_id in message_ids if mapping[message_id]['message']]
|
||||
messages = [
|
||||
mapping[message_id]["message"]
|
||||
for message_id in message_ids
|
||||
if mapping[message_id]["message"]
|
||||
]
|
||||
|
||||
for i, message in enumerate(messages):
|
||||
created_at = ''
|
||||
create_time = message.get('create_time')
|
||||
created_at = ""
|
||||
create_time = message.get("create_time")
|
||||
|
||||
if create_time is not None:
|
||||
created_at = datetime.fromtimestamp(create_time).strftime("%Y-%m-%d %H:%M:%S")
|
||||
created_at = datetime.fromtimestamp(create_time).strftime(
|
||||
"%Y-%m-%d %H:%M:%S"
|
||||
)
|
||||
|
||||
content = message['content'].get('parts', [''])[0]
|
||||
content = message["content"].get("parts", [""])[0]
|
||||
if content:
|
||||
parent = i - 1 if flatten and i > 0 else mapping[message_ids[i]]['parent'] or -1
|
||||
parent = (
|
||||
i - 1
|
||||
if flatten and i > 0
|
||||
else mapping[message_ids[i]]["parent"] or -1
|
||||
)
|
||||
|
||||
converted_message = {
|
||||
"binding": message['content'].get('binding', ''),
|
||||
"binding": message["content"].get("binding", ""),
|
||||
"content": content,
|
||||
"created_at": created_at,
|
||||
"finished_generating_at": '',
|
||||
"model": '',
|
||||
"finished_generating_at": "",
|
||||
"model": "",
|
||||
"parent": parent,
|
||||
"personality": '',
|
||||
"personality": "",
|
||||
"rank": 0,
|
||||
"sender": message['author']['role'],
|
||||
"type": 0
|
||||
"sender": message["author"]["role"],
|
||||
"type": 0,
|
||||
}
|
||||
|
||||
converted_discussion['messages'].append(converted_message)
|
||||
converted_discussion["messages"].append(converted_message)
|
||||
|
||||
discussions.append(converted_discussion)
|
||||
|
||||
return discussions
|
||||
|
||||
|
||||
def convert_json(input_file, output_file, flatten=False):
|
||||
with open(input_file, 'r') as file:
|
||||
with open(input_file, "r") as file:
|
||||
input_json = file.read()
|
||||
|
||||
input_data = json.loads(input_json)
|
||||
converted_data = convert_discussions(input_data, flatten=flatten)
|
||||
|
||||
with open(output_file, 'w') as file:
|
||||
with open(output_file, "w") as file:
|
||||
json.dump(converted_data, file, indent=4)
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description='Convert JSON files from the first format to the second format.')
|
||||
parser.add_argument('input_file', help='Input JSON file path')
|
||||
parser.add_argument('output_file', help='Output JSON file path')
|
||||
parser.add_argument('--flatten', action='store_true', help='Flatten the discussion hierarchy')
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Convert JSON files from the first format to the second format."
|
||||
)
|
||||
parser.add_argument("input_file", help="Input JSON file path")
|
||||
parser.add_argument("output_file", help="Output JSON file path")
|
||||
parser.add_argument(
|
||||
"--flatten", action="store_true", help="Flatten the discussion hierarchy"
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
convert_json(args.input_file, args.output_file, flatten=args.flatten)
|
||||
|
@ -2,24 +2,37 @@ import argparse
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def indent_json_file(input_file, output_file=None, indent=4):
|
||||
# Read the JSON file
|
||||
with open(input_file, 'r') as file:
|
||||
with open(input_file, "r") as file:
|
||||
data = json.load(file)
|
||||
|
||||
# Determine the output file path
|
||||
if output_file is None:
|
||||
output_file = input_file.with_stem(input_file.stem + '_indented' + input_file.suffix)
|
||||
output_file = input_file.with_stem(
|
||||
input_file.stem + "_indented" + input_file.suffix
|
||||
)
|
||||
|
||||
# Write the indented JSON to the output file
|
||||
with open(output_file, 'w') as file:
|
||||
with open(output_file, "w") as file:
|
||||
json.dump(data, file, indent=indent)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description='Indent a JSON file and save it to a new file.')
|
||||
parser.add_argument('input_file', type=Path, help='path to the input JSON file')
|
||||
parser.add_argument('-o', '--output_file', type=Path, help='path to the output JSON file')
|
||||
parser.add_argument('--indent', type=int, default=4, help='number of spaces for indentation (default: 4)')
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Indent a JSON file and save it to a new file."
|
||||
)
|
||||
parser.add_argument("input_file", type=Path, help="path to the input JSON file")
|
||||
parser.add_argument(
|
||||
"-o", "--output_file", type=Path, help="path to the output JSON file"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--indent",
|
||||
type=int,
|
||||
default=4,
|
||||
help="number of spaces for indentation (default: 4)",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
@ -29,5 +42,6 @@ def main():
|
||||
|
||||
indent_json_file(input_file, output_file, indent)
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
lollms_webui.py (1749 lines changed): file diff suppressed because it is too large.
@ -1,6 +1,7 @@
import os
import sys


def main():
    if len(sys.argv) != 1:
        print("Usage: python restart_script.py")
@ -18,5 +19,6 @@ def main():
        print("Error: Temporary arguments file not found.")
        sys.exit(1)


if __name__ == "__main__":
    main()
@ -1,36 +1,46 @@
|
||||
import re
|
||||
import os
|
||||
import re
|
||||
|
||||
# Define a list of file extensions to process
|
||||
file_extensions = ['.js', '.vue', '.html']
|
||||
file_extensions = [".js", ".vue", ".html"]
|
||||
|
||||
# Regular expressions to match lines where semicolons can be added
|
||||
javascript_pattern = r'\b(?:(?<!if|else|while|for|switch|catch|return|function)\s*[^;]*$|^\s*{)'
|
||||
vue_pattern = r'\b(?:data|computed|methods|watch|beforeCreate|created|beforeMount|mounted|beforeUpdate|updated|beforeDestroy|destroyed)\s*:[^;]*$'
|
||||
html_pattern = r'<[^>]*>$'
|
||||
javascript_pattern = (
|
||||
r"\b(?:(?<!if|else|while|for|switch|catch|return|function)\s*[^;]*$|^\s*{)"
|
||||
)
|
||||
vue_pattern = r"\b(?:data|computed|methods|watch|beforeCreate|created|beforeMount|mounted|beforeUpdate|updated|beforeDestroy|destroyed)\s*:[^;]*$"
|
||||
html_pattern = r"<[^>]*>$"
|
||||
|
||||
|
||||
# Function to add semicolons to the end of lines in a file
|
||||
def add_semicolons_to_file(file_path):
|
||||
try:
|
||||
with open(file_path, 'r') as file:
|
||||
with open(file_path, "r") as file:
|
||||
lines = file.readlines()
|
||||
|
||||
with open(file_path, 'w') as file:
|
||||
with open(file_path, "w") as file:
|
||||
for line in lines:
|
||||
if file_path.endswith('.js') and re.search(javascript_pattern, line.strip()):
|
||||
line = line.rstrip() + ';'
|
||||
elif file_path.endswith('.vue') and re.search(vue_pattern, line.strip()):
|
||||
line = line.rstrip() + ';'
|
||||
elif file_path.endswith('.html') and re.search(html_pattern, line.strip()):
|
||||
line = line.rstrip() + ';'
|
||||
if file_path.endswith(".js") and re.search(
|
||||
javascript_pattern, line.strip()
|
||||
):
|
||||
line = line.rstrip() + ";"
|
||||
elif file_path.endswith(".vue") and re.search(
|
||||
vue_pattern, line.strip()
|
||||
):
|
||||
line = line.rstrip() + ";"
|
||||
elif file_path.endswith(".html") and re.search(
|
||||
html_pattern, line.strip()
|
||||
):
|
||||
line = line.rstrip() + ";"
|
||||
file.write(line)
|
||||
except FileNotFoundError:
|
||||
print(f"File not found: {file_path}")
|
||||
except Exception as e:
|
||||
print(f"An error occurred: {str(e)}")
# Specify the path to the directory containing your JavaScript, Vue.js, or HTML files
|
||||
directory_path = '/path/to/your/files'
|
||||
directory_path = "/path/to/your/files"
|
||||
|
||||
# Iterate through files in the directory and add semicolons
|
||||
for root, _, files in os.walk(directory_path):
|
||||
|
@ -3,35 +3,41 @@ Project: lollms_installer
|
||||
Author: ParisNeo
|
||||
Description: This tool is designed to install and configure the LoLLMS system on your machine. LoLLMS is a multi-bindings, multi-personalities LLM full-stack system for AI applications in robotics. It provides a user-friendly interface for setting up and managing the system.
|
||||
"""
|
||||
|
||||
import webbrowser
|
||||
from pathlib import Path
|
||||
|
||||
import socketio
|
||||
import uvicorn
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import FastAPI
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from pydantic import BaseModel
|
||||
import uvicorn
|
||||
from lollms.paths import LollmsPaths
|
||||
from lollms.main_config import LOLLMSConfig
|
||||
from lollms.utilities import check_and_install_torch, PackageManager, check_torch_version, reinstall_pytorch_with_cuda, reinstall_pytorch_with_cpu, reinstall_pytorch_with_rocm
|
||||
from lollms.com import NotificationType, NotificationDisplayType, LoLLMsCom
|
||||
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
|
||||
from lollms.app import LollmsApplication
|
||||
from pathlib import Path
|
||||
from ascii_colors import ASCIIColors
|
||||
from pathlib import Path
|
||||
import webbrowser
|
||||
import socketio
|
||||
from fastapi import FastAPI
|
||||
from lollms.com import LoLLMsCom, NotificationDisplayType, NotificationType
|
||||
from lollms.main_config import LOLLMSConfig
|
||||
from lollms.paths import LollmsPaths
|
||||
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
|
||||
from lollms.utilities import (PackageManager, check_and_install_torch,
|
||||
check_torch_version, reinstall_pytorch_with_cpu,
|
||||
reinstall_pytorch_with_cuda,
|
||||
reinstall_pytorch_with_rocm)
|
||||
from pydantic import BaseModel
|
||||
from socketio import ASGIApp
|
|
||||
global_path = root_path/"global_paths_cfg.yaml"
|
||||
global_path = root_path / "global_paths_cfg.yaml"
|
||||
if global_path.exists():
|
||||
ASCIIColors.yellow(f"global_path: {global_path}")
|
||||
lollms_paths = LollmsPaths(global_path, prepare_configuration=False)
|
||||
config = LOLLMSConfig.autoload(lollms_paths,lollms_paths.personal_configuration_path/"local_config.yaml")
|
||||
config = LOLLMSConfig.autoload(
|
||||
lollms_paths, lollms_paths.personal_configuration_path / "local_config.yaml"
|
||||
)
|
||||
else:
|
||||
ASCIIColors.yellow(f"global_path: {global_path}")
|
||||
lollms_paths = LollmsPaths(global_path, prepare_configuration=False)
|
||||
config = LOLLMSConfig.autoload(lollms_paths,lollms_paths.personal_configuration_path/"local_config.yaml")
|
||||
config = LOLLMSConfig.autoload(
|
||||
lollms_paths, lollms_paths.personal_configuration_path / "local_config.yaml"
|
||||
)
|
||||
|
||||
|
||||
ASCIIColors.red(" ")
|
||||
@ -43,28 +49,31 @@ ASCIIColors.red(" Configurator ")
|
||||
ASCIIColors.red(" LoLLMS configuratoin tool")
|
||||
ASCIIColors.yellow(f"Root dir : {root_path}")
|
||||
|
||||
sio = socketio.AsyncServer(async_mode='asgi')
|
||||
sio = socketio.AsyncServer(async_mode="asgi")
|
||||
app = FastAPI(title="LoLLMS", description="This is the LoLLMS-Webui documentation")
|
||||
|
||||
lollms_app = LollmsApplication(
|
||||
"lollms_installer",
|
||||
config=config,
|
||||
lollms_paths=lollms_paths,
|
||||
load_binding=False,
|
||||
load_model=False,
|
||||
load_voice_service=False,
|
||||
load_sd_service=False,
|
||||
socketio=sio,
|
||||
free_mode=True)
|
||||
"lollms_installer",
|
||||
config=config,
|
||||
lollms_paths=lollms_paths,
|
||||
load_binding=False,
|
||||
load_model=False,
|
||||
load_voice_service=False,
|
||||
load_sd_service=False,
|
||||
socketio=sio,
|
||||
free_mode=True,
|
||||
)
class InstallProperties(BaseModel):
|
||||
mode: str
|
||||
|
||||
|
||||
@app.get("/get_personal_path")
|
||||
def get_personal_path():
|
||||
return lollms_paths.personal_path
|
||||
|
||||
|
||||
@app.post("/start_installing")
|
||||
def start_installing(data: InstallProperties):
|
||||
"""
|
||||
@ -77,58 +86,64 @@ def start_installing(data: InstallProperties):
|
||||
- A dictionary with a "message" key indicating the success of the installation.
|
||||
"""
|
||||
# Install mode (cpu, cpu-noavx, nvidia-tensorcores, nvidia, amd-noavx, amd, apple-intel, apple-silicon)
|
||||
if data.mode=="cpu":
|
||||
config.hardware_mode="cpu"
|
||||
if data.mode == "cpu":
|
||||
config.hardware_mode = "cpu"
|
||||
try:
|
||||
lollms_app.ShowBlockingMessage("Setting hardware configuration to CPU")
|
||||
config.save_config()
|
||||
lollms_app.HideBlockingMessage()
|
||||
except:
|
||||
lollms_app.HideBlockingMessage()
|
||||
if data.mode=="cpu-noavx":
|
||||
config.hardware_mode="cpu-noavx"
|
||||
if data.mode == "cpu-noavx":
|
||||
config.hardware_mode = "cpu-noavx"
|
||||
try:
|
||||
lollms_app.ShowBlockingMessage("Setting hardware configuration to CPU with no avx support")
|
||||
lollms_app.ShowBlockingMessage(
|
||||
"Setting hardware configuration to CPU with no avx support"
|
||||
)
|
||||
config.save_config()
|
||||
lollms_app.HideBlockingMessage()
|
||||
except:
|
||||
lollms_app.HideBlockingMessage()
|
||||
elif data.mode=="nvidia":
|
||||
config.hardware_mode="nvidia"
|
||||
elif data.mode == "nvidia":
|
||||
config.hardware_mode = "nvidia"
|
||||
try:
|
||||
lollms_app.ShowBlockingMessage("Installing pytorch for nVidia GPU (cuda)")
|
||||
config.save_config()
|
||||
lollms_app.HideBlockingMessage()
|
||||
except:
|
||||
lollms_app.HideBlockingMessage()
|
||||
elif data.mode=="nvidia-tensorcores":
|
||||
config.hardware_mode="nvidia-tensorcores"
|
||||
elif data.mode == "nvidia-tensorcores":
|
||||
config.hardware_mode = "nvidia-tensorcores"
|
||||
try:
|
||||
lollms_app.ShowBlockingMessage("Installing pytorch for nVidia GPU (cuda)")
|
||||
config.save_config()
|
||||
lollms_app.HideBlockingMessage()
|
||||
except:
|
||||
lollms_app.HideBlockingMessage()
|
||||
elif data.mode=="amd":
|
||||
config.hardware_mode="amd"
|
||||
elif data.mode == "amd":
|
||||
config.hardware_mode = "amd"
|
||||
try:
|
||||
lollms_app.ShowBlockingMessage("Installing pytorch for AMD GPU (rocm)")
|
||||
config.save_config()
|
||||
lollms_app.HideBlockingMessage()
|
||||
except:
|
||||
lollms_app.HideBlockingMessage()
|
||||
elif data.mode=="apple-silicon":
|
||||
config.hardware_mode="apple-silicon"
|
||||
elif data.mode == "apple-silicon":
|
||||
config.hardware_mode = "apple-silicon"
|
||||
try:
|
||||
lollms_app.ShowBlockingMessage("Installing pytorch for Apple Silicon (Metal)")
|
||||
lollms_app.ShowBlockingMessage(
|
||||
"Installing pytorch for Apple Silicon (Metal)"
|
||||
)
|
||||
config.save_config()
|
||||
lollms_app.HideBlockingMessage()
|
||||
except:
|
||||
lollms_app.HideBlockingMessage()
|
||||
elif data.mode=="apple-intel":
|
||||
config.hardware_mode="apple-intel"
|
||||
elif data.mode == "apple-intel":
|
||||
config.hardware_mode = "apple-intel"
|
||||
try:
|
||||
lollms_app.ShowBlockingMessage("Installing pytorch for Apple Silicon (Metal)")
|
||||
lollms_app.ShowBlockingMessage(
|
||||
"Installing pytorch for Apple Silicon (Metal)"
|
||||
)
|
||||
config.save_config()
|
||||
lollms_app.HideBlockingMessage()
|
||||
except:
|
||||
@ -136,9 +151,14 @@ def start_installing(data: InstallProperties):
|
||||
# Your code here
|
||||
return {"message": "Item created successfully"}
|
||||
|
||||
app.mount("/", StaticFiles(directory=Path(__file__).parent/"frontend"/"dist", html=True), name="static")
|
||||
|
||||
app.mount(
|
||||
"/",
|
||||
StaticFiles(directory=Path(__file__).parent / "frontend" / "dist", html=True),
|
||||
name="static",
|
||||
)
|
||||
app = ASGIApp(socketio_server=sio, other_asgi_app=app)
|
||||
|
||||
if __name__ == "__main__":
|
||||
webbrowser.open(f"http://localhost:8000")
|
||||
uvicorn.run(app, host="localhost", port=8000)
|
||||
uvicorn.run(app, host="localhost", port=8000)
|
||||
|
@ -1,17 +1,18 @@
|
||||
from lollms.paths import LollmsPaths
|
||||
from pathlib import Path
|
||||
from ascii_colors import ASCIIColors
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
global_path = Path(__file__).parent.parent.parent/"global_paths_cfg.yaml"
|
||||
from ascii_colors import ASCIIColors
|
||||
from lollms.paths import LollmsPaths
|
||||
|
||||
global_path = Path(__file__).parent.parent.parent / "global_paths_cfg.yaml"
|
||||
ASCIIColors.yellow(f"global_path: {global_path}")
|
||||
lollms_paths = LollmsPaths(global_path)
|
||||
shared_folder = lollms_paths.personal_path/"shared"
|
||||
shared_folder = lollms_paths.personal_path / "shared"
|
||||
sd_folder = shared_folder / "auto_sd"
|
||||
output_dir = lollms_paths.personal_path / "outputs/sd"
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
script_path = sd_folder / "lollms_sd.bat"
|
||||
output_folder = lollms_paths.personal_outputs_path/"audio_out"
|
||||
output_folder = lollms_paths.personal_outputs_path / "audio_out"
|
||||
|
||||
ASCIIColors.red(" ")
|
||||
ASCIIColors.red(" __ _____ __ __ _____ _____ _____ ____ ")
|
||||
@ -24,4 +25,4 @@ ASCIIColors.red(" Forked from Auto1111's Stable diffusion api")
|
||||
ASCIIColors.red(" Integration in lollms by ParisNeo using mix1009's sdwebuiapi ")
|
||||
|
||||
|
||||
subprocess.Popen(str(script_path) +" --share", cwd=sd_folder)
|
||||
subprocess.Popen(str(script_path) + " --share", cwd=sd_folder)
|
||||
|
@ -1,21 +1,44 @@
|
||||
from lollms.paths import LollmsPaths
|
||||
from pathlib import Path
|
||||
from ascii_colors import ASCIIColors
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
global_path = Path(__file__).parent.parent.parent/"global_paths_cfg.yaml"
|
||||
from ascii_colors import ASCIIColors
|
||||
from lollms.paths import LollmsPaths
|
||||
|
||||
global_path = Path(__file__).parent.parent.parent / "global_paths_cfg.yaml"
|
||||
ASCIIColors.yellow(f"global_path: {global_path}")
|
||||
lollms_paths = LollmsPaths(global_path)
|
||||
output_folder = lollms_paths.personal_outputs_path/"audio_out"
|
||||
output_folder = lollms_paths.personal_outputs_path / "audio_out"
|
||||
|
||||
ASCIIColors.red(".____ ________ .____ .____ _____ _________ ____ __________________________________ ")
|
||||
ASCIIColors.red("| | \_____ \ | | | | / \ / _____/ \ \/ /\__ ___/\__ ___/ _____/ ")
|
||||
ASCIIColors.red("| | / | \| | | | / \ / \ \_____ \ ______ \ / | | | | \_____ \ ")
|
||||
ASCIIColors.red("| |___/ | \ |___| |___/ Y \/ \ /_____/ / \ | | | | / \ ")
|
||||
ASCIIColors.red("|_______ \_______ /_______ \_______ \____|__ /_______ / /___/\ \ |____| |____| /_______ / ")
|
||||
ASCIIColors.red(" \/ \/ \/ \/ \/ \/ \_/ \/ ")
|
||||
ASCIIColors.red(
|
||||
".____ ________ .____ .____ _____ _________ ____ __________________________________ "
|
||||
)
|
||||
ASCIIColors.red(
|
||||
"| | \_____ \ | | | | / \ / _____/ \ \/ /\__ ___/\__ ___/ _____/ "
|
||||
)
|
||||
ASCIIColors.red(
|
||||
"| | / | \| | | | / \ / \ \_____ \ ______ \ / | | | | \_____ \ "
|
||||
)
|
||||
ASCIIColors.red(
|
||||
"| |___/ | \ |___| |___/ Y \/ \ /_____/ / \ | | | | / \ "
|
||||
)
|
||||
ASCIIColors.red(
|
||||
"|_______ \_______ /_______ \_______ \____|__ /_______ / /___/\ \ |____| |____| /_______ / "
|
||||
)
|
||||
ASCIIColors.red(
|
||||
" \/ \/ \/ \/ \/ \/ \_/ \/ "
|
||||
)
|
||||
|
||||
ASCIIColors.red(" Forked from daswer123's XTTS server")
|
||||
ASCIIColors.red(" Integration in lollms by ParisNeo using daswer123's webapi ")
|
||||
|
||||
subprocess.Popen(["python", "-m", "xtts_api_server", "-o", f"{output_folder}", "-sf", f"{lollms_paths.custom_voices_path}"])
|
||||
subprocess.Popen(
|
||||
[
|
||||
"python",
|
||||
"-m",
|
||||
"xtts_api_server",
|
||||
"-o",
|
||||
f"{output_folder}",
|
||||
"-sf",
|
||||
f"{lollms_paths.custom_voices_path}",
|
||||
]
|
||||
)
|
||||
|
@ -1,28 +1,30 @@
|
||||
import argparse
|
||||
import csv
|
||||
|
||||
|
||||
# Define a function to split text into blocks
|
||||
def split_text_into_blocks(text):
|
||||
return text.split('\n')
|
||||
return text.split("\n")
|
||||
|
||||
|
||||
# Define the main function
|
||||
def process_text_file(input_file, output_file=None):
|
||||
# If output_file is not provided, generate a default output file name based on the input file name
|
||||
output_file = output_file or input_file.split('.')[0] + '.csv'
|
||||
output_file = output_file or input_file.split(".")[0] + ".csv"
|
||||
|
||||
# Read the text from the input file
|
||||
with open(input_file, 'r', encoding='utf-8') as input_file:
|
||||
with open(input_file, "r", encoding="utf-8") as input_file:
|
||||
text_content = input_file.read()
|
||||
|
||||
# Split the text into blocks
|
||||
blocks = split_text_into_blocks(text_content)
|
||||
|
||||
# Create a list of dictionaries with id and text for each block
|
||||
data = [{'id': i, 'text': block} for i, block in enumerate(blocks)]
|
||||
data = [{"id": i, "text": block} for i, block in enumerate(blocks)]
|
||||
|
||||
# Write the data to a CSV file
|
||||
with open(output_file, 'w', newline='', encoding='utf-8') as output_file:
|
||||
fieldnames = ['id', 'text']
|
||||
with open(output_file, "w", newline="", encoding="utf-8") as output_file:
|
||||
fieldnames = ["id", "text"]
|
||||
writer = csv.DictWriter(output_file, fieldnames=fieldnames)
|
||||
|
||||
# Write the header row
|
||||
@ -33,13 +35,16 @@ def process_text_file(input_file, output_file=None):
|
||||
|
||||
print(f'CSV file "{output_file}" has been created.')
|
||||
|
||||
|
||||
# Check if the script is being run as a standalone program
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description='Split text from a file into blocks and create a CSV file.')
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Split text from a file into blocks and create a CSV file."
|
||||
)
|
||||
|
||||
# Add arguments for input and output file names
|
||||
parser.add_argument('input_file', help='Input text file name')
|
||||
parser.add_argument('-o', '--output_file', help='Output CSV file name')
|
||||
parser.add_argument("input_file", help="Input text file name")
|
||||
parser.add_argument("-o", "--output_file", help="Output CSV file name")
|
||||
|
||||
# Parse the command-line arguments
|
||||
args = parser.parse_args()
|
||||
|
@ -1,12 +1,13 @@
|
||||
import subprocess
|
||||
import platform
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
from ascii_colors import get_trace_exception, trace_exception
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Create a temporary file.
|
||||
code = "print('Hello world');input('hi')"
|
||||
message_id=102
|
||||
message_id = 102
|
||||
root_folder = Path(r"E:\lollms\discussion_databases\html stuff\105")
|
||||
root_folder.mkdir(parents=True, exist_ok=True)
|
||||
tmp_file = root_folder / f"ai_code_{message_id}.py"
|
||||
@ -17,14 +18,39 @@ if __name__ == "__main__":
|
||||
# Determine the platform and open a terminal to execute the Python code.
|
||||
system = platform.system()
|
||||
if system == "Windows":
|
||||
subprocess.Popen(f"""start cmd /k "cd /d {root_folder} && python {tmp_file} && pause" """, shell=True)
|
||||
subprocess.Popen(
|
||||
f"""start cmd /k "cd /d {root_folder} && python {tmp_file} && pause" """,
|
||||
shell=True,
|
||||
)
|
||||
elif system == "Darwin": # macOS
|
||||
subprocess.Popen(["open", "-a", "Terminal", f'cd "{root_folder}" && python "{tmp_file}"'], shell=True)
|
||||
subprocess.Popen(
|
||||
[
|
||||
"open",
|
||||
"-a",
|
||||
"Terminal",
|
||||
f'cd "{root_folder}" && python "{tmp_file}"',
|
||||
],
|
||||
shell=True,
|
||||
)
|
||||
elif system == "Linux":
|
||||
subprocess.Popen(["x-terminal-emulator", "-e", f'bash -c "cd \\"{root_folder}\\" && python \\"{tmp_file}\\"; exec bash"'], shell=True)
|
||||
subprocess.Popen(
|
||||
[
|
||||
"x-terminal-emulator",
|
||||
"-e",
|
||||
f'bash -c "cd \\"{root_folder}\\" && python \\"{tmp_file}\\"; exec bash"',
|
||||
],
|
||||
shell=True,
|
||||
)
|
||||
else:
|
||||
raise Exception(f"Unsupported platform: {system}")
|
||||
|
||||
except Exception as ex:
|
||||
error_message = f"Error executing Python code: {ex}"
|
||||
error_json = {"output": "<div class='text-red-500'>" + error_message + "\n" + get_trace_exception(ex) + "</div>", "execution_time": 0}
|
||||
error_json = {
|
||||
"output": "<div class='text-red-500'>"
|
||||
+ error_message
|
||||
+ "\n"
|
||||
+ get_trace_exception(ex)
|
||||
+ "</div>",
|
||||
"execution_time": 0,
|
||||
}
|
||||
|
@ -1,10 +1,10 @@
|
||||
from llama_cpp import Llama
|
||||
|
||||
llm = Llama(
|
||||
model_path=r"E:\lollms\test_lollms\personal_data\models\gguf\Mistral-7B-Instruct-v0.3-GGUF\Mistral-7B-Instruct-v0.3-IQ1_M.gguf",
|
||||
n_gpu_layers=-1, # Uncomment to use GPU acceleration
|
||||
# seed=1337, # Uncomment to set a specific seed
|
||||
n_ctx=4096, # Uncomment to increase the context window
|
||||
model_path=r"E:\lollms\test_lollms\personal_data\models\gguf\Mistral-7B-Instruct-v0.3-GGUF\Mistral-7B-Instruct-v0.3-IQ1_M.gguf",
|
||||
n_gpu_layers=-1, # Uncomment to use GPU acceleration
|
||||
# seed=1337, # Uncomment to set a specific seed
|
||||
n_ctx=4096, # Uncomment to increase the context window
|
||||
)
|
||||
output = llm(
|
||||
"""
|
||||
@ -43,11 +43,11 @@ How can I help you today?
|
||||
!@>lollmz: Hello, how can I assist you today?
|
||||
!@>User: write a poem about keyboards
|
||||
!@>LoLLMZ:
|
||||
""", # Prompt
|
||||
max_tokens=4096, # Generate up to 32 tokens, set to None to generate up to the end of the context window
|
||||
stop=["!@>"], # Stop generating just before the model would generate a new question
|
||||
echo=True, # Echo the prompt back in the output
|
||||
stream=True
|
||||
) # Generate a completion, can also call create_completion
|
||||
""", # Prompt
|
||||
max_tokens=4096, # Generate up to 32 tokens, set to None to generate up to the end of the context window
|
||||
stop=["!@>"], # Stop generating just before the model would generate a new question
|
||||
echo=True, # Echo the prompt back in the output
|
||||
stream=True,
|
||||
) # Generate a completion, can also call create_completion
|
||||
for chunk in output:
|
||||
print(chunk["choices"][0]["text"],end="", flush=True)
|
||||
print(chunk["choices"][0]["text"], end="", flush=True)
|
||||
|
@ -1,42 +1,48 @@
|
||||
from fastapi.testclient import TestClient
|
||||
from main import app # Replace with the actual name of your FastAPI app
|
||||
import json
|
||||
|
||||
from fastapi.testclient import TestClient
|
||||
from main import app # Replace with the actual name of your FastAPI app
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
def test_open_code_in_vs_code_valid():
|
||||
response = client.post(
|
||||
"/open_code_in_vs_code",
|
||||
data=json.dumps({
|
||||
"discussion_id": 1,
|
||||
"message_id": 1,
|
||||
"code": "print('Hello, World!')"
|
||||
}),
|
||||
data=json.dumps(
|
||||
{"discussion_id": 1, "message_id": 1, "code": "print('Hello, World!')"}
|
||||
),
|
||||
headers={"content-type": "application/json"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json()["status"] == True
|
||||
|
||||
|
||||
def test_open_code_in_vs_code_invalid():
|
||||
response = client.post(
|
||||
"/open_code_in_vs_code",
|
||||
data=json.dumps({
|
||||
"discussion_id": "1; copy file.exe /some/path/",
|
||||
"message_id": "1",
|
||||
"code": "print('Hello, World!')"
|
||||
}),
|
||||
data=json.dumps(
|
||||
{
|
||||
"discussion_id": "1; copy file.exe /some/path/",
|
||||
"message_id": "1",
|
||||
"code": "print('Hello, World!')",
|
||||
}
|
||||
),
|
||||
headers={"content-type": "application/json"},
|
||||
)
|
||||
assert response.status_code == 422 # Unprocessable Entity
|
||||
|
||||
|
||||
def test_open_code_in_vs_code_attack():
|
||||
response = client.post(
|
||||
"/open_code_in_vs_code",
|
||||
data=json.dumps({
|
||||
"discussion_id": 1,
|
||||
"message_id": 1,
|
||||
"code": "print('This is a harmless test.')" # Dangerous code
|
||||
}),
|
||||
data=json.dumps(
|
||||
{
|
||||
"discussion_id": 1,
|
||||
"message_id": 1,
|
||||
"code": "print('This is a harmless test.')", # Dangerous code
|
||||
}
|
||||
),
|
||||
headers={"content-type": "application/json"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
@ -1,12 +1,12 @@
import requests

# URL of your API endpoint
url = 'http://localhost:9600/user_infos/'
url = "http://localhost:9600/user_infos/"

# Attempt to access etc/passwd
payload = {'path': '../../../../etc/passwd'}
payload = {"path": "../../../../etc/passwd"}

response = requests.get(url, params=payload)

print(f'Response status: {response.status_code}')
print(f'Response body: {response.text}')
print(f"Response status: {response.status_code}")
print(f"Response body: {response.text}")
@ -9,13 +9,13 @@ headers = {
|
||||
# The exact raw payload as specified
|
||||
data = (
|
||||
"-----------------------------284178091740602105783377960069\r\n"
|
||||
"Content-Disposition: form-data; name=\"uploadFile\"; filename=\"test.txt\"\r\n"
|
||||
'Content-Disposition: form-data; name="uploadFile"; filename="test.txt"\r\n'
|
||||
"Content-Type: text/plain\r\n\r\n"
|
||||
"Hello I am test\r\n"
|
||||
"-----------------------------284178091740602105783377960069--" + '4' * num + "\r\n"
|
||||
"-----------------------------284178091740602105783377960069--" + "4" * num + "\r\n"
|
||||
)
|
||||
|
||||
response = requests.post(url, headers=headers, data=data.encode('utf-8'), verify=False)
|
||||
response = requests.post(url, headers=headers, data=data.encode("utf-8"), verify=False)
|
||||
|
||||
print(response.status_code)
|
||||
print(response.text)
|
||||
print(response.text)
|
||||
|
@ -1,32 +1,35 @@
|
||||
import requests
|
||||
from requests_toolbelt import MultipartEncoder
|
||||
import io
|
||||
import zipfile
|
||||
|
||||
import requests
|
||||
from requests_toolbelt import MultipartEncoder
|
||||
|
||||
|
||||
def test_valid_file_upload():
|
||||
base_url = "http://localhost:9600/upload_app"
|
||||
client_id = "3qxKnpFF8aJU8KsZAAAH" # Replace with a valid client id
|
||||
url = f"{base_url}?client_id={client_id}" # Add client_id as a query parameter
|
||||
|
||||
|
||||
# Create a test zip file in memory
|
||||
zip_buffer = io.BytesIO()
|
||||
with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zipf:
|
||||
zipf.writestr('index.html', '<html><body>Test</body></html>')
|
||||
zipf.writestr('description.yaml', 'name: TestApp\n')
|
||||
zipf.writestr('icon.png', b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x06\x00\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\nIDATx\x9cc\x00\x01\x00\x00\x05\x00\x01\r\n-\xb4\x00\x00\x00\x00IEND\xaeB`\x82')
|
||||
|
||||
with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zipf:
|
||||
zipf.writestr("index.html", "<html><body>Test</body></html>")
|
||||
zipf.writestr("description.yaml", "name: TestApp\n")
|
||||
zipf.writestr(
|
||||
"icon.png",
|
||||
b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x06\x00\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\nIDATx\x9cc\x00\x01\x00\x00\x05\x00\x01\r\n-\xb4\x00\x00\x00\x00IEND\xaeB`\x82",
|
||||
)
|
||||
|
||||
zip_buffer.seek(0)
|
||||
|
||||
try:
|
||||
# Prepare the multipart/form-data request
|
||||
m = MultipartEncoder(
|
||||
fields={'file': ('test_upload.zip', zip_buffer, 'application/zip')}
|
||||
fields={"file": ("test_upload.zip", zip_buffer, "application/zip")}
|
||||
)
|
||||
|
||||
headers = {
|
||||
'Content-Type': m.content_type
|
||||
}
|
||||
|
||||
headers = {"Content-Type": m.content_type}
|
||||
|
||||
# Send the POST request
|
||||
response = requests.post(url, data=m, headers=headers)
|
||||
|
||||
@ -44,8 +47,12 @@ def test_valid_file_upload():
|
||||
print("Could not parse error details as JSON")
|
||||
|
||||
# Assert the expected behavior
|
||||
assert response.status_code == 200, f"Expected status code 200, but got {response.status_code}"
|
||||
assert "App 'TestApp' uploaded successfully" in response.text, "File upload confirmation message not found in response"
|
||||
assert (
|
||||
response.status_code == 200
|
||||
), f"Expected status code 200, but got {response.status_code}"
|
||||
assert (
|
||||
"App 'TestApp' uploaded successfully" in response.text
|
||||
), "File upload confirmation message not found in response"
|
||||
|
||||
print("Test passed successfully!")
|
||||
|
||||
@ -54,5 +61,6 @@ def test_valid_file_upload():
|
||||
except AssertionError as e:
|
||||
print(f"Test failed: {e}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_valid_file_upload()
|
||||
|
@ -11,19 +11,18 @@ This code is trying to exploit the path traversal vulnerability in your endpoint
|
||||
To protect against this type of attack, you should implement the measures I mentioned earlier, such as file validation, randomized file names, and using a dedicated directory for storing uploaded files. Additionally, make sure to sanitize and validate all user input to prevent malicious file paths from being accepted.
|
||||
"""
import requests
|
||||
|
||||
# Endpoint URL
|
||||
url = 'http://localhost:9600/upload_avatar'
|
||||
url = "http://localhost:9600/upload_avatar"
|
||||
|
||||
# Path to the file you want to upload
|
||||
file_path = 'test.txt'
|
||||
file_path = "test.txt"
|
||||
|
||||
# Open the file in binary mode and send it as 'avatar'
|
||||
with open(file_path, 'rb') as f:
|
||||
files = {'avatar': ('../../../../../../../../tmp/teeest.txt', f)}
|
||||
with open(file_path, "rb") as f:
|
||||
files = {"avatar": ("../../../../../../../../tmp/teeest.txt", f)}
|
||||
response = requests.post(url, files=files)
|
||||
|
||||
# Print the response from the server
|
||||
print(response.json())
|
||||
print(response.json())
|
||||
|
@ -3,11 +3,7 @@ import requests
|
||||
url = "http://localhost:9600/open_code_in_vs_code"
|
||||
|
||||
# Valid request
|
||||
valid_payload = {
|
||||
"discussion_id": "0",
|
||||
"message_id": "0",
|
||||
"code": "print('test')"
|
||||
}
|
||||
valid_payload = {"discussion_id": "0", "message_id": "0", "code": "print('test')"}
|
||||
response = requests.post(url, json=valid_payload)
|
||||
print("Valid request response:", response.json())
|
||||
|
||||
@ -15,7 +11,7 @@ print("Valid request response:", response.json())
|
||||
invalid_payload = {
|
||||
"discussion_id": "../invalid_discussion",
|
||||
"message_id": "../invalid_message",
|
||||
"code": "print('test')"
|
||||
"code": "print('test')",
|
||||
}
|
||||
response = requests.post(url, json=invalid_payload)
|
||||
print("Invalid request response:", response.json())
|
||||
|
@ -1,6 +1,8 @@
|
||||
import os
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
def test_vulnerabilities():
|
||||
"""
|
||||
This function tests for two vulnerabilities in the '/open_file' endpoint.
|
||||
@ -13,7 +15,7 @@ def test_vulnerabilities():
|
||||
payload = "../../../../../../../../../../etc/passwd"
|
||||
url = "http://localhost:9600/open_file"
|
||||
data = {"path": payload}
|
||||
|
||||
|
||||
response = requests.post(url, json=data)
|
||||
if response.status_code == 200:
|
||||
if "root:x" in response.text:
|
||||
@ -26,7 +28,7 @@ def test_vulnerabilities():
|
||||
# Test command injection vulnerability
|
||||
payload = "&& echo 'Command Injection Test'"
|
||||
data = {"path": payload}
|
||||
|
||||
|
||||
response = requests.post(url, json=data)
|
||||
if response.status_code == 200:
|
||||
if "Command Injection Test" in response.text:
|
||||
@ -36,5 +38,6 @@ def test_vulnerabilities():
|
||||
else:
|
||||
print("Error: ", response.status_code)
|
||||
|
||||
|
||||
# Call the test function
|
||||
test_vulnerabilities()
|
||||
|
@ -1,35 +1,38 @@
|
||||
import requests
|
||||
import unittest
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
class TestUserInfosEndpoint(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.base_url = 'http://127.0.0.1:9600'
|
||||
self.base_url = "http://127.0.0.1:9600"
|
||||
|
||||
def test_user_infos_endpoint(self):
|
||||
print("Testing user_infos endpoint...")
|
||||
|
||||
# Test valid path
|
||||
print("Testing valid path...")
|
||||
valid_path = '0dbb0245-7b6b-4834-835d-4d9d460b336c.png'
|
||||
response = requests.get(f'{self.base_url}/user_infos/{valid_path}')
|
||||
valid_path = "0dbb0245-7b6b-4834-835d-4d9d460b336c.png"
|
||||
response = requests.get(f"{self.base_url}/user_infos/{valid_path}")
|
||||
self.assertEqual(response.status_code, 200)
|
||||
print(f"Status code: {response.status_code} (expected: 200)\n")
|
||||
|
||||
# Test path starting with a double slash
|
||||
print("Testing path starting with a double slash...")
|
||||
invalid_path = '//Windows/win.ini'
|
||||
response = requests.get(f'{self.base_url}/user_infos/{invalid_path}')
|
||||
invalid_path = "//Windows/win.ini"
|
||||
response = requests.get(f"{self.base_url}/user_infos/{invalid_path}")
|
||||
print(f"Response content: {response.content}\n")
|
||||
self.assertEqual(response.status_code, 400)
|
||||
print(f"Status code: {response.status_code} (expected: 400)\n")
|
||||
|
||||
# Test path containing suspicious patterns
|
||||
print("Testing path containing suspicious patterns...")
|
||||
suspicious_path = '../../etc/passwd'
|
||||
response = requests.get(f'{self.base_url}/user_infos/{suspicious_path}')
|
||||
suspicious_path = "../../etc/passwd"
|
||||
response = requests.get(f"{self.base_url}/user_infos/{suspicious_path}")
|
||||
print(f"Response content: {response.content}\n")
|
||||
self.assertEqual(response.status_code, 400)
|
||||
print(f"Status code: {response.status_code} (expected: 400)\n")
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
@ -1,10 +1,11 @@
|
||||
'''
|
||||
"""
|
||||
This python script is performing a Denial of Service (DoS) attack on your endpoint.
|
||||
It is creating a large number of requests (1000 in this case) to the '/open_discussion_folder' API endpoint of your server.
|
||||
This could potentially overload your server, making it unable to serve normal, legitimate requests.
|
||||
|
||||
Please make sure you test this only on a virtual machine since it can overload your own PC and crush it
|
||||
'''
|
||||
"""
|
||||
|
||||
import requests
|
||||
|
||||
IP_ADDRESS = "localhost"
|
||||
@ -12,7 +13,9 @@ PORT = 9600
|
||||
|
||||
for i in range(1000):
|
||||
data = {
|
||||
"discussion_id":f"{i}",
|
||||
"discussion_id": f"{i}",
|
||||
}
|
||||
response = requests.post(f"http://{IP_ADDRESS}:{str(PORT)}/open_discussion_folder", json=data)
|
||||
print(i, response.json())
|
||||
response = requests.post(
|
||||
f"http://{IP_ADDRESS}:{str(PORT)}/open_discussion_folder", json=data
|
||||
)
|
||||
print(i, response.json())
|
||||
|
@ -1,11 +1,10 @@
import requests

IP_ADDRESS = "https://localhost" #replace me
IP_ADDRESS = "https://localhost" # replace me
PORT = 9600

data = {
    "id": "0 OR 1=1",
    "client_id": 0
}
response = requests.post(f"http://{IP_ADDRESS}:{str(PORT)}/delete_discussion", json=data)
print(response.json())
data = {"id": "0 OR 1=1", "client_id": 0}
response = requests.post(
    f"http://{IP_ADDRESS}:{str(PORT)}/delete_discussion", json=data
)
print(response.json())
@ -1,23 +1,30 @@
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from ascii_colors import ASCIIColors
|
||||
from fastapi import HTTPException
|
||||
from pathlib import Path
|
||||
import re
|
||||
import pytest
|
||||
def sanitize_path_from_endpoint(path: str, error_text="A suspected LFI attack detected. The path sent to the server has suspicious elements in it!", exception_text="Invalid path!"):
|
||||
|
||||
|
||||
def sanitize_path_from_endpoint(
|
||||
path: str,
|
||||
error_text="A suspected LFI attack detected. The path sent to the server has suspicious elements in it!",
|
||||
exception_text="Invalid path!",
|
||||
):
|
||||
if path.strip().startswith("/"):
|
||||
raise HTTPException(status_code=400, detail=exception_text)
|
||||
# Fix the case of "/" at the beginning on the path
|
||||
if path is None:
|
||||
return path
|
||||
|
||||
|
||||
# Regular expression to detect patterns like "...." and multiple forward slashes
|
||||
suspicious_patterns = re.compile(r'(\.\.+)|(/+/)')
|
||||
|
||||
suspicious_patterns = re.compile(r"(\.\.+)|(/+/)")
|
||||
|
||||
if suspicious_patterns.search(path) or Path(path).is_absolute():
|
||||
ASCIIColors.error(error_text)
|
||||
raise HTTPException(status_code=400, detail=exception_text)
|
||||
|
||||
path = path.lstrip('/')
|
||||
|
||||
path = path.lstrip("/")
|
||||
return path
|
||||
|
||||
|
||||
@ -25,15 +32,16 @@ def test_sanitize_path_from_endpoint():
|
||||
# Test a valid path
|
||||
valid_path = "example/path"
|
||||
assert sanitize_path_from_endpoint(valid_path) == "example/path"
|
||||
|
||||
|
||||
# Test a path with suspicious elements
|
||||
suspicious_path = "/D:/POC/secret.txt"
|
||||
|
||||
#suspicious_path = "/images//D:/POC/secret.txt"
|
||||
|
||||
# suspicious_path = "/images//D:/POC/secret.txt"
|
||||
with pytest.raises(HTTPException):
|
||||
sanitize_path_from_endpoint(suspicious_path)
|
||||
|
||||
|
||||
# Add more test cases as needed
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_sanitize_path_from_endpoint()
|
||||
|
@ -1,5 +1,4 @@
|
||||
import pytest
|
||||
|
||||
from app_old import app
|
||||
|
||||
|
||||
|
@ -1,26 +1,25 @@
|
||||
import requests
|
||||
import json
|
||||
|
||||
import requests
|
||||
|
||||
url = "http://localhost:1234/v1/chat/completions"
|
||||
|
||||
payload = {
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "You are a research engineer specialized in the applications of AI in robotics."
|
||||
"content": "You are a research engineer specialized in the applications of AI in robotics.",
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What are some popular AI frameworks for robotics?"
|
||||
}
|
||||
"content": "What are some popular AI frameworks for robotics?",
|
||||
},
|
||||
],
|
||||
"max_tokens": 100,
|
||||
"temperature": 0.5
|
||||
"temperature": 0.5,
|
||||
}
|
||||
|
||||
headers = {
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
headers = {"Content-Type": "application/json"}
|
||||
|
||||
response = requests.post(url, data=json.dumps(payload), headers=headers)
|
||||
|
||||
@ -30,4 +29,4 @@ if response.status_code == 200:
|
||||
completion = data["choices"][0]["message"]["content"]
|
||||
print(completion)
|
||||
else:
|
||||
print("Error:", response.status_code)
|
||||
print("Error:", response.status_code)
|
||||
|
@ -1,27 +1,26 @@
|
||||
import requests
|
||||
import json
|
||||
|
||||
import requests
|
||||
|
||||
url = "http://localhost:1234/v1/chat/completions"
|
||||
|
||||
payload = {
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "You are a research engineer specialized in the applications of AI in robotics."
|
||||
"content": "You are a research engineer specialized in the applications of AI in robotics.",
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "List a number of libraries I can use for robotics."
|
||||
}
|
||||
"content": "List a number of libraries I can use for robotics.",
|
||||
},
|
||||
],
|
||||
"max_tokens": 100,
|
||||
"temperature": 0.5,
|
||||
"stream": True
|
||||
"stream": True,
|
||||
}
|
||||
|
||||
headers = {
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
headers = {"Content-Type": "application/json"}
|
||||
|
||||
response = requests.post(url, data=json.dumps(payload), headers=headers, stream=True)
|
||||
|
||||
|
@ -1,17 +1,12 @@
import requests
import json

import requests

url = "http://localhost:1234/lollms_generate"

payload = {
    "prompt": "Once upon a time, ",
    "temperature": 0.1,
    "stream": True
}
payload = {"prompt": "Once upon a time, ", "temperature": 0.1, "stream": True}

headers = {
    "Content-Type": "application/json"
}
headers = {"Content-Type": "application/json"}

response = requests.post(url, data=json.dumps(payload), headers=headers, stream=True)

@ -19,6 +14,6 @@ if response.status_code == 200:
    for response_chunk in response.iter_lines():
        if response_chunk:
            rc = response_chunk.decode()
            print(rc,end="",flush=True)
            print(rc, end="", flush=True)
else:
    print("Error:", response.status_code)
@ -1,17 +1,12 @@
|
||||
import requests
|
||||
import json
|
||||
|
||||
import requests
|
||||
|
||||
url = "http://localhost:9600/delete_message"
|
||||
|
||||
payload = {
|
||||
"client_id": "test",
|
||||
"id": 283
|
||||
}
|
||||
payload = {"client_id": "test", "id": 283}
|
||||
|
||||
headers = {
|
||||
'accept': 'application/json',
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
headers = {"accept": "application/json", "Content-Type": "application/json"}
|
||||
|
||||
response = requests.post(url, headers=headers, data=json.dumps(payload))
|
||||
|
||||
|
@ -1,78 +1,125 @@
|
||||
import os
|
||||
import tkinter as tk
|
||||
import customtkinter as ctk
|
||||
import os
|
||||
import torch
|
||||
import torchaudio
|
||||
from transformers import AutoModelForCausalLM, AutoTokenizer
|
||||
from tortoise.api import TextToSpeech
|
||||
from tortoise.utils.audio import load_voice
|
||||
import vlc
|
||||
from tkVideoPlayer import TkinterVideo
|
||||
|
||||
# Setup the app
|
||||
import customtkinter as ctk
|
||||
import torch
|
||||
import torchaudio
|
||||
import vlc
|
||||
from tkVideoPlayer import TkinterVideo
|
||||
from tortoise.api import TextToSpeech
|
||||
from tortoise.utils.audio import load_voice
|
||||
from transformers import AutoModelForCausalLM, AutoTokenizer
|
||||
|
||||
# Setup the app
|
||||
app = tk.Tk()
|
||||
app.geometry("600x550")
|
||||
app.title("Rap God v2.0")
|
||||
ctk.set_appearance_mode("dark")
|
||||
ctk.set_appearance_mode("dark")
|
||||
|
||||
promptFrame = tk.Frame()
|
||||
promptFrame.pack(padx=10, pady=10)
|
||||
buttonFrame = tk.Frame()
|
||||
buttonFrame.pack()
|
||||
|
||||
prompt = ctk.CTkEntry(promptFrame, height=40, width=300, text_color="black", fg_color="white")
|
||||
prompt.pack(side='left', padx=10)
|
||||
lyrics = ctk.CTkEntry(None,height=240, width=500, text_color="black", fg_color="white")
|
||||
prompt = ctk.CTkEntry(
|
||||
promptFrame, height=40, width=300, text_color="black", fg_color="white"
|
||||
)
|
||||
prompt.pack(side="left", padx=10)
|
||||
lyrics = ctk.CTkEntry(None, height=240, width=500, text_color="black", fg_color="white")
|
||||
lyrics.pack()
|
||||
|
||||
def generateText():
|
||||
model = AutoModelForCausalLM.from_pretrained('stormzy').to("cuda")
|
||||
tokenizer = AutoTokenizer.from_pretrained('distilgpt2', use_fast=True)
tokens = tokenizer.encode(prompt.get(), return_tensors='pt')

def generateText():
model = AutoModelForCausalLM.from_pretrained("stormzy").to("cuda")
tokenizer = AutoTokenizer.from_pretrained("distilgpt2", use_fast=True)
tokens = tokenizer.encode(prompt.get(), return_tensors="pt")
tokens = tokens.to("cuda")
attn_mask = torch.ones_like(tokens)
out = model.generate(tokens, attention_mask=attn_mask, num_beams=5, early_stopping=True, max_length=200, no_repeat_ngram_size=2)
attn_mask = torch.ones_like(tokens)
out = model.generate(
tokens,
attention_mask=attn_mask,
num_beams=5,
early_stopping=True,
max_length=200,
no_repeat_ngram_size=2,
)
rap = tokenizer.decode(out[0])
lyrics.delete(0, tk.END)
lyrics.insert(0, rap)

def generateAudio():
voice_samples, conditioning_latents = load_voice('stormzy', extra_voice_dirs=['stormzy_samples'])

def generateAudio():
voice_samples, conditioning_latents = load_voice(
"stormzy", extra_voice_dirs=["stormzy_samples"]
)
tts = TextToSpeech()
gen = tts.tts_with_preset(lyrics.get(), voice_samples=voice_samples, conditioning_latents=conditioning_latents, preset='ultra_fast')
torchaudio.save('generated.wav', gen.squeeze(0).cpu(), 24000)
gen = tts.tts_with_preset(
lyrics.get(),
voice_samples=voice_samples,
conditioning_latents=conditioning_latents,
preset="ultra_fast",
)
torchaudio.save("generated.wav", gen.squeeze(0).cpu(), 24000)

def playAudio():
if os.path.exists('generated.wav'):
p = vlc.MediaPlayer('file:///generated.wav')

def playAudio():
if os.path.exists("generated.wav"):
p = vlc.MediaPlayer("file:///generated.wav")
p.play()

videoplayer = TkinterVideo(master=app, scaled=True, keep_aspect=True)
def generateVideo():

videoplayer = TkinterVideo(master=app, scaled=True, keep_aspect=True)

def generateVideo():
os.system("xcopy /y generated.wav .\MakeItTalk\examples")
os.system("cd MakeItTalk & python generate.py")
os.system("cd MakeItTalk & python generate.py")

if os.path.exists('generated.wav'):
p = vlc.MediaPlayer('file:///generated.wav')
if os.path.exists("generated.wav"):
p = vlc.MediaPlayer("file:///generated.wav")
p.play()

videoplayer.load("MakeItTalk\examples\stormzy_pred_fls_generated_audio_embed.mp4")
videoplayer.pack(fill="both", expand=True)
videoplayer.pack(fill="both", expand=True)
videoplayer.play()

genTextButton = ctk.CTkButton(
promptFrame,
height=40,
width=120,
text_color="black",
text="Generate",
command=generateText,
)
genTextButton.pack(side="right")
genAudioButton = ctk.CTkButton(
buttonFrame,
height=40,
width=120,
text_color="black",
text="Syn Audio",
command=generateAudio,
)
genAudioButton.pack(side="left", padx=10)
playAudioButton = ctk.CTkButton(
buttonFrame,
height=40,
width=120,
text_color="black",
text="Play Rap",
command=playAudio,
)
playAudioButton.pack(side="left", padx=10)
genVideoButton = ctk.CTkButton(
buttonFrame,
height=40,
width=120,
text_color="black",
text="Syn Video",
command=generateVideo,
)
genVideoButton.pack(side="left", padx=10)

genTextButton =ctk.CTkButton(promptFrame, height=40, width=120, text_color="black", text="Generate", command=generateText)
genTextButton.pack(side='right')
genAudioButton =ctk.CTkButton(buttonFrame, height=40, width=120, text_color="black", text="Syn Audio", command=generateAudio)
genAudioButton.pack(side='left', padx=10)
playAudioButton =ctk.CTkButton(buttonFrame, height=40, width=120, text_color="black", text="Play Rap", command=playAudio)
playAudioButton.pack(side='left', padx=10)
genVideoButton =ctk.CTkButton(buttonFrame, height=40, width=120, text_color="black", text="Syn Video", command=generateVideo)
genVideoButton.pack(side='left', padx=10)

# Run the app
app.mainloop()
# Run the app
app.mainloop()
@@ -1,42 +1,59 @@
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import matplotlib.pyplot as plt

def parse_and_visualize_weird_palette(palette_str):
# Split the input string into lines
lines = palette_str.split(';')
lines = palette_str.split(";")

# Initialize an empty dictionary to hold the color information
colors = {}

# Parse each line
for line in lines:
if not line.strip():
continue # Skip empty lines
# Split each line into name and color value
name, color = line.split(':')
name, color = line.split(":")
# Remove leading/trailing whitespace and replace dashes with spaces in the name for readability
name = name.strip().replace('--color-', '').replace('-', ' ').title()
name = name.strip().replace("--color-", "").replace("-", " ").title()
color = color.strip()
# Add to the colors dictionary
colors[name] = color

# Now, we have the colors dictionary filled, we can visualize it
# Create a figure and a grid of subplots
fig, ax = plt.subplots(figsize=(10, 8))

# Remove the axes
ax.axis('off')
ax.axis("off")

# Title
ax.set_title('Weird Color Palette Visualization', fontsize=16)
ax.set_title("Weird Color Palette Visualization", fontsize=16)

# Plot each color as a rectangle
for i, (name, color) in enumerate(colors.items()):
row = i // 4
col = i % 4
rect = patches.Rectangle((col * 2.5, row * -1.5), 2, 1, linewidth=1, edgecolor='none', facecolor=color)
rect = patches.Rectangle(
(col * 2.5, row * -1.5),
2,
1,
linewidth=1,
edgecolor="none",
facecolor=color,
)
ax.add_patch(rect)
ax.text(col * 2.5 + 1, row * -1.5 + 0.5, name, ha='center', va='center', fontsize=9, color='white' if i < 14 else 'black', weight='bold')
ax.text(
col * 2.5 + 1,
row * -1.5 + 0.5,
name,
ha="center",
va="center",
fontsize=9,
color="white" if i < 14 else "black",
weight="bold",
)

# Adjust the limits and aspect of the plot
ax.set_xlim(0, 10)
@@ -45,6 +62,7 @@ def parse_and_visualize_weird_palette(palette_str):

plt.show()

# Example of a weird palette
weird_palette_str = """
--color-primary: #88c999;
@@ -1,9 +1,13 @@
import sys
from PyQt5.QtWidgets import (QApplication, QMainWindow, QWidget, QVBoxLayout, QHBoxLayout,
QPushButton, QLineEdit, QTableView, QHeaderView, QMessageBox,
QFileDialog, QListWidget, QSplitter, QDialog, QLabel, QFormLayout)

from PyQt5.QtCore import QSortFilterProxyModel, Qt
from PyQt5.QtSql import QSqlDatabase, QSqlTableModel
from PyQt5.QtCore import Qt, QSortFilterProxyModel
from PyQt5.QtWidgets import (QApplication, QDialog, QFileDialog, QFormLayout,
QHBoxLayout, QHeaderView, QLabel, QLineEdit,
QListWidget, QMainWindow, QMessageBox,
QPushButton, QSplitter, QTableView, QVBoxLayout,
QWidget)

class AddRecordDialog(QDialog):
def __init__(self, columns, parent=None):
@@ -24,14 +28,18 @@ class AddRecordDialog(QDialog):
self.layout.addRow(self.submit_button)

def get_values(self):
return {column: line_edit.text() for column, line_edit in self.line_edits.items()}
return {
column: line_edit.text() for column, line_edit in self.line_edits.items()
}

class LiteSQLViewer(QMainWindow):
def __init__(self):
super().__init__()
self.setWindowTitle("LiteSQL Viewer")
self.setGeometry(100, 100, 800, 600)
self.setStyleSheet("""
self.setStyleSheet(
"""
QMainWindow {
background-color: #f0f0f0;
}
@@ -70,7 +78,8 @@ class LiteSQLViewer(QMainWindow):
background-color: #ffffff;
border: 1px solid #ddd;
}
""")
"""
)

self.db = QSqlDatabase.addDatabase("QSQLITE")
self.model = None
@@ -107,7 +116,9 @@ class LiteSQLViewer(QMainWindow):
self.delete_button = QPushButton("Delete Record", self)
self.delete_button.clicked.connect(self.delete_record)
self.scroll_button = QPushButton("Scroll to Bottom", self)
self.scroll_button.clicked.connect(self.scroll_to_bottom) # Connect to scroll method
self.scroll_button.clicked.connect(
self.scroll_to_bottom
) # Connect to scroll method
button_layout.addWidget(self.open_button)
button_layout.addWidget(self.commit_button)
button_layout.addWidget(self.add_button)
@@ -124,8 +135,12 @@ class LiteSQLViewer(QMainWindow):

# Table view
self.table_view = QTableView()
self.table_view.horizontalHeader().setSectionResizeMode(QHeaderView.ResizeToContents)
self.table_view.verticalHeader().setSectionResizeMode(QHeaderView.ResizeToContents)
self.table_view.horizontalHeader().setSectionResizeMode(
QHeaderView.ResizeToContents
)
self.table_view.verticalHeader().setSectionResizeMode(
QHeaderView.ResizeToContents
)
main_layout.addWidget(self.table_view)

self.splitter.addWidget(main_widget)
@@ -134,7 +149,12 @@ class LiteSQLViewer(QMainWindow):
central_widget.setLayout(layout)

def open_database(self):
file_name, _ = QFileDialog.getOpenFileName(self, "Open Database", "", "SQLite Database Files (*.db *.sqlite);;All Files (*)")
file_name, _ = QFileDialog.getOpenFileName(
self,
"Open Database",
"",
"SQLite Database Files (*.db *.sqlite);;All Files (*)",
)
if file_name:
self.db.setDatabaseName(file_name)
if not self.db.open():
@@ -171,10 +191,14 @@ class LiteSQLViewer(QMainWindow):
if not self.model:
QMessageBox.warning(self, "Warning", "No table selected")
return

# Get the column names, excluding the ID field
columns = [self.model.record().fieldName(i) for i in range(self.model.record().count()) if self.model.record().fieldName(i).lower() != "id"]

columns = [
self.model.record().fieldName(i)
for i in range(self.model.record().count())
if self.model.record().fieldName(i).lower() != "id"
]

# Create and show the dialog
dialog = AddRecordDialog(columns, self)
if dialog.exec_() == QDialog.Accepted:
@@ -184,12 +208,14 @@ class LiteSQLViewer(QMainWindow):
def insert_record(self, values):
if not self.model:
return

row = self.model.rowCount()
self.model.insertRow(row)

for column, value in values.items():
self.model.setData(self.model.index(row, self.model.fieldIndex(column)), value)
self.model.setData(
self.model.index(row, self.model.fieldIndex(column)), value
)

def edit_record(self):
if not self.model:
@@ -210,8 +236,12 @@ class LiteSQLViewer(QMainWindow):
if not indexes:
QMessageBox.warning(self, "Warning", "Please select row(s) to delete")
return
confirm = QMessageBox.question(self, "Confirm Deletion", f"Are you sure you want to delete {len(indexes)} row(s)?",
QMessageBox.Yes | QMessageBox.No)
confirm = QMessageBox.question(
self,
"Confirm Deletion",
f"Are you sure you want to delete {len(indexes)} row(s)?",
QMessageBox.Yes | QMessageBox.No,
)
if confirm == QMessageBox.Yes:
for index in sorted(indexes, reverse=True):
source_index = self.proxy_model.mapToSource(index)
@@ -221,15 +251,22 @@ class LiteSQLViewer(QMainWindow):
def commit_changes(self):
if self.model:
if self.model.submitAll():
QMessageBox.information(self, "Success", "Changes committed successfully.")
QMessageBox.information(
self, "Success", "Changes committed successfully."
)
else:
QMessageBox.critical(self, "Error", "Failed to commit changes: " + self.model.lastError().text())
QMessageBox.critical(
self,
"Error",
"Failed to commit changes: " + self.model.lastError().text(),
)

def scroll_to_bottom(self):
if self.model and self.model.rowCount() > 0:
self.table_view.scrollToBottom() # Scroll to the bottom of the table view

if __name__ == '__main__':

if __name__ == "__main__":
app = QApplication(sys.argv)
window = LiteSQLViewer()
window.show()
|
||||
import os
|
||||
from transformers import AutoModelForCausalLM, AutoTokenizer, get_scheduler, LlamaForCausalLM
|
||||
import torch
|
||||
from torch.optim import AdamW
|
||||
from argparse import ArgumentParser
|
||||
from read import read_config
|
||||
|
||||
import torch
|
||||
import wandb
|
||||
from accelerate import Accelerator
|
||||
from accelerate.utils import DummyScheduler, DummyOptim, set_seed
|
||||
from peft import get_peft_model, LoraConfig, TaskType
|
||||
from accelerate.utils import DummyOptim, DummyScheduler, set_seed
|
||||
from data import load_data
|
||||
from peft import LoraConfig, TaskType, get_peft_model
|
||||
from read import read_config
|
||||
from torch.optim import AdamW
|
||||
from torchmetrics import MeanMetric
|
||||
from tqdm import tqdm
|
||||
import wandb
|
||||
from transformers import (AutoModelForCausalLM, AutoTokenizer,
|
||||
LlamaForCausalLM, get_scheduler)
|
||||
|
||||
torch.backends.cuda.matmul.allow_tf32 = True
|
||||
|
||||
|
||||
def format_metrics(metrics, split, prefix=""):
|
||||
log = f"[{split}]" + prefix
|
||||
log += " ".join([f"{key}: {value:.4f}" for key, value in metrics.items()])
|
||||
@ -37,32 +40,38 @@ def evaluate(model, val_dataloader):
|
||||
|
||||
|
||||
def train(accelerator, config):
|
||||
set_seed(config['seed'])
|
||||
set_seed(config["seed"])
|
||||
|
||||
accelerator.print(config)
|
||||
accelerator.print(f"Using {accelerator.num_processes} GPUs")
|
||||
|
||||
tokenizer = AutoTokenizer.from_pretrained(config['tokenizer_name'], model_max_length=config['max_length'])
|
||||
tokenizer = AutoTokenizer.from_pretrained(
|
||||
config["tokenizer_name"], model_max_length=config["max_length"]
|
||||
)
|
||||
# if no pad token, set it to eos
|
||||
if tokenizer.pad_token is None:
|
||||
tokenizer.pad_token = tokenizer.eos_token
|
||||
|
||||
|
||||
with accelerator.main_process_first():
|
||||
train_dataloader, val_dataloader = load_data(config, tokenizer)
|
||||
|
||||
train_dataloader, val_dataloader = load_data(config, tokenizer)
|
||||
|
||||
checkpoint = config["gradient_checkpointing"]
|
||||
model = AutoModelForCausalLM.from_pretrained(config["model_name"],
|
||||
use_cache=False if checkpoint else True,
|
||||
trust_remote_code=True)
|
||||
model = AutoModelForCausalLM.from_pretrained(
|
||||
config["model_name"],
|
||||
use_cache=False if checkpoint else True,
|
||||
trust_remote_code=True,
|
||||
)
|
||||
if checkpoint:
|
||||
model.gradient_checkpointing_enable()
|
||||
|
||||
if config["lora"]:
|
||||
peft_config = LoraConfig(
|
||||
# should R be configurable?
|
||||
task_type=TaskType.CAUSAL_LM, inference_mode=False, r=8, lora_alpha=32, lora_dropout=0.1
|
||||
task_type=TaskType.CAUSAL_LM,
|
||||
inference_mode=False,
|
||||
r=8,
|
||||
lora_alpha=32,
|
||||
lora_dropout=0.1,
|
||||
)
|
||||
model = get_peft_model(model, peft_config)
|
||||
model.print_trainable_parameters()
|
||||
@ -76,17 +85,23 @@ def train(accelerator, config):
|
||||
|
||||
# karpathy doesn't decay embeddding, maybe we should exclude
|
||||
# https://github.com/karpathy/minGPT/commit/bbbdac74fa9b2e55574d70056163ffbae42310c1#diff-2075fa9c224b395be5bda85544dd36572b59c76c54562819eadadbf268602834R157s
|
||||
optimizer = optimizer_cls(model.parameters(), lr=config["lr"], weight_decay=config["weight_decay"])
|
||||
optimizer = optimizer_cls(
|
||||
model.parameters(), lr=config["lr"], weight_decay=config["weight_decay"]
|
||||
)
|
||||
|
||||
if accelerator.state.deepspeed_plugin is not None:
|
||||
gradient_accumulation_steps = accelerator.state.deepspeed_plugin.deepspeed_config[
|
||||
"gradient_accumulation_steps"
|
||||
]
|
||||
gradient_accumulation_steps = (
|
||||
accelerator.state.deepspeed_plugin.deepspeed_config[
|
||||
"gradient_accumulation_steps"
|
||||
]
|
||||
)
|
||||
|
||||
# decay to min_lr instead of 0
|
||||
lr_ratio = config["min_lr"] / config["lr"]
|
||||
accelerator.print(f"Len of train_dataloader: {len(train_dataloader)}")
|
||||
total_num_steps = (len(train_dataloader) / gradient_accumulation_steps) * config["num_epochs"]
|
||||
total_num_steps = (len(train_dataloader) / gradient_accumulation_steps) * config[
|
||||
"num_epochs"
|
||||
]
|
||||
# instead of decaying to zero, decay to ratio of min_lr / lr
|
||||
total_num_steps += int(total_num_steps * lr_ratio) + config["warmup_steps"]
|
||||
accelerator.print(f"Total training steps: {total_num_steps}")
|
||||
@ -104,11 +119,13 @@ def train(accelerator, config):
|
||||
)
|
||||
else:
|
||||
scheduler = DummyScheduler(
|
||||
optimizer, total_num_steps=config["warmup_steps"], warmup_num_steps=config["warmup_steps"]
|
||||
optimizer,
|
||||
total_num_steps=config["warmup_steps"],
|
||||
warmup_num_steps=config["warmup_steps"],
|
||||
)
|
||||
|
||||
model, optimizer, train_dataloader, val_dataloader, scheduler = accelerator.prepare(
|
||||
model, optimizer, train_dataloader, val_dataloader, scheduler
|
||||
model, optimizer, train_dataloader, val_dataloader, scheduler
|
||||
)
|
||||
|
||||
# setup for saving training states in case preemption
|
||||
@ -123,7 +140,6 @@ def train(accelerator, config):
|
||||
accelerator.skip_first_batches(train_dataloader, resume_step)
|
||||
accelerator.print(f"Resuming from step {resume_step}")
|
||||
|
||||
|
||||
# log gradients
|
||||
if accelerator.is_main_process and config["wandb"]:
|
||||
wandb.watch(model, log_freq=config["log_grads_every"], log="all")
|
||||
@ -136,38 +152,39 @@ def train(accelerator, config):
|
||||
loss = outputs.loss
|
||||
|
||||
# gather loss before backprop in case of gradient accumulation
|
||||
loss_values = accelerator.gather_for_metrics({"loss": loss.detach().float()})
|
||||
loss_values = accelerator.gather_for_metrics(
|
||||
{"loss": loss.detach().float()}
|
||||
)
|
||||
train_loss.update(loss_values["loss"])
|
||||
|
||||
loss = loss / gradient_accumulation_steps
|
||||
accelerator.backward(loss)
|
||||
# get gradient norm of all params
|
||||
|
||||
# log LR in case something weird happens
|
||||
# log LR in case something weird happens
|
||||
if step > 0 and step % (config["eval_every"] // 10) == 0:
|
||||
if config["wandb"]:
|
||||
curr_step = step + epoch * len(train_dataloader)
|
||||
accelerator.log({"lr": scheduler.get_last_lr()[0]}, step=curr_step)
|
||||
|
||||
if (step + 1) % gradient_accumulation_steps == 0 or step == len(train_dataloader) - 1:
|
||||
if (step + 1) % gradient_accumulation_steps == 0 or step == len(
|
||||
train_dataloader
|
||||
) - 1:
|
||||
optimizer.step()
|
||||
scheduler.step()
|
||||
optimizer.zero_grad()
|
||||
|
||||
|
||||
if step > 0 and step % config["save_every"] == 0:
|
||||
curr_step = step + epoch * len(train_dataloader)
|
||||
accelerator.save_state(f"{config['output_dir']}/step_{curr_step}")
|
||||
|
||||
if step > 0 and (step % config["eval_every"] == 0 or step == len(train_dataloader) - 1):
|
||||
if step > 0 and (
|
||||
step % config["eval_every"] == 0 or step == len(train_dataloader) - 1
|
||||
):
|
||||
val_loss = evaluate(model, val_dataloader)
|
||||
|
||||
log_train = {
|
||||
"train_loss": train_loss.compute()
|
||||
}
|
||||
log_val = {
|
||||
"val_loss": val_loss.compute()
|
||||
}
|
||||
log_train = {"train_loss": train_loss.compute()}
|
||||
log_val = {"val_loss": val_loss.compute()}
|
||||
|
||||
if config["wandb"]:
|
||||
curr_step = step + epoch * len(train_dataloader)
|
||||
@ -185,7 +202,9 @@ def train(accelerator, config):
|
||||
unwrapped_model = accelerator.unwrap_model(model)
|
||||
try:
|
||||
if accelerator.is_main_process:
|
||||
unwrapped_model.push_to_hub(config["save_name"] + f"-epoch_{epoch}", private=True)
|
||||
unwrapped_model.push_to_hub(
|
||||
config["save_name"] + f"-epoch_{epoch}", private=True
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
accelerator.print(e)
|
||||
@ -197,7 +216,7 @@ def train(accelerator, config):
|
||||
save_function=accelerator.save,
|
||||
state_dict=accelerator.get_state_dict(model),
|
||||
)
|
||||
|
||||
|
||||
accelerator.wait_for_everyone()
|
||||
unwrapped_model = accelerator.unwrap_model(model)
|
||||
unwrapped_model.save_pretrained(
|
||||
@ -209,7 +228,6 @@ def train(accelerator, config):
|
||||
|
||||
accelerator.end_training()
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# parse arguments by reading in a config
|
||||
@ -230,4 +248,4 @@ if __name__ == "__main__":
|
||||
else:
|
||||
accelerator = Accelerator()
|
||||
|
||||
train(accelerator, config=config)
|
||||
train(accelerator, config=config)
|
||||
|
123
update_script.py
@@ -1,18 +1,21 @@
import os
import sys
import git
import subprocess
import argparse
import os
import subprocess
import sys
from pathlib import Path

import git
import pipmaster as pm
from ascii_colors import ASCIIColors, trace_exception

import pipmaster as pm
if not pm.is_installed("PyQt5"):
pm.install("PyQt5")

import sys

from PyQt5.QtWidgets import QApplication, QMessageBox

def show_error_dialog(message):
try:
app = QApplication(sys.argv)
@@ -24,24 +27,25 @@ def show_error_dialog(message):
except:
ASCIIColors.error(message)

def run_git_pull():
try:
ASCIIColors.info("----------------> Updating the code <-----------------------")

repo = git.Repo(Path(__file__).parent)
origin = repo.remotes.origin

# Fetch the latest changes
origin.fetch()

# Check if there are any changes to pull
if repo.head.commit == origin.refs.main.commit:
ASCIIColors.success("Already up-to-date.")

# Discard local changes and force update
try:
repo.git.reset('--hard', 'origin/main')
repo.git.clean('-fd')
repo.git.reset("--hard", "origin/main")
repo.git.clean("-fd")
origin.pull()
ASCIIColors.success("Successfully updated the code.")
except git.GitCommandError as e:
@@ -51,7 +55,7 @@ def run_git_pull():

print("Updating submodules")
try:
repo.git.submodule('update', '--init', '--recursive', '--force')
repo.git.submodule("update", "--init", "--recursive", "--force")
except Exception as ex:
error_message = f"Couldn't update submodules: {str(ex)}"
ASCIIColors.error(error_message)
@@ -61,13 +65,15 @@ def run_git_pull():
for submodule in repo.submodules:
try:
submodule_repo = submodule.module()
submodule_repo.git.fetch('origin')
submodule_repo.git.reset('--hard', 'origin/main')
submodule_repo.git.clean('-fd')
submodule_repo.git.fetch("origin")
submodule_repo.git.reset("--hard", "origin/main")
submodule_repo.git.clean("-fd")
ASCIIColors.success(f"Updated submodule: {submodule}.")
except Exception as ex:
print(f"Couldn't update submodule {submodule}: {str(ex)}\nPlease report the error to ParisNeo either on Discord or on github.")

print(
f"Couldn't update submodule {submodule}: {str(ex)}\nPlease report the error to ParisNeo either on Discord or on github."
)

execution_path = Path(os.getcwd())
except Exception as ex:
error_message = f"Couldn't update submodules: {str(ex)}\nPlease report the error to ParisNeo either on Discord or on github."
@@ -78,13 +84,27 @@ def run_git_pull():
ASCIIColors.info("Updating lollms_core")
lollms_core_path = execution_path / "lollms_core"
if lollms_core_path.exists():
subprocess.run(["git", "-C", str(lollms_core_path), "fetch", "origin"], check=True)
subprocess.run(["git", "-C", str(lollms_core_path), "reset", "--hard", "origin/main"], check=True)
subprocess.run(["git", "-C", str(lollms_core_path), "clean", "-fd"], check=True)
subprocess.run(
["git", "-C", str(lollms_core_path), "fetch", "origin"], check=True
)
subprocess.run(
[
"git",
"-C",
str(lollms_core_path),
"reset",
"--hard",
"origin/main",
],
check=True,
)
subprocess.run(
["git", "-C", str(lollms_core_path), "clean", "-fd"], check=True
)
ASCIIColors.success("Successfully updated lollms_core")
else:
ASCIIColors.warning("lollms_core directory not found")

except Exception as ex:
error_message = f"Couldn't update submodules: {str(ex)}"
ASCIIColors.error(error_message)
@@ -96,30 +116,54 @@ def run_git_pull():
ASCIIColors.error(error_message)
# show_error_dialog(error_message)
return False

def get_valid_input():
while True:
update_prompt = "New code is updated. Do you want to update the requirements? (y/n): "
update_prompt = (
"New code is updated. Do you want to update the requirements? (y/n): "
)
user_response = input(update_prompt).strip().lower()

if user_response in ['y', 'yes']:
return 'yes'
elif user_response in ['n', 'no']:
return 'no'

if user_response in ["y", "yes"]:
return "yes"
elif user_response in ["n", "no"]:
return "no"
else:
print("Invalid input. Please respond with 'y' or 'n'.")

def install_requirements():
try:
# Get valid input from the user
user_choice = get_valid_input()

# Enhance the text based on the user's response
if user_choice == 'yes':
if user_choice == "yes":
enhanced_text = "Great choice! Updating requirements ensures your project stays up-to-date with the latest changes."
print(enhanced_text)
subprocess.check_call([sys.executable, "-m", "pip", "install", "--upgrade", "-r", "requirements.txt"])
subprocess.check_call([sys.executable, "-m", "pip", "install", "--upgrade", "-e", "lollms_core"])
subprocess.check_call(
[
sys.executable,
"-m",
"pip",
"install",
"--upgrade",
"-r",
"requirements.txt",
]
)
subprocess.check_call(
[
sys.executable,
"-m",
"pip",
"install",
"--upgrade",
"-e",
"lollms_core",
]
)
ASCIIColors.success("Successfully installed requirements")
else:  # user_choice == 'no'
enhanced_text = "Understood. Skipping the requirements update. Make sure to update them later if needed."
@@ -129,9 +173,15 @@ def install_requirements():
ASCIIColors.error(error_message)
show_error_dialog(error_message)

def main():
parser = argparse.ArgumentParser()
parser.add_argument("--repo", type=str, default="https://github.com/ParisNeo/lollms-webui.git", help="Path to the Git repository")
parser.add_argument(
"--repo",
type=str,
default="https://github.com/ParisNeo/lollms-webui.git",
help="Path to the Git repository",
)
args = parser.parse_args()

repo_path = args.repo
@@ -159,10 +209,13 @@ def main():
show_error_dialog(error_message)
sys.exit(1)
else:
error_message = "Update process failed. Please check the console for more details."
error_message = (
"Update process failed. Please check the console for more details."
)
ASCIIColors.error(error_message)
show_error_dialog(error_message)
sys.exit(1)

if __name__ == "__main__":
main()
@@ -6,16 +6,20 @@ description:
This is a utility for executing python code

"""
from lollms_webui import LOLLMSWebUI
from ascii_colors import get_trace_exception, trace_exception
import time
import subprocess

import json
from lollms.client_session import Client
from lollms.utilities import discussion_path_to_url
import subprocess
import time
from pathlib import Path

lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
from ascii_colors import get_trace_exception, trace_exception
from lollms.client_session import Client
from lollms.utilities import discussion_path_to_url

from lollms_webui import LOLLMSWebUI

lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()

def build_graphviz_output(code, ifram_name=None):
"""
@@ -31,84 +35,101 @@ def build_graphviz_output(code, ifram_name=None):
# Start the timer.
start_time = time.time()
if ifram_name is not None:
rendered = "\n".join([
'<div style="width: 100%; margin: 0 auto;">',
f'<iframe id="{ifram_name}" srcdoc=\'',
'<style>',
'iframe {',
'width: 100%;',
'height: 100%;',
'border: none;',
'}',

'.graph {',
'background-color: transparent;',
'padding: 20px;',
'border-radius: 10px;',
'display: flex;',
'justify-content: center;',
'align-items: center;',
'height: 100%;',
'}',
'#svg-container {',
' border: 1px solid black;',
' display: inline-block;',
'}',
'#controls {',
' margin-top: 10px;',
'}',
'</style>',
'<div id="controls">',
' <button id="zoom-in">Zoom In</button>',
' <button id="zoom-out">Zoom Out</button>',
' <button id="save-svg">Save</button>',
'</div>',
'<div id="svg-container">',
'<div id="graph" class="graph"></div>',
'</div>',
'<script src="https://github.com/mdaines/viz-js/releases/download/release-viz-3.2.4/viz-standalone.js"></script>',
'<script>',
'// Initialize the mermaid library and render our diagram',
'Viz.instance().then(function(viz) {',
'var svg = viz.renderSVGElement(`',
"\n".join([c.replace("'","\"") for c in code.split("\n") if c.strip()!=""]),
'`);',
'document.getElementById("graph").appendChild(svg);',
'});',
'</script>',
'<div style="text-align: center;">',
'</div>',
'\' style="width: 100%; height: 600px; border: none;"></iframe>',
'</div>'

rendered = "\n".join(
[
'<div style="width: 100%; margin: 0 auto;">',
f'<iframe id="{ifram_name}" srcdoc=\'',
"<style>",
"iframe {",
"width: 100%;",
"height: 100%;",
"border: none;",
"}",
".graph {",
"background-color: transparent;",
"padding: 20px;",
"border-radius: 10px;",
"display: flex;",
"justify-content: center;",
"align-items: center;",
"height: 100%;",
"}",
"#svg-container {",
" border: 1px solid black;",
" display: inline-block;",
"}",
"#controls {",
" margin-top: 10px;",
"}",
"</style>",
'<div id="controls">',
' <button id="zoom-in">Zoom In</button>',
' <button id="zoom-out">Zoom Out</button>',
' <button id="save-svg">Save</button>',
"</div>",
'<div id="svg-container">',
'<div id="graph" class="graph"></div>',
"</div>",
'<script src="https://github.com/mdaines/viz-js/releases/download/release-viz-3.2.4/viz-standalone.js"></script>',
"<script>",
"// Initialize the mermaid library and render our diagram",
"Viz.instance().then(function(viz) {",
"var svg = viz.renderSVGElement(`",
"\n".join(
[c.replace("'", '"') for c in code.split("\n") if c.strip() != ""]
),
"`);",
'document.getElementById("graph").appendChild(svg);',
"});",
"</script>",
'<div style="text-align: center;">',
"</div>",
'\' style="width: 100%; height: 600px; border: none;"></iframe>',
"</div>",
]
)
else:
with open(Path(__file__).parent/"assets"/"graphviz_container.html","r",encoding="utf-8") as f:
with open(
Path(__file__).parent / "assets" / "graphviz_container.html",
"r",
encoding="utf-8",
) as f:
data = f.read()
rendered = data.replace("{{svg_data}}","\n".join([c.replace("'","\'") for c in code.split("\n") if c.strip()!=""]) )

rendered = data.replace(
"{{svg_data}}",
"\n".join(
[c.replace("'", "'") for c in code.split("\n") if c.strip() != ""]
),
)

execution_time = time.time() - start_time
return {"output": rendered, "execution_time": execution_time}

def execute_graphviz(code, client:Client, message_id, build_file=False):

def execute_graphviz(code, client: Client, message_id, build_file=False):
if build_file:
# Start the timer.
start_time = time.time()
if not "http" in lollmsElfServer.config.host and not "https" in lollmsElfServer.config.host:
host = "http://"+lollmsElfServer.config.host
if (
not "http" in lollmsElfServer.config.host
and not "https" in lollmsElfServer.config.host
):
host = "http://" + lollmsElfServer.config.host
else:
host = lollmsElfServer.config.host

# Create a temporary file.
root_folder = client.discussion.discussion_folder
root_folder.mkdir(parents=True,exist_ok=True)
tmp_file = root_folder/f"ai_code_{message_id}.html"
with open(tmp_file,"w",encoding="utf8") as f:
root_folder.mkdir(parents=True, exist_ok=True)
tmp_file = root_folder / f"ai_code_{message_id}.html"
with open(tmp_file, "w", encoding="utf8") as f:
f.write(build_graphviz_output(code)["output"])
link = f"{host}:{lollmsElfServer.config.port}{discussion_path_to_url(tmp_file)}"
execution_time = time.time() - start_time
output_json = {"output": f'<b>Page built successfully</b><br><a href="{link}" target="_blank">Press here to view the page</a>', "execution_time": execution_time}
output_json = {
"output": f'<b>Page built successfully</b><br><a href="{link}" target="_blank">Press here to view the page</a>',
"execution_time": execution_time,
}
return output_json
else:
return build_graphviz_output(code, ifram_name="iframe")
@@ -6,16 +6,19 @@ description:
This is a utility for executing python code

"""
from lollms_webui import LOLLMSWebUI
from ascii_colors import get_trace_exception, trace_exception
import time
import subprocess

import json
import subprocess
import time

from ascii_colors import get_trace_exception, trace_exception
from lollms.client_session import Client
from lollms.utilities import discussion_path_to_url

from lollms_webui import LOLLMSWebUI

lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()

lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()

def build_html_output(code, ifram_name="unnamed"):
"""
@@ -29,37 +32,45 @@ def build_html_output(code, ifram_name="unnamed"):
str: The HTML string for the iframe.
"""
# Start the timer.
start_time = time.time()
rendered = "\n".join([
'<div style="width: 100%; margin: 0 auto;">',
f'<iframe id="{ifram_name}" srcdoc=\'',
code.replace("'","\""),
'\' style="width: 100%; height: 600px; border: none;"></iframe>',
'</div>'
start_time = time.time()
rendered = "\n".join(
[
'<div style="width: 100%; margin: 0 auto;">',
f'<iframe id="{ifram_name}" srcdoc=\'',
code.replace("'", '"'),
'\' style="width: 100%; height: 600px; border: none;"></iframe>',
"</div>",
]
)
execution_time = time.time() - start_time
return {"output": rendered, "execution_time": execution_time}

def execute_html(code, client:Client, message_id, build_file=False):

def execute_html(code, client: Client, message_id, build_file=False):
if build_file:
# Start the timer.
start_time = time.time()
if not "http" in lollmsElfServer.config.host and not "https" in lollmsElfServer.config.host:
host = "http://"+lollmsElfServer.config.host
if (
not "http" in lollmsElfServer.config.host
and not "https" in lollmsElfServer.config.host
):
host = "http://" + lollmsElfServer.config.host
else:
host = lollmsElfServer.config.host

# Create a temporary file.
root_folder = client.discussion.discussion_folder
root_folder.mkdir(parents=True,exist_ok=True)
tmp_file = root_folder/f"ai_code_{message_id}.html"
with open(tmp_file,"w",encoding="utf8") as f:
root_folder.mkdir(parents=True, exist_ok=True)
tmp_file = root_folder / f"ai_code_{message_id}.html"
with open(tmp_file, "w", encoding="utf8") as f:
f.write(code)
link = f"{host}:{lollmsElfServer.config.port}{discussion_path_to_url(tmp_file)}"
# Stop the timer.
execution_time = time.time() - start_time
output_json = {"output": f'<b>Page built successfully</b><br><a href="{link}" target="_blank">Press here to view the page</a>', "execution_time": execution_time}
output_json = {
"output": f'<b>Page built successfully</b><br><a href="{link}" target="_blank">Press here to view the page</a>',
"execution_time": execution_time,
}
return output_json
else:
return build_html_output(code)
return build_html_output(code)
@@ -6,16 +6,19 @@ description:
This is a utility for executing python code

"""
from lollms_webui import LOLLMSWebUI
from ascii_colors import get_trace_exception, trace_exception
import time
import subprocess

import json
import subprocess
import time

from ascii_colors import get_trace_exception, trace_exception
from lollms.client_session import Client
from lollms.utilities import discussion_path_to_url

from lollms_webui import LOLLMSWebUI

lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()

lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()

def build_javascript_output(code, ifram_name=None):
"""
@@ -31,53 +34,62 @@ def build_javascript_output(code, ifram_name=None):
# Start the timer.
start_time = time.time()
if ifram_name is not None:
rendered = "\n".join([
'<div style="width: 100%; margin: 0 auto;">',
f'<iframe id="{ifram_name}" srcdoc="',
'<style>',
'iframe {',
'width: 100%;',
'height: 100%;',
'border: none;',
'}',
'</style>',
'<script>',
code,
'</script>',
'" style="width: 100%; height: 600px; border: none;"></iframe>',
'</div>'
rendered = "\n".join(
[
'<div style="width: 100%; margin: 0 auto;">',
f'<iframe id="{ifram_name}" srcdoc="',
"<style>",
"iframe {",
"width: 100%;",
"height: 100%;",
"border: none;",
"}",
"</style>",
"<script>",
code,
"</script>",
'" style="width: 100%; height: 600px; border: none;"></iframe>',
"</div>",
]
)
else:
rendered = "\n".join([
'<div style="width: 100%; margin: 0 auto;">',
'<script>',
code,
'</script>',
'</div>'
rendered = "\n".join(
[
'<div style="width: 100%; margin: 0 auto;">',
"<script>",
code,
"</script>",
"</div>",
]
)
execution_time = time.time() - start_time
return {"output": rendered, "execution_time": execution_time}

def execute_javascript(code, client:Client, message_id, build_file=False):

def execute_javascript(code, client: Client, message_id, build_file=False):
if build_file:
# Start the timer.
start_time = time.time()
if not "http" in lollmsElfServer.config.host and not "https" in lollmsElfServer.config.host:
host = "http://"+lollmsElfServer.config.host
if (
not "http" in lollmsElfServer.config.host
and not "https" in lollmsElfServer.config.host
):
host = "http://" + lollmsElfServer.config.host
else:
host = lollmsElfServer.config.host

# Create a temporary file.
root_folder = client.discussion.discussion_folder
root_folder.mkdir(parents=True,exist_ok=True)
tmp_file = root_folder/f"ai_code_{message_id}.html"
with open(tmp_file,"w",encoding="utf8") as f:
root_folder.mkdir(parents=True, exist_ok=True)
tmp_file = root_folder / f"ai_code_{message_id}.html"
with open(tmp_file, "w", encoding="utf8") as f:
f.write(build_javascript_output(code)["output"])
link = f"{host}:{lollmsElfServer.config.port}{discussion_path_to_url(tmp_file)}"
execution_time = time.time() - start_time
output_json = {"output": f'<b>Page built successfully</b><br><a href="{link}" target="_blank">Press here to view the page</a>', "execution_time": execution_time}
output_json = {
"output": f'<b>Page built successfully</b><br><a href="{link}" target="_blank">Press here to view the page</a>',
"execution_time": execution_time,
}
return output_json
else:
return build_javascript_output(code)
@@ -6,37 +6,38 @@ description:
This is a utility for executing latex code

"""
from fastapi import APIRouter, Request, routing
from lollms_webui import LOLLMSWebUI

import json
import shutil
import subprocess
import time
from pathlib import Path

import tqdm
from ascii_colors import ASCIIColors, get_trace_exception, trace_exception
from fastapi import APIRouter, FastAPI, File, Request, UploadFile, routing
from lollms.client_session import Client
from lollms.databases.discussions_database import DiscussionsDB
from lollms.main_config import BaseConfig
from lollms.types import MSG_OPERATION_TYPE
from lollms.utilities import discussion_path_to_url
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from ascii_colors import get_trace_exception, trace_exception
from ascii_colors import ASCIIColors
from lollms.databases.discussions_database import DiscussionsDB
from pathlib import Path
import tqdm
from fastapi import FastAPI, UploadFile, File
import shutil
import time
import subprocess
import json
from lollms.client_session import Client
from lollms.utilities import discussion_path_to_url

from lollms_webui import LOLLMSWebUI

lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()

lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()

def execute_latex(code, client:Client, message_id):
def execute_latex(code, client: Client, message_id):
# Start the timer.
start_time = time.time()

# Create a temporary file.
root_folder = client.discussion.discussion_folder
root_folder.mkdir(parents=True,exist_ok=True)
tmp_file = root_folder/f"latex_file_{message_id}.tex"
with open(tmp_file,"w", encoding="utf-8") as f:
root_folder.mkdir(parents=True, exist_ok=True)
tmp_file = root_folder / f"latex_file_{message_id}.tex"
with open(tmp_file, "w", encoding="utf-8") as f:
f.write(code)

try:
@@ -44,7 +45,7 @@ def execute_latex(code, client:Client, message_id):
if lollmsElfServer.config.pdf_latex_path:
pdflatex_command = lollmsElfServer.config.pdf_latex_path
else:
pdflatex_command = 'pdflatex'
pdflatex_command = "pdflatex"
# Set the execution path to the folder containing the tmp_file
execution_path = tmp_file.parent

@@ -53,7 +54,7 @@ def execute_latex(code, client:Client, message_id):
[pdflatex_command, "-interaction=nonstopmode", str(tmp_file)],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=execution_path
cwd=execution_path,
)

# Get the output and error from the process.
@@ -62,7 +63,14 @@ def execute_latex(code, client:Client, message_id):
# Stop the timer.
execution_time = time.time() - start_time
error_message = f"Error executing Python code: {ex}"
error_json = {"output": "<div class='text-red-500'>"+ex+"\n"+get_trace_exception(ex)+"</div>", "execution_time": execution_time}
error_json = {
"output": "<div class='text-red-500'>"
+ ex
+ "\n"
+ get_trace_exception(ex)
+ "</div>",
"execution_time": execution_time,
}
return error_json

# Stop the timer.
@@ -71,7 +79,7 @@ def execute_latex(code, client:Client, message_id):
# Check if the process was successful.
if process.returncode != 0:
# The child process threw an exception.
pdf_file = tmp_file.with_suffix('.pdf')
pdf_file = tmp_file.with_suffix(".pdf")
print(f"PDF file generated: {pdf_file}")
try:
error_message = f"Error executing Python code:\n{error.decode('utf-8', errors='ignore')}"
@@ -79,31 +87,50 @@ def execute_latex(code, client:Client, message_id):
error_message = f"Error executing Python code:\n{error}"
if pdf_file.exists():
# The child process was successful.
pdf_file=str(pdf_file).replace("\\","/")
if not "http" in lollmsElfServer.config.host and not "https" in lollmsElfServer.config.host:
host = "http://"+lollmsElfServer.config.host
pdf_file = str(pdf_file).replace("\\", "/")
if (
not "http" in lollmsElfServer.config.host
and not "https" in lollmsElfServer.config.host
):
host = "http://" + lollmsElfServer.config.host
else:
host = lollmsElfServer.config.host

url = f"{host}:{lollmsElfServer.config.port}/{discussion_path_to_url(pdf_file)}"
error_json = {"output": f"<div>Pdf file generated at: {pdf_file}\n<a href='{url}' target='_blank'>Click here to show</a></div><div>Output:{output.decode('utf-8', errors='ignore')}\n</div><div class='text-red-500'>"+error_message+"</div>", "execution_time": execution_time}
error_json = {
"output": f"<div>Pdf file generated at: {pdf_file}\n<a href='{url}' target='_blank'>Click here to show</a></div><div>Output:{output.decode('utf-8', errors='ignore')}\n</div><div class='text-red-500'>"
+ error_message
+ "</div>",
"execution_time": execution_time,
}

else:
error_json = {"output": f"<div>Output:{output.decode('utf-8', errors='ignore')}\n</div><div class='text-red-500'>"+error_message+"</div>", "execution_time": execution_time}
else:
error_json = {
"output": f"<div>Output:{output.decode('utf-8', errors='ignore')}\n</div><div class='text-red-500'>"
+ error_message
+ "</div>",
"execution_time": execution_time,
}
return error_json

# The child process was successful.
# If the compilation is successful, you will get a PDF file
pdf_file = tmp_file.with_suffix('.pdf')
pdf_file = tmp_file.with_suffix(".pdf")
print(f"PDF file generated: {pdf_file}")

# The child process was successful.
pdf_file=str(pdf_file).replace("\\","/")
if not "http" in lollmsElfServer.config.host and not "https" in lollmsElfServer.config.host:
host = "http://"+lollmsElfServer.config.host
pdf_file = str(pdf_file).replace("\\", "/")
if (
not "http" in lollmsElfServer.config.host
and not "https" in lollmsElfServer.config.host
):
host = "http://" + lollmsElfServer.config.host
else:
host = lollmsElfServer.config.host

url = f"{host}:{lollmsElfServer.config.port}{discussion_path_to_url(pdf_file)}"
output_json = {"output": f"Pdf file generated at: {pdf_file}\n<a href='{url}' target='_blank'>Click here to show</a>", "execution_time": execution_time}
output_json = {
"output": f"Pdf file generated at: {pdf_file}\n<a href='{url}' target='_blank'>Click here to show</a>",
"execution_time": execution_time,
}
return output_json
@@ -6,30 +6,37 @@ description:
This is a utility for executing LilyPond code in LOLLMS
"""

import pipmaster as pm
from lollms_webui import LOLLMSWebUI
from ascii_colors import trace_exception
import shutil
import subprocess
import time
from pathlib import Path

import pipmaster as pm
from ascii_colors import trace_exception
from lollms.client_session import Client
from lollms.utilities import discussion_path_to_url, show_yes_no_dialog
import shutil
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()

from lollms_webui import LOLLMSWebUI

lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()

def check_and_install_lilypond():
"""Check if LilyPond is installed and install it if needed"""
if not pm.is_installed("lilypond"):
if not show_yes_no_dialog("Installation","LilyPond is not installed. Do you want to install it?"):
return {"status":False,"error":"User refused LilyPond installation!"}
if not show_yes_no_dialog(
"Installation", "LilyPond is not installed. Do you want to install it?"
):
return {"status": False, "error": "User refused LilyPond installation!"}
try:
pm.install("lilypond")
return {"status":True}
return {"status": True}
except Exception as ex:
return {"status":False,"error":f"Failed to install LilyPond: {str(ex)}"}
return {"status":True}
return {"status": False, "error": f"Failed to install LilyPond: {str(ex)}"}
return {"status": True}

def execute_lilypond(code, client:Client, message_id):

def execute_lilypond(code, client: Client, message_id):
"""Execute LilyPond code and return the result"""
try:
# Check LilyPond installation
@@ -46,26 +53,35 @@ def execute_lilypond(code, client:Client, message_id):
# Create work directory in discussion folder
root_folder = client.discussion.discussion_folder
root_folder.mkdir(parents=True, exist_ok=True)

# Create LilyPond file
ly_file = root_folder/f"score_{message_id}.ly"
ly_file.write_text(code,encoding="utf8")
ly_file = root_folder / f"score_{message_id}.ly"
ly_file.write_text(code, encoding="utf8")

# Get the PDF and MIDI outputs
pdf_file = ly_file.with_suffix('.pdf')
midi_file = ly_file.with_suffix('.mid')
pdf_file = ly_file.with_suffix(".pdf")
midi_file = ly_file.with_suffix(".mid")

# Compile the file
subprocess.run([lilypond.executable(), str(ly_file)], check=True, cwd=root_folder)
subprocess.run(
[lilypond.executable(), str(ly_file)], check=True, cwd=root_folder
)

# Create links to files
if not "http" in lollmsElfServer.config.host and not "https" in lollmsElfServer.config.host:
host = "http://"+lollmsElfServer.config.host
if (
not "http" in lollmsElfServer.config.host
and not "https" in lollmsElfServer.config.host
):
host = "http://" + lollmsElfServer.config.host
else:
host = lollmsElfServer.config.host

pdf_link = f"{host}:{lollmsElfServer.config.port}{discussion_path_to_url(pdf_file)}"
midi_link = f"{host}:{lollmsElfServer.config.port}{discussion_path_to_url(midi_file)}"
pdf_link = (
f"{host}:{lollmsElfServer.config.port}{discussion_path_to_url(pdf_file)}"
)
midi_link = (
f"{host}:{lollmsElfServer.config.port}{discussion_path_to_url(midi_file)}"
)

# Create output HTML
output = f"""
@@ -82,4 +98,7 @@ def execute_lilypond(code, client:Client, message_id):

except Exception as ex:
trace = trace_exception(ex)
return {"output": f"Error executing LilyPond code:\n{trace}", "execution_time": 0}
return {
"output": f"Error executing LilyPond code:\n{trace}",
"execution_time": 0,
}
@ -6,16 +6,19 @@ description:
|
||||
This is a utility for executing python code
|
||||
|
||||
"""
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
from ascii_colors import get_trace_exception, trace_exception
|
||||
import time
|
||||
import subprocess
|
||||
|
||||
import json
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
from ascii_colors import get_trace_exception, trace_exception
|
||||
from lollms.client_session import Client
|
||||
from lollms.utilities import discussion_path_to_url
|
||||
|
||||
from lollms_webui import LOLLMSWebUI
|
||||
|
||||
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
|
||||
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
|
||||
|
||||
def build_mermaid_output(code, ifram_name=None):
|
||||
"""
|
||||
@ -31,118 +34,123 @@ def build_mermaid_output(code, ifram_name=None):
|
||||
# Start the timer.
|
||||
start_time = time.time()
|
||||
if ifram_name is not None:
|
||||
rendered = "\n".join([
|
||||
'<div style="width: 100%; margin: 0 auto;">',
|
||||
f'<iframe id="{ifram_name}" style="width: 100%" srcdoc="',
|
||||
'<style>',
|
||||
'iframe {',
|
||||
'width: 100%;',
|
||||
'height: 100%;',
|
||||
'border: none;',
|
||||
'}',
|
||||
'.mermaid {',
|
||||
'background-color: transparent;',
|
||||
'padding: 20px;',
|
||||
'border-radius: 10px;',
|
||||
'display: flex;',
|
||||
'justify-content: center;',
|
||||
'align-items: center;',
|
||||
'height: 100%;',
|
||||
'}',
|
||||
'</style>',
|
||||
'<div class=\'mermaid\'>',
|
||||
"\n".join([c for c in code.split("\n") if c.strip()!=""]),
|
||||
'</div>',
|
||||
'<script src=\'https://cdn.jsdelivr.net/npm/mermaid/dist/mermaid.min.js\'></script>',
|
||||
'<script>',
|
||||
'// Initialize the mermaid library and render our diagram',
|
||||
'mermaid.initialize({ startOnLoad: true });',
|
||||
'// Function to save SVG content to a file',
|
||||
'function saveSVG() {',
|
||||
'var svg = document.querySelector(".mermaid > svg");',
|
||||
'var serializer = new XMLSerializer();',
|
||||
'var source = serializer.serializeToString(svg);',
'var blob = new Blob([source], {type: "image/svg+xml;charset=utf-8"});',
'var url = URL.createObjectURL(blob);',
'var a = document.createElement("a");',
'a.href = url;',
'a.download = "diagram.svg";',
'a.click();',
'}',
'</script>',
'<div style=\'text-align: center;\'>',
'</div>',
'<button onclick="saveSVG()">Save SVG</button>',
'" style="width: 100%; height: 600px; border: none;"></iframe>',
'</div>'
rendered = "\n".join(
[
'<div style="width: 100%; margin: 0 auto;">',
f'<iframe id="{ifram_name}" style="width: 100%" srcdoc="',
"<style>",
"iframe {",
"width: 100%;",
"height: 100%;",
"border: none;",
"}",
".mermaid {",
"background-color: transparent;",
"padding: 20px;",
"border-radius: 10px;",
"display: flex;",
"justify-content: center;",
"align-items: center;",
"height: 100%;",
"}",
"</style>",
"<div class='mermaid'>",
"\n".join([c for c in code.split("\n") if c.strip() != ""]),
"</div>",
"<script src='https://cdn.jsdelivr.net/npm/mermaid/dist/mermaid.min.js'></script>",
"<script>",
"// Initialize the mermaid library and render our diagram",
"mermaid.initialize({ startOnLoad: true });",
"// Function to save SVG content to a file",
"function saveSVG() {",
'var svg = document.querySelector(".mermaid > svg");',
"var serializer = new XMLSerializer();",
"var source = serializer.serializeToString(svg);",
'var blob = new Blob([source], {type: "image/svg+xml;charset=utf-8"});',
"var url = URL.createObjectURL(blob);",
'var a = document.createElement("a");',
"a.href = url;",
'a.download = "diagram.svg";',
"a.click();",
"}",
"</script>",
"<div style='text-align: center;'>",
"</div>",
'<button onclick="saveSVG()">Save SVG</button>',
'" style="width: 100%; height: 600px; border: none;"></iframe>',
"</div>",
]
)
else:
rendered = "\n".join([
'<div style="width: 100%; margin: 0 auto;">',
'<style>',
'.mermaid {',
'background-color: transparent;',
'padding: 20px;',
'border-radius: 10px;',
'display: flex;',
'justify-content: center;',
'align-items: center;',
'height: 100%;',
'}',
'</style>',
'<div class=\'mermaid\'>',
"\n".join([c for c in code.split("\n") if c.strip()!=""]),
'</div>',
'<script src=\'https://cdn.jsdelivr.net/npm/mermaid/dist/mermaid.min.js\'></script>',
'<script>',
'// Initialize the mermaid library and render our diagram',
'mermaid.initialize({ startOnLoad: true });',
'// Function to save SVG content to a file',
'function saveSVG() {',
'var svg = document.querySelector(".mermaid > svg");',
'var serializer = new XMLSerializer();',
'var source = serializer.serializeToString(svg);',
'var blob = new Blob([source], {type: "image/svg+xml;charset=utf-8"});',
'var url = URL.createObjectURL(blob);',
'var a = document.createElement("a");',
'a.href = url;',
'a.download = "diagram.svg";',
'a.click();',
'}',
'</script>',
'<div style=\'text-align: center;\'>',
'</div>',
'<button onclick="saveSVG()">Save SVG</button>',
'</div>'
rendered = "\n".join(
[
'<div style="width: 100%; margin: 0 auto;">',
"<style>",
".mermaid {",
"background-color: transparent;",
"padding: 20px;",
"border-radius: 10px;",
"display: flex;",
"justify-content: center;",
"align-items: center;",
"height: 100%;",
"}",
"</style>",
"<div class='mermaid'>",
"\n".join([c for c in code.split("\n") if c.strip() != ""]),
"</div>",
"<script src='https://cdn.jsdelivr.net/npm/mermaid/dist/mermaid.min.js'></script>",
"<script>",
"// Initialize the mermaid library and render our diagram",
"mermaid.initialize({ startOnLoad: true });",
"// Function to save SVG content to a file",
"function saveSVG() {",
'var svg = document.querySelector(".mermaid > svg");',
"var serializer = new XMLSerializer();",
"var source = serializer.serializeToString(svg);",
'var blob = new Blob([source], {type: "image/svg+xml;charset=utf-8"});',
"var url = URL.createObjectURL(blob);",
'var a = document.createElement("a");',
"a.href = url;",
'a.download = "diagram.svg";',
"a.click();",
"}",
"</script>",
"<div style='text-align: center;'>",
"</div>",
'<button onclick="saveSVG()">Save SVG</button>',
"</div>",
]
)
execution_time = time.time() - start_time
return {"output": rendered, "execution_time": execution_time}


def execute_mermaid(code, client:Client, message_id, build_file=False):
def execute_mermaid(code, client: Client, message_id, build_file=False):
if build_file:
# Start the timer.
start_time = time.time()
if not "http" in lollmsElfServer.config.host and not "https" in lollmsElfServer.config.host:
host = "http://"+lollmsElfServer.config.host
if (
not "http" in lollmsElfServer.config.host
and not "https" in lollmsElfServer.config.host
):
host = "http://" + lollmsElfServer.config.host
else:
host = lollmsElfServer.config.host

# Create a temporary file.
root_folder = client.discussion.discussion_folder
root_folder.mkdir(parents=True,exist_ok=True)
tmp_file = root_folder/f"ai_code_{message_id}.html"
with open(tmp_file,"w",encoding="utf8") as f:
root_folder.mkdir(parents=True, exist_ok=True)
tmp_file = root_folder / f"ai_code_{message_id}.html"
with open(tmp_file, "w", encoding="utf8") as f:
f.write(build_mermaid_output(code)["output"])
link = f"{host}:{lollmsElfServer.config.port}{discussion_path_to_url(tmp_file)}"
# Stop the timer.
execution_time = time.time() - start_time
output_json = {"output": f'<b>Page built successfully</b><br><a href="{link}" target="_blank">Press here to view the page</a>', "execution_time": execution_time}
output_json = {
"output": f'<b>Page built successfully</b><br><a href="{link}" target="_blank">Press here to view the page</a>',
"execution_time": execution_time,
}
return output_json
else:
return build_mermaid_output(code, "app_iframe")
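
For orientation, a minimal usage sketch of the mermaid executor above. The diagram text, `client` and `message_id` are placeholders invented for this example; only the call shapes and the {"output", "execution_time"} return structure visible in the diff are assumed.

# Hedged sketch: render a mermaid snippet without writing a file.
mermaid_code = "graph TD; A[User prompt] --> B[LLM reply]"
result = build_mermaid_output(mermaid_code, "app_iframe")
html_block = result["output"]        # iframe-wrapped diagram with a "Save SVG" button
elapsed = result["execution_time"]   # seconds spent building the HTML

# With build_file=True, execute_mermaid instead writes ai_code_<message_id>.html into
# the discussion folder and returns a link served by the lollms web server.
page = execute_mermaid(mermaid_code, client, message_id=1, build_file=True)
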
@ -6,16 +6,20 @@ description:
This is a utility for executing python code

"""
from fastapi import routing
from lollms_webui import LOLLMSWebUI
from ascii_colors import get_trace_exception, trace_exception
import time
import subprocess

import json
from lollms.client_session import Client
import platform
import subprocess
import time
from pathlib import Path
lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()

from ascii_colors import get_trace_exception, trace_exception
from fastapi import routing
from lollms.client_session import Client

from lollms_webui import LOLLMSWebUI

lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()


def execute_python(code, client, message_id, build_file=True):
@ -36,11 +40,29 @@ def execute_python(code, client, message_id, build_file=True):
# Determine the platform and open a terminal to execute the Python code.
system = platform.system()
if system == "Windows":
process = subprocess.Popen(f"""start cmd /k "cd /d "{root_folder}" && python "{tmp_file}" && pause" """, shell=True)
process = subprocess.Popen(
f"""start cmd /k "cd /d "{root_folder}" && python "{tmp_file}" && pause" """,
shell=True,
)
elif system == "Darwin": # macOS
process = subprocess.Popen(["open", "-a", "Terminal", f'cd "{root_folder}" && python "{tmp_file}"'], shell=True)
process = subprocess.Popen(
[
"open",
"-a",
"Terminal",
f'cd "{root_folder}" && python "{tmp_file}"',
],
shell=True,
)
elif system == "Linux":
process = subprocess.Popen(["x-terminal-emulator", "-e", f'bash -c "cd \\"{root_folder}\\" && python \\"{tmp_file}\\"; exec bash"'], shell=True)
process = subprocess.Popen(
[
"x-terminal-emulator",
"-e",
f'bash -c "cd \\"{root_folder}\\" && python \\"{tmp_file}\\"; exec bash"',
],
shell=True,
)
else:
raise Exception(f"Unsupported platform: {system}")

@ -53,7 +75,14 @@ def execute_python(code, client, message_id, build_file=True):
# Stop the timer.
execution_time = time.time() - start_time
error_message = f"Error executing Python code: {ex}"
error_json = {"output": "<div class='text-red-500'>" + error_message + "\n" + get_trace_exception(ex) + "</div>", "execution_time": execution_time}
error_json = {
"output": "<div class='text-red-500'>"
+ error_message
+ "\n"
+ get_trace_exception(ex)
+ "</div>",
"execution_time": execution_time,
}
return error_json

# Stop the timer.
@ -66,12 +95,18 @@ def execute_python(code, client, message_id, build_file=True):
error_message = f"Output: {output.decode('utf-8', errors='ignore')}\nError executing Python code:\n{error.decode('utf-8', errors='ignore')}"
except:
error_message = f"Error executing Python code:\n{error}"
error_json = {"output": "<div class='text-red-500'>" + error_message + "</div>", "execution_time": execution_time}
error_json = {
"output": "<div class='text-red-500'>" + error_message + "</div>",
"execution_time": execution_time,
}
return error_json

# The child process was successful.
if output:
output_json = {"output": output.decode("utf8"), "execution_time": execution_time}
output_json = {
"output": output.decode("utf8"),
"execution_time": execution_time,
}
else:
output_json = {"output": "", "execution_time": execution_time}
return output_json
@ -79,8 +114,7 @@ def execute_python(code, client, message_id, build_file=True):
return spawn_process(code)


def execute_python_old(code, client:Client, message_id, build_file=True):
def execute_python_old(code, client: Client, message_id, build_file=True):
def spawn_process(code):
"""Executes Python code and returns the output as JSON."""

@ -89,9 +123,9 @@ def execute_python_old(code, client:Client, message_id, build_file=True):

# Create a temporary file.
root_folder = client.discussion.discussion_folder
root_folder.mkdir(parents=True,exist_ok=True)
tmp_file = root_folder/f"ai_code_{message_id}.py"
with open(tmp_file,"w",encoding="utf8") as f:
root_folder.mkdir(parents=True, exist_ok=True)
tmp_file = root_folder / f"ai_code_{message_id}.py"
with open(tmp_file, "w", encoding="utf8") as f:
f.write(code)

try:
@ -100,7 +134,7 @@ def execute_python_old(code, client:Client, message_id, build_file=True):
["python", str(tmp_file)],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=root_folder
cwd=root_folder,
)

# Get the output and error from the process.
@ -109,7 +143,14 @@ def execute_python_old(code, client:Client, message_id, build_file=True):
# Stop the timer.
execution_time = time.time() - start_time
error_message = f"Error executing Python code: {ex}"
error_json = {"output": "<div class='text-red-500'>"+ex+"\n"+get_trace_exception(ex)+"</div>", "execution_time": execution_time}
error_json = {
"output": "<div class='text-red-500'>"
+ ex
+ "\n"
+ get_trace_exception(ex)
+ "</div>",
"execution_time": execution_time,
}
return error_json

# Stop the timer.
@ -122,12 +163,19 @@ def execute_python_old(code, client:Client, message_id, build_file=True):
error_message = f"Output:{output.decode('utf-8', errors='ignore')}\nError executing Python code:\n{error.decode('utf-8', errors='ignore')}"
except:
error_message = f"Error executing Python code:\n{error}"
error_json = {"output": "<div class='text-red-500'>"+error_message+"</div>", "execution_time": execution_time}
error_json = {
"output": "<div class='text-red-500'>" + error_message + "</div>",
"execution_time": execution_time,
}
return error_json

# The child process was successful.
output_json = {"output": output.decode("utf8"), "execution_time": execution_time}
output_json = {
"output": output.decode("utf8"),
"execution_time": execution_time,
}
return output_json

return spawn_process(code)


@ -145,21 +193,47 @@ def create_and_execute_script(code, message_id, root_folder):
# Determine the platform and open a terminal to execute the Python code
system = platform.system()
if system == "Windows":
subprocess.Popen(f"""start cmd /k "cd /d "{root_folder}" && python "{tmp_file}" && pause" """, shell=True)
subprocess.Popen(
f"""start cmd /k "cd /d "{root_folder}" && python "{tmp_file}" && pause" """,
shell=True,
)
elif system == "Darwin": # macOS
subprocess.Popen(["open", "-a", "Terminal", f'cd "{root_folder}" && python "{tmp_file}"'], shell=True)
subprocess.Popen(
[
"open",
"-a",
"Terminal",
f'cd "{root_folder}" && python "{tmp_file}"',
],
shell=True,
)
elif system == "Linux":
subprocess.Popen(["x-terminal-emulator", "-e", f'bash -c "cd \\"{root_folder}\\" && python \\"{tmp_file}\\"; exec bash"'], shell=True)
subprocess.Popen(
[
"x-terminal-emulator",
"-e",
f'bash -c "cd \\"{root_folder}\\" && python \\"{tmp_file}\\"; exec bash"',
],
shell=True,
)
else:
raise Exception(f"Unsupported platform: {system}")

except Exception as ex:
error_message = f"Error executing Python code: {ex}"
error_json = {"output": "<div class='text-red-500'>" + error_message + "\n" + get_trace_exception(ex) + "</div>", "execution_time": 0}
error_json = {
"output": "<div class='text-red-500'>"
+ error_message
+ "\n"
+ get_trace_exception(ex)
+ "</div>",
"execution_time": 0,
}
print(error_json)


if __name__ == "__main__":
code = "print('Hello world');input('hi')"
message_id = 102
root_folder = r"E:\lollms\discussion_databases\html stuff\105"
create_and_execute_script(code, message_id, root_folder)
create_and_execute_script(code, message_id, root_folder)
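
A hedged usage sketch of the Python executors above. `client` and the snippet are placeholders (client.discussion.discussion_folder must be a real Path, as in the diff), and the values in the comments are illustrative only.

# execute_python opens a platform-specific terminal; execute_python_old captures stdout/stderr
# and returns the same {"output", "execution_time"} dictionary used throughout these utilities.
result = execute_python_old("print('hello from lollms')", client, message_id=42)
print(result["output"])          # e.g. "hello from lollms\n"
print(result["execution_time"])  # wall-clock seconds
# On failure, "output" carries an HTML <div class='text-red-500'> block with the traceback.
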
@ -6,16 +6,20 @@ description:
This is a utility for executing python code

"""
from lollms_webui import LOLLMSWebUI
from ascii_colors import get_trace_exception, trace_exception
import time
import subprocess

import json
import subprocess
import time

from ascii_colors import get_trace_exception, trace_exception
from lollms.client_session import Client

from lollms_webui import LOLLMSWebUI

lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()
def execute_bash(code, client:Client, message_id, build_file=False):
lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()


def execute_bash(code, client: Client, message_id, build_file=False):
def spawn_process(code):
"""Executes Python code and returns the output as JSON."""

@ -24,15 +28,15 @@ def execute_bash(code, client:Client, message_id, build_file=False):

# Create a temporary file.
root_folder = client.discussion.discussion_folder
root_folder.mkdir(parents=True,exist_ok=True)
root_folder.mkdir(parents=True, exist_ok=True)
try:
# Execute the Python code in a temporary file.
process = subprocess.Popen(
process = subprocess.Popen(
code,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=client.discussion.discussion_folder
cwd=client.discussion.discussion_folder,
)

# Get the output and error from the process.
@ -41,7 +45,14 @@ def execute_bash(code, client:Client, message_id, build_file=False):
# Stop the timer.
execution_time = time.time() - start_time
error_message = f"Error executing Python code: {ex}"
error_json = {"output": "<div class='text-red-500'>"+str(ex)+"\n"+get_trace_exception(ex)+"</div>", "execution_time": execution_time}
error_json = {
"output": "<div class='text-red-500'>"
+ str(ex)
+ "\n"
+ get_trace_exception(ex)
+ "</div>",
"execution_time": execution_time,
}
return error_json

# Stop the timer.
@ -50,11 +61,20 @@ def execute_bash(code, client:Client, message_id, build_file=False):
# Check if the process was successful.
if process.returncode != 0:
# The child process threw an exception.
error_message = f"Error executing Python code: {error.decode('utf8','ignore')}"
error_json = {"output": "<div class='text-red-500'>"+error_message+"</div>", "execution_time": execution_time}
error_message = (
f"Error executing Python code: {error.decode('utf8','ignore')}"
)
error_json = {
"output": "<div class='text-red-500'>" + error_message + "</div>",
"execution_time": execution_time,
}
return error_json

# The child process was successful.
output_json = {"output": output.decode("utf8"), "execution_time": execution_time}
output_json = {
"output": output.decode("utf8"),
"execution_time": execution_time,
}
return output_json

return spawn_process(code)
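
A short, hedged sketch of execute_bash above; the shell snippet, `client` and `message_id` are made up, and only the behaviour shown in the diff (shell=True, cwd set to the discussion folder, red <div> on failure) is assumed.

# Run a shell snippet inside the discussion folder and inspect the JSON-style result.
result = execute_bash("echo hello && ls", client, message_id=43)
if "<div class='text-red-500'>" in result["output"]:
    print("snippet failed:", result["output"])
else:
    print("stdout:", result["output"], "in", result["execution_time"], "s")
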
@ -6,16 +6,19 @@ description:
This is a utility for executing python code

"""
from lollms_webui import LOLLMSWebUI
from ascii_colors import get_trace_exception, trace_exception
import time
import subprocess

import json
import subprocess
import time

from ascii_colors import get_trace_exception, trace_exception
from lollms.client_session import Client
from lollms.utilities import discussion_path_to_url

from lollms_webui import LOLLMSWebUI

lollmsElfServer: LOLLMSWebUI = LOLLMSWebUI.get_instance()

lollmsElfServer:LOLLMSWebUI = LOLLMSWebUI.get_instance()

def build_svg_output(code, ifram_name=None):
"""
@ -31,118 +34,125 @@ def build_svg_output(code, ifram_name=None):
# Start the timer.
start_time = time.time()
if ifram_name is not None:
rendered = "\n".join([
'<div style="width: 100%; margin: 0 auto;">',
f'<iframe id="{ifram_name}" style="width: 100%" srcdoc="',
'<style>',
'iframe {',
'width: 100%;',
'height: 100%;',
'border: none;',
'}',
'.svg {',
'background-color: transparent;',
'padding: 20px;',
'border-radius: 10px;',
'display: flex;',
'justify-content: center;',
'align-items: center;',
'height: 100%;',
'}',
'</style>',
'<div class=\'svg\'>',
"\n".join([c for c in code.replace("\"","'").split("\n") if c.strip()!=""]),
'</div>',
'<script src=\'https://cdn.jsdelivr.net/npm/svg/dist/svg.min.js\'></script>',
'<script>',
'// Initialize the svg library and render our diagram',
'svg.initialize({ startOnLoad: true });',
'// Function to save SVG content to a file',
'function saveSVG() {',
'var svg = document.querySelector(".svg > svg");',
'var serializer = new XMLSerializer();',
'var source = serializer.serializeToString(svg);',
'var blob = new Blob([source], {type: "image/svg+xml;charset=utf-8"});',
'var url = URL.createObjectURL(blob);',
'var a = document.createElement("a");',
'a.href = url;',
'a.download = "diagram.svg";',
'a.click();',
'}',
'</script>',
'<div style=\'text-align: center;\'>',
'</div>',
'<button onclick="saveSVG()">Save SVG</button>',
'" style="width: 100%; height: 600px; border: none;"></iframe>',
'</div>'
rendered = "\n".join(
[
'<div style="width: 100%; margin: 0 auto;">',
f'<iframe id="{ifram_name}" style="width: 100%" srcdoc="',
"<style>",
"iframe {",
"width: 100%;",
"height: 100%;",
"border: none;",
"}",
".svg {",
"background-color: transparent;",
"padding: 20px;",
"border-radius: 10px;",
"display: flex;",
"justify-content: center;",
"align-items: center;",
"height: 100%;",
"}",
"</style>",
"<div class='svg'>",
"\n".join(
[c for c in code.replace('"', "'").split("\n") if c.strip() != ""]
),
"</div>",
"<script src='https://cdn.jsdelivr.net/npm/svg/dist/svg.min.js'></script>",
"<script>",
"// Initialize the svg library and render our diagram",
"svg.initialize({ startOnLoad: true });",
"// Function to save SVG content to a file",
"function saveSVG() {",
'var svg = document.querySelector(".svg > svg");',
"var serializer = new XMLSerializer();",
"var source = serializer.serializeToString(svg);",
'var blob = new Blob([source], {type: "image/svg+xml;charset=utf-8"});',
"var url = URL.createObjectURL(blob);",
'var a = document.createElement("a");',
"a.href = url;",
'a.download = "diagram.svg";',
"a.click();",
"}",
"</script>",
"<div style='text-align: center;'>",
"</div>",
'<button onclick="saveSVG()">Save SVG</button>',
'" style="width: 100%; height: 600px; border: none;"></iframe>',
"</div>",
]
)
else:
rendered = "\n".join([
'<div style="width: 100%; margin: 0 auto;">',
'<style>',
'.svg {',
'background-color: transparent;',
'padding: 20px;',
'border-radius: 10px;',
'display: flex;',
'justify-content: center;',
'align-items: center;',
'height: 100%;',
'}',
'</style>',
'<div class=\'svg\'>',
"\n".join([c for c in code.split("\n") if c.strip()!=""]),
'</div>',
'<script src=\'https://cdn.jsdelivr.net/npm/svg/dist/svg.min.js\'></script>',
'<script>',
'// Initialize the svg library and render our diagram',
'svg.initialize({ startOnLoad: true });',
'// Function to save SVG content to a file',
'function saveSVG() {',
'var svg = document.querySelector(".svg > svg");',
'var serializer = new XMLSerializer();',
'var source = serializer.serializeToString(svg);',
'var blob = new Blob([source], {type: "image/svg+xml;charset=utf-8"});',
'var url = URL.createObjectURL(blob);',
'var a = document.createElement("a");',
'a.href = url;',
'a.download = "diagram.svg";',
'a.click();',
'}',
'</script>',
'<div style=\'text-align: center;\'>',
'</div>',
'<button onclick="saveSVG()">Save SVG</button>',
'</div>'
rendered = "\n".join(
[
'<div style="width: 100%; margin: 0 auto;">',
"<style>",
".svg {",
"background-color: transparent;",
"padding: 20px;",
"border-radius: 10px;",
"display: flex;",
"justify-content: center;",
"align-items: center;",
"height: 100%;",
"}",
"</style>",
"<div class='svg'>",
"\n".join([c for c in code.split("\n") if c.strip() != ""]),
"</div>",
"<script src='https://cdn.jsdelivr.net/npm/svg/dist/svg.min.js'></script>",
"<script>",
"// Initialize the svg library and render our diagram",
"svg.initialize({ startOnLoad: true });",
"// Function to save SVG content to a file",
"function saveSVG() {",
'var svg = document.querySelector(".svg > svg");',
"var serializer = new XMLSerializer();",
"var source = serializer.serializeToString(svg);",
'var blob = new Blob([source], {type: "image/svg+xml;charset=utf-8"});',
"var url = URL.createObjectURL(blob);",
'var a = document.createElement("a");',
"a.href = url;",
'a.download = "diagram.svg";',
"a.click();",
"}",
"</script>",
"<div style='text-align: center;'>",
"</div>",
'<button onclick="saveSVG()">Save SVG</button>',
"</div>",
]
)
execution_time = time.time() - start_time
return {"output": rendered, "execution_time": execution_time}


def execute_svg(code, client:Client, message_id, build_file=False):
def execute_svg(code, client: Client, message_id, build_file=False):
if build_file:
# Start the timer.
start_time = time.time()
if not "http" in lollmsElfServer.config.host and not "https" in lollmsElfServer.config.host:
host = "http://"+lollmsElfServer.config.host
if (
not "http" in lollmsElfServer.config.host
and not "https" in lollmsElfServer.config.host
):
host = "http://" + lollmsElfServer.config.host
else:
host = lollmsElfServer.config.host

# Create a temporary file.
root_folder = client.discussion.discussion_folder
root_folder.mkdir(parents=True,exist_ok=True)
tmp_file = root_folder/f"ai_svg_{message_id}.svg"
with open(tmp_file,"w",encoding="utf8") as f:
root_folder.mkdir(parents=True, exist_ok=True)
tmp_file = root_folder / f"ai_svg_{message_id}.svg"
with open(tmp_file, "w", encoding="utf8") as f:
f.write(code)
link = f"{host}:{lollmsElfServer.config.port}{discussion_path_to_url(tmp_file)}"
# Stop the timer.
execution_time = time.time() - start_time
output_json = {"output": f'<b>Page built successfully</b><br><a href="{link}" target="_blank">Press here to view the page</a>', "execution_time": execution_time}
output_json = {
"output": f'<b>Page built successfully</b><br><a href="{link}" target="_blank">Press here to view the page</a>',
"execution_time": execution_time,
}
return output_json
else:
return build_svg_output(code, "app_iframe")
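
Finally, a hedged sketch of the two execute_svg paths above; the SVG markup is invented and `client`/`message_id` are placeholders, with behaviour inferred only from the code in the diff.

# Inline mode: wrap the markup in the saveable iframe produced by build_svg_output.
svg_code = "<svg xmlns='http://www.w3.org/2000/svg' width='100' height='100'><circle cx='50' cy='50' r='40'/></svg>"
inline = execute_svg(svg_code, client, message_id=44, build_file=False)

# File mode: write ai_svg_<message_id>.svg into the discussion folder and return a link
# served by the lollms web server instead of the inline HTML.
page = execute_svg(svg_code, client, message_id=44, build_file=True)
print(page["output"])  # "<b>Page built successfully</b>..." with the link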