Mirror of https://github.com/ParisNeo/lollms-webui.git, synced 2024-12-22 22:02:21 +00:00

Commit message: Welcome to wonder v 11
This commit is contained in:
parent c23b3f98e5
commit c90496a0dc

Changed paths:
docs
endpoints
events: lollms_chatbox_events.py, lollms_discussion_events.py, lollms_generation_events.py, lollms_interactive_events.py
lollms_core, lollms_webui.py, scripts/python/lollms_installer
utilities/execution_engines
web: dist, src
zoos
@@ -47,9 +47,9 @@ class APScript:
def ui_in_iframe(self, html_ui: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def code(self, code: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def chunk(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def full(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None, msg_type: MSG_TYPE = MSG_TYPE.MSG_TYPE_FULL) -> Any
def full_invisible_to_ai(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def full_invisible_to_user(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def set_message_content(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None, msg_type: MSG_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT) -> Any
def set_message_content_invisible_to_ai(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def set_message_content_invisible_to_user(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def execute_python(self, code, code_folder = None, code_file_name = None) -> Any
def build_python_code(self, prompt, max_title_length = 4096) -> Any
def make_title(self, prompt, max_title_length: int = 50) -> Any
@@ -65,7 +65,7 @@ class APScript:
def InfoMessage(self, content, client_id = None, verbose: bool = None) -> Any
def info(self, info_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def step_progress(self, step_text: str, progress: float, callback: Callable[([str, MSG_TYPE, dict, list, AIPersonality], bool)] = None) -> Any
def new_message(self, message_text: str, message_type: MSG_TYPE = MSG_TYPE.MSG_TYPE_FULL, metadata = [], callback: Callable[([str, int, dict, list, AIPersonality], bool)] = None) -> Any
def new_message(self, message_text: str, message_type: MSG_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, metadata = [], callback: Callable[([str, int, dict, list, AIPersonality], bool)] = None) -> Any
def finished_message(self, message_text: str = '', callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def print_prompt(self, title, prompt) -> Any
def fast_gen_with_images(self, prompt: str, images: list, max_generation_size: int = None, placeholders: dict = {}, sacrifice: list = ['previous_discussion'], debug: bool = False, callback = None, show_progress = False) -> str
@@ -15,11 +15,11 @@ class AIPersonality:
def success(self, content, duration: int = 4, client_id = None, verbose: bool = True) -> Any
def error(self, content, duration: int = 4, client_id = None, verbose: bool = True) -> Any
def notify(self, content, notification_type: NotificationType = NotificationType.NOTIF_SUCCESS, duration: int = 4, client_id = None, display_type: NotificationDisplayType = NotificationDisplayType.TOAST, verbose = True) -> Any
def new_message(self, message_text: str, message_type: MSG_TYPE = MSG_TYPE.MSG_TYPE_FULL, metadata = [], callback: Callable[([str, int, dict, list, Any], bool)] = None) -> Any
def full(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def new_message(self, message_text: str, message_type: MSG_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, metadata = [], callback: Callable[([str, int, dict, list, Any], bool)] = None) -> Any
def set_message_content(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def ui(self, ui_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def full_invisible_to_ai(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def full_invisible_to_user(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def set_message_content_invisible_to_ai(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def set_message_content_invisible_to_user(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def build_prompt(self, prompt_parts: List[str], sacrifice_id: int = -1, context_size: int = None, minimum_spare_context_size: int = None) -> Any
def add_collapsible_entry(self, title, content) -> Any
def internet_search_with_vectorization(self, query, quick_search: bool = False, asses_using_llm = True) -> Any
@@ -213,9 +213,9 @@ class APScript:
def ui_in_iframe(self, html_ui: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def code(self, code: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def chunk(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def full(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None, msg_type: MSG_TYPE = MSG_TYPE.MSG_TYPE_FULL) -> Any
def full_invisible_to_ai(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def full_invisible_to_user(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def set_message_content(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None, msg_type: MSG_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT) -> Any
def set_message_content_invisible_to_ai(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def set_message_content_invisible_to_user(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def execute_python(self, code, code_folder = None, code_file_name = None) -> Any
def build_python_code(self, prompt, max_title_length = 4096) -> Any
def make_title(self, prompt, max_title_length: int = 50) -> Any
@@ -231,7 +231,7 @@ class APScript:
def InfoMessage(self, content, client_id = None, verbose: bool = None) -> Any
def info(self, info_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def step_progress(self, step_text: str, progress: float, callback: Callable[([str, MSG_TYPE, dict, list, AIPersonality], bool)] = None) -> Any
def new_message(self, message_text: str, message_type: MSG_TYPE = MSG_TYPE.MSG_TYPE_FULL, metadata = [], callback: Callable[([str, int, dict, list, AIPersonality], bool)] = None) -> Any
def new_message(self, message_text: str, message_type: MSG_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, metadata = [], callback: Callable[([str, int, dict, list, AIPersonality], bool)] = None) -> Any
def finished_message(self, message_text: str = '', callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def print_prompt(self, title, prompt) -> Any
def fast_gen_with_images(self, prompt: str, images: list, max_generation_size: int = None, placeholders: dict = {}, sacrifice: list = ['previous_discussion'], debug: bool = False, callback = None, show_progress = False) -> str
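Read together, the hunks above amount to a systematic renaming of the APScript and AIPersonality messaging helpers. As a hedged summary (paraphrasing the signatures listed above, not code taken from the commit):

```python
# Old helper or constant -> new name in v11; signatures are otherwise unchanged.
RENAMES = {
    "full": "set_message_content",
    "full_invisible_to_ai": "set_message_content_invisible_to_ai",
    "full_invisible_to_user": "set_message_content_invisible_to_user",
    "MSG_TYPE.MSG_TYPE_FULL": "MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT",
}
```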

@@ -385,13 +385,13 @@ def notify(self, content, notification_type: NotificationType = NotificationType

### new_message

```python
def new_message(self, message_text: str, message_type: MSG_TYPE = MSG_TYPE.MSG_TYPE_FULL, metadata = [], callback: Callable[([str, int, dict, list, Any], bool)] = None) -> Any
def new_message(self, message_text: str, message_type: MSG_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, metadata = [], callback: Callable[([str, int, dict, list, Any], bool)] = None) -> Any
```
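A minimal usage sketch of the updated default (an assumption-laden illustration, not code from the commit; it presumes the call is made from inside a personality script where self exposes the helpers listed above):

```python
from lollms.types import MSG_OPERATION_TYPE

# Open a fresh AI message with the new operation-type default, then fill it.
self.new_message("", message_type=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
self.set_message_content("Analysis complete.")
```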

### full

```python
def full(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def set_message_content(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
```

### ui

@@ -403,13 +403,13 @@ def ui(self, ui_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool

### full_invisible_to_ai

```python
def full_invisible_to_ai(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def set_message_content_invisible_to_ai(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
```

### full_invisible_to_user

```python
def full_invisible_to_user(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def set_message_content_invisible_to_user(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
```

### build_prompt

@@ -1393,19 +1393,19 @@ def chunk(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list],

### full

```python
def full(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None, msg_type: MSG_TYPE = MSG_TYPE.MSG_TYPE_FULL) -> Any
def set_message_content(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None, msg_type: MSG_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT) -> Any
```

### full_invisible_to_ai

```python
def full_invisible_to_ai(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def set_message_content_invisible_to_ai(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
```

### full_invisible_to_user

```python
def full_invisible_to_user(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
def set_message_content_invisible_to_user(self, full_text: str, callback: Callable[([str, MSG_TYPE, dict, list], bool)] = None) -> Any
```

### execute_python

@@ -1501,7 +1501,7 @@ def step_progress(self, step_text: str, progress: float, callback: Callable[([st

### new_message

```python
def new_message(self, message_text: str, message_type: MSG_TYPE = MSG_TYPE.MSG_TYPE_FULL, metadata = [], callback: Callable[([str, int, dict, list, AIPersonality], bool)] = None) -> Any
def new_message(self, message_text: str, message_type: MSG_TYPE = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, metadata = [], callback: Callable[([str, int, dict, list, AIPersonality], bool)] = None) -> Any
```

### finished_message

@@ -107,7 +107,7 @@ full(text: str)
Writes text in a full message to the user inside a description. This method replaces any existing content in the message description with the specified text.

Example Usage:
self.full("Here is a complete overview of your request.")
self.set_message_content("Here is a complete overview of your request.")
```
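As a rough migration sketch (hedged; it assumes an APScript subclass and that the script exposes a text-only fast_gen sibling of the fast_gen_with_images helper listed earlier), a workflow that used to call self.full(...) only needs the new name:

```python
# Hedged sketch of a personality workflow after the rename (not code from this commit).
def run_workflow(self, prompt, previous_discussion_text="", callback=None):
    answer = self.fast_gen(prompt, callback=callback)  # assumed text-only generation helper
    # Pre-v11 this was: self.full(answer, callback)
    self.set_message_content(answer, callback)         # replaces the whole message content
    return answer
```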

### Append Chunk (`chunk`)

@@ -13,7 +13,7 @@ from pydantic import BaseModel, Field
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index
from ascii_colors import ASCIIColors
@@ -78,7 +78,7 @@ async def execute_personality_command(request: CmdExecutionRequest):
ump = lollmsElfServer.config.discussion_prompt_separator +lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix
message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value,
message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
content="",

@@ -10,7 +10,7 @@ from fastapi import APIRouter, Request
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel, Field
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, show_yes_no_dialog, add_period
from lollms.security import sanitize_path, forbid_remote_access, check_access, sanitize_svg

@@ -12,7 +12,7 @@ from fastapi.responses import FileResponse
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel, Field
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, show_yes_no_dialog, add_period
from lollms.security import sanitize_path, forbid_remote_access, check_access, sanitize_svg, sanitize_path_from_endpoint

@@ -12,7 +12,7 @@ from pydantic import Field
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception
from ascii_colors import ASCIIColors
from lollms.databases.discussions_database import DiscussionsDB

@@ -12,7 +12,7 @@ from fastapi import HTTPException
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel, Field
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from lollms.utilities import detect_antiprompt, remove_text_from_string, trace_exception, find_first_available_file_index, add_period, PackageManager
from lollms.security import sanitize_path_from_endpoint, validate_path, forbid_remote_access, check_access

@@ -15,8 +15,8 @@ from lollms.server.elf_server import LOLLMSElfServer
from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.personality import AIPersonality
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
from lollms.utilities import load_config, trace_exception, gc
from lollms.utilities import find_first_available_file_index, convert_language_name, PackageManager, run_async
from lollms_webui import LOLLMSWebUI

@@ -15,8 +15,8 @@ from lollms.server.elf_server import LOLLMSElfServer
from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.personality import AIPersonality
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
from lollms.utilities import load_config, trace_exception, gc
from lollms.utilities import find_first_available_file_index, convert_language_name, PackageManager
from lollms_webui import LOLLMSWebUI
@@ -106,10 +106,11 @@ def add_events(sio:socketio):
except:
nb_tokens = None
message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value if lollmsElfServer.personality.include_welcome_message_in_discussion else MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value,
message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value if lollmsElfServer.personality.include_welcome_message_in_discussion else MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI.value,
sender_type = SENDER_TYPES.SENDER_TYPES_AI.value,
sender = lollmsElfServer.personality.name,
content = welcome_message,
steps = [],
metadata = None,
rank = 0,
parent_message_id = -1,

@@ -15,8 +15,8 @@ from lollms.server.elf_server import LOLLMSElfServer
from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.personality import AIPersonality
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
from lollms.utilities import load_config, trace_exception, gc
from lollms.utilities import find_first_available_file_index, convert_language_name
from lollms.security import forbid_remote_access
@@ -67,10 +67,11 @@ def add_events(sio:socketio):
created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
ump = lollmsElfServer.config.discussion_prompt_separator +lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix
message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value,
message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
content=prompt,
steps=[],
metadata=None,
parent_message_id=lollmsElfServer.message_id,
created_at=created_at,
@@ -124,11 +125,12 @@ def add_events(sio:socketio):
created_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
ump = lollmsElfServer.config.discussion_prompt_separator +lollmsElfServer.config.user_name.strip() if lollmsElfServer.config.use_user_name_in_discussions else lollmsElfServer.personality.user_message_prefix
message = lollmsElfServer.session.get_client(client_id).discussion.add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value,
message_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
sender = ump.replace(lollmsElfServer.config.discussion_prompt_separator,"").replace(":",""),
content=prompt,
metadata=None,
content = prompt,
steps = [],
metadata = None,
parent_message_id=lollmsElfServer.message_id,
created_at=created_at,
nb_tokens=nb_tokens

@@ -15,8 +15,8 @@ from lollms.server.elf_server import LOLLMSElfServer
from fastapi.responses import FileResponse
from lollms.binding import BindingBuilder, InstallOption
from ascii_colors import ASCIIColors
from lollms.personality import MSG_TYPE, AIPersonality
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.personality import AIPersonality
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
from lollms.utilities import load_config, trace_exception, gc
from lollms.utilities import find_first_available_file_index, convert_language_name, PackageManager, run_async, add_period
from lollms.security import forbid_remote_access, check_access

@@ -1 +1 @@
Subproject commit 1b4c8ca2930731f1012380e00538d8ccdf9fd816
Subproject commit 7ebeeb347a2abd76d70c1ff1368d83981e3b6f0e

lollms_webui.py (262 changed lines)
@@ -11,7 +11,7 @@ from datetime import datetime
from lollms.databases.discussions_database import DiscussionsDB, Discussion
from pathlib import Path
from lollms.config import InstallOption
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.types import MSG_TYPE, MSG_OPERATION_TYPE, MSG_OPERATION_TYPE, CONTENT_OPERATION_TYPES, SENDER_TYPES
from lollms.extension import LOLLMSExtension, ExtensionBuilder
from lollms.personality import AIPersonality, PersonalityBuilder
from lollms.binding import LOLLMSConfig, BindingBuilder, LLMBinding, ModelBuilder, BindingType
@@ -71,7 +71,7 @@ def terminate_thread(thread):
else:
ASCIIColors.yellow("Canceled successfully")# The current version of the webui

lollms_webui_version="10.1 (Warp Drive)"
lollms_webui_version="11 code name: Wonder"
@@ -339,7 +339,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
except:
nb_tokens = None
message = client.discussion.add_message(
message_type = MSG_TYPE.MSG_TYPE_FULL.value,
message_type = MSG_TYPE.MSG_TYPE_CONTENT.value,
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
sender = self.config.user_name.strip() if self.config.use_user_name_in_discussions else self.config.user_name,
content = prompt,
@@ -632,7 +632,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
for message in messages:
|
||||
# Check if the message content is not empty and visible to the AI
|
||||
if message.content != '' and (
|
||||
message.message_type <= MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER.value and message.message_type != MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI.value):
|
||||
message.message_type <= MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER.value and message.message_type != MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI.value):
|
||||
|
||||
# Tokenize the message content
|
||||
message_tokenized = self.model.tokenize(
|
||||
@@ -656,7 +656,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
title = [""]
|
||||
def receive(
|
||||
chunk:str,
|
||||
message_type:MSG_TYPE
|
||||
message_type:MSG_OPERATION_TYPE
|
||||
):
|
||||
if chunk:
|
||||
title[0] += chunk
|
||||
@@ -682,7 +682,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
|
||||
for message in messages:
|
||||
if message["id"]<= message_id or message_id==-1:
|
||||
if message["type"]!=MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER:
|
||||
if message["type"]!=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER:
|
||||
if message["sender"]==self.personality.name:
|
||||
full_message_list.append(self.config.discussion_prompt_separator+self.personality.ai_message_prefix+message["content"])
|
||||
else:
|
||||
@@ -697,7 +697,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
|
||||
return discussion_messages # Removes the last return
|
||||
|
||||
def full(self, full_text:str, callback: Callable[[str, MSG_TYPE, dict, list], bool]=None, client_id=0):
|
||||
def set_message_content(self, full_text:str, callback: Callable[[str, MSG_OPERATION_TYPE, dict, list], bool]|None=None, client_id=0):
|
||||
"""This sends full text to front end
|
||||
|
||||
Args:
|
||||
@@ -708,7 +708,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
callback = partial(self.process_chunk,client_id = client_id)
|
||||
|
||||
if callback:
|
||||
callback(full_text, MSG_TYPE.MSG_TYPE_FULL)
|
||||
callback(full_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
|
||||
|
||||
def emit_socket_io_info(self, name, data, client_id):
|
||||
run_async(partial(self.sio.emit,name, data, to=client_id))
|
||||
@@ -753,16 +753,12 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
client_id,
|
||||
sender=None,
|
||||
content="",
|
||||
parameters=None,
|
||||
metadata=None,
|
||||
ui=None,
|
||||
message_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_FULL,
|
||||
message_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT,
|
||||
sender_type:SENDER_TYPES=SENDER_TYPES.SENDER_TYPES_AI,
|
||||
open=False
|
||||
):
|
||||
client = self.session.get_client(client_id)
|
||||
#self.close_message(client_id)
|
||||
mtdt = metadata if metadata is None or type(metadata) == str else json.dumps(metadata, indent=4)
|
||||
if sender==None:
|
||||
sender= self.personality.name
|
||||
msg = client.discussion.add_message(
|
||||
@@ -770,8 +766,9 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
sender_type = sender_type.value,
|
||||
sender = sender,
|
||||
content = content,
|
||||
metadata = mtdt,
|
||||
ui = ui,
|
||||
steps = [],
|
||||
metadata = None,
|
||||
ui = None,
|
||||
rank = 0,
|
||||
parent_message_id = client.discussion.current_message.id if client.discussion.current_message is not None else 0,
|
||||
binding = self.config["binding_name"],
|
||||
@@ -786,9 +783,8 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
"message_type": message_type.value,
|
||||
"sender_type": SENDER_TYPES.SENDER_TYPES_AI.value,
|
||||
"content": content,
|
||||
"parameters": parameters,
|
||||
"metadata": metadata,
|
||||
"ui": ui,
|
||||
"metadata": None,
|
||||
"ui": None,
|
||||
"id": msg.id,
|
||||
"parent_message_id": msg.parent_message_id,
|
||||
|
||||
@@ -805,6 +801,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
}, to=client_id
|
||||
)
|
||||
)
|
||||
|
||||
def new_block( self,
|
||||
client_id,
|
||||
sender=None,
|
||||
@@ -812,7 +809,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
parameters=None,
|
||||
metadata=None,
|
||||
ui=None,
|
||||
message_type:MSG_TYPE=MSG_TYPE.MSG_TYPE_FULL,
|
||||
message_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT,
|
||||
sender_type:SENDER_TYPES=SENDER_TYPES.SENDER_TYPES_AI,
|
||||
open=False
|
||||
):
|
||||
@@ -852,6 +849,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
'id':client.discussion.current_message.id,
|
||||
'content': client.discussion.current_message.content,
|
||||
'discussion_id':client.discussion.discussion_id,
|
||||
'operation_type': MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
|
||||
'message_type': client.discussion.current_message.message_type,
|
||||
'created_at':client.discussion.current_message.created_at,
|
||||
'started_generating_at': client.discussion.current_message.started_generating_at,
|
||||
@@ -860,11 +858,12 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
}, to=client_id
|
||||
)
|
||||
)
|
||||
|
||||
def update_message(self, client_id, chunk,
|
||||
parameters=None,
|
||||
metadata=[],
|
||||
ui=None,
|
||||
msg_type:MSG_TYPE=None
|
||||
operation_type:MSG_OPERATION_TYPE=None
|
||||
):
|
||||
client = self.session.get_client(client_id)
|
||||
client.discussion.current_message.finished_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
||||
@@ -874,24 +873,8 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
|
||||
if self.nb_received_tokens==1:
|
||||
client.discussion.current_message.started_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
||||
|
||||
run_async(
|
||||
partial(self.sio.emit,'update_message', {
|
||||
"sender": self.personality.name,
|
||||
'id':client.discussion.current_message.id,
|
||||
'content': "✍ warming up ...",
|
||||
'ui': client.discussion.current_message.ui if ui is None else ui,
|
||||
'discussion_id':client.discussion.discussion_id,
|
||||
'message_type': MSG_TYPE.MSG_TYPE_STEP_END.value,
|
||||
'created_at':client.discussion.current_message.created_at,
|
||||
'started_generating_at': client.discussion.current_message.started_generating_at,
|
||||
'finished_generating_at': client.discussion.current_message.finished_generating_at,
|
||||
'nb_tokens': client.discussion.current_message.nb_tokens,
|
||||
'parameters':parameters,
|
||||
'metadata':metadata
|
||||
}, to=client_id
|
||||
)
|
||||
)
|
||||
self.update_message_step(client_id, "✍ warming up ...",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
|
||||
self.update_message_step(client_id, "✍ generating ...",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
|
||||
|
||||
run_async(
|
||||
partial(self.sio.emit,'update_message', {
|
||||
@@ -900,7 +883,8 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
'content': chunk,
|
||||
'ui': client.discussion.current_message.ui if ui is None else ui,
|
||||
'discussion_id':client.discussion.discussion_id,
|
||||
'message_type': msg_type.value if msg_type is not None else MSG_TYPE.MSG_TYPE_CHUNK.value if self.nb_received_tokens>1 else MSG_TYPE.MSG_TYPE_FULL.value,
|
||||
'operation_type': operation_type.value if operation_type is not None else MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK.value if self.nb_received_tokens>1 else MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
|
||||
'message_type': MSG_TYPE.MSG_TYPE_CONTENT.value,
|
||||
'created_at':client.discussion.current_message.created_at,
|
||||
'started_generating_at': client.discussion.current_message.started_generating_at,
|
||||
'finished_generating_at': client.discussion.current_message.finished_generating_at,
|
||||
@@ -910,13 +894,107 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
}, to=client_id
|
||||
)
|
||||
)
|
||||
if msg_type and msg_type.value < MSG_TYPE.MSG_TYPE_INFO.value:
|
||||
if operation_type and operation_type.value < MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO.value:
|
||||
client.discussion.update_message(client.generated_text, new_metadata=mtdt, new_ui=ui, started_generating_at=client.discussion.current_message.started_generating_at, nb_tokens=client.discussion.current_message.nb_tokens)
|
||||
|
||||
|
||||
|
||||
def update_message_content(self, client_id, chunk, operation_type:MSG_OPERATION_TYPE=MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, message_type: MSG_TYPE=None):
|
||||
client = self.session.get_client(client_id)
|
||||
client.discussion.current_message.finished_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
||||
client.discussion.current_message.nb_tokens = self.nb_received_tokens
|
||||
|
||||
|
||||
if self.nb_received_tokens==1:
|
||||
client.discussion.current_message.started_generating_at=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
||||
|
||||
run_async(
|
||||
partial(self.sio.emit,'update_message', {
|
||||
"sender": self.personality.name,
|
||||
'id':client.discussion.current_message.id,
|
||||
'content': chunk,
|
||||
'discussion_id':client.discussion.discussion_id,
|
||||
'operation_type': operation_type.value,
|
||||
'message_type': client.discussion.current_message.message_type if message_type is None else message_type,
|
||||
'created_at':client.discussion.current_message.created_at,
|
||||
'started_generating_at': client.discussion.current_message.started_generating_at,
|
||||
'finished_generating_at': client.discussion.current_message.finished_generating_at,
|
||||
'nb_tokens': client.discussion.current_message.nb_tokens,
|
||||
}, to=client_id
|
||||
)
|
||||
)
|
||||
|
||||
client.discussion.update_message_content(client.generated_text, started_generating_at=client.discussion.current_message.started_generating_at, nb_tokens=client.discussion.current_message.nb_tokens)
|
||||
|
||||
def update_message_step(self, client_id, step_text, msg_operation_type:MSG_OPERATION_TYPE=None):
|
||||
client = self.session.get_client(client_id)
|
||||
if msg_operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP:
|
||||
client.discussion.current_message.add_step(step_text,"instant",True, True)
|
||||
elif msg_operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_START:
|
||||
client.discussion.current_message.add_step(step_text,"start_end",True,False)
|
||||
elif msg_operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS:
|
||||
client.discussion.current_message.add_step(step_text,"start_end",True, True)
|
||||
elif msg_operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_FAILURE:
|
||||
client.discussion.current_message.add_step(step_text,"start_end",False, True)
|
||||
|
||||
run_async(
|
||||
partial(self.sio.emit,'update_message', {
|
||||
'id':client.discussion.current_message.id,
|
||||
'discussion_id':client.discussion.discussion_id,
|
||||
'operation_type': msg_operation_type.value,
|
||||
'steps': client.discussion.current_message.steps,
|
||||
|
||||
}, to=client_id
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def update_message_metadata(self, client_id, metadata):
|
||||
client = self.session.get_client(client_id)
|
||||
run_async(
|
||||
partial(self.sio.emit,'update_message', {
|
||||
"sender": self.personality.name,
|
||||
'id':client.discussion.current_message.id,
|
||||
'metadata': metadata if type(metadata) in [str, None] else json.dumps(metadata) if type(metadata)==dict else None,
|
||||
'discussion_id':client.discussion.discussion_id,
|
||||
'message_type': MSG_OPERATION_TYPE.MSG_TYPE_JSON_INFOS,
|
||||
'created_at':client.discussion.current_message.created_at,
|
||||
'started_generating_at': client.discussion.current_message.started_generating_at,
|
||||
'finished_generating_at': client.discussion.current_message.finished_generating_at,
|
||||
'nb_tokens': client.discussion.current_message.nb_tokens,
|
||||
}, to=client_id
|
||||
)
|
||||
)
|
||||
|
||||
client.discussion.update_message_metadata(metadata)
|
||||
|
||||
def update_message_ui(self, client_id, ui):
|
||||
client = self.session.get_client(client_id)
|
||||
run_async(
|
||||
partial(self.sio.emit,'update_message', {
|
||||
"sender": self.personality.name,
|
||||
'id':client.discussion.current_message.id,
|
||||
'ui': ui if type(ui) in [str, None] else None,
|
||||
'discussion_id':client.discussion.discussion_id,
|
||||
'message_type': MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI,
|
||||
'created_at':client.discussion.current_message.created_at,
|
||||
'started_generating_at': client.discussion.current_message.started_generating_at,
|
||||
'finished_generating_at': client.discussion.current_message.finished_generating_at,
|
||||
'nb_tokens': client.discussion.current_message.nb_tokens,
|
||||
}, to=client_id
|
||||
)
|
||||
)
|
||||
|
||||
client.discussion.update_message_ui(ui)
|
||||
|
||||
|
||||
|
||||
def close_message(self, client_id):
|
||||
client = self.session.get_client(client_id)
|
||||
for msg in client.discussion.messages:
|
||||
if msg.steps is not None:
|
||||
for step in msg.steps:
|
||||
step["done"]=True
|
||||
if not client.discussion:
|
||||
return
|
||||
#fix halucination
|
||||
@@ -949,9 +1027,8 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
|
||||
def process_chunk(
|
||||
self,
|
||||
chunk:str,
|
||||
message_type:MSG_TYPE,
|
||||
parameters:dict=None,
|
||||
chunk:str|None,
|
||||
operation_type:MSG_OPERATION_TYPE,
|
||||
metadata:list=None,
|
||||
client_id:int=0,
|
||||
personality:AIPersonality=None
|
||||
@ -960,64 +1037,61 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
Processes a chunk of generated text
|
||||
"""
|
||||
client = self.session.get_client(client_id)
|
||||
if chunk is None:
|
||||
if chunk is None and operation_type in [MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK]:
|
||||
return
|
||||
|
||||
if chunk is not None:
|
||||
if not client_id in list(self.session.clients.keys()):
|
||||
self.error("Connection lost", client_id=client_id)
|
||||
return
|
||||
if message_type == MSG_TYPE.MSG_TYPE_STEP:
|
||||
if operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP:
|
||||
ASCIIColors.info("--> Step:"+chunk)
|
||||
if message_type == MSG_TYPE.MSG_TYPE_STEP_START:
|
||||
if operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_START:
|
||||
ASCIIColors.info("--> Step started:"+chunk)
|
||||
if message_type == MSG_TYPE.MSG_TYPE_STEP_END:
|
||||
if parameters['status']:
|
||||
ASCIIColors.success("--> Step ended:"+chunk)
|
||||
else:
|
||||
ASCIIColors.error("--> Step ended:"+chunk)
|
||||
if message_type == MSG_TYPE.MSG_TYPE_WARNING:
|
||||
self.update_message_step(client_id, chunk, operation_type)
|
||||
if operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS:
|
||||
ASCIIColors.success("--> Step ended:"+chunk)
|
||||
self.update_message_step(client_id, chunk, operation_type)
|
||||
if operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_FAILURE:
|
||||
ASCIIColors.success("--> Step ended:"+chunk)
|
||||
self.update_message_step(client_id, chunk, operation_type)
|
||||
if operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_WARNING:
|
||||
self.warning(chunk,client_id=client_id)
|
||||
ASCIIColors.error("--> Exception from personality:"+chunk)
|
||||
if message_type == MSG_TYPE.MSG_TYPE_EXCEPTION:
|
||||
if operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_EXCEPTION:
|
||||
self.error(chunk, client_id=client_id)
|
||||
ASCIIColors.error("--> Exception from personality:"+chunk)
|
||||
return
|
||||
if message_type == MSG_TYPE.MSG_TYPE_INFO:
|
||||
if operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_INFO:
|
||||
self.info(chunk, client_id=client_id)
|
||||
ASCIIColors.info("--> Info:"+chunk)
|
||||
return
|
||||
if message_type == MSG_TYPE.MSG_TYPE_UI:
|
||||
self.update_message(client_id, "", parameters, metadata, chunk, MSG_TYPE.MSG_TYPE_UI)
|
||||
if operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI:
|
||||
self.update_message_ui(client_id, chunk)
|
||||
|
||||
if message_type == MSG_TYPE.MSG_TYPE_NEW_MESSAGE:
|
||||
if operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_NEW_MESSAGE:
|
||||
self.nb_received_tokens = 0
|
||||
self.start_time = datetime.now()
|
||||
self.update_message(client_id, "Generating ...", {"status":True}, msg_type=MSG_TYPE.MSG_TYPE_STEP_END)
|
||||
self.update_message_step(client_id, "✍ warming up ...", MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
|
||||
self.update_message_step(client_id, "✍ generating ...",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
|
||||
self.new_message(
|
||||
client_id,
|
||||
self.personality.name if personality is None else personality.name,
|
||||
chunk if parameters["type"]!=MSG_TYPE.MSG_TYPE_UI.value else '',
|
||||
metadata = [{
|
||||
"title":chunk,
|
||||
"content":parameters["metadata"]
|
||||
}
|
||||
] if parameters["type"]==MSG_TYPE.MSG_TYPE_JSON_INFOS.value else None,
|
||||
ui= chunk if parameters["type"]==MSG_TYPE.MSG_TYPE_UI.value else None,
|
||||
message_type= MSG_TYPE(parameters["type"])
|
||||
)
|
||||
|
||||
elif message_type == MSG_TYPE.MSG_TYPE_FINISHED_MESSAGE:
|
||||
self.personality.name if personality is None else personality.name,
|
||||
chunk,
|
||||
operation_type= MSG_TYPE.MSG_TYPE_CONTENT
|
||||
)
|
||||
|
||||
elif operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_FINISHED_MESSAGE:
|
||||
self.close_message(client_id)
|
||||
|
||||
elif message_type == MSG_TYPE.MSG_TYPE_CHUNK:
|
||||
|
||||
elif operation_type == MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK:
|
||||
if self.nb_received_tokens==0:
|
||||
self.start_time = datetime.now()
|
||||
try:
|
||||
self.update_message(client_id, "✍ warming up ...", msg_type=MSG_TYPE.MSG_TYPE_STEP_END, parameters= {'status':True})
|
||||
self.update_message(client_id, "Generating ...", msg_type=MSG_TYPE.MSG_TYPE_STEP_START)
|
||||
self.update_message_step(client_id, "✍ warming up ...",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
|
||||
self.update_message_step(client_id, "✍ generating ...",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
ASCIIColors.warning("Couldn't send status update to client")
|
||||
dt =(datetime.now() - self.start_time).seconds
|
||||
if dt==0:
|
||||
@@ -1029,20 +1103,20 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
sys.stdout = sys.__stdout__
|
||||
sys.stdout.flush()
|
||||
if chunk:
|
||||
|
||||
client.generated_text += chunk
|
||||
antiprompt = self.personality.detect_antiprompt(client.generated_text)
|
||||
if antiprompt:
|
||||
ASCIIColors.warning(f"\n{antiprompt} detected. Stopping generation")
|
||||
client.generated_text = self.remove_text_from_string(client.generated_text,antiprompt)
|
||||
self.update_message(client_id, client.generated_text, parameters, metadata, None, MSG_TYPE.MSG_TYPE_FULL)
|
||||
self.update_message_content(client_id, client.generated_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
|
||||
return False
|
||||
else:
|
||||
self.nb_received_tokens += 1
|
||||
if client.continuing and client.first_chunk:
|
||||
self.update_message(client_id, client.generated_text, parameters, metadata)
|
||||
self.update_message_content(client_id, client.generated_text, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT)
|
||||
else:
|
||||
self.update_message(client_id, chunk, parameters, metadata, msg_type=MSG_TYPE.MSG_TYPE_CHUNK)
|
||||
self.update_message_content(client_id, chunk, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_ADD_CHUNK)
|
||||
|
||||
client.first_chunk=False
|
||||
# if stop generation is detected then stop
|
||||
if not self.cancel_gen:
|
||||
@@ -1053,12 +1127,13 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
return False
|
||||
|
||||
# Stream the generated text to the main process
|
||||
elif message_type in [MSG_TYPE.MSG_TYPE_FULL, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_AI, MSG_TYPE.MSG_TYPE_FULL_INVISIBLE_TO_USER]:
|
||||
elif operation_type in [MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER]:
|
||||
if self.nb_received_tokens==0:
|
||||
self.start_time = datetime.now()
|
||||
try:
|
||||
self.update_message(client_id, "✍ warming up ...", msg_type=MSG_TYPE.MSG_TYPE_STEP_END, parameters= {'status':True})
|
||||
self.update_message(client_id, "Generating ...", msg_type=MSG_TYPE.MSG_TYPE_STEP_START)
|
||||
self.update_message_step(client_id, "✍ warming up ...", MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
|
||||
self.update_message_step(client_id, "✍ generating ...",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
|
||||
|
||||
except Exception as ex:
|
||||
ASCIIColors.warning("Couldn't send status update to client")
|
||||
|
||||
@@ -1067,14 +1142,14 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
if antiprompt:
|
||||
ASCIIColors.warning(f"\n{antiprompt} detected. Stopping generation")
|
||||
client.generated_text = self.remove_text_from_string(client.generated_text,antiprompt)
|
||||
self.update_message(client_id, client.generated_text, parameters, metadata, None, message_type)
|
||||
self.update_message_content(client_id, client.generated_text, operation_type)
|
||||
return False
|
||||
|
||||
self.update_message(client_id, chunk, parameters, metadata, ui=None, msg_type=message_type)
|
||||
self.update_message_content(client_id, chunk, operation_type)
|
||||
return True
|
||||
# Stream the generated text to the frontend
|
||||
else:
|
||||
self.update_message(client_id, chunk, parameters, metadata, ui=None, msg_type=message_type)
|
||||
self.update_message_content(client_id, chunk, operation_type)
|
||||
return True
|
||||
|
||||
|
||||
@@ -1136,7 +1211,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
ASCIIColors.error(f"Workflow run failed.\nError:{ex}")
|
||||
ASCIIColors.error(traceback_text)
|
||||
if callback:
|
||||
callback(f"Workflow run failed\nError:{ex}", MSG_TYPE.MSG_TYPE_EXCEPTION)
|
||||
callback(f"Workflow run failed\nError:{ex}", MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_EXCEPTION)
|
||||
return
|
||||
print("Finished executing the workflow")
|
||||
return output
|
||||
@@ -1204,7 +1279,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
try:
|
||||
post_processed_output = process_ai_output(output, client.discussion.image_files, client.discussion.discussion_folder)
|
||||
if len(post_processed_output)!=output:
|
||||
self.process_chunk(post_processed_output, MSG_TYPE.MSG_TYPE_FULL,client_id=client_id)
|
||||
self.process_chunk(post_processed_output, MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT,client_id=client_id)
|
||||
except Exception as ex:
|
||||
ASCIIColors.error(str(ex))
|
||||
else:
|
||||
@@ -1259,7 +1334,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
else:
|
||||
self.send_refresh(client_id)
|
||||
self.new_message(client_id, self.personality.name, "")
|
||||
self.update_message(client_id, "✍ warming up ...", msg_type=MSG_TYPE.MSG_TYPE_STEP_START)
|
||||
self.update_message_step(client_id, "✍ warming up ...", MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_START)
|
||||
|
||||
# prepare query and reception
|
||||
self.discussion_messages, self.current_message, tokens, context_details, internet_search_infos = self.prepare_query(client_id, message_id, is_continue, n_tokens=self.config.min_n_predict, generation_type=generation_type, force_using_internet=force_using_internet, previous_chunk = client.generated_text if is_continue else "")
|
||||
@@ -1279,7 +1354,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
)
|
||||
if self.tts and self.config.auto_read and len(self.personality.audio_samples)>0:
|
||||
try:
|
||||
self.process_chunk("Generating voice output",MSG_TYPE.MSG_TYPE_STEP_START,client_id=client_id)
|
||||
self.process_chunk("Generating voice output",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_START,client_id=client_id)
|
||||
from lollms.services.xtts.lollms_xtts import LollmsXTTS
|
||||
voice=self.config.xtts_current_voice
|
||||
if voice!="main_voice":
|
||||
@@ -1300,8 +1375,8 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
f' Your browser does not support the audio element.',
|
||||
f'</audio>'
|
||||
])
|
||||
self.process_chunk("Generating voice output", MSG_TYPE.MSG_TYPE_STEP_END, {'status':True},client_id=client_id)
|
||||
self.process_chunk(fl,MSG_TYPE.MSG_TYPE_UI, client_id=client_id)
|
||||
self.process_chunk("Generating voice output", operation_type= MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS,client_id=client_id)
|
||||
self.process_chunk(fl,MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_UI, client_id=client_id)
|
||||
else:
|
||||
self.InfoMessage("xtts is not up yet.\nPlease wait for it to load then try again. This may take some time.")
|
||||
|
||||
@@ -1422,7 +1497,8 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
except Exception as ex:
|
||||
trace_exception(ex)
|
||||
try:
|
||||
self.update_message(client_id, "Generating ...", msg_type=MSG_TYPE.MSG_TYPE_STEP_END, parameters= {'status':True})
|
||||
self.update_message_step(client_id, "✍ warming up ...", MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
|
||||
self.update_message_step(client_id, "✍ generating ...",MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_STEP_END_SUCCESS)
|
||||
except Exception as ex:
|
||||
ASCIIColors.warning("Couldn't send status update to client")
|
||||
self.close_message(client_id)
|
||||
@@ -1469,7 +1545,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
nb_tokens = None
|
||||
|
||||
message = client.discussion.add_message(
|
||||
message_type = MSG_TYPE.MSG_TYPE_FULL.value,
|
||||
operation_type = MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT.value,
|
||||
sender_type = SENDER_TYPES.SENDER_TYPES_USER.value,
|
||||
sender = self.config.user_name.strip() if self.config.use_user_name_in_discussions else self.personality.user_message_prefix,
|
||||
content = prompt,
|
||||
@@ -1481,7 +1557,7 @@ class LOLLMSWebUI(LOLLMSElfServer):
|
||||
self.new_message(
|
||||
client.client_id,
|
||||
self.personality.name,
|
||||
message_type= MSG_TYPE.MSG_TYPE_FULL,
|
||||
operation_type= MSG_OPERATION_TYPE.MSG_OPERATION_TYPE_SET_CONTENT,
|
||||
content=""
|
||||
)
|
||||
client.generated_text = ""
|
||||
|

@@ -11,7 +11,7 @@ from lollms.paths import LollmsPaths
from lollms.main_config import LOLLMSConfig
from lollms.utilities import check_and_install_torch, PackageManager, check_torch_version, reinstall_pytorch_with_cuda, reinstall_pytorch_with_cpu, reinstall_pytorch_with_rocm
from lollms.com import NotificationType, NotificationDisplayType, LoLLMsCom
from lollms.types import MSG_TYPE, SENDER_TYPES
from lollms.types import MSG_OPERATION_TYPE, SENDER_TYPES
from lollms.app import LollmsApplication
from pathlib import Path
from ascii_colors import ASCIIColors

@@ -10,7 +10,7 @@ from fastapi import APIRouter, Request, routing
from lollms_webui import LOLLMSWebUI
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from lollms.types import MSG_TYPE
from lollms.types import MSG_OPERATION_TYPE
from lollms.main_config import BaseConfig
from ascii_colors import get_trace_exception, trace_exception
from ascii_colors import ASCIIColors

web/dist/assets/index-7271416c.css (vendored, new file, 8 changed lines): file diff suppressed because one or more lines are too long
web/dist/assets/index-bfc4ff0e.css (vendored, 8 changed lines): file diff suppressed because one or more lines are too long
web/dist/index.html (vendored, 4 changed lines)
@@ -6,8 +6,8 @@
<script src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-svg.js"></script>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>LoLLMS WebUI</title>
<script type="module" crossorigin src="/assets/index-c043acbd.js"></script>
<link rel="stylesheet" href="/assets/index-bfc4ff0e.css">
<script type="module" crossorigin src="/assets/index-ae808653.js"></script>
<link rel="stylesheet" href="/assets/index-7271416c.css">
</head>
<body>
<div id="app"></div>
@@ -37,21 +37,36 @@
|
||||
<div class="overflow-x-auto w-full overflow-y-auto scrollbar-thin scrollbar-track-bg-light-tone scrollbar-thumb-bg-light-tone-panel hover:scrollbar-thumb-primary dark:scrollbar-track-bg-dark-tone dark:scrollbar-thumb-bg-dark-tone-panel dark:hover:scrollbar-thumb-primary active:scrollbar-thumb-secondary">
|
||||
<!-- MESSAGE CONTENT -->
|
||||
<details v-show="message != undefined && message.steps != undefined && message.steps.length>0" class="flex w-full cursor-pointer rounded-xl border border-gray-200 bg-white shadow-sm dark:border-gray-800 dark:bg-gray-900 mb-3.5 max-w-full">
|
||||
<summary class="grid min-w-72 select-none grid-cols-[40px,1fr] items-center gap-2.5 p-2">
|
||||
<div class="relative grid aspect-square place-content-center overflow-hidden rounded-lg bg-gray-300 dark:bg-gray-200">
|
||||
<img v-if="message.status_message!='Done' & message.status_message!= 'Generation canceled'" :src="loading_svg" class="w-50 h-50 absolute inset-0 text-gray-100 transition-opacity dark:text-gray-800 opacity-100">
|
||||
<img v-if="message.status_message== 'Generation canceled'" :src="failed_svg" class="w-50 h-50 absolute inset-0 text-gray-100 transition-opacity dark:text-gray-800 opacity-100">
|
||||
<img v-if="message.status_message=='Done'" :src="ok_svg" class="w-50 h-50 absolute m-2 w-6 inset-0 text-geen-100 transition-opacity dark:text-gray-800 opacity-100">
|
||||
<summary class="grid min-w-80 select-none grid-cols-[50px,1fr] items-center gap-3 p-3 bg-gray-50 dark:bg-gray-800 rounded-lg shadow-sm hover:shadow-md transition-all duration-300">
|
||||
<div class="relative grid aspect-square place-content-center overflow-hidden rounded-full bg-gradient-to-br from-blue-400 to-purple-500">
|
||||
<svg v-if="message.status_message !== 'Done' && message.status_message !== 'Generation canceled'" class="w-8 h-8 text-white animate-spin" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
|
||||
<circle class="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" stroke-width="4"></circle>
|
||||
<path class="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
|
||||
</svg>
|
||||
<svg v-if="message.status_message === 'Generation canceled'" class="w-8 h-8 text-red-500" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"></path>
|
||||
</svg>
|
||||
<svg v-if="message.status_message === 'Done'" class="w-8 h-8 text-green-500" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M5 13l4 4L19 7"></path>
|
||||
</svg>
|
||||
</div>
|
||||
<dl class="leading-4">
|
||||
<dd class="text-sm">Processing infos</dd>
|
||||
<dt class="flex items-center gap-1 truncate whitespace-nowrap text-[.82rem] text-gray-400">{{ message==undefined?"":message.status_message }}</dt>
|
||||
<dl class="leading-5">
|
||||
<dd class="text-lg font-semibold text-gray-800 dark:text-gray-200">Processing Info</dd>
|
||||
<dt class="flex items-center gap-1 truncate whitespace-nowrap text-sm text-gray-500 dark:text-gray-400">
|
||||
<span class="inline-block w-2 h-2 rounded-full" :class="{
|
||||
'bg-blue-500 animate-pulse': message.status_message !== 'Done' && message.status_message !== 'Generation canceled',
|
||||
'bg-red-500': message.status_message === 'Generation canceled',
|
||||
'bg-green-500': message.status_message === 'Done'
|
||||
}"></span>
|
||||
{{ message === undefined ? '' : message.status_message }}
|
||||
</dt>
|
||||
</dl>
|
||||
</summary>
|
||||
</summary>
|
||||
|
||||
<div class="content px-5 pb-5 pt-4">
|
||||
<ol class="list-none">
|
||||
<div v-for="(step, index) in message.steps" :key="'step-' + message.id + '-' + index" class="group border-l pb-6 last:!border-transparent last:pb-0 dark:border-gray-800" :style="{ backgroundColor: step.done ? 'transparent' : 'inherit' }">
|
||||
<Step :done="step.done" :message="step.message" :status="step.status" :step_type = "step.type"/>
|
||||
<Step :done="step.done" :text="step.text" :status="step.status" :step_type = "step.step_type"/>
|
||||
</div>
|
||||
</ol>
|
||||
</div>
|
||||
@@ -79,7 +94,7 @@
|
||||
<JsonViewer :jsonFormText="metadata.title" :jsonData="metadata.content" />
|
||||
</div>
|
||||
</div>
|
||||
<div v-if="message.ui !== null">
|
||||
<div v-if="message.ui != null">
|
||||
<DynamicUIRenderer ref="ui" class="w-full" :ui="message.ui"></DynamicUIRenderer>
|
||||
</div>
|
||||
|
||||
@@ -595,7 +610,7 @@ export default {
|
||||
|
||||
// Function to speak a chunk of text
|
||||
const speakChunk = () => {
|
||||
if (this.status_message=='Done' || this.message.content.includes('.')||this.message.content.includes('?')||this.message.content.includes('!')){
|
||||
if (this.message.status_message=='Done' || this.message.content.includes('.')||this.message.content.includes('?')||this.message.content.includes('!')){
|
||||
const endIndex = findLastSentenceIndex(startIndex);
|
||||
const chunk = this.message.content.substring(startIndex, endIndex);
|
||||
this.msg.text = chunk;
|
||||
|
@@ -1,71 +1,142 @@
|
||||
<template>
|
||||
<div class="flex items-start">
|
||||
<div class="step flex items-center mb-4">
|
||||
<div v-if="step_type=='start_end'" class="flex items-center justify-center w-6 h-6 mr-2">
|
||||
<div class="step-container">
|
||||
<div
|
||||
class="step-wrapper transition-all duration-300 ease-in-out"
|
||||
:class="{
|
||||
'bg-green-100 dark:bg-green-900': done && status,
|
||||
'bg-red-100 dark:bg-red-900': done && !status,
|
||||
'bg-gray-100 dark:bg-gray-800': !done
|
||||
}"
|
||||
>
|
||||
<div class="step-icon">
|
||||
<div v-if="step_type === 'start_end'">
|
||||
<div v-if="!done">
|
||||
<i
|
||||
data-feather="circle"
|
||||
class="feather-icon text-gray-600 dark:text-gray-300"
|
||||
></i>
|
||||
</div>
|
||||
<div v-else-if="done && status">
|
||||
<i
|
||||
data-feather="check-circle"
|
||||
class="feather-icon text-green-600 dark:text-green-400"
|
||||
></i>
|
||||
</div>
|
||||
<div v-else>
|
||||
<i
|
||||
data-feather="x-circle"
|
||||
class="feather-icon text-red-600 dark:text-red-400"
|
||||
></i>
|
||||
</div>
|
||||
</div>
|
||||
<div v-if="!done">
|
||||
<i
|
||||
data-feather="square"
|
||||
class="text-gray-400 w-4 h-4"
|
||||
></i>
|
||||
</div>
|
||||
<div v-if="done && status">
|
||||
<i
|
||||
data-feather="check-square"
|
||||
class="text-green-500 w-4 h-4"
|
||||
></i>
|
||||
</div>
|
||||
<div v-if="done && !status">
|
||||
<i
|
||||
data-feather="x-square"
|
||||
class="text-red-500 w-4 h-4"
|
||||
></i>
|
||||
<div class="spinner"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div v-if="!done" role="status" class="m-15">
|
||||
<svg aria-hidden="true" class="w-6 h-6 animate-spin fill-secondary" viewBox="0 0 100 101"
|
||||
fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path
|
||||
d="M100 50.5908C100 78.2051 77.6142 100.591 50 100.591C22.3858 100.591 0 78.2051 0 50.5908C0 22.9766 22.3858 0.59082 50 0.59082C77.6142 0.59082 100 22.9766 100 50.5908ZM9.08144 50.5908C9.08144 73.1895 27.4013 91.5094 50 91.5094C72.5987 91.5094 90.9186 73.1895 90.9186 50.5908C90.9186 27.9921 72.5987 9.67226 50 9.67226C27.4013 9.67226 9.08144 27.9921 9.08144 50.5908Z"
|
||||
fill="currentColor" />
|
||||
<path
|
||||
d="M93.9676 39.0409C96.393 38.4038 97.8624 35.9116 97.0079 33.5539C95.2932 28.8227 92.871 24.3692 89.8167 20.348C85.8452 15.1192 80.8826 10.7238 75.2124 7.41289C69.5422 4.10194 63.2754 1.94025 56.7698 1.05124C51.7666 0.367541 46.6976 0.446843 41.7345 1.27873C39.2613 1.69328 37.813 4.19778 38.4501 6.62326C39.0873 9.04874 41.5694 10.4717 44.0505 10.1071C47.8511 9.54855 51.7191 9.52689 55.5402 10.0491C60.8642 10.7766 65.9928 12.5457 70.6331 15.2552C75.2735 17.9648 79.3347 21.5619 82.5849 25.841C84.9175 28.9121 86.7997 32.2913 88.1811 35.8758C89.083 38.2158 91.5421 39.6781 93.9676 39.0409Z"
|
||||
fill="currentFill" />
|
||||
</svg>
|
||||
<div class="step-content">
|
||||
<h3
|
||||
class="step-text"
|
||||
:class="{
|
||||
'text-green-600 dark:text-green-400': done && status,
|
||||
'text-red-600 dark:text-red-400': done && !status,
|
||||
'text-gray-800 dark:text-gray-200': !done
|
||||
}"
|
||||
>
|
||||
{{ text || 'No text provided' }}
|
||||
</h3>
|
||||
<p v-if="description" class="step-description">{{ description || 'No description provided' }}</p>
|
||||
</div>
|
||||
<h3 class="text-sm ml-6" >{{ message }}</h3>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
|
||||
<script>
|
||||
export default {
|
||||
props: {
|
||||
done: {
|
||||
type: Boolean,
|
||||
default: false // Default to false if not provided
|
||||
},
|
||||
text: {
|
||||
type: String,
|
||||
default: '' // Default to empty string if not provided
|
||||
},
|
||||
status: {
|
||||
type: Boolean,
|
||||
default: false // Default to false if not provided
|
||||
},
|
||||
step_type: {
|
||||
type: String,
|
||||
default: 'start_end' // Default to 'start_end' if not provided
|
||||
},
|
||||
description: {
|
||||
type: String,
|
||||
default: '' // Default to empty string if not provided
|
||||
}
|
||||
},
|
||||
mounted() {
|
||||
|
||||
|
||||
|
||||
<script>
|
||||
export default {
|
||||
props: {
|
||||
done: {
|
||||
type: Boolean,
|
||||
required: true
|
||||
},
|
||||
message: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
status: {
|
||||
type: Boolean,
|
||||
required: true
|
||||
},
|
||||
step_type: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: 'start_end'
|
||||
this.amounted();
|
||||
},
|
||||
methods: {
|
||||
amounted() {
|
||||
console.log('Component mounted with the following properties:');
|
||||
console.log('done:', this.done);
|
||||
console.log('text:', this.text);
|
||||
console.log('status:', this.status);
|
||||
console.log('step_type:', this.step_type);
|
||||
console.log('description:', this.description);
|
||||
}
|
||||
},
|
||||
watch: {
|
||||
done(newValue) {
|
||||
if (typeof newValue !== 'boolean') {
|
||||
console.error('Invalid type for done. Expected Boolean.');
|
||||
}
|
||||
},
|
||||
status(newValue) {
|
||||
if (typeof newValue !== 'boolean') {
|
||||
console.error('Invalid type for status. Expected Boolean.');
|
||||
}
|
||||
if (this.done && !newValue) {
|
||||
console.error('Task completed with errors.');
|
||||
}
|
||||
}
|
||||
};
|
||||
</script>
|
||||
|
||||
<style>
|
||||
|
||||
</style>
|
||||
|
||||
}
|
||||
};
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.step-container {
|
||||
@apply mb-4;
|
||||
}
|
||||
|
||||
.step-wrapper {
|
||||
@apply flex items-start p-4 rounded-lg shadow-md;
|
||||
}
|
||||
|
||||
.step-icon {
|
||||
@apply flex-shrink-0 w-6 h-6 mr-4 flex items-center justify-center;
|
||||
}
|
||||
|
||||
.feather-icon {
|
||||
@apply w-6 h-6 stroke-2 stroke-current;
|
||||
}
|
||||
|
||||
.spinner {
|
||||
@apply w-6 h-6 border-2 border-gray-600 border-t-2 border-t-blue-600 rounded-full animate-spin;
|
||||
}
|
||||
|
||||
.step-content {
|
||||
@apply flex-grow;
|
||||
}
|
||||
|
||||
.step-text {
|
||||
@apply text-lg font-semibold mb-1;
|
||||
}
|
||||
|
||||
.step-description {
|
||||
@apply text-sm text-gray-600 dark:text-gray-400;
|
||||
}
|
||||
</style>
|
||||
|
@ -8,80 +8,103 @@
|
||||
:src="$store.state.config == null ? storeLogo : $store.state.config.app_custom_logo != '' ? '/user_infos/' + $store.state.config.app_custom_logo : storeLogo"
|
||||
alt="Logo" title="LoLLMS WebUI">
|
||||
</div>
|
||||
<div class="flex flex-col justify-center logo-text">
|
||||
<div class="flex flex-col justify-center">
|
||||
<p class="text-2xl font-bold drop-shadow-md leading-none">LoLLMS</p>
|
||||
<p class="text-gray-400 text-sm">One tool to rule them all</p>
|
||||
</div>
|
||||
</RouterLink>
|
||||
|
||||
<!-- GITHUB AND THEME BUTTONS -->
|
||||
<!-- SYSTEM STATUS -->
|
||||
<div class="flex gap-3 flex-1 items-center justify-end">
|
||||
<div v-if="isModelOK" title="Model is ok" class="text-green-500 cursor-pointer">
|
||||
<b class="text-2xl">M</b>
|
||||
</div>
|
||||
<div v-else title="Model is not ok" class="text-red-500 cursor-pointer">
|
||||
<b class="text-2xl">M</b>
|
||||
</div>
|
||||
<div v-if="!isGenerating" title="Text is not being generated. Ready to generate" class="text-green-500 cursor-pointer">
|
||||
<i data-feather="flag"></i>
|
||||
</div>
|
||||
<div v-else title="Generation in progress..." class="text-red-500 cursor-pointer">
|
||||
<i data-feather="flag"></i>
|
||||
</div>
|
||||
<div v-if="isConnected" title="Connection status: Connected" class="text-green-500 cursor-pointer">
|
||||
<i data-feather="zap"></i>
|
||||
</div>
|
||||
<div v-else title="Connection status: Not connected" class="text-red-500 cursor-pointer">
|
||||
<i data-feather="zap-off"></i>
|
||||
</div>
|
||||
|
||||
<a href="#" @click="restartProgram" class="text-2xl hover:text-primary duration-150" title="restart program">
|
||||
<i data-feather="power"></i>
|
||||
</a>
|
||||
<a href="#" @click="refreshPage" class="text-2xl hover:text-primary duration-150" title="refresh page">
|
||||
<i data-feather="refresh-ccw"></i>
|
||||
</a>
|
||||
|
||||
<a href="/docs" target="_blank" class="text-2xl hover:text-primary duration-150" title="Fast API doc">
|
||||
<img :src="FastAPI" width="75" height="25" alt="Fast API">
|
||||
</a>
|
||||
|
||||
<a href="https://github.com/ParisNeo/lollms-webui" target="_blank" class="text-2xl hover:text-primary duration-150" title="Visit repository page">
|
||||
<i data-feather="github"></i>
|
||||
</a>
|
||||
<a href="https://www.youtube.com/channel/UCJzrg0cyQV2Z30SQ1v2FdSQ" target="_blank" class="text-2xl hover:text-primary duration-150" title="Visit my youtube channel">
|
||||
<i data-feather="youtube"></i>
|
||||
</a>
|
||||
<a href="https://x.com/ParisNeo_AI" target="_blank" class="text-2xl hover:fill-primary dark:fill-white dark:hover:fill-primary duration-150" title="Follow me on my twitter acount">
|
||||
<svg class="w-10 h-10 rounded-lg object-fill dark:text-white" xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 1668.56 1221.19" style="enable-background:new 0 0 1668.56 1221.19;" xml:space="preserve">
|
||||
<g id="layer1" transform="translate(52.390088,-25.058597)">
|
||||
<path id="path1009" d="M283.94,167.31l386.39,516.64L281.5,1104h87.51l340.42-367.76L984.48,1104h297.8L874.15,558.3l361.92-390.99
|
||||
h-87.51l-313.51,338.7l-253.31-338.7H283.94z M412.63,231.77h136.81l604.13,807.76h-136.81L412.63,231.77z"/>
|
||||
</g>
|
||||
<div v-if="isModelOK" title="Model is ok" class="text-green-500 dark:text-green-400 cursor-pointer transition-transform hover:scale-110">
|
||||
<svg class="w-8 h-8" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M21 12C21 16.9706 16.9706 21 12 21C7.02944 21 3 16.9706 3 12C3 7.02944 7.02944 3 12 3C16.9706 3 21 7.02944 21 12Z" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M9 12L11 14L15 10" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
</a>
|
||||
<a href="https://discord.com/channels/1092918764925882418" target="_blank" class="text-2xl hover:text-primary duration-150" title="Visit my discord channel">
|
||||
<img :src="discord" width="25" height="25" alt="Discord">
|
||||
</a>
|
||||
|
||||
<div class="sun text-2xl w-6 hover:text-primary duration-150 cursor-pointer" title="Switch to Light theme" @click="themeSwitch()">
|
||||
<i data-feather="sun"></i>
|
||||
</div>
|
||||
<div class="moon text-2xl w-6 hover:text-primary duration-150 cursor-pointer" title="Switch to Dark theme" @click="themeSwitch()">
|
||||
<i data-feather="moon"></i>
|
||||
<div v-else title="Model is not ok" class="text-red-500 dark:text-red-400 cursor-pointer transition-transform hover:scale-110">
|
||||
<svg class="w-8 h-8" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M21 12C21 16.9706 16.9706 21 12 21C7.02944 21 3 16.9706 3 12C3 7.02944 7.02944 3 12 3C16.9706 3 21 7.02944 21 12Z" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M15 9L9 15" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M9 9L15 15" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
</div>
|
||||
<div v-if="!isGenerating" title="Text is not being generated. Ready to generate" class="text-green-500 dark:text-green-400 cursor-pointer transition-transform hover:scale-110">
|
||||
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M3 21v-4m0 0V5a2 2 0 012-2h6.5l1 1H21l-3 6 3 6h-8.5l-1-1H5a2 2 0 00-2 2zm9-13.5V9"></path>
|
||||
</svg>
|
||||
</div>
|
||||
<div v-else title="Generation in progress..." class="text-yellow-500 dark:text-yellow-400 cursor-pointer transition-transform hover:scale-110">
|
||||
<svg class="w-6 h-6 animate-spin" fill="none" stroke="currentColor" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15"></path>
|
||||
</svg>
|
||||
</div>
|
||||
<div v-if="isConnected" title="Connection status: Connected" class="text-green-500 dark:text-green-400 cursor-pointer transition-transform hover:scale-110">
|
||||
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M13 10V3L4 14h7v7l9-11h-7z"></path>
|
||||
</svg>
|
||||
</div>
|
||||
<div v-else title="Connection status: Not connected" class="text-red-500 dark:text-red-400 cursor-pointer transition-transform hover:scale-110">
|
||||
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M18.364 18.364A9 9 0 005.636 5.636m12.728 12.728A9 9 0 015.636 5.636m12.728 12.728L5.636 5.636"></path>
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex items-center space-x-4">
|
||||
<ActionButton @click="restartProgram" icon="power" title="restart program" />
|
||||
<ActionButton @click="refreshPage" icon="refresh-ccw" title="refresh page" />
|
||||
<ActionButton href="/docs" icon="file-text" title="Fast API doc" />
|
||||
</div>
|
||||
|
||||
<!-- SOCIALS -->
|
||||
<div class="flex items-center space-x-4">
|
||||
<SocialIcon href="https://github.com/ParisNeo/lollms-webui" icon="github" />
|
||||
<SocialIcon href="https://www.youtube.com/channel/UCJzrg0cyQV2Z30SQ1v2FdSQ" icon="youtube" />
|
||||
<SocialIcon href="https://x.com/ParisNeo_AI" icon="x" />
|
||||
<SocialIcon href="https://discord.com/channels/1092918764925882418" icon="discord" />
|
||||
</div>
|
||||
|
||||
<div class="text-2xl w-6 hover:text-primary duration-150 cursor-pointer" title="Lollms News" @click="showNews()">
|
||||
<img :src="static_info" alt="News">
|
||||
<div class="relative group" title="Lollms News">
|
||||
<div @click="showNews()" class="text-2xl w-8 h-8 cursor-pointer transition-colors duration-300 text-gray-600 hover:text-primary dark:text-gray-300 dark:hover:text-primary">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="w-full h-full">
|
||||
<path d="M19 20H5a2 2 0 0 1-2-2V6a2 2 0 0 1 2-2h10a2 2 0 0 1 2 2v1m2 13a2 2 0 0 1-2-2V7m2 13a2 2 0 0 0 2-2V9a2 2 0 0 0-2-2h-2m-4-3H9M7 16h6M7 8h6v4H7V8z"></path>
|
||||
</svg>
|
||||
</div>
|
||||
<span class="absolute hidden group-hover:block bg-gray-800 text-white text-xs rounded py-1 px-2 bottom-full left-1/2 transform -translate-x-1/2 mb-2 whitespace-nowrap">
|
||||
Lollms News
|
||||
</span>
|
||||
</div>
|
||||
<div v-if="is_fun_mode" title="fun mode is on press to turn off" class="text-green-500 cursor-pointer" @click="fun_mode_off()">
|
||||
<img class="w-5 h-5" :src="fun_mode" alt="Fun mode on">
|
||||
</div>
|
||||
<div v-else title="fun mode is off press to turn on" class="text-red-500 cursor-pointer" @click="fun_mode_on()">
|
||||
<img class="w-5 h-5" :src="normal_mode" alt="Fun mode off">
|
||||
</div>
|
||||
|
||||
<div class="relative group">
|
||||
<div
|
||||
v-if="is_fun_mode"
|
||||
title="Fun mode is on, press to turn off"
|
||||
class="w-8 h-8 cursor-pointer text-green-500 dark:text-green-400 hover:text-green-600 dark:hover:text-green-300 transition-colors duration-300"
|
||||
@click="fun_mode_off()"
|
||||
>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="w-full h-full animate-bounce">
|
||||
<circle cx="12" cy="12" r="10"></circle>
|
||||
<path d="M8 14s1.5 2 4 2 4-2 4-2"></path>
|
||||
<line x1="9" y1="9" x2="9.01" y2="9"></line>
|
||||
<line x1="15" y1="9" x2="15.01" y2="9"></line>
|
||||
</svg>
|
||||
</div>
|
||||
<div
|
||||
v-else
|
||||
title="Fun mode is off, press to turn on"
|
||||
class="w-8 h-8 cursor-pointer text-gray-500 dark:text-gray-400 hover:text-gray-600 dark:hover:text-gray-300 transition-colors duration-300"
|
||||
@click="fun_mode_on()"
|
||||
>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="w-full h-full">
|
||||
<circle cx="12" cy="12" r="10"></circle>
|
||||
<line x1="8" y1="15" x2="16" y2="15"></line>
|
||||
<line x1="9" y1="9" x2="9.01" y2="9"></line>
|
||||
<line x1="15" y1="9" x2="15.01" y2="9"></line>
|
||||
</svg>
|
||||
</div>
|
||||
<span class="absolute hidden group-hover:block bg-gray-800 text-white text-xs rounded py-1 px-2 bottom-full left-1/2 transform -translate-x-1/2 mb-2 whitespace-nowrap">
|
||||
{{ is_fun_mode ? 'Turn off fun mode' : 'Turn on fun mode' }}
|
||||
</span>
|
||||
</div>
|
||||
<div class="language-selector relative">
|
||||
<button @click="toggleLanguageMenu" class="bg-transparent text-black dark:text-white py-1 px-1 rounded font-bold uppercase transition-colors duration-300 hover:bg-blue-500">
|
||||
{{ $store.state.language.slice(0, 2) }}
|
||||
@ -100,7 +123,12 @@
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="sun text-2xl w-6 hover:text-primary duration-150 cursor-pointer" title="Switch to Light theme" @click="themeSwitch()">
|
||||
<i data-feather="sun"></i>
|
||||
</div>
|
||||
<div class="moon text-2xl w-6 hover:text-primary duration-150 cursor-pointer" title="Switch to Dark theme" @click="themeSwitch()">
|
||||
<i data-feather="moon"></i>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<!-- NAVIGATION BUTTONS -->
|
||||
@ -129,6 +157,9 @@ import UniversalForm from '../components/UniversalForm.vue';
|
||||
import YesNoDialog from './YesNoDialog.vue';
|
||||
import PersonalityEditor from "@/components/PersonalityEditor.vue"
|
||||
import PopupViewer from '@/components/PopupViewer.vue';
|
||||
import ActionButton from '@/components/ActionButton.vue'
|
||||
import SocialIcon from '@/components/SocialIcon.vue'
|
||||
|
||||
import FastAPI from '@/assets/fastapi.png';
|
||||
import discord from '@/assets/discord.svg';
|
||||
import { RouterLink } from 'vue-router'
|
||||
@ -222,7 +253,9 @@ export default {
|
||||
YesNoDialog,
|
||||
Navigation,
|
||||
PersonalityEditor,
|
||||
PopupViewer
|
||||
PopupViewer,
|
||||
ActionButton,
|
||||
SocialIcon
|
||||
},
|
||||
watch:{
|
||||
'$store.state.config.fun_mode': function(newVal, oldVal) {
|
||||
@ -515,6 +548,20 @@ export default {
|
||||
border-radius: 50%;
|
||||
object-fit: cover;
|
||||
}
|
||||
@keyframes bounce {
|
||||
0%, 100% {
|
||||
transform: translateY(-25%);
|
||||
animation-timing-function: cubic-bezier(0.8, 0, 1, 1);
|
||||
}
|
||||
50% {
|
||||
transform: translateY(0);
|
||||
animation-timing-function: cubic-bezier(0, 0, 0.2, 1);
|
||||
}
|
||||
}
|
||||
.animate-bounce {
|
||||
animation: bounce 1s infinite;
|
||||
}
|
||||
|
||||
|
||||
@keyframes roll-and-bounce {
|
||||
0%, 100% {
|
||||
@ -543,8 +590,4 @@ export default {
|
||||
}
|
||||
}
|
||||
|
||||
.logo-text {
|
||||
position: relative;
|
||||
left: 100px; /* Adjust this value to match the maximum translation of the logo */
|
||||
}
|
||||
</style>
|
@ -8,7 +8,6 @@ import './assets/tailwind.css'
|
||||
//import './assets/tailwind_april_fool.css'
|
||||
|
||||
const app = createApp(App)
|
||||
console.log("Loaded main.js")
|
||||
|
||||
function copyObject(obj) {
|
||||
const copy = {};
|
||||
@ -228,13 +227,11 @@ export const store = createStore({
|
||||
let res = await axios.get('/get_lollms_webui_version', {});
|
||||
if (res) {
|
||||
this.state.version = res.data
|
||||
console.log("version res:", res)
|
||||
console.log("version :", this.state.version)
|
||||
}
|
||||
|
||||
}
|
||||
catch{
|
||||
console.log("Coudln't get version")
|
||||
console.error("Coudln't get version")
|
||||
}
|
||||
},
|
||||
async refreshConfig({ commit }) {
|
||||
@ -587,16 +584,8 @@ app.mixin({
|
||||
if (!actionsExecuted) {
|
||||
this.$store.state.api_get_req = api_get_req
|
||||
this.$store.state.api_post_req = api_post_req
|
||||
console.log("Main.js creation started")
|
||||
}
|
||||
console.log("Main.js created")
|
||||
},
|
||||
beforeMount() {
|
||||
console.log("Main.js before mount")
|
||||
},
|
||||
mounted() {
|
||||
console.log("Main.js mounted")
|
||||
}
|
||||
})
|
||||
|
||||
function logObjectProperties(obj) {
|
||||
|
@ -330,6 +330,8 @@ export default {
|
||||
},
|
||||
async installApp(appName) {
|
||||
this.loading = true;
|
||||
this.$store.state.messageBox.showBlockingMessage(`Installing app ${appName}`)
|
||||
|
||||
try {
|
||||
await axios.post(`/install/${appName}`, {
|
||||
client_id: this.$store.state.client_id,
|
||||
@ -341,6 +343,7 @@ export default {
|
||||
this.loading = false;
|
||||
this.fetchApps();
|
||||
this.fetchGithubApps();
|
||||
this.$store.state.messageBox.hideMessage()
|
||||
}
|
||||
},
|
||||
async uninstallApp(appName) {
|
||||
|
@ -1,46 +1,61 @@
|
||||
<template>
|
||||
<transition name="fade-and-fly">
|
||||
<div v-if="!isReady" class="fixed top-0 left-0 w-screen h-screen flex items-center justify-center bg-gradient-to-br from-blue-100 to-purple-100 dark:from-blue-900 dark:to-purple-900">
|
||||
<div class="flex flex-col items-center text-center max-w-4xl w-full px-4">
|
||||
<div class="mb-8 w-full">
|
||||
<h1 class="text-5xl md:text-6xl font-bold text-transparent bg-clip-text bg-gradient-to-r from-blue-600 to-purple-600 dark:from-blue-400 dark:to-purple-400 mb-2">
|
||||
LoLLMS {{ version_info }}
|
||||
</h1>
|
||||
<p class="text-2xl text-gray-600 dark:text-gray-300 italic">
|
||||
One tool to rule them all
|
||||
</p>
|
||||
<p class="text-xl text-gray-500 dark:text-gray-400 mb-6">
|
||||
by ParisNeo
|
||||
</p>
|
||||
<div v-if="!isReady" class="fixed top-0 left-0 w-screen h-screen flex items-center justify-center bg-gradient-to-br from-blue-100 to-purple-100 dark:from-blue-900 dark:to-purple-900 overflow-hidden">
|
||||
<!-- Falling stars -->
|
||||
<div class="absolute inset-0 pointer-events-none overflow-hidden">
|
||||
<div v-for="n in 50" :key="n" class="absolute animate-fall"
|
||||
:style="{
|
||||
left: `${Math.random() * 100}%`,
|
||||
top: `-20px`,
|
||||
animationDuration: `${3 + Math.random() * 7}s`,
|
||||
animationDelay: `${Math.random() * 5}s`
|
||||
}">
|
||||
<svg class="w-2 h-2 text-yellow-300" fill="currentColor" viewBox="0 0 20 20">
|
||||
<path d="M9.049 2.927c.3-.921 1.603-.921 1.902 0l1.07 3.292a1 1 0 00.95.69h3.462c.969 0 1.371 1.24.588 1.81l-2.8 2.034a1 1 0 00-.364 1.118l1.07 3.292c.3.921-.755 1.688-1.54 1.118l-2.8-2.034a1 1 0 00-1.175 0l-2.8 2.034c-.784.57-1.838-.197-1.539-1.118l1.07-3.292a1 1 0 00-.364-1.118L2.98 8.72c-.783-.57-.38-1.81.588-1.81h3.461a1 1 0 00.951-.69l1.07-3.292z" />
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="w-full h-24 relative overflow-hidden bg-gradient-to-r from-blue-200 to-purple-200 dark:from-blue-800 dark:to-purple-800 rounded-full shadow-lg">
|
||||
<div class="flex flex-col items-center text-center max-w-4xl w-full px-4 relative z-10">
|
||||
<div class="mb-8 w-full">
|
||||
<h1 class="text-5xl md:text-6xl font-bold text-transparent bg-clip-text bg-gradient-to-r from-blue-600 to-purple-600 dark:from-blue-400 dark:to-purple-400 mb-2 animate-glow">
|
||||
Lord Of Large Language & Multimodal Systems
|
||||
</h1>
|
||||
<p class="text-2xl text-gray-600 dark:text-gray-300 italic">
|
||||
{{ version_info }}
|
||||
</p>
|
||||
<p class="text-2xl text-gray-600 dark:text-gray-300 italic">
|
||||
One tool to rule them all
|
||||
</p>
|
||||
<p class="text-xl text-gray-500 dark:text-gray-400 mb-6">
|
||||
by ParisNeo
|
||||
</p>
|
||||
|
||||
<div class="w-full h-24 relative overflow-hidden bg-gradient-to-r from-blue-200 to-purple-200 dark:from-blue-800 dark:to-purple-800 rounded-full shadow-lg">
|
||||
<img
|
||||
class="w-24 h-24 rounded-full absolute top-0 transition-all duration-300 ease-linear"
|
||||
:style="{ left: `calc(${loading_progress}% - 3rem)` }"
|
||||
title="LoLLMS WebUI"
|
||||
src="@/assets/logo.png"
|
||||
alt="Logo"
|
||||
class="w-24 h-24 rounded-full absolute top-0 transition-all duration-300 ease-linear"
|
||||
:style="{ left: `calc(${loading_progress}% - 3rem)` }"
|
||||
title="LoLLMS WebUI"
|
||||
src="@/assets/logo.png"
|
||||
alt="Logo"
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="w-full max-w-2xl">
|
||||
<h2 class="text-3xl font-semibold text-gray-800 dark:text-gray-200 mb-6">
|
||||
Welcome
|
||||
</h2>
|
||||
<div role="status" class="w-full">
|
||||
<div role="status" class="w-full">
|
||||
<p class="text-xl text-gray-700 dark:text-gray-300">
|
||||
{{ loading_infos }}...
|
||||
{{ loading_infos }}...
|
||||
</p>
|
||||
<p class="text-2xl font-bold text-blue-600 dark:text-blue-400 mt-2">
|
||||
{{ Math.round(loading_progress) }}%
|
||||
{{ Math.round(loading_progress) }}%
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</transition>
|
||||
|
||||
<button v-if="isReady" @click="togglePanel" class="absolute top-2 left-2 p-3 bg-white bg-opacity-0 cursor-pointer transition-all duration-300 hover:scale-110 hover:bg-opacity-20 hover:shadow-xl group">
|
||||
<div v-show="leftPanelCollapsed" ><i data-feather='chevron-right'></i></div>
|
||||
<div v-show="!leftPanelCollapsed" ><i data-feather='chevron-left'></i></div>
|
||||
@ -271,7 +286,6 @@
|
||||
<Message v-for="(msg, index) in discussionArr"
|
||||
:key="msg.id" :message="msg" :id="'msg-' + msg.id" :ref="'msg-' + msg.id"
|
||||
:host="host"
|
||||
ref="messages"
|
||||
|
||||
@copy="copyToClipBoard" @delete="deleteMessage" @rankUp="rankUpMessage"
|
||||
@rankDown="rankDownMessage" @updateMessage="updateMessage" @resendMessage="resendMessage" @continueMessage="continueMessage"
|
||||
@ -468,13 +482,73 @@ animation: custom-pulse 2s infinite;
|
||||
}
|
||||
@keyframes float {
|
||||
0%, 100% { transform: translateY(0); }
|
||||
50% { transform: translateY(-10px); }
|
||||
50% { transform: translateY(-20px); }
|
||||
}
|
||||
.animate-float {
|
||||
animation: float linear infinite;
|
||||
}
|
||||
|
||||
.animate-float {
|
||||
animation: float 3s ease-in-out infinite;
|
||||
@keyframes star-move {
|
||||
0% { transform: translate(0, 0) rotate(0deg); }
|
||||
50% { transform: translate(20px, 20px) rotate(180deg); }
|
||||
100% { transform: translate(0, 0) rotate(360deg); }
|
||||
}
|
||||
|
||||
.animate-star {
|
||||
animation: star-move linear infinite;
|
||||
}
|
||||
|
||||
@keyframes fall {
|
||||
from {
|
||||
transform: translateY(-20px) rotate(0deg);
|
||||
opacity: 1;
|
||||
}
|
||||
to {
|
||||
transform: translateY(calc(100vh + 20px)) rotate(360deg);
|
||||
opacity: 0;
|
||||
}
|
||||
}
|
||||
|
||||
.animate-fall {
|
||||
animation: fall linear infinite;
|
||||
}
|
||||
|
||||
@keyframes glow {
|
||||
0%, 100% {
|
||||
text-shadow: 0 0 5px rgba(66, 153, 225, 0.5), 0 0 10px rgba(66, 153, 225, 0.5);
|
||||
}
|
||||
50% {
|
||||
text-shadow: 0 0 20px rgba(66, 153, 225, 0.8), 0 0 30px rgba(66, 153, 225, 0.8);
|
||||
}
|
||||
}
|
||||
|
||||
.animate-glow {
|
||||
animation: glow 2s ease-in-out infinite;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
@keyframes glow {
|
||||
0%, 100% {
|
||||
text-shadow: 0 0 5px rgba(147, 197, 253, 0.5), 0 0 10px rgba(147, 197, 253, 0.5);
|
||||
}
|
||||
50% {
|
||||
text-shadow: 0 0 20px rgba(147, 197, 253, 0.8), 0 0 30px rgba(147, 197, 253, 0.8);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
@keyframes glow {
|
||||
0%, 100% {
|
||||
text-shadow: 0 0 5px rgba(147, 197, 253, 0.5), 0 0 10px rgba(147, 197, 253, 0.5);
|
||||
}
|
||||
50% {
|
||||
text-shadow: 0 0 20px rgba(147, 197, 253, 0.8), 0 0 30px rgba(147, 197, 253, 0.8);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@keyframes roll {
|
||||
0% {
|
||||
transform: translateX(-50%) rotate(0deg);
|
||||
@ -580,34 +654,40 @@ export default {
|
||||
host:"",
|
||||
progress_visibility_val : true,
|
||||
progress_value : 0,
|
||||
// To be synced with the backend database types
|
||||
msgTypes: {
|
||||
// Messaging
|
||||
MSG_TYPE_CHUNK : 0, // A chunk of a message (used for classical chat)
|
||||
MSG_TYPE_FULL : 1, // A full message (for some personality the answer is sent in bulk)
|
||||
MSG_TYPE_FULL_INVISIBLE_TO_AI : 2, // A full message (for some personality the answer is sent in bulk)
|
||||
MSG_TYPE_FULL_INVISIBLE_TO_USER : 3, // A full message (for some personality the answer is sent in bulk)
|
||||
|
||||
MSG_TYPE_CONTENT : 1, // A full message (for some personality the answer is sent in bulk)
|
||||
MSG_TYPE_CONTENT_INVISIBLE_TO_AI : 2, // A full message (for some personality the answer is sent in bulk)
|
||||
MSG_TYPE_CONTENT_INVISIBLE_TO_USER : 3, // A full message (for some personality the answer is sent in bulk)
|
||||
},
|
||||
// To be synced with the backend database types
|
||||
operationTypes: {
|
||||
// Messaging
|
||||
MSG_OPERATION_TYPE_ADD_CHUNK : 0, // Add a chunk to the current message
|
||||
MSG_OPERATION_TYPE_SET_CONTENT : 1, // sets the content of current message
|
||||
MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI : 2, // sets the content of current message as invisible to ai
|
||||
MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_USER : 3, // sets the content of current message as invisible to user
|
||||
// Informations
|
||||
MSG_TYPE_EXCEPTION : 4, // An exception occured
|
||||
MSG_TYPE_WARNING : 5, // A warning occured
|
||||
MSG_TYPE_INFO : 6, // An information to be shown to user
|
||||
MSG_OPERATION_TYPE_EXCEPTION : 4, // An exception occured
|
||||
MSG_OPERATION_TYPE_WARNING : 5, // A warning occured
|
||||
MSG_OPERATION_TYPE_INFO : 6, // An information to be shown to user
|
||||
|
||||
// Steps
|
||||
MSG_TYPE_STEP : 7, // An instant step (a step that doesn't need time to be executed)
|
||||
MSG_TYPE_STEP_START : 8, // A step has started (the text contains an explanation of the step done by he personality)
|
||||
MSG_TYPE_STEP_PROGRESS : 9, // The progress value (the text contains a percentage and can be parsed by the reception)
|
||||
MSG_TYPE_STEP_END : 10,// A step has been done (the text contains an explanation of the step done by he personality)
|
||||
MSG_OPERATION_TYPE_STEP : 7, // An instant step (a step that doesn't need time to be executed)
|
||||
MSG_OPERATION_TYPE_STEP_START : 8, // A step has started (the text contains an explanation of the step done by he personality)
|
||||
MSG_OPERATION_TYPE_STEP_PROGRESS : 9, // The progress value (the text contains a percentage and can be parsed by the reception)
|
||||
MSG_OPERATION_TYPE_STEP_END_SUCCESS : 10, // A step has been done (the text contains an explanation of the step done by he personality)
|
||||
MSG_OPERATION_TYPE_STEP_END_FAILURE : 11, // A step has been done (the text contains an explanation of the step done by he personality)
|
||||
|
||||
//Extra
|
||||
MSG_TYPE_JSON_INFOS : 11,// A JSON output that is useful for summarizing the process of generation used by personalities like chain of thoughts and tree of thooughts
|
||||
MSG_TYPE_REF : 12,// References (in form of [text](path))
|
||||
MSG_TYPE_CODE : 13,// A javascript code to execute
|
||||
MSG_TYPE_UI : 14,// A vue.js component to show (we need to build some and parse the text to show it)
|
||||
MSG_OPERATION_TYPE_JSON_INFOS : 12, // A JSON output that is useful for summarizing the process of generation used by personalities like chain of thoughts and tree of thooughts
|
||||
MSG_OPERATION_TYPE_REF : 13, // References (in form of [text](path))
|
||||
MSG_OPERATION_TYPE_CODE : 14, // A javascript code to execute
|
||||
MSG_OPERATION_TYPE_UI : 15, // A vue.js component to show (we need to build some and parse the text to show it)
|
||||
|
||||
|
||||
MSG_TYPE_NEW_MESSAGE : 15,// A new message
|
||||
MSG_TYPE_FINISHED_MESSAGE : 17 // End of current message
|
||||
//Commands
|
||||
MSG_OPERATION_TYPE_NEW_MESSAGE : 16, // A new message
|
||||
MSG_OPERATION_TYPE_FINISHED_MESSAGE : 17, // End of current message
|
||||
|
||||
},
|
||||
// Sender types
|
||||
@ -645,6 +725,15 @@ export default {
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
getRandomEdgePosition() {
|
||||
const edge = Math.floor(Math.random() * 4);
|
||||
switch (edge) {
|
||||
case 0: return 0; // Top edge
|
||||
case 1: return 100; // Right edge
|
||||
case 2: return Math.random() * 100; // Bottom or left edge
|
||||
case 3: return Math.random() * 100; // Bottom or left edge
|
||||
}
|
||||
},
|
||||
selectPrompt(prompt){
|
||||
this.$refs.chatBox.message = prompt;
|
||||
},
|
||||
@ -908,13 +997,13 @@ export default {
|
||||
if (data) {
|
||||
// Filter out the user and bot entries
|
||||
this.discussionArr = data.filter((item) =>
|
||||
item.message_type == this.msgTypes.MSG_TYPE_CHUNK ||
|
||||
item.message_type == this.msgTypes.MSG_TYPE_FULL ||
|
||||
item.message_type == this.msgTypes.MSG_TYPE_FULL_INVISIBLE_TO_AI ||
|
||||
item.message_type == this.msgTypes.MSG_TYPE_CODE ||
|
||||
item.message_type == this.msgTypes.MSG_TYPE_JSON_INFOS ||
|
||||
item.message_type == this.msgTypes.MSG_TYPE_UI
|
||||
item.message_type == this.msgTypes.MSG_TYPE_CONTENT ||
|
||||
item.message_type == this.msgTypes.MSG_TYPE_CONTENT_INVISIBLE_TO_AI
|
||||
)
|
||||
this.discussionArr.forEach((item) => {
|
||||
item.status_message = "Done";
|
||||
});
|
||||
|
||||
console.log("this.discussionArr")
|
||||
console.log(this.discussionArr)
|
||||
if(next){
|
||||
@ -1476,7 +1565,7 @@ export default {
|
||||
|
||||
|
||||
sender: this.$store.state.config.user_name,
|
||||
message_type: this.msgTypes.MSG_TYPE_FULL,
|
||||
message_type: this.operationTypes.MSG_TYPE_CONTENT,
|
||||
sender_type: this.senderTypes.SENDER_TYPES_USER,
|
||||
content: msg,
|
||||
id: lastmsgid,
|
||||
@ -1557,18 +1646,21 @@ export default {
|
||||
|
||||
this.chime.play()
|
||||
},
|
||||
streamMessageContent(msgObj) {
|
||||
update_message(msgObj) {
|
||||
console.log("update_message trigged")
|
||||
console.log(msgObj)
|
||||
// Streams response message content from binding
|
||||
this.discussion_id = msgObj.discussion_id
|
||||
this.setDiscussionLoading(this.discussion_id, true);
|
||||
|
||||
if (this.currentDiscussion.id == this.discussion_id) {
|
||||
//this.isGenerating = true;
|
||||
const index = this.discussionArr.findIndex((x) => x.id == msgObj.id)
|
||||
const messageItem = this.discussionArr[index]
|
||||
|
||||
if (
|
||||
messageItem && (msgObj.message_type==this.msgTypes.MSG_TYPE_FULL ||
|
||||
msgObj.message_type==this.msgTypes.MSG_TYPE_FULL_INVISIBLE_TO_AI)
|
||||
messageItem && (msgObj.operation_type==this.operationTypes.MSG_OPERATION_TYPE_SET_CONTENT ||
|
||||
msgObj.operation_type==this.operationTypes.MSG_OPERATION_TYPE_SET_CONTENT_INVISIBLE_TO_AI)
|
||||
) {
|
||||
this.isGenerating = true;
|
||||
messageItem.content = msgObj.content
|
||||
@ -1578,54 +1670,32 @@ export default {
|
||||
messageItem.finished_generating_at = msgObj.finished_generating_at
|
||||
this.extractHtml()
|
||||
}
|
||||
else if(messageItem && msgObj.message_type==this.msgTypes.MSG_TYPE_CHUNK){
|
||||
else if(messageItem && msgObj.operation_type==this.operationTypes.MSG_OPERATION_TYPE_ADD_CHUNK){
|
||||
this.isGenerating = true;
|
||||
messageItem.content += msgObj.content
|
||||
messageItem.created_at = msgObj.created_at
|
||||
messageItem.started_generating_at = msgObj.started_generating_at
|
||||
messageItem.nb_tokens = msgObj.nb_tokens
|
||||
messageItem.finished_generating_at = msgObj.finished_generating_at
|
||||
//console.log("content")
|
||||
//console.log(messageItem.content)
|
||||
messageItem.created_at = msgObj.created_at
|
||||
messageItem.started_generating_at = msgObj.started_generating_at
|
||||
messageItem.nb_tokens = msgObj.nb_tokens
|
||||
messageItem.finished_generating_at = msgObj.finished_generating_at
|
||||
this.extractHtml()
|
||||
} else if (msgObj.message_type == this.msgTypes.MSG_TYPE_STEP){
|
||||
messageItem.status_message = msgObj.content
|
||||
messageItem.steps.push({"message":msgObj.content,"done":true, "status":true, "type": "instantanious" })
|
||||
} else if (msgObj.message_type == this.msgTypes.MSG_TYPE_STEP_START){
|
||||
messageItem.status_message = msgObj.content
|
||||
messageItem.steps.push({"message":msgObj.content,"done":false, "status":true, "type": "start_end" })
|
||||
} else if (msgObj.message_type == this.msgTypes.MSG_TYPE_STEP_END) {
|
||||
console.log("received step end", msgObj)
|
||||
try{
|
||||
|
||||
// Iterate over each step and update the 'done' property if the message matches msgObj.content
|
||||
messageItem.steps.forEach(step => {
|
||||
if (step.message === msgObj.content) {
|
||||
step.done = true;
|
||||
try {
|
||||
console.log(msgObj.parameters)
|
||||
const parameters = msgObj.parameters;
|
||||
if(parameters !== undefined){
|
||||
step.status = parameters.status;
|
||||
console.log(parameters);
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error parsing JSON:', error.message);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
} else if (msgObj.operation_type == this.operationTypes.MSG_OPERATION_TYPE_STEP || msgObj.operation_type == this.operationTypes.MSG_OPERATION_TYPE_STEP_START || msgObj.operation_type == this.operationTypes.MSG_OPERATION_TYPE_STEP_END_SUCCESS || msgObj.operation_type == this.operationTypes.MSG_OPERATION_TYPE_STEP_END_FAILURE){
|
||||
if (Array.isArray(msgObj.steps)) {
|
||||
messageItem.status_message = msgObj.steps[msgObj.steps.length - 1]["text"]
|
||||
console.log("step Content: ", messageItem.status_message)
|
||||
messageItem.steps = msgObj.steps;
|
||||
console.log("steps: ", msgObj.steps)
|
||||
} else {
|
||||
console.error("Invalid steps data:", msgObj.steps);
|
||||
}
|
||||
catch{
|
||||
console.log("error")
|
||||
}
|
||||
} else if (msgObj.message_type == this.msgTypes.MSG_TYPE_JSON_INFOS) {
|
||||
console.log("JSON message")
|
||||
console.log(msgObj.metadata)
|
||||
|
||||
} else if (msgObj.message_type == this.operationTypes.MSG_TYPE_JSON_INFOS) {
|
||||
messageItem.metadata = msgObj.metadata
|
||||
} else if (msgObj.message_type == this.msgTypes.MSG_TYPE_UI) {
|
||||
console.log("UI message")
|
||||
} else if (msgObj.message_type == this.operationTypes.MSG_OPERATION_TYPE_UI) {
|
||||
console.log("UI", msgObj.ui)
|
||||
messageItem.ui = msgObj.ui
|
||||
} else if (msgObj.message_type == this.msgTypes.MSG_TYPE_EXCEPTION) {
|
||||
} else if (msgObj.message_type == this.operationTypes.MSG_OPERATION_TYPE_EXCEPTION) {
|
||||
this.$store.state.toast.showToast(msgObj.content, 5, false)
|
||||
}
|
||||
// // Disables as per request
|
||||
@ -2353,7 +2423,7 @@ export default {
|
||||
|
||||
socket.on('notification', this.notify)
|
||||
socket.on('new_message', this.new_message)
|
||||
socket.on('update_message', this.streamMessageContent)
|
||||
socket.on('update_message', this.update_message)
|
||||
socket.on('close_message', this.finalMsgEvent)
|
||||
|
||||
socket.on('disucssion_renamed',(event)=>{
|
||||
|
@ -1 +1 @@
|
||||
Subproject commit cda67694b2838448818ee9eae002a663321a1861
|
||||
Subproject commit 89d27afaf2ef43cc70940365e04891d9a04f0773
|
@ -1 +1 @@
|
||||
Subproject commit efe94a6e9aec9d7af759496e5cbf499aeee20fa3
|
||||
Subproject commit 27613f177a0a1737a2e8d6354b0fa656a376baff
|
Loading…
Reference in New Issue
Block a user