Saifeddine ALOUI 2024-02-20 00:53:33 +01:00
parent d86c24e8c2
commit 415bbe045b
3 changed files with 20 additions and 16 deletions

View File

@@ -1,5 +1,5 @@
# =================== Lord Of Large Language Multimodal Systems Configuration file ===========================
-version: 66
+version: 67
binding_name: null
model_name: null
@@ -89,20 +89,22 @@ ollama_base_url: http://0.0.0.0:11434
# petals service
enable_petals_service: false
-petals_base_url: http://0.0.0.0:8010
+petals_base_url: http://localhost:8064
petals_model_path: TinyLlama/TinyLlama-1.1B-Chat-v1.0
petals_device: cuda0
# lollms service
enable_lollms_service: false
-lollms_base_url: http://0.0.0.0:1234
+lollms_base_url: http://localhost:1234
# elastic search service
elastic_search_service: false
-elastic_search_url: http://0.0.0.0:9200
+elastic_search_url: http://localhost:9200
# vll service
vllm_service: false
-vllm_url: http://0.0.0.0:8000
-vllm_model_path: mistralai/Mistral-7B-v0.1
+vllm_url: http://localhost:8000
+vllm_model_path: TinyLlama/TinyLlama-1.1B-Chat-v1.0
# Audio

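The hunks above move every client-facing service URL off the 0.0.0.0 listen address (which is a bind address, not something a client should dial) onto localhost, move petals from port 8010 to 8064, and swap the default vLLM model from Mistral-7B to TinyLlama-1.1B-Chat. A minimal Python sketch of how the new defaults could be smoke-tested follows; the URLs are taken from the diff, while the helper itself is purely illustrative and not part of this commit:

import urllib.error
import urllib.request

# Post-commit defaults, copied from the diff above.
SERVICE_URLS = {
    "petals_base_url": "http://localhost:8064",
    "lollms_base_url": "http://localhost:1234",
    "elastic_search_url": "http://localhost:9200",
    "vllm_url": "http://localhost:8000",
}

def is_reachable(url, timeout=2.0):
    # Any HTTP answer, even an error status, means a server is listening.
    try:
        urllib.request.urlopen(url, timeout=timeout)
        return True
    except urllib.error.HTTPError:
        return True
    except OSError:
        return False  # connection refused / timeout: service not up

for key, url in SERVICE_URLS.items():
    print(f"{key}: {url} -> {'up' if is_reachable(url) else 'down'}")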
View File

@@ -1,11 +1,11 @@
# =================== Lord Of Large Language Multimodal Systems Configuration file ===========================
-version: 64
+version: 67
binding_name: null
model_name: null
show_news_panel: True
-# Execution protection
+# Security measures
turn_on_code_execution: True
turn_on_code_validation: True
turn_on_open_file_validation: False
@@ -49,7 +49,7 @@ user_avatar: default_user.svg
use_user_informations_in_discussion: false
# UI parameters
-discussion_db_name: database.db
+discussion_db_name: default
# Automatic updates
debug: False
@@ -89,20 +89,22 @@ ollama_base_url: http://0.0.0.0:11434
# petals service
enable_petals_service: false
-petals_base_url: http://0.0.0.0:8010
+petals_base_url: http://localhost:8064
petals_model_path: TinyLlama/TinyLlama-1.1B-Chat-v1.0
petals_device: cuda0
# lollms service
enable_lollms_service: false
-lollms_base_url: http://0.0.0.0:1234
+lollms_base_url: http://localhost:1234
# elastic search service
elastic_search_service: false
-elastic_search_url: http://0.0.0.0:9200
+elastic_search_url: http://localhost:9200
# vll service
vllm_service: false
-vllm_url: http://0.0.0.0:8000
-vllm_model_path: mistralai/Mistral-7B-v0.1
+vllm_url: http://localhost:8000
+vllm_model_path: TinyLlama/TinyLlama-1.1B-Chat-v1.0
# Audio

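This second file looks like another copy of the same configuration template; it picks up the identical endpoint changes, jumps from version 64 to 67, reworks the "Execution protection" comment into "Security measures", and switches discussion_db_name from database.db to default. A version-gated upgrade of such a file might look like the following sketch; the key names and version number come from the diff, but the function, path handling, and defaults dict are illustrative assumptions, not the project's actual migration code:

import yaml  # PyYAML

CURRENT_VERSION = 67  # matches the bump in this commit

# Only the keys this commit touches, with their new values.
NEW_DEFAULTS = {
    "petals_base_url": "http://localhost:8064",
    "lollms_base_url": "http://localhost:1234",
    "elastic_search_url": "http://localhost:9200",
    "vllm_url": "http://localhost:8000",
    "vllm_model_path": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
    "discussion_db_name": "default",
}

def upgrade_config(path):
    # Load the YAML, bump outdated files to the new defaults, write back.
    with open(path, encoding="utf-8") as f:
        cfg = yaml.safe_load(f) or {}
    if cfg.get("version", 0) < CURRENT_VERSION:
        cfg.update(NEW_DEFAULTS)
        cfg["version"] = CURRENT_VERSION
        with open(path, "w", encoding="utf-8") as f:
            yaml.safe_dump(cfg, f, sort_keys=False)
    return cfg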
View File

@@ -81,7 +81,7 @@ class Service:
    def __init__(
        self,
        app:LollmsApplication,
-       base_url="http://127.0.0.1:11434",
+       base_url="http://localhost:8000",
        wait_max_retries = 5
    ):
        self.base_url = base_url
@@ -106,7 +106,7 @@ class Service:
        # run vllm
        if platform.system() == 'Windows':
            #subprocess.Popen(['wsl', 'ls', '$HOME'])
-           subprocess.Popen(['wsl', 'bash', '$HOME/run_vllm.sh'])
+           subprocess.Popen(['wsl', 'bash', '$HOME/run_vllm.sh', self.app.config.vllm_model_path])
        else:
            subprocess.Popen(['bash', f'{Path.home()}/run_vllm.sh', self.app.config.vllm_model_path])
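
In the Python service wrapper, the default base_url moves from 127.0.0.1:11434 (ollama's port, likely a copy-paste leftover) to vLLM's own http://localhost:8000, and the Windows/WSL launch now forwards the configured model path to run_vllm.sh, matching what the Linux branch already did. Since the constructor carries a wait_max_retries parameter, a startup wait loop along these lines would be a natural companion; this is a sketch assuming the launched server exposes vLLM's OpenAI-compatible /v1/models endpoint, not the project's actual code:

import time
import urllib.request

def wait_for_vllm(base_url="http://localhost:8000", wait_max_retries=5, delay=2.0):
    # Poll the OpenAI-compatible model list until the server answers.
    for _ in range(wait_max_retries):
        try:
            with urllib.request.urlopen(f"{base_url}/v1/models", timeout=2.0):
                return True  # server is up and answering
        except OSError:
            time.sleep(delay)  # not up yet; back off and retry
    return False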