Mirror of https://github.com/ParisNeo/lollms.git, synced 2024-12-20 21:23:17 +00:00.

Commit 781b1382d7 ("upgraded"), parent c3007e744a.

@@ -1,5 +1,5 @@
-# =================== Lord Of Large Language Models Configuration file ===========================
-version: 46
+# =================== Lord Of Large Language Multimodal Systems Configuration file ===========================
+version: 49
 binding_name: null
 model_name: null
 
@@ -79,6 +79,16 @@ petals_base_url: http://0.0.0.0:8010
 enable_lollms_service: false
 lollms_base_url: http://0.0.0.0:1234
 
+# elastic search service
+elastic_search_service: false
+elastic_search_url: http://0.0.0.0:9606
+
+# vll service
+vllm_service: false
+vllm_url: http://0.0.0.0:8000
+vllm_model_path: mistralai/Mistral-7B-v0.1
+
+
 # Audio
 media_on: false
 audio_in_language: 'en-US'
@@ -114,3 +124,8 @@ positive_boost: null
 negative_boost: null
 force_output_language_to_be: null
 fun_mode: False
+
+
+# webui configurations
+show_code_of_conduct: true
+activate_audio_infos: true
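
For reference, the entries added above are ordinary YAML keys, so they can be read like the rest of the configuration. A minimal sketch of consuming the new elastic_search_* and vllm_* keys, assuming the file is loaded with PyYAML; the path below is hypothetical and stands in for wherever lollms resolves its active configuration:

    # Minimal sketch: read the service keys introduced with the version 49 schema.
    # The config path is hypothetical; lollms resolves its own config location.
    import yaml

    with open("personal_data/configs/local_config.yaml", "r", encoding="utf-8") as f:
        cfg = yaml.safe_load(f)

    if cfg.get("version", 0) < 49:
        raise RuntimeError("expected the upgraded (version 49) schema")

    if cfg.get("elastic_search_service", False):
        print("Elasticsearch expected at", cfg["elastic_search_url"])

    if cfg.get("vllm_service", False):
        print("vLLM expected at", cfg["vllm_url"], "serving", cfg["vllm_model_path"])
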
@@ -1,5 +1,5 @@
-# =================== Lord Of Large Language Models Configuration file ===========================
-version: 48
+# =================== Lord Of Large Language Multimodal Systems Configuration file ===========================
+version: 49
 binding_name: null
 model_name: null
 
@@ -124,3 +124,8 @@ positive_boost: null
 negative_boost: null
 force_output_language_to_be: null
 fun_mode: False
+
+
+# webui configurations
+show_code_of_conduct: true
+activate_audio_infos: true

@@ -64,7 +64,8 @@ class Service:
             self,
             app:LollmsApplication,
             base_url="http://127.0.0.1:11434",
-            wait_max_retries = 5
+            wait_max_retries = 5,
+            wait_for_service=True
         ):
         self.base_url = base_url
         # Get the current directory
@@ -91,7 +92,10 @@ class Service:
             subprocess.Popen(['bash', f'{Path.home()}/run_ollama.sh'])
 
         # Wait until the service is available at http://127.0.0.1:7860/
-        self.wait_for_service(max_retries=wait_max_retries)
+        if wait_for_service:
+            self.wait_for_service(max_retries=wait_max_retries)
+        else:
+            ASCIIColors.warning("We are not waiting for the OLLAMA service to be up.\nThis means that you may need to wait a bit before you can use it.")
 
     def wait_for_service(self, max_retries = 150, show_warning=True):
         url = f"{self.base_url}"
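
The wait_for_service method body is not shown in this diff; judging by its max_retries parameter, it presumably polls base_url until the ollama server answers. A standalone sketch of that polling pattern, with hypothetical names, which the new wait_for_service=False flag lets callers bypass:

    # Standalone sketch of the blocking wait the new flag can skip.
    # Names are hypothetical; this is not the repository's implementation.
    import time
    import requests

    def wait_for_http_service(base_url, max_retries=150, delay_s=1.0):
        """Poll base_url until it responds or max_retries is exhausted."""
        for _ in range(max_retries):
            try:
                requests.get(base_url, timeout=2)
                return True   # any HTTP answer means the server socket is up
            except requests.exceptions.RequestException:
                time.sleep(delay_s)
        return False

    # With this commit, passing wait_for_service=False to Service(...) skips a
    # wait of this kind and only prints a warning instead.
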
@@ -204,7 +204,8 @@ class LollmsSD:
             username=None,
             password=None,
             auto_sd_base_url=None,
-            share=False
+            share=False,
+            wait_for_service=True
         ):
         if auto_sd_base_url=="" or auto_sd_base_url=="http://127.0.0.1:7860":
             auto_sd_base_url = None
@@ -264,7 +265,10 @@ class LollmsSD:
             ASCIIColors.success("Launching Auto1111's SD succeeded")
 
             # Wait until the service is available at http://127.0.0.1:7860/
-            self.wait_for_service(max_retries=max_retries)
+            if wait_for_service:
+                self.wait_for_service(max_retries=max_retries)
+            else:
+                ASCIIColors.warning("We are not waiting for the SD service to be up.\nThis means that you may need to wait a bit before you can use it.")
 
         self.default_sampler = sampler
         self.default_steps = steps
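
The LollmsSD change mirrors the ollama one. A usage sketch under stated assumptions: the import path and the leading app argument are not shown in these hunks and are assumed; only share and wait_for_service appear above:

    # Illustrative only: the import path and the app argument are assumptions;
    # share and wait_for_service are the parameters visible in the hunks above.
    from lollms.services.sd.lollms_sd import LollmsSD

    def start_sd_without_blocking(app):
        """Launch Auto1111's SD but return immediately instead of polling it."""
        return LollmsSD(
            app,
            share=False,
            wait_for_service=False,  # new in this commit: skip the startup wait
        )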