lollms-webui/configs/default.yaml

version: 4
config: default
ctx_size: 2048
db_path: databases/database.db
debug: false
n_threads: 8
host: localhost
language: en-US
# Supported backends include llamacpp, gpt-j, and gpt4all
backend: gpt4all
model: null
n_predict: 1024
nb_messages_to_remember: 5
personality_language: english
personality_category: generic
personality: gpt4all
port: 9600
repeat_last_n: 40
repeat_penalty: 1.2
seed: -1
temperature: 0.9
top_k: 50
top_p: 0.95
voice: ""
use_gpu: false # Not active yet
auto_read: false
use_avx2: true # AVX2 is used by default; if your CPU does not support it, disable this option
use_new_ui: false # Use the old UI by default; set to true to enable the new UI
override_personality_model_parameters: false # If true, the personality's model parameters are overridden by those of this configuration (may affect personality behaviour)
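
A minimal sketch (not part of the file above) of how a front end might consume this configuration: it loads the YAML with PyYAML and groups the sampling-related keys for a generation call. The file path and the use of PyYAML here are assumptions for illustration, not the actual lollms-webui loading code.

import yaml  # requires PyYAML (pip install pyyaml)

# Load the default configuration (path assumed for this example).
with open("lollms-webui/configs/default.yaml", "r", encoding="utf-8") as f:
    config = yaml.safe_load(f)

# Group the sampling-related fields exactly as they appear in the file.
sampling = {
    "temperature": config["temperature"],        # 0.9
    "top_k": config["top_k"],                    # 50
    "top_p": config["top_p"],                    # 0.95
    "repeat_penalty": config["repeat_penalty"],  # 1.2
    "repeat_last_n": config["repeat_last_n"],    # 40
    "seed": config["seed"],                      # -1 is typically treated as a random seed
    "n_predict": config["n_predict"],            # maximum number of tokens to generate
}

print(f"backend={config['backend']} ctx_size={config['ctx_size']}")
print(sampling)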