lollms-webui/configs/config.yaml

32 lines
852 B
YAML
Raw Normal View History

2023-05-26 12:10:16 +02:00
version: 5
user_name: user
2023-04-14 11:58:07 +02:00
config: default
2023-05-05 16:40:08 +02:00
ctx_size: 2048
n_gpu_layers: 20 # Number of layers to offload to the GPU; depends on your GPU memory size
db_path: databases/database.db
2023-04-14 11:58:07 +02:00
debug: false
n_threads: 8
2023-04-14 11:58:07 +02:00
host: localhost
language: en-US
2023-05-25 23:24:14 +02:00
# Supported bindings include llamacpp, gpt-j and gpt_4all
binding: gpt_4all
2023-05-11 15:09:35 +02:00
model: null
n_predict: 1024
2023-04-14 11:58:07 +02:00
nb_messages_to_remember: 5
2023-04-20 19:30:03 +02:00
personality_language: english
2023-05-21 22:51:20 +02:00
personality_category: default
2023-04-30 22:40:19 +02:00
personality: gpt4all
2023-04-14 11:58:07 +02:00
port: 9600
repeat_last_n: 40
repeat_penalty: 1.2
2023-05-07 03:44:42 +02:00
seed: -1
temperature: 0.9
top_k: 50
2023-04-12 22:36:03 +02:00
top_p: 0.95
2023-04-14 11:58:07 +02:00
voice: ""
2023-04-14 23:29:11 +02:00
use_gpu: false # Not active yet
2023-04-20 19:30:03 +02:00
auto_read: false
2023-05-02 22:53:27 +02:00
use_avx2: true # AVX2 is enabled by default; set this to false if your CPU does not support it
2023-05-22 01:13:55 +02:00
use_new_ui: true # If true, use the new UI; set to false to fall back to the old UI
override_personality_model_parameters: false # If true, the personality's model parameters are overridden by those of this configuration (may affect personality behaviour)