LocalAI/examples/configurations/phi-2.yaml
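
# Model runtime settings: "name" is the identifier used to select this model in
# API requests, "context_size" is the prompt context window in tokens, "f16"
# enables 16-bit floating-point inference, "gpu_layers" offloads up to 90 model
# layers to the GPU, and "mmap" memory-maps the weights instead of loading them
# fully into RAM.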
name: phi-2
context_size: 2048
f16: true
gpu_layers: 90
mmap: true
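# Model source and default sampling parameters: the Q8_0 GGUF quantization of
# Phi-2 is downloaded automatically from TheBloke's Hugging Face repository;
# temperature, top_k and top_p set the sampling defaults applied to requests.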
parameters:
  model: huggingface://TheBloke/phi-2-GGUF/phi-2.Q8_0.gguf
  temperature: 0.2
  top_k: 40
  top_p: 0.95
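
# Prompt templates: Phi-2 expects the "Instruct: ... Output:" prompt format.
# {{.Input}} is replaced with the user's input; the chat template is defined
# once with a YAML anchor (&template) and reused for completions (*template).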
template:
  chat: &template |
    Instruct: {{.Input}}
    Output:
  completion: *template
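
# A minimal usage sketch (an illustration, not part of the original file): assuming
# LocalAI is serving on its default port 8080 with this config in the models
# directory, the model can be called by its "name" through the OpenAI-compatible
# chat completions endpoint, e.g. from Python:
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:8080/v1/chat/completions",
#       json={
#           "model": "phi-2",
#           "messages": [{"role": "user", "content": "Explain memory mapping."}],
#       },
#   )
#   print(resp.json()["choices"][0]["message"]["content"])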