name: tinyllama-chat
mmap: true
parameters:
  model: huggingface://TheBloke/TinyLlama-1.1B-Chat-v0.3-GGUF/tinyllama-1.1b-chat-v0.3.Q8_0.gguf
  temperature: 0.2
  top_k: 40
  top_p: 0.95
template:
  chat_message: |
    <|im_start|>{{if eq .RoleName "assistant"}}assistant{{else if eq .RoleName "system"}}system{{else if eq .RoleName "user"}}user{{end}}
    {{if .Content}}{{.Content}}{{end}}<|im_end|>
  chat: |
    {{.Input}}
    <|im_start|>assistant

  completion: |
    {{.Input}}
context_size: 4096
f16: true
stopwords:
- <|im_end|>
gpu_layers: 90

usage: |
    curl http://localhost:8080/v1/chat/completions -H "Content-Type: application/json" -d '{
        "model": "tinyllama-chat",
        "messages": [{"role": "user", "content": "How are you doing?", "temperature": 0.1}]
    }'
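The curl command in the usage field targets LocalAI's OpenAI-compatible /v1/chat/completions endpoint, so any OpenAI client library can talk to this model as well. Below is a minimal sketch using the openai Python package (v1.x), assuming a LocalAI instance is already running on localhost:8080 with this tinyllama-chat configuration loaded; the API key is only a placeholder to satisfy the client, since a default LocalAI install does not require one.

from openai import OpenAI

# Point the OpenAI client at the local LocalAI server instead of api.openai.com.
# The api_key value is a placeholder; LocalAI does not enforce a key by default.
client = OpenAI(base_url="http://localhost:8080/v1", api_key="sk-local")

response = client.chat.completions.create(
    model="tinyllama-chat",  # matches the name: field in the configuration above
    messages=[{"role": "user", "content": "How are you doing?"}],
    temperature=0.1,
)
print(response.choices[0].message.content)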