diff --git a/gallery/index.yaml b/gallery/index.yaml
index afc2ed78..e5a329d0 100644
--- a/gallery/index.yaml
+++ b/gallery/index.yaml
@@ -1046,6 +1046,25 @@
       - filename: Akhil-Theerthala_Kuvera-8B-v0.1.0-Q4_K_M.gguf
         sha256: a4e5f379ad58b4225620b664f2c67470f40b43d49a6cf05c83d10ab34ddceb85
         uri: huggingface://bartowski/Akhil-Theerthala_Kuvera-8B-v0.1.0-GGUF/Akhil-Theerthala_Kuvera-8B-v0.1.0-Q4_K_M.gguf
+- !!merge <<: *qwen3
+  name: "openbuddy_openbuddy-r1-0528-distill-qwen3-32b-preview0-qat"
+  icon: https://raw.githubusercontent.com/OpenBuddy/OpenBuddy/main/media/demo.png
+  url: "github:mudler/LocalAI/gallery/qwen3-openbuddy.yaml@master"
+  urls:
+    - https://huggingface.co/OpenBuddy/OpenBuddy-R1-0528-Distill-Qwen3-32B-Preview0-QAT
+    - https://huggingface.co/bartowski/OpenBuddy_OpenBuddy-R1-0528-Distill-Qwen3-32B-Preview0-QAT-GGUF
+  description: |
+    Base Model: Qwen/Qwen3-32B
+    Context Length: 40K Tokens
+    License: Apache 2.0
+    Training Data: Distilled from DeepSeek-R1-0528
+  overrides:
+    parameters:
+      model: OpenBuddy_OpenBuddy-R1-0528-Distill-Qwen3-32B-Preview0-QAT-Q4_K_M.gguf
+    files:
+      - filename: OpenBuddy_OpenBuddy-R1-0528-Distill-Qwen3-32B-Preview0-QAT-Q4_K_M.gguf
+        sha256: 4862bc5841f34bd7402a66b2149d6948465fef63e50499ab2d07c89f77aec651
+        uri: huggingface://bartowski/OpenBuddy_OpenBuddy-R1-0528-Distill-Qwen3-32B-Preview0-QAT-GGUF/OpenBuddy_OpenBuddy-R1-0528-Distill-Qwen3-32B-Preview0-QAT-Q4_K_M.gguf
 - &gemma3
   url: "github:mudler/LocalAI/gallery/gemma.yaml@master"
   name: "gemma-3-27b-it"
diff --git a/gallery/qwen3-openbuddy.yaml b/gallery/qwen3-openbuddy.yaml
new file mode 100644
index 00000000..754d730d
--- /dev/null
+++ b/gallery/qwen3-openbuddy.yaml
@@ -0,0 +1,40 @@
+---
+name: "qwen3-openbuddy"
+
+config_file: |
+  mmap: true
+  template:
+    chat_message: |
+      <|role|>{{ .RoleName }}<|says|>
+      {{ if .FunctionCall -}}
+      {{ else if eq .RoleName "tool" -}}
+      {{ end -}}
+      {{ if .Content -}}
+      {{.Content }}
+      {{ end -}}
+      {{ if .FunctionCall -}}
+      {{toJson .FunctionCall}}
+      {{ end -}}<|end|>
+    function: |
+      <|role|>system<|says|>
+      You are a function calling AI model. You are provided with functions to execute. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools:
+      {{range .Functions}}
+      {'type': 'function', 'function': {'name': '{{.Name}}', 'description': '{{.Description}}', 'parameters': {{toJson .Parameters}} }}
+      {{end}}
+      For each function call return a json object with function name and arguments
+      <|end|>
+      {{.Input -}}
+      <|role|>assistant<|says|>
+    chat: |
+      {{.Input -}}
+      <|role|>assistant<|says|>
+    completion: |
+      {{.Input}}
+  context_size: 8192
+  f16: true
+  stopwords:
+    - '<|im_end|>'
+    - '<|end|>'
+    - ''
+    - ''
+    - '<|endoftext|>'