context_size: 4096
f16: true
mmap: true
mmproj: minicpm-v-2_6-mmproj-f16.gguf
name: gpt-4o
parameters:
  model: minicpm-v-2_6-Q4_K_M.gguf
stopwords:
- <|im_end|>
-
-
- <|endoftext|>
template:
  chat: |
    {{.Input -}}
    <|im_start|>assistant
  chat_message: |
    <|im_start|>{{ .RoleName }}
    {{ if .FunctionCall -}}
    Function call:
    {{ else if eq .RoleName "tool" -}}
    Function response:
    {{ end -}}
    {{ if .Content -}}
    {{.Content }}
    {{ end -}}
    {{ if .FunctionCall -}}
    {{toJson .FunctionCall}}
    {{ end -}}<|im_end|>
  completion: |
    {{.Input}}
  function: |
    <|im_start|>system
    You are a function calling AI model. You are provided with functions to execute. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools:
    {{range .Functions}}
    {'type': 'function', 'function': {'name': '{{.Name}}', 'description': '{{.Description}}', 'parameters': {{toJson .Parameters}} }}
    {{end}}
    For each function call return a json object with function name and arguments
    <|im_end|>
    {{.Input -}}
    <|im_start|>assistant
download_files:
- filename: minicpm-v-2_6-Q4_K_M.gguf
  sha256: 3a4078d53b46f22989adbf998ce5a3fd090b6541f112d7e936eb4204a04100b1
  uri: huggingface://openbmb/MiniCPM-V-2_6-gguf/ggml-model-Q4_K_M.gguf
- filename: minicpm-v-2_6-mmproj-f16.gguf
  uri: huggingface://openbmb/MiniCPM-V-2_6-gguf/mmproj-model-f16.gguf
  sha256: 4485f68a0f1aa404c391e788ea88ea653c100d8e98fe572698f701e5809711fd
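
This config registers the MiniCPM-V 2.6 weights under the model name gpt-4o and attaches the multimodal projector via mmproj, so any OpenAI-compatible client can send it image prompts. The following is a minimal Python sketch of such a request; the endpoint http://localhost:8080, the image file example.jpg, and the prompt text are illustrative assumptions and are not part of the config itself.

# Minimal usage sketch, assuming LocalAI serves the config above on
# http://localhost:8080 and that example.jpg exists in the working directory.
import base64

import requests

# Encode a local image as a data URI so it can be sent inline.
with open("example.jpg", "rb") as f:
    image_b64 = base64.b64encode(f.read()).decode("utf-8")

payload = {
    # "gpt-4o" is the `name` the config assigns to minicpm-v-2_6-Q4_K_M.gguf.
    "model": "gpt-4o",
    "messages": [
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "What is in this image?"},
                {
                    "type": "image_url",
                    "image_url": {"url": f"data:image/jpeg;base64,{image_b64}"},
                },
            ],
        }
    ],
}

resp = requests.post("http://localhost:8080/v1/chat/completions", json=payload)
resp.raise_for_status()
print(resp.json()["choices"][0]["message"]["content"])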