diff --git a/api/openai/chat.go b/api/openai/chat.go
index 0c603886..4028e3ce 100644
--- a/api/openai/chat.go
+++ b/api/openai/chat.go
@@ -109,6 +109,7 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
 
 		var predInput string
 
+		suppressConfigSystemPrompt := false
 		mess := []string{}
 		for messageIndex, i := range input.Messages {
 			var content string
@@ -146,7 +147,7 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
 					content = templatedChatMessage
 				}
 			}
-			// If this model doesn't have such a template, or if
+			// If this model doesn't have such a template, or if that template fails to return a value, template at the message level.
 			if content == "" {
 				if r != "" {
 					if contentExists {
@@ -177,6 +178,10 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
 						}
 					}
 				}
+				// Special Handling: System. We care if it was printed at all, not the r branch, so check separately
+				if contentExists && role == "system" {
+					suppressConfigSystemPrompt = true
+				}
 			}
 
 			mess = append(mess, content)
@@ -207,8 +212,10 @@
 
 		// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
 		templatedInput, err := o.Loader.EvaluateTemplateForPrompt(model.ChatPromptTemplate, templateFile, model.PromptTemplateData{
-			Input:     predInput,
-			Functions: funcs,
+			SystemPrompt:         config.SystemPrompt,
+			SuppressSystemPrompt: suppressConfigSystemPrompt,
+			Input:                predInput,
+			Functions:            funcs,
 		})
 		if err == nil {
 			predInput = templatedInput
diff --git a/api/openai/completion.go b/api/openai/completion.go
index e7406ebb..19d24be1 100644
--- a/api/openai/completion.go
+++ b/api/openai/completion.go
@@ -123,7 +123,8 @@ func CompletionEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
 		for k, i := range config.PromptStrings {
 			// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
 			templatedInput, err := o.Loader.EvaluateTemplateForPrompt(model.CompletionPromptTemplate, templateFile, model.PromptTemplateData{
-				Input: i,
+				SystemPrompt: config.SystemPrompt,
+				Input:        i,
 			})
 			if err == nil {
 				i = templatedInput
diff --git a/api/openai/edit.go b/api/openai/edit.go
index d5a7f279..ef37131a 100644
--- a/api/openai/edit.go
+++ b/api/openai/edit.go
@@ -35,8 +35,9 @@ func EditEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
 		for _, i := range config.InputStrings {
 			// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
 			templatedInput, err := o.Loader.EvaluateTemplateForPrompt(model.EditPromptTemplate, templateFile, model.PromptTemplateData{
-				Input:       i,
-				Instruction: input.Instruction,
+				Input:        i,
+				Instruction:  input.Instruction,
+				SystemPrompt: config.SystemPrompt,
 			})
 			if err == nil {
 				i = templatedInput
diff --git a/pkg/model/loader.go b/pkg/model/loader.go
index 73d13ebd..a4d86dbc 100644
--- a/pkg/model/loader.go
+++ b/pkg/model/loader.go
@@ -20,10 +20,12 @@ import (
 // These are the definitions of all possible variables LocalAI will currently populate for use in a prompt template file
 // Please note: Not all of these are populated on every endpoint - your template should either be tested for each endpoint you map it to, or tolerant of zero values.
 type PromptTemplateData struct {
-	Input        string
-	Instruction  string
-	Functions    []grammar.Function
-	MessageIndex int
+	SystemPrompt         string
+	SuppressSystemPrompt bool // used by chat specifically to indicate that SystemPrompt above should be _ignored_
+	Input                string
+	Instruction          string
+	Functions            []grammar.Function
+	MessageIndex         int
 }
 
 // TODO: Ask mudler about FunctionCall stuff being useful at the message level?
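
For illustration, a minimal sketch of how a prompt template could consume the two new fields. Only the field names come from the `PromptTemplateData` struct above; the template text, the `chat.tmpl` name, and the standalone harness are assumptions, not part of this diff. LocalAI template files use Go `text/template` syntax, so the sketch renders with the standard library:

```go
// Sketch only: a hypothetical "chat.tmpl" exercising the new fields.
package main

import (
	"os"
	"text/template"
)

// Trimmed-down copy of PromptTemplateData for a self-contained example.
type PromptTemplateData struct {
	SystemPrompt         string
	SuppressSystemPrompt bool
	Input                string
}

// Emit the config-level system prompt only when it is set and the request
// did not already supply a system message (SuppressSystemPrompt is false).
const chatTmpl = `{{- if and .SystemPrompt (not .SuppressSystemPrompt) }}{{ .SystemPrompt }}
{{ end -}}
{{ .Input }}
`

func main() {
	t := template.Must(template.New("chat").Parse(chatTmpl))

	// No system message came in on the request: the config prompt applies.
	t.Execute(os.Stdout, PromptTemplateData{
		SystemPrompt: "You are a helpful assistant.",
		Input:        "USER: Hi there!\nASSISTANT:",
	})

	// The request carried its own system message, so the chat endpoint sets
	// SuppressSystemPrompt and the template skips the config-level prompt.
	t.Execute(os.Stdout, PromptTemplateData{
		SystemPrompt:         "You are a helpful assistant.",
		SuppressSystemPrompt: true,
		Input:                "SYSTEM: Be terse.\nUSER: Hi there!\nASSISTANT:",
	})
}
```

The design point the diff encodes: a system message supplied in the request (detected via `contentExists && role == "system"` in the chat endpoint) takes precedence over the config-level `SystemPrompt`, and templates opt into that behavior by checking `SuppressSystemPrompt` rather than the endpoint rewriting the prompt for them. The completion and edit endpoints only gain `SystemPrompt`, since they have no per-message system role to suppress against.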