feat: "simple" chat/edit/completion template system prompt from config (#856)

Author: Dave, 2023-08-02 18:19:55 -04:00, committed by GitHub
parent fc8aec7324
commit 7fb8b4191f
4 changed files with 21 additions and 10 deletions


@@ -109,6 +109,7 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
         var predInput string
+        suppressConfigSystemPrompt := false
         mess := []string{}
         for messageIndex, i := range input.Messages {
             var content string
@@ -146,7 +147,7 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
                     content = templatedChatMessage
                 }
             }
-            // If this model doesn't have such a template, or if
+            // If this model doesn't have such a template, or if that template fails to return a value, template at the message level.
             if content == "" {
                 if r != "" {
                     if contentExists {
@@ -177,6 +178,10 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
                         }
                     }
                 }
+                // Special Handling: System. We care if it was printed at all, not the r branch, so check separately
+                if contentExists && role == "system" {
+                    suppressConfigSystemPrompt = true
+                }
             }
             mess = append(mess, content)
@@ -207,8 +212,10 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
         // A model can have a "file.bin.tmpl" file associated with a prompt template prefix
         templatedInput, err := o.Loader.EvaluateTemplateForPrompt(model.ChatPromptTemplate, templateFile, model.PromptTemplateData{
-            Input:     predInput,
-            Functions: funcs,
+            SystemPrompt:         config.SystemPrompt,
+            SuppressSystemPrompt: suppressConfigSystemPrompt,
+            Input:                predInput,
+            Functions:            funcs,
         })
         if err == nil {
             predInput = templatedInput
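The chat change boils down to one decision: the config-level system prompt is injected only when the request itself did not carry a system message with content. Below is a minimal standalone sketch of that decision; the `chatMessage` type and `buildSystemPrompt` helper are illustrative names for this example, not LocalAI's actual schema types or functions.

```go
package main

import "fmt"

// Illustrative request message type; LocalAI's real OpenAI-schema type differs.
type chatMessage struct {
	Role    string
	Content string
}

// buildSystemPrompt mirrors the logic this commit adds: the config-level
// system prompt is used only if no system message with content arrived in
// the request; otherwise it is suppressed.
func buildSystemPrompt(configSystemPrompt string, messages []chatMessage) string {
	suppressConfigSystemPrompt := false
	for _, m := range messages {
		contentExists := m.Content != ""
		if contentExists && m.Role == "system" {
			suppressConfigSystemPrompt = true
		}
	}
	if suppressConfigSystemPrompt {
		return ""
	}
	return configSystemPrompt
}

func main() {
	cfg := "You are a helpful assistant."

	// No system message in the request: the config prompt is used.
	fmt.Printf("%q\n", buildSystemPrompt(cfg, []chatMessage{
		{Role: "user", Content: "hi"},
	}))

	// The request supplied its own system message: the config prompt is suppressed.
	fmt.Printf("%q\n", buildSystemPrompt(cfg, []chatMessage{
		{Role: "system", Content: "Be terse."},
		{Role: "user", Content: "hi"},
	}))
}
```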


@@ -123,7 +123,8 @@ func CompletionEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
         for k, i := range config.PromptStrings {
             // A model can have a "file.bin.tmpl" file associated with a prompt template prefix
             templatedInput, err := o.Loader.EvaluateTemplateForPrompt(model.CompletionPromptTemplate, templateFile, model.PromptTemplateData{
-                Input: i,
+                SystemPrompt: config.SystemPrompt,
+                Input:        i,
             })
             if err == nil {
                 i = templatedInput


@@ -35,8 +35,9 @@ func EditEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
         for _, i := range config.InputStrings {
             // A model can have a "file.bin.tmpl" file associated with a prompt template prefix
             templatedInput, err := o.Loader.EvaluateTemplateForPrompt(model.EditPromptTemplate, templateFile, model.PromptTemplateData{
-                Input:       i,
-                Instruction: input.Instruction,
+                Input:        i,
+                Instruction:  input.Instruction,
+                SystemPrompt: config.SystemPrompt,
             })
             if err == nil {
                 i = templatedInput
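For completion and edit, the new `SystemPrompt` field simply becomes available to the prompt template. A hedged sketch of how a template might consume it, driving Go's text/template directly instead of LocalAI's loader; the instruct-style template text is an example of my own, not one shipped with LocalAI:

```go
package main

import (
	"os"
	"text/template"
)

// Subset of PromptTemplateData containing only the fields this template reads.
type promptTemplateData struct {
	SystemPrompt string
	Instruction  string
	Input        string
}

func main() {
	// Hypothetical edit-style template; the {{if .SystemPrompt}} guard keeps it
	// usable for models configured without a system prompt.
	const editTmpl = `{{if .SystemPrompt}}{{.SystemPrompt}}

{{end}}### Instruction:
{{.Instruction}}

### Input:
{{.Input}}

### Response:
`

	t := template.Must(template.New("edit").Parse(editTmpl))
	if err := t.Execute(os.Stdout, promptTemplateData{
		SystemPrompt: "You rewrite text exactly as instructed.",
		Instruction:  "Fix the grammar",
		Input:        "she go to school",
	}); err != nil {
		panic(err)
	}
}
```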


@@ -20,10 +20,12 @@ import (
 // These are the definitions of all possible variables LocalAI will currently populate for use in a prompt template file
 // Please note: Not all of these are populated on every endpoint - your template should either be tested for each endpoint you map it to, or tolerant of zero values.
 type PromptTemplateData struct {
-    Input        string
-    Instruction  string
-    Functions    []grammar.Function
-    MessageIndex int
+    SystemPrompt         string
+    SuppressSystemPrompt bool // used by chat specifically to indicate that SystemPrompt above should be _ignored_
+    Input                string
+    Instruction          string
+    Functions            []grammar.Function
+    MessageIndex         int
 }
 // TODO: Ask mudler about FunctionCall stuff being useful at the message level?
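A chat template that honors the new `SuppressSystemPrompt` flag could gate the prompt as shown below. This is a hedged sketch: the template text and the rendering harness are illustrative only, and the real code path goes through LocalAI's model loader rather than calling text/template directly.

```go
package main

import (
	"os"
	"text/template"
)

// Subset of PromptTemplateData limited to the new fields plus Input.
type promptTemplateData struct {
	SystemPrompt         string
	SuppressSystemPrompt bool
	Input                string
}

func main() {
	// Example chat template: emit the config-level system prompt only when it
	// is set and the request did not already provide a system message.
	const chatTmpl = `{{if and .SystemPrompt (not .SuppressSystemPrompt)}}{{.SystemPrompt}}
{{end}}{{.Input}}
`

	t := template.Must(template.New("chat").Parse(chatTmpl))

	// No system message in the request: the config prompt is prepended.
	_ = t.Execute(os.Stdout, promptTemplateData{
		SystemPrompt: "You are a helpful assistant.",
		Input:        "USER: hello\nASSISTANT:",
	})

	os.Stdout.WriteString("---\n")

	// The request already sent a system message: the config prompt is dropped.
	_ = t.Execute(os.Stdout, promptTemplateData{
		SystemPrompt:         "You are a helpful assistant.",
		SuppressSystemPrompt: true,
		Input:                "SYSTEM: be terse\nUSER: hello\nASSISTANT:",
	})
}
```

Because templates are expected to tolerate zero values, the `{{if}}` guard also keeps the same template usable on endpoints or models where `SystemPrompt` is never populated.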