mirror of
https://github.com/mudler/LocalAI.git
synced 2025-02-21 17:46:41 +00:00
Allow injecting a prompt as part of the call
This commit is contained in:
parent
4a932483e1
commit
c17dcc5e9d
5
api.go
5
api.go
@ -64,6 +64,7 @@ func api(defaultModel *llama.LLama, loader *ModelLoader, listenAddr string, thre
|
||||
input := new(struct {
|
||||
Messages []Message `json:"messages"`
|
||||
Model string `json:"model"`
|
||||
Prompt string `json:"prompt"`
|
||||
})
|
||||
if err := c.BodyParser(input); err != nil {
|
||||
return err
|
||||
@ -126,6 +127,7 @@ func api(defaultModel *llama.LLama, loader *ModelLoader, listenAddr string, thre
|
||||
|
||||
predInput := strings.Join(mess, "\n")
|
||||
|
||||
if input.Prompt == "" {
|
||||
// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
|
||||
templatedInput, err := loader.TemplatePrefix(input.Model, struct {
|
||||
Input string
|
||||
@ -133,6 +135,9 @@ func api(defaultModel *llama.LLama, loader *ModelLoader, listenAddr string, thre
|
||||
if err == nil {
|
||||
predInput = templatedInput
|
||||
}
|
||||
} else {
|
||||
predInput = input.Prompt + predInput
|
||||
}
|
||||
|
||||
// Generate the prediction using the language model
|
||||
prediction, err := model.Predict(
|
||||
|
Loading…
x
Reference in New Issue
Block a user