Mirror of https://github.com/mudler/LocalAI.git
fix: return index and delta in stream token (#680)
Signed-off-by: mudler <mudler@localai.io>
commit 02136531a3
parent d3a486a4f8
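
The hunks below only change how the streaming Choice values are built, but that determines what each streamed chunk looks like on the wire: every choice now carries an explicit index, and chat chunks carry the token in a delta object, matching the OpenAI streaming format. A minimal, self-contained sketch of the effect follows; the struct definitions here are simplified stand-ins with assumed JSON tags, not the repository's actual types.

package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-ins for the response types used by the endpoints.
// Field names and JSON tags are assumed to mirror the OpenAI wire format;
// the real definitions in the repository carry more fields.
type Message struct {
	Role    string `json:"role,omitempty"`
	Content string `json:"content,omitempty"`
}

type Choice struct {
	Index        int      `json:"index"`
	FinishReason string   `json:"finish_reason,omitempty"`
	Text         string   `json:"text,omitempty"`
	Delta        *Message `json:"delta,omitempty"`
}

type OpenAIResponse struct {
	Model   string   `json:"model"`
	Choices []Choice `json:"choices"`
	Object  string   `json:"object"`
}

func main() {
	// A per-token chat chunk as built after this commit: the token text goes
	// into Delta and the choice carries an explicit Index.
	chunk := OpenAIResponse{
		Model:   "example-model", // placeholder model name
		Choices: []Choice{{Delta: &Message{Content: "Hello"}, Index: 0}},
		Object:  "chat.completion.chunk",
	}

	b, _ := json.Marshal(chunk)
	fmt.Println(string(b))
	// {"model":"example-model","choices":[{"index":0,"delta":{"content":"Hello"}}],"object":"chat.completion.chunk"}
}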
@@ -153,7 +153,12 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 		ComputeChoices(s, req, config, o, loader, func(s string, c *[]Choice) {}, func(s string) bool {
 			resp := OpenAIResponse{
 				Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
-				Choices: []Choice{{Text: s}},
+				Choices: []Choice{
+					{
+						Index: 0,
+						Text:  s,
+					},
+				},
 				Object: "text_completion",
 			}
 			log.Debug().Msgf("Sending goroutine: %s", s)
@@ -229,7 +234,13 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 
 	resp := &OpenAIResponse{
 		Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
-		Choices: []Choice{{FinishReason: "stop"}},
+		Choices: []Choice{
+			{
+				Index:        0,
+				FinishReason: "stop",
+			},
+		},
+		Object: "text_completion",
 	}
 	respData, _ := json.Marshal(resp)
 
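
With the two completion-endpoint hunks applied, both the per-token chunks and the terminating chunk report choice index 0, and the terminating chunk now also sets the object field. Continuing the sketch above (same stand-in types and assumed JSON tags, not the repository's actual definitions), the final chunk of a completion stream would marshal roughly as:

// Terminating chunk of a completion stream after this change,
// using the stand-in types from the sketch above.
final := OpenAIResponse{
	Model: "example-model", // placeholder model name
	Choices: []Choice{
		{
			Index:        0,
			FinishReason: "stop",
		},
	},
	Object: "text_completion",
}
// json.Marshal(final) ->
// {"model":"example-model","choices":[{"index":0,"finish_reason":"stop"}],"object":"text_completion"}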
@@ -346,7 +357,7 @@ func chatEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 		ComputeChoices(s, req, config, o, loader, func(s string, c *[]Choice) {}, func(s string) bool {
 			resp := OpenAIResponse{
 				Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
-				Choices: []Choice{{Delta: &Message{Content: s}}},
+				Choices: []Choice{{Delta: &Message{Content: s}, Index: 0}},
 				Object: "chat.completion.chunk",
 			}
 			log.Debug().Msgf("Sending goroutine: %s", s)
@@ -430,7 +441,13 @@ func chatEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 
 	resp := &OpenAIResponse{
 		Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
-		Choices: []Choice{{FinishReason: "stop"}},
+		Choices: []Choice{
+			{
+				FinishReason: "stop",
+				Index:        0,
+				Delta:        &Message{},
+			}},
+		Object: "chat.completion.chunk",
 	}
 	respData, _ := json.Marshal(resp)
 
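
The chat endpoint's terminating chunk likewise gains an index and, notably, an empty delta object, which OpenAI-compatible streaming clients generally expect on the final message. Using the same stand-in types as in the sketch above (assumed tags, not the repository's actual definitions), it would marshal roughly as:

// Terminating chunk of a chat stream after this change,
// using the stand-in types from the sketch above.
stop := OpenAIResponse{
	Model: "example-model", // placeholder model name
	Choices: []Choice{
		{
			FinishReason: "stop",
			Index:        0,
			Delta:        &Message{},
		},
	},
	Object: "chat.completion.chunk",
}
// json.Marshal(stop) ->
// {"model":"example-model","choices":[{"index":0,"finish_reason":"stop","delta":{}}],"object":"chat.completion.chunk"}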