Mirror of https://github.com/mudler/LocalAI.git (synced 2024-12-19 20:57:54 +00:00)
fix(base-grpc): close channel in base grpc server (#3734)
If the LLM backend does not implement any logic for PredictStream, we close the channel immediately so the process is not left hanging.

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
parent e28e80857b
commit 092bb0bd6b
@@ -41,6 +41,7 @@ func (llm *Base) Predict(opts *pb.PredictOptions) (string, error) {
 }
 
 func (llm *Base) PredictStream(opts *pb.PredictOptions, results chan string) error {
+	close(results)
 	return fmt.Errorf("unimplemented")
 }
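Why closing the channel matters: a caller that ranges over the results channel only terminates once the channel is closed. The following is a minimal, self-contained sketch (the consumer loop and the function name unimplementedPredictStream are hypothetical, not the actual LocalAI caller) showing how the stub behaves after this fix:

	// Minimal sketch (hypothetical consumer, not the actual LocalAI caller) of why
	// close(results) matters: a reader that ranges over the results channel only
	// terminates once the channel is closed. If the base PredictStream returned
	// without closing it, the range below would block forever.
	package main

	import "fmt"

	// unimplementedPredictStream mimics the base stub after this fix: it closes
	// the channel before reporting that streaming is unimplemented.
	func unimplementedPredictStream(results chan string) error {
		close(results)
		return fmt.Errorf("unimplemented")
	}

	func main() {
		results := make(chan string)

		go func() {
			if err := unimplementedPredictStream(results); err != nil {
				fmt.Println("stream error:", err)
			}
		}()

		// Without close(results) in the stub, this loop would never exit.
		for token := range results {
			fmt.Print(token)
		}
		fmt.Println("stream finished")
	}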