Mirror of https://github.com/mudler/LocalAI.git (synced 2025-06-17 14:38:08 +00:00)

Merge branch 'master' into default_miro
@@ -34,7 +34,9 @@ func (f *FederatedCLI) Run(ctx *cliContext.Context) error {
 		return fmt.Errorf("creating a new node: %w", err)
 	}
 
-	if err := p2p.ServiceDiscoverer(context.Background(), n, f.Peer2PeerToken, p2p.FederatedID, nil); err != nil {
+	if err := p2p.ServiceDiscoverer(context.Background(), n, f.Peer2PeerToken, p2p.FederatedID, func(servicesID string, tunnel p2p.NodeData) {
+		log.Debug().Msgf("Discovered node: %s", tunnel.ID)
+	}); err != nil {
 		return err
 	}
 
@@ -98,6 +100,10 @@ func Proxy(ctx context.Context, node *node.Node, listenAddr, service string) err
 		}
 	}
 
+	if len(tunnelAddresses) == 0 {
+		log.Error().Msg("No available nodes yet")
+		return
+	}
 	// open a TCP stream to one of the tunnels
 	// chosen randomly
 	// TODO: optimize this and track usage
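The check added above prevents the proxy from choosing a tunnel when none have been discovered yet; the surrounding comments note that the tunnel is then picked at random. Below is a small, self-contained sketch of that guard-and-random-pick pattern; the addresses and helper names are illustrative and do not come from LocalAI's p2p package:

// Standalone sketch: guard against an empty node list, then pick a tunnel at random,
// mirroring the "No available nodes yet" early return and the random choice noted above.
package main

import (
	"fmt"
	"math/rand"
)

// pickTunnel returns a randomly chosen address, or false if none are available yet.
func pickTunnel(tunnelAddresses []string) (string, bool) {
	if len(tunnelAddresses) == 0 {
		return "", false
	}
	return tunnelAddresses[rand.Intn(len(tunnelAddresses))], true
}

func main() {
	if addr, ok := pickTunnel([]string{"127.0.0.1:9090", "127.0.0.1:9091"}); ok {
		fmt.Println("dialing tunnel:", addr)
	} else {
		fmt.Println("no available nodes yet")
	}
}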
@@ -119,7 +119,7 @@ func (r *RunCMD) Run(ctx *cliContext.Context) error {
 		}
 
 		log.Info().Msg("Starting P2P server discovery...")
-		if err := p2p.ServiceDiscoverer(context.Background(), node, token, "", func() {
+		if err := p2p.ServiceDiscoverer(context.Background(), node, token, "", func(serviceID string, node p2p.NodeData) {
 			var tunnelAddresses []string
 			for _, v := range p2p.GetAvailableNodes("") {
 				if v.IsOnline() {
@@ -225,18 +225,10 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup
 		}
 
 		// Update input grammar
-		// Handle if we should return "name" instead of "functions"
-		if config.FunctionsConfig.FunctionName {
-			jsStruct := funcs.ToJSONNameStructure()
-			config.Grammar = jsStruct.Grammar(config.FunctionsConfig.GrammarConfig.Options()...)
-		} else {
-			jsStruct := funcs.ToJSONFunctionStructure()
-			config.Grammar = jsStruct.Grammar(config.FunctionsConfig.GrammarConfig.Options()...)
-		}
+		jsStruct := funcs.ToJSONStructure(config.FunctionsConfig.FunctionNameKey, config.FunctionsConfig.FunctionNameKey)
+		config.Grammar = jsStruct.Grammar(config.FunctionsConfig.GrammarConfig.Options()...)
 	case input.JSONFunctionGrammarObject != nil:
 		config.Grammar = input.JSONFunctionGrammarObject.Grammar(config.FunctionsConfig.GrammarConfig.Options()...)
-	case input.JSONFunctionGrammarObjectName != nil:
-		config.Grammar = input.JSONFunctionGrammarObjectName.Grammar(config.FunctionsConfig.GrammarConfig.Options()...)
 	default:
 		// Force picking one of the functions by the request
 		if config.FunctionToCall() != "" {
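The hunk above collapses the boolean branch between a "name" grammar and a "functions" grammar into a single structure parameterized by the configured name key. A minimal, self-contained sketch of that refactor shape follows; the types and the grammar string are stand-ins, not LocalAI's functions package:

// Standalone sketch of the refactor above: one builder parameterized by a key replaces
// two separate builders selected by a boolean flag. All names here are illustrative.
package main

import "fmt"

type jsonStructure struct {
	nameKey string
	argsKey string
}

// toJSONStructure mirrors the shape of funcs.ToJSONStructure(nameKey, argsKey):
// a single constructor covering both the former "name" and "functions" paths.
func toJSONStructure(nameKey, argsKey string) jsonStructure {
	return jsonStructure{nameKey: nameKey, argsKey: argsKey}
}

// grammar stands in for the real grammar generation.
func (j jsonStructure) grammar() string {
	return fmt.Sprintf("root ::= object keyed by %q and %q", j.nameKey, j.argsKey)
}

func main() {
	// "function" plays the role of config.FunctionsConfig.FunctionNameKey, which the
	// diff above passes for both parameters.
	fmt.Println(toJSONStructure("function", "function").grammar())
}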
@@ -144,7 +144,7 @@ func copyStream(closer chan struct{}, dst io.Writer, src io.Reader) {
 
 // This is the main of the server (which keeps the env variable updated)
 // This starts a goroutine that keeps LLAMACPP_GRPC_SERVERS updated with the discovered services
-func ServiceDiscoverer(ctx context.Context, n *node.Node, token, servicesID string, discoveryFunc func()) error {
+func ServiceDiscoverer(ctx context.Context, n *node.Node, token, servicesID string, discoveryFunc func(serviceID string, node NodeData)) error {
 	if servicesID == "" {
 		servicesID = defaultServicesID
 	}
@@ -166,7 +166,7 @@ func ServiceDiscoverer(ctx context.Context, n *node.Node, token, servicesID stri
 		case tunnel := <-tunnels:
 			AddNode(servicesID, tunnel)
 			if discoveryFunc != nil {
-				discoveryFunc()
+				discoveryFunc(servicesID, tunnel)
 			}
 		}
 	}
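With the signature change above, discovery now hands each tunnel to the caller as it arrives instead of invoking an argument-less callback. The following self-contained sketch shows that pattern; NodeData and the channel here are local stand-ins for LocalAI's types, and the real AddNode registration is only indicated by a comment:

// Standalone sketch of the discovery callback pattern: each node received on the
// tunnel channel is handed to the caller's callback, mirroring
// discoveryFunc(servicesID, tunnel) in the diff above.
package main

import (
	"context"
	"fmt"
	"time"
)

type NodeData struct {
	ID string
}

// serviceDiscoverer drains the tunnels channel until the context is done, invoking
// discoveryFunc for every discovered node.
func serviceDiscoverer(ctx context.Context, servicesID string, tunnels <-chan NodeData, discoveryFunc func(serviceID string, node NodeData)) {
	for {
		select {
		case <-ctx.Done():
			return
		case tunnel := <-tunnels:
			// AddNode(servicesID, tunnel) would register the node here in the real code.
			if discoveryFunc != nil {
				discoveryFunc(servicesID, tunnel)
			}
		}
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond)
	defer cancel()

	tunnels := make(chan NodeData, 1)
	tunnels <- NodeData{ID: "node-1"}

	serviceDiscoverer(ctx, "federated", tunnels, func(serviceID string, node NodeData) {
		fmt.Printf("discovered node %s for service %s\n", node.ID, serviceID)
	})
}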
@@ -14,7 +14,7 @@ func GenerateToken() string {
 	return "not implemented"
 }
 
-func ServiceDiscoverer(ctx context.Context, node *node.Node, token, servicesID string, fn func()) error {
+func ServiceDiscoverer(ctx context.Context, node *node.Node, token, servicesID string, fn func(string, NodeData)) error {
 	return fmt.Errorf("not implemented")
 }
 
@@ -179,8 +179,7 @@ type OpenAIRequest struct {
 	// A grammar to constrain the LLM output
 	Grammar string `json:"grammar" yaml:"grammar"`
 
-	JSONFunctionGrammarObject *functions.JSONFunctionStructureFunction `json:"grammar_json_functions" yaml:"grammar_json_functions"`
-	JSONFunctionGrammarObjectName *functions.JSONFunctionStructureName `json:"grammar_json_name" yaml:"grammar_json_name"`
+	JSONFunctionGrammarObject *functions.JSONFunctionStructure `json:"grammar_json_functions" yaml:"grammar_json_functions"`
 
 	Backend string `json:"backend" yaml:"backend"`
 
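On the wire, the struct change above means the separate grammar_json_name field is gone, so clients send their function-grammar JSON only through grammar_json_functions. The sketch below is an assumption-heavy illustration: the endpoint path, model name, and the empty placeholder grammar object are not taken from this diff.

// Standalone sketch of a client request after the schema change; only the
// "grammar_json_functions" field name comes from the diff, everything else is a placeholder.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	body := map[string]interface{}{
		"model": "some-model", // hypothetical model name
		"messages": []map[string]string{
			{"role": "user", "content": "What is the weather in Rome?"},
		},
		// Previously some clients used "grammar_json_name"; after this change only
		// "grammar_json_functions" remains. The value below is a placeholder object.
		"grammar_json_functions": map[string]interface{}{},
	}

	payload, _ := json.Marshal(body)
	resp, err := http.Post("http://localhost:8080/v1/chat/completions", "application/json", bytes.NewReader(payload))
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}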