Repository: https://github.com/mudler/LocalAI.git
feat: allow to disable '/metrics' endpoints for local stats (#3945)
The "/metrics" endpoint seems to be a source of confusion: people tend to believe we collect telemetry data just because we import "opentelemetry". Even though all metrics are local, it is still a good idea to allow disabling them when they are not needed.

See also: https://github.com/mudler/LocalAI/issues/3942

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Commit 8737a65760 (parent 418c582430)
@@ -53,6 +53,7 @@ type RunCMD struct {
 	OpaqueErrors bool `env:"LOCALAI_OPAQUE_ERRORS" default:"false" help:"If true, all error responses are replaced with blank 500 errors. This is intended only for hardening against information leaks and is normally not recommended." group:"hardening"`
 	UseSubtleKeyComparison bool `env:"LOCALAI_SUBTLE_KEY_COMPARISON" default:"false" help:"If true, API Key validation comparisons will be performed using constant-time comparisons rather than simple equality. This trades off performance on each request for resiliancy against timing attacks." group:"hardening"`
 	DisableApiKeyRequirementForHttpGet bool `env:"LOCALAI_DISABLE_API_KEY_REQUIREMENT_FOR_HTTP_GET" default:"false" help:"If true, a valid API key is not required to issue GET requests to portions of the web ui. This should only be enabled in secure testing environments" group:"hardening"`
+	DisableMetricsEndpoint bool `env:"LOCALAI_DISABLE_METRICS_ENDPOINT,DISABLE_METRICS_ENDPOINT" default:"false" help:"Disable the /metrics endpoint" group:"api"`
 	HttpGetExemptedEndpoints []string `env:"LOCALAI_HTTP_GET_EXEMPTED_ENDPOINTS" default:"^/$,^/browse/?$,^/talk/?$,^/p2p/?$,^/chat/?$,^/text2image/?$,^/tts/?$,^/static/.*$,^/swagger.*$" help:"If LOCALAI_DISABLE_API_KEY_REQUIREMENT_FOR_HTTP_GET is overriden to true, this is the list of endpoints to exempt. Only adjust this in case of a security incident or as a result of a personal security posture review" group:"hardening"`
 	Peer2Peer bool `env:"LOCALAI_P2P,P2P" name:"p2p" default:"false" help:"Enable P2P mode" group:"p2p"`
 	Peer2PeerDHTInterval int `env:"LOCALAI_P2P_DHT_INTERVAL,P2P_DHT_INTERVAL" default:"360" name:"p2p-dht-interval" help:"Interval for DHT refresh (used during token generation)" group:"p2p"`
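The new flag has the same shape as the neighbouring hardening options: a plain bool, off by default, bindable from either of two environment variable names. As a rough standard-library-only sketch of what that env tag implies (illustration only: the real binding is done by the CLI framework from the struct tags, and the precedence between the two variable names is an assumption here, not something the diff states):

package main

import (
	"fmt"
	"os"
	"strconv"
)

// disableMetricsFromEnv mirrors the intent of the tag
// `env:"LOCALAI_DISABLE_METRICS_ENDPOINT,DISABLE_METRICS_ENDPOINT" default:"false"`:
// either variable can switch the endpoint off, and an unset or unparsable
// value falls back to the default of false.
func disableMetricsFromEnv() bool {
	for _, key := range []string{"LOCALAI_DISABLE_METRICS_ENDPOINT", "DISABLE_METRICS_ENDPOINT"} {
		if raw, ok := os.LookupEnv(key); ok {
			if v, err := strconv.ParseBool(raw); err == nil {
				return v
			}
		}
	}
	return false
}

func main() {
	fmt.Println("disable /metrics endpoint:", disableMetricsFromEnv())
}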
@@ -108,6 +109,10 @@ func (r *RunCMD) Run(ctx *cliContext.Context) error {
 		config.WithLoadToMemory(r.LoadToMemory),
 	}
 
+	if r.DisableMetricsEndpoint {
+		opts = append(opts, config.DisableMetricsEndpoint)
+	}
+
 	token := ""
 	if r.Peer2Peer || r.Peer2PeerToken != "" {
 		log.Info().Msg("P2P mode enabled")
@@ -39,6 +39,7 @@ type ApplicationConfig struct {
 	OpaqueErrors bool
 	UseSubtleKeyComparison bool
 	DisableApiKeyRequirementForHttpGet bool
+	DisableMetrics bool
 	HttpGetExemptedEndpoints []*regexp.Regexp
 	DisableGalleryEndpoint bool
 	LoadToMemory []string
@@ -350,6 +351,10 @@ func WithDisableApiKeyRequirementForHttpGet(required bool) AppOption {
 	}
 }
 
+var DisableMetricsEndpoint AppOption = func(o *ApplicationConfig) {
+	o.DisableMetrics = true
+}
+
 func WithHttpGetExemptedEndpoints(endpoints []string) AppOption {
 	return func(o *ApplicationConfig) {
 		o.HttpGetExemptedEndpoints = []*regexp.Regexp{}
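Note the asymmetry with the surrounding helpers: WithHttpGetExemptedEndpoints and friends are constructors that return an AppOption, while DisableMetricsEndpoint is an AppOption value itself, which is why the Run hunk above appends it to opts directly instead of calling it. Below is a minimal, self-contained sketch of this functional-options pattern; the types are simplified stand-ins, not LocalAI's real ApplicationConfig or constructor.

package main

import "fmt"

// Simplified stand-ins for config.ApplicationConfig and config.AppOption.
type ApplicationConfig struct {
	DisableMetrics bool
	ContextSize    int
}

type AppOption func(*ApplicationConfig)

// Value-style option: ready to be appended as-is, no call needed.
var DisableMetricsEndpoint AppOption = func(o *ApplicationConfig) {
	o.DisableMetrics = true
}

// Constructor-style option: returns an AppOption closed over its argument.
func WithContextSize(n int) AppOption {
	return func(o *ApplicationConfig) {
		o.ContextSize = n
	}
}

// newApplicationConfig applies every option, in order, to a zero-value config.
func newApplicationConfig(opts ...AppOption) *ApplicationConfig {
	cfg := &ApplicationConfig{}
	for _, opt := range opts {
		opt(cfg)
	}
	return cfg
}

func main() {
	opts := []AppOption{WithContextSize(512)}
	disableMetricsEndpoint := true // stands in for the parsed CLI flag
	if disableMetricsEndpoint {
		opts = append(opts, DisableMetricsEndpoint)
	}
	fmt.Printf("%+v\n", newApplicationConfig(opts...))
	// Output: &{DisableMetrics:true ContextSize:512}
}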
@@ -109,19 +109,21 @@ func App(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *confi
 		app.Use(recover.New())
 	}
 
-	metricsService, err := services.NewLocalAIMetricsService()
-	if err != nil {
-		return nil, err
-	}
+	if !appConfig.DisableMetrics {
+		metricsService, err := services.NewLocalAIMetricsService()
+		if err != nil {
+			return nil, err
+		}
 
-	if metricsService != nil {
-		app.Use(localai.LocalAIMetricsAPIMiddleware(metricsService))
-		app.Hooks().OnShutdown(func() error {
-			return metricsService.Shutdown()
-		})
-	}
+		if metricsService != nil {
+			app.Use(localai.LocalAIMetricsAPIMiddleware(metricsService))
+			app.Hooks().OnShutdown(func() error {
+				return metricsService.Shutdown()
+			})
+		}
 
-	// Health Checks should always be exempt from auth, so register these first
+	}
+	// Health Checks should always be exempt from auth, so register these first
 	routes.HealthRoutes(app)
 
 	kaConfig, err := middleware.GetKeyAuthConfig(appConfig)
@@ -42,7 +42,9 @@ func RegisterLocalAIRoutes(app *fiber.App,
 	app.Post("/stores/get", localai.StoresGetEndpoint(sl, appConfig))
 	app.Post("/stores/find", localai.StoresFindEndpoint(sl, appConfig))
 
-	app.Get("/metrics", localai.LocalAIMetricsEndpoint())
+	if !appConfig.DisableMetrics {
+		app.Get("/metrics", localai.LocalAIMetricsEndpoint())
+	}
 
 	// Experimental Backend Statistics Module
 	backendMonitorService := services.NewBackendMonitorService(ml, cl, appConfig) // Split out for now
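Taken together, the last two hunks mean that with metrics disabled the metrics service, its middleware, the shutdown hook, and the /metrics route are simply never registered, so a client requesting /metrics gets Fiber's default 404. A minimal sketch of that observable effect, assuming Fiber v2 and a stand-in handler rather than LocalAI's actual LocalAIMetricsEndpoint:

package main

import (
	"fmt"
	"net/http/httptest"

	"github.com/gofiber/fiber/v2"
)

// buildApp registers the /metrics route only when metrics are enabled,
// mirroring the guard added in this commit (the handler is a stand-in).
func buildApp(disableMetrics bool) *fiber.App {
	app := fiber.New()
	if !disableMetrics {
		app.Get("/metrics", func(c *fiber.Ctx) error {
			return c.SendString("# Prometheus-style exposition would go here\n")
		})
	}
	return app
}

func main() {
	for _, disabled := range []bool{false, true} {
		app := buildApp(disabled)
		resp, err := app.Test(httptest.NewRequest("GET", "/metrics", nil))
		if err != nil {
			panic(err)
		}
		// With the route unregistered, Fiber falls through to its default 404 handler.
		fmt.Printf("disableMetrics=%v -> HTTP %d\n", disabled, resp.StatusCode)
	}
}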