LocalAI/main.go

package main

import (
	"embed"
	"os"
	"os/signal"
	"path/filepath"
	"strings"
	"syscall"

	api "github.com/go-skynet/LocalAI/api"
	"github.com/go-skynet/LocalAI/api/options"
	"github.com/go-skynet/LocalAI/internal"
	model "github.com/go-skynet/LocalAI/pkg/model"

	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"
	"github.com/urfave/cli/v2"
)

// backendAssets embeds the runtime libraries some backends depend on
// (assumed to live under backend-assets/ at build time); they are extracted
// on startup to the directory given by --backend-assets-path.
//
//go:embed backend-assets/*
var backendAssets embed.FS

func main() {
	// Pretty-print logs to stderr instead of zerolog's default JSON output.
	log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})

	// Clean up the process on SIGINT/SIGTERM.
	go func() {
		c := make(chan os.Signal, 1) // buffer size 1 so the notifier does not block
		signal.Notify(c, os.Interrupt, syscall.SIGTERM)
		<-c
		os.Exit(1)
	}()

	path, err := os.Getwd()
	if err != nil {
		log.Error().Msgf("error: %s", err.Error())
		os.Exit(1)
	}
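
	// Build the CLI application. Every flag below can also be set through the
	// environment variable listed in its EnvVars, so the server is fully
	// configurable without arguments (e.g. when running in a container).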
	app := &cli.App{
		Name:    "LocalAI",
		Version: internal.PrintableVersion(),
		Usage:   "OpenAI compatible API for running LLaMA/GPT models locally on CPU with consumer grade hardware.",
		Flags: []cli.Flag{
			&cli.BoolFlag{
				Name:    "f16",
				EnvVars: []string{"F16"},
			},
			&cli.BoolFlag{
				Name:    "autoload-galleries",
				EnvVars: []string{"AUTOLOAD_GALLERIES"},
			},
			&cli.BoolFlag{
				Name:    "debug",
				EnvVars: []string{"DEBUG"},
			},
			&cli.BoolFlag{
				Name:    "cors",
				EnvVars: []string{"CORS"},
			},
			&cli.StringFlag{
				Name:    "cors-allow-origins",
				EnvVars: []string{"CORS_ALLOW_ORIGINS"},
			},
			&cli.IntFlag{
				Name:    "threads",
				Usage:   "Number of threads used for parallel computation. Using the number of physical cores is suggested.",
				EnvVars: []string{"THREADS"},
				Value:   4,
			},
			&cli.StringFlag{
				Name:    "models-path",
				Usage:   "Path containing models used for inferencing",
				EnvVars: []string{"MODELS_PATH"},
				Value:   filepath.Join(path, "models"),
			},
			&cli.StringFlag{
				Name:    "galleries",
				Usage:   "JSON list of galleries",
				EnvVars: []string{"GALLERIES"},
			},
			&cli.StringFlag{
				Name:    "preload-models",
				Usage:   "A list of models to apply at startup, as a JSON array",
				EnvVars: []string{"PRELOAD_MODELS"},
			},
			&cli.StringFlag{
				Name:    "preload-models-config",
				Usage:   "Path to a YAML config file listing models to apply at startup",
				EnvVars: []string{"PRELOAD_MODELS_CONFIG"},
			},
			&cli.StringFlag{
				Name:    "config-file",
				Usage:   "Config file",
				EnvVars: []string{"CONFIG_FILE"},
			},
			&cli.StringFlag{
				Name:    "address",
				Usage:   "Bind address for the API server.",
				EnvVars: []string{"ADDRESS"},
				Value:   ":8080",
			},
			&cli.StringFlag{
				Name:    "image-path",
				Usage:   "Image directory",
				EnvVars: []string{"IMAGE_PATH"},
				Value:   "/tmp/generated/images",
			},
			&cli.StringFlag{
				Name:    "audio-path",
				Usage:   "Audio directory",
				EnvVars: []string{"AUDIO_PATH"},
				Value:   "/tmp/generated/audio",
			},
			&cli.StringFlag{
				Name:    "backend-assets-path",
				Usage:   "Path used to extract libraries that are required by some of the backends at runtime.",
				EnvVars: []string{"BACKEND_ASSETS_PATH"},
				Value:   "/tmp/localai/backend_data",
			},
			&cli.StringSliceFlag{
				Name:    "external-grpc-backends",
				Usage:   "A list of external gRPC backends, as <name>:<uri> pairs",
				EnvVars: []string{"EXTERNAL_GRPC_BACKENDS"},
			},
			&cli.IntFlag{
				Name:    "context-size",
				Usage:   "Default context size of the model",
				EnvVars: []string{"CONTEXT_SIZE"},
				Value:   512,
			},
			&cli.IntFlag{
				Name:    "upload-limit",
				Usage:   "Default upload limit, in MB",
				EnvVars: []string{"UPLOAD_LIMIT"},
				Value:   15,
			},
			&cli.StringSliceFlag{
				Name:    "api-keys",
				Usage:   "List of API keys to enable API authentication. When set, all requests must be authenticated with one of these keys.",
				EnvVars: []string{"API_KEY"},
			},
			&cli.BoolFlag{
				Name:    "preload-backend-only",
				Usage:   "If set, the API is NOT launched and only the preloaded models/backends are started. This is intended for multi-node setups.",
				EnvVars: []string{"PRELOAD_BACKEND_ONLY"},
				Value:   false,
			},
		},
		Description: `
LocalAI is a drop-in replacement OpenAI API which runs inference locally.

Some of the compatible models are:
- Vicuna
- Koala
- GPT4ALL
- GPT4ALL-J
- Cerebras
- Alpaca
- StableLM (ggml quantized)

For a list of compatible models, check out: https://localai.io/model-compatibility/index.html
`,
		UsageText: `local-ai [options]`,
		Copyright: "Ettore Di Giacinto",
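
		// Action translates the parsed flags into AppOptions and starts the
		// server (or, in preload-backend-only mode, just the backends).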
		Action: func(ctx *cli.Context) error {
			opts := []options.AppOption{
				options.WithConfigFile(ctx.String("config-file")),
				options.WithJSONStringPreload(ctx.String("preload-models")),
				options.WithYAMLConfigPreload(ctx.String("preload-models-config")),
				options.WithModelLoader(model.NewModelLoader(ctx.String("models-path"))),
				options.WithContextSize(ctx.Int("context-size")),
				options.WithDebug(ctx.Bool("debug")),
				options.WithImageDir(ctx.String("image-path")),
				options.WithAudioDir(ctx.String("audio-path")),
				options.WithF16(ctx.Bool("f16")),
				options.WithStringGalleries(ctx.String("galleries")),
				options.WithDisableMessage(false),
				options.WithCors(ctx.Bool("cors")),
				options.WithCorsAllowOrigins(ctx.String("cors-allow-origins")),
				options.WithThreads(ctx.Int("threads")),
				options.WithBackendAssets(backendAssets),
				options.WithBackendAssetsOutput(ctx.String("backend-assets-path")),
				options.WithUploadLimitMB(ctx.Int("upload-limit")),
				options.WithApiKeys(ctx.StringSlice("api-keys")),
			}
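
			// Each external backend is declared as "<name>:<uri>"; split on the
			// first ":" to separate the backend name from its gRPC URI, e.g.
			// "my-backend:127.0.0.1:9090" (illustrative values).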
			for _, v := range ctx.StringSlice("external-grpc-backends") {
				backend, uri, found := strings.Cut(v, ":")
				if !found {
					log.Error().Msgf("invalid external backend %q, expected <name>:<uri>", v)
					continue
				}
				opts = append(opts, options.WithExternalBackend(backend, uri))
			}

			if ctx.Bool("autoload-galleries") {
				opts = append(opts, options.EnableGalleriesAutoload)
			}
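
			// In preload-backend-only mode, start the configured backends and
			// preloaded models but skip the HTTP API entirely; useful for
			// worker nodes in multi-node setups.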
			if ctx.Bool("preload-backend-only") {
				_, _, err := api.Startup(opts...)
				return err
			}
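
			// Otherwise build the full HTTP API and serve it on the configured
			// bind address.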
			app, err := api.App(opts...)
			if err != nil {
				return err
			}

			return app.Listen(ctx.String("address"))
		},
	}

	if err := app.Run(os.Args); err != nil {
		log.Error().Msgf("error: %s", err.Error())
		os.Exit(1)
	}
}
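
// Example invocation (values are illustrative):
//
//	local-ai --models-path ./models --address ":8080" --threads 8 \
//		--external-grpc-backends "my-backend:127.0.0.1:9090"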