package main

import (
	"embed" // needed for the backendAssets embed.FS declaration below
	"os"
	"os/signal"
	"path/filepath"
	"syscall"

	"github.com/alecthomas/kong"
	"github.com/joho/godotenv"
	"github.com/mudler/LocalAI/core/cli"
	"github.com/mudler/LocalAI/internal"

	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"

	_ "github.com/mudler/LocalAI/swagger"
)
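
// backendAssets is assigned to cli.CLI.Context.BackendAssets in main below but
// was not declared in this file; a minimal declaration, assuming the upstream
// layout where generated backend assets are placed under backend-assets/ at
// build time.
//
//go:embed backend-assets/*
var backendAssets embed.FS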

func main() {
	var err error

	// Initialize zerolog at INFO; the desired level is set after the CLI options are parsed.
	log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})
	zerolog.SetGlobalLevel(zerolog.InfoLevel)

	// Catch signals from the OS requesting us to exit.
	go func() {
		c := make(chan os.Signal, 1) // buffer of size 1 so the notifier is never blocked
		signal.Notify(c, os.Interrupt, syscall.SIGTERM)
		<-c
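		// Note that os.Exit terminates the process immediately: deferred
		// functions do not run and no graceful shutdown is attempted.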
		os.Exit(1)
	}()

	// Handle loading environment variables from .env files.
	envFiles := []string{".env", "localai.env"}
	homeDir, err := os.UserHomeDir()
	if err == nil {
		envFiles = append(envFiles, filepath.Join(homeDir, "localai.env"), filepath.Join(homeDir, ".config/localai.env"))
	}
	envFiles = append(envFiles, "/etc/localai.env")
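
	// godotenv.Load never overrides variables that are already set, so when the
	// same key appears in more than one file, the first file loaded wins.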
	for _, envFile := range envFiles {
		if _, err := os.Stat(envFile); err == nil {
			log.Info().Str("envFile", envFile).Msg("env file found, loading environment variables from file")
			err = godotenv.Load(envFile)
			if err != nil {
				log.Error().Err(err).Str("envFile", envFile).Msg("failed to load environment variables from file")
				continue
			}
		}
	}

	// Actually parse the CLI options.
	ctx := kong.Parse(&cli.CLI,
		kong.Description(
			` LocalAI is a drop-in replacement for the OpenAI API, for running LLM, GPT and genAI models locally on CPUs and GPUs with consumer-grade hardware.

Some of the compatible models are:
- Vicuna
- Koala
- GPT4ALL
- GPT4ALL-J
- Cerebras
- Alpaca
- StableLM (ggml quantized)

For a list of compatible models, check out: https://localai.io/model-compatibility/index.html

Copyright: Ettore Di Giacinto

Version: ${version}
`,
		),
		kong.UsageOnError(),
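		// kong interpolates ${version} in the description above from the Vars
		// map below; the remaining Vars are presumably referenced as defaults
		// in struct tags inside core/cli.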
		kong.Vars{
			"basepath":         kong.ExpandPath("."),
			"remoteLibraryURL": "https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/model_library.yaml",
			"galleries":        `[{"name":"localai", "url":"github:mudler/LocalAI/gallery/index.yaml@master"}]`,
			"version":          internal.PrintableVersion(),
		},
	)
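
	// ctx now holds the resolved subcommand; it is executed via ctx.Run at the
	// end of main, once logging has been configured.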

	// Configure the logging level before we run the application.
	// This is here to preserve the existing --debug flag functionality.
	logLevel := "info"
	if cli.CLI.Debug && cli.CLI.LogLevel == nil {
		logLevel = "debug"
		zerolog.SetGlobalLevel(zerolog.DebugLevel)
		cli.CLI.LogLevel = &logLevel
	}

	if cli.CLI.LogLevel == nil {
		cli.CLI.LogLevel = &logLevel
	}
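
	// Any other value falls through the switch below without matching a case,
	// silently keeping the info level set at startup.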
	switch *cli.CLI.LogLevel {
	case "error":
		zerolog.SetGlobalLevel(zerolog.ErrorLevel)
		log.Info().Msg("Setting logging to error")
	case "warn":
		zerolog.SetGlobalLevel(zerolog.WarnLevel)
		log.Info().Msg("Setting logging to warn")
	case "info":
		zerolog.SetGlobalLevel(zerolog.InfoLevel)
		log.Info().Msg("Setting logging to info")
	case "debug":
		zerolog.SetGlobalLevel(zerolog.DebugLevel)
		log.Debug().Msg("Setting logging to debug")
	case "trace":
		zerolog.SetGlobalLevel(zerolog.TraceLevel)
		log.Trace().Msg("Setting logging to trace")
	}

	// Populate the application with the embedded backend assets.
	cli.CLI.Context.BackendAssets = backendAssets

	// Run the thing!
	err = ctx.Run(&cli.CLI.Context)
	if err != nil {
		log.Fatal().Err(err).Msg("Error running the application")
	}
}