mirror of
https://github.com/mudler/LocalAI.git
synced 2025-05-08 19:48:37 +00:00
Some checks failed
Explorer deployment / build-linux (push) Has been cancelled
GPU tests / ubuntu-latest (1.21.x) (push) Has been cancelled
generate and publish intel docker caches / generate_caches (intel/oneapi-basekit:2025.1.0-0-devel-ubuntu22.04, linux/amd64, ubuntu-latest) (push) Has been cancelled
build container images / hipblas-jobs (-aio-gpu-hipblas, rocm/dev-ubuntu-22.04:6.1, hipblas, true, ubuntu:22.04, extras, latest-gpu-hipblas, latest-aio-gpu-hipblas, --jobs=3 --output-sync=target, linux/amd64, arc-runner-set, auto, -hipblas) (push) Has been cancelled
build container images / hipblas-jobs (rocm/dev-ubuntu-22.04:6.1, hipblas, true, ubuntu:22.04, core, latest-gpu-hipblas-core, --jobs=3 --output-sync=target, linux/amd64, arc-runner-set, false, -hipblas-core) (push) Has been cancelled
build container images / self-hosted-jobs (-aio-gpu-intel-f16, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f16, true, ubuntu:22.04, extras, latest-gpu-intel-f16, latest-aio-gpu-intel-f16, --jobs=3 --output-sync=target, linux/amd64, arc-runner-set, auto, -sycl-f16) (push) Has been cancelled
build container images / self-hosted-jobs (-aio-gpu-intel-f32, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f32, true, ubuntu:22.04, extras, latest-gpu-intel-f32, latest-aio-gpu-intel-f32, --jobs=3 --output-sync=target, linux/amd64, arc-runner-set, auto, -sycl-f32) (push) Has been cancelled
build container images / self-hosted-jobs (-aio-gpu-nvidia-cuda-11, ubuntu:22.04, cublas, 11, 7, true, extras, latest-gpu-nvidia-cuda-11, latest-aio-gpu-nvidia-cuda-11, --jobs=3 --output-sync=target, linux/amd64, arc-runner-set, auto, -cublas-cuda11) (push) Has been cancelled
build container images / self-hosted-jobs (-aio-gpu-nvidia-cuda-12, ubuntu:22.04, cublas, 12, 0, true, extras, latest-gpu-nvidia-cuda-12, latest-aio-gpu-nvidia-cuda-12, --jobs=3 --output-sync=target, linux/amd64, arc-runner-set, auto, -cublas-cuda12) (push) Has been cancelled
build container images / self-hosted-jobs (quay.io/go-skynet/intel-oneapi-base:latest, sycl_f16, true, ubuntu:22.04, core, latest-gpu-intel-f16-core, --jobs=3 --output-sync=target, linux/amd64, arc-runner-set, false, -sycl-f16-core) (push) Has been cancelled
build container images / self-hosted-jobs (quay.io/go-skynet/intel-oneapi-base:latest, sycl_f32, true, ubuntu:22.04, core, latest-gpu-intel-f32-core, --jobs=3 --output-sync=target, linux/amd64, arc-runner-set, false, -sycl-f32-core) (push) Has been cancelled
build container images / self-hosted-jobs (ubuntu:22.04, , true, extras, --jobs=3 --output-sync=target, linux/amd64, arc-runner-set, auto, ) (push) Has been cancelled
build container images / core-image-build (-aio-cpu, ubuntu:22.04, , true, core, latest-cpu, latest-aio-cpu, --jobs=4 --output-sync=target, linux/amd64,linux/arm64, arc-runner-set, false, auto, -core) (push) Has been cancelled
build container images / core-image-build (ubuntu:22.04, cublas, 11, 7, true, core, latest-gpu-nvidia-cuda-12-core, --jobs=4 --output-sync=target, linux/amd64, arc-runner-set, false, false, -cublas-cuda11-core) (push) Has been cancelled
build container images / core-image-build (ubuntu:22.04, cublas, 12, 0, true, core, latest-gpu-nvidia-cuda-12-core, --jobs=4 --output-sync=target, linux/amd64, arc-runner-set, false, false, -cublas-cuda12-core) (push) Has been cancelled
build container images / core-image-build (ubuntu:22.04, vulkan, true, core, latest-gpu-vulkan-core, --jobs=4 --output-sync=target, linux/amd64, arc-runner-set, false, false, -vulkan-core) (push) Has been cancelled
build container images / gh-runner (nvcr.io/nvidia/l4t-jetpack:r36.4.0, cublas, 12, 0, true, core, latest-nvidia-l4t-arm64-core, --jobs=4 --output-sync=target, linux/arm64, ubuntu-24.04-arm, true, false, -nvidia-l4t-arm64-core) (push) Has been cancelled
Security Scan / tests (push) Has been cancelled
Tests extras backends / tests-transformers (push) Has been cancelled
Tests extras backends / tests-rerankers (push) Has been cancelled
Tests extras backends / tests-diffusers (push) Has been cancelled
Tests extras backends / tests-coqui (push) Has been cancelled
tests / tests-linux (1.21.x) (push) Has been cancelled
tests / tests-aio-container (push) Has been cancelled
tests / tests-apple (1.21.x) (push) Has been cancelled
Update swagger / swagger (push) Has been cancelled
Check if checksums are up-to-date / checksum_check (push) Has been cancelled
Bump dependencies / bump (mudler/LocalAI) (push) Has been cancelled
Bump dependencies / bump (main, PABannier/bark.cpp, BARKCPP_VERSION) (push) Has been cancelled
Bump dependencies / bump (master, ggerganov/whisper.cpp, WHISPER_CPP_VERSION) (push) Has been cancelled
Bump dependencies / bump (master, ggml-org/llama.cpp, CPPLLAMA_VERSION) (push) Has been cancelled
Bump dependencies / bump (master, leejet/stable-diffusion.cpp, STABLEDIFFUSION_GGML_VERSION) (push) Has been cancelled
Bump dependencies / bump (master, mudler/go-piper, PIPER_VERSION) (push) Has been cancelled
Bump dependencies / bump (master, mudler/go-stable-diffusion, STABLEDIFFUSION_VERSION) (push) Has been cancelled
generate and publish GRPC docker caches / generate_caches (ubuntu:22.04, linux/amd64,linux/arm64, arc-runner-set) (push) Has been cancelled
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
136 lines
5.1 KiB
Go
package cli
|
|
|
|
import (
|
|
"encoding/json"
|
|
"errors"
|
|
"fmt"
|
|
|
|
"github.com/rs/zerolog/log"
|
|
|
|
gguf "github.com/gpustack/gguf-parser-go"
|
|
cliContext "github.com/mudler/LocalAI/core/cli/context"
|
|
"github.com/mudler/LocalAI/core/config"
|
|
"github.com/mudler/LocalAI/core/gallery"
|
|
"github.com/mudler/LocalAI/pkg/downloader"
|
|
)
|
|
|
|
// UtilCMD groups the miscellaneous utility subcommands exposed under
// `local-ai util`. Each field is a kong subcommand.
type UtilCMD struct {
	// GGUFInfo dumps tokenizer/architecture/header metadata from a GGUF file.
	GGUFInfo GGUFInfoCMD `cmd:"" name:"gguf-info" help:"Get information about a GGUF file"`
	// HFScan runs a best-effort security scan over installed models.
	HFScan HFScanCMD `cmd:"" name:"hf-scan" help:"Checks installed models for known security issues. WARNING: this is a best-effort feature and may not catch everything!"`
	// UsecaseHeuristic reports which usecases LocalAI would offer for a model config.
	UsecaseHeuristic UsecaseHeuristicCMD `cmd:"" name:"usecase-heuristic" help:"Checks a specific model config and prints what usecase LocalAI will offer for it."`
}
|
|
|
|
// GGUFInfoCMD holds the flags/arguments for the `util gguf-info` subcommand.
type GGUFInfoCMD struct {
	// Args carries positional arguments; only Args[0] (the GGUF file path) is used.
	Args []string `arg:"" optional:"" name:"args" help:"Arguments to pass to the utility command"`
	// Header, when set, additionally prints every header metadata key/value pair.
	Header bool `optional:"" default:"false" name:"header" help:"Show header information"`
}
|
|
|
|
// HFScanCMD holds the flags/arguments for the `util hf-scan` subcommand.
type HFScanCMD struct {
	// ModelsPath is the directory containing installed models to scan.
	ModelsPath string `env:"LOCALAI_MODELS_PATH,MODELS_PATH" type:"path" default:"${basepath}/models" help:"Path containing models used for inferencing" group:"storage"`
	// Galleries is a JSON-encoded list of config.Gallery entries used to
	// cross-check installed models when no explicit URIs are given.
	Galleries string `env:"LOCALAI_GALLERIES,GALLERIES" help:"JSON list of galleries" group:"models" default:"${galleries}"`
	// ToScan lists specific HuggingFace URIs to scan; when empty, all
	// installed models are checked against the galleries instead.
	ToScan []string `arg:""`
}
|
|
|
|
// UsecaseHeuristicCMD holds the flags for the `util usecase-heuristic` subcommand.
type UsecaseHeuristicCMD struct {
	// ConfigName is the model config file to inspect.
	// NOTE(review): the `name:` tag holds help-style text rather than a flag
	// name — likely meant to be a `help:` tag; verify against kong usage.
	ConfigName string `name:"The config file to check"`
	// ModelsPath is the directory containing model configs.
	ModelsPath string `env:"LOCALAI_MODELS_PATH,MODELS_PATH" type:"path" default:"${basepath}/models" help:"Path containing models used for inferencing" group:"storage"`
}
|
|
|
|
func (u *GGUFInfoCMD) Run(ctx *cliContext.Context) error {
|
|
if u.Args == nil || len(u.Args) == 0 {
|
|
return fmt.Errorf("no GGUF file provided")
|
|
}
|
|
// We try to guess only if we don't have a template defined already
|
|
f, err := gguf.ParseGGUFFile(u.Args[0])
|
|
if err != nil {
|
|
// Only valid for gguf files
|
|
log.Error().Msgf("guessDefaultsFromFile: %s", "not a GGUF file")
|
|
return err
|
|
}
|
|
|
|
log.Info().
|
|
Any("eosTokenID", f.Tokenizer().EOSTokenID).
|
|
Any("bosTokenID", f.Tokenizer().BOSTokenID).
|
|
Any("modelName", f.Metadata().Name).
|
|
Any("architecture", f.Architecture().Architecture).Msgf("GGUF file loaded: %s", u.Args[0])
|
|
|
|
log.Info().Any("tokenizer", fmt.Sprintf("%+v", f.Tokenizer())).Msg("Tokenizer")
|
|
log.Info().Any("architecture", fmt.Sprintf("%+v", f.Architecture())).Msg("Architecture")
|
|
|
|
v, exists := f.Header.MetadataKV.Get("tokenizer.chat_template")
|
|
if exists {
|
|
log.Info().Msgf("chat_template: %s", v.ValueString())
|
|
}
|
|
|
|
if u.Header {
|
|
for _, metadata := range f.Header.MetadataKV {
|
|
log.Info().Msgf("%s: %+v", metadata.Key, metadata.Value)
|
|
}
|
|
// log.Info().Any("header", fmt.Sprintf("%+v", f.Header)).Msg("Header")
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
func (hfscmd *HFScanCMD) Run(ctx *cliContext.Context) error {
|
|
log.Info().Msg("LocalAI Security Scanner - This is BEST EFFORT functionality! Currently limited to huggingface models!")
|
|
if len(hfscmd.ToScan) == 0 {
|
|
log.Info().Msg("Checking all installed models against galleries")
|
|
var galleries []config.Gallery
|
|
if err := json.Unmarshal([]byte(hfscmd.Galleries), &galleries); err != nil {
|
|
log.Error().Err(err).Msg("unable to load galleries")
|
|
}
|
|
|
|
err := gallery.SafetyScanGalleryModels(galleries, hfscmd.ModelsPath)
|
|
if err == nil {
|
|
log.Info().Msg("No security warnings were detected for your installed models. Please note that this is a BEST EFFORT tool, and all issues may not be detected.")
|
|
} else {
|
|
log.Error().Err(err).Msg("! WARNING ! A known-vulnerable model is installed!")
|
|
}
|
|
return err
|
|
} else {
|
|
var errs error = nil
|
|
for _, uri := range hfscmd.ToScan {
|
|
log.Info().Str("uri", uri).Msg("scanning specific uri")
|
|
scanResults, err := downloader.HuggingFaceScan(downloader.URI(uri))
|
|
if err != nil && errors.Is(err, downloader.ErrUnsafeFilesFound) {
|
|
log.Error().Err(err).Strs("clamAV", scanResults.ClamAVInfectedFiles).Strs("pickles", scanResults.DangerousPickles).Msg("! WARNING ! A known-vulnerable model is included in this repo!")
|
|
errs = errors.Join(errs, err)
|
|
}
|
|
}
|
|
if errs != nil {
|
|
return errs
|
|
}
|
|
log.Info().Msg("No security warnings were detected for your installed models. Please note that this is a BEST EFFORT tool, and all issues may not be detected.")
|
|
return nil
|
|
}
|
|
}
|
|
|
|
func (uhcmd *UsecaseHeuristicCMD) Run(ctx *cliContext.Context) error {
|
|
if len(uhcmd.ConfigName) == 0 {
|
|
log.Error().Msg("ConfigName is a required parameter")
|
|
return fmt.Errorf("config name is a required parameter")
|
|
}
|
|
if len(uhcmd.ModelsPath) == 0 {
|
|
log.Error().Msg("ModelsPath is a required parameter")
|
|
return fmt.Errorf("model path is a required parameter")
|
|
}
|
|
bcl := config.NewBackendConfigLoader(uhcmd.ModelsPath)
|
|
err := bcl.LoadBackendConfig(uhcmd.ConfigName)
|
|
if err != nil {
|
|
log.Error().Err(err).Str("ConfigName", uhcmd.ConfigName).Msg("error while loading backend")
|
|
return err
|
|
}
|
|
bc, exists := bcl.GetBackendConfig(uhcmd.ConfigName)
|
|
if !exists {
|
|
log.Error().Str("ConfigName", uhcmd.ConfigName).Msg("ConfigName not found")
|
|
}
|
|
for name, uc := range config.GetAllBackendConfigUsecases() {
|
|
if bc.HasUsecases(uc) {
|
|
log.Info().Str("Usecase", name)
|
|
}
|
|
}
|
|
log.Info().Msg("---")
|
|
return nil
|
|
}
|