mirror of
https://github.com/mudler/LocalAI.git
synced 2025-06-14 21:18:07 +00:00
feat(llama.cpp/clip): inject gpu options if we detect GPUs (#5243)
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
committed by
GitHub
parent
cae9bf1308
commit
9628860c0e
@@ -4,6 +4,7 @@ import (
	"os"
	"path/filepath"

	"github.com/mudler/LocalAI/pkg/xsysinfo"
	"github.com/rs/zerolog/log"
	gguf "github.com/thxcode/gguf-parser-go"
)
@@ -35,4 +36,10 @@ func guessDefaultsFromFile(cfg *BackendConfig, modelPath string, defaultCtx int) {
		}
		cfg.ContextSize = &defaultCtx
	}

	if cfg.Options == nil {
		if xsysinfo.HasGPU("nvidia") || xsysinfo.HasGPU("amd") {
			cfg.Options = []string{"gpu"}
		}
	}
}
|
Reference in New Issue
Block a user