package model

import (
	"context"
	"errors"
	"fmt"
	"os"
	"path/filepath"
	"slices"
	"strings"
	"time"

	"github.com/elliotchance/orderedmap/v2"
	"github.com/klauspost/cpuid/v2"
	grpc "github.com/mudler/LocalAI/pkg/grpc"
	"github.com/mudler/LocalAI/pkg/library"
	"github.com/mudler/LocalAI/pkg/utils"
	"github.com/mudler/LocalAI/pkg/xsysinfo"
	"github.com/phayes/freeport"
	"github.com/rs/zerolog/log"
)
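
// Aliases maps alternative backend names to their canonical backend.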
var Aliases map[string]string = map[string]string{
	"go-llama":              LLamaCPP,
	"llama":                 LLamaCPP,
	"embedded-store":        LocalStoreBackend,
	"langchain-huggingface": LCHuggingFaceBackend,
}
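
// AutoDetect controls whether the best backend variant for the host is picked
// automatically; set DISABLE_AUTODETECT=true to turn it off.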
var AutoDetect = os.Getenv("DISABLE_AUTODETECT") != "true"

const (
	LlamaGGML = "llama-ggml"

	LLamaCPP = "llama-cpp"

	LLamaCPPAVX2     = "llama-cpp-avx2"
	LLamaCPPAVX      = "llama-cpp-avx"
	LLamaCPPFallback = "llama-cpp-fallback"
	LLamaCPPCUDA     = "llama-cpp-cuda"
	LLamaCPPHipblas  = "llama-cpp-hipblas"
	LLamaCPPSycl16   = "llama-cpp-sycl_16"
	LLamaCPPSycl32   = "llama-cpp-sycl_32"
	LLamaCPPGRPC     = "llama-cpp-grpc"

	WhisperBackend         = "whisper"
	StableDiffusionBackend = "stablediffusion"
	TinyDreamBackend       = "tinydream"
	PiperBackend           = "piper"
	LCHuggingFaceBackend   = "huggingface"
	LocalStoreBackend      = "local-store"
)
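
// backendPath returns the path of a backend binary inside the
// backend-assets/grpc tree of the asset directory.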
func backendPath(assetDir, backend string) string {
	return filepath.Join(assetDir, "backend-assets", "grpc", backend)
}

// backendsInAssetDir returns the list of backends in the asset directory
// that should be loaded
func backendsInAssetDir(assetDir string) (map[string][]string, error) {
	// Exclude backends from automatic loading
	excludeBackends := []string{LocalStoreBackend}

	entry, err := os.ReadDir(backendPath(assetDir, ""))
	if err != nil {
		return nil, err
	}
	backends := make(map[string][]string)
ENTRY:
	for _, e := range entry {
		for _, exclude := range excludeBackends {
			if e.Name() == exclude {
				continue ENTRY
			}
		}
		if e.IsDir() {
			continue
		}
		if strings.HasSuffix(e.Name(), ".log") {
			continue
		}

		// Skip the llama.cpp variants if we are autodetecting,
		// but always load the fallback variant if it exists
		if strings.Contains(e.Name(), LLamaCPP) && !strings.Contains(e.Name(), LLamaCPPFallback) && AutoDetect {
			continue
		}

		backends[e.Name()] = []string{}
	}

	// if we are autodetecting, we want to show the llama.cpp variants as a single backend
	if AutoDetect {
		// if we find the llama.cpp variants, show them as a single backend (llama-cpp),
		// as later we are going to pick that one up when starting the service
		foundLCPPAVX, foundLCPPAVX2, foundLCPPFallback, foundLCPPGRPC, foundLCPPCuda, foundLCPPHipblas, foundSycl16, foundSycl32 := false, false, false, false, false, false, false, false
		if _, ok := backends[LLamaCPP]; !ok {
			for _, e := range entry {
				if strings.Contains(e.Name(), LLamaCPPAVX2) && !foundLCPPAVX2 {
					backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPAVX2)
					foundLCPPAVX2 = true
				}
				if strings.Contains(e.Name(), LLamaCPPAVX) && !foundLCPPAVX {
					backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPAVX)
					foundLCPPAVX = true
				}
				if strings.Contains(e.Name(), LLamaCPPFallback) && !foundLCPPFallback {
					backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPFallback)
					foundLCPPFallback = true
				}
				if strings.Contains(e.Name(), LLamaCPPGRPC) && !foundLCPPGRPC {
					backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPGRPC)
					foundLCPPGRPC = true
				}
				if strings.Contains(e.Name(), LLamaCPPCUDA) && !foundLCPPCuda {
					backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPCUDA)
					foundLCPPCuda = true
				}
				if strings.Contains(e.Name(), LLamaCPPHipblas) && !foundLCPPHipblas {
					backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPHipblas)
					foundLCPPHipblas = true
				}
				if strings.Contains(e.Name(), LLamaCPPSycl16) && !foundSycl16 {
					backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPSycl16)
					foundSycl16 = true
				}
				if strings.Contains(e.Name(), LLamaCPPSycl32) && !foundSycl32 {
					backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPSycl32)
					foundSycl32 = true
				}
			}
		}
	}

	return backends, nil
}
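
// orderBackends returns the backends as an ordered list, so that they are
// tried in a sensible order: llama.cpp first, huggingface last.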
func orderBackends(backends map[string][]string) ([]string, error) {
	// Order the backends from the asset directory.
	// As we scan for backends, we want to keep some order in which they are tried:
	// for example, llama.cpp should be tried first, and the huggingface backend kept last.

	// set a priority list - the first entry has the highest priority
	priorityList := []string{
		// First llama.cpp (variants), with llama-ggml to follow.
		// We keep the fallback variant in the list so that, if the llama.cpp
		// variants that depend on shared libraries break, we still have a safety net.
		LLamaCPP, LlamaGGML, LLamaCPPFallback,
	}

	toTheEnd := []string{
		// last has to be huggingface
		LCHuggingFaceBackend,
	}

	// create an ordered map
	orderedBackends := orderedmap.NewOrderedMap[string, any]()
	// add priorityList first
	for _, p := range priorityList {
		if _, ok := backends[p]; ok {
			orderedBackends.Set(p, backends[p])
		}
	}

	for k, v := range backends {
		if !slices.Contains(toTheEnd, k) {
			if _, ok := orderedBackends.Get(k); !ok {
				orderedBackends.Set(k, v)
			}
		}
	}

	for _, t := range toTheEnd {
		if _, ok := backends[t]; ok {
			orderedBackends.Set(t, backends[t])
		}
	}

	return orderedBackends.Keys(), nil
}

// selectGRPCProcessByHostCapabilities selects the GRPC process to start based
// on the host's capabilities.
// Note: this is now relevant only for llama.cpp.
func selectGRPCProcessByHostCapabilities(backend, assetDir string, f16 bool) string {
	foundCUDA := false
	foundAMDGPU := false
	foundIntelGPU := false
	var grpcProcess string

	// Select the backend now just for llama.cpp
	if backend != LLamaCPP {
		return ""
	}

	// Note: this environment variable is read by LocalAI's llama.cpp grpc-server
	if os.Getenv("LLAMACPP_GRPC_SERVERS") != "" {
		log.Info().Msgf("[%s] attempting to load with GRPC variant", LLamaCPPGRPC)
		return backendPath(assetDir, LLamaCPPGRPC)
	}

	// Check for GPU binaries that are shipped with single-binary releases
	gpus, err := xsysinfo.GPUs()
	if err == nil {
		for _, gpu := range gpus {
			if strings.Contains(gpu.String(), "nvidia") {
				p := backendPath(assetDir, LLamaCPPCUDA)
				if _, err := os.Stat(p); err == nil {
					log.Info().Msgf("[%s] attempting to load with CUDA variant", backend)
					grpcProcess = p
					foundCUDA = true
				} else {
					log.Debug().Msgf("Nvidia GPU device found, but no embedded CUDA variant found. You can ignore this message if you are using a container with CUDA support")
				}
			}
			if strings.Contains(gpu.String(), "amd") {
				p := backendPath(assetDir, LLamaCPPHipblas)
				if _, err := os.Stat(p); err == nil {
					log.Info().Msgf("[%s] attempting to load with HIPBLAS variant", backend)
					grpcProcess = p
					foundAMDGPU = true
				} else {
					log.Debug().Msgf("AMD GPU device found, but no embedded HIPBLAS variant found. You can ignore this message if you are using a container with HIPBLAS support")
				}
			}
			if strings.Contains(gpu.String(), "intel") {
				backend := LLamaCPPSycl16
				if !f16 {
					backend = LLamaCPPSycl32
				}
				p := backendPath(assetDir, backend)
				if _, err := os.Stat(p); err == nil {
					log.Info().Msgf("[%s] attempting to load with Intel variant", backend)
					grpcProcess = p
					foundIntelGPU = true
				} else {
					log.Debug().Msgf("Intel GPU device found, but no embedded SYCL variant found. You can ignore this message if you are using a container with SYCL support")
				}
			}
		}
	}

	if foundCUDA || foundAMDGPU || foundIntelGPU {
		return grpcProcess
	}

	// No GPU found, or no specific binaries found: try to load the CPU variant(s)

	// Select a binary based on availability/capability
	selectedProcess := ""

	// Check if we have the fallback variant and select it as a baseline
	if _, err := os.Stat(backendPath(assetDir, LLamaCPPFallback)); err == nil {
		log.Debug().Msgf("[%s] %s variant available", LLamaCPPFallback, backend)
		selectedProcess = backendPath(assetDir, LLamaCPPFallback)
	}

	// Check if we have a native build (llama-cpp) and use that instead.
	// As a reminder, we do ultimately attempt again with the fallback variant
	// if things fail with what we select here.
	if _, err := os.Stat(backendPath(assetDir, LLamaCPP)); err == nil {
		log.Debug().Msgf("[%s] attempting to load with native variant", backend)
		selectedProcess = backendPath(assetDir, LLamaCPP)
	}

	// If we find any optimized binary, we use that
	if xsysinfo.HasCPUCaps(cpuid.AVX2) {
		p := backendPath(assetDir, LLamaCPPAVX2)
		if _, err := os.Stat(p); err == nil {
			log.Info().Msgf("[%s] attempting to load with AVX2 variant", backend)
			selectedProcess = p
		}
	} else if xsysinfo.HasCPUCaps(cpuid.AVX) {
		p := backendPath(assetDir, LLamaCPPAVX)
		if _, err := os.Stat(p); err == nil {
			log.Info().Msgf("[%s] attempting to load with AVX variant", backend)
			selectedProcess = p
		}
	}

	// Safety measure: check that the selected binary exists, otherwise return an empty string
	if _, err := os.Stat(selectedProcess); err == nil {
		return selectedProcess
	}

	return ""
}
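
// attemptLoadingOnFailure retries a failed load with the llama.cpp fallback
// variant; for any other backend the original error is returned as-is.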
func attemptLoadingOnFailure(backend string, ml *ModelLoader, o *Options, err error) (*Model, error) {
	// XXX: This is too backend-specific (llama-cpp); remove this bit or generalize it further.
	// We failed somehow starting the binary. For instance, we could be missing
	// some libraries if running in binary-only mode.
	// In this case, we attempt to load the model with the fallback variant.

	// If it is not the llama-cpp backend, return the error immediately
	if backend != LLamaCPP {
		return nil, err
	}

	log.Error().Msgf("[%s] Failed loading model, trying with fallback '%s', error: %s", backend, LLamaCPPFallback, err.Error())
	return ml.LoadModel(o.modelID, o.model, ml.grpcModel(LLamaCPPFallback, false, o))
}

// grpcModel starts the grpcModelProcess for the backend, and returns a grpc client.
// It also loads the model.
func (ml *ModelLoader) grpcModel(backend string, autodetect bool, o *Options) func(string, string, string) (*Model, error) {
	return func(modelID, modelName, modelFile string) (*Model, error) {
		log.Debug().Msgf("Loading Model %s with gRPC (file: %s) (backend: %s): %+v", modelID, modelFile, backend, *o)

		var client *Model

		getFreeAddress := func() (string, error) {
			port, err := freeport.GetFreePort()
			if err != nil {
				return "", fmt.Errorf("failed allocating free ports: %s", err.Error())
			}
			return fmt.Sprintf("127.0.0.1:%d", port), nil
		}

		// If no specific model path is set for transformers/HF, set it to the model path
		for _, env := range []string{"HF_HOME", "TRANSFORMERS_CACHE", "HUGGINGFACE_HUB_CACHE"} {
			if os.Getenv(env) == "" {
				err := os.Setenv(env, ml.ModelPath)
				if err != nil {
					log.Error().Err(err).Str("name", env).Str("modelPath", ml.ModelPath).Msg("unable to set environment variable to modelPath")
				}
			}
		}

		// Check if the backend is provided as external
		if uri, ok := o.externalBackends[backend]; ok {
			log.Debug().Msgf("Loading external backend: %s", uri)
			// check if the uri is a file or an address
			if fi, err := os.Stat(uri); err == nil {
				log.Debug().Msgf("external backend is a file: %+v", fi)
				serverAddress, err := getFreeAddress()
				if err != nil {
					return nil, fmt.Errorf("failed allocating free ports: %s", err.Error())
				}
				// Make sure the process is executable
				process, err := ml.startProcess(uri, modelID, serverAddress)
				if err != nil {
					log.Error().Err(err).Str("path", uri).Msg("failed to launch")
					return nil, err
				}

				log.Debug().Msgf("GRPC Service Started")

				client = NewModel(modelID, serverAddress, process)
			} else {
				log.Debug().Msg("external backend is a uri")
				// address
				client = NewModel(modelID, uri, nil)
			}
		} else {
			grpcProcess := backendPath(o.assetDir, backend)
			if err := utils.VerifyPath(grpcProcess, o.assetDir); err != nil {
				return nil, fmt.Errorf("referring to a backend not in asset dir: %s", err.Error())
			}

			if autodetect {
				// autodetect the GRPC process to start based on system capabilities
				if selectedProcess := selectGRPCProcessByHostCapabilities(backend, o.assetDir, o.gRPCOptions.F16Memory); selectedProcess != "" {
					grpcProcess = selectedProcess
				}
			}

			// Check if the file exists
			if _, err := os.Stat(grpcProcess); os.IsNotExist(err) {
				return nil, fmt.Errorf("backend not found: %s", grpcProcess)
			}

			serverAddress, err := getFreeAddress()
			if err != nil {
				return nil, fmt.Errorf("failed allocating free ports: %s", err.Error())
			}

			args := []string{}
			// Load the ld.so if it exists
			args, grpcProcess = library.LoadLDSO(o.assetDir, args, grpcProcess)

			// Make sure the process is executable in any circumstance
			process, err := ml.startProcess(grpcProcess, modelID, serverAddress, args...)
			if err != nil {
				return nil, err
			}

			log.Debug().Msgf("GRPC Service Started")

			client = NewModel(modelID, serverAddress, process)
		}

		log.Debug().Msgf("Wait for the service to start up")

		// Wait for the service to start up
		ready := false
		for i := 0; i < o.grpcAttempts; i++ {
			alive, err := client.GRPC(o.parallelRequests, ml.wd).HealthCheck(context.Background())
			if alive {
				log.Debug().Msgf("GRPC Service Ready")
				ready = true
				break
			}
			if err != nil && i == o.grpcAttempts-1 {
				log.Error().Err(err).Msg("failed starting/connecting to the gRPC service")
			}
			time.Sleep(time.Duration(o.grpcAttemptsDelay) * time.Second)
		}

		if !ready {
			log.Debug().Msgf("GRPC Service NOT ready")
			if process := client.Process(); process != nil {
				process.Stop()
			}
			return nil, fmt.Errorf("grpc service not ready")
		}

		options := *o.gRPCOptions
		options.Model = modelName
		options.ModelFile = modelFile
		options.ModelPath = ml.ModelPath

		log.Debug().Msgf("GRPC: Loading model with options: %+v", options)

		res, err := client.GRPC(o.parallelRequests, ml.wd).LoadModel(o.context, &options)
		if err != nil {
			if process := client.Process(); process != nil {
				process.Stop()
			}
			return nil, fmt.Errorf("could not load model: %w", err)
		}
		if !res.Success {
			if process := client.Process(); process != nil {
				process.Stop()
			}
			return nil, fmt.Errorf("could not load model (no success): %s", res.Message)
		}

		return client, nil
	}
}
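
// ListAvailableBackends returns the backends found in the asset directory,
// ordered by loading priority.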
func (ml *ModelLoader) ListAvailableBackends(assetdir string) ([]string, error) {
	backends, err := backendsInAssetDir(assetdir)
	if err != nil {
		return nil, err
	}
	return orderBackends(backends)
}
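
// backendLoader resolves the requested backend (including aliases), starts the
// matching gRPC process, and loads the model on it.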
func (ml *ModelLoader) backendLoader(opts ...Option) (client grpc.Backend, err error) {
	o := NewOptions(opts...)

	log.Info().Msgf("Loading model '%s' with backend %s", o.modelID, o.backendString)

	backend := strings.ToLower(o.backendString)
	if realBackend, exists := Aliases[backend]; exists {
		// log before reassigning, so both the alias and the real backend show up
		log.Debug().Msgf("%s is an alias of %s", backend, realBackend)
		backend = realBackend
	}

	ml.stopActiveBackends(o.modelID, o.singleActiveBackend)

	var backendToConsume string

	switch backend {
	case PiperBackend:
		o.gRPCOptions.LibrarySearchPath = filepath.Join(o.assetDir, "backend-assets", "espeak-ng-data")
		backendToConsume = PiperBackend
	default:
		backendToConsume = backend
	}

	model, err := ml.LoadModel(o.modelID, o.model, ml.grpcModel(backendToConsume, AutoDetect, o))
	if err != nil {
		model, err = attemptLoadingOnFailure(backend, ml, o, err)
		if err != nil {
			return nil, err
		}
	}

	return model.GRPC(o.parallelRequests, ml.wd), nil
}
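
// stopActiveBackends stops all running backends except the one serving
// modelID, when only a single active backend is allowed.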
func (ml *ModelLoader) stopActiveBackends(modelID string, singleActiveBackend bool) {
	// If we can have only one backend active, kill all the others (except external backends)
	if singleActiveBackend {
		log.Debug().Msgf("Stopping all backends except '%s'", modelID)
		err := ml.StopGRPC(allExcept(modelID))
		if err != nil {
			log.Error().Err(err).Str("keptModel", modelID).Msg("error while shutting down all backends except for the kept model - continuing")
		}
	}
}
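
// Load returns a gRPC client for the requested model, reusing an already
// loaded instance when possible. With no explicit backend set, the available
// backends are tried in priority order.
//
// A minimal usage sketch; WithModelID and WithModel are assumed here to be
// Option helpers matching the o.modelID/o.model fields, alongside the
// WithBackendString helper used below:
//
//	backend, err := ml.Load(
//		WithModelID("my-model"),     // assumed helper
//		WithModel("model.gguf"),     // assumed helper
//		WithBackendString(LLamaCPP), // optional: omit to try backends in order
//	)
//	if err != nil {
//		log.Error().Err(err).Msg("could not load model")
//	}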
func (ml *ModelLoader) Load(opts ...Option) (grpc.Backend, error) {
	o := NewOptions(opts...)

	// Return earlier if we have a model already loaded
	// (avoid looping through all the backends)
	if m := ml.CheckIsLoaded(o.modelID); m != nil {
		log.Debug().Msgf("Model '%s' already loaded", o.modelID)
		return m.GRPC(o.parallelRequests, ml.wd), nil
	}

	ml.stopActiveBackends(o.modelID, o.singleActiveBackend)

	if o.backendString != "" {
		return ml.backendLoader(opts...)
	}

	var err error

	// get the backends embedded in the binary
	autoLoadBackends, err := ml.ListAvailableBackends(o.assetDir)
	if err != nil {
		return nil, err
	}

	// append the external backends supplied by the user via the CLI
	for _, b := range o.externalBackends {
		autoLoadBackends = append(autoLoadBackends, b)
	}

	log.Debug().Msgf("Loading from the following backends (in order): %+v", autoLoadBackends)
	log.Info().Msgf("Trying to load the model '%s' with the backends '%+v'", o.modelID, autoLoadBackends)

	for _, key := range autoLoadBackends {
		log.Info().Msgf("[%s] Attempting to load", key)
		options := append(opts, []Option{
			WithBackendString(key),
		}...)

		model, modelerr := ml.backendLoader(options...)
		if modelerr == nil && model != nil {
			log.Info().Msgf("[%s] Loads OK", key)
			return model, nil
		} else if modelerr != nil {
			err = errors.Join(err, fmt.Errorf("[%s]: %w", key, modelerr))
			log.Info().Msgf("[%s] Fails: %s", key, modelerr.Error())
		} else if model == nil {
			err = errors.Join(err, fmt.Errorf("backend %s returned no usable model", key))
			log.Info().Msgf("[%s] Fails: %s", key, "backend returned no usable model")
		}
	}

	return nil, fmt.Errorf("could not load model - all backends returned error: %s", err.Error())
}