fix(worker): use dynaload for single binaries (#2620)
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
parent f569237a50 · commit 8d84dd4f88
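What this fix does: the single-binary worker commands now launch the bundled llama-cpp-rpc-server through library.LoadLDSO, so that when the extracted backend assets ship their own dynamic loader (ld.so), the server is executed through that loader instead of directly. The sketch below assembles what LoadLDSO does from the hunks in this commit; the ldPath location and the final return statement are assumptions for illustration and are not part of the diff itself.

// Sketch only, reconstructed from the hunks below; not the exact source file.
package library

import (
    "os"
    "path/filepath"

    "github.com/rs/zerolog/log"
)

// LoadLDSO checks whether the backend assets ship their own dynamic loader.
// If they do, the target binary is demoted to the first argument and ld.so
// becomes the process to execute, so a single-binary build can run the
// extracted llama-cpp-rpc-server against the bundled libraries.
func LoadLDSO(assetDir string, args []string, grpcProcess string) ([]string, string) {
    // Assumed location of the bundled loader; the real path is defined
    // elsewhere in pkg/library and is not visible in this diff.
    ldPath := filepath.Join(assetDir, "backend-assets", "lib", "ld.so")

    if _, err := os.Stat(ldPath); err == nil {
        log.Debug().Msgf("ld.so found")
        // Run the grpc process through ld.so: "ld.so <binary> <args...>"
        args = append([]string{grpcProcess}, args...)
        grpcProcess = ldPath
    }

    return args, grpcProcess
}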
@@ -7,6 +7,7 @@ import (
 	cliContext "github.com/go-skynet/LocalAI/core/cli/context"
 	"github.com/go-skynet/LocalAI/pkg/assets"
+	"github.com/go-skynet/LocalAI/pkg/library"
 	"github.com/rs/zerolog/log"
 )
@@ -27,17 +28,18 @@ func (r *LLamaCPP) Run(ctx *cliContext.Context) error {
 		return fmt.Errorf("usage: local-ai worker llama-cpp-rpc -- <llama-rpc-server-args>")
 	}
+
+	grpcProcess := assets.ResolvePath(
+		r.BackendAssetsPath,
+		"util",
+		"llama-cpp-rpc-server",
+	)
+
+	args := os.Args[4:]
+	args, grpcProcess = library.LoadLDSO(r.BackendAssetsPath, args, grpcProcess)
+
+	args = append([]string{grpcProcess}, args...)
 	return syscall.Exec(
-		assets.ResolvePath(
-			r.BackendAssetsPath,
-			"util",
-			"llama-cpp-rpc-server",
-		),
-		append([]string{
-			assets.ResolvePath(
-				r.BackendAssetsPath,
-				"util",
-				"llama-cpp-rpc-server",
-			)}, os.Args[4:]...),
+		grpcProcess,
+		args,
 		os.Environ())
 }
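In the LLamaCPP worker the server path is now resolved once, library.LoadLDSO rewrites the (args, process) pair, and the process path is prepended so that syscall.Exec receives a complete argv. A standalone sketch (not LocalAI code; names and paths are illustrative) of the argv convention this relies on:

package main

import (
    "os"
    "syscall"
)

// execThroughLoader shows the argv shape the worker builds when a bundled
// ld.so is present: the loader is the program being exec'd, and the real
// binary is its first argument, i.e. "ld.so <binary> <args...>".
func execThroughLoader(ldso, binary string, args []string) error {
    argv := append([]string{ldso, binary}, args...) // argv[0] is the loader itself
    return syscall.Exec(ldso, argv, os.Environ())
}

func main() {
    // Illustrative values only; the real paths come from assets.ResolvePath.
    _ = execThroughLoader(
        "/tmp/backend-assets/lib/ld.so",
        "/tmp/backend-assets/util/llama-cpp-rpc-server",
        os.Args[1:])
}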
@@ -13,6 +13,7 @@ import (
 	cliContext "github.com/go-skynet/LocalAI/core/cli/context"
 	"github.com/go-skynet/LocalAI/core/p2p"
 	"github.com/go-skynet/LocalAI/pkg/assets"
+	"github.com/go-skynet/LocalAI/pkg/library"
 	"github.com/phayes/freeport"
 	"github.com/rs/zerolog/log"
 )
@@ -71,13 +72,18 @@ func (r *P2P) Run(ctx *cliContext.Context) error {
 	go func() {
 		for {
 			log.Info().Msgf("Starting llama-cpp-rpc-server on '%s:%d'", address, port)
+
+			grpcProcess := assets.ResolvePath(
+				r.BackendAssetsPath,
+				"util",
+				"llama-cpp-rpc-server",
+			)
+
+			args := append([]string{"--host", address, "--port", fmt.Sprint(port)}, r.ExtraLLamaCPPArgs...)
+			args, grpcProcess = library.LoadLDSO(r.BackendAssetsPath, args, grpcProcess)
+
 			cmd := exec.Command(
-				assets.ResolvePath(
-					r.BackendAssetsPath,
-					"util",
-					"llama-cpp-rpc-server",
-				),
-				append([]string{"--host", address, "--port", fmt.Sprint(port)}, r.ExtraLLamaCPPArgs...)...,
+				grpcProcess, args...,
 			)
 
 			cmd.Env = os.Environ()
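The P2P worker gets the same treatment, but it spawns the server with os/exec instead of replacing the current process. Unlike syscall.Exec, exec.Command sets argv[0] to the command name itself, so after LoadLDSO the resulting invocation is again "ld.so llama-cpp-rpc-server --host ... --port ...". A minimal sketch of that call shape (illustrative values, not LocalAI code):

package main

import (
    "fmt"
    "os"
    "os/exec"
)

func main() {
    // After LoadLDSO, grpcProcess may be the bundled ld.so and args may start
    // with the real server binary; values here are made up for illustration.
    grpcProcess := "/tmp/backend-assets/lib/ld.so"
    args := []string{
        "/tmp/backend-assets/util/llama-cpp-rpc-server",
        "--host", "127.0.0.1", "--port", fmt.Sprint(50052),
    }

    cmd := exec.Command(grpcProcess, args...)
    cmd.Env = os.Environ()
    cmd.Stdout = os.Stdout
    cmd.Stderr = os.Stderr

    if err := cmd.Start(); err != nil {
        fmt.Fprintln(os.Stderr, "failed to start llama-cpp-rpc-server:", err)
        return
    }
    cmd.Wait()
}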
@@ -86,7 +92,7 @@ func (r *P2P) Run(ctx *cliContext.Context) error {
 			cmd.Stdout = os.Stdout
 
 			if err := cmd.Start(); err != nil {
-				log.Error().Err(err).Msg("Failed to start llama-cpp-rpc-server")
+				log.Error().Any("grpcProcess", grpcProcess).Any("args", args).Err(err).Msg("Failed to start llama-cpp-rpc-server")
 			}
 
 			cmd.Wait()
@@ -49,7 +49,7 @@ func LoadLDSO(assetDir string, args []string, grpcProcess string) ([]string, str
 	if _, err := os.Stat(ldPath); err == nil {
 		log.Debug().Msgf("ld.so found")
 		// We need to run the grpc process with the ld.so
-		args = append(args, grpcProcess)
+		args = append([]string{grpcProcess}, args...)
 		grpcProcess = ldPath
 	}
 
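This last hunk is the actual bug being fixed: LoadLDSO used to append the binary after the caller-supplied flags, so the loader was handed "ld.so --host ... llama-cpp-rpc-server"; prepending it restores the order ld.so expects. A tiny before/after illustration with made-up values:

package main

import "fmt"

func main() {
    args := []string{"--host", "127.0.0.1", "--port", "50052"}
    grpcProcess := "/tmp/backend-assets/util/llama-cpp-rpc-server"

    // Before the fix: the binary ends up after its own flags.
    before := append(append([]string{}, args...), grpcProcess)

    // After the fix: the binary comes first, flags follow.
    after := append([]string{grpcProcess}, args...)

    fmt.Println("before:", before) // [--host 127.0.0.1 --port 50052 /tmp/.../llama-cpp-rpc-server]
    fmt.Println("after: ", after)  // [/tmp/.../llama-cpp-rpc-server --host 127.0.0.1 --port 50052]
}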