mirror of
https://github.com/mudler/LocalAI.git
synced 2024-12-19 04:37:53 +00:00
af9e5a2d05
* Revert "fix(fncall): fix regression introduced in #1963 (#2048)". This reverts commit 6b06d4e0af.
* Revert "fix: action-tmate back to upstream, dead code removal (#2038)". This reverts commit fdec8a9d00.
* Revert "feat(grpc): return consumed token count and update response accordingly (#2035)". This reverts commit e843d7df0e.
* Revert "refactor: backend/service split, channel-based llm flow (#1963)". This reverts commit eed5706994.
* feat(grpc): return consumed token count and update response accordingly. Fixes: #1920. Signed-off-by: Ettore Di Giacinto <mudler@localai.io> --------- Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
55 lines
1.7 KiB
Go
55 lines
1.7 KiB
Go
package cli
|
|
|
|
import (
|
|
"context"
|
|
"errors"
|
|
"fmt"
|
|
|
|
"github.com/go-skynet/LocalAI/core/backend"
|
|
"github.com/go-skynet/LocalAI/core/config"
|
|
"github.com/go-skynet/LocalAI/pkg/model"
|
|
)
|
|
|
|
// TranscriptCMD holds the command-line flags for the "transcript" subcommand,
// which transcribes an audio file using a speech-to-text backend
// (whisper by default). Field tags are consumed by the CLI parser.
type TranscriptCMD struct {
	// Filename is the positional argument: the audio file to transcribe.
	Filename string `arg:""`

	// NOTE(review): Backend is parsed from the flags but Run does not
	// currently reference it — confirm whether it should select the backend.
	Backend string `short:"b" default:"whisper" help:"Backend to run the transcription model"`
	// Fixed help text: this command performs transcription, not TTS.
	Model             string `short:"m" required:"" help:"Model name to run the transcription"`
	Language          string `short:"l" help:"Language of the audio file"`
	Threads           int    `short:"t" default:"1" help:"Number of threads used for parallel computation"`
	ModelsPath        string `env:"LOCALAI_MODELS_PATH,MODELS_PATH" type:"path" default:"${basepath}/models" help:"Path containing models used for inferencing" group:"storage"`
	BackendAssetsPath string `env:"LOCALAI_BACKEND_ASSETS_PATH,BACKEND_ASSETS_PATH" type:"path" default:"/tmp/localai/backend_data" help:"Path used to extract libraries that are required by some of the backends in runtime" group:"storage"`
}
|
|
|
|
func (t *TranscriptCMD) Run(ctx *Context) error {
|
|
opts := &config.ApplicationConfig{
|
|
ModelPath: t.ModelsPath,
|
|
Context: context.Background(),
|
|
AssetsDestination: t.BackendAssetsPath,
|
|
}
|
|
|
|
cl := config.NewBackendConfigLoader()
|
|
ml := model.NewModelLoader(opts.ModelPath)
|
|
if err := cl.LoadBackendConfigsFromPath(t.ModelsPath); err != nil {
|
|
return err
|
|
}
|
|
|
|
c, exists := cl.GetBackendConfig(t.Model)
|
|
if !exists {
|
|
return errors.New("model not found")
|
|
}
|
|
|
|
c.Threads = &t.Threads
|
|
|
|
defer ml.StopAllGRPC()
|
|
|
|
tr, err := backend.ModelTranscription(t.Filename, t.Language, ml, c, opts)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
for _, segment := range tr.Segments {
|
|
fmt.Println(segment.Start.String(), "-", segment.Text)
|
|
}
|
|
return nil
|
|
}
|