package backend

import (
	"github.com/mudler/LocalAI/core/config"
	"github.com/mudler/LocalAI/core/schema"
	"github.com/mudler/LocalAI/pkg/grpc"
	model "github.com/mudler/LocalAI/pkg/model"
)
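
// ModelTokenize tokenizes the input string s with the tokenizer of the model
// described in backendConfig, loading the model on demand through the given
// loader.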
func ModelTokenize(s string, loader *model.ModelLoader, backendConfig config.BackendConfig, appConfig *config.ApplicationConfig) (schema.TokenizeResponse, error) {
	modelFile := backendConfig.Model

	var inferenceModel grpc.Backend
	var err error

	opts := ModelOptions(backendConfig, appConfig, model.WithModel(modelFile))

	// If a backend is configured explicitly, load through it; otherwise
	// let the loader pick its default.
	if backendConfig.Backend != "" {
		opts = append(opts, model.WithBackendString(backendConfig.Backend))
	}
	inferenceModel, err = loader.Load(opts...)
	if err != nil {
		return schema.TokenizeResponse{}, err
	}

	// Build the gRPC predict options from the model config and set the
	// text to tokenize as the prompt.
	predictOptions := gRPCPredictOpts(backendConfig, loader.ModelPath)
	predictOptions.Prompt = s

	// Tokenize the string via the backend's tokenizer.
	resp, err := inferenceModel.TokenizeString(appConfig.Context, predictOptions)
	if err != nil {
		return schema.TokenizeResponse{}, err
	}

	return schema.TokenizeResponse{
		Tokens: resp.Tokens,
	}, nil
}
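
// Usage sketch (illustrative only; assumes the caller already holds a
// *model.ModelLoader, a config.BackendConfig for the target model, and the
// *config.ApplicationConfig — exactly the values this function takes):
//
//	resp, err := ModelTokenize("hello world", loader, backendConfig, appConfig)
//	if err != nil {
//		// handle error
//	}
//	fmt.Println(resp.Tokens) // token IDs produced by the model's tokenizer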