mirror of https://github.com/ggerganov/whisper.cpp.git
openvino : Pass CPU threads parameter
parent 37c88027e1
commit 60c293e943
@@ -1013,7 +1013,7 @@ int main(int argc, char ** argv) {
     }
 
     // initialize openvino encoder. this has no effect on whisper.cpp builds that don't have OpenVINO configured
-    whisper_ctx_init_openvino_encoder(ctx, nullptr, params.openvino_encode_device.c_str(), nullptr);
+    whisper_ctx_init_openvino_encoder(ctx, nullptr, params.openvino_encode_device.c_str(), nullptr, params.n_threads);
 
     if (!params.grammar.empty()) {
         auto & grammar = params.grammar_parsed;
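For context, params.n_threads here is the same value the examples expose through the -t/--threads flag; its default is typically derived from the hardware, roughly as in this sketch (the helper name is illustrative, not from the repository):

    #include <algorithm>
    #include <cstdint>
    #include <thread>

    // Roughly how the examples pick a default thread count; -t/--threads overrides it.
    static int32_t default_n_threads() {
        return std::min(4, (int32_t) std::thread::hardware_concurrency());
    }

With this commit, the OpenVINO encoder inherits that same thread count instead of choosing its own.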
@@ -560,7 +560,7 @@ int main(int argc, char ** argv) {
     }
 
     // initialize openvino encoder. this has no effect on whisper.cpp builds that don't have OpenVINO configured
-    whisper_ctx_init_openvino_encoder(ctx, nullptr, params.openvino_encode_device.c_str(), nullptr);
+    whisper_ctx_init_openvino_encoder(ctx, nullptr, params.openvino_encode_device.c_str(), nullptr, params.n_threads);
 
     Server svr;
     svr.set_default_headers({{"Server", "whisper.cpp"},
@@ -981,7 +981,7 @@ int main(int argc, char ** argv) {
        }
 
        // initialize openvino encoder. this has no effect on whisper.cpp builds that don't have OpenVINO configured
-       whisper_ctx_init_openvino_encoder(ctx, nullptr, params.openvino_encode_device.c_str(), nullptr);
+       whisper_ctx_init_openvino_encoder(ctx, nullptr, params.openvino_encode_device.c_str(), nullptr, params.n_threads);
 
        const std::string success = "Load was successful!";
        res.set_content(success, "application/text");
@@ -245,13 +245,15 @@ extern "C" {
         struct whisper_state * state,
         const char * model_path,
         const char * device,
-        const char * cache_dir);
+        const char * cache_dir,
+        int n_threads);
 
     WHISPER_API int whisper_ctx_init_openvino_encoder(
         struct whisper_context * ctx,
         const char * model_path,
         const char * device,
-        const char * cache_dir);
+        const char * cache_dir,
+        int n_threads);
 
     // Frees all allocated memory
     WHISPER_API void whisper_free (struct whisper_context * ctx);
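Taken together, the header change means both public entry points now take a trailing thread count. A minimal caller sketch (the model file name and the thread count of 4 are placeholders, and error handling is kept to the bare minimum):

    #include "whisper.h"

    int main() {
        // Placeholder ggml model path; any whisper ggml model works here.
        struct whisper_context_params cparams = whisper_context_default_params();
        struct whisper_context * ctx = whisper_init_from_file_with_params("ggml-base.en.bin", cparams);
        if (!ctx) {
            return 1;
        }

        // nullptr model_path: derive the OpenVINO IR path from the ggml model path;
        // nullptr cache_dir: no compiled-blob cache.
        // The final argument is the new n_threads parameter added by this commit.
        whisper_ctx_init_openvino_encoder(ctx, nullptr, "CPU", nullptr, 4);

        whisper_free(ctx);
        return 0;
    }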
@@ -9,7 +9,8 @@ struct whisper_openvino_context {
 
 struct whisper_openvino_context * whisper_openvino_init(const char* path_model,
     const char* device,
-    const char* cache_dir)
+    const char* cache_dir,
+    int n_threads)
 {
     if (!path_model || !device) {
         fprintf(stderr, "%s: path_model and/or device is null\n", __func__);
@@ -29,6 +30,10 @@ struct whisper_openvino_context * whisper_openvino_init(const char* path_model,
         core.set_property(ov::cache_dir(cache_dir));
     }
 
+    if (strncmp(device, "CPU", 3) == 0) {
+        core.set_property(ov::inference_num_threads(n_threads));
+    }
+
     //Read the OpenVINO encoder IR (.xml/.bin) from disk, producing an ov::Model object.
     std::shared_ptr<ov::Model> model = core.read_model(path_model);
 
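The new branch uses ov::inference_num_threads, a standard OpenVINO property that caps the number of CPU inference threads. A standalone sketch of the same idea, outside whisper.cpp (the model path and thread count are placeholders):

    #include <openvino/openvino.hpp>

    int main() {
        ov::Core core;

        // Cap CPU inference at 4 threads. The commit sets this on the core as a whole;
        // it can also be scoped to a device: core.set_property("CPU", ov::inference_num_threads(4));
        core.set_property(ov::inference_num_threads(4));

        // "encoder.xml" is a placeholder for any OpenVINO IR model.
        std::shared_ptr<ov::Model> model = core.read_model("encoder.xml");
        ov::CompiledModel compiled = core.compile_model(model, "CPU");
        (void) compiled;
        return 0;
    }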
@@ -11,7 +11,8 @@ struct whisper_openvino_context;
 // path to cache_dir. Returns null upon failure.
 struct whisper_openvino_context * whisper_openvino_init(const char * path_model,
     const char * device,
-    const char * cache_dir);
+    const char * cache_dir,
+    int n_threads);
 
 // clean up a ctx previously returned from whisper_openvino_init()
 void whisper_openvino_free(struct whisper_openvino_context * ctx);
@@ -3423,13 +3423,15 @@ int whisper_ctx_init_openvino_encoder_with_state(
         struct whisper_state * state,
         const char * model_path,
         const char * device,
-        const char * cache_dir) {
+        const char * cache_dir,
+        int n_threads) {
 #ifndef WHISPER_USE_OPENVINO
     (void)(ctx);
     (void)(state);
     (void)(model_path);
     (void)(device);
     (void)(cache_dir);
+    (void)(n_threads);
 
     return 1;
 #else
@@ -3457,7 +3459,7 @@ int whisper_ctx_init_openvino_encoder_with_state(
     WHISPER_LOG_INFO("%s: loading OpenVINO model from '%s'\n", __func__, path_encoder.c_str());
     WHISPER_LOG_INFO("%s: first run on a device may take a while ...\n", __func__);
 
-    state->ctx_openvino = whisper_openvino_init(path_encoder.c_str(), device, path_cache.c_str());
+    state->ctx_openvino = whisper_openvino_init(path_encoder.c_str(), device, path_cache.c_str(), n_threads);
     if (!state->ctx_openvino) {
         WHISPER_LOG_ERROR("%s: failed to init OpenVINO encoder from '%s'\n", __func__, path_encoder.c_str());
         return 1;
@@ -3473,8 +3475,9 @@ int whisper_ctx_init_openvino_encoder(
         struct whisper_context * ctx,
         const char * model_path,
         const char * device,
-        const char * cache_dir) {
-    return whisper_ctx_init_openvino_encoder_with_state(ctx, ctx->state, model_path, device, cache_dir);
+        const char * cache_dir,
+        int n_threads) {
+    return whisper_ctx_init_openvino_encoder_with_state(ctx, ctx->state, model_path, device, cache_dir, n_threads);
 }
 
 struct whisper_context_params whisper_context_default_params() {
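Note that on builds without OpenVINO, the #ifndef WHISPER_USE_OPENVINO stub above swallows the new parameter and the call still just returns 1, so existing callers need no extra guarding. Enabling the OpenVINO encoder is typically a configure-time choice (for example, passing something like -DWHISPER_OPENVINO=1 to cmake, as described in the project README).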