mirror of
https://github.com/ggerganov/whisper.cpp.git
synced 2024-12-19 04:37:51 +00:00
openvino : fix convert-whisper-to-openvino.py (#1890)
Fixes issue #1870: conversion from Whisper to OpenVINO failed. convert-whisper-to-openvino.py stopped working with OpenVINO version 2023.0.0-10926-b4452d56304-releases/2023/0. The error was: TypeError: load(): incompatible function arguments. The following argument types are supported: 1. (self: openvino._pyopenvino.FrontEnd, path: object) -> ov::frontend::InputModel. Tested successfully with a large-v3 conversion. Co-authored-by: Stefan Grundmann <grundmanns@sandiego.gov>
This commit is contained in:
parent
c56344b509
commit
eb23f4ef16
@ -3,6 +3,7 @@ import torch
|
|||||||
from whisper import load_model
|
from whisper import load_model
|
||||||
import os
|
import os
|
||||||
from openvino.tools import mo
|
from openvino.tools import mo
|
||||||
|
from openvino.frontend import FrontEndManager
|
||||||
from openvino.runtime import serialize
|
from openvino.runtime import serialize
|
||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
@ -11,7 +12,7 @@ def convert_encoder(hparams, encoder, mname):
|
|||||||
|
|
||||||
mel = torch.zeros((1, hparams.n_mels, 3000))
|
mel = torch.zeros((1, hparams.n_mels, 3000))
|
||||||
|
|
||||||
onnx_folder=os.path.join(os.path.dirname(__file__),"onnx_encoder")
|
onnx_folder = os.path.join(os.path.dirname(__file__), "onnx_encoder")
|
||||||
|
|
||||||
#create a directory to store the onnx model, and other collateral that is saved during onnx export procedure
|
#create a directory to store the onnx model, and other collateral that is saved during onnx export procedure
|
||||||
if not os.path.isdir(onnx_folder):
|
if not os.path.isdir(onnx_folder):
|
||||||
@ -19,6 +20,7 @@ def convert_encoder(hparams, encoder, mname):
|
|||||||
|
|
||||||
onnx_path = os.path.join(onnx_folder, "whisper_encoder.onnx")
|
onnx_path = os.path.join(onnx_folder, "whisper_encoder.onnx")
|
||||||
|
|
||||||
|
# Export the PyTorch model to ONNX
|
||||||
torch.onnx.export(
|
torch.onnx.export(
|
||||||
encoder,
|
encoder,
|
||||||
mel,
|
mel,
|
||||||
@ -27,11 +29,16 @@ def convert_encoder(hparams, encoder, mname):
|
|||||||
output_names=["output_features"]
|
output_names=["output_features"]
|
||||||
)
|
)
|
||||||
|
|
||||||
# use model optimizer to convert onnx to OpenVINO IR format
|
# Convert ONNX to OpenVINO IR format using the frontend
|
||||||
encoder_model = mo.convert_model(onnx_path, compress_to_fp16=True)
|
fem = FrontEndManager()
|
||||||
serialize(encoder_model, xml_path=os.path.join(os.path.dirname(__file__),"ggml-" + mname + "-encoder-openvino.xml"))
|
onnx_fe = fem.load_by_framework("onnx")
|
||||||
|
onnx_model = onnx_fe.load(onnx_path)
|
||||||
|
ov_model = onnx_fe.convert(onnx_model)
|
||||||
|
|
||||||
#cleanup
|
# Serialize the OpenVINO model to XML and BIN files
|
||||||
|
serialize(ov_model, xml_path=os.path.join(os.path.dirname(__file__), "ggml-" + mname + "-encoder-openvino.xml"))
|
||||||
|
|
||||||
|
# Cleanup
|
||||||
if os.path.isdir(onnx_folder):
|
if os.path.isdir(onnx_folder):
|
||||||
shutil.rmtree(onnx_folder)
|
shutil.rmtree(onnx_folder)
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user