mirror of https://github.com/ggerganov/whisper.cpp.git
convert : update convert-h5-to-ggml.py (#2840)
improved handling of missing max_length
commit 4854789751
parent e0f3c9d4dd
@@ -85,9 +85,15 @@ encoder_added = json.load((dir_model / "added_tokens.json").open( "r", encoding=
 
 hparams = json.load((dir_model / "config.json").open("r", encoding="utf8"))
 
-# Add this block to handle missing 'max_length'
-if "max_length" not in hparams:
-    hparams["max_length"] = hparams.get("max_target_positions", 448)
+if "max_length" not in hparams or hparams["max_length"] is None:
+    hparams["max_length"] = hparams.get("max_target_positions", 448) # Default to 448 if missing
+elif not isinstance(hparams["max_length"], int):
+    try:
+        hparams["max_length"] = int(hparams["max_length"]) # Convert if necessary
+    except ValueError:
+        print(f"Warning: Invalid max_length value '{hparams['max_length']}', using default 448.")
+        hparams["max_length"] = 448
+
 model = WhisperForConditionalGeneration.from_pretrained(dir_model)
 
 #code.interact(local=locals())
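Read on its own, the new block first covers the missing/None case, then coerces non-integer values, and finally falls back to 448 when conversion fails. Below is a minimal standalone sketch of that same fallback, assuming a hypothetical resolve_max_length helper and example config dicts that are not part of convert-h5-to-ggml.py:

# Minimal sketch of the max_length fallback logic from the change above.
# resolve_max_length and the sample dicts are illustrative only.

def resolve_max_length(hparams: dict, default: int = 448) -> int:
    """Return a usable integer max_length from a Whisper config dict."""
    value = hparams.get("max_length")
    if value is None:
        # Missing or explicit None: fall back to max_target_positions, then to the default.
        return hparams.get("max_target_positions", default)
    if not isinstance(value, int):
        try:
            return int(value)  # Convert e.g. a string "224" to 224
        except ValueError:
            print(f"Warning: Invalid max_length value '{value}', using default {default}.")
            return default
    return value

if __name__ == "__main__":
    print(resolve_max_length({"max_target_positions": 448}))  # 448: key missing
    print(resolve_max_length({"max_length": None}))           # 448: explicit None
    print(resolve_max_length({"max_length": "224"}))          # 224: string converted
    print(resolve_max_length({"max_length": "abc"}))          # 448: invalid, warning printed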