From 49706a658a468a04543d515aa754baee70734f3b Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Wed, 23 Nov 2022 17:17:01 +0200
Subject: [PATCH] minor : updates few prints + fix buttons in whisper.wasm

---
 examples/whisper.wasm/index-tmpl.html | 5 +++++
 whisper.cpp                           | 4 ++--
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/examples/whisper.wasm/index-tmpl.html b/examples/whisper.wasm/index-tmpl.html
index 182527f5..5511f696 100644
--- a/examples/whisper.wasm/index-tmpl.html
+++ b/examples/whisper.wasm/index-tmpl.html
@@ -297,6 +297,11 @@
                 storeFS(fname, buf);
             }
             reader.readAsArrayBuffer(file);
+
+            document.getElementById('fetch-whisper-tiny-en').style.display = 'none';
+            document.getElementById('fetch-whisper-base-en').style.display = 'none';
+            document.getElementById('fetch-whisper-tiny').style.display = 'none';
+            document.getElementById('fetch-whisper-base').style.display = 'none';
         }
 
         // fetch a remote file from remote URL using the Fetch API
diff --git a/whisper.cpp b/whisper.cpp
index 6c2e0e0e..4f23cde4 100644
--- a/whisper.cpp
+++ b/whisper.cpp
@@ -1042,7 +1042,7 @@ static bool whisper_model_load(const std::string & fname, whisper_context & wctx
 
             fin.read(reinterpret_cast<char *>(tensor->data), ggml_nbytes(tensor));
 
-            //printf("%24s - [%5d, %5d], type = %6s, %6.2f MB\n", name.data(), ne[0], ne[1], ftype == 0 ? "float" : "f16", ggml_nbytes(tensor)/1024.0/1024.0);
+            //printf("%48s - [%5d, %5d, %5d], type = %6s, %6.2f MB\n", name.data(), ne[0], ne[1], ne[2], ftype == 0 ? "float" : "f16", ggml_nbytes(tensor)/1024.0/1024.0);
             total_size += ggml_nbytes(tensor);
             model.n_loaded++;
         }
@@ -2708,7 +2708,7 @@ int whisper_full(
 
             //{
             //    const auto tt = token.pt > 0.10 ? ctx->vocab.id_to_token[token.tid] : "[?]";
-            //    printf("%s: %10s %6.3f '%s'\n", __func__, tt.c_str(), token.pt, ctx->vocab.id_to_token[token.id].c_str());
+            //    printf("%s: %10s %6d %6.3f '%s'\n", __func__, tt.c_str(), token.id, token.pt, ctx->vocab.id_to_token[token.id].c_str());
             //}
 
             // end of text token