#include "ggml.h"
#include "gpt-2.h"
#include "whisper.h"

#include <emscripten.h>
#include <emscripten/bind.h>

#include <atomic>
#include <chrono>
#include <cmath>
#include <cstdio>
#include <mutex>
#include <string>
#include <thread>
#include <vector>
#include <regex>

constexpr int N_THREAD = 8;
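
// State shared between the JS-facing bindings below and the talk_main() worker thread.
// g_mutex guards the audio buffer, the gpt-2 prompt and the status/text strings;
// g_running is atomic so the worker loop can be stopped without taking the lock.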
struct gpt2_context * g_gpt2;

std::vector<struct whisper_context *> g_contexts(4, nullptr);

std::mutex  g_mutex;
std::thread g_worker;
std::atomic<bool> g_running(false);

bool g_force_speak = false;
std::string g_text_to_speak = "";
std::string g_status        = "";
std::string g_status_forced = "";

std::vector<float> g_pcmf32;
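
// format a whisper timestamp (given in units of 10 ms) as a "minutes:seconds.fraction" string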
std::string to_timestamp(int64_t t) {
    int64_t sec = t/100;
    int64_t msec = t - sec*100;
    int64_t min = sec/60;
    sec = sec - min*60;

    char buf[32];
    snprintf(buf, sizeof(buf), "%02d:%02d.%03d", (int) min, (int) sec, (int) msec);

    return std::string(buf);
}

void talk_set_status(const std::string & status) {
    std::lock_guard<std::mutex> lock(g_mutex);
    g_status = status;
}
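
// worker thread: repeatedly grab the most recent audio window, transcribe it with whisper,
// generate a reply with gpt-2 and hand the reply back to the page for speech synthesis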
void talk_main(size_t index) {
    talk_set_status("loading data ...");

    struct whisper_full_params wparams = whisper_full_default_params(whisper_sampling_strategy::WHISPER_SAMPLING_GREEDY);

    wparams.n_threads        = std::min(N_THREAD, (int) std::thread::hardware_concurrency());
    wparams.offset_ms        = 0;
    wparams.translate        = false;
    wparams.no_context       = true;
    wparams.single_segment   = true;
    wparams.print_realtime   = false;
    wparams.print_progress   = false;
    wparams.print_timestamps = true;
    wparams.print_special_tokens = false;

    wparams.max_tokens       = 32;
    wparams.audio_ctx        = 768; // partial encoder context for better performance

    wparams.language         = "en";

    g_gpt2 = gpt2_init("gpt-2.bin");

    printf("talk: using %d threads\n", N_THREAD);

    std::vector<float> pcmf32;

    // whisper context
    auto & ctx = g_contexts[index];
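
    // process a new chunk roughly every 2 seconds, transcribing at most the last 9 seconds of audio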
    const int64_t step_samples   = 2*WHISPER_SAMPLE_RATE;
    const int64_t step_ms        = (step_samples*1000)/WHISPER_SAMPLE_RATE;
    const int64_t window_samples = 9*WHISPER_SAMPLE_RATE;

    auto t_last = std::chrono::high_resolution_clock::now();

    talk_set_status("listening ...");

    while (g_running) {
        const auto t_now = std::chrono::high_resolution_clock::now();
        if (std::chrono::duration_cast<std::chrono::milliseconds>(t_now - t_last).count() < step_ms) {
            {
                std::lock_guard<std::mutex> lock(g_mutex);
                g_pcmf32.clear();
            }

            std::this_thread::sleep_for(std::chrono::milliseconds(10));

            continue;
        }

        talk_set_status("listening ...");

        {
            std::unique_lock<std::mutex> lock(g_mutex);

            if (g_pcmf32.size() < step_samples) {
                lock.unlock();

                std::this_thread::sleep_for(std::chrono::milliseconds(10));

                continue;
            }

            pcmf32 = std::vector<float>(g_pcmf32.end() - std::min((int64_t) g_pcmf32.size(), window_samples), g_pcmf32.end());
        }

        // if the energy during the last second is above the threshold, the user is
        // probably still speaking - wait for more audio before transcribing
        {
            float energy_all = 0.0f;
            float energy_1s  = 0.0f;

            for (size_t i = 0; i < pcmf32.size(); i++) {
                energy_all += fabsf(pcmf32[i]);

                if (i >= pcmf32.size() - WHISPER_SAMPLE_RATE) {
                    energy_1s += fabsf(pcmf32[i]);
                }
            }

            energy_all /= pcmf32.size();
            energy_1s  /= WHISPER_SAMPLE_RATE;

            if (energy_1s > 0.1f*energy_all && !g_force_speak) {
                std::this_thread::sleep_for(std::chrono::milliseconds(10));
                continue;
            }
        }
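
        // transcribe the captured window; when force_speak is set, transcription is skipped
        // and gpt-2 simply continues from the existing prompt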
        talk_set_status("processing audio (whisper)...");

        t_last = t_now;

        if (!g_force_speak) {
            const auto t_start = std::chrono::high_resolution_clock::now();

            int ret = whisper_full(ctx, wparams, pcmf32.data(), pcmf32.size());
            if (ret != 0) {
                printf("whisper_full() failed: %d\n", ret);
                break;
            }

            const auto t_end = std::chrono::high_resolution_clock::now();

            printf("whisper_full() returned %d in %f seconds\n", ret, std::chrono::duration<double>(t_end - t_start).count());
        }

        {
            std::string text_heard;

            if (!g_force_speak) {
                const int n_segments = whisper_full_n_segments(ctx);
                for (int i = n_segments - 1; i < n_segments; ++i) {
                    const char * text = whisper_full_get_segment_text(ctx, i);

                    const int64_t t0 = whisper_full_get_segment_t0(ctx, i);
                    const int64_t t1 = whisper_full_get_segment_t1(ctx, i);

                    printf("[%s --> %s] %s\n", to_timestamp(t0).c_str(), to_timestamp(t1).c_str(), text);

                    text_heard += text;
                }
            }

            g_force_speak = false;

            // remove text between brackets using regex
            {
                std::regex re("\\[.*?\\]");
                text_heard = std::regex_replace(text_heard, re, "");
            }

            // remove text between parentheses using regex
            {
                std::regex re("\\(.*?\\)");
                text_heard = std::regex_replace(text_heard, re, "");
            }

            // remove all characters, except for letters, numbers, punctuation and ':', '\'', '-', ' '
            text_heard = std::regex_replace(text_heard, std::regex("[^a-zA-Z0-9\\.,\\?!\\s\\:\\'\\-]"), "");

            // take first line
            text_heard = text_heard.substr(0, text_heard.find_first_of("\n"));

            // remove leading and trailing whitespace
            text_heard = std::regex_replace(text_heard, std::regex("^\\s+"), "");
            text_heard = std::regex_replace(text_heard, std::regex("\\s+$"), "");

            talk_set_status("'" + text_heard + "' - thinking how to respond (gpt-2) ...");

            const std::vector<gpt_vocab::id> tokens = gpt2_tokenize(g_gpt2, text_heard.c_str());

            printf("whisper: number of tokens: %d, '%s'\n", (int) tokens.size(), text_heard.c_str());
            std::string text_to_speak;
            std::string prompt_base;

            {
                std::lock_guard<std::mutex> lock(g_mutex);
                prompt_base = gpt2_get_prompt(g_gpt2);
            }

            if (tokens.size() > 0) {
                text_to_speak = gpt2_gen_text(g_gpt2, (prompt_base + text_heard + "\n").c_str(), 32);
                text_to_speak = std::regex_replace(text_to_speak, std::regex("[^a-zA-Z0-9\\.,\\?!\\s\\:\\'\\-]"), "");
                text_to_speak = text_to_speak.substr(0, text_to_speak.find_first_of("\n"));

                std::lock_guard<std::mutex> lock(g_mutex);

                // remove first 2 lines of base prompt
                {
                    const size_t pos = prompt_base.find_first_of("\n");
                    if (pos != std::string::npos) {
                        prompt_base = prompt_base.substr(pos + 1);
                    }
                }
                {
                    const size_t pos = prompt_base.find_first_of("\n");
                    if (pos != std::string::npos) {
                        prompt_base = prompt_base.substr(pos + 1);
                    }
                }

                prompt_base += text_heard + "\n" + text_to_speak + "\n";
            } else {
                text_to_speak = gpt2_gen_text(g_gpt2, prompt_base.c_str(), 32);
                text_to_speak = std::regex_replace(text_to_speak, std::regex("[^a-zA-Z0-9\\.,\\?!\\s\\:\\'\\-]"), "");
                text_to_speak = text_to_speak.substr(0, text_to_speak.find_first_of("\n"));

                std::lock_guard<std::mutex> lock(g_mutex);

                const size_t pos = prompt_base.find_first_of("\n");
                if (pos != std::string::npos) {
                    prompt_base = prompt_base.substr(pos + 1);
                }

                prompt_base += text_to_speak + "\n";
            }

            printf("gpt-2: %s\n", text_to_speak.c_str());

            //printf("========================\n");
            //printf("gpt-2: prompt_base:\n'%s'\n", prompt_base.c_str());
            //printf("========================\n");

            {
                std::lock_guard<std::mutex> lock(g_mutex);
                t_last = std::chrono::high_resolution_clock::now();
                g_text_to_speak = text_to_speak;
                g_pcmf32.clear();
                gpt2_set_prompt(g_gpt2, prompt_base.c_str());
            }

            talk_set_status("speaking ...");
        }
    }

    gpt2_free(g_gpt2);

    if (index < g_contexts.size()) {
        whisper_free(g_contexts[index]);
        g_contexts[index] = nullptr;
    }
}
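
// JS-facing API. The page's JS is expected to drive it roughly like this (names below are
// illustrative - the actual call sequence lives in the accompanying JS):
//
//   const idx = Module.init(pathModel);   // load the whisper model and start the worker thread
//   Module.set_audio(idx, pcmFloat32);    // push captured microphone samples (16 kHz mono float)
//   Module.get_text_to_speak();           // poll for the next gpt-2 reply to synthesize
//   Module.get_status();                  // current status string for the UI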
EMSCRIPTEN_BINDINGS(talk) {
    emscripten::function("init", emscripten::optional_override([](const std::string & path_model) {
        for (size_t i = 0; i < g_contexts.size(); ++i) {
            if (g_contexts[i] == nullptr) {
                g_contexts[i] = whisper_init(path_model.c_str());
                if (g_contexts[i] != nullptr) {
                    g_running = true;
                    if (g_worker.joinable()) {
                        g_worker.join();
                    }
                    g_worker = std::thread([i]() {
                        talk_main(i);
                    });

                    return i + 1;
                } else {
                    return (size_t) 0;
                }
            }
        }

        return (size_t) 0;
    }));

    emscripten::function("free", emscripten::optional_override([](size_t index) {
        if (g_running) {
            g_running = false;
        }
    }));

    emscripten::function("set_audio", emscripten::optional_override([](size_t index, const emscripten::val & audio) {
        --index;

        if (index >= g_contexts.size()) {
            return -1;
        }

        if (g_contexts[index] == nullptr) {
            return -2;
        }
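
        // copy the incoming JS Float32Array into g_pcmf32: build a typed-array view of the
        // same kind over the WASM heap at g_pcmf32's address and set() the audio into it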
        {
            std::lock_guard<std::mutex> lock(g_mutex);
            const int n = audio["length"].as<int>();

            emscripten::val heap = emscripten::val::module_property("HEAPU8");
            emscripten::val memory = heap["buffer"];

            g_pcmf32.resize(n);

            emscripten::val memoryView = audio["constructor"].new_(memory, reinterpret_cast<uintptr_t>(g_pcmf32.data()), n);
            memoryView.call<void>("set", audio);
        }

        return 0;
    }));

    emscripten::function("force_speak", emscripten::optional_override([](size_t index) {
        {
            std::lock_guard<std::mutex> lock(g_mutex);
            g_force_speak = true;
        }
    }));

    emscripten::function("get_text_context", emscripten::optional_override([]() {
        std::string text_context;

        {
            std::lock_guard<std::mutex> lock(g_mutex);
            text_context = gpt2_get_prompt(g_gpt2);
        }

        return text_context;
    }));
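
    // each reply is handed out once: it is moved out of g_text_to_speak, leaving the
    // global empty until the worker produces the next one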
    emscripten::function("get_text_to_speak", emscripten::optional_override([]() {
        std::string text_to_speak;

        {
            std::lock_guard<std::mutex> lock(g_mutex);
            text_to_speak = std::move(g_text_to_speak);
        }

        return text_to_speak;
    }));

    emscripten::function("get_status", emscripten::optional_override([]() {
        std::string status;

        {
            std::lock_guard<std::mutex> lock(g_mutex);
            status = g_status_forced.empty() ? g_status : g_status_forced;
        }

        return status;
    }));

    emscripten::function("set_status", emscripten::optional_override([](const std::string & status) {
        {
            std::lock_guard<std::mutex> lock(g_mutex);
            g_status_forced = status;
        }
    }));

    emscripten::function("set_prompt", emscripten::optional_override([](const std::string & prompt) {
        {
            std::lock_guard<std::mutex> lock(g_mutex);
            gpt2_set_prompt(g_gpt2, prompt.c_str());
        }
    }));
}