From 499119380b44c2aec5e9ea479bb89cb24cbbd7cb Mon Sep 17 00:00:00 2001
From: Saifeddine ALOUI
Date: Sat, 2 Nov 2024 22:16:22 +0100
Subject: [PATCH] fixed a bug in lollms_client.js

---
 endpoints/libraries/lollms_client_js.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/endpoints/libraries/lollms_client_js.js b/endpoints/libraries/lollms_client_js.js
index fea36ca0..2e8c6c02 100644
--- a/endpoints/libraries/lollms_client_js.js
+++ b/endpoints/libraries/lollms_client_js.js
@@ -595,7 +595,7 @@ async generateCode(prompt, images = [], {
         code = codes[0].content.split('\n').slice(0, -1).join('\n');
         while (!codes[0].is_complete) {
             console.warn("The AI did not finish the code, let's ask it to continue")
-            const continuePrompt = prompt + code + this.userFullHeader + "continue the code. Rewrite last line and continue the code." + this.separatorTemplate() + this.aiFullHeader;
+            const continuePrompt = prompt + code + this.userFullHeader + "continue the code. Rewrite last line and continue the code. Don't forget to put the code inside a markdown code tag." + this.separatorTemplate() + this.aiFullHeader;
             response = await this.generate(fullPrompt, {
                 n_predict: n_predict,
                 temperature: temperature,
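
Note (not part of the patch): a minimal sketch of the continuation loop this hunk lives in, to show what the extra reminder in the new prompt buys. client.generate and extractCodeBlocks are hypothetical stand-ins, the chat-template markers (this.userFullHeader, this.separatorTemplate(), this.aiFullHeader) are replaced with plain newlines, and the maxRounds cap is added only for the sketch; the real lollms_client_js.js differs in those details.

// Sketch only: approximates the loop around the patched line, under the
// assumptions named above.

// Pull fenced ``` blocks out of a markdown reply and flag whether the last
// fence was closed (a rough stand-in for codes[0].is_complete).
function extractCodeBlocks(text) {
    const blocks = [];
    const re = /```[^\n]*\n([\s\S]*?)(```|$)/g;
    let m;
    while ((m = re.exec(text)) !== null) {
        blocks.push({ content: m[1], is_complete: m[2] === "```" });
    }
    return blocks;
}

// Loop until the model closes its code fence, asking it each round to rewrite
// the last line and continue, and (the point of this patch) to keep answering
// inside a markdown code tag so extractCodeBlocks can parse the reply.
async function generateCompleteCode(client, prompt, maxRounds = 5) {
    let response = await client.generate(prompt);
    let codes = extractCodeBlocks(response);
    if (codes.length === 0) return "";

    // Drop the possibly truncated last line before asking for a continuation.
    let code = codes[0].content.split("\n").slice(0, -1).join("\n");

    let rounds = 0;
    while (!codes[0].is_complete && rounds++ < maxRounds) {
        console.warn("The AI did not finish the code, let's ask it to continue");
        const continuePrompt = prompt + code +
            "\ncontinue the code. Rewrite last line and continue the code. " +
            "Don't forget to put the code inside a markdown code tag.\n";
        response = await client.generate(continuePrompt);
        codes = extractCodeBlocks(response);
        if (codes.length === 0) break; // reply had no fenced block; nothing to merge

        const chunk = codes[0].content.split("\n");
        if (!codes[0].is_complete) chunk.pop(); // again drop a possibly cut-off last line
        code += "\n" + chunk.join("\n");
    }
    return code;
}

Without the added reminder, a continuation reply that omits the fence leaves the block extractor with nothing to merge, which is what the sentence appended in this commit guards against.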