From c5d3f92ba80161a7fbf78b703ced12a56b7c3897 Mon Sep 17 00:00:00 2001
From: saloui
Date: Mon, 22 May 2023 12:27:45 +0200
Subject: [PATCH] upgraded install

---
 backends/c_transformers/install.py | 3 +++
 backends/gpt_4all/install.py       | 5 ++++-
 gpt4all_api/api.py                 | 5 +----
 3 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/backends/c_transformers/install.py b/backends/c_transformers/install.py
index 0b60eb34..c8d70f16 100644
--- a/backends/c_transformers/install.py
+++ b/backends/c_transformers/install.py
@@ -13,6 +13,7 @@ class Install:
             print("-------------- cTransformers backend -------------------------------")
             print("This is the first time you are using this backend.")
             print("Installing ...")
+            """
             try:
                 print("Checking pytorch")
                 import torch
@@ -24,6 +25,8 @@ class Install:
                     self.reinstall_pytorch_with_cuda()
             except Exception as ex:
                 self.reinstall_pytorch_with_cuda()
+            """
+
             # Step 2: Install dependencies using pip from requirements.txt
             requirements_file = current_dir / "requirements.txt"
             subprocess.run(["pip", "install", "--upgrade", "--no-cache-dir", "-r", str(requirements_file)])
diff --git a/backends/gpt_4all/install.py b/backends/gpt_4all/install.py
index 3ae37da6..6b0f3655 100644
--- a/backends/gpt_4all/install.py
+++ b/backends/gpt_4all/install.py
@@ -13,6 +13,7 @@ class Install:
             print("-------------- GPT4All backend by nomic-ai -------------------------------")
             print("This is the first time you are using this backend.")
             print("Installing ...")
+            """
             try:
                 print("Checking pytorch")
                 import torch
@@ -23,7 +24,9 @@ class Install:
                     print("CUDA is not supported. Reinstalling PyTorch with CUDA support.")
                     self.reinstall_pytorch_with_cuda()
             except Exception as ex:
-                self.reinstall_pytorch_with_cuda()
+                self.reinstall_pytorch_with_cuda()
+            """
+
             # Step 2: Install dependencies using pip from requirements.txt
             requirements_file = current_dir / "requirements.txt"
             subprocess.run(["pip", "install", "--no-cache-dir", "-r", str(requirements_file)])
diff --git a/gpt4all_api/api.py b/gpt4all_api/api.py
index 9ee16970..cf218945 100644
--- a/gpt4all_api/api.py
+++ b/gpt4all_api/api.py
@@ -717,10 +717,7 @@ class GPT4AllAPI():
         self.process.generate(self.discussion_messages, self.current_message, message_id, n_predict = self.config['n_predict'])
         self.process.started_queue.get()
         while(self.process.is_generating.value):  # Simulating other commands being issued
-            chunk = ""
-            while not self.process.generation_queue.empty():
-                chk, tok, message_type = self.process.generation_queue.get()
-                chunk += chk
+            chunk, tok, message_type = self.process.generation_queue.get()
             if chunk!="":
                 self.process_chunk(chunk)