diff --git a/backends/c_transformers/install.py b/backends/c_transformers/install.py
index 5dadc1e0..0b60eb34 100644
--- a/backends/c_transformers/install.py
+++ b/backends/c_transformers/install.py
@@ -27,6 +27,12 @@ class Install:
             # Step 2: Install dependencies using pip from requirements.txt
             requirements_file = current_dir / "requirements.txt"
             subprocess.run(["pip", "install", "--upgrade", "--no-cache-dir", "-r", str(requirements_file)])
+
+            # Create the models folder
+            models_folder = Path("./models/c_transformers")
+            models_folder.mkdir(exist_ok=True, parents=True)
+
+            # Create the install file
             with open(install_file,"w") as f:
                 f.write("ok")
             print("Installed successfully")
diff --git a/backends/gpt_4all/install.py b/backends/gpt_4all/install.py
new file mode 100644
index 00000000..3ae37da6
--- /dev/null
+++ b/backends/gpt_4all/install.py
@@ -0,0 +1,43 @@
+import subprocess
+from pathlib import Path
+import requests
+from tqdm import tqdm
+
+class Install:
+    def __init__(self, api):
+        # Get the current directory
+        current_dir = Path(__file__).resolve().parent
+        install_file = current_dir / ".installed"
+
+        if not install_file.exists():
+            print("-------------- GPT4All backend by nomic-ai -------------------------------")
+            print("This is the first time you are using this backend.")
+            print("Installing ...")
+            try:
+                print("Checking pytorch")
+                import torch
+                import torchvision
+                if torch.cuda.is_available():
+                    print("CUDA is supported.")
+                else:
+                    print("CUDA is not supported. Reinstalling PyTorch with CUDA support.")
+                    self.reinstall_pytorch_with_cuda()
+            except Exception as ex:
+                self.reinstall_pytorch_with_cuda()
+            # Step 2: Install dependencies using pip from requirements.txt
+            requirements_file = current_dir / "requirements.txt"
+            subprocess.run(["pip", "install", "--no-cache-dir", "-r", str(requirements_file)])
+
+            # Create the models folder
+            models_folder = Path("./models/gpt_4all")
+            models_folder.mkdir(exist_ok=True, parents=True)
+
+            # Create the install file
+
+            with open(install_file,"w") as f:
+                f.write("ok")
+            print("Installed successfully")
+
+    def reinstall_pytorch_with_cuda(self):
+        subprocess.run(["pip", "install", "torch", "torchvision", "torchaudio", "--no-cache-dir", "--index-url", "https://download.pytorch.org/whl/cu117"])
+        
\ No newline at end of file
diff --git a/backends/gpt_j_a/install.py b/backends/gpt_j_a/install.py
index d96fdafd..aeb156c6 100644
--- a/backends/gpt_j_a/install.py
+++ b/backends/gpt_j_a/install.py
@@ -27,6 +27,13 @@ class Install:
             # Step 2: Install dependencies using pip from requirements.txt
             requirements_file = current_dir / "requirements.txt"
             subprocess.run(["pip", "install", "--no-cache-dir", "-r", str(requirements_file)])
+
+            # Create the models folder
+            models_folder = Path("./models/gpt_j_a")
+            models_folder.mkdir(exist_ok=True, parents=True)
+
+            # Create the install file
+
             with open(install_file,"w") as f:
                 f.write("ok")
             print("Installed successfully")
diff --git a/backends/gpt_j_m/install.py b/backends/gpt_j_m/install.py
index f0349775..47b3b3bd 100644
--- a/backends/gpt_j_m/install.py
+++ b/backends/gpt_j_m/install.py
@@ -27,6 +27,12 @@ class Install:
             # Step 2: Install dependencies using pip from requirements.txt
             requirements_file = current_dir / "requirements.txt"
             subprocess.run(["pip", "install", "--no-cache-dir", "-r", str(requirements_file)])
+
+            # Create the models folder
+            models_folder = Path("./models/gpt_j_m")
+            models_folder.mkdir(exist_ok=True, parents=True)
+
+            # Create the install file
             with open(install_file,"w") as f:
                 f.write("ok")
             print("Installed successfully")
diff --git a/backends/llama_cpp_official/install.py b/backends/llama_cpp_official/install.py
index a02eceb5..72f02763 100644
--- a/backends/llama_cpp_official/install.py
+++ b/backends/llama_cpp_official/install.py
@@ -27,6 +27,12 @@ class Install:
             # Step 2: Install dependencies using pip from requirements.txt
             requirements_file = current_dir / "requirements.txt"
             subprocess.run(["pip", "install", "--no-cache-dir", "-r", str(requirements_file)])
+
+            # Create the models folder
+            models_folder = Path("./models/llama_cpp_official")
+            models_folder.mkdir(exist_ok=True, parents=True)
+
+            # Create the install file
             with open(install_file,"w") as f:
                 f.write("ok")
             print("Installed successfully")
diff --git a/backends/py_llama_cpp/install.py b/backends/py_llama_cpp/install.py
index 8f70c81e..45de125a 100644
--- a/backends/py_llama_cpp/install.py
+++ b/backends/py_llama_cpp/install.py
@@ -27,6 +27,12 @@ class Install:
             # Step 2: Install dependencies using pip from requirements.txt
             requirements_file = current_dir / "requirements.txt"
             subprocess.run(["pip", "install", "--no-cache-dir", "-r", str(requirements_file)])
+
+            # Create the models folder
+            models_folder = Path("./models/py_llama_cpp")
+            models_folder.mkdir(exist_ok=True, parents=True)
+
+            # Create the install file
             with open(install_file,"w") as f:
                 f.write("ok")
             print("Installed successfully")