lollms-webui/backends/backend_template/install.py

import subprocess
from pathlib import Path
import requests
from tqdm import tqdm
from api.config import save_config

class Install:
    def __init__(self, api):
        # Get the current directory
        current_dir = Path(__file__).resolve().parent
        install_file = current_dir / ".installed"
        if not install_file.exists():
            print("-------------- Template backend -------------------------------")
            print("This is the first time you are using this backend.")
            print("Installing ...")

            # Example of installing PyTorch (kept disabled in this template)
"""
try:
print("Checking pytorch")
import torch
import torchvision
if torch.cuda.is_available():
print("CUDA is supported.")
else:
print("CUDA is not supported. Reinstalling PyTorch with CUDA support.")
self.reinstall_pytorch_with_cuda()
except Exception as ex:
self.reinstall_pytorch_with_cuda()
"""
            # Install dependencies using pip from requirements.txt
            requirements_file = current_dir / "requirements.txt"
            subprocess.run(["pip", "install", "--upgrade", "--no-cache-dir", "-r", str(requirements_file)])

            # Create the models folder for this backend
            models_folder = Path(f"./models/{Path(__file__).parent.stem}")
            models_folder.mkdir(exist_ok=True, parents=True)
            # The local config can be used to store personal information that shouldn't be shared,
            # such as a ChatGPT API key or other private settings.
            # This file is never committed to the repository as it is ignored by .gitignore.
            # You can remove this if you don't need custom local configurations.
            self._local_config_file_path = Path(__file__).parent / "config_local.yaml"
            if not self._local_config_file_path.exists():
                config = {
                    # Put your default configurations here
                }
                save_config(config, self._local_config_file_path)

            # Create the install file (a marker used to ensure the installation was done correctly)
            with open(install_file, "w") as f:
                f.write("ok")
            print("Installed successfully")

    def reinstall_pytorch_with_cuda(self):
        """Reinstalls PyTorch with CUDA support (if you have a GPU)."""
        subprocess.run(["pip", "install", "torch", "torchvision", "torchaudio", "--no-cache-dir", "--index-url", "https://download.pytorch.org/whl/cu117"])