Updated
This commit is contained in: parent c1a58f2a1b, commit 761406a6f9
@@ -49,9 +49,6 @@ class LollmsApplication:
                 # Pull the repository if it already exists
                 ASCIIColors.info("Extensions zoo found in your personal space.\nPulling last Extensions zoo")
                 subprocess.run(["git", "-C", self.lollms_paths.extensions_zoo_path, "pull"])
-                # Pull the repository if it already exists
-                # ASCIIColors.info("Qlora found in your personal space.\nPulling last qlora code")
-                # subprocess.run(["git", "-C", self.lollms_paths.gptqlora_path, "pull"])
             except Exception as ex:
                 ASCIIColors.error("Couldn't pull zoos. Please contact the main dev on our discord channel and report the problem.")
                 trace_exception(ex)
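For reference, the pull step kept by this hunk is a plain `git pull` wrapped in a broad exception handler. A minimal standalone sketch of the same idea follows; the helper name and boolean return value are illustrative and not part of the commit:

import subprocess
from pathlib import Path

def pull_zoo(repo_path: Path) -> bool:
    """Run `git pull` inside repo_path; report failure instead of raising."""
    try:
        subprocess.run(["git", "-C", str(repo_path), "pull"], check=True)
        return True
    except (subprocess.CalledProcessError, FileNotFoundError) as ex:
        print(f"Couldn't pull {repo_path}: {ex}")
        return False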
@@ -334,7 +334,50 @@ class LLMBinding:
         else:
             ASCIIColors.error("Pytorch installed successfully!!")
 
+    @staticmethod
+    def vram_usage():
+        try:
+            output = subprocess.check_output(['nvidia-smi', '--query-gpu=memory.total,memory.used,gpu_name', '--format=csv,nounits,noheader'])
+            lines = output.decode().strip().split('\n')
+            vram_info = [line.split(',') for line in lines]
+        except (subprocess.CalledProcessError, FileNotFoundError):
+            return {
+                "nb_gpus": 0
+            }
+
+        ram_usage = {
+            "nb_gpus": len(vram_info)
+        }
+
+        if vram_info is not None:
+            for i, gpu in enumerate(vram_info):
+                ram_usage[f"gpu_{i}_total_vram"] = int(gpu[0])*1024*1024
+                ram_usage[f"gpu_{i}_used_vram"] = int(gpu[1])*1024*1024
+                ram_usage[f"gpu_{i}_model"] = gpu[2].strip()
+        else:
+            # Set all VRAM-related entries to None
+            ram_usage["gpu_0_total_vram"] = None
+            ram_usage["gpu_0_used_vram"] = None
+            ram_usage["gpu_0_model"] = None
+
+        return ram_usage
+
+    @staticmethod
+    def clear_cuda():
+        import torch
+        ASCIIColors.red("*-*-*-*-*-*-*-*")
+        ASCIIColors.red("Cuda VRAM usage")
+        ASCIIColors.red("*-*-*-*-*-*-*-*")
+        print(LLMBinding.vram_usage())
+        try:
+            torch.cuda.empty_cache()
+        except Exception as ex:
+            ASCIIColors.error("Couldn't clear cuda memory")
+        ASCIIColors.red("Cleared cache")
+        ASCIIColors.red("*-*-*-*-*-*-*-*")
+        ASCIIColors.red("Cuda VRAM usage")
+        ASCIIColors.red("*-*-*-*-*-*-*-*")
+        print(LLMBinding.vram_usage())
     # To implement by children
     # @staticmethod
     # def get_available_models():
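The new vram_usage helper shells out to nvidia-smi and flattens the result into a dict keyed by GPU index, converting sizes from MiB to bytes; clear_cuda then prints that dict around a torch.cuda.empty_cache() call. A self-contained sketch of the same query outside the LLMBinding class (the function name is illustrative, not from the commit):

import subprocess

def query_vram():
    """Return per-GPU VRAM info in bytes, or {'nb_gpus': 0} if nvidia-smi is unavailable."""
    try:
        out = subprocess.check_output(
            ["nvidia-smi", "--query-gpu=memory.total,memory.used,gpu_name",
             "--format=csv,nounits,noheader"]
        )
    except (subprocess.CalledProcessError, FileNotFoundError):
        return {"nb_gpus": 0}

    info = {"nb_gpus": 0}
    for i, line in enumerate(out.decode().strip().splitlines()):
        total, used, name = [field.strip() for field in line.split(",")]
        info[f"gpu_{i}_total_vram"] = int(total) * 1024 * 1024  # nvidia-smi reports MiB with nounits
        info[f"gpu_{i}_used_vram"] = int(used) * 1024 * 1024
        info[f"gpu_{i}_model"] = name
        info["nb_gpus"] = i + 1
    return info

Note that by the time the committed code reaches its "if vram_info is not None" check, vram_info is always a list, so the sketch simply drops that branch.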
@@ -3,6 +3,7 @@ import shutil
 from ascii_colors import ASCIIColors
 from lollms.config import BaseConfig
 import subprocess
+import os
 
 lollms_path = Path(__file__).parent
 lollms_default_cfg_path = lollms_path / "configs/config.yaml"
@@ -48,6 +49,12 @@ class LollmsPaths:
         self.personal_trainers_path = self.personal_path / "trainers"
         self.gptqlora_path = self.personal_trainers_path / "gptqlora"
 
+        self.execution_path = Path(os.getcwd())
+        if (self.execution_path/"zoos").exists():
+            self.bindings_zoo_path = self.execution_path/"zoos" / "bindings_zoo"
+            self.personalities_zoo_path = self.execution_path/"zoos" / "personalities_zoo"
+            self.extensions_zoo_path = self.execution_path/"zoos" / "extensions_zoo"
+        else:
-        self.bindings_zoo_path = self.personal_path / "bindings_zoo"
+            self.bindings_zoo_path = self.personal_path / "bindings_zoo"
-        self.personalities_zoo_path = self.personal_path / "personalities_zoo"
+            self.personalities_zoo_path = self.personal_path / "personalities_zoo"
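The effect of this hunk is a precedence rule: a zoos folder in the current working directory overrides the zoo folders cloned into the personal space. A minimal sketch of that lookup, assuming only the two candidate locations used in the diff (the helper name is illustrative):

import os
from pathlib import Path

def resolve_zoo_path(personal_path: Path, zoo_name: str) -> Path:
    """Prefer <cwd>/zoos/<zoo_name> when a local zoos folder exists, else fall back to the personal space."""
    local_zoo = Path(os.getcwd()) / "zoos" / zoo_name
    return local_zoo if local_zoo.parent.exists() else personal_path / zoo_name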
@@ -124,12 +131,6 @@
             ASCIIColors.info("No extensions found in your personal space.\nCloning the extensions zoo")
             subprocess.run(["git", "clone", extensions_zoo_repo, self.extensions_zoo_path])
 
-        # QLora should be installed by the application
-        #if not self.gptqlora_path.exists():
-            # Clone the repository to the target path
-            #ASCIIColors.info("No gptqlora found in your personal space.\nCloning the gptqlora repo")
-            #subprocess.run(["git", "clone", gptqlora_repo, self.gptqlora_path])
-            #subprocess.run(["pip", "install", "-r", "requirements.txt"], cwd=self.gptqlora_path)
 
     def copy_default_config(self):
setup.py
@@ -26,7 +26,7 @@ def get_all_files(path):
 
 setuptools.setup(
     name="lollms",
-    version="5.7.5",
+    version="5.7.6",
     author="Saifeddine ALOUI",
     author_email="aloui.saifeddine@gmail.com",
     description="A python library for AI personality definition",