added gpu model name

Author: andzejsp
Date: 2023-07-01 14:42:39 +03:00
Parent: d7aad205fa
Commit: ff80083126

3 changed files with 5 additions and 1 deletion

app.py (+3, -1)

@@ -563,7 +563,7 @@ class LoLLMsWebUI(LoLLMsAPPI):
     def vram_usage(self) -> Optional[dict]:
         try:
-            output = subprocess.check_output(['nvidia-smi', '--query-gpu=memory.total,memory.used', '--format=csv,nounits,noheader'])
+            output = subprocess.check_output(['nvidia-smi', '--query-gpu=memory.total,memory.used,gpu_name', '--format=csv,nounits,noheader'])
             lines = output.decode().strip().split('\n')
             vram_info = [line.split(',') for line in lines]
         except (subprocess.CalledProcessError, FileNotFoundError):
@@ -579,10 +579,12 @@ class LoLLMsWebUI(LoLLMsAPPI):
             for i, gpu in enumerate(vram_info):
                 ram_usage[f"gpu_{i}_total_vram"] = int(gpu[0])*1024*1024
                 ram_usage[f"gpu_{i}_used_vram"] = int(gpu[1])*1024*1024
+                ram_usage[f"gpu_{i}_model"] = gpu[2].strip()
         else:
             # Set all VRAM-related entries to None
             ram_usage["gpu_0_total_vram"] = None
             ram_usage["gpu_0_used_vram"] = None
+            ram_usage["gpu_0_model"] = None
         return jsonify(ram_usage)
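
For context, a minimal standalone sketch of what the patched vram_usage logic does. It mirrors only the lines touched by this commit; the surrounding "if vram_info is not None:" guard is assumed from the else branch visible in the hunk, the function name is hypothetical, and the Flask jsonify wrapper is dropped so the snippet runs on its own. With --format=csv,nounits,noheader, nvidia-smi reports memory in MiB and appends the GPU name as the third comma-separated field.

import subprocess

def vram_usage_sketch():
    # Illustrative only: mirrors the patched nvidia-smi query and parsing from app.py,
    # without the Flask jsonify wrapper.
    ram_usage = {}
    try:
        output = subprocess.check_output(
            ['nvidia-smi',
             '--query-gpu=memory.total,memory.used,gpu_name',
             '--format=csv,nounits,noheader'])
        lines = output.decode().strip().split('\n')
        vram_info = [line.split(',') for line in lines]
    except (subprocess.CalledProcessError, FileNotFoundError):
        vram_info = None

    if vram_info is not None:  # guard assumed; only its else branch appears in the hunk
        for i, gpu in enumerate(vram_info):
            ram_usage[f"gpu_{i}_total_vram"] = int(gpu[0]) * 1024 * 1024  # MiB -> bytes
            ram_usage[f"gpu_{i}_used_vram"] = int(gpu[1]) * 1024 * 1024
            ram_usage[f"gpu_{i}_model"] = gpu[2].strip()  # drop the space after the CSV comma
    else:
        # No NVIDIA GPU or nvidia-smi not installed
        ram_usage["gpu_0_total_vram"] = None
        ram_usage["gpu_0_used_vram"] = None
        ram_usage["gpu_0_model"] = None
    return ram_usage

if __name__ == "__main__":
    print(vram_usage_sketch())

On a single-GPU machine this yields keys like gpu_0_total_vram, gpu_0_used_vram and gpu_0_model; the new gpu_0_model = None entry in the else branch keeps that shape consistent when no GPU is found.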

src/clip (new submodule, +1)

@@ -0,0 +1 @@
+Subproject commit a9b1bf5920416aaeaec965c25dd9e8f98c864f16

@@ -0,0 +1 @@
+Subproject commit 3ba01b241669f5ade541ce990f7650a3b8f65318