Yang2001 committed on
Commit
d200274
·
1 Parent(s): 06fd531

Fix: total_mem -> total_memory

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -138,7 +138,7 @@ def init_models():
138
  for i in range(torch.cuda.device_count()):
139
  name = torch.cuda.get_device_name(i)
140
  cap = torch.cuda.get_device_capability(i)
141
- mem = torch.cuda.get_device_properties(i).total_mem / 1024**3
142
  print(f"[Diagnostics] GPU {i}: {name}, sm_{cap[0]}{cap[1]}, {mem:.1f} GB")
143
  try:
144
  res = _sp.run(["nvidia-smi", "--query-gpu=name,compute_cap,memory.total", "--format=csv,noheader"], capture_output=True, text=True, timeout=10)
 
138
  for i in range(torch.cuda.device_count()):
139
  name = torch.cuda.get_device_name(i)
140
  cap = torch.cuda.get_device_capability(i)
141
+ mem = torch.cuda.get_device_properties(i).total_memory / 1024**3
142
  print(f"[Diagnostics] GPU {i}: {name}, sm_{cap[0]}{cap[1]}, {mem:.1f} GB")
143
  try:
144
  res = _sp.run(["nvidia-smi", "--query-gpu=name,compute_cap,memory.total", "--format=csv,noheader"], capture_output=True, text=True, timeout=10)