Vedant Jigarbhai Mehta committed on
Commit
a05c98a
·
1 Parent(s): 209365d

Fix VRAM attribute name for PyTorch compatibility

Browse files
Files changed (1) hide show
  1. setup_colab.py +2 -1
setup_colab.py CHANGED
@@ -91,7 +91,8 @@ def check_gpu() -> Tuple[Optional[str], Optional[str], Optional[float]]:
91
  return None, None, None
92
 
93
  gpu_name = torch.cuda.get_device_name(0)
94
- vram_gb = torch.cuda.get_device_properties(0).total_mem / 1e9
 
95
 
96
  name_upper = gpu_name.upper()
97
  if "T4" in name_upper:
 
91
  return None, None, None
92
 
93
  gpu_name = torch.cuda.get_device_name(0)
94
+ props = torch.cuda.get_device_properties(0)
95
+ vram_gb = getattr(props, "total_memory", getattr(props, "total_mem", 0)) / 1e9
96
 
97
  name_upper = gpu_name.upper()
98
  if "T4" in name_upper: