{"cells":[{"cell_type":"code","execution_count":1,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["Number of CUDA devices: 1\n","Device 0: NVIDIA H100 PCIe\n"]}],"source":["import torch\n","\n","if torch.cuda.is_available():\n"," print(f\"Number of CUDA devices: {torch.cuda.device_count()}\")\n"," for i in range(torch.cuda.device_count()):\n"," print(f\"Device {i}: {torch.cuda.get_device_name(i)}\")\n","else:\n"," print(\"No CUDA devices available\")\n"]},{"cell_type":"code","execution_count":2,"metadata":{"execution":{"iopub.execute_input":"2024-12-22T02:50:01.688084Z","iopub.status.busy":"2024-12-22T02:50:01.687777Z","iopub.status.idle":"2024-12-22T02:50:50.139225Z","shell.execute_reply":"2024-12-22T02:50:50.138526Z","shell.execute_reply.started":"2024-12-22T02:50:01.688056Z"},"trusted":true},"outputs":[{"name":"stdout","output_type":"stream","text":["Loading the ImageNet dataset from /home/jovyan/.cache/kagglehub/datasets/ifigotin/imagenetmini-1000/versions/1/imagenet-mini/train\n","Loading the validation dataset from /home/jovyan/.cache/kagglehub/datasets/ifigotin/imagenetmini-1000/versions/1/imagenet-mini/val\n"]}],"source":["import torch\n","import torchvision\n","import torchvision.transforms as transforms\n","from tqdm import tqdm # For progress bar\n","import os\n","\n","# Set device\n","device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n","\n","# Set hyperparameters\n","num_epochs = 120\n","batch_size = 64\n","learning_rate = 3e-4\n","\n","# Initialize transformations for data augmentation\n","transform_train = transforms.Compose([\n"," transforms.Resize(256),\n"," transforms.RandomHorizontalFlip(),\n"," transforms.RandomVerticalFlip(),\n"," transforms.RandomRotation(degrees=45),\n"," transforms.ColorJitter(brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5),\n"," transforms.CenterCrop(224),\n"," transforms.ToTensor(),\n"," transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n","])\n","\n","transform_val = transforms.Compose([\n"," transforms.Resize(256),\n"," transforms.CenterCrop(224),\n"," transforms.ToTensor(),\n"," transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n","])\n","\n","# Load the ImageNet Object Localization Challenge dataset\n","# train_dataset_path = '/kaggle/input/imagenet-object-localization-challenge/ILSVRC/Data/CLS-LOC/train'\n","# val_dataset_path = '/kaggle/input/imagenet-object-localization-challenge/ILSVRC/Data/CLS-LOC/val'\n","train_dataset_path = '/home/jovyan/.cache/kagglehub/datasets/ifigotin/imagenetmini-1000/versions/1/imagenet-mini/train'\n","val_dataset_path = '/home/jovyan/.cache/kagglehub/datasets/ifigotin/imagenetmini-1000/versions/1/imagenet-mini/val'\n","\n","print(f'Loading the ImageNet dataset from {train_dataset_path}')\n","train_dataset = torchvision.datasets.ImageFolder(\n"," root=train_dataset_path,\n"," transform=transform_train\n",")\n","\n","print(f'Loading the validation dataset from {val_dataset_path}')\n","val_dataset = torchvision.datasets.ImageFolder(\n"," root=val_dataset_path,\n"," transform=transform_val\n",")\n","\n","train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size, shuffle=True, num_workers=os.cpu_count()-1)\n","val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=batch_size, shuffle=False, num_workers=os.cpu_count()-1)"]},{"cell_type":"code","execution_count":3,"metadata":{"execution":{"iopub.execute_input":"2024-12-22T02:50:50.140765Z","iopub.status.busy":"2024-12-22T02:50:50.140326Z","iopub.status.idle":"2024-12-22T02:50:50.156435Z","shell.execute_reply":"2024-12-22T02:50:50.155604Z","shell.execute_reply.started":"2024-12-22T02:50:50.140736Z"},"trusted":true},"outputs":[],"source":["import torch\n","import torch.nn as nn\n","import torch.nn.functional as F\n","\n","\n","class Bottleneck(nn.Module):\n"," expansion = 4\n"," def __init__(self, in_channels, out_channels, i_downsample=None, stride=1):\n"," super(Bottleneck, self).__init__()\n"," \n"," self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, padding=0)\n"," self.batch_norm1 = nn.BatchNorm2d(out_channels)\n"," \n"," self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=stride, padding=1)\n"," self.batch_norm2 = nn.BatchNorm2d(out_channels)\n"," \n"," self.conv3 = nn.Conv2d(out_channels, out_channels*self.expansion, kernel_size=1, stride=1, padding=0)\n"," self.batch_norm3 = nn.BatchNorm2d(out_channels*self.expansion)\n"," \n"," self.i_downsample = i_downsample\n"," self.stride = stride\n"," self.relu = nn.ReLU()\n"," \n"," def forward(self, x):\n"," identity = x.clone()\n"," x = self.relu(self.batch_norm1(self.conv1(x)))\n"," \n"," x = self.relu(self.batch_norm2(self.conv2(x)))\n"," \n"," x = self.conv3(x)\n"," x = self.batch_norm3(x)\n"," \n"," #downsample if needed\n"," if self.i_downsample is not None:\n"," identity = self.i_downsample(identity)\n"," #add identity\n"," x+=identity\n"," x=self.relu(x)\n"," \n"," return x\n","\n","class Block(nn.Module):\n"," expansion = 1\n"," def __init__(self, in_channels, out_channels, i_downsample=None, stride=1):\n"," super(Block, self).__init__()\n"," \n","\n"," self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1, stride=stride, bias=False)\n"," self.batch_norm1 = nn.BatchNorm2d(out_channels)\n"," self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1, stride=stride, bias=False)\n"," self.batch_norm2 = nn.BatchNorm2d(out_channels)\n","\n"," self.i_downsample = i_downsample\n"," self.stride = stride\n"," self.relu = nn.ReLU()\n","\n"," def forward(self, x):\n"," identity = x.clone()\n","\n"," x = self.relu(self.batch_norm2(self.conv1(x)))\n"," x = self.batch_norm2(self.conv2(x))\n","\n"," if self.i_downsample is not None:\n"," identity = self.i_downsample(identity)\n"," print(x.shape)\n"," print(identity.shape)\n"," x += identity\n"," x = self.relu(x)\n"," return x\n","\n","\n"," \n"," \n","class ResNet(nn.Module):\n"," def __init__(self, ResBlock, layer_list, num_classes, num_channels=3):\n"," super(ResNet, self).__init__()\n"," self.in_channels = 64\n"," \n"," self.conv1 = nn.Conv2d(num_channels, 64, kernel_size=7, stride=2, padding=3, bias=False)\n"," self.batch_norm1 = nn.BatchNorm2d(64)\n"," self.relu = nn.ReLU()\n"," self.max_pool = nn.MaxPool2d(kernel_size = 3, stride=2, padding=1)\n"," \n"," self.layer1 = self._make_layer(ResBlock, layer_list[0], planes=64)\n"," self.layer2 = self._make_layer(ResBlock, layer_list[1], planes=128, stride=2)\n"," self.layer3 = self._make_layer(ResBlock, layer_list[2], planes=256, stride=2)\n"," self.layer4 = self._make_layer(ResBlock, layer_list[3], planes=512, stride=2)\n"," \n"," self.avgpool = nn.AdaptiveAvgPool2d((1,1))\n"," self.fc = nn.Linear(512*ResBlock.expansion, num_classes)\n"," \n"," def forward(self, x):\n"," x = self.relu(self.batch_norm1(self.conv1(x)))\n"," x = self.max_pool(x)\n","\n"," x = self.layer1(x)\n"," x = self.layer2(x)\n"," x = self.layer3(x)\n"," x = self.layer4(x)\n"," \n"," x = self.avgpool(x)\n"," x = x.reshape(x.shape[0], -1)\n"," x = self.fc(x)\n"," \n"," return x\n"," \n"," def _make_layer(self, ResBlock, blocks, planes, stride=1):\n"," ii_downsample = None\n"," layers = []\n"," \n"," if stride != 1 or self.in_channels != planes*ResBlock.expansion:\n"," ii_downsample = nn.Sequential(\n"," nn.Conv2d(self.in_channels, planes*ResBlock.expansion, kernel_size=1, stride=stride),\n"," nn.BatchNorm2d(planes*ResBlock.expansion)\n"," )\n"," \n"," layers.append(ResBlock(self.in_channels, planes, i_downsample=ii_downsample, stride=stride))\n"," self.in_channels = planes*ResBlock.expansion\n"," \n"," for i in range(blocks-1):\n"," layers.append(ResBlock(self.in_channels, planes))\n"," \n"," return nn.Sequential(*layers)\n","\n"," \n"," \n","def ResNet50(num_classes, channels=3):\n"," return ResNet(Bottleneck, [3,4,6,3], num_classes, channels)\n"," \n","def ResNet101(num_classes, channels=3):\n"," return ResNet(Bottleneck, [3,4,23,3], num_classes, channels)\n","\n","def ResNet152(num_classes, channels=3):\n"," return ResNet(Bottleneck, [3,8,36,3], num_classes, channels)"]},{"cell_type":"code","execution_count":4,"metadata":{},"outputs":[],"source":["model = ResNet50(num_classes=1000).to(device)"]},{"cell_type":"code","execution_count":5,"metadata":{},"outputs":[{"name":"stderr","output_type":"stream","text":["/home/jovyan/env/lib/python3.10/site-packages/torch_lr_finder/lr_finder.py:5: TqdmExperimentalWarning: Using `tqdm.autonotebook.tqdm` in notebook mode. Use `tqdm.tqdm` instead to force console mode (e.g. in jupyter console)\n"," from tqdm.autonotebook import tqdm\n"]},{"data":{"application/vnd.jupyter.widget-view+json":{"model_id":"15d815b5e96e4c64b45066bfb73e84bd","version_major":2,"version_minor":0},"text/plain":[" 0%| | 0/200 [00:00"]},"metadata":{},"output_type":"display_data"}],"source":["from torch_lr_finder import LRFinder\n","optimizer = torch.optim.Adam(model.parameters(), lr=3e-4, weight_decay=1e-4)\n","criterion = nn.CrossEntropyLoss()\n","lr_finder = LRFinder(model, optimizer, criterion, device=\"cuda\")\n","lr_finder.range_test(train_loader, end_lr=10, num_iter=200, step_mode=\"exp\")\n","_ , best_lr = lr_finder.plot() # to inspect the loss-learning rate graph\n","lr_finder.reset()"]},{"cell_type":"code","execution_count":6,"metadata":{},"outputs":[{"name":"stdout","output_type":"stream","text":["Training the model on ImageNet\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 1/120: 100%|██████████| 543/543 [01:19<00:00, 6.82it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 1/120 - Training Loss: 6.9605, Training Accuracy: 0.24%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 2/120: 100%|██████████| 543/543 [01:22<00:00, 6.59it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 2/120 - Training Loss: 6.8293, Training Accuracy: 0.34%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 3/120: 100%|██████████| 543/543 [01:23<00:00, 6.46it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 3/120 - Training Loss: 6.7652, Training Accuracy: 0.40%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 4/120: 100%|██████████| 543/543 [01:22<00:00, 6.54it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 4/120 - Training Loss: 6.6984, Training Accuracy: 0.54%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 5/120: 100%|██████████| 543/543 [01:20<00:00, 6.72it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 5/120 - Training Loss: 6.6378, Training Accuracy: 0.64%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 6/120: 100%|██████████| 543/543 [01:19<00:00, 6.81it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 6/120 - Training Loss: 6.5298, Training Accuracy: 0.87%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 7/120: 100%|██████████| 543/543 [01:22<00:00, 6.58it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 7/120 - Training Loss: 6.3680, Training Accuracy: 1.24%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 8/120: 100%|██████████| 543/543 [01:22<00:00, 6.56it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 8/120 - Training Loss: 6.2427, Training Accuracy: 1.56%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 9/120: 100%|██████████| 543/543 [01:20<00:00, 6.76it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 9/120 - Training Loss: 6.1240, Training Accuracy: 1.94%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 10/120: 100%|██████████| 543/543 [01:20<00:00, 6.76it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 10/120 - Training Loss: 6.0081, Training Accuracy: 2.43%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 11/120: 100%|██████████| 543/543 [01:20<00:00, 6.72it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 11/120 - Training Loss: 5.8858, Training Accuracy: 2.87%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 12/120: 100%|██████████| 543/543 [01:19<00:00, 6.80it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 12/120 - Training Loss: 5.7630, Training Accuracy: 3.44%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 13/120: 100%|██████████| 543/543 [01:21<00:00, 6.63it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 13/120 - Training Loss: 5.6355, Training Accuracy: 4.35%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 14/120: 100%|██████████| 543/543 [01:21<00:00, 6.69it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 14/120 - Training Loss: 5.5218, Training Accuracy: 4.92%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 15/120: 100%|██████████| 543/543 [01:19<00:00, 6.83it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 15/120 - Training Loss: 5.4045, Training Accuracy: 5.76%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 16/120: 100%|██████████| 543/543 [01:23<00:00, 6.47it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 16/120 - Training Loss: 5.2993, Training Accuracy: 6.64%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 17/120: 100%|██████████| 543/543 [01:22<00:00, 6.54it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 17/120 - Training Loss: 5.1903, Training Accuracy: 7.39%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 18/120: 100%|██████████| 543/543 [01:20<00:00, 6.77it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 18/120 - Training Loss: 5.0770, Training Accuracy: 8.36%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 19/120: 100%|██████████| 543/543 [01:24<00:00, 6.45it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 19/120 - Training Loss: 4.9620, Training Accuracy: 9.24%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 20/120: 100%|██████████| 543/543 [01:27<00:00, 6.22it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 20/120 - Training Loss: 4.8556, Training Accuracy: 10.39%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 21/120: 100%|██████████| 543/543 [01:24<00:00, 6.39it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 21/120 - Training Loss: 4.7491, Training Accuracy: 11.45%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 22/120: 100%|██████████| 543/543 [01:21<00:00, 6.63it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 22/120 - Training Loss: 4.6427, Training Accuracy: 12.27%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 23/120: 100%|██████████| 543/543 [01:19<00:00, 6.79it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 23/120 - Training Loss: 4.5412, Training Accuracy: 13.50%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 24/120: 100%|██████████| 543/543 [01:19<00:00, 6.79it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 24/120 - Training Loss: 4.4443, Training Accuracy: 14.14%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 25/120: 100%|██████████| 543/543 [01:22<00:00, 6.57it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 25/120 - Training Loss: 4.3367, Training Accuracy: 15.75%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 26/120: 100%|██████████| 543/543 [01:24<00:00, 6.46it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 26/120 - Training Loss: 4.2294, Training Accuracy: 16.76%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 27/120: 100%|██████████| 543/543 [01:21<00:00, 6.65it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 27/120 - Training Loss: 4.1470, Training Accuracy: 17.71%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 28/120: 100%|██████████| 543/543 [01:22<00:00, 6.58it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 28/120 - Training Loss: 4.0461, Training Accuracy: 18.78%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 29/120: 100%|██████████| 543/543 [01:21<00:00, 6.70it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 29/120 - Training Loss: 3.9508, Training Accuracy: 20.00%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 30/120: 100%|██████████| 543/543 [01:22<00:00, 6.61it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 30/120 - Training Loss: 3.8641, Training Accuracy: 21.00%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 31/120: 100%|██████████| 543/543 [01:19<00:00, 6.81it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 31/120 - Training Loss: 3.7645, Training Accuracy: 22.42%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 32/120: 100%|██████████| 543/543 [01:26<00:00, 6.25it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 32/120 - Training Loss: 3.6751, Training Accuracy: 23.74%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 33/120: 100%|██████████| 543/543 [01:20<00:00, 6.75it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 33/120 - Training Loss: 3.5752, Training Accuracy: 24.70%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 34/120: 100%|██████████| 543/543 [01:22<00:00, 6.57it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 34/120 - Training Loss: 3.4737, Training Accuracy: 26.52%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 35/120: 100%|██████████| 543/543 [01:21<00:00, 6.63it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 35/120 - Training Loss: 3.3980, Training Accuracy: 27.30%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 36/120: 100%|██████████| 543/543 [01:20<00:00, 6.73it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 36/120 - Training Loss: 3.2857, Training Accuracy: 28.90%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 37/120: 100%|██████████| 543/543 [01:22<00:00, 6.55it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 37/120 - Training Loss: 3.2040, Training Accuracy: 30.42%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 38/120: 100%|██████████| 543/543 [01:19<00:00, 6.79it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 38/120 - Training Loss: 3.1034, Training Accuracy: 31.92%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 39/120: 100%|██████████| 543/543 [01:23<00:00, 6.47it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 39/120 - Training Loss: 3.0145, Training Accuracy: 33.11%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 40/120: 100%|██████████| 543/543 [01:19<00:00, 6.79it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 40/120 - Training Loss: 2.9228, Training Accuracy: 34.47%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 41/120: 100%|██████████| 543/543 [01:22<00:00, 6.58it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 41/120 - Training Loss: 2.8409, Training Accuracy: 35.72%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 42/120: 100%|██████████| 543/543 [01:21<00:00, 6.67it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 42/120 - Training Loss: 2.7390, Training Accuracy: 37.39%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 43/120: 100%|██████████| 543/543 [01:19<00:00, 6.82it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 43/120 - Training Loss: 2.6607, Training Accuracy: 39.06%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 44/120: 100%|██████████| 543/543 [01:21<00:00, 6.68it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 44/120 - Training Loss: 2.5639, Training Accuracy: 40.69%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 45/120: 100%|██████████| 543/543 [01:20<00:00, 6.72it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 45/120 - Training Loss: 2.4654, Training Accuracy: 42.27%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 46/120: 100%|██████████| 543/543 [01:20<00:00, 6.74it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 46/120 - Training Loss: 2.3758, Training Accuracy: 44.00%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 47/120: 100%|██████████| 543/543 [01:21<00:00, 6.68it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 47/120 - Training Loss: 2.2870, Training Accuracy: 45.50%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 48/120: 100%|██████████| 543/543 [01:24<00:00, 6.46it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 48/120 - Training Loss: 2.2153, Training Accuracy: 46.72%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 49/120: 100%|██████████| 543/543 [01:21<00:00, 6.68it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 49/120 - Training Loss: 2.1197, Training Accuracy: 48.34%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 50/120: 100%|██████████| 543/543 [01:21<00:00, 6.64it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 50/120 - Training Loss: 2.0399, Training Accuracy: 50.16%\n","Checkpoint saved at epoch 50 with accuracy 50.16% at checkpoints/epoch_050_bs64.pth\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 51/120: 100%|██████████| 543/543 [01:20<00:00, 6.71it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 51/120 - Training Loss: 1.9467, Training Accuracy: 52.02%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 52/120: 100%|██████████| 543/543 [01:22<00:00, 6.56it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 52/120 - Training Loss: 1.8707, Training Accuracy: 53.72%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 53/120: 100%|██████████| 543/543 [01:23<00:00, 6.49it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 53/120 - Training Loss: 1.7942, Training Accuracy: 54.64%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 54/120: 100%|██████████| 543/543 [01:23<00:00, 6.54it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 54/120 - Training Loss: 1.7012, Training Accuracy: 56.85%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 55/120: 100%|██████████| 543/543 [01:21<00:00, 6.66it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 55/120 - Training Loss: 1.6348, Training Accuracy: 58.01%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 56/120: 100%|██████████| 543/543 [01:23<00:00, 6.47it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 56/120 - Training Loss: 1.5495, Training Accuracy: 59.72%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 57/120: 100%|██████████| 543/543 [01:23<00:00, 6.49it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 57/120 - Training Loss: 1.4835, Training Accuracy: 61.06%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 58/120: 100%|██████████| 543/543 [01:21<00:00, 6.70it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 58/120 - Training Loss: 1.4047, Training Accuracy: 63.16%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 59/120: 100%|██████████| 543/543 [01:20<00:00, 6.75it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 59/120 - Training Loss: 1.3414, Training Accuracy: 64.55%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 60/120: 100%|██████████| 543/543 [01:26<00:00, 6.27it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 60/120 - Training Loss: 1.2781, Training Accuracy: 66.10%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 61/120: 100%|██████████| 543/543 [01:26<00:00, 6.31it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 61/120 - Training Loss: 1.2124, Training Accuracy: 67.54%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 62/120: 100%|██████████| 543/543 [01:20<00:00, 6.75it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 62/120 - Training Loss: 1.1647, Training Accuracy: 68.55%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 63/120: 100%|██████████| 543/543 [01:19<00:00, 6.79it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 63/120 - Training Loss: 1.0914, Training Accuracy: 70.17%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 64/120: 100%|██████████| 543/543 [01:24<00:00, 6.43it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 64/120 - Training Loss: 1.0561, Training Accuracy: 71.38%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 65/120: 100%|██████████| 543/543 [01:20<00:00, 6.72it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 65/120 - Training Loss: 0.9971, Training Accuracy: 72.66%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 66/120: 100%|██████████| 543/543 [01:23<00:00, 6.52it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 66/120 - Training Loss: 0.9487, Training Accuracy: 73.94%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 67/120: 100%|██████████| 543/543 [01:20<00:00, 6.78it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 67/120 - Training Loss: 0.8944, Training Accuracy: 75.18%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 68/120: 100%|██████████| 543/543 [01:22<00:00, 6.60it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 68/120 - Training Loss: 0.8484, Training Accuracy: 76.50%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 69/120: 100%|██████████| 543/543 [01:20<00:00, 6.77it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 69/120 - Training Loss: 0.8090, Training Accuracy: 77.20%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 70/120: 100%|██████████| 543/543 [01:22<00:00, 6.54it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 70/120 - Training Loss: 0.7662, Training Accuracy: 78.25%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 71/120: 100%|██████████| 543/543 [01:21<00:00, 6.66it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 71/120 - Training Loss: 0.7242, Training Accuracy: 79.15%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 72/120: 100%|██████████| 543/543 [01:22<00:00, 6.61it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 72/120 - Training Loss: 0.6986, Training Accuracy: 80.34%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 73/120: 100%|██████████| 543/543 [01:22<00:00, 6.58it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 73/120 - Training Loss: 0.6341, Training Accuracy: 81.79%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 74/120: 100%|██████████| 543/543 [01:21<00:00, 6.70it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 74/120 - Training Loss: 0.6147, Training Accuracy: 82.39%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 75/120: 100%|██████████| 543/543 [01:24<00:00, 6.46it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 75/120 - Training Loss: 0.5838, Training Accuracy: 83.23%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 76/120: 100%|██████████| 543/543 [01:20<00:00, 6.71it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 76/120 - Training Loss: 0.5637, Training Accuracy: 83.81%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 77/120: 100%|██████████| 543/543 [01:24<00:00, 6.46it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 77/120 - Training Loss: 0.5262, Training Accuracy: 84.75%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 78/120: 100%|██████████| 543/543 [01:20<00:00, 6.71it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 78/120 - Training Loss: 0.4961, Training Accuracy: 85.69%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 79/120: 100%|██████████| 543/543 [01:20<00:00, 6.76it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 79/120 - Training Loss: 0.4731, Training Accuracy: 86.31%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 80/120: 100%|██████████| 543/543 [01:21<00:00, 6.69it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 80/120 - Training Loss: 0.4580, Training Accuracy: 86.50%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 81/120: 100%|██████████| 543/543 [01:21<00:00, 6.67it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 81/120 - Training Loss: 0.4209, Training Accuracy: 87.87%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 82/120: 100%|██████████| 543/543 [01:23<00:00, 6.52it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 82/120 - Training Loss: 0.4134, Training Accuracy: 87.86%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 83/120: 100%|██████████| 543/543 [01:25<00:00, 6.35it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 83/120 - Training Loss: 0.3862, Training Accuracy: 88.73%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 84/120: 100%|██████████| 543/543 [01:20<00:00, 6.76it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 84/120 - Training Loss: 0.3804, Training Accuracy: 88.62%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 85/120: 100%|██████████| 543/543 [01:24<00:00, 6.46it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 85/120 - Training Loss: 0.3539, Training Accuracy: 89.62%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 86/120: 100%|██████████| 543/543 [01:20<00:00, 6.71it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 86/120 - Training Loss: 0.3228, Training Accuracy: 90.66%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 87/120: 100%|██████████| 543/543 [01:22<00:00, 6.59it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 87/120 - Training Loss: 0.3090, Training Accuracy: 90.95%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 88/120: 100%|██████████| 543/543 [01:21<00:00, 6.62it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 88/120 - Training Loss: 0.3039, Training Accuracy: 90.93%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 89/120: 100%|██████████| 543/543 [01:20<00:00, 6.72it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 89/120 - Training Loss: 0.2926, Training Accuracy: 91.36%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 90/120: 100%|██████████| 543/543 [01:25<00:00, 6.37it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 90/120 - Training Loss: 0.2714, Training Accuracy: 92.01%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 91/120: 100%|██████████| 543/543 [01:20<00:00, 6.74it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 91/120 - Training Loss: 0.2554, Training Accuracy: 92.41%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 92/120: 100%|██████████| 543/543 [01:23<00:00, 6.50it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 92/120 - Training Loss: 0.2532, Training Accuracy: 92.42%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 93/120: 100%|██████████| 543/543 [01:25<00:00, 6.36it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 93/120 - Training Loss: 0.2381, Training Accuracy: 93.09%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 94/120: 100%|██████████| 543/543 [01:20<00:00, 6.71it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 94/120 - Training Loss: 0.2254, Training Accuracy: 93.48%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 95/120: 100%|██████████| 543/543 [01:22<00:00, 6.58it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 95/120 - Training Loss: 0.2137, Training Accuracy: 93.89%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 96/120: 100%|██████████| 543/543 [01:24<00:00, 6.40it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 96/120 - Training Loss: 0.2034, Training Accuracy: 94.17%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 97/120: 100%|██████████| 543/543 [01:21<00:00, 6.64it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 97/120 - Training Loss: 0.1883, Training Accuracy: 94.41%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 98/120: 100%|██████████| 543/543 [01:23<00:00, 6.53it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 98/120 - Training Loss: 0.1871, Training Accuracy: 94.47%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 99/120: 100%|██████████| 543/543 [01:24<00:00, 6.46it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 99/120 - Training Loss: 0.1745, Training Accuracy: 94.81%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 100/120: 100%|██████████| 543/543 [01:20<00:00, 6.72it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 100/120 - Training Loss: 0.1620, Training Accuracy: 95.20%\n","Checkpoint saved at epoch 100 with accuracy 95.20% at checkpoints/epoch_095_bs64.pth\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 101/120: 100%|██████████| 543/543 [01:27<00:00, 6.21it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 101/120 - Training Loss: 0.1602, Training Accuracy: 95.29%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 102/120: 100%|██████████| 543/543 [01:27<00:00, 6.18it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 102/120 - Training Loss: 0.1438, Training Accuracy: 95.83%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 103/120: 100%|██████████| 543/543 [01:21<00:00, 6.69it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 103/120 - Training Loss: 0.1441, Training Accuracy: 95.70%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 104/120: 100%|██████████| 543/543 [01:20<00:00, 6.74it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 104/120 - Training Loss: 0.1335, Training Accuracy: 96.16%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 105/120: 100%|██████████| 543/543 [01:20<00:00, 6.73it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 105/120 - Training Loss: 0.1335, Training Accuracy: 96.07%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 106/120: 100%|██████████| 543/543 [01:23<00:00, 6.51it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 106/120 - Training Loss: 0.1210, Training Accuracy: 96.53%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 107/120: 100%|██████████| 543/543 [01:22<00:00, 6.55it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 107/120 - Training Loss: 0.1130, Training Accuracy: 96.80%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 108/120: 100%|██████████| 543/543 [01:24<00:00, 6.44it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 108/120 - Training Loss: 0.1127, Training Accuracy: 96.69%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 109/120: 100%|██████████| 543/543 [01:21<00:00, 6.64it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 109/120 - Training Loss: 0.1010, Training Accuracy: 97.17%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 110/120: 100%|██████████| 543/543 [01:21<00:00, 6.66it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 110/120 - Training Loss: 0.0990, Training Accuracy: 97.19%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 111/120: 100%|██████████| 543/543 [01:20<00:00, 6.76it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 111/120 - Training Loss: 0.0912, Training Accuracy: 97.46%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 112/120: 100%|██████████| 543/543 [01:23<00:00, 6.49it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 112/120 - Training Loss: 0.0901, Training Accuracy: 97.46%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 113/120: 100%|██████████| 543/543 [01:21<00:00, 6.70it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 113/120 - Training Loss: 0.0827, Training Accuracy: 97.73%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 114/120: 100%|██████████| 543/543 [01:22<00:00, 6.58it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 114/120 - Training Loss: 0.0796, Training Accuracy: 97.82%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 115/120: 100%|██████████| 543/543 [01:20<00:00, 6.74it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 115/120 - Training Loss: 0.0738, Training Accuracy: 98.07%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 116/120: 100%|██████████| 543/543 [01:19<00:00, 6.82it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 116/120 - Training Loss: 0.0699, Training Accuracy: 98.13%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 117/120: 100%|██████████| 543/543 [01:20<00:00, 6.73it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 117/120 - Training Loss: 0.0656, Training Accuracy: 98.20%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 118/120: 100%|██████████| 543/543 [01:25<00:00, 6.33it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 118/120 - Training Loss: 0.0648, Training Accuracy: 98.20%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 119/120: 100%|██████████| 543/543 [01:21<00:00, 6.70it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 119/120 - Training Loss: 0.0649, Training Accuracy: 98.26%\n"]},{"name":"stderr","output_type":"stream","text":["Epoch 120/120: 100%|██████████| 543/543 [01:22<00:00, 6.57it/s]\n"]},{"name":"stdout","output_type":"stream","text":["Epoch 120/120 - Training Loss: 0.0624, Training Accuracy: 98.22%\n","Validating the model on unseen data after training...\n"]},{"name":"stderr","output_type":"stream","text":["Validating: 100%|██████████| 62/62 [00:12<00:00, 4.92it/s]"]},{"name":"stdout","output_type":"stream","text":["Validation Loss: 12.1100, Validation Accuracy: 13.36%\n","Per-class Accuracy:\n","n01440764: 0.00%\n","n01443537: 33.33%\n","n01484850: 20.00%\n","n01491361: 0.00%\n","n01494475: 20.00%\n","n01496331: 0.00%\n","n01498041: 33.33%\n","n01514668: 0.00%\n","n01514859: 0.00%\n","n01518878: 0.00%\n","n01530575: 25.00%\n","n01531178: 0.00%\n","n01532829: 0.00%\n","n01534433: 25.00%\n","n01537544: 0.00%\n","n01558993: 0.00%\n","n01560419: 66.67%\n","n01580077: 0.00%\n","n01582220: 50.00%\n","n01592084: 0.00%\n","n01601694: 0.00%\n","n01608432: 25.00%\n","n01614925: 33.33%\n","n01616318: 0.00%\n","n01622779: 50.00%\n","n01629819: 0.00%\n","n01630670: 0.00%\n","n01631663: 25.00%\n","n01632458: 0.00%\n","n01632777: 50.00%\n","n01641577: 33.33%\n","n01644373: 25.00%\n","n01644900: 0.00%\n","n01664065: 0.00%\n","n01665541: 33.33%\n","n01667114: 0.00%\n","n01667778: 0.00%\n","n01669191: 0.00%\n","n01675722: 0.00%\n","n01677366: 33.33%\n","n01682714: 0.00%\n","n01685808: 0.00%\n","n01687978: 0.00%\n","n01688243: 0.00%\n","n01689811: 0.00%\n","n01692333: 0.00%\n","n01693334: 0.00%\n","n01694178: 0.00%\n","n01695060: 0.00%\n","n01697457: 0.00%\n","n01698640: 0.00%\n","n01704323: 25.00%\n","n01728572: 0.00%\n","n01728920: 25.00%\n","n01729322: 0.00%\n","n01729977: 0.00%\n","n01734418: 0.00%\n","n01735189: 50.00%\n","n01737021: 0.00%\n","n01739381: 33.33%\n","n01740131: 0.00%\n","n01742172: 0.00%\n","n01744401: 0.00%\n","n01748264: 0.00%\n","n01749939: 25.00%\n","n01751748: 0.00%\n","n01753488: 33.33%\n","n01755581: 0.00%\n","n01756291: 0.00%\n","n01768244: 0.00%\n","n01770081: 50.00%\n","n01770393: 100.00%\n","n01773157: 60.00%\n","n01773549: 0.00%\n","n01773797: 25.00%\n","n01774384: 0.00%\n","n01774750: 0.00%\n","n01775062: 0.00%\n","n01776313: 9.09%\n","n01784675: 0.00%\n","n01795545: 0.00%\n","n01796340: 0.00%\n","n01797886: 0.00%\n","n01798484: 0.00%\n","n01806143: 0.00%\n","n01806567: 33.33%\n","n01807496: 0.00%\n","n01817953: 0.00%\n","n01818515: 33.33%\n","n01819313: 50.00%\n","n01820546: 50.00%\n","n01824575: 0.00%\n","n01828970: 25.00%\n","n01829413: 0.00%\n","n01833805: 50.00%\n","n01843065: 50.00%\n","n01843383: 0.00%\n","n01847000: 0.00%\n","n01855032: 25.00%\n","n01855672: 33.33%\n","n01860187: 25.00%\n","n01871265: 0.00%\n","n01872401: 100.00%\n","n01873310: 0.00%\n","n01877812: 0.00%\n","n01882714: 66.67%\n","n01883070: 0.00%\n","n01910747: 20.00%\n","n01914609: 0.00%\n","n01917289: 0.00%\n","n01924916: 0.00%\n","n01930112: 28.57%\n","n01943899: 0.00%\n","n01944390: 0.00%\n","n01945685: 0.00%\n","n01950731: 0.00%\n","n01955084: 0.00%\n","n01968897: 33.33%\n","n01978287: 0.00%\n","n01978455: 0.00%\n","n01980166: 0.00%\n","n01981276: 0.00%\n","n01983481: 0.00%\n","n01984695: 0.00%\n","n01985128: 0.00%\n","n01986214: 0.00%\n","n01990800: 0.00%\n","n02002556: 0.00%\n","n02002724: 40.00%\n","n02006656: 50.00%\n","n02007558: 0.00%\n","n02009229: 25.00%\n","n02009912: 0.00%\n","n02011460: 0.00%\n","n02012849: 0.00%\n","n02013706: 0.00%\n","n02017213: 0.00%\n","n02018207: 0.00%\n","n02018795: 33.33%\n","n02025239: 33.33%\n","n02027492: 0.00%\n","n02028035: 33.33%\n","n02033041: 0.00%\n","n02037110: 0.00%\n","n02051845: 0.00%\n","n02056570: 0.00%\n","n02058221: 0.00%\n","n02066245: 25.00%\n","n02071294: 0.00%\n","n02074367: 50.00%\n","n02077923: 33.33%\n","n02085620: 0.00%\n","n02085782: 80.00%\n","n02085936: 50.00%\n","n02086079: 50.00%\n","n02086240: 0.00%\n","n02086646: 66.67%\n","n02086910: 0.00%\n","n02087046: 20.00%\n","n02087394: 0.00%\n","n02088094: 25.00%\n","n02088238: 33.33%\n","n02088364: 0.00%\n","n02088466: 20.00%\n","n02088632: 0.00%\n","n02089078: 0.00%\n","n02089867: 0.00%\n","n02089973: 33.33%\n","n02090379: 0.00%\n","n02090622: 0.00%\n","n02090721: 0.00%\n","n02091032: 0.00%\n","n02091134: 33.33%\n","n02091244: 25.00%\n","n02091467: 33.33%\n","n02091635: 0.00%\n","n02091831: 0.00%\n","n02092002: 33.33%\n","n02092339: 0.00%\n","n02093256: 0.00%\n","n02093428: 0.00%\n","n02093647: 0.00%\n","n02093754: 0.00%\n","n02093859: 0.00%\n","n02093991: 25.00%\n","n02094114: 0.00%\n","n02094258: 0.00%\n","n02094433: 33.33%\n","n02095314: 33.33%\n","n02095570: 0.00%\n","n02095889: 50.00%\n","n02096051: 33.33%\n","n02096177: 0.00%\n","n02096294: 33.33%\n","n02096437: 42.86%\n","n02096585: 0.00%\n","n02097047: 0.00%\n","n02097130: 0.00%\n","n02097209: 25.00%\n","n02097298: 0.00%\n","n02097474: 33.33%\n","n02097658: 0.00%\n","n02098105: 50.00%\n","n02098286: 0.00%\n","n02098413: 40.00%\n","n02099267: 33.33%\n","n02099429: 25.00%\n","n02099601: 0.00%\n","n02099712: 0.00%\n","n02099849: 0.00%\n","n02100236: 50.00%\n","n02100583: 0.00%\n","n02100735: 0.00%\n","n02100877: 0.00%\n","n02101006: 0.00%\n","n02101388: 0.00%\n","n02101556: 0.00%\n","n02102040: 0.00%\n","n02102177: 33.33%\n","n02102318: 0.00%\n","n02102480: 0.00%\n","n02102973: 20.00%\n","n02104029: 0.00%\n","n02104365: 0.00%\n","n02105056: 0.00%\n","n02105162: 0.00%\n","n02105251: 0.00%\n","n02105412: 0.00%\n","n02105505: 50.00%\n","n02105641: 16.67%\n","n02105855: 0.00%\n","n02106030: 33.33%\n","n02106166: 33.33%\n","n02106382: 0.00%\n","n02106550: 33.33%\n","n02106662: 0.00%\n","n02107142: 0.00%\n","n02107312: 0.00%\n","n02107574: 0.00%\n","n02107683: 33.33%\n","n02107908: 33.33%\n","n02108000: 0.00%\n","n02108089: 0.00%\n","n02108422: 33.33%\n","n02108551: 33.33%\n","n02108915: 0.00%\n","n02109047: 0.00%\n","n02109525: 0.00%\n","n02109961: 25.00%\n","n02110063: 0.00%\n","n02110185: 25.00%\n","n02110341: 0.00%\n","n02110627: 40.00%\n","n02110806: 0.00%\n","n02110958: 0.00%\n","n02111129: 66.67%\n","n02111277: 33.33%\n","n02111500: 40.00%\n","n02111889: 0.00%\n","n02112018: 33.33%\n","n02112137: 0.00%\n","n02112350: 33.33%\n","n02112706: 25.00%\n","n02113023: 25.00%\n","n02113186: 0.00%\n","n02113624: 0.00%\n","n02113712: 0.00%\n","n02113799: 0.00%\n","n02113978: 33.33%\n","n02114367: 0.00%\n","n02114548: 50.00%\n","n02114712: 25.00%\n","n02114855: 33.33%\n","n02115641: 0.00%\n","n02115913: 0.00%\n","n02116738: 66.67%\n","n02117135: 16.67%\n","n02119022: 12.50%\n","n02119789: 0.00%\n","n02120079: 20.00%\n","n02120505: 33.33%\n","n02123045: 33.33%\n","n02123159: 0.00%\n","n02123394: 12.50%\n","n02123597: 0.00%\n","n02124075: 0.00%\n","n02125311: 0.00%\n","n02127052: 25.00%\n","n02128385: 40.00%\n","n02128757: 40.00%\n","n02128925: 40.00%\n","n02129165: 16.67%\n","n02129604: 12.50%\n","n02130308: 66.67%\n","n02132136: 0.00%\n","n02133161: 20.00%\n","n02134084: 14.29%\n","n02134418: 33.33%\n","n02137549: 0.00%\n","n02138441: 0.00%\n","n02165105: 0.00%\n","n02165456: 40.00%\n","n02167151: 25.00%\n","n02168699: 0.00%\n","n02169497: 25.00%\n","n02172182: 0.00%\n","n02174001: 0.00%\n","n02177972: 0.00%\n","n02190166: 0.00%\n","n02206856: 0.00%\n","n02219486: 16.67%\n","n02226429: 33.33%\n","n02229544: 66.67%\n","n02231487: 0.00%\n","n02233338: 0.00%\n","n02236044: 0.00%\n","n02256656: 0.00%\n","n02259212: 33.33%\n","n02264363: 25.00%\n","n02268443: 0.00%\n","n02268853: 25.00%\n","n02276258: 33.33%\n","n02277742: 75.00%\n","n02279972: 75.00%\n","n02280649: 33.33%\n","n02281406: 75.00%\n","n02281787: 20.00%\n","n02317335: 0.00%\n","n02319095: 0.00%\n","n02321529: 0.00%\n","n02325366: 50.00%\n","n02326432: 0.00%\n","n02328150: 25.00%\n","n02342885: 75.00%\n","n02346627: 33.33%\n","n02356798: 0.00%\n","n02361337: 0.00%\n","n02363005: 66.67%\n","n02364673: 0.00%\n","n02389026: 0.00%\n","n02391049: 33.33%\n","n02395406: 0.00%\n","n02396427: 0.00%\n","n02397096: 0.00%\n","n02398521: 0.00%\n","n02403003: 33.33%\n","n02408429: 0.00%\n","n02410509: 0.00%\n","n02412080: 0.00%\n","n02415577: 33.33%\n","n02417914: 0.00%\n","n02422106: 66.67%\n","n02422699: 0.00%\n","n02423022: 50.00%\n","n02437312: 0.00%\n","n02437616: 0.00%\n","n02441942: 0.00%\n","n02442845: 0.00%\n","n02443114: 0.00%\n","n02443484: 0.00%\n","n02444819: 0.00%\n","n02445715: 50.00%\n","n02447366: 55.56%\n","n02454379: 33.33%\n","n02457408: 50.00%\n","n02480495: 33.33%\n","n02480855: 0.00%\n","n02481823: 0.00%\n","n02483362: 0.00%\n","n02483708: 0.00%\n","n02484975: 0.00%\n","n02486261: 0.00%\n","n02486410: 0.00%\n","n02487347: 0.00%\n","n02488291: 0.00%\n","n02488702: 0.00%\n","n02489166: 33.33%\n","n02490219: 0.00%\n","n02492035: 25.00%\n","n02492660: 50.00%\n","n02493509: 0.00%\n","n02493793: 0.00%\n","n02494079: 0.00%\n","n02497673: 0.00%\n","n02500267: 50.00%\n","n02504013: 0.00%\n","n02504458: 60.00%\n","n02509815: 0.00%\n","n02510455: 66.67%\n","n02514041: 33.33%\n","n02526121: 25.00%\n","n02536864: 0.00%\n","n02606052: 50.00%\n","n02607072: 0.00%\n","n02640242: 0.00%\n","n02641379: 0.00%\n","n02643566: 0.00%\n","n02655020: 0.00%\n","n02666196: 0.00%\n","n02667093: 0.00%\n","n02669723: 0.00%\n","n02672831: 0.00%\n","n02676566: 0.00%\n","n02687172: 0.00%\n","n02690373: 40.00%\n","n02692877: 0.00%\n","n02699494: 0.00%\n","n02701002: 66.67%\n","n02704792: 0.00%\n","n02708093: 12.50%\n","n02727426: 0.00%\n","n02730930: 0.00%\n","n02747177: 0.00%\n","n02749479: 0.00%\n","n02769748: 0.00%\n","n02776631: 33.33%\n","n02777292: 0.00%\n","n02782093: 0.00%\n","n02783161: 0.00%\n","n02786058: 14.29%\n","n02787622: 0.00%\n","n02788148: 0.00%\n","n02790996: 16.67%\n","n02791124: 0.00%\n","n02791270: 0.00%\n","n02793495: 25.00%\n","n02794156: 25.00%\n","n02795169: 0.00%\n","n02797295: 0.00%\n","n02799071: 33.33%\n","n02802426: 25.00%\n","n02804414: 0.00%\n","n02804610: 0.00%\n","n02807133: 0.00%\n","n02808304: 0.00%\n","n02808440: 0.00%\n","n02814533: 0.00%\n","n02814860: 25.00%\n","n02815834: 0.00%\n","n02817516: 25.00%\n","n02823428: 25.00%\n","n02823750: 0.00%\n","n02825657: 0.00%\n","n02834397: 25.00%\n","n02835271: 0.00%\n","n02837789: 0.00%\n","n02840245: 12.50%\n","n02841315: 20.00%\n","n02843684: 0.00%\n","n02859443: 0.00%\n","n02860847: 0.00%\n","n02865351: 0.00%\n","n02869837: 0.00%\n","n02870880: 0.00%\n","n02871525: 0.00%\n","n02877765: 25.00%\n","n02879718: 66.67%\n","n02883205: 20.00%\n","n02892201: 50.00%\n","n02892767: 0.00%\n","n02894605: 0.00%\n","n02895154: 0.00%\n","n02906734: 0.00%\n","n02909870: 0.00%\n","n02910353: 25.00%\n","n02916936: 0.00%\n","n02917067: 66.67%\n","n02927161: 25.00%\n","n02930766: 33.33%\n","n02939185: 0.00%\n","n02948072: 25.00%\n","n02950826: 0.00%\n","n02951358: 0.00%\n","n02951585: 0.00%\n","n02963159: 50.00%\n","n02965783: 25.00%\n","n02966193: 0.00%\n","n02966687: 0.00%\n","n02971356: 33.33%\n","n02974003: 25.00%\n","n02977058: 25.00%\n","n02978881: 0.00%\n","n02979186: 16.67%\n","n02980441: 50.00%\n","n02981792: 0.00%\n","n02988304: 0.00%\n","n02992211: 0.00%\n","n02992529: 12.50%\n","n02999410: 0.00%\n","n03000134: 0.00%\n","n03000247: 0.00%\n","n03000684: 14.29%\n","n03014705: 0.00%\n","n03016953: 0.00%\n","n03017168: 0.00%\n","n03018349: 0.00%\n","n03026506: 0.00%\n","n03028079: 0.00%\n","n03032252: 0.00%\n","n03041632: 0.00%\n","n03042490: 0.00%\n","n03045698: 0.00%\n","n03047690: 0.00%\n","n03062245: 0.00%\n","n03063599: 0.00%\n","n03063689: 0.00%\n","n03065424: 0.00%\n","n03075370: 0.00%\n","n03085013: 0.00%\n","n03089624: 0.00%\n","n03095699: 0.00%\n","n03100240: 25.00%\n","n03109150: 27.27%\n","n03110669: 0.00%\n","n03124043: 0.00%\n","n03124170: 0.00%\n","n03125729: 0.00%\n","n03126707: 25.00%\n","n03127747: 20.00%\n","n03127925: 0.00%\n","n03131574: 33.33%\n","n03133878: 0.00%\n","n03134739: 0.00%\n","n03141823: 0.00%\n","n03146219: 0.00%\n","n03160309: 50.00%\n","n03179701: 0.00%\n","n03180011: 0.00%\n","n03187595: 0.00%\n","n03188531: 50.00%\n","n03196217: 16.67%\n","n03197337: 25.00%\n","n03201208: 50.00%\n","n03207743: 0.00%\n","n03207941: 0.00%\n","n03208938: 25.00%\n","n03216828: 25.00%\n","n03218198: 0.00%\n","n03220513: 0.00%\n","n03223299: 0.00%\n","n03240683: 20.00%\n","n03249569: 0.00%\n","n03250847: 25.00%\n","n03255030: 0.00%\n","n03259280: 0.00%\n","n03271574: 20.00%\n","n03272010: 0.00%\n","n03272562: 50.00%\n","n03290653: 44.44%\n","n03291819: 0.00%\n","n03297495: 28.57%\n","n03314780: 9.09%\n","n03325584: 0.00%\n","n03337140: 0.00%\n","n03344393: 50.00%\n","n03345487: 33.33%\n","n03347037: 0.00%\n","n03355925: 40.00%\n","n03372029: 0.00%\n","n03376595: 0.00%\n","n03379051: 0.00%\n","n03384352: 33.33%\n","n03388043: 0.00%\n","n03388183: 50.00%\n","n03388549: 0.00%\n","n03393912: 33.33%\n","n03394916: 33.33%\n","n03400231: 0.00%\n","n03404251: 0.00%\n","n03417042: 0.00%\n","n03424325: 0.00%\n","n03425413: 25.00%\n","n03443371: 0.00%\n","n03444034: 33.33%\n","n03445777: 25.00%\n","n03445924: 0.00%\n","n03447447: 0.00%\n","n03447721: 14.29%\n","n03450230: 0.00%\n","n03452741: 66.67%\n","n03457902: 50.00%\n","n03459775: 50.00%\n","n03461385: 0.00%\n","n03467068: 0.00%\n","n03476684: 0.00%\n","n03476991: 0.00%\n","n03478589: 0.00%\n","n03481172: 0.00%\n","n03482405: 0.00%\n","n03483316: 0.00%\n","n03485407: 0.00%\n","n03485794: 0.00%\n","n03492542: 0.00%\n","n03494278: 0.00%\n","n03495258: 0.00%\n","n03496892: 0.00%\n","n03498962: 0.00%\n","n03527444: 0.00%\n","n03529860: 0.00%\n","n03530642: 0.00%\n","n03532672: 0.00%\n","n03534580: 0.00%\n","n03535780: 0.00%\n","n03538406: 0.00%\n","n03544143: 0.00%\n","n03584254: 33.33%\n","n03584829: 30.00%\n","n03590841: 20.00%\n","n03594734: 0.00%\n","n03594945: 0.00%\n","n03595614: 20.00%\n","n03598930: 0.00%\n","n03599486: 33.33%\n","n03602883: 0.00%\n","n03617480: 0.00%\n","n03623198: 11.11%\n","n03627232: 25.00%\n","n03630383: 42.86%\n","n03633091: 0.00%\n","n03637318: 0.00%\n","n03642806: 0.00%\n","n03649909: 0.00%\n","n03657121: 33.33%\n","n03658185: 0.00%\n","n03661043: 0.00%\n","n03662601: 66.67%\n","n03666591: 0.00%\n","n03670208: 0.00%\n","n03673027: 50.00%\n","n03676483: 0.00%\n","n03680355: 33.33%\n","n03690938: 0.00%\n","n03691459: 33.33%\n","n03692522: 0.00%\n","n03697007: 0.00%\n","n03706229: 0.00%\n","n03709823: 0.00%\n","n03710193: 25.00%\n","n03710637: 25.00%\n","n03710721: 25.00%\n","n03717622: 0.00%\n","n03720891: 0.00%\n","n03721384: 0.00%\n","n03724870: 0.00%\n","n03729826: 25.00%\n","n03733131: 0.00%\n","n03733281: 0.00%\n","n03733805: 25.00%\n","n03742115: 0.00%\n","n03743016: 0.00%\n","n03759954: 0.00%\n","n03761084: 25.00%\n","n03763968: 0.00%\n","n03764736: 0.00%\n","n03769881: 0.00%\n","n03770439: 33.33%\n","n03770679: 25.00%\n","n03773504: 0.00%\n","n03775071: 33.33%\n","n03775546: 33.33%\n","n03776460: 25.00%\n","n03777568: 33.33%\n","n03777754: 25.00%\n","n03781244: 0.00%\n","n03782006: 0.00%\n","n03785016: 33.33%\n","n03786901: 0.00%\n","n03787032: 0.00%\n","n03788195: 0.00%\n","n03788365: 50.00%\n","n03791053: 33.33%\n","n03792782: 25.00%\n","n03792972: 33.33%\n","n03793489: 16.67%\n","n03794056: 0.00%\n","n03796401: 0.00%\n","n03803284: 0.00%\n","n03804744: 0.00%\n","n03814639: 0.00%\n","n03814906: 0.00%\n","n03825788: 12.50%\n","n03832673: 0.00%\n","n03837869: 0.00%\n","n03838899: 11.11%\n","n03840681: 0.00%\n","n03841143: 80.00%\n","n03843555: 0.00%\n","n03854065: 66.67%\n","n03857828: 66.67%\n","n03866082: 0.00%\n","n03868242: 0.00%\n","n03868863: 14.29%\n","n03871628: 0.00%\n","n03873416: 0.00%\n","n03874293: 0.00%\n","n03874599: 0.00%\n","n03876231: 0.00%\n","n03877472: 0.00%\n","n03877845: 33.33%\n","n03884397: 16.67%\n","n03887697: 11.11%\n","n03888257: 40.00%\n","n03888605: 14.29%\n","n03891251: 0.00%\n","n03891332: 0.00%\n","n03895866: 0.00%\n","n03899768: 0.00%\n","n03902125: 33.33%\n","n03903868: 0.00%\n","n03908618: 25.00%\n","n03908714: 0.00%\n","n03916031: 0.00%\n","n03920288: 20.00%\n","n03924679: 55.56%\n","n03929660: 0.00%\n","n03929855: 0.00%\n","n03930313: 33.33%\n","n03930630: 66.67%\n","n03933933: 0.00%\n","n03935335: 25.00%\n","n03937543: 0.00%\n","n03938244: 0.00%\n","n03942813: 33.33%\n","n03944341: 0.00%\n","n03947888: 0.00%\n","n03950228: 0.00%\n","n03954731: 40.00%\n","n03956157: 33.33%\n","n03958227: 0.00%\n","n03961711: 0.00%\n","n03967562: 0.00%\n","n03970156: 0.00%\n","n03976467: 25.00%\n","n03976657: 0.00%\n","n03977966: 100.00%\n","n03980874: 0.00%\n","n03982430: 0.00%\n","n03983396: 0.00%\n","n03991062: 0.00%\n","n03992509: 0.00%\n","n03995372: 25.00%\n","n03998194: 0.00%\n","n04004767: 25.00%\n","n04005630: 0.00%\n","n04008634: 0.00%\n","n04009552: 0.00%\n","n04019541: 50.00%\n","n04023962: 0.00%\n","n04026417: 25.00%\n","n04033901: 0.00%\n","n04033995: 0.00%\n","n04037443: 33.33%\n","n04039381: 0.00%\n","n04040759: 0.00%\n","n04041544: 0.00%\n","n04044716: 40.00%\n","n04049303: 66.67%\n","n04065272: 20.00%\n","n04067472: 0.00%\n","n04069434: 20.00%\n","n04070727: 25.00%\n","n04074963: 0.00%\n","n04081281: 0.00%\n","n04086273: 22.22%\n","n04090263: 0.00%\n","n04099969: 0.00%\n","n04111531: 0.00%\n","n04116512: 0.00%\n","n04118538: 66.67%\n","n04118776: 0.00%\n","n04120489: 0.00%\n","n04125021: 0.00%\n","n04127249: 0.00%\n","n04131690: 20.00%\n","n04133789: 0.00%\n","n04136333: 0.00%\n","n04141076: 25.00%\n","n04141327: 0.00%\n","n04141975: 0.00%\n","n04146614: 33.33%\n","n04147183: 16.67%\n","n04149813: 0.00%\n","n04152593: 20.00%\n","n04153751: 25.00%\n","n04154565: 0.00%\n","n04162706: 0.00%\n","n04179913: 0.00%\n","n04192698: 0.00%\n","n04200800: 0.00%\n","n04201297: 50.00%\n","n04204238: 0.00%\n","n04204347: 0.00%\n","n04208210: 0.00%\n","n04209133: 0.00%\n","n04209239: 0.00%\n","n04228054: 0.00%\n","n04229816: 33.33%\n","n04235860: 0.00%\n","n04238763: 25.00%\n","n04239074: 0.00%\n","n04243546: 0.00%\n","n04251144: 0.00%\n","n04252077: 0.00%\n","n04252225: 25.00%\n","n04254120: 0.00%\n","n04254680: 0.00%\n","n04254777: 0.00%\n","n04258138: 50.00%\n","n04259630: 0.00%\n","n04263257: 33.33%\n","n04264628: 66.67%\n","n04265275: 0.00%\n","n04266014: 25.00%\n","n04270147: 0.00%\n","n04273569: 25.00%\n","n04275548: 0.00%\n","n04277352: 0.00%\n","n04285008: 33.33%\n","n04286575: 0.00%\n","n04296562: 25.00%\n","n04310018: 33.33%\n","n04311004: 25.00%\n","n04311174: 0.00%\n","n04317175: 0.00%\n","n04325704: 0.00%\n","n04326547: 25.00%\n","n04328186: 25.00%\n","n04330267: 0.00%\n","n04332243: 0.00%\n","n04335435: 66.67%\n","n04336792: 0.00%\n","n04344873: 0.00%\n","n04346328: 0.00%\n","n04347754: 25.00%\n","n04350905: 25.00%\n","n04355338: 0.00%\n","n04355933: 0.00%\n","n04356056: 0.00%\n","n04357314: 20.00%\n","n04366367: 0.00%\n","n04367480: 16.67%\n","n04370456: 0.00%\n","n04371430: 0.00%\n","n04371774: 0.00%\n","n04372370: 20.00%\n","n04376876: 0.00%\n","n04380533: 25.00%\n","n04389033: 0.00%\n","n04392985: 0.00%\n","n04398044: 0.00%\n","n04399382: 66.67%\n","n04404412: 0.00%\n","n04409515: 0.00%\n","n04417672: 25.00%\n","n04418357: 0.00%\n","n04423845: 0.00%\n","n04428191: 25.00%\n","n04429376: 0.00%\n","n04435653: 0.00%\n","n04442312: 0.00%\n","n04443257: 33.33%\n","n04447861: 0.00%\n","n04456115: 25.00%\n","n04458633: 33.33%\n","n04461696: 0.00%\n","n04462240: 0.00%\n","n04465501: 0.00%\n","n04467665: 0.00%\n","n04476259: 0.00%\n","n04479046: 0.00%\n","n04482393: 0.00%\n","n04483307: 0.00%\n","n04485082: 0.00%\n","n04486054: 0.00%\n","n04487081: 0.00%\n","n04487394: 0.00%\n","n04493381: 25.00%\n","n04501370: 0.00%\n","n04505470: 25.00%\n","n04507155: 0.00%\n","n04509417: 0.00%\n","n04515003: 20.00%\n","n04517823: 25.00%\n","n04522168: 0.00%\n","n04523525: 33.33%\n","n04525038: 0.00%\n","n04525305: 50.00%\n","n04532106: 0.00%\n","n04532670: 66.67%\n","n04536866: 0.00%\n","n04540053: 0.00%\n","n04542943: 33.33%\n","n04548280: 0.00%\n","n04548362: 0.00%\n","n04550184: 20.00%\n","n04552348: 20.00%\n","n04553703: 16.67%\n","n04554684: 0.00%\n","n04557648: 0.00%\n","n04560804: 0.00%\n","n04562935: 50.00%\n","n04579145: 0.00%\n","n04579432: 23.08%\n","n04584207: 14.29%\n","n04589890: 33.33%\n","n04590129: 0.00%\n","n04591157: 0.00%\n","n04591713: 0.00%\n","n04592741: 20.00%\n","n04596742: 33.33%\n","n04597913: 0.00%\n","n04599235: 0.00%\n","n04604644: 0.00%\n","n04606251: 20.00%\n","n04612504: 0.00%\n","n04613696: 0.00%\n","n06359193: 33.33%\n","n06596364: 0.00%\n","n06785654: 50.00%\n","n06794110: 0.00%\n","n06874185: 0.00%\n","n07248320: 0.00%\n","n07565083: 40.00%\n","n07579787: 0.00%\n","n07583066: 0.00%\n","n07584110: 25.00%\n","n07590611: 0.00%\n","n07613480: 0.00%\n","n07614500: 0.00%\n","n07615774: 0.00%\n","n07684084: 0.00%\n","n07693725: 0.00%\n","n07695742: 33.33%\n","n07697313: 0.00%\n","n07697537: 0.00%\n","n07711569: 100.00%\n","n07714571: 0.00%\n","n07714990: 25.00%\n","n07715103: 20.00%\n","n07716358: 33.33%\n","n07716906: 0.00%\n","n07717410: 0.00%\n","n07717556: 0.00%\n","n07718472: 0.00%\n","n07718747: 0.00%\n","n07720875: 0.00%\n","n07730033: 33.33%\n","n07734744: 0.00%\n","n07742313: 28.57%\n","n07745940: 20.00%\n","n07747607: 50.00%\n","n07749582: 25.00%\n","n07753113: 0.00%\n","n07753275: 0.00%\n","n07753592: 33.33%\n","n07754684: 0.00%\n","n07760859: 0.00%\n","n07768694: 40.00%\n","n07802026: 33.33%\n","n07831146: 0.00%\n","n07836838: 0.00%\n","n07860988: 0.00%\n","n07871810: 0.00%\n","n07873807: 0.00%\n","n07875152: 0.00%\n","n07880968: 0.00%\n","n07892512: 0.00%\n","n07920052: 25.00%\n","n07930864: 50.00%\n","n07932039: 0.00%\n","n09193705: 0.00%\n","n09229709: 0.00%\n","n09246464: 25.00%\n","n09256479: 0.00%\n","n09288635: 75.00%\n","n09332890: 0.00%\n","n09399592: 0.00%\n","n09421951: 25.00%\n","n09428293: 0.00%\n","n09468604: 33.33%\n","n09472597: 0.00%\n","n09835506: 33.33%\n","n10148035: 0.00%\n","n10565667: 25.00%\n","n11879895: 20.00%\n","n11939491: 25.00%\n","n12057211: 75.00%\n","n12144580: 0.00%\n","n12267677: 0.00%\n","n12620546: 25.00%\n","n12768682: 0.00%\n","n12985857: 0.00%\n","n12998815: 50.00%\n","n13037406: 33.33%\n","n13040303: 25.00%\n","n13044778: 33.33%\n","n13052670: 33.33%\n","n13054560: 33.33%\n","n13133613: 50.00%\n","n15075141: 0.00%\n","Model saved as resnet50_imagenet_last_epoch.pth\n"]},{"name":"stderr","output_type":"stream","text":["\n"]}],"source":["from torch.optim.lr_scheduler import OneCycleLR\n","\n","# Define the loss function and optimizer\n","criterion = torch.nn.CrossEntropyLoss()\n","optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n","\n","scheduler = OneCycleLR(\n"," optimizer,\n"," max_lr = best_lr,\n"," steps_per_epoch=len(train_loader),\n"," epochs=num_epochs,\n"," pct_start=5/num_epochs,\n"," div_factor=100,\n"," three_phase=False,\n"," final_div_factor=100,\n"," anneal_strategy='linear'\n",")\n","\n","# Function to evaluate the model\n","def evaluate_model(model, val_loader, criterion):\n"," model.eval()\n"," val_loss = 0.0\n"," correct = 0\n"," total = 0\n"," class_correct = [0] * len(val_dataset.classes)\n"," class_total = [0] * len(val_dataset.classes)\n"," \n"," with torch.no_grad():\n"," for inputs, labels in tqdm(val_loader, desc=\"Validating\"):\n"," inputs, labels = inputs.to(device), labels.to(device)\n"," \n"," outputs = model(inputs)\n"," loss = criterion(outputs, labels)\n"," val_loss += loss.item()\n","\n"," _, predicted = torch.max(outputs, 1)\n"," correct += (predicted == labels).sum().item()\n"," total += labels.size(0)\n","\n"," for i in range(len(labels)):\n"," label = labels[i]\n"," class_correct[label] += (predicted[i] == label).item()\n"," class_total[label] += 1\n","\n"," val_loss /= len(val_loader)\n"," accuracy = 100.0 * correct / total\n"," per_class_accuracy = {\n"," val_dataset.classes[i]: 100.0 * class_correct[i] / class_total[i]\n"," for i in range(len(val_dataset.classes))\n"," if class_total[i] > 0\n"," }\n"," return val_loss, accuracy, per_class_accuracy\n","\n","import os\n","\n","# Directory to save checkpoints\n","checkpoint_dir = \"checkpoints\"\n","os.makedirs(checkpoint_dir, exist_ok=True)\n","\n","# Train the model with checkpoint saving every 50 epochs\n","print(f'Training the model on ImageNet')\n","for epoch in range(num_epochs):\n"," model.train()\n"," running_loss = 0.0\n"," correct = 0\n"," total = 0\n","\n"," for inputs, labels in tqdm(train_loader, desc=f\"Epoch {epoch+1}/{num_epochs}\"):\n"," inputs, labels = inputs.to(device), labels.to(device)\n","\n"," # Zero out the optimizer\n"," optimizer.zero_grad()\n","\n"," # Forward pass\n"," outputs = model(inputs)\n"," loss = criterion(outputs, labels)\n","\n"," # Backward pass\n"," loss.backward()\n"," optimizer.step()\n","\n"," scheduler.step()\n","\n"," running_loss += loss.item()\n","\n"," # Calculate accuracy during training\n"," _, predicted = torch.max(outputs, 1)\n"," correct += (predicted == labels).sum().item()\n"," total += labels.size(0)\n","\n"," # Average loss and accuracy for the epoch\n"," train_loss = running_loss / len(train_loader)\n"," train_accuracy = 100.0 * correct / total\n","\n"," print(f\"Epoch {epoch+1}/{num_epochs} - Training Loss: {train_loss:.4f}, Training Accuracy: {train_accuracy:.2f}%\")\n","\n"," # Save a checkpoint every 50 epochs\n"," if (epoch + 1) % 50 == 0:\n"," # Format accuracy to 3 digits (e.g., 99.9 -> 999, 0.12 -> 012)\n"," acc_str = f\"{int(train_accuracy):03d}\"\n"," checkpoint_path = os.path.join(checkpoint_dir, f\"epoch{epoch}_acc{acc_str}_bs{batch_size}.pth\")\n","\n"," checkpoint = {\n"," 'epoch': epoch + 1, # Current epoch number\n"," 'model_state_dict': model.state_dict(),\n"," 'optimizer_state_dict': optimizer.state_dict(),\n"," 'scheduler_state_dict': scheduler.state_dict(),\n"," 'loss': train_loss\n"," }\n","\n"," torch.save(checkpoint, checkpoint_path)\n"," print(f\"Checkpoint saved at epoch {epoch + 1} with accuracy {train_accuracy:.2f}% at {checkpoint_path}\")\n","\n","# Run validation after all epochs\n","print(f\"Validating the model on unseen data after training...\")\n","val_loss, val_accuracy, per_class_accuracy = evaluate_model(model, val_loader, criterion)\n","print(f\"Validation Loss: {val_loss:.4f}, Validation Accuracy: {val_accuracy:.2f}%\")\n","print(\"Per-class Accuracy:\")\n","for class_name, acc in per_class_accuracy.items():\n"," print(f\"{class_name}: {acc:.2f}%\")\n","\n","# Save the final model at the end of training\n","torch.save(model.state_dict(), f\"resnet50_imagenet_bs{batch_size}_ep{num_epochs}.pth\")\n","print(\"Model saved as resnet50_imagenet_last_epoch.pth\")"]}],"metadata":{"kaggle":{"accelerator":"none","dataSources":[{"databundleVersionId":4225553,"sourceId":6799,"sourceType":"competition"},{"datasetId":547506,"sourceId":998277,"sourceType":"datasetVersion"}],"dockerImageVersionId":30823,"isGpuEnabled":false,"isInternetEnabled":true,"language":"python","sourceType":"notebook"},"kernelspec":{"display_name":"Python 3","language":"python","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.10.12"}},"nbformat":4,"nbformat_minor":4}