| import torch |
| import torch.nn as nn |
|
|
class Model(nn.Module):
    """Small CNN classifier for single-channel 28x28 images (e.g. MNIST).

    Architecture: four conv stages (Conv -> BatchNorm -> ReLU -> MaxPool ->
    Dropout2d), followed by a two-layer fully-connected head.

    The spatial size shrinks 28 -> 14 -> 7 -> 3 -> 1 through the four
    MaxPool2d(2) stages (floor division), which is why the classifier head
    expects exactly ``256 * 1 * 1`` flattened features. Inputs with a
    different spatial size will fail at the first Linear layer.

    Args:
        in_channels: Number of input image channels (default 1, grayscale).
        num_classes: Number of output logits (default 10).
    """

    def __init__(self, in_channels: int = 1, num_classes: int = 10):
        super().__init__()

        self.conv_layers = nn.Sequential(
            # Stage 1: in_channels -> 32, 28x28 -> 14x14
            nn.Conv2d(in_channels, 32, kernel_size=3, padding=1),
            nn.BatchNorm2d(32),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Dropout2d(0.25),

            # Stage 2: 32 -> 64, 14x14 -> 7x7
            nn.Conv2d(32, 64, kernel_size=3, padding=1),
            nn.BatchNorm2d(64),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Dropout2d(0.25),

            # Stage 3: 64 -> 128, 7x7 -> 3x3 (floor division in pooling)
            nn.Conv2d(64, 128, kernel_size=3, padding=1),
            nn.BatchNorm2d(128),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Dropout2d(0.25),

            # Stage 4: 128 -> 256 via 1x1 conv (channel mixing only), 3x3 -> 1x1
            nn.Conv2d(128, 256, kernel_size=1),
            nn.BatchNorm2d(256),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Dropout2d(0.25),
        )

        self.fc_layers = nn.Sequential(
            nn.Flatten(),
            nn.Linear(256 * 1 * 1, 128),  # 1x1 spatial map after stage 4
            nn.ReLU(),
            nn.Dropout(0.25),
            nn.Linear(128, num_classes),
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Return class logits of shape ``(batch, num_classes)``.

        Args:
            x: Input batch of shape ``(batch, in_channels, 28, 28)``.
        """
        x = self.conv_layers(x)
        x = self.fc_layers(x)
        return x