import math

import torch.nn as nn
from transformers import PreTrainedModel

from .configuration_resnet import ResEncoderConfig


def conv3x3(in_planes, out_planes, stride=1):
    """3x3 convolution with padding 1; bias is omitted because BatchNorm follows."""
    return nn.Conv2d(
        in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False
    )


def downsample_basic_block(inplanes, outplanes, stride):
    """Shortcut projection: a strided 1x1 convolution followed by BatchNorm."""
    return nn.Sequential(
        nn.Conv2d(inplanes, outplanes, kernel_size=1, stride=stride, bias=False),
        nn.BatchNorm2d(outplanes),
    )


def downsample_basic_block_v2(inplanes, outplanes, stride):
    """Shortcut projection that average-pools before a 1x1 convolution, so the
    projection aggregates every input position instead of the strided subsample
    a strided 1x1 convolution would see."""
    return nn.Sequential(
        nn.AvgPool2d(
            kernel_size=stride, stride=stride, ceil_mode=True, count_include_pad=False
        ),
        nn.Conv2d(inplanes, outplanes, kernel_size=1, stride=1, bias=False),
        nn.BatchNorm2d(outplanes),
    )
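

# Both shortcut variants change the channel count and divide the spatial size
# by `stride`; a minimal shape check with made-up sizes (a sketch, not part of
# the original module):
#
#     import torch
#     x = torch.randn(2, 64, 22, 22)
#     downsample_basic_block(64, 128, stride=2)(x).shape     # (2, 128, 11, 11)
#     downsample_basic_block_v2(64, 128, stride=2)(x).shape  # (2, 128, 11, 11)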


class BasicBlock(nn.Module):
    """ResNet basic block: two 3x3 conv/BN pairs around a residual connection.

    `relu_type` selects the activation ("relu" or "prelu") applied after each
    BatchNorm.
    """

    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None, relu_type="relu"):
        super().__init__()

        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)

        if relu_type == "relu":
            self.relu1 = nn.ReLU(inplace=True)
            self.relu2 = nn.ReLU(inplace=True)
        elif relu_type == "prelu":
            self.relu1 = nn.PReLU(num_parameters=planes)
            self.relu2 = nn.PReLU(num_parameters=planes)
        else:
            raise ValueError(f"relu_type {relu_type!r} not implemented")

        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)

        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        residual = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu1(out)
        out = self.conv2(out)
        out = self.bn2(out)
        # Project the identity branch whenever the main branch changed the
        # spatial size or the channel count.
        if self.downsample is not None:
            residual = self.downsample(x)

        out += residual
        out = self.relu2(out)

        return out
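

# A minimal shape check for the block, with illustrative sizes (a sketch, not
# part of the original module). With stride 2 the shortcut must be projected:
#
#     import torch
#     block = BasicBlock(64, 128, stride=2,
#                        downsample=downsample_basic_block(64, 128, 2))
#     block(torch.randn(2, 64, 28, 28)).shape  # (2, 128, 14, 14)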


class ResNet(nn.Module):
    """ResNet trunk without the usual stem or classifier head: the 3D frontend
    in `ResEncoder` replaces the stem, and the output is the globally pooled
    512-dim feature (``num_classes`` is kept only for interface compatibility).
    """

    def __init__(
        self,
        block,
        layers,
        num_classes=1000,
        relu_type="relu",
        gamma_zero=False,
        avg_pool_downsample=False,
    ):
        super().__init__()
        self.inplanes = 64
        self.relu_type = relu_type
        self.gamma_zero = gamma_zero
        self.downsample_block = (
            downsample_basic_block_v2 if avg_pool_downsample else downsample_basic_block
        )

        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
        self.avgpool = nn.AdaptiveAvgPool2d(1)

        # He (Kaiming) initialization for convolutions; BatchNorm starts out
        # as the identity transform.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2.0 / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

        # Optionally zero the last BatchNorm gamma of each block so every
        # residual branch starts as the identity, which eases early training.
        if self.gamma_zero:
            for m in self.modules():
                if isinstance(m, BasicBlock):
                    m.bn2.weight.data.zero_()

    def _make_layer(self, block, planes, blocks, stride=1):
        # Only the first block of a stage may change resolution or channel
        # count, so only it needs a projected shortcut.
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = self.downsample_block(
                inplanes=self.inplanes,
                outplanes=planes * block.expansion,
                stride=stride,
            )

        layers = []
        layers.append(
            block(self.inplanes, planes, stride, downsample, relu_type=self.relu_type)
        )
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes, relu_type=self.relu_type))

        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        return x
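

# The trunk expects 64-channel feature maps and returns one 512-dim vector per
# input; a minimal shape check with illustrative sizes (a sketch, not part of
# the original module):
#
#     import torch
#     trunk = ResNet(BasicBlock, [2, 2, 2, 2])  # ResNet-18 layout
#     trunk(torch.randn(2, 64, 28, 28)).shape   # (2, 512)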


class ResEncoder(PreTrainedModel):
    """Video encoder: a 3D convolutional frontend over (batch, 1, time, height,
    width) inputs, followed by a 2D ResNet trunk applied frame by frame.
    """

    def __init__(self, config: ResEncoderConfig):
        super().__init__(config=config)
        self.frontend_nout = config.frontend_nout
        self.backend_out = config.backend_out
        frontend_relu = (
            nn.PReLU(num_parameters=self.frontend_nout)
            if config.relu_type == "prelu"
            else nn.ReLU()
        )
        # The frontend keeps the time dimension (stride 1 in time) and
        # downsamples height and width by a factor of 4 overall; the trunk
        # expects 64 input channels, so frontend_nout should be 64.
        self.frontend3D = nn.Sequential(
            nn.Conv3d(
                1,
                self.frontend_nout,
                kernel_size=(5, 7, 7),
                stride=(1, 2, 2),
                padding=(2, 3, 3),
                bias=False,
            ),
            nn.BatchNorm3d(self.frontend_nout),
            frontend_relu,
            nn.MaxPool3d(kernel_size=(1, 3, 3), stride=(1, 2, 2), padding=(0, 1, 1)),
        )
        self.trunk = ResNet(BasicBlock, [2, 2, 2, 2], relu_type=config.relu_type)

    def forward(self, x):
        B, C, T, H, W = x.size()  # (batch, channels=1, time, height, width)
        x = self.frontend3D(x)  # (B, frontend_nout, T, H/4, W/4)
        Tnew = x.shape[2]
        # Fold time into the batch dimension so the 2D trunk processes every
        # frame independently, then restore a (batch, feature, time) layout.
        x = self.threeD_to_2D_tensor(x)
        x = self.trunk(x)  # (B * Tnew, 512)
        x = x.view(B, Tnew, x.size(1))
        x = x.transpose(1, 2).contiguous()
        return x

    @staticmethod
    def threeD_to_2D_tensor(x):
        n_batch, n_channels, s_time, sx, sy = x.shape
        x = x.transpose(1, 2).contiguous()
        return x.reshape(n_batch * s_time, n_channels, sx, sy)
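

# End-to-end shape sketch with illustrative sizes (an assumption-laden sketch,
# not part of the original module: it presumes ResEncoderConfig accepts these
# fields as keyword arguments, which only its definition can confirm):
#
#     import torch
#     cfg = ResEncoderConfig(frontend_nout=64, backend_out=512, relu_type="prelu")
#     enc = ResEncoder(cfg)
#     enc(torch.randn(1, 1, 16, 96, 96)).shape  # (1, 512, 16)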