| import torch |
| import torch.nn as nn |
| import timm |
| import torch.nn.functional as F |
|
|
# Per-dimension standard deviation of MoCo v3 ViT-B/16 patch-token features
# (768 values, one per embedding dim), precomputed offline and used to whiten
# latents in MocoV3.encode — presumably measured over a reference image
# dataset; TODO confirm the statistics' source.
mocov3_std = torch.tensor([0.0365, 0.0384, 0.0333, 0.0364, 0.0177, 0.0388, 0.0418, 0.0400, 0.0347,
    0.0327, 0.0478, 0.0385, 0.0384, 0.0396, 0.0361, 0.0347, 0.0443, 0.0342,
    0.0383, 0.0374, 0.0365, 0.0453, 0.0352, 0.0315, 0.0384, 0.0534, 0.0374,
    0.0358, 0.0355, 0.0349, 0.0350, 0.0392, 0.0360, 0.0369, 0.0356, 0.0332,
    0.0372, 0.0349, 0.0358, 0.0332, 0.0352, 0.0387, 0.0328, 0.0358, 0.0381,
    0.0373, 0.0359, 0.0326, 0.0342, 0.0338, 0.0347, 0.0725, 0.0400, 0.0345,
    0.0377, 0.0376, 0.0368, 0.0339, 0.0371, 0.0341, 0.0380, 0.0353, 0.0350,
    0.0389, 0.0363, 0.0347, 0.0363, 0.0363, 0.0354, 0.0354, 0.0369, 0.0538,
    0.0358, 0.0384, 0.0339, 0.0362, 0.0354, 0.0381, 0.0357, 0.0370, 0.0349,
    0.0394, 0.0355, 0.0344, 0.0372, 0.0379, 0.0361, 0.0376, 0.0344, 0.0354,
    0.0309, 0.0360, 0.0382, 0.0349, 0.0386, 0.0375, 0.0344, 0.0325, 0.0354,
    0.0392, 0.0329, 0.0361, 0.0358, 0.0364, 0.0361, 0.0345, 0.0361, 0.0347,
    0.0384, 0.0399, 0.0328, 0.0511, 0.0371, 0.0400, 0.0343, 0.0375, 0.0388,
    0.0417, 0.0373, 0.0392, 0.0352, 0.0483, 0.0374, 0.0373, 0.0347, 0.0357,
    0.0359, 0.0375, 0.0361, 0.0646, 0.0412, 0.0375, 0.0347, 0.0353, 0.0311,
    0.0371, 0.0352, 0.0376, 0.0372, 0.0411, 0.0349, 0.0338, 0.0398, 0.0341,
    0.0386, 0.0348, 0.0383, 0.0366, 0.0379, 0.0336, 0.0343, 0.0357, 0.0341,
    0.0348, 0.0331, 0.0382, 0.0374, 0.0355, 0.0378, 0.0336, 0.0376, 0.0362,
    0.0347, 0.0336, 0.0317, 0.0351, 0.0329, 0.0736, 0.0382, 0.0388, 0.0353,
    0.0357, 0.0381, 0.0406, 0.0390, 0.0349, 0.0392, 0.0341, 0.0358, 0.0398,
    0.0335, 0.0391, 0.0385, 0.0342, 0.0345, 0.0347, 0.0386, 0.0348, 0.0357,
    0.0345, 0.0357, 0.0368, 0.0394, 0.0391, 0.0397, 0.0328, 0.0366, 0.0351,
    0.0395, 0.0418, 0.0389, 0.0397, 0.0360, 0.0407, 0.0697, 0.1027, 0.0411,
    0.0194, 0.0363, 0.0398, 0.0371, 0.0362, 0.0372, 0.0310, 0.0338, 0.0425,
    0.0355, 0.0462, 0.0662, 0.0540, 0.0347, 0.0671, 0.0341, 0.0348, 0.0375,
    0.0374, 0.0334, 0.0372, 0.0346, 0.0376, 0.0372, 0.0412, 0.0353, 0.0411,
    0.0333, 0.0346, 0.0360, 0.0397, 0.0313, 0.0369, 0.0354, 0.0311, 0.0395,
    0.0510, 0.0361, 0.0366, 0.0364, 0.0392, 0.0405, 0.0348, 0.0339, 0.0509,
    0.0379, 0.0365, 0.0790, 0.0370, 0.0355, 0.0350, 0.0347, 0.0398, 0.0335,
    0.0357, 0.0356, 0.0311, 0.0380, 0.0338, 0.0385, 0.0358, 0.0360, 0.0355,
    0.0407, 0.0383, 0.0387, 0.0344, 0.0365, 0.0307, 0.0383, 0.0365, 0.0369,
    0.0343, 0.0341, 0.0363, 0.0387, 0.0361, 0.0384, 0.0362, 0.0440, 0.0352,
    0.0381, 0.0357, 0.0342, 0.0379, 0.0348, 0.0394, 0.0380, 0.0351, 0.0357,
    0.0334, 0.0357, 0.0368, 0.0356, 0.0406, 0.0365, 0.0380, 0.0384, 0.0388,
    0.0354, 0.0355, 0.0317, 0.0368, 0.0377, 0.0353, 0.0345, 0.0373, 0.0348,
    0.1193, 0.0546, 0.0362, 0.0352, 0.0354, 0.0381, 0.0387, 0.0455, 0.0358,
    0.0338, 0.0345, 0.0369, 0.0342, 0.0334, 0.0384, 0.0405, 0.0361, 0.0353,
    0.0364, 0.0347, 0.0335, 0.0430, 0.0375, 0.0371, 0.0374, 0.0350, 0.0358,
    0.0325, 0.0359, 0.0817, 0.0406, 0.0356, 0.0358, 0.0349, 0.0385, 0.0323,
    0.0361, 0.0372, 0.0405, 0.0346, 0.0349, 0.0361, 0.0371, 0.0387, 0.0402,
    0.0373, 0.0370, 0.0368, 0.0369, 0.0337, 0.0353, 0.0344, 0.0361, 0.0352,
    0.0369, 0.0357, 0.0370, 0.0359, 0.0361, 0.0335, 0.0351, 0.0351, 0.0365,
    0.0401, 0.0336, 0.0335, 0.0357, 0.0386, 0.0374, 0.0357, 0.0376, 0.0369,
    0.0369, 0.1032, 0.0365, 0.0823, 0.0367, 0.0459, 0.0823, 0.0397, 0.0364,
    0.0343, 0.0357, 0.0401, 0.0357, 0.0388, 0.0345, 0.0334, 0.0359, 0.0331,
    0.0362, 0.0388, 0.0586, 0.0402, 0.0385, 0.0373, 0.0368, 0.0332, 0.0357,
    0.0366, 0.0378, 0.0343, 0.0348, 0.0338, 0.0381, 0.0359, 0.0342, 0.0385,
    0.0347, 0.0359, 0.0396, 0.0366, 0.0374, 0.0305, 0.0689, 0.0389, 0.0359,
    0.0373, 0.0359, 0.0377, 0.0367, 0.0367, 0.0454, 0.0364, 0.0327, 0.0347,
    0.0406, 0.0374, 0.0360, 0.0336, 0.0363, 0.0364, 0.0413, 0.0364, 0.0348,
    0.0591, 0.0390, 0.0327, 0.0424, 0.0369, 0.0356, 0.0350, 0.0369, 0.0362,
    0.0346, 0.0479, 0.0319, 0.0370, 0.0406, 0.0371, 0.0387, 0.0346, 0.0389,
    0.0394, 0.0363, 0.0331, 0.0371, 0.0391, 0.0365, 0.0389, 0.0362, 0.0380,
    0.0378, 0.0369, 0.0353, 0.0365, 0.0363, 0.0374, 0.0362, 0.0642, 0.0399,
    0.0349, 0.0349, 0.0381, 0.0312, 0.0544, 0.0330, 0.0346, 0.0359, 0.0428,
    0.0365, 0.0380, 0.0370, 0.0377, 0.0378, 0.0377, 0.0370, 0.0364, 0.0365,
    0.0368, 0.0375, 0.0416, 0.0382, 0.0348, 0.0373, 0.0353, 0.0372, 0.0357,
    0.0338, 0.0395, 0.0358, 0.0359, 0.0382, 0.0352, 0.0356, 0.0340, 0.0396,
    0.0424, 0.0387, 0.0304, 0.0364, 0.0401, 0.0511, 0.0392, 0.0370, 0.0363,
    0.0405, 0.0341, 0.0348, 0.0334, 0.0346, 0.0349, 0.0392, 0.0370, 0.0406,
    0.0372, 0.0369, 0.0364, 0.0357, 0.0385, 0.0387, 0.0366, 0.0307, 0.0350,
    0.0357, 0.0422, 0.0404, 0.0334, 0.0345, 0.0532, 0.0368, 0.0333, 0.0374,
    0.0371, 0.0381, 0.0397, 0.0384, 0.0354, 0.0353, 0.0343, 0.0415, 0.0368,
    0.0351, 0.0398, 0.0395, 0.0365, 0.0370, 0.0367, 0.0370, 0.0362, 0.0349,
    0.0372, 0.0327, 0.0367, 0.0373, 0.0426, 0.0348, 0.0349, 0.0473, 0.0331,
    0.0365, 0.0376, 0.0352, 0.0366, 0.0410, 0.0381, 0.0373, 0.0407, 0.0331,
    0.0364, 0.0497, 0.0375, 0.0378, 0.0349, 0.0413, 0.0348, 0.0379, 0.0354,
    0.0364, 0.0347, 0.0356, 0.0347, 0.0383, 0.0714, 0.0392, 0.0452, 0.0353,
    0.0373, 0.0361, 0.0358, 0.0348, 0.0362, 0.0377, 0.0350, 0.0354, 0.0365,
    0.0360, 0.0367, 0.0366, 0.0358, 0.0357, 0.0357, 0.0508, 0.0368, 0.0353,
    0.0419, 0.0344, 0.0380, 0.0338, 0.0363, 0.0370, 0.0355, 0.0358, 0.0367,
    0.0375, 0.0375, 0.0559, 0.0361, 0.0378, 0.0381, 0.0343, 0.0379, 0.0390,
    0.0396, 0.0360, 0.0388, 0.0351, 0.0362, 0.0351, 0.0357, 0.0349, 0.0336,
    0.0371, 0.0344, 0.0358, 0.0354, 0.0382, 0.0386, 0.0406, 0.0834, 0.0361,
    0.0360, 0.0361, 0.0351, 0.0379, 0.0355, 0.0390, 0.0364, 0.0351, 0.0374,
    0.0436, 0.0375, 0.0363, 0.0353, 0.0388, 0.0355, 0.0348, 0.0364, 0.0325,
    0.0340, 0.0343, 0.0389, 0.0358, 0.0348, 0.0349, 0.0373, 0.0361, 0.0364,
    0.0367, 0.0373, 0.0377, 0.0322, 0.0379, 0.0333, 0.0442, 0.0389, 0.0324,
    0.0367, 0.0356, 0.0345, 0.0393, 0.0349, 0.0450, 0.0382, 0.0376, 0.0463,
    0.0363, 0.0328, 0.0356, 0.0379, 0.0360, 0.0342, 0.0371, 0.0356, 0.0373,
    0.0355, 0.0367, 0.0313, 0.0425, 0.0366, 0.0352, 0.0366, 0.0363, 0.0323,
    0.0328, 0.0335, 0.0337, 0.0402, 0.0369, 0.0390, 0.0363, 0.0416, 0.0592,
    0.0343, 0.0338, 0.0371, 0.0722, 0.0449, 0.0350, 0.0356, 0.0352, 0.0361,
    0.0366, 0.0362, 0.0463, 0.0347, 0.0400, 0.0327, 0.0362, 0.0375, 0.0466,
    0.0341, 0.0332, 0.0325, 0.0369, 0.0326, 0.0373, 0.0374, 0.0367, 0.0365,
    0.0344, 0.0398, 0.0378])
# Per-dimension mean of MoCo v3 ViT-B/16 patch-token features (768 values),
# the companion of mocov3_std above — subtracted in MocoV3.encode to whiten
# latents; TODO confirm the statistics' source dataset.
mocov3_mean = torch.tensor([-4.9909e-03,  5.8531e-02, -8.0204e-02,  1.4484e-02,  6.5256e-04,
    3.1926e-02,  5.2389e-02, -4.6138e-02, -2.9104e-02, -1.0310e-03,
    1.4314e-02,  4.3464e-02,  5.4860e-02, -3.8034e-03,  9.6628e-02,
    6.7566e-02, -2.0503e-01, -5.7046e-02, -8.4732e-02, -5.1926e-02,
    2.8064e-02, -7.4545e-02, -3.0411e-02, -2.1032e-02,  1.0223e-02,
    -3.9128e-02, -1.0685e-01, -4.2874e-02,  7.4012e-02, -8.5295e-02,
    -5.1053e-02,  1.1215e-01, -3.4985e-02, -1.9459e-02, -5.4159e-02,
    -3.3352e-02, -2.7664e-02,  6.8211e-02, -5.2040e-02,  1.4412e-02,
    -5.8436e-02,  2.2623e-02,  1.6369e-02, -2.6669e-02,  7.5853e-03,
    -1.7022e-02,  1.9521e-02,  1.7904e-02, -1.7904e-02, -5.8781e-02,
    -5.1144e-02, -5.0436e-03, -2.6308e-02,  3.3595e-03,  2.5913e-02,
    2.7867e-03, -9.1458e-02, -4.5019e-02, -3.1314e-02, -4.4559e-02,
    -5.5143e-02,  1.8014e-02,  2.7575e-02, -4.7217e-02,  4.3467e-02,
    -8.2260e-02, -6.7334e-03, -5.6354e-02,  7.9308e-02, -2.9664e-02,
    -1.8751e-02, -9.8325e-02,  7.9536e-02, -1.3846e-02,  3.8479e-02,
    -2.6752e-02,  7.3832e-02, -3.5585e-03,  9.6148e-02,  2.4930e-02,
    -3.7335e-04, -3.6863e-02, -2.6756e-02,  4.9271e-02, -2.8841e-02,
    -3.0766e-03,  8.6419e-02,  6.1747e-02, -6.5190e-02,  5.2677e-02,
    -1.5961e-02, -8.7862e-03,  5.4241e-02,  5.0302e-02, -7.1608e-02,
    3.5493e-02, -2.4699e-02, -5.9670e-02,  8.6370e-03, -3.0705e-02,
    -8.9275e-03, -1.3236e-02, -1.0522e-01, -5.8801e-02,  4.5025e-03,
    -1.9658e-02, -3.5878e-02,  7.4032e-03, -1.2370e-02, -5.7225e-02,
    6.1609e-02,  4.5315e-03, -4.6809e-02,  6.3947e-02,  1.8732e-02,
    -2.0228e-02, -6.8604e-03,  2.5951e-03, -9.4610e-03,  3.7075e-02,
    -3.4487e-02,  8.2085e-02,  1.9499e-03,  1.8524e-02, -2.0771e-02,
    4.1893e-02, -5.9360e-02,  2.2461e-02, -1.3253e-02, -2.4474e-02,
    -1.0797e-01, -3.7946e-02, -2.8403e-02,  4.2671e-02, -6.6034e-02,
    4.8225e-05,  5.0553e-02, -1.6483e-02, -9.2907e-02, -5.0918e-02,
    -4.4737e-02,  1.0900e-03, -2.2239e-02, -1.5004e-01,  1.5945e-02,
    1.0725e-01,  5.8067e-02,  4.5711e-03,  2.8622e-02,  6.3638e-04,
    -9.8273e-03,  4.3623e-02,  8.0471e-02,  6.2474e-02,  5.5382e-02,
    -3.9220e-02, -3.7855e-02,  1.7026e-02,  6.4659e-02, -4.5883e-03,
    8.0370e-02,  1.5961e-02,  1.9753e-02, -9.2319e-03,  4.0418e-02,
    4.7325e-03,  4.7597e-02,  1.9679e-03,  8.5419e-02,  3.2275e-02,
    5.3665e-02, -3.7527e-02, -1.0371e-01, -1.0602e-02, -6.3089e-02,
    2.1686e-02, -2.1665e-03, -4.0568e-02, -5.4864e-02,  1.2588e-02,
    -4.0154e-02, -1.0553e-01, -1.7084e-02,  5.4653e-02,  1.3699e-01,
    -2.0908e-02,  3.0141e-02, -2.8898e-02, -6.7819e-04, -1.5839e-02,
    1.6851e-02,  3.5577e-02, -4.6329e-03, -6.0051e-02,  2.6143e-02,
    1.7365e-02, -1.9256e-02,  3.1024e-02,  5.1721e-02, -8.4824e-02,
    6.7965e-02, -9.0949e-02,  4.3225e-03, -7.1089e-02,  9.0626e-02,
    -1.1133e-01, -1.8509e-02,  5.5376e-02,  3.3383e-02, -2.9887e-02,
    -5.2169e-02,  2.7724e-02,  1.2762e-02,  6.4547e-02,  6.4587e-02,
    1.9503e-02,  1.2702e-02,  1.5315e-02,  1.6836e-02, -1.3361e-01,
    -9.1265e-02, -3.5769e-02,  4.2201e-02,  1.1958e-02,  5.4143e-03,
    -4.0032e-02,  8.6459e-02,  6.5958e-02,  1.5095e-02, -8.1063e-02,
    2.3027e-02,  2.4872e-02,  2.2488e-02,  9.4502e-02, -1.9964e-02,
    1.3892e-02,  6.8407e-02, -1.8256e-02,  2.1523e-02, -2.7407e-02,
    -1.0688e-02, -1.6652e-02, -1.9957e-02,  5.7456e-02, -1.1604e-01,
    -5.1513e-02, -1.1086e-01,  7.0534e-02, -1.4484e-02,  4.3226e-03,
    1.0349e-01, -3.6197e-03,  8.9538e-02, -1.5885e-02, -3.8066e-02,
    5.3550e-02,  6.5762e-02, -9.3377e-02,  1.1694e-03, -2.6036e-02,
    -4.9501e-02,  7.8390e-02, -2.3242e-03, -5.7216e-02,  7.1508e-03,
    -4.0947e-02,  1.2399e-01, -3.8855e-03, -1.7977e-02, -1.2340e-01,
    -5.2535e-02,  3.4631e-02,  1.9590e-02,  6.8397e-03, -2.1967e-02,
    2.6737e-02, -2.3443e-02, -8.6921e-03, -4.3219e-02, -4.7690e-02,
    1.5660e-02,  6.5133e-02,  5.2079e-02, -6.9453e-02, -5.0886e-02,
    -3.9676e-02, -1.0395e-02, -5.7200e-02,  4.2374e-02,  8.4635e-02,
    5.6200e-02,  1.9147e-03, -2.8570e-02,  5.8081e-02, -4.9870e-02,
    8.7587e-02, -2.1561e-02, -3.9530e-02,  3.1023e-02, -3.2769e-02,
    -3.1827e-02,  5.4912e-02, -6.4331e-02,  2.7848e-02, -1.4435e-01,
    -1.3247e-02,  4.1248e-02,  2.4548e-03, -6.6767e-02, -2.2580e-02,
    -4.4151e-02, -5.4278e-02,  1.0243e-02,  4.1607e-02, -3.6166e-02,
    8.6948e-02,  6.7075e-02, -1.1791e-01,  8.5181e-02, -1.2960e-02,
    -1.8911e-02,  5.7109e-02, -3.8419e-02, -9.0466e-02,  4.8168e-03,
    -2.6800e-02,  4.8001e-02,  9.3661e-02,  1.2480e-01,  2.9446e-02,
    -7.8841e-02,  1.9951e-02, -5.6541e-02,  6.8220e-03, -1.4068e-02,
    -2.4581e-02, -7.4762e-02,  4.4282e-02,  9.4121e-02, -8.8464e-03,
    1.2429e-02,  2.3094e-02,  2.2941e-02, -7.1354e-02,  2.9385e-01,
    -1.6506e-02, -6.4741e-03, -3.3756e-02, -1.9813e-02,  1.0995e-02,
    -1.5479e-02,  8.3897e-03, -1.7868e-02, -5.3500e-02, -1.5280e-01,
    -2.4950e-02, -4.3315e-02, -5.0033e-02,  5.7700e-02, -2.2942e-02,
    2.8033e-02, -7.6005e-02, -1.5828e-02,  4.6690e-02, -1.2853e-01,
    8.6603e-02,  9.7747e-03, -1.5809e-02, -1.0410e-01,  4.8349e-02,
    -7.7004e-03, -7.7936e-02, -5.0415e-02, -4.1027e-02, -5.7368e-02,
    6.3061e-02, -1.4738e-01,  9.0193e-03,  1.4264e-02,  8.1013e-02,
    1.4018e-02,  4.0492e-02,  5.7056e-03,  1.3297e-02,  2.2139e-02,
    -5.1328e-02, -5.6951e-02,  1.8988e-02,  3.5907e-02, -7.2411e-02,
    1.8173e-01,  9.4154e-03,  2.4558e-04,  9.9371e-02,  3.6060e-03,
    5.8868e-02, -1.6166e-02, -3.3660e-03, -4.0091e-02, -7.3601e-02,
    -4.7921e-02,  5.5936e-02,  3.3641e-02, -5.8169e-03,  1.7200e-03,
    1.4891e-03,  4.6669e-02,  2.6898e-02, -2.0748e-02, -3.7035e-02,
    7.1686e-02, -1.0291e-01,  5.2060e-02,  5.8818e-02,  5.6555e-04,
    -4.9335e-02,  6.5931e-02,  1.1788e-01,  7.9230e-03,  7.9755e-02,
    5.8611e-02, -4.8066e-02,  8.9253e-03, -1.4002e-02, -6.4210e-02,
    4.7972e-03,  5.6867e-02,  1.2133e-01, -8.8800e-02,  1.0355e-01,
    -1.8507e-02,  2.1943e-04, -1.9579e-02,  6.1412e-03,  2.9627e-02,
    6.6325e-02, -6.3568e-02,  6.6112e-02,  7.5182e-02,  3.0145e-03,
    7.8490e-03,  3.8883e-02,  9.4810e-02,  6.8651e-02,  9.3259e-02,
    7.6506e-02, -1.1391e-01,  4.2561e-02,  9.0650e-02,  5.0542e-02,
    4.6873e-02,  2.9639e-02, -7.0080e-02, -8.2195e-03,  1.7181e-02,
    -3.2508e-02, -5.6378e-03, -1.0345e-01,  1.0686e-02, -7.7440e-02,
    1.0137e-01,  1.1383e-01,  4.9601e-02,  1.0087e-01,  6.7788e-03,
    -5.2826e-02,  2.5621e-02, -9.7276e-02, -9.5657e-02,  3.9823e-02,
    8.1216e-03,  5.6924e-02,  9.2770e-03,  3.0634e-02,  5.1213e-02,
    -1.9143e-02, -1.4004e-02,  1.0268e-01,  2.6726e-02,  1.3936e-03,
    1.0327e-02,  3.1209e-03, -1.9589e-02, -3.8289e-02,  7.4170e-02,
    5.5759e-02,  1.6756e-02,  1.5523e-01,  8.1868e-02,  7.1569e-03,
    -1.0032e-01, -3.1106e-02, -4.5538e-02,  5.4754e-02, -4.1384e-03,
    6.6185e-04,  8.9977e-02,  1.1297e-01, -3.2720e-02, -1.1563e-02,
    2.9044e-02,  1.1240e-01, -6.2860e-02, -1.8176e-02,  3.7151e-02,
    9.9272e-02, -1.5495e-02, -7.4552e-02,  7.0645e-02,  7.8578e-03,
    -9.8891e-02, -6.4297e-02,  9.9785e-02,  8.1721e-03, -6.9688e-02,
    1.1140e-03, -4.7135e-02,  5.7858e-02,  1.8430e-02, -1.4128e-03,
    -3.9728e-02,  7.4199e-03, -8.4799e-02,  1.2801e-02,  1.5855e-01,
    -1.4473e-02, -3.2350e-02,  8.8111e-02,  7.3757e-02, -5.9830e-02,
    -6.8308e-02, -4.4946e-02, -1.1461e-02, -3.8021e-02,  1.9265e-03,
    -4.3868e-02,  2.3531e-02,  4.0215e-02, -3.4769e-03,  1.7513e-02,
    -3.2142e-02,  1.4742e-02, -3.9892e-02, -1.5716e-02, -7.3177e-03,
    -2.1507e-02,  2.4539e-02, -4.8903e-03, -4.0340e-02, -3.9031e-02,
    1.6703e-01, -3.3599e-02, -8.6483e-02,  3.9196e-02,  5.4656e-02,
    -6.0245e-02,  8.4745e-02, -1.0317e-01, -3.2529e-02, -3.1298e-02,
    4.5586e-02,  1.7218e-02, -5.1573e-02, -9.2497e-02, -3.7508e-02,
    9.3142e-02,  3.7266e-02, -7.8790e-02, -2.2965e-03, -8.8131e-03,
    1.1247e-01,  1.4398e-02,  3.6883e-02,  1.3535e-02, -2.2482e-02,
    7.7606e-03, -1.1564e-02, -9.2380e-02,  9.2117e-02, -8.6496e-02,
    -4.7756e-02,  9.0737e-02,  4.9368e-02, -1.3184e-02, -3.3454e-02,
    6.3449e-03,  3.7228e-02, -2.4421e-03,  1.7024e-03,  2.9668e-02,
    -5.9003e-02, -4.5759e-02,  5.7492e-02, -6.7707e-03,  3.1389e-02,
    2.8321e-02, -6.0809e-02, -2.4451e-03,  6.3512e-03, -1.8711e-02,
    1.2424e-02,  1.3970e-02,  5.6096e-03, -9.3184e-02,  1.4589e-02,
    -1.9262e-03, -3.8669e-02, -5.9775e-02, -2.1852e-05, -1.3025e-02,
    -4.6614e-02,  2.0437e-02, -2.0632e-03,  9.9417e-02, -8.5307e-02,
    -6.2060e-02, -4.7353e-02, -8.4368e-02,  5.9125e-02,  1.3815e-02,
    6.7903e-02, -1.9176e-02,  8.1607e-02,  9.5300e-03, -7.5729e-02,
    4.8691e-02,  7.7487e-02, -1.1334e-02, -4.1340e-02,  1.1121e-02,
    -2.4506e-02,  1.4673e-01, -4.1853e-02,  6.9515e-02, -1.2374e-01,
    -5.0617e-02,  2.6989e-02,  1.0692e-02, -5.9575e-02,  3.0577e-02,
    5.9018e-02, -2.8683e-03, -5.3847e-02, -1.3862e-02, -7.3678e-03,
    -2.5915e-02,  5.4326e-02, -1.0355e-02, -1.4217e-02, -3.8828e-02,
    -4.3901e-02, -3.6999e-02, -8.7978e-02, -3.7600e-02,  7.3855e-02,
    -4.6035e-02, -1.0064e-02,  8.4649e-02,  5.4544e-02,  6.6927e-02,
    -6.7527e-02,  4.1447e-02, -5.2767e-02,  5.7202e-02,  2.0367e-02,
    -1.4843e-02, -1.3606e-02, -6.5079e-02,  4.3446e-02, -3.8950e-02,
    3.5919e-02,  2.0620e-02, -6.4451e-02,  1.5398e-02, -2.2867e-03,
    1.1872e-02,  1.0481e-01,  5.0847e-02, -7.6596e-02, -9.0736e-02,
    -3.9243e-02, -3.5298e-02,  2.8835e-02, -1.2076e-02, -1.7122e-02,
    -1.9240e-02,  4.2200e-02, -2.8937e-03,  1.0121e-01, -1.1783e-02,
    1.6700e-02, -3.5787e-02,  8.3502e-02, -7.7383e-03, -1.8015e-02,
    -2.6297e-02,  6.3130e-02,  2.6768e-02,  1.5342e-02, -5.8850e-02,
    -1.4420e-01, -5.5196e-03,  7.0583e-02, -3.3988e-02, -5.3023e-02,
    3.2967e-02,  1.8001e-02,  6.6126e-02,  5.1371e-02, -8.5679e-02,
    -5.4478e-02, -3.4152e-02, -2.4631e-03, -4.2347e-02,  2.5897e-02,
    6.2213e-02,  4.1291e-02, -5.2986e-02, -2.5307e-02,  2.5769e-03,
    4.4660e-03, -3.7381e-04,  2.0208e-03,  5.6412e-02, -1.5519e-02,
    5.4275e-02,  7.7032e-03, -2.1544e-02,  9.5500e-02,  1.2722e-02,
    -3.2053e-03, -4.3996e-02, -4.5722e-02,  2.6009e-02, -6.7119e-02,
    -2.0980e-02,  2.5636e-02, -6.9467e-02, -1.1082e-01,  1.2189e-02,
    2.5662e-02,  3.2322e-02, -8.3210e-02,  1.8254e-02, -2.7739e-02,
    3.8996e-02,  1.3017e-02,  4.5556e-02, -9.2341e-02, -8.9775e-02,
    8.1507e-02,  2.7921e-03, -1.1678e-01,  4.3381e-02,  5.3091e-02,
    -1.0354e-01, -3.7595e-02,  8.8258e-02, -7.4204e-02,  9.5140e-02,
    -2.0128e-02, -4.3937e-02,  2.2460e-02,  6.1824e-03, -3.3601e-02,
    -5.7980e-03, -4.4147e-02, -1.0026e-01])
|
|
| |
| |
| |
|
|
| |
| |
|
|
| |
| |
| |
|
|
| from typing import Optional, Union, Tuple, List |
|
|
@torch.fx.wrap
def resample_abs_pos_embed(
        posemb: torch.Tensor,
        new_size: List[int],
        old_size: Optional[List[int]] = None,
        num_prefix_tokens: int = 1,
        interpolation: str = 'bicubic',
        antialias: bool = True,
        verbose: bool = False,
) -> torch.Tensor:
    """Resample an absolute position embedding to a new 2D grid size.

    Args:
        posemb: Position embedding of shape (1, num_prefix_tokens + H*W, dim).
        new_size: Target grid size as (height, width).
        old_size: Source grid size; when None it is inferred as a square grid
            from the spatial token count.
        num_prefix_tokens: Leading tokens (e.g. class token) carried through
            unchanged rather than interpolated.
        interpolation: Interpolation mode passed to F.interpolate.
        antialias: Antialias flag passed to F.interpolate.
        verbose: Unused; kept for interface compatibility.

    Returns:
        Tensor of shape (1, num_prefix_tokens + new_size[0]*new_size[1], dim)
        in posemb's original dtype. The input is returned as-is when the token
        count already matches a square target grid.
    """
    num_pos_tokens = posemb.shape[1]
    num_new_tokens = new_size[0] * new_size[1] + num_prefix_tokens
    # Fast path: nothing to resample for a matching, square target grid.
    if num_new_tokens == num_pos_tokens and new_size[0] == new_size[1]:
        return posemb

    if old_size is None:
        # Infer a square source grid from the number of spatial tokens.
        # Fix: the original called math.sqrt but `math` is never imported in
        # this file, raising NameError whenever old_size was omitted.
        hw = int((num_pos_tokens - num_prefix_tokens) ** 0.5)
        old_size = hw, hw

    # Split off prefix (class/distill) tokens — they are not interpolated.
    if num_prefix_tokens:
        posemb_prefix, posemb = posemb[:, :num_prefix_tokens], posemb[:, num_prefix_tokens:]
    else:
        posemb_prefix, posemb = None, posemb

    # Interpolate in float32 for accuracy, then restore the original dtype.
    embed_dim = posemb.shape[-1]
    orig_dtype = posemb.dtype
    posemb = posemb.float()
    posemb = posemb.reshape(1, old_size[0], old_size[1], -1).permute(0, 3, 1, 2)
    posemb = F.interpolate(posemb, size=new_size, mode=interpolation, antialias=antialias)
    posemb = posemb.permute(0, 2, 3, 1).reshape(1, -1, embed_dim)
    posemb = posemb.to(orig_dtype)

    # Re-attach the untouched prefix tokens in front of the resampled grid.
    if posemb_prefix is not None:
        posemb = torch.cat([posemb_prefix, posemb], dim=1)

    return posemb
|
|
|
|
def to_2tuple(x):
    """Coerce *x* to a 2-tuple.

    Scalars are duplicated into ``(x, x)``; an existing tuple or list is
    passed through (as a tuple). The original always returned ``(x, x)``,
    which produced a nested tuple — and broke the ``zip``-based grid math in
    ``_init_img_size`` — whenever a caller passed an already-paired size such
    as ``(224, 224)``, even though the public signatures advertise
    ``Union[int, Tuple[int, int]]``.
    """
    if isinstance(x, (tuple, list)):
        return tuple(x)
    return x, x
|
|
def _init_img_size(self, img_size: Union[int, Tuple[int, int]]):
    """Derive the cached size triple for a patch-embed module.

    Returns ``(img_size, grid_size, num_patches)``: the 2-tuple input
    resolution, the patch grid (floor division by patch size), and the total
    patch count. A ``None`` img_size yields ``(None, None, None)``.
    """
    assert self.patch_size
    if img_size is None:
        return None, None, None
    size = to_2tuple(img_size)
    grid = tuple(dim // patch for dim, patch in zip(size, self.patch_size))
    return size, grid, grid[0] * grid[1]
|
|
def set_input_size_patchembed(
    self,
    img_size: Optional[Union[int, Tuple[int, int]]] = None,
    patch_size: Optional[Union[int, Tuple[int, int]]] = None,
):
    """Resize a patch-embed module in place for a new image and/or patch size.

    When the patch size changes, the conv projection is rebuilt with the new
    kernel/stride and its weights resampled from the old kernel; when either
    the resolution or the patch size changes, the cached
    img_size/grid_size/num_patches triple is recomputed.

    Args:
        img_size: New input resolution; falls back to the current one if None.
        patch_size: New patch size; kept unchanged if None.
    """
    new_patch_size = None
    if patch_size is not None:
        new_patch_size = to_2tuple(patch_size)
    if new_patch_size is not None and new_patch_size != self.patch_size:
        # Rebuild the projection conv for the new patch size, resampling the
        # existing kernel weights rather than re-initializing them.
        # NOTE(review): resample_patch_embed is not defined or imported in
        # this chunk — presumably timm's resample_patch_embed; confirm it is
        # in scope, otherwise this branch raises NameError at runtime.
        with torch.no_grad():
            new_proj = nn.Conv2d(
                self.proj.in_channels,
                self.proj.out_channels,
                kernel_size=new_patch_size,
                stride=new_patch_size,
                bias=self.proj.bias is not None,
                device=self.proj.weight.device,
                dtype=self.proj.weight.dtype,
            )
            new_proj.weight.copy_(resample_patch_embed(self.proj.weight, new_patch_size, verbose=True))
            if self.proj.bias is not None:
                new_proj.bias.copy_(self.proj.bias)
        self.proj = new_proj
        self.patch_size = new_patch_size
    img_size = img_size or self.img_size
    # Recompute the cached sizes if either resolution or patch size changed.
    if img_size != self.img_size or new_patch_size is not None:
        self.img_size, self.grid_size, self.num_patches = _init_img_size(self, img_size)
|
|
def set_input_size(
    self,
    img_size: Optional[Union[int, Tuple[int, int]]] = None,
    patch_size: Optional[Union[int, Tuple[int, int]]] = None,
) -> None:
    """Update the input image resolution and patch size.

    Resizes the model's patch-embed module, then resamples the absolute
    position embedding to the new patch grid when the token count changes.

    Args:
        img_size: New input resolution, if None current resolution is used.
        patch_size: New patch size, if None existing patch size is used.
    """
    # Remember the current grid so the pos_embed can be resampled from it.
    prev_grid_size = self.patch_embed.grid_size
    set_input_size_patchembed(self.patch_embed, img_size=img_size, patch_size=patch_size)
    if self.pos_embed is not None:
        # Prefix tokens (e.g. class token) count toward pos_embed length only
        # when the model embeds them alongside patch tokens.
        num_prefix_tokens = 0 if self.no_embed_class else self.num_prefix_tokens
        num_new_tokens = self.patch_embed.num_patches + num_prefix_tokens
        if num_new_tokens != self.pos_embed.shape[1]:
            # Interpolate the old position grid onto the new one in place.
            self.pos_embed = nn.Parameter(resample_abs_pos_embed(
                self.pos_embed,
                new_size=self.patch_embed.grid_size,
                old_size=prev_grid_size,
                num_prefix_tokens=num_prefix_tokens,
                verbose=True,
            ))
|
|
|
|
class MocoV3(nn.Module):
    """Frozen MoCo v3 ViT-B/16 encoder producing whitened patch-feature maps.

    Loads a MoCo v3 base-encoder checkpoint into a timm ViT, resizes the
    model to 256px input (16x16 patch grid), freezes it, and exposes
    ``encode``, which maps image batches to per-patch features normalized by
    the precomputed ``mocov3_mean``/``mocov3_std`` statistics.
    """

    def __init__(
        self,
        model_ckpt_path: str = '/path/to/latentforcing/mocov3b.pth.tar',
        match_pixel_norm: float = 0.485,
    ):
        """Build the frozen encoder.

        Args:
            model_ckpt_path: Path to a MoCo v3 training checkpoint containing
                a ``state_dict`` with ``module.base_encoder.*`` keys.
            match_pixel_norm: Scale applied to the whitened latents after
                normalization — presumably to match a pixel-space magnitude;
                TODO confirm the intent of the 0.485 default.
        """
        super().__init__()

        # Precomputed per-dim feature statistics used to whiten the latents.
        self.register_buffer("latent_std", mocov3_std.clone().float())
        self.register_buffer("latent_mean", mocov3_mean.clone().float())

        # Standard ImageNet pixel normalization statistics.
        self.register_buffer("pixel_std", torch.tensor((0.229, 0.224, 0.225)))
        self.register_buffer("pixel_mean", torch.tensor((0.485, 0.456, 0.406)))

        self.match_pixel_norm = match_pixel_norm

        # Strip the DDP/MoCo "module.base_encoder." prefix to recover plain
        # backbone keys; projection/prediction-head keys are dropped here.
        checkpoint = torch.load(model_ckpt_path, map_location="cpu")
        state_dict = checkpoint['state_dict']
        new_state_dict = {}
        for k, v in state_dict.items():
            if k.startswith("module.base_encoder."):
                new_k = k.replace("module.base_encoder.", "")
                new_state_dict[new_k] = v

        # strict=False silently ignores any missing/unexpected keys —
        # NOTE(review): consider inspecting load_state_dict's return value.
        self.mocov3 = timm.create_model("vit_base_patch16_224", num_classes=0)
        self.mocov3.load_state_dict(new_state_dict, strict=False)
        set_input_size(self.mocov3, 256)
        self.mocov3.eval()
        self.mocov3.requires_grad_(False)

    @torch.compile()
    @torch.no_grad()
    def encode(self, x: torch.Tensor) -> torch.Tensor:
        """Encode images into whitened MoCo v3 patch features.

        Args:
            x: Image batch — assumes shape (B, 3, 256, 256) with values in
                [0, 1] (ImageNet normalization is applied here); TODO confirm
                against callers.

        Returns:
            Feature map of shape (B, 768, 16, 16).
        """
        # ImageNet-normalize the pixels before the backbone.
        x = (x - self.pixel_mean.view(1,3,1,1)) / self.pixel_std.view(1,3,1,1)
        z = self.mocov3.forward_features(x)
        # Drop the class token, keeping the 256 patch tokens.
        z = z[:,1:]
        # Whiten with the precomputed feature statistics, then rescale.
        z = (z - self.latent_mean.view(1,1,-1)) / self.latent_std.view(1,1,-1)
        z = z * self.match_pixel_norm
        # Reassemble tokens into a (B, 768, 16, 16) spatial feature map.
        z = z.view(-1,16,16,768).permute(0,3,1,2)

        return z
|
|