diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..f6b1f326ca4ab7cf0c8798856f8fe0020ff82d58 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +*.png filter=lfs diff=lfs merge=lfs -text diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/dataset.json b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/dataset.json new file mode 100644 index 0000000000000000000000000000000000000000..14a564b8f283c6d1506e198126e7dfae044e3261 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/dataset.json @@ -0,0 +1,2803 @@ +{ + "name": "MSWAL", + "description": "3D Multi-class Segmentation of Whole Abdominal Lesions Dataset", + "licence": "CC BY-NC 4.0", + "release": "July 8, 2025", + "tensorImageSize": "3D", + "file_ending": ".nii.gz", + "channel_names": { + "0": "CT" + }, + "labels": { + "background": 0, + "gallstone": 1, + "kidney stone": 2, + "liver tumor": 3, + "kidney tumor": 4, + "pancreatic cancer": 5, + "liver cyst": 6, + "kidney cyst": 7 + }, + "numTraining": 484, + "numTest": 210, + "training": [ + { + "image": "./imagesTr/MSWAL_0001_0000.nii.gz", + "label": "./labelsTr/MSWAL_0001.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0002_0000.nii.gz", + "label": "./labelsTr/MSWAL_0002.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0003_0000.nii.gz", + "label": "./labelsTr/MSWAL_0003.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0008_0000.nii.gz", + "label": "./labelsTr/MSWAL_0008.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0009_0000.nii.gz", + "label": "./labelsTr/MSWAL_0009.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0011_0000.nii.gz", + "label": "./labelsTr/MSWAL_0011.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0013_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0013.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0014_0000.nii.gz", + "label": "./labelsTr/MSWAL_0014.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0015_0000.nii.gz", + "label": "./labelsTr/MSWAL_0015.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0017_0000.nii.gz", + "label": "./labelsTr/MSWAL_0017.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0018_0000.nii.gz", + "label": "./labelsTr/MSWAL_0018.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0020_0000.nii.gz", + "label": "./labelsTr/MSWAL_0020.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0021_0000.nii.gz", + "label": "./labelsTr/MSWAL_0021.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0022_0000.nii.gz", + "label": "./labelsTr/MSWAL_0022.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0024_0000.nii.gz", + "label": "./labelsTr/MSWAL_0024.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0026_0000.nii.gz", + "label": "./labelsTr/MSWAL_0026.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0027_0000.nii.gz", + "label": "./labelsTr/MSWAL_0027.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0028_0000.nii.gz", + "label": "./labelsTr/MSWAL_0028.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0029_0000.nii.gz", + "label": "./labelsTr/MSWAL_0029.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0031_0000.nii.gz", + "label": "./labelsTr/MSWAL_0031.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0032_0000.nii.gz", + "label": "./labelsTr/MSWAL_0032.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0033_0000.nii.gz", + "label": "./labelsTr/MSWAL_0033.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0034_0000.nii.gz", + "label": "./labelsTr/MSWAL_0034.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0035_0000.nii.gz", + "label": "./labelsTr/MSWAL_0035.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0037_0000.nii.gz", + "label": "./labelsTr/MSWAL_0037.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0038_0000.nii.gz", + "label": "./labelsTr/MSWAL_0038.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0039_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0039.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0040_0000.nii.gz", + "label": "./labelsTr/MSWAL_0040.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0041_0000.nii.gz", + "label": "./labelsTr/MSWAL_0041.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0042_0000.nii.gz", + "label": "./labelsTr/MSWAL_0042.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0045_0000.nii.gz", + "label": "./labelsTr/MSWAL_0045.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0046_0000.nii.gz", + "label": "./labelsTr/MSWAL_0046.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0049_0000.nii.gz", + "label": "./labelsTr/MSWAL_0049.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0050_0000.nii.gz", + "label": "./labelsTr/MSWAL_0050.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0051_0000.nii.gz", + "label": "./labelsTr/MSWAL_0051.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0052_0000.nii.gz", + "label": "./labelsTr/MSWAL_0052.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0054_0000.nii.gz", + "label": "./labelsTr/MSWAL_0054.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0055_0000.nii.gz", + "label": "./labelsTr/MSWAL_0055.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0056_0000.nii.gz", + "label": "./labelsTr/MSWAL_0056.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0057_0000.nii.gz", + "label": "./labelsTr/MSWAL_0057.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0059_0000.nii.gz", + "label": "./labelsTr/MSWAL_0059.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0060_0000.nii.gz", + "label": "./labelsTr/MSWAL_0060.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0061_0000.nii.gz", + "label": "./labelsTr/MSWAL_0061.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0063_0000.nii.gz", + "label": "./labelsTr/MSWAL_0063.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0064_0000.nii.gz", + "label": "./labelsTr/MSWAL_0064.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0065_0000.nii.gz", + "label": "./labelsTr/MSWAL_0065.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0066_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0066.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0067_0000.nii.gz", + "label": "./labelsTr/MSWAL_0067.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0069_0000.nii.gz", + "label": "./labelsTr/MSWAL_0069.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0072_0000.nii.gz", + "label": "./labelsTr/MSWAL_0072.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0075_0000.nii.gz", + "label": "./labelsTr/MSWAL_0075.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0077_0000.nii.gz", + "label": "./labelsTr/MSWAL_0077.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0080_0000.nii.gz", + "label": "./labelsTr/MSWAL_0080.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0082_0000.nii.gz", + "label": "./labelsTr/MSWAL_0082.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0083_0000.nii.gz", + "label": "./labelsTr/MSWAL_0083.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0084_0000.nii.gz", + "label": "./labelsTr/MSWAL_0084.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0085_0000.nii.gz", + "label": "./labelsTr/MSWAL_0085.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0086_0000.nii.gz", + "label": "./labelsTr/MSWAL_0086.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0088_0000.nii.gz", + "label": "./labelsTr/MSWAL_0088.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0089_0000.nii.gz", + "label": "./labelsTr/MSWAL_0089.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0092_0000.nii.gz", + "label": "./labelsTr/MSWAL_0092.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0093_0000.nii.gz", + "label": "./labelsTr/MSWAL_0093.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0094_0000.nii.gz", + "label": "./labelsTr/MSWAL_0094.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0095_0000.nii.gz", + "label": "./labelsTr/MSWAL_0095.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0096_0000.nii.gz", + "label": "./labelsTr/MSWAL_0096.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0098_0000.nii.gz", + "label": "./labelsTr/MSWAL_0098.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0099_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0099.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0101_0000.nii.gz", + "label": "./labelsTr/MSWAL_0101.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0102_0000.nii.gz", + "label": "./labelsTr/MSWAL_0102.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0103_0000.nii.gz", + "label": "./labelsTr/MSWAL_0103.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0104_0000.nii.gz", + "label": "./labelsTr/MSWAL_0104.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0105_0000.nii.gz", + "label": "./labelsTr/MSWAL_0105.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0106_0000.nii.gz", + "label": "./labelsTr/MSWAL_0106.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0108_0000.nii.gz", + "label": "./labelsTr/MSWAL_0108.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0109_0000.nii.gz", + "label": "./labelsTr/MSWAL_0109.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0110_0000.nii.gz", + "label": "./labelsTr/MSWAL_0110.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0111_0000.nii.gz", + "label": "./labelsTr/MSWAL_0111.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0112_0000.nii.gz", + "label": "./labelsTr/MSWAL_0112.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0113_0000.nii.gz", + "label": "./labelsTr/MSWAL_0113.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0114_0000.nii.gz", + "label": "./labelsTr/MSWAL_0114.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0117_0000.nii.gz", + "label": "./labelsTr/MSWAL_0117.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0119_0000.nii.gz", + "label": "./labelsTr/MSWAL_0119.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0120_0000.nii.gz", + "label": "./labelsTr/MSWAL_0120.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0122_0000.nii.gz", + "label": "./labelsTr/MSWAL_0122.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0124_0000.nii.gz", + "label": "./labelsTr/MSWAL_0124.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0125_0000.nii.gz", + "label": "./labelsTr/MSWAL_0125.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0126_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0126.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0127_0000.nii.gz", + "label": "./labelsTr/MSWAL_0127.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0128_0000.nii.gz", + "label": "./labelsTr/MSWAL_0128.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0129_0000.nii.gz", + "label": "./labelsTr/MSWAL_0129.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0130_0000.nii.gz", + "label": "./labelsTr/MSWAL_0130.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0132_0000.nii.gz", + "label": "./labelsTr/MSWAL_0132.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0133_0000.nii.gz", + "label": "./labelsTr/MSWAL_0133.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0134_0000.nii.gz", + "label": "./labelsTr/MSWAL_0134.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0136_0000.nii.gz", + "label": "./labelsTr/MSWAL_0136.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0138_0000.nii.gz", + "label": "./labelsTr/MSWAL_0138.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0139_0000.nii.gz", + "label": "./labelsTr/MSWAL_0139.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0140_0000.nii.gz", + "label": "./labelsTr/MSWAL_0140.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0141_0000.nii.gz", + "label": "./labelsTr/MSWAL_0141.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0142_0000.nii.gz", + "label": "./labelsTr/MSWAL_0142.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0143_0000.nii.gz", + "label": "./labelsTr/MSWAL_0143.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0145_0000.nii.gz", + "label": "./labelsTr/MSWAL_0145.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0147_0000.nii.gz", + "label": "./labelsTr/MSWAL_0147.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0148_0000.nii.gz", + "label": "./labelsTr/MSWAL_0148.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0149_0000.nii.gz", + "label": "./labelsTr/MSWAL_0149.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0150_0000.nii.gz", + "label": "./labelsTr/MSWAL_0150.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0151_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0151.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0152_0000.nii.gz", + "label": "./labelsTr/MSWAL_0152.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0157_0000.nii.gz", + "label": "./labelsTr/MSWAL_0157.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0159_0000.nii.gz", + "label": "./labelsTr/MSWAL_0159.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0162_0000.nii.gz", + "label": "./labelsTr/MSWAL_0162.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0163_0000.nii.gz", + "label": "./labelsTr/MSWAL_0163.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0165_0000.nii.gz", + "label": "./labelsTr/MSWAL_0165.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0166_0000.nii.gz", + "label": "./labelsTr/MSWAL_0166.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0167_0000.nii.gz", + "label": "./labelsTr/MSWAL_0167.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0168_0000.nii.gz", + "label": "./labelsTr/MSWAL_0168.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0169_0000.nii.gz", + "label": "./labelsTr/MSWAL_0169.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0170_0000.nii.gz", + "label": "./labelsTr/MSWAL_0170.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0171_0000.nii.gz", + "label": "./labelsTr/MSWAL_0171.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0172_0000.nii.gz", + "label": "./labelsTr/MSWAL_0172.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0173_0000.nii.gz", + "label": "./labelsTr/MSWAL_0173.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0174_0000.nii.gz", + "label": "./labelsTr/MSWAL_0174.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0175_0000.nii.gz", + "label": "./labelsTr/MSWAL_0175.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0176_0000.nii.gz", + "label": "./labelsTr/MSWAL_0176.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0177_0000.nii.gz", + "label": "./labelsTr/MSWAL_0177.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0178_0000.nii.gz", + "label": "./labelsTr/MSWAL_0178.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0179_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0179.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0180_0000.nii.gz", + "label": "./labelsTr/MSWAL_0180.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0182_0000.nii.gz", + "label": "./labelsTr/MSWAL_0182.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0183_0000.nii.gz", + "label": "./labelsTr/MSWAL_0183.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0184_0000.nii.gz", + "label": "./labelsTr/MSWAL_0184.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0185_0000.nii.gz", + "label": "./labelsTr/MSWAL_0185.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0186_0000.nii.gz", + "label": "./labelsTr/MSWAL_0186.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0187_0000.nii.gz", + "label": "./labelsTr/MSWAL_0187.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0188_0000.nii.gz", + "label": "./labelsTr/MSWAL_0188.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0189_0000.nii.gz", + "label": "./labelsTr/MSWAL_0189.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0193_0000.nii.gz", + "label": "./labelsTr/MSWAL_0193.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0194_0000.nii.gz", + "label": "./labelsTr/MSWAL_0194.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0195_0000.nii.gz", + "label": "./labelsTr/MSWAL_0195.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0199_0000.nii.gz", + "label": "./labelsTr/MSWAL_0199.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0201_0000.nii.gz", + "label": "./labelsTr/MSWAL_0201.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0202_0000.nii.gz", + "label": "./labelsTr/MSWAL_0202.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0203_0000.nii.gz", + "label": "./labelsTr/MSWAL_0203.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0204_0000.nii.gz", + "label": "./labelsTr/MSWAL_0204.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0207_0000.nii.gz", + "label": "./labelsTr/MSWAL_0207.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0208_0000.nii.gz", + "label": "./labelsTr/MSWAL_0208.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0209_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0209.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0214_0000.nii.gz", + "label": "./labelsTr/MSWAL_0214.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0217_0000.nii.gz", + "label": "./labelsTr/MSWAL_0217.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0218_0000.nii.gz", + "label": "./labelsTr/MSWAL_0218.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0219_0000.nii.gz", + "label": "./labelsTr/MSWAL_0219.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0220_0000.nii.gz", + "label": "./labelsTr/MSWAL_0220.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0221_0000.nii.gz", + "label": "./labelsTr/MSWAL_0221.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0222_0000.nii.gz", + "label": "./labelsTr/MSWAL_0222.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0223_0000.nii.gz", + "label": "./labelsTr/MSWAL_0223.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0224_0000.nii.gz", + "label": "./labelsTr/MSWAL_0224.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0225_0000.nii.gz", + "label": "./labelsTr/MSWAL_0225.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0226_0000.nii.gz", + "label": "./labelsTr/MSWAL_0226.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0227_0000.nii.gz", + "label": "./labelsTr/MSWAL_0227.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0228_0000.nii.gz", + "label": "./labelsTr/MSWAL_0228.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0229_0000.nii.gz", + "label": "./labelsTr/MSWAL_0229.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0230_0000.nii.gz", + "label": "./labelsTr/MSWAL_0230.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0233_0000.nii.gz", + "label": "./labelsTr/MSWAL_0233.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0234_0000.nii.gz", + "label": "./labelsTr/MSWAL_0234.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0238_0000.nii.gz", + "label": "./labelsTr/MSWAL_0238.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0241_0000.nii.gz", + "label": "./labelsTr/MSWAL_0241.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0242_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0242.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0243_0000.nii.gz", + "label": "./labelsTr/MSWAL_0243.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0245_0000.nii.gz", + "label": "./labelsTr/MSWAL_0245.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0246_0000.nii.gz", + "label": "./labelsTr/MSWAL_0246.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0247_0000.nii.gz", + "label": "./labelsTr/MSWAL_0247.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0248_0000.nii.gz", + "label": "./labelsTr/MSWAL_0248.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0251_0000.nii.gz", + "label": "./labelsTr/MSWAL_0251.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0252_0000.nii.gz", + "label": "./labelsTr/MSWAL_0252.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0253_0000.nii.gz", + "label": "./labelsTr/MSWAL_0253.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0254_0000.nii.gz", + "label": "./labelsTr/MSWAL_0254.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0255_0000.nii.gz", + "label": "./labelsTr/MSWAL_0255.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0256_0000.nii.gz", + "label": "./labelsTr/MSWAL_0256.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0257_0000.nii.gz", + "label": "./labelsTr/MSWAL_0257.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0258_0000.nii.gz", + "label": "./labelsTr/MSWAL_0258.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0259_0000.nii.gz", + "label": "./labelsTr/MSWAL_0259.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0260_0000.nii.gz", + "label": "./labelsTr/MSWAL_0260.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0261_0000.nii.gz", + "label": "./labelsTr/MSWAL_0261.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0262_0000.nii.gz", + "label": "./labelsTr/MSWAL_0262.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0263_0000.nii.gz", + "label": "./labelsTr/MSWAL_0263.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0264_0000.nii.gz", + "label": "./labelsTr/MSWAL_0264.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0265_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0265.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0267_0000.nii.gz", + "label": "./labelsTr/MSWAL_0267.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0270_0000.nii.gz", + "label": "./labelsTr/MSWAL_0270.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0271_0000.nii.gz", + "label": "./labelsTr/MSWAL_0271.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0272_0000.nii.gz", + "label": "./labelsTr/MSWAL_0272.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0273_0000.nii.gz", + "label": "./labelsTr/MSWAL_0273.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0274_0000.nii.gz", + "label": "./labelsTr/MSWAL_0274.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0275_0000.nii.gz", + "label": "./labelsTr/MSWAL_0275.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0276_0000.nii.gz", + "label": "./labelsTr/MSWAL_0276.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0277_0000.nii.gz", + "label": "./labelsTr/MSWAL_0277.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0278_0000.nii.gz", + "label": "./labelsTr/MSWAL_0278.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0279_0000.nii.gz", + "label": "./labelsTr/MSWAL_0279.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0281_0000.nii.gz", + "label": "./labelsTr/MSWAL_0281.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0282_0000.nii.gz", + "label": "./labelsTr/MSWAL_0282.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0283_0000.nii.gz", + "label": "./labelsTr/MSWAL_0283.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0284_0000.nii.gz", + "label": "./labelsTr/MSWAL_0284.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0285_0000.nii.gz", + "label": "./labelsTr/MSWAL_0285.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0288_0000.nii.gz", + "label": "./labelsTr/MSWAL_0288.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0289_0000.nii.gz", + "label": "./labelsTr/MSWAL_0289.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0290_0000.nii.gz", + "label": "./labelsTr/MSWAL_0290.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0293_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0293.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0296_0000.nii.gz", + "label": "./labelsTr/MSWAL_0296.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0297_0000.nii.gz", + "label": "./labelsTr/MSWAL_0297.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0301_0000.nii.gz", + "label": "./labelsTr/MSWAL_0301.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0302_0000.nii.gz", + "label": "./labelsTr/MSWAL_0302.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0303_0000.nii.gz", + "label": "./labelsTr/MSWAL_0303.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0306_0000.nii.gz", + "label": "./labelsTr/MSWAL_0306.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0307_0000.nii.gz", + "label": "./labelsTr/MSWAL_0307.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0308_0000.nii.gz", + "label": "./labelsTr/MSWAL_0308.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0311_0000.nii.gz", + "label": "./labelsTr/MSWAL_0311.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0312_0000.nii.gz", + "label": "./labelsTr/MSWAL_0312.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0313_0000.nii.gz", + "label": "./labelsTr/MSWAL_0313.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0314_0000.nii.gz", + "label": "./labelsTr/MSWAL_0314.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0316_0000.nii.gz", + "label": "./labelsTr/MSWAL_0316.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0317_0000.nii.gz", + "label": "./labelsTr/MSWAL_0317.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0318_0000.nii.gz", + "label": "./labelsTr/MSWAL_0318.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0320_0000.nii.gz", + "label": "./labelsTr/MSWAL_0320.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0323_0000.nii.gz", + "label": "./labelsTr/MSWAL_0323.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0324_0000.nii.gz", + "label": "./labelsTr/MSWAL_0324.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0326_0000.nii.gz", + "label": "./labelsTr/MSWAL_0326.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0327_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0327.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0328_0000.nii.gz", + "label": "./labelsTr/MSWAL_0328.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0330_0000.nii.gz", + "label": "./labelsTr/MSWAL_0330.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0331_0000.nii.gz", + "label": "./labelsTr/MSWAL_0331.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0332_0000.nii.gz", + "label": "./labelsTr/MSWAL_0332.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0333_0000.nii.gz", + "label": "./labelsTr/MSWAL_0333.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0334_0000.nii.gz", + "label": "./labelsTr/MSWAL_0334.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0335_0000.nii.gz", + "label": "./labelsTr/MSWAL_0335.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0336_0000.nii.gz", + "label": "./labelsTr/MSWAL_0336.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0337_0000.nii.gz", + "label": "./labelsTr/MSWAL_0337.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0338_0000.nii.gz", + "label": "./labelsTr/MSWAL_0338.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0341_0000.nii.gz", + "label": "./labelsTr/MSWAL_0341.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0342_0000.nii.gz", + "label": "./labelsTr/MSWAL_0342.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0343_0000.nii.gz", + "label": "./labelsTr/MSWAL_0343.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0344_0000.nii.gz", + "label": "./labelsTr/MSWAL_0344.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0345_0000.nii.gz", + "label": "./labelsTr/MSWAL_0345.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0346_0000.nii.gz", + "label": "./labelsTr/MSWAL_0346.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0348_0000.nii.gz", + "label": "./labelsTr/MSWAL_0348.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0353_0000.nii.gz", + "label": "./labelsTr/MSWAL_0353.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0354_0000.nii.gz", + "label": "./labelsTr/MSWAL_0354.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0355_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0355.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0356_0000.nii.gz", + "label": "./labelsTr/MSWAL_0356.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0357_0000.nii.gz", + "label": "./labelsTr/MSWAL_0357.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0360_0000.nii.gz", + "label": "./labelsTr/MSWAL_0360.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0361_0000.nii.gz", + "label": "./labelsTr/MSWAL_0361.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0362_0000.nii.gz", + "label": "./labelsTr/MSWAL_0362.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0363_0000.nii.gz", + "label": "./labelsTr/MSWAL_0363.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0365_0000.nii.gz", + "label": "./labelsTr/MSWAL_0365.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0366_0000.nii.gz", + "label": "./labelsTr/MSWAL_0366.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0369_0000.nii.gz", + "label": "./labelsTr/MSWAL_0369.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0370_0000.nii.gz", + "label": "./labelsTr/MSWAL_0370.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0373_0000.nii.gz", + "label": "./labelsTr/MSWAL_0373.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0374_0000.nii.gz", + "label": "./labelsTr/MSWAL_0374.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0375_0000.nii.gz", + "label": "./labelsTr/MSWAL_0375.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0376_0000.nii.gz", + "label": "./labelsTr/MSWAL_0376.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0378_0000.nii.gz", + "label": "./labelsTr/MSWAL_0378.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0379_0000.nii.gz", + "label": "./labelsTr/MSWAL_0379.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0380_0000.nii.gz", + "label": "./labelsTr/MSWAL_0380.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0381_0000.nii.gz", + "label": "./labelsTr/MSWAL_0381.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0382_0000.nii.gz", + "label": "./labelsTr/MSWAL_0382.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0387_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0387.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0388_0000.nii.gz", + "label": "./labelsTr/MSWAL_0388.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0389_0000.nii.gz", + "label": "./labelsTr/MSWAL_0389.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0390_0000.nii.gz", + "label": "./labelsTr/MSWAL_0390.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0391_0000.nii.gz", + "label": "./labelsTr/MSWAL_0391.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0392_0000.nii.gz", + "label": "./labelsTr/MSWAL_0392.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0393_0000.nii.gz", + "label": "./labelsTr/MSWAL_0393.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0397_0000.nii.gz", + "label": "./labelsTr/MSWAL_0397.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0398_0000.nii.gz", + "label": "./labelsTr/MSWAL_0398.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0399_0000.nii.gz", + "label": "./labelsTr/MSWAL_0399.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0400_0000.nii.gz", + "label": "./labelsTr/MSWAL_0400.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0402_0000.nii.gz", + "label": "./labelsTr/MSWAL_0402.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0403_0000.nii.gz", + "label": "./labelsTr/MSWAL_0403.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0407_0000.nii.gz", + "label": "./labelsTr/MSWAL_0407.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0409_0000.nii.gz", + "label": "./labelsTr/MSWAL_0409.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0410_0000.nii.gz", + "label": "./labelsTr/MSWAL_0410.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0411_0000.nii.gz", + "label": "./labelsTr/MSWAL_0411.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0412_0000.nii.gz", + "label": "./labelsTr/MSWAL_0412.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0414_0000.nii.gz", + "label": "./labelsTr/MSWAL_0414.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0415_0000.nii.gz", + "label": "./labelsTr/MSWAL_0415.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0416_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0416.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0417_0000.nii.gz", + "label": "./labelsTr/MSWAL_0417.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0418_0000.nii.gz", + "label": "./labelsTr/MSWAL_0418.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0419_0000.nii.gz", + "label": "./labelsTr/MSWAL_0419.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0420_0000.nii.gz", + "label": "./labelsTr/MSWAL_0420.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0421_0000.nii.gz", + "label": "./labelsTr/MSWAL_0421.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0422_0000.nii.gz", + "label": "./labelsTr/MSWAL_0422.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0423_0000.nii.gz", + "label": "./labelsTr/MSWAL_0423.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0425_0000.nii.gz", + "label": "./labelsTr/MSWAL_0425.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0426_0000.nii.gz", + "label": "./labelsTr/MSWAL_0426.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0427_0000.nii.gz", + "label": "./labelsTr/MSWAL_0427.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0428_0000.nii.gz", + "label": "./labelsTr/MSWAL_0428.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0429_0000.nii.gz", + "label": "./labelsTr/MSWAL_0429.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0430_0000.nii.gz", + "label": "./labelsTr/MSWAL_0430.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0431_0000.nii.gz", + "label": "./labelsTr/MSWAL_0431.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0432_0000.nii.gz", + "label": "./labelsTr/MSWAL_0432.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0434_0000.nii.gz", + "label": "./labelsTr/MSWAL_0434.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0435_0000.nii.gz", + "label": "./labelsTr/MSWAL_0435.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0436_0000.nii.gz", + "label": "./labelsTr/MSWAL_0436.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0437_0000.nii.gz", + "label": "./labelsTr/MSWAL_0437.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0438_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0438.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0439_0000.nii.gz", + "label": "./labelsTr/MSWAL_0439.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0440_0000.nii.gz", + "label": "./labelsTr/MSWAL_0440.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0442_0000.nii.gz", + "label": "./labelsTr/MSWAL_0442.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0446_0000.nii.gz", + "label": "./labelsTr/MSWAL_0446.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0447_0000.nii.gz", + "label": "./labelsTr/MSWAL_0447.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0452_0000.nii.gz", + "label": "./labelsTr/MSWAL_0452.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0453_0000.nii.gz", + "label": "./labelsTr/MSWAL_0453.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0455_0000.nii.gz", + "label": "./labelsTr/MSWAL_0455.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0457_0000.nii.gz", + "label": "./labelsTr/MSWAL_0457.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0460_0000.nii.gz", + "label": "./labelsTr/MSWAL_0460.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0461_0000.nii.gz", + "label": "./labelsTr/MSWAL_0461.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0463_0000.nii.gz", + "label": "./labelsTr/MSWAL_0463.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0464_0000.nii.gz", + "label": "./labelsTr/MSWAL_0464.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0465_0000.nii.gz", + "label": "./labelsTr/MSWAL_0465.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0466_0000.nii.gz", + "label": "./labelsTr/MSWAL_0466.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0468_0000.nii.gz", + "label": "./labelsTr/MSWAL_0468.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0470_0000.nii.gz", + "label": "./labelsTr/MSWAL_0470.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0471_0000.nii.gz", + "label": "./labelsTr/MSWAL_0471.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0473_0000.nii.gz", + "label": "./labelsTr/MSWAL_0473.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0474_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0474.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0475_0000.nii.gz", + "label": "./labelsTr/MSWAL_0475.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0476_0000.nii.gz", + "label": "./labelsTr/MSWAL_0476.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0477_0000.nii.gz", + "label": "./labelsTr/MSWAL_0477.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0479_0000.nii.gz", + "label": "./labelsTr/MSWAL_0479.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0480_0000.nii.gz", + "label": "./labelsTr/MSWAL_0480.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0482_0000.nii.gz", + "label": "./labelsTr/MSWAL_0482.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0483_0000.nii.gz", + "label": "./labelsTr/MSWAL_0483.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0484_0000.nii.gz", + "label": "./labelsTr/MSWAL_0484.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0485_0000.nii.gz", + "label": "./labelsTr/MSWAL_0485.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0486_0000.nii.gz", + "label": "./labelsTr/MSWAL_0486.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0487_0000.nii.gz", + "label": "./labelsTr/MSWAL_0487.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0488_0000.nii.gz", + "label": "./labelsTr/MSWAL_0488.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0489_0000.nii.gz", + "label": "./labelsTr/MSWAL_0489.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0490_0000.nii.gz", + "label": "./labelsTr/MSWAL_0490.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0491_0000.nii.gz", + "label": "./labelsTr/MSWAL_0491.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0492_0000.nii.gz", + "label": "./labelsTr/MSWAL_0492.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0493_0000.nii.gz", + "label": "./labelsTr/MSWAL_0493.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0495_0000.nii.gz", + "label": "./labelsTr/MSWAL_0495.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0497_0000.nii.gz", + "label": "./labelsTr/MSWAL_0497.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0498_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0498.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0500_0000.nii.gz", + "label": "./labelsTr/MSWAL_0500.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0501_0000.nii.gz", + "label": "./labelsTr/MSWAL_0501.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0504_0000.nii.gz", + "label": "./labelsTr/MSWAL_0504.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0505_0000.nii.gz", + "label": "./labelsTr/MSWAL_0505.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0506_0000.nii.gz", + "label": "./labelsTr/MSWAL_0506.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0507_0000.nii.gz", + "label": "./labelsTr/MSWAL_0507.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0508_0000.nii.gz", + "label": "./labelsTr/MSWAL_0508.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0509_0000.nii.gz", + "label": "./labelsTr/MSWAL_0509.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0510_0000.nii.gz", + "label": "./labelsTr/MSWAL_0510.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0512_0000.nii.gz", + "label": "./labelsTr/MSWAL_0512.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0516_0000.nii.gz", + "label": "./labelsTr/MSWAL_0516.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0518_0000.nii.gz", + "label": "./labelsTr/MSWAL_0518.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0519_0000.nii.gz", + "label": "./labelsTr/MSWAL_0519.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0521_0000.nii.gz", + "label": "./labelsTr/MSWAL_0521.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0522_0000.nii.gz", + "label": "./labelsTr/MSWAL_0522.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0523_0000.nii.gz", + "label": "./labelsTr/MSWAL_0523.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0524_0000.nii.gz", + "label": "./labelsTr/MSWAL_0524.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0526_0000.nii.gz", + "label": "./labelsTr/MSWAL_0526.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0527_0000.nii.gz", + "label": "./labelsTr/MSWAL_0527.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0530_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0530.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0531_0000.nii.gz", + "label": "./labelsTr/MSWAL_0531.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0534_0000.nii.gz", + "label": "./labelsTr/MSWAL_0534.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0535_0000.nii.gz", + "label": "./labelsTr/MSWAL_0535.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0536_0000.nii.gz", + "label": "./labelsTr/MSWAL_0536.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0538_0000.nii.gz", + "label": "./labelsTr/MSWAL_0538.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0539_0000.nii.gz", + "label": "./labelsTr/MSWAL_0539.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0540_0000.nii.gz", + "label": "./labelsTr/MSWAL_0540.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0542_0000.nii.gz", + "label": "./labelsTr/MSWAL_0542.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0544_0000.nii.gz", + "label": "./labelsTr/MSWAL_0544.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0545_0000.nii.gz", + "label": "./labelsTr/MSWAL_0545.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0546_0000.nii.gz", + "label": "./labelsTr/MSWAL_0546.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0547_0000.nii.gz", + "label": "./labelsTr/MSWAL_0547.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0548_0000.nii.gz", + "label": "./labelsTr/MSWAL_0548.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0549_0000.nii.gz", + "label": "./labelsTr/MSWAL_0549.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0550_0000.nii.gz", + "label": "./labelsTr/MSWAL_0550.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0551_0000.nii.gz", + "label": "./labelsTr/MSWAL_0551.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0552_0000.nii.gz", + "label": "./labelsTr/MSWAL_0552.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0553_0000.nii.gz", + "label": "./labelsTr/MSWAL_0553.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0554_0000.nii.gz", + "label": "./labelsTr/MSWAL_0554.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0555_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0555.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0556_0000.nii.gz", + "label": "./labelsTr/MSWAL_0556.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0557_0000.nii.gz", + "label": "./labelsTr/MSWAL_0557.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0558_0000.nii.gz", + "label": "./labelsTr/MSWAL_0558.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0559_0000.nii.gz", + "label": "./labelsTr/MSWAL_0559.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0561_0000.nii.gz", + "label": "./labelsTr/MSWAL_0561.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0562_0000.nii.gz", + "label": "./labelsTr/MSWAL_0562.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0563_0000.nii.gz", + "label": "./labelsTr/MSWAL_0563.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0564_0000.nii.gz", + "label": "./labelsTr/MSWAL_0564.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0566_0000.nii.gz", + "label": "./labelsTr/MSWAL_0566.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0567_0000.nii.gz", + "label": "./labelsTr/MSWAL_0567.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0568_0000.nii.gz", + "label": "./labelsTr/MSWAL_0568.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0571_0000.nii.gz", + "label": "./labelsTr/MSWAL_0571.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0573_0000.nii.gz", + "label": "./labelsTr/MSWAL_0573.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0574_0000.nii.gz", + "label": "./labelsTr/MSWAL_0574.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0575_0000.nii.gz", + "label": "./labelsTr/MSWAL_0575.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0577_0000.nii.gz", + "label": "./labelsTr/MSWAL_0577.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0578_0000.nii.gz", + "label": "./labelsTr/MSWAL_0578.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0579_0000.nii.gz", + "label": "./labelsTr/MSWAL_0579.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0580_0000.nii.gz", + "label": "./labelsTr/MSWAL_0580.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0581_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0581.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0582_0000.nii.gz", + "label": "./labelsTr/MSWAL_0582.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0583_0000.nii.gz", + "label": "./labelsTr/MSWAL_0583.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0584_0000.nii.gz", + "label": "./labelsTr/MSWAL_0584.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0586_0000.nii.gz", + "label": "./labelsTr/MSWAL_0586.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0590_0000.nii.gz", + "label": "./labelsTr/MSWAL_0590.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0591_0000.nii.gz", + "label": "./labelsTr/MSWAL_0591.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0592_0000.nii.gz", + "label": "./labelsTr/MSWAL_0592.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0593_0000.nii.gz", + "label": "./labelsTr/MSWAL_0593.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0595_0000.nii.gz", + "label": "./labelsTr/MSWAL_0595.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0596_0000.nii.gz", + "label": "./labelsTr/MSWAL_0596.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0597_0000.nii.gz", + "label": "./labelsTr/MSWAL_0597.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0598_0000.nii.gz", + "label": "./labelsTr/MSWAL_0598.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0599_0000.nii.gz", + "label": "./labelsTr/MSWAL_0599.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0600_0000.nii.gz", + "label": "./labelsTr/MSWAL_0600.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0601_0000.nii.gz", + "label": "./labelsTr/MSWAL_0601.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0602_0000.nii.gz", + "label": "./labelsTr/MSWAL_0602.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0604_0000.nii.gz", + "label": "./labelsTr/MSWAL_0604.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0605_0000.nii.gz", + "label": "./labelsTr/MSWAL_0605.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0608_0000.nii.gz", + "label": "./labelsTr/MSWAL_0608.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0612_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0612.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0614_0000.nii.gz", + "label": "./labelsTr/MSWAL_0614.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0615_0000.nii.gz", + "label": "./labelsTr/MSWAL_0615.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0616_0000.nii.gz", + "label": "./labelsTr/MSWAL_0616.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0617_0000.nii.gz", + "label": "./labelsTr/MSWAL_0617.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0621_0000.nii.gz", + "label": "./labelsTr/MSWAL_0621.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0623_0000.nii.gz", + "label": "./labelsTr/MSWAL_0623.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0625_0000.nii.gz", + "label": "./labelsTr/MSWAL_0625.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0626_0000.nii.gz", + "label": "./labelsTr/MSWAL_0626.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0627_0000.nii.gz", + "label": "./labelsTr/MSWAL_0627.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0628_0000.nii.gz", + "label": "./labelsTr/MSWAL_0628.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0629_0000.nii.gz", + "label": "./labelsTr/MSWAL_0629.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0630_0000.nii.gz", + "label": "./labelsTr/MSWAL_0630.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0632_0000.nii.gz", + "label": "./labelsTr/MSWAL_0632.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0635_0000.nii.gz", + "label": "./labelsTr/MSWAL_0635.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0636_0000.nii.gz", + "label": "./labelsTr/MSWAL_0636.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0638_0000.nii.gz", + "label": "./labelsTr/MSWAL_0638.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0640_0000.nii.gz", + "label": "./labelsTr/MSWAL_0640.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0641_0000.nii.gz", + "label": "./labelsTr/MSWAL_0641.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0643_0000.nii.gz", + "label": "./labelsTr/MSWAL_0643.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0644_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0644.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0646_0000.nii.gz", + "label": "./labelsTr/MSWAL_0646.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0648_0000.nii.gz", + "label": "./labelsTr/MSWAL_0648.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0649_0000.nii.gz", + "label": "./labelsTr/MSWAL_0649.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0650_0000.nii.gz", + "label": "./labelsTr/MSWAL_0650.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0651_0000.nii.gz", + "label": "./labelsTr/MSWAL_0651.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0653_0000.nii.gz", + "label": "./labelsTr/MSWAL_0653.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0654_0000.nii.gz", + "label": "./labelsTr/MSWAL_0654.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0655_0000.nii.gz", + "label": "./labelsTr/MSWAL_0655.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0656_0000.nii.gz", + "label": "./labelsTr/MSWAL_0656.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0658_0000.nii.gz", + "label": "./labelsTr/MSWAL_0658.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0660_0000.nii.gz", + "label": "./labelsTr/MSWAL_0660.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0661_0000.nii.gz", + "label": "./labelsTr/MSWAL_0661.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0662_0000.nii.gz", + "label": "./labelsTr/MSWAL_0662.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0663_0000.nii.gz", + "label": "./labelsTr/MSWAL_0663.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0666_0000.nii.gz", + "label": "./labelsTr/MSWAL_0666.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0667_0000.nii.gz", + "label": "./labelsTr/MSWAL_0667.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0668_0000.nii.gz", + "label": "./labelsTr/MSWAL_0668.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0669_0000.nii.gz", + "label": "./labelsTr/MSWAL_0669.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0670_0000.nii.gz", + "label": "./labelsTr/MSWAL_0670.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0671_0000.nii.gz", + "label": 
"./labelsTr/MSWAL_0671.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0673_0000.nii.gz", + "label": "./labelsTr/MSWAL_0673.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0674_0000.nii.gz", + "label": "./labelsTr/MSWAL_0674.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0675_0000.nii.gz", + "label": "./labelsTr/MSWAL_0675.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0676_0000.nii.gz", + "label": "./labelsTr/MSWAL_0676.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0677_0000.nii.gz", + "label": "./labelsTr/MSWAL_0677.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0679_0000.nii.gz", + "label": "./labelsTr/MSWAL_0679.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0680_0000.nii.gz", + "label": "./labelsTr/MSWAL_0680.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0681_0000.nii.gz", + "label": "./labelsTr/MSWAL_0681.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0682_0000.nii.gz", + "label": "./labelsTr/MSWAL_0682.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0685_0000.nii.gz", + "label": "./labelsTr/MSWAL_0685.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0686_0000.nii.gz", + "label": "./labelsTr/MSWAL_0686.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0687_0000.nii.gz", + "label": "./labelsTr/MSWAL_0687.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0688_0000.nii.gz", + "label": "./labelsTr/MSWAL_0688.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0690_0000.nii.gz", + "label": "./labelsTr/MSWAL_0690.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0692_0000.nii.gz", + "label": "./labelsTr/MSWAL_0692.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0693_0000.nii.gz", + "label": "./labelsTr/MSWAL_0693.nii.gz" + }, + { + "image": "./imagesTr/MSWAL_0694_0000.nii.gz", + "label": "./labelsTr/MSWAL_0694.nii.gz" + } + ], + "test": [ + { + "image": "./imagesTs/MSWAL_0004_0000.nii.gz", + "label": "./labelsTs/MSWAL_0004.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0005_0000.nii.gz", + "label": "./labelsTs/MSWAL_0005.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0006_0000.nii.gz", + "label": 
"./labelsTs/MSWAL_0006.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0007_0000.nii.gz", + "label": "./labelsTs/MSWAL_0007.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0010_0000.nii.gz", + "label": "./labelsTs/MSWAL_0010.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0012_0000.nii.gz", + "label": "./labelsTs/MSWAL_0012.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0016_0000.nii.gz", + "label": "./labelsTs/MSWAL_0016.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0019_0000.nii.gz", + "label": "./labelsTs/MSWAL_0019.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0023_0000.nii.gz", + "label": "./labelsTs/MSWAL_0023.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0025_0000.nii.gz", + "label": "./labelsTs/MSWAL_0025.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0030_0000.nii.gz", + "label": "./labelsTs/MSWAL_0030.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0036_0000.nii.gz", + "label": "./labelsTs/MSWAL_0036.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0043_0000.nii.gz", + "label": "./labelsTs/MSWAL_0043.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0044_0000.nii.gz", + "label": "./labelsTs/MSWAL_0044.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0047_0000.nii.gz", + "label": "./labelsTs/MSWAL_0047.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0048_0000.nii.gz", + "label": "./labelsTs/MSWAL_0048.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0053_0000.nii.gz", + "label": "./labelsTs/MSWAL_0053.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0058_0000.nii.gz", + "label": "./labelsTs/MSWAL_0058.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0062_0000.nii.gz", + "label": "./labelsTs/MSWAL_0062.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0068_0000.nii.gz", + "label": "./labelsTs/MSWAL_0068.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0070_0000.nii.gz", + "label": "./labelsTs/MSWAL_0070.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0071_0000.nii.gz", + "label": "./labelsTs/MSWAL_0071.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0073_0000.nii.gz", + "label": 
"./labelsTs/MSWAL_0073.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0074_0000.nii.gz", + "label": "./labelsTs/MSWAL_0074.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0076_0000.nii.gz", + "label": "./labelsTs/MSWAL_0076.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0078_0000.nii.gz", + "label": "./labelsTs/MSWAL_0078.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0079_0000.nii.gz", + "label": "./labelsTs/MSWAL_0079.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0081_0000.nii.gz", + "label": "./labelsTs/MSWAL_0081.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0087_0000.nii.gz", + "label": "./labelsTs/MSWAL_0087.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0090_0000.nii.gz", + "label": "./labelsTs/MSWAL_0090.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0091_0000.nii.gz", + "label": "./labelsTs/MSWAL_0091.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0097_0000.nii.gz", + "label": "./labelsTs/MSWAL_0097.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0100_0000.nii.gz", + "label": "./labelsTs/MSWAL_0100.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0107_0000.nii.gz", + "label": "./labelsTs/MSWAL_0107.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0115_0000.nii.gz", + "label": "./labelsTs/MSWAL_0115.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0116_0000.nii.gz", + "label": "./labelsTs/MSWAL_0116.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0118_0000.nii.gz", + "label": "./labelsTs/MSWAL_0118.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0121_0000.nii.gz", + "label": "./labelsTs/MSWAL_0121.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0123_0000.nii.gz", + "label": "./labelsTs/MSWAL_0123.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0131_0000.nii.gz", + "label": "./labelsTs/MSWAL_0131.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0135_0000.nii.gz", + "label": "./labelsTs/MSWAL_0135.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0137_0000.nii.gz", + "label": "./labelsTs/MSWAL_0137.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0144_0000.nii.gz", + "label": 
"./labelsTs/MSWAL_0144.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0146_0000.nii.gz", + "label": "./labelsTs/MSWAL_0146.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0153_0000.nii.gz", + "label": "./labelsTs/MSWAL_0153.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0154_0000.nii.gz", + "label": "./labelsTs/MSWAL_0154.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0155_0000.nii.gz", + "label": "./labelsTs/MSWAL_0155.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0156_0000.nii.gz", + "label": "./labelsTs/MSWAL_0156.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0158_0000.nii.gz", + "label": "./labelsTs/MSWAL_0158.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0160_0000.nii.gz", + "label": "./labelsTs/MSWAL_0160.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0161_0000.nii.gz", + "label": "./labelsTs/MSWAL_0161.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0164_0000.nii.gz", + "label": "./labelsTs/MSWAL_0164.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0181_0000.nii.gz", + "label": "./labelsTs/MSWAL_0181.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0190_0000.nii.gz", + "label": "./labelsTs/MSWAL_0190.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0191_0000.nii.gz", + "label": "./labelsTs/MSWAL_0191.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0192_0000.nii.gz", + "label": "./labelsTs/MSWAL_0192.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0196_0000.nii.gz", + "label": "./labelsTs/MSWAL_0196.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0197_0000.nii.gz", + "label": "./labelsTs/MSWAL_0197.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0198_0000.nii.gz", + "label": "./labelsTs/MSWAL_0198.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0200_0000.nii.gz", + "label": "./labelsTs/MSWAL_0200.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0205_0000.nii.gz", + "label": "./labelsTs/MSWAL_0205.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0206_0000.nii.gz", + "label": "./labelsTs/MSWAL_0206.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0210_0000.nii.gz", + "label": 
"./labelsTs/MSWAL_0210.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0211_0000.nii.gz", + "label": "./labelsTs/MSWAL_0211.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0212_0000.nii.gz", + "label": "./labelsTs/MSWAL_0212.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0213_0000.nii.gz", + "label": "./labelsTs/MSWAL_0213.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0215_0000.nii.gz", + "label": "./labelsTs/MSWAL_0215.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0216_0000.nii.gz", + "label": "./labelsTs/MSWAL_0216.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0231_0000.nii.gz", + "label": "./labelsTs/MSWAL_0231.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0232_0000.nii.gz", + "label": "./labelsTs/MSWAL_0232.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0235_0000.nii.gz", + "label": "./labelsTs/MSWAL_0235.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0236_0000.nii.gz", + "label": "./labelsTs/MSWAL_0236.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0237_0000.nii.gz", + "label": "./labelsTs/MSWAL_0237.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0239_0000.nii.gz", + "label": "./labelsTs/MSWAL_0239.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0240_0000.nii.gz", + "label": "./labelsTs/MSWAL_0240.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0244_0000.nii.gz", + "label": "./labelsTs/MSWAL_0244.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0249_0000.nii.gz", + "label": "./labelsTs/MSWAL_0249.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0250_0000.nii.gz", + "label": "./labelsTs/MSWAL_0250.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0266_0000.nii.gz", + "label": "./labelsTs/MSWAL_0266.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0268_0000.nii.gz", + "label": "./labelsTs/MSWAL_0268.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0269_0000.nii.gz", + "label": "./labelsTs/MSWAL_0269.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0280_0000.nii.gz", + "label": "./labelsTs/MSWAL_0280.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0286_0000.nii.gz", + "label": 
"./labelsTs/MSWAL_0286.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0287_0000.nii.gz", + "label": "./labelsTs/MSWAL_0287.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0291_0000.nii.gz", + "label": "./labelsTs/MSWAL_0291.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0292_0000.nii.gz", + "label": "./labelsTs/MSWAL_0292.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0294_0000.nii.gz", + "label": "./labelsTs/MSWAL_0294.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0295_0000.nii.gz", + "label": "./labelsTs/MSWAL_0295.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0298_0000.nii.gz", + "label": "./labelsTs/MSWAL_0298.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0299_0000.nii.gz", + "label": "./labelsTs/MSWAL_0299.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0300_0000.nii.gz", + "label": "./labelsTs/MSWAL_0300.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0304_0000.nii.gz", + "label": "./labelsTs/MSWAL_0304.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0305_0000.nii.gz", + "label": "./labelsTs/MSWAL_0305.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0309_0000.nii.gz", + "label": "./labelsTs/MSWAL_0309.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0310_0000.nii.gz", + "label": "./labelsTs/MSWAL_0310.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0315_0000.nii.gz", + "label": "./labelsTs/MSWAL_0315.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0319_0000.nii.gz", + "label": "./labelsTs/MSWAL_0319.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0321_0000.nii.gz", + "label": "./labelsTs/MSWAL_0321.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0322_0000.nii.gz", + "label": "./labelsTs/MSWAL_0322.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0325_0000.nii.gz", + "label": "./labelsTs/MSWAL_0325.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0329_0000.nii.gz", + "label": "./labelsTs/MSWAL_0329.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0339_0000.nii.gz", + "label": "./labelsTs/MSWAL_0339.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0340_0000.nii.gz", + "label": 
"./labelsTs/MSWAL_0340.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0347_0000.nii.gz", + "label": "./labelsTs/MSWAL_0347.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0349_0000.nii.gz", + "label": "./labelsTs/MSWAL_0349.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0350_0000.nii.gz", + "label": "./labelsTs/MSWAL_0350.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0351_0000.nii.gz", + "label": "./labelsTs/MSWAL_0351.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0352_0000.nii.gz", + "label": "./labelsTs/MSWAL_0352.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0358_0000.nii.gz", + "label": "./labelsTs/MSWAL_0358.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0359_0000.nii.gz", + "label": "./labelsTs/MSWAL_0359.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0364_0000.nii.gz", + "label": "./labelsTs/MSWAL_0364.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0367_0000.nii.gz", + "label": "./labelsTs/MSWAL_0367.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0368_0000.nii.gz", + "label": "./labelsTs/MSWAL_0368.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0371_0000.nii.gz", + "label": "./labelsTs/MSWAL_0371.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0372_0000.nii.gz", + "label": "./labelsTs/MSWAL_0372.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0377_0000.nii.gz", + "label": "./labelsTs/MSWAL_0377.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0383_0000.nii.gz", + "label": "./labelsTs/MSWAL_0383.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0384_0000.nii.gz", + "label": "./labelsTs/MSWAL_0384.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0385_0000.nii.gz", + "label": "./labelsTs/MSWAL_0385.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0386_0000.nii.gz", + "label": "./labelsTs/MSWAL_0386.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0394_0000.nii.gz", + "label": "./labelsTs/MSWAL_0394.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0395_0000.nii.gz", + "label": "./labelsTs/MSWAL_0395.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0396_0000.nii.gz", + "label": 
"./labelsTs/MSWAL_0396.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0401_0000.nii.gz", + "label": "./labelsTs/MSWAL_0401.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0404_0000.nii.gz", + "label": "./labelsTs/MSWAL_0404.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0405_0000.nii.gz", + "label": "./labelsTs/MSWAL_0405.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0406_0000.nii.gz", + "label": "./labelsTs/MSWAL_0406.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0408_0000.nii.gz", + "label": "./labelsTs/MSWAL_0408.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0413_0000.nii.gz", + "label": "./labelsTs/MSWAL_0413.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0424_0000.nii.gz", + "label": "./labelsTs/MSWAL_0424.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0433_0000.nii.gz", + "label": "./labelsTs/MSWAL_0433.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0441_0000.nii.gz", + "label": "./labelsTs/MSWAL_0441.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0443_0000.nii.gz", + "label": "./labelsTs/MSWAL_0443.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0444_0000.nii.gz", + "label": "./labelsTs/MSWAL_0444.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0445_0000.nii.gz", + "label": "./labelsTs/MSWAL_0445.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0448_0000.nii.gz", + "label": "./labelsTs/MSWAL_0448.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0449_0000.nii.gz", + "label": "./labelsTs/MSWAL_0449.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0450_0000.nii.gz", + "label": "./labelsTs/MSWAL_0450.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0451_0000.nii.gz", + "label": "./labelsTs/MSWAL_0451.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0454_0000.nii.gz", + "label": "./labelsTs/MSWAL_0454.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0456_0000.nii.gz", + "label": "./labelsTs/MSWAL_0456.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0458_0000.nii.gz", + "label": "./labelsTs/MSWAL_0458.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0459_0000.nii.gz", + "label": 
"./labelsTs/MSWAL_0459.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0462_0000.nii.gz", + "label": "./labelsTs/MSWAL_0462.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0467_0000.nii.gz", + "label": "./labelsTs/MSWAL_0467.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0469_0000.nii.gz", + "label": "./labelsTs/MSWAL_0469.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0472_0000.nii.gz", + "label": "./labelsTs/MSWAL_0472.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0478_0000.nii.gz", + "label": "./labelsTs/MSWAL_0478.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0481_0000.nii.gz", + "label": "./labelsTs/MSWAL_0481.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0494_0000.nii.gz", + "label": "./labelsTs/MSWAL_0494.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0496_0000.nii.gz", + "label": "./labelsTs/MSWAL_0496.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0499_0000.nii.gz", + "label": "./labelsTs/MSWAL_0499.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0502_0000.nii.gz", + "label": "./labelsTs/MSWAL_0502.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0503_0000.nii.gz", + "label": "./labelsTs/MSWAL_0503.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0511_0000.nii.gz", + "label": "./labelsTs/MSWAL_0511.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0513_0000.nii.gz", + "label": "./labelsTs/MSWAL_0513.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0514_0000.nii.gz", + "label": "./labelsTs/MSWAL_0514.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0515_0000.nii.gz", + "label": "./labelsTs/MSWAL_0515.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0517_0000.nii.gz", + "label": "./labelsTs/MSWAL_0517.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0520_0000.nii.gz", + "label": "./labelsTs/MSWAL_0520.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0525_0000.nii.gz", + "label": "./labelsTs/MSWAL_0525.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0528_0000.nii.gz", + "label": "./labelsTs/MSWAL_0528.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0529_0000.nii.gz", + "label": 
"./labelsTs/MSWAL_0529.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0532_0000.nii.gz", + "label": "./labelsTs/MSWAL_0532.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0533_0000.nii.gz", + "label": "./labelsTs/MSWAL_0533.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0537_0000.nii.gz", + "label": "./labelsTs/MSWAL_0537.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0541_0000.nii.gz", + "label": "./labelsTs/MSWAL_0541.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0543_0000.nii.gz", + "label": "./labelsTs/MSWAL_0543.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0560_0000.nii.gz", + "label": "./labelsTs/MSWAL_0560.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0565_0000.nii.gz", + "label": "./labelsTs/MSWAL_0565.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0569_0000.nii.gz", + "label": "./labelsTs/MSWAL_0569.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0570_0000.nii.gz", + "label": "./labelsTs/MSWAL_0570.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0572_0000.nii.gz", + "label": "./labelsTs/MSWAL_0572.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0576_0000.nii.gz", + "label": "./labelsTs/MSWAL_0576.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0585_0000.nii.gz", + "label": "./labelsTs/MSWAL_0585.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0587_0000.nii.gz", + "label": "./labelsTs/MSWAL_0587.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0588_0000.nii.gz", + "label": "./labelsTs/MSWAL_0588.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0589_0000.nii.gz", + "label": "./labelsTs/MSWAL_0589.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0594_0000.nii.gz", + "label": "./labelsTs/MSWAL_0594.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0603_0000.nii.gz", + "label": "./labelsTs/MSWAL_0603.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0606_0000.nii.gz", + "label": "./labelsTs/MSWAL_0606.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0607_0000.nii.gz", + "label": "./labelsTs/MSWAL_0607.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0609_0000.nii.gz", + "label": 
"./labelsTs/MSWAL_0609.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0610_0000.nii.gz", + "label": "./labelsTs/MSWAL_0610.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0611_0000.nii.gz", + "label": "./labelsTs/MSWAL_0611.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0613_0000.nii.gz", + "label": "./labelsTs/MSWAL_0613.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0618_0000.nii.gz", + "label": "./labelsTs/MSWAL_0618.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0619_0000.nii.gz", + "label": "./labelsTs/MSWAL_0619.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0620_0000.nii.gz", + "label": "./labelsTs/MSWAL_0620.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0622_0000.nii.gz", + "label": "./labelsTs/MSWAL_0622.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0624_0000.nii.gz", + "label": "./labelsTs/MSWAL_0624.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0631_0000.nii.gz", + "label": "./labelsTs/MSWAL_0631.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0633_0000.nii.gz", + "label": "./labelsTs/MSWAL_0633.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0634_0000.nii.gz", + "label": "./labelsTs/MSWAL_0634.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0637_0000.nii.gz", + "label": "./labelsTs/MSWAL_0637.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0639_0000.nii.gz", + "label": "./labelsTs/MSWAL_0639.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0642_0000.nii.gz", + "label": "./labelsTs/MSWAL_0642.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0645_0000.nii.gz", + "label": "./labelsTs/MSWAL_0645.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0647_0000.nii.gz", + "label": "./labelsTs/MSWAL_0647.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0652_0000.nii.gz", + "label": "./labelsTs/MSWAL_0652.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0657_0000.nii.gz", + "label": "./labelsTs/MSWAL_0657.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0659_0000.nii.gz", + "label": "./labelsTs/MSWAL_0659.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0664_0000.nii.gz", + "label": 
"./labelsTs/MSWAL_0664.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0665_0000.nii.gz", + "label": "./labelsTs/MSWAL_0665.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0672_0000.nii.gz", + "label": "./labelsTs/MSWAL_0672.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0678_0000.nii.gz", + "label": "./labelsTs/MSWAL_0678.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0683_0000.nii.gz", + "label": "./labelsTs/MSWAL_0683.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0684_0000.nii.gz", + "label": "./labelsTs/MSWAL_0684.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0689_0000.nii.gz", + "label": "./labelsTs/MSWAL_0689.nii.gz" + }, + { + "image": "./imagesTs/MSWAL_0691_0000.nii.gz", + "label": "./labelsTs/MSWAL_0691.nii.gz" + } + ] +} \ No newline at end of file diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/dataset_fingerprint.json b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/dataset_fingerprint.json new file mode 100644 index 0000000000000000000000000000000000000000..288ba1dca5ae59b1eb3732826b63216826b10695 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/dataset_fingerprint.json @@ -0,0 +1,4858 @@ +{ + "foreground_intensity_properties_per_channel": { + "0": { + "max": 3071.0, + "mean": 71.96339416503906, + "median": 45.0, + "min": -932.0, + "percentile_00_5": -93.0, + "percentile_99_5": 1052.0, + "std": 141.6230926513672 + } + }, + "median_relative_size_after_cropping": 1.0, + "shapes_after_crop": [ + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 381, + 512, + 512 + ], + [ + 201, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 165, + 512, + 512 + ], + [ + 165, + 512, + 512 + ], + [ + 185, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 285, + 512, + 512 + ], + [ + 433, + 512, + 512 + ], + [ + 181, + 512, + 512 + ], + [ + 205, + 512, + 512 + ], + [ + 393, + 512, + 512 + ], + [ + 253, + 512, + 512 + ], + [ + 157, + 512, + 512 
+ ], + [ + 137, + 512, + 512 + ], + [ + 185, + 512, + 512 + ], + [ + 217, + 512, + 512 + ], + [ + 221, + 512, + 512 + ], + [ + 165, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 168, + 512, + 512 + ], + [ + 293, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 189, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 209, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 185, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 205, + 512, + 512 + ], + [ + 189, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 165, + 512, + 512 + ], + [ + 193, + 512, + 512 + ], + [ + 165, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 145, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 185, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 265, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 221, + 512, + 512 + ], + [ + 284, + 512, + 512 + ], + [ + 345, + 512, + 512 + ], + [ + 290, + 512, + 512 + ], + [ + 162, + 512, + 512 + ], + [ + 307, + 512, + 512 + ], + [ + 293, + 512, + 512 + ], + [ + 181, + 512, + 512 + ], + [ + 181, + 512, + 512 + ], + [ + 361, + 512, + 512 + ], + [ + 285, + 512, + 512 + ], + [ + 217, + 512, + 512 + ], + [ + 313, + 512, + 512 + ], + [ + 201, + 512, + 512 + ], + [ + 277, + 512, + 512 + ], + [ + 333, + 512, + 512 + ], + [ + 369, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 313, + 512, + 512 + ], + [ + 317, + 512, + 512 + ], + [ + 169, + 512, + 
512 + ], + [ + 169, + 512, + 512 + ], + [ + 169, + 512, + 512 + ], + [ + 237, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 205, + 512, + 512 + ], + [ + 281, + 512, + 512 + ], + [ + 288, + 512, + 512 + ], + [ + 166, + 512, + 512 + ], + [ + 258, + 512, + 512 + ], + [ + 462, + 512, + 512 + ], + [ + 451, + 512, + 512 + ], + [ + 458, + 512, + 512 + ], + [ + 414, + 512, + 512 + ], + [ + 274, + 512, + 512 + ], + [ + 466, + 512, + 512 + ], + [ + 373, + 512, + 512 + ], + [ + 310, + 512, + 512 + ], + [ + 270, + 512, + 512 + ], + [ + 259, + 512, + 512 + ], + [ + 288, + 512, + 512 + ], + [ + 458, + 512, + 512 + ], + [ + 305, + 512, + 512 + ], + [ + 514, + 512, + 512 + ], + [ + 294, + 512, + 512 + ], + [ + 326, + 512, + 512 + ], + [ + 310, + 512, + 512 + ], + [ + 318, + 512, + 512 + ], + [ + 185, + 512, + 512 + ], + [ + 388, + 512, + 512 + ], + [ + 134, + 512, + 512 + ], + [ + 274, + 512, + 512 + ], + [ + 286, + 512, + 512 + ], + [ + 331, + 512, + 512 + ], + [ + 358, + 512, + 512 + ], + [ + 421, + 512, + 512 + ], + [ + 434, + 512, + 512 + ], + [ + 402, + 512, + 512 + ], + [ + 469, + 512, + 512 + ], + [ + 454, + 512, + 512 + ], + [ + 310, + 512, + 512 + ], + [ + 322, + 512, + 512 + ], + [ + 569, + 512, + 512 + ], + [ + 170, + 512, + 512 + ], + [ + 174, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 190, + 512, + 512 + ], + [ + 337, + 512, + 512 + ], + [ + 421, + 512, + 512 + ], + [ + 478, + 512, + 512 + ], + [ + 313, + 512, + 512 + ], + [ + 350, + 512, + 512 + ], + [ + 330, + 512, + 512 + ], + [ + 301, + 512, + 512 + ], + [ + 182, + 512, + 512 + ], + [ + 126, + 512, + 512 + ], + [ + 261, + 512, + 512 + ], + [ + 458, + 512, + 512 + ], + [ + 248, + 512, + 512 + ], + [ + 298, + 512, + 512 + ], + [ + 318, + 512, + 512 + ], + [ + 342, + 512, + 512 + ], + [ + 422, + 512, + 512 + ], + [ + 185, + 512, + 512 + ], + [ + 201, + 512, + 512 + ], + [ + 165, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 305, + 512, + 512 + ], + [ + 265, + 512, + 512 + ], + [ + 201, + 512, 
+ 512 + ], + [ + 201, + 512, + 512 + ], + [ + 193, + 512, + 512 + ], + [ + 485, + 512, + 512 + ], + [ + 309, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 165, + 512, + 512 + ], + [ + 391, + 512, + 512 + ], + [ + 296, + 512, + 512 + ], + [ + 553, + 512, + 512 + ], + [ + 440, + 512, + 512 + ], + [ + 316, + 512, + 512 + ], + [ + 296, + 512, + 512 + ], + [ + 264, + 512, + 512 + ], + [ + 286, + 512, + 512 + ], + [ + 300, + 512, + 512 + ], + [ + 498, + 512, + 512 + ], + [ + 518, + 512, + 512 + ], + [ + 340, + 512, + 512 + ], + [ + 542, + 512, + 512 + ], + [ + 285, + 512, + 512 + ], + [ + 268, + 512, + 512 + ], + [ + 254, + 512, + 512 + ], + [ + 310, + 512, + 512 + ], + [ + 258, + 512, + 512 + ], + [ + 256, + 512, + 512 + ], + [ + 305, + 512, + 512 + ], + [ + 142, + 512, + 512 + ], + [ + 176, + 512, + 512 + ], + [ + 296, + 512, + 512 + ], + [ + 252, + 512, + 512 + ], + [ + 196, + 512, + 512 + ], + [ + 182, + 512, + 512 + ], + [ + 342, + 512, + 512 + ], + [ + 319, + 512, + 512 + ], + [ + 350, + 512, + 512 + ], + [ + 397, + 512, + 512 + ], + [ + 367, + 512, + 512 + ], + [ + 334, + 512, + 512 + ], + [ + 320, + 512, + 512 + ], + [ + 237, + 512, + 512 + ], + [ + 355, + 512, + 512 + ], + [ + 253, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 357, + 512, + 512 + ], + [ + 233, + 512, + 512 + ], + [ + 217, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 217, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 193, + 512, + 512 + ], + [ + 174, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 301, + 512, + 512 + ], + [ + 373, + 512, + 512 + ], + [ + 285, + 512, + 512 + ], + [ + 269, + 512, + 512 + ], + [ + 205, + 512, + 512 + ], + [ + 189, + 512, + 512 + ], + [ + 229, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 165, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 189, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 213, + 512, + 512 + ], + [ + 181, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 
512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 189, + 512, + 512 + ], + [ + 189, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 237, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 361, + 512, + 512 + ], + [ + 301, + 512, + 512 + ], + [ + 301, + 512, + 512 + ], + [ + 317, + 512, + 512 + ], + [ + 437, + 512, + 512 + ], + [ + 309, + 512, + 512 + ], + [ + 721, + 512, + 512 + ], + [ + 309, + 512, + 512 + ], + [ + 401, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 357, + 512, + 512 + ], + [ + 1089, + 512, + 512 + ], + [ + 283, + 512, + 512 + ], + [ + 453, + 512, + 512 + ], + [ + 165, + 512, + 512 + ], + [ + 361, + 512, + 512 + ], + [ + 377, + 512, + 512 + ], + [ + 317, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 277, + 512, + 512 + ], + [ + 333, + 512, + 512 + ], + [ + 293, + 512, + 512 + ], + [ + 379, + 512, + 512 + ], + [ + 357, + 512, + 512 + ], + [ + 377, + 512, + 512 + ], + [ + 297, + 512, + 512 + ], + [ + 277, + 512, + 512 + ], + [ + 305, + 512, + 512 + ], + [ + 317, + 512, + 512 + ], + [ + 415, + 512, + 512 + ], + [ + 247, + 512, + 512 + ], + [ + 325, + 512, + 512 + ], + [ + 305, + 512, + 512 + ], + [ + 217, + 512, + 512 + ], + [ + 325, + 512, + 512 + ], + [ + 217, + 512, + 512 + ], + [ + 273, + 512, + 512 + ], + [ + 369, + 512, + 512 + ], + [ + 295, + 512, + 512 + ], + [ + 307, + 512, + 512 + ], + [ + 325, + 512, + 512 + ], + [ + 295, + 512, + 512 + ], + [ + 253, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 149, + 512, + 512 + ], + [ + 385, + 512, + 512 + ], + [ + 189, + 512, + 512 + ], + [ + 415, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 217, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 305, + 512, + 512 + ], + [ + 305, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 117, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 295, + 512, + 512 + ], + [ + 165, + 512, + 512 + ], + [ + 377, + 512, + 512 + ], + [ + 379, 
+ 512, + 512 + ], + [ + 337, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 309, + 512, + 512 + ], + [ + 325, + 512, + 512 + ], + [ + 337, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 349, + 512, + 512 + ], + [ + 357, + 512, + 512 + ], + [ + 341, + 512, + 512 + ], + [ + 229, + 512, + 512 + ], + [ + 349, + 512, + 512 + ], + [ + 361, + 512, + 512 + ], + [ + 325, + 512, + 512 + ], + [ + 337, + 512, + 512 + ], + [ + 186, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 333, + 512, + 512 + ], + [ + 237, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 292, + 512, + 512 + ], + [ + 366, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 297, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 261, + 512, + 512 + ], + [ + 301, + 512, + 512 + ], + [ + 317, + 512, + 512 + ], + [ + 377, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 209, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 369, + 512, + 512 + ], + [ + 185, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 257, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 229, + 512, + 512 + ], + [ + 201, + 512, + 512 + ], + [ + 217, + 512, + 512 + ], + [ + 213, + 512, + 512 + ], + [ + 217, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 237, + 512, + 512 + ], + [ + 237, + 512, + 512 + ], + [ + 52, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 157, + 512, + 512 + ], + [ + 209, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 277, + 512, + 512 + ], + [ + 337, + 512, + 512 + ], + [ + 197, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 165, + 512, + 512 + ], + [ + 261, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 261, + 512, + 512 + ], + [ + 177, 
+ 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 149, + 512, + 512 + ], + [ + 397, + 512, + 512 + ], + [ + 189, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 145, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 179, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 137, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 329, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 529, + 512, + 512 + ], + [ + 185, + 512, + 512 + ], + [ + 273, + 512, + 512 + ], + [ + 285, + 512, + 512 + ], + [ + 153, + 512, + 512 + ], + [ + 321, + 512, + 512 + ], + [ + 269, + 512, + 512 + ], + [ + 249, + 512, + 512 + ], + [ + 297, + 512, + 512 + ], + [ + 201, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 249, + 512, + 512 + ], + [ + 409, + 512, + 512 + ], + [ + 412, + 512, + 512 + ], + [ + 248, + 512, + 512 + ], + [ + 316, + 512, + 512 + ], + [ + 289, + 512, + 512 + ], + [ + 329, + 512, + 512 + ], + [ + 284, + 512, + 512 + ], + [ + 369, + 512, + 512 + ], + [ + 433, + 512, + 512 + ], + [ + 237, + 512, + 512 + ], + [ + 323, + 512, + 512 + ], + [ + 254, + 512, + 512 + ], + [ + 189, + 512, + 512 + ], + [ + 166, + 512, + 512 + ], + [ + 325, + 512, + 512 + ], + [ + 83, + 512, + 512 + ], + [ + 178, + 512, + 512 + ], + [ + 302, + 512, + 512 + ], + [ + 296, + 512, + 512 + ], + [ + 205, + 512, + 512 + ], + [ + 326, + 512, + 512 + ], + [ + 308, + 512, + 512 + ], + [ + 154, + 512, + 512 + ], + [ + 189, + 512, + 512 + ], + [ + 346, + 512, + 512 + ], + [ + 166, + 512, + 512 + ], + [ + 201, + 512, + 512 + ], + [ + 158, + 512, + 512 + ], + [ + 174, + 512, + 512 + ], + [ + 210, + 512, + 512 + ], + [ + 336, + 512, + 512 + ], + [ + 174, + 512, + 512 + ], + [ + 306, + 512, + 512 + ], + [ + 145, + 512, + 512 + ], + [ + 154, + 512, + 512 + ], + [ + 338, + 512, + 512 + ], + [ + 346, + 512, + 512 + ], + [ + 314, + 512, + 512 + ], + [ + 328, + 512, + 512 + ], + [ + 344, + 512, + 512 + ], + [ + 343, + 512, + 512 + ], + [ + 366, + 512, + 512 + ], + [ + 300, + 512, + 512 + ], + [ + 178, 
+ 512, + 512 + ], + [ + 208, + 512, + 512 + ], + [ + 263, + 512, + 512 + ], + [ + 316, + 512, + 512 + ], + [ + 313, + 512, + 512 + ], + [ + 218, + 512, + 512 + ], + [ + 258, + 512, + 512 + ], + [ + 316, + 512, + 512 + ], + [ + 266, + 512, + 512 + ], + [ + 343, + 512, + 512 + ], + [ + 344, + 512, + 512 + ], + [ + 308, + 512, + 512 + ], + [ + 490, + 512, + 512 + ], + [ + 322, + 512, + 512 + ], + [ + 328, + 512, + 512 + ], + [ + 402, + 512, + 512 + ], + [ + 320, + 512, + 512 + ], + [ + 409, + 512, + 512 + ], + [ + 277, + 512, + 512 + ], + [ + 225, + 512, + 512 + ], + [ + 283, + 512, + 512 + ], + [ + 177, + 512, + 512 + ], + [ + 277, + 512, + 512 + ], + [ + 321, + 512, + 512 + ], + [ + 550, + 512, + 512 + ], + [ + 244, + 512, + 512 + ], + [ + 249, + 512, + 512 + ], + [ + 724, + 512, + 512 + ], + [ + 280, + 512, + 512 + ], + [ + 603, + 512, + 512 + ], + [ + 518, + 512, + 512 + ], + [ + 580, + 512, + 512 + ], + [ + 494, + 512, + 512 + ], + [ + 331, + 512, + 512 + ], + [ + 321, + 512, + 512 + ], + [ + 352, + 512, + 512 + ], + [ + 317, + 512, + 512 + ], + [ + 324, + 512, + 512 + ], + [ + 381, + 512, + 512 + ], + [ + 340, + 512, + 512 + ], + [ + 216, + 512, + 512 + ], + [ + 280, + 512, + 512 + ], + [ + 502, + 512, + 512 + ], + [ + 328, + 512, + 512 + ], + [ + 365, + 512, + 512 + ], + [ + 316, + 512, + 512 + ], + [ + 286, + 512, + 512 + ], + [ + 300, + 512, + 512 + ], + [ + 340, + 512, + 512 + ], + [ + 474, + 512, + 512 + ], + [ + 523, + 512, + 512 + ], + [ + 293, + 512, + 512 + ], + [ + 240, + 512, + 512 + ], + [ + 276, + 512, + 512 + ] + ], + "spacings": [ + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8828129768371582, + 0.8828129768371582 + ], + [ + 1.25, + 0.7871090173721313, + 0.7871090173721313 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7617189884185791, + 0.7617189884185791 + ], + [ 
+ 1.25, + 0.71875, + 0.71875 + ], + [ + 1.25, + 0.7851560115814209, + 0.7851560115814209 + ], + [ + 1.25, + 0.7792969942092896, + 0.7792969942092896 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8710939884185791, + 0.8710939884185791 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8046879768371582, + 0.8046879768371582 + ], + [ + 0.625, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7714840173721313, + 0.7714840173721313 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7734379768371582, + 0.7734379768371582 + ], + [ + 1.25, + 0.6503909826278687, + 0.6503909826278687 + ], + [ + 1.25, + 0.8066409826278687, + 0.8066409826278687 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7773439884185791, + 0.7773439884185791 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7929689884185791, + 0.7929689884185791 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7832030057907104, + 0.7832030057907104 + ], + [ + 1.25, + 0.828125, + 0.828125 + ], + [ 
+ 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7558590173721313, + 0.7558590173721313 + ], + [ + 1.25, + 0.7617189884185791, + 0.7617189884185791 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.796875, + 0.796875 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8164060115814209, + 0.8164060115814209 + ], + [ + 1.25, + 0.8339840173721313, + 0.8339840173721313 + ], + [ + 1.25, + 0.8066409826278687, + 0.8066409826278687 + ], + [ + 1.25, + 0.8867189884185791, + 0.8867189884185791 + ], + [ + 1.25, + 0.7675780057907104, + 0.7675780057907104 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7714840173721313, + 0.7714840173721313 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8242189884185791, + 0.8242189884185791 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7832030057907104, + 0.7832030057907104 + ], + [ + 1.25, + 0.8222659826278687, + 0.8222659826278687 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7890620231628418, + 0.7890620231628418 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 0.699999988079071, + 0.62890625, + 0.62890625 + ], + [ + 0.699999988079071, + 0.716796875, + 0.716796875 + ], + [ + 0.699999988079071, + 0.650390625, + 0.650390625 + ], + [ + 1.5, + 0.677734375, + 0.677734375 + ], + [ + 0.699999988079071, + 0.69140625, + 0.69140625 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8378909826278687, + 0.8378909826278687 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 0.625, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8945310115814209, + 0.8945310115814209 + ], + [ + 1.25, + 
0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8535159826278687, + 0.8535159826278687 + ], + [ + 1.25, + 0.7910159826278687, + 0.7910159826278687 + ], + [ + 1.25, + 0.7949219942092896, + 0.7949219942092896 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7929689884185791, + 0.7929689884185791 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7871090173721313, + 0.7871090173721313 + ], + [ + 1.25, + 0.7441409826278687, + 0.7441409826278687 + ], + [ + 1.25, + 0.8769530057907104, + 0.8769530057907104 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8417969942092896, + 0.8417969942092896 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8144530057907104, + 0.8144530057907104 + ], + [ + 1.25, + 0.791015625, + 0.791015625 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.75390625, + 0.75390625 + ], + [ + 1.25, + 0.86328125, + 0.86328125 + ], + [ + 1.25, + 0.8808590173721313, + 0.8808590173721313 + ], + [ + 1.25, + 0.83984375, + 0.83984375 + ], + [ + 1.2500001192092896, + 0.798828125, + 0.798828125 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.2500001192092896, + 0.900390625, + 0.900390625 + ], + [ + 1.25, + 0.9238280057907104, + 0.9238280057907104 + ], + [ + 1.2500001192092896, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8046875, + 0.8046875 + ], + [ + 1.25, + 0.783203125, + 0.783203125 + ], + [ + 1.25, + 0.78125, + 0.78125 + ], + [ + 1.25, + 0.74609375, + 0.74609375 + ], + [ + 1.25, + 0.779296875, + 0.779296875 + ], + [ + 1.2500001192092896, + 0.78125, + 0.78125 + ], + [ + 1.2499998807907104, + 0.703125, + 0.703125 + ], + [ + 
1.2500001192092896, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.8100000023841858, + 0.8100000023841858 + ], + [ + 1.25, + 0.7890625, + 0.7890625 + ], + [ + 1.25, + 0.7749999761581421, + 0.7749999761581421 + ], + [ + 1.2500001192092896, + 0.828125, + 0.828125 + ], + [ + 1.25, + 0.748046875, + 0.748046875 + ], + [ + 1.25, + 0.8222659826278687, + 0.8222659826278687 + ], + [ + 1.25, + 0.890625, + 0.890625 + ], + [ + 1.25, + 0.8496090173721313, + 0.8496090173721313 + ], + [ + 1.25, + 0.888671875, + 0.888671875 + ], + [ + 1.25, + 0.837890625, + 0.837890625 + ], + [ + 1.25, + 0.9492189884185791, + 0.9492189884185791 + ], + [ + 1.2500001192092896, + 0.77734375, + 0.77734375 + ], + [ + 1.25, + 0.83984375, + 0.83984375 + ], + [ + 1.25, + 0.74609375, + 0.74609375 + ], + [ + 0.625, + 0.7949219942092896, + 0.7949219942092896 + ], + [ + 1.25, + 0.6809999942779541, + 0.6809999942779541 + ], + [ + 1.25, + 0.759765625, + 0.759765625 + ], + [ + 1.25, + 0.8515620231628418, + 0.8515620231628418 + ], + [ + 1.25, + 0.798828125, + 0.798828125 + ], + [ + 0.625, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8984379768371582, + 0.8984379768371582 + ], + [ + 1.2499998807907104, + 0.912109375, + 0.912109375 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.2499998807907104, + 0.787109375, + 0.787109375 + ], + [ + 1.25, + 0.775390625, + 0.775390625 + ], + [ + 1.25, + 0.7910159826278687, + 0.7910159826278687 + ], + [ + 1.25, + 0.73828125, + 0.73828125 + ], + [ + 1.5, + 0.798828125, + 0.798828125 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.837890625, + 0.837890625 + ], + [ + 1.25, + 0.88671875, + 0.88671875 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.2500001192092896, + 0.849609375, + 0.849609375 + ], + [ + 1.25, + 0.744140625, + 0.744140625 + ], + [ + 1.2500001192092896, + 0.771484375, + 0.771484375 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 
0.7421879768371582 + ], + [ + 1.25, + 0.7949219942092896, + 0.7949219942092896 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.71875, + 0.71875 + ], + [ + 1.25, + 0.8496090173721313, + 0.8496090173721313 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8027340173721313, + 0.8027340173721313 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8222659826278687, + 0.8222659826278687 + ], + [ + 1.25, + 0.7734379768371582, + 0.7734379768371582 + ], + [ + 1.25, + 0.814453125, + 0.814453125 + ], + [ + 1.25, + 0.8378909826278687, + 0.8378909826278687 + ], + [ + 1.25, + 0.8569999933242798, + 0.8569999933242798 + ], + [ + 1.25, + 0.80078125, + 0.80078125 + ], + [ + 1.25, + 0.8359375, + 0.8359375 + ], + [ + 1.25, + 0.81640625, + 0.81640625 + ], + [ + 1.25, + 0.662109375, + 0.662109375 + ], + [ + 1.25, + 0.759765625, + 0.759765625 + ], + [ + 1.2499998807907104, + 0.818359375, + 0.818359375 + ], + [ + 1.25, + 0.904296875, + 0.904296875 + ], + [ + 1.25, + 0.75, + 0.75 + ], + [ + 1.25, + 0.8339999914169312, + 0.8339999914169312 + ], + [ + 0.699999988079071, + 0.84765625, + 0.84765625 + ], + [ + 1.5, + 0.7265625, + 0.7265625 + ], + [ + 1.5, + 0.771484375, + 0.771484375 + ], + [ + 0.699999988079071, + 0.7265625, + 0.7265625 + ], + [ + 1.5, + 0.7265625, + 0.7265625 + ], + [ + 1.5, + 0.7265625, + 0.7265625 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7689999938011169, + 0.7689999938011169 + ], + [ + 1.25, + 0.8125, + 0.8125 + ], + [ + 1.5, + 0.75390625, + 0.75390625 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.779296875, + 0.779296875 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.2499998807907104, + 0.828125, + 0.828125 + ], + [ + 1.25, + 
0.7832030057907104, + 0.7832030057907104 + ], + [ + 1.2499998807907104, + 0.8515625, + 0.8515625 + ], + [ + 1.25, + 0.9394530057907104, + 0.9394530057907104 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.2499998807907104, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.763671875, + 0.763671875 + ], + [ + 1.5, + 0.755859375, + 0.755859375 + ], + [ + 1.25, + 0.8378909826278687, + 0.8378909826278687 + ], + [ + 1.5, + 0.7265625, + 0.7265625 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8164060115814209, + 0.8164060115814209 + ], + [ + 1.25, + 0.7949219942092896, + 0.7949219942092896 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.6523439884185791, + 0.6523439884185791 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.828125, + 0.828125 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7792969942092896, + 0.7792969942092896 + ], + [ + 1.25, + 0.6328120231628418, + 0.6328120231628418 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 
0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8652340173721313, + 0.8652340173721313 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.9042969942092896, + 0.9042969942092896 + ], + [ + 1.25, + 0.8867189884185791, + 0.8867189884185791 + ], + [ + 1.25, + 0.7226560115814209, + 0.7226560115814209 + ], + [ + 0.625, + 0.8515629768371582, + 0.8515629768371582 + ], + [ + 1.25, + 0.8144530057907104, + 0.8144530057907104 + ], + [ + 0.625, + 0.8691409826278687, + 0.8691409826278687 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 0.625, + 0.921875, + 0.921875 + ], + [ + 1.25, + 0.7597659826278687, + 0.7597659826278687 + ], + [ + 1.25, + 0.8105469942092896, + 0.8105469942092896 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8574219942092896, + 0.8574219942092896 + ], + [ + 1.25, + 0.8496090173721313, + 0.8496090173721313 + ], + [ + 1.25, + 0.8066409826278687, + 0.8066409826278687 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8203120231628418, + 0.8203120231628418 + ], + [ + 1.25, + 0.7285159826278687, + 0.7285159826278687 + ], + [ + 1.25, + 0.921875, + 0.921875 + ], + [ + 1.25, + 0.875, + 0.875 + ], + [ + 1.25, + 0.8007810115814209, + 0.8007810115814209 + ], + [ + 1.25, + 0.7753909826278687, + 
0.7753909826278687 + ], + [ + 1.25, + 0.8320310115814209, + 0.8320310115814209 + ], + [ + 1.25, + 0.7871090173721313, + 0.7871090173721313 + ], + [ + 1.25, + 0.8300780057907104, + 0.8300780057907104 + ], + [ + 1.25, + 0.8046879768371582, + 0.8046879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7929689884185791, + 0.7929689884185791 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7851560115814209, + 0.7851560115814209 + ], + [ + 1.25, + 0.953125, + 0.953125 + ], + [ + 1.25, + 0.9765620231628418, + 0.9765620231628418 + ], + [ + 1.25, + 0.8203120231628418, + 0.8203120231628418 + ], + [ + 1.25, + 0.7871090173721313, + 0.7871090173721313 + ], + [ + 1.25, + 0.7480469942092896, + 0.7480469942092896 + ], + [ + 1.25, + 0.7109379768371582, + 0.7109379768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7382810115814209, + 0.7382810115814209 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7851560115814209, + 0.7851560115814209 + ], + [ + 1.25, + 0.9316409826278687, + 0.9316409826278687 + ], + [ + 1.25, + 0.7792969942092896, + 0.7792969942092896 + ], + [ + 1.25, + 0.8183590173721313, + 0.8183590173721313 + ], + [ + 1.25, + 0.75, + 0.75 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7636719942092896, + 0.7636719942092896 + ], + [ + 1.25, + 0.8007810115814209, + 0.8007810115814209 + ], + [ + 1.25, + 0.8261719942092896, + 0.8261719942092896 + ], + [ + 1.25, + 0.8417969942092896, + 0.8417969942092896 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8085939884185791, + 0.8085939884185791 + ], + [ + 1.25, + 0.8476560115814209, + 0.8476560115814209 + ], + [ + 1.25, + 0.8144530057907104, + 0.8144530057907104 + ], + [ + 1.25, + 0.8535159826278687, + 
0.8535159826278687 + ], + [ + 1.25, + 0.7753909826278687, + 0.7753909826278687 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7949219942092896, + 0.7949219942092896 + ], + [ + 1.25, + 0.8378909826278687, + 0.8378909826278687 + ], + [ + 1.25, + 0.8007810115814209, + 0.8007810115814209 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8828129768371582, + 0.8828129768371582 + ], + [ + 1.25, + 0.7851560115814209, + 0.7851560115814209 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8183590173721313, + 0.8183590173721313 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7773439884185791, + 0.7773439884185791 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 0.699999988079071, + 0.650390625, + 0.650390625 + ], + [ + 0.699999988079071, + 0.744140625, + 0.744140625 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8710939884185791, + 0.8710939884185791 + ], + [ + 1.25, + 0.890625, + 0.890625 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7949219942092896, + 0.7949219942092896 + ], + [ + 1.25, + 0.8554689884185791, + 0.8554689884185791 + ], + [ + 1.25, + 0.9003909826278687, + 0.9003909826278687 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8535159826278687, + 0.8535159826278687 + ], + [ + 1.25, + 0.8476560115814209, + 0.8476560115814209 + ], + [ + 1.25, + 
0.8535159826278687, + 0.8535159826278687 + ], + [ + 0.625, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7675780057907104, + 0.7675780057907104 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7636719942092896, + 0.7636719942092896 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8417969942092896, + 0.8417969942092896 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 5.0, + 0.8203120231628418, + 0.8203120231628418 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.6660159826278687, + 0.6660159826278687 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7773439884185791, + 0.7773439884185791 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7695310115814209, + 0.7695310115814209 + ], + [ + 1.25, + 0.7578120231628418, + 0.7578120231628418 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.6796879768371582, + 0.6796879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7617189884185791, 
+ 0.7617189884185791 + ], + [ + 1.25, + 0.7578120231628418, + 0.7578120231628418 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7910159826278687, + 0.7910159826278687 + ], + [ + 1.25, + 0.7949219942092896, + 0.7949219942092896 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7675780057907104, + 0.7675780057907104 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7773439884185791, + 0.7773439884185791 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.84375, + 0.84375 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7753909826278687, + 0.7753909826278687 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.9003909826278687, + 0.9003909826278687 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 0.625, + 0.8867189884185791, + 0.8867189884185791 + ], + [ + 1.25, + 0.7890629768371582, + 0.7890629768371582 + ], + [ + 1.25, + 0.7949219942092896, + 0.7949219942092896 + ], + [ + 1.25, + 0.7871090173721313, + 0.7871090173721313 + ], + [ + 1.25, + 0.7773439884185791, + 0.7773439884185791 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.9550780057907104, + 0.9550780057907104 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.9042969942092896, + 0.9042969942092896 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8125, + 0.8125 + ], + [ + 1.5, + 0.68359375, + 0.68359375 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.2500001192092896, + 0.810546875, + 0.810546875 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.86328125, + 0.86328125 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.9160159826278687, + 0.9160159826278687 + ], + [ + 1.25, + 0.822265625, + 0.822265625 + ], + [ + 1.25, + 0.953125, + 0.953125 + ], + [ + 
1.25, + 0.9414060115814209, + 0.9414060115814209 + ], + [ + 1.25, + 0.7246090173721313, + 0.7246090173721313 + ], + [ + 1.25, + 0.7749999761581421, + 0.7749999761581421 + ], + [ + 1.25, + 0.8203125, + 0.8203125 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.8535159826278687, + 0.8535159826278687 + ], + [ + 5.0, + 0.853515625, + 0.853515625 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.2500001192092896, + 0.865234375, + 0.865234375 + ], + [ + 1.25, + 0.8378909826278687, + 0.8378909826278687 + ], + [ + 1.25, + 0.76171875, + 0.76171875 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.75, + 0.75 + ], + [ + 1.25, + 0.8320310115814209, + 0.8320310115814209 + ], + [ + 1.25, + 0.85546875, + 0.85546875 + ], + [ + 1.25, + 0.796875, + 0.796875 + ], + [ + 1.25, + 0.7910159826278687, + 0.7910159826278687 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.249999761581421, + 0.84765625, + 0.84765625 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.2500001192092896, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.83203125, + 0.83203125 + ], + [ + 1.2499998807907104, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.787109375, + 0.787109375 + ], + [ + 1.250000238418579, + 0.86328125, + 0.86328125 + ], + [ + 1.2499998807907104, + 0.83984375, + 0.83984375 + ], + [ + 1.25, + 0.8417969942092896, + 0.8417969942092896 + ], + [ + 1.25, + 0.875, + 0.875 + ], + [ + 1.25, + 0.810546875, + 0.810546875 + ], + [ + 1.25, + 0.728515625, + 0.728515625 + ], + [ + 1.25, + 0.513671875, + 0.513671875 + ], + [ + 1.5, + 0.68359375, + 0.68359375 + ], + [ + 1.25, + 0.826171875, + 0.826171875 + ], + [ + 1.25, + 0.8203120231628418, + 0.8203120231628418 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 
1.2500001192092896, + 0.794921875, + 0.794921875 + ], + [ + 1.25, + 0.79296875, + 0.79296875 + ], + [ + 1.25, + 0.6953125, + 0.6953125 + ], + [ + 1.25, + 0.9316409826278687, + 0.9316409826278687 + ], + [ + 1.25, + 0.82421875, + 0.82421875 + ], + [ + 1.25, + 0.7890625, + 0.7890625 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.2499998807907104, + 0.806640625, + 0.806640625 + ], + [ + 1.25, + 0.818359375, + 0.818359375 + ], + [ + 1.2499998807907104, + 0.78515625, + 0.78515625 + ], + [ + 1.25, + 0.798828125, + 0.798828125 + ], + [ + 1.25, + 0.8476560115814209, + 0.8476560115814209 + ], + [ + 1.25, + 0.8046879768371582, + 0.8046879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 0.699999988079071, + 0.814453125, + 0.814453125 + ], + [ + 0.699999988079071, + 0.615234375, + 0.615234375 + ], + [ + 0.699999988079071, + 0.7265625, + 0.7265625 + ], + [ + 0.699999988079071, + 0.748046875, + 0.748046875 + ], + [ + 0.699999988079071, + 0.6875, + 0.6875 + ], + [ + 0.699999988079071, + 0.7265625, + 0.7265625 + ], + [ + 0.699999988079071, + 0.748046875, + 0.748046875 + ], + [ + 0.699999988079071, + 0.62109375, + 0.62109375 + ], + [ + 0.699999988079071, + 0.744140625, + 0.744140625 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.25, + 0.8046875, + 0.8046875 + ], + [ + 1.25, + 0.7421879768371582, + 0.7421879768371582 + ], + [ + 1.250000238418579, + 0.7734375, + 0.7734375 + ], + [ + 1.25, + 0.8769530057907104, + 0.8769530057907104 + ], + [ + 1.25, + 0.875, + 0.875 + ], + [ + 1.5, + 0.68359375, + 0.68359375 + ], + [ + 1.25, + 0.869140625, + 0.869140625 + ], + [ + 1.25, + 0.703125, + 0.703125 + ], + [ + 1.25, + 0.857421875, + 
0.857421875 + ], + [ + 1.25, + 0.7949219942092896, + 0.7949219942092896 + ], + [ + 1.25, + 0.837890625, + 0.837890625 + ], + [ + 1.5, + 0.80859375, + 0.80859375 + ], + [ + 1.25, + 0.9140625, + 0.9140625 + ], + [ + 1.249999761581421, + 0.73046875, + 0.73046875 + ], + [ + 1.2500001192092896, + 0.755859375, + 0.755859375 + ], + [ + 1.25, + 0.9765620231628418, + 0.9765620231628418 + ], + [ + 1.25, + 0.7910159826278687, + 0.7910159826278687 + ], + [ + 1.5, + 0.697265625, + 0.697265625 + ], + [ + 1.25, + 0.796875, + 0.796875 + ] + ] +} \ No newline at end of file diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/checkpoint_best.pth b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..7374902ef4c498a8999ce96611666594b4e914f8 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:de036943d50eb28073525ebac991375779235160fe1723cde7139008d8d92883 +size 1129744978 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/checkpoint_final.pth b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/checkpoint_final.pth new file mode 100644 index 0000000000000000000000000000000000000000..4394be813a8105711ae783556bd09276a63baff3 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/checkpoint_final.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6e73ce95e622ee25df47201134d50431f1eaf29ed1b2d98118f647ed7def34cf +size 1129851926 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/debug.json b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/debug.json new file mode 100644 index 
0000000000000000000000000000000000000000..38583f6faf49bf20c82dd0426d1b81202929d107 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/debug.json @@ -0,0 +1,53 @@ +{ + "_best_ema": "None", + "batch_size": "2", + "configuration_manager": "{'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}", + "configuration_name": "3d_fullres", + "cudnn_version": 90100, + "current_epoch": 
"0", + "dataloader_train": "", + "dataloader_train.generator": "", + "dataloader_train.num_processes": "12", + "dataloader_train.transform": "None", + "dataloader_val": "", + "dataloader_val.generator": "", + "dataloader_val.num_processes": "6", + "dataloader_val.transform": "None", + "dataset_json": "{'name': 'MSWAL', 'description': ' 3D Multi-class Segmentation of Whole Abdominal Lesions Dataset', 'licence': 'CC BY-NC 4.0', 'relase': 'July 8, 2025', 'tensorImageSize': '3D', 'file_ending': '.nii.gz', 'channel_names': {'0': 'CT'}, 'labels': {'background': 0, 'gallstone': 1, 'kidney stone': 2, 'liver tumor': 3, 'kidney tumor': 4, 'pancreatic cancer': 5, 'liver cyst': 6, 'kidney cyst': 7}, 'numTraining': 484, 'numTest': 210, 'training': [{'image': './imagesTr/MSWAL_0001_0000.nii.gz', 'label': './labelsTr/MSWAL_0001.nii.gz'}, {'image': './imagesTr/MSWAL_0002_0000.nii.gz', 'label': './labelsTr/MSWAL_0002.nii.gz'}, {'image': './imagesTr/MSWAL_0003_0000.nii.gz', 'label': './labelsTr/MSWAL_0003.nii.gz'}, {'image': './imagesTr/MSWAL_0008_0000.nii.gz', 'label': './labelsTr/MSWAL_0008.nii.gz'}, {'image': './imagesTr/MSWAL_0009_0000.nii.gz', 'label': './labelsTr/MSWAL_0009.nii.gz'}, {'image': './imagesTr/MSWAL_0011_0000.nii.gz', 'label': './labelsTr/MSWAL_0011.nii.gz'}, {'image': './imagesTr/MSWAL_0013_0000.nii.gz', 'label': './labelsTr/MSWAL_0013.nii.gz'}, {'image': './imagesTr/MSWAL_0014_0000.nii.gz', 'label': './labelsTr/MSWAL_0014.nii.gz'}, {'image': './imagesTr/MSWAL_0015_0000.nii.gz', 'label': './labelsTr/MSWAL_0015.nii.gz'}, {'image': './imagesTr/MSWAL_0017_0000.nii.gz', 'label': './labelsTr/MSWAL_0017.nii.gz'}, {'image': './imagesTr/MSWAL_0018_0000.nii.gz', 'label': './labelsTr/MSWAL_0018.nii.gz'}, {'image': './imagesTr/MSWAL_0020_0000.nii.gz', 'label': './labelsTr/MSWAL_0020.nii.gz'}, {'image': './imagesTr/MSWAL_0021_0000.nii.gz', 'label': './labelsTr/MSWAL_0021.nii.gz'}, {'image': './imagesTr/MSWAL_0022_0000.nii.gz', 'label': './labelsTr/MSWAL_0022.nii.gz'}, 
{'image': './imagesTr/MSWAL_0024_0000.nii.gz', 'label': './labelsTr/MSWAL_0024.nii.gz'}, {'image': './imagesTr/MSWAL_0026_0000.nii.gz', 'label': './labelsTr/MSWAL_0026.nii.gz'}, {'image': './imagesTr/MSWAL_0027_0000.nii.gz', 'label': './labelsTr/MSWAL_0027.nii.gz'}, {'image': './imagesTr/MSWAL_0028_0000.nii.gz', 'label': './labelsTr/MSWAL_0028.nii.gz'}, {'image': './imagesTr/MSWAL_0029_0000.nii.gz', 'label': './labelsTr/MSWAL_0029.nii.gz'}, {'image': './imagesTr/MSWAL_0031_0000.nii.gz', 'label': './labelsTr/MSWAL_0031.nii.gz'}, {'image': './imagesTr/MSWAL_0032_0000.nii.gz', 'label': './labelsTr/MSWAL_0032.nii.gz'}, {'image': './imagesTr/MSWAL_0033_0000.nii.gz', 'label': './labelsTr/MSWAL_0033.nii.gz'}, {'image': './imagesTr/MSWAL_0034_0000.nii.gz', 'label': './labelsTr/MSWAL_0034.nii.gz'}, {'image': './imagesTr/MSWAL_0035_0000.nii.gz', 'label': './labelsTr/MSWAL_0035.nii.gz'}, {'image': './imagesTr/MSWAL_0037_0000.nii.gz', 'label': './labelsTr/MSWAL_0037.nii.gz'}, {'image': './imagesTr/MSWAL_0038_0000.nii.gz', 'label': './labelsTr/MSWAL_0038.nii.gz'}, {'image': './imagesTr/MSWAL_0039_0000.nii.gz', 'label': './labelsTr/MSWAL_0039.nii.gz'}, {'image': './imagesTr/MSWAL_0040_0000.nii.gz', 'label': './labelsTr/MSWAL_0040.nii.gz'}, {'image': './imagesTr/MSWAL_0041_0000.nii.gz', 'label': './labelsTr/MSWAL_0041.nii.gz'}, {'image': './imagesTr/MSWAL_0042_0000.nii.gz', 'label': './labelsTr/MSWAL_0042.nii.gz'}, {'image': './imagesTr/MSWAL_0045_0000.nii.gz', 'label': './labelsTr/MSWAL_0045.nii.gz'}, {'image': './imagesTr/MSWAL_0046_0000.nii.gz', 'label': './labelsTr/MSWAL_0046.nii.gz'}, {'image': './imagesTr/MSWAL_0049_0000.nii.gz', 'label': './labelsTr/MSWAL_0049.nii.gz'}, {'image': './imagesTr/MSWAL_0050_0000.nii.gz', 'label': './labelsTr/MSWAL_0050.nii.gz'}, {'image': './imagesTr/MSWAL_0051_0000.nii.gz', 'label': './labelsTr/MSWAL_0051.nii.gz'}, {'image': './imagesTr/MSWAL_0052_0000.nii.gz', 'label': './labelsTr/MSWAL_0052.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0054_0000.nii.gz', 'label': './labelsTr/MSWAL_0054.nii.gz'}, {'image': './imagesTr/MSWAL_0055_0000.nii.gz', 'label': './labelsTr/MSWAL_0055.nii.gz'}, {'image': './imagesTr/MSWAL_0056_0000.nii.gz', 'label': './labelsTr/MSWAL_0056.nii.gz'}, {'image': './imagesTr/MSWAL_0057_0000.nii.gz', 'label': './labelsTr/MSWAL_0057.nii.gz'}, {'image': './imagesTr/MSWAL_0059_0000.nii.gz', 'label': './labelsTr/MSWAL_0059.nii.gz'}, {'image': './imagesTr/MSWAL_0060_0000.nii.gz', 'label': './labelsTr/MSWAL_0060.nii.gz'}, {'image': './imagesTr/MSWAL_0061_0000.nii.gz', 'label': './labelsTr/MSWAL_0061.nii.gz'}, {'image': './imagesTr/MSWAL_0063_0000.nii.gz', 'label': './labelsTr/MSWAL_0063.nii.gz'}, {'image': './imagesTr/MSWAL_0064_0000.nii.gz', 'label': './labelsTr/MSWAL_0064.nii.gz'}, {'image': './imagesTr/MSWAL_0065_0000.nii.gz', 'label': './labelsTr/MSWAL_0065.nii.gz'}, {'image': './imagesTr/MSWAL_0066_0000.nii.gz', 'label': './labelsTr/MSWAL_0066.nii.gz'}, {'image': './imagesTr/MSWAL_0067_0000.nii.gz', 'label': './labelsTr/MSWAL_0067.nii.gz'}, {'image': './imagesTr/MSWAL_0069_0000.nii.gz', 'label': './labelsTr/MSWAL_0069.nii.gz'}, {'image': './imagesTr/MSWAL_0072_0000.nii.gz', 'label': './labelsTr/MSWAL_0072.nii.gz'}, {'image': './imagesTr/MSWAL_0075_0000.nii.gz', 'label': './labelsTr/MSWAL_0075.nii.gz'}, {'image': './imagesTr/MSWAL_0077_0000.nii.gz', 'label': './labelsTr/MSWAL_0077.nii.gz'}, {'image': './imagesTr/MSWAL_0080_0000.nii.gz', 'label': './labelsTr/MSWAL_0080.nii.gz'}, {'image': './imagesTr/MSWAL_0082_0000.nii.gz', 'label': './labelsTr/MSWAL_0082.nii.gz'}, {'image': './imagesTr/MSWAL_0083_0000.nii.gz', 'label': './labelsTr/MSWAL_0083.nii.gz'}, {'image': './imagesTr/MSWAL_0084_0000.nii.gz', 'label': './labelsTr/MSWAL_0084.nii.gz'}, {'image': './imagesTr/MSWAL_0085_0000.nii.gz', 'label': './labelsTr/MSWAL_0085.nii.gz'}, {'image': './imagesTr/MSWAL_0086_0000.nii.gz', 'label': './labelsTr/MSWAL_0086.nii.gz'}, {'image': './imagesTr/MSWAL_0088_0000.nii.gz', 
'label': './labelsTr/MSWAL_0088.nii.gz'}, {'image': './imagesTr/MSWAL_0089_0000.nii.gz', 'label': './labelsTr/MSWAL_0089.nii.gz'}, {'image': './imagesTr/MSWAL_0092_0000.nii.gz', 'label': './labelsTr/MSWAL_0092.nii.gz'}, {'image': './imagesTr/MSWAL_0093_0000.nii.gz', 'label': './labelsTr/MSWAL_0093.nii.gz'}, {'image': './imagesTr/MSWAL_0094_0000.nii.gz', 'label': './labelsTr/MSWAL_0094.nii.gz'}, {'image': './imagesTr/MSWAL_0095_0000.nii.gz', 'label': './labelsTr/MSWAL_0095.nii.gz'}, {'image': './imagesTr/MSWAL_0096_0000.nii.gz', 'label': './labelsTr/MSWAL_0096.nii.gz'}, {'image': './imagesTr/MSWAL_0098_0000.nii.gz', 'label': './labelsTr/MSWAL_0098.nii.gz'}, {'image': './imagesTr/MSWAL_0099_0000.nii.gz', 'label': './labelsTr/MSWAL_0099.nii.gz'}, {'image': './imagesTr/MSWAL_0101_0000.nii.gz', 'label': './labelsTr/MSWAL_0101.nii.gz'}, {'image': './imagesTr/MSWAL_0102_0000.nii.gz', 'label': './labelsTr/MSWAL_0102.nii.gz'}, {'image': './imagesTr/MSWAL_0103_0000.nii.gz', 'label': './labelsTr/MSWAL_0103.nii.gz'}, {'image': './imagesTr/MSWAL_0104_0000.nii.gz', 'label': './labelsTr/MSWAL_0104.nii.gz'}, {'image': './imagesTr/MSWAL_0105_0000.nii.gz', 'label': './labelsTr/MSWAL_0105.nii.gz'}, {'image': './imagesTr/MSWAL_0106_0000.nii.gz', 'label': './labelsTr/MSWAL_0106.nii.gz'}, {'image': './imagesTr/MSWAL_0108_0000.nii.gz', 'label': './labelsTr/MSWAL_0108.nii.gz'}, {'image': './imagesTr/MSWAL_0109_0000.nii.gz', 'label': './labelsTr/MSWAL_0109.nii.gz'}, {'image': './imagesTr/MSWAL_0110_0000.nii.gz', 'label': './labelsTr/MSWAL_0110.nii.gz'}, {'image': './imagesTr/MSWAL_0111_0000.nii.gz', 'label': './labelsTr/MSWAL_0111.nii.gz'}, {'image': './imagesTr/MSWAL_0112_0000.nii.gz', 'label': './labelsTr/MSWAL_0112.nii.gz'}, {'image': './imagesTr/MSWAL_0113_0000.nii.gz', 'label': './labelsTr/MSWAL_0113.nii.gz'}, {'image': './imagesTr/MSWAL_0114_0000.nii.gz', 'label': './labelsTr/MSWAL_0114.nii.gz'}, {'image': './imagesTr/MSWAL_0117_0000.nii.gz', 'label': './labelsTr/MSWAL_0117.nii.gz'}, 
{'image': './imagesTr/MSWAL_0119_0000.nii.gz', 'label': './labelsTr/MSWAL_0119.nii.gz'}, {'image': './imagesTr/MSWAL_0120_0000.nii.gz', 'label': './labelsTr/MSWAL_0120.nii.gz'}, {'image': './imagesTr/MSWAL_0122_0000.nii.gz', 'label': './labelsTr/MSWAL_0122.nii.gz'}, {'image': './imagesTr/MSWAL_0124_0000.nii.gz', 'label': './labelsTr/MSWAL_0124.nii.gz'}, {'image': './imagesTr/MSWAL_0125_0000.nii.gz', 'label': './labelsTr/MSWAL_0125.nii.gz'}, {'image': './imagesTr/MSWAL_0126_0000.nii.gz', 'label': './labelsTr/MSWAL_0126.nii.gz'}, {'image': './imagesTr/MSWAL_0127_0000.nii.gz', 'label': './labelsTr/MSWAL_0127.nii.gz'}, {'image': './imagesTr/MSWAL_0128_0000.nii.gz', 'label': './labelsTr/MSWAL_0128.nii.gz'}, {'image': './imagesTr/MSWAL_0129_0000.nii.gz', 'label': './labelsTr/MSWAL_0129.nii.gz'}, {'image': './imagesTr/MSWAL_0130_0000.nii.gz', 'label': './labelsTr/MSWAL_0130.nii.gz'}, {'image': './imagesTr/MSWAL_0132_0000.nii.gz', 'label': './labelsTr/MSWAL_0132.nii.gz'}, {'image': './imagesTr/MSWAL_0133_0000.nii.gz', 'label': './labelsTr/MSWAL_0133.nii.gz'}, {'image': './imagesTr/MSWAL_0134_0000.nii.gz', 'label': './labelsTr/MSWAL_0134.nii.gz'}, {'image': './imagesTr/MSWAL_0136_0000.nii.gz', 'label': './labelsTr/MSWAL_0136.nii.gz'}, {'image': './imagesTr/MSWAL_0138_0000.nii.gz', 'label': './labelsTr/MSWAL_0138.nii.gz'}, {'image': './imagesTr/MSWAL_0139_0000.nii.gz', 'label': './labelsTr/MSWAL_0139.nii.gz'}, {'image': './imagesTr/MSWAL_0140_0000.nii.gz', 'label': './labelsTr/MSWAL_0140.nii.gz'}, {'image': './imagesTr/MSWAL_0141_0000.nii.gz', 'label': './labelsTr/MSWAL_0141.nii.gz'}, {'image': './imagesTr/MSWAL_0142_0000.nii.gz', 'label': './labelsTr/MSWAL_0142.nii.gz'}, {'image': './imagesTr/MSWAL_0143_0000.nii.gz', 'label': './labelsTr/MSWAL_0143.nii.gz'}, {'image': './imagesTr/MSWAL_0145_0000.nii.gz', 'label': './labelsTr/MSWAL_0145.nii.gz'}, {'image': './imagesTr/MSWAL_0147_0000.nii.gz', 'label': './labelsTr/MSWAL_0147.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0148_0000.nii.gz', 'label': './labelsTr/MSWAL_0148.nii.gz'}, {'image': './imagesTr/MSWAL_0149_0000.nii.gz', 'label': './labelsTr/MSWAL_0149.nii.gz'}, {'image': './imagesTr/MSWAL_0150_0000.nii.gz', 'label': './labelsTr/MSWAL_0150.nii.gz'}, {'image': './imagesTr/MSWAL_0151_0000.nii.gz', 'label': './labelsTr/MSWAL_0151.nii.gz'}, {'image': './imagesTr/MSWAL_0152_0000.nii.gz', 'label': './labelsTr/MSWAL_0152.nii.gz'}, {'image': './imagesTr/MSWAL_0157_0000.nii.gz', 'label': './labelsTr/MSWAL_0157.nii.gz'}, {'image': './imagesTr/MSWAL_0159_0000.nii.gz', 'label': './labelsTr/MSWAL_0159.nii.gz'}, {'image': './imagesTr/MSWAL_0162_0000.nii.gz', 'label': './labelsTr/MSWAL_0162.nii.gz'}, {'image': './imagesTr/MSWAL_0163_0000.nii.gz', 'label': './labelsTr/MSWAL_0163.nii.gz'}, {'image': './imagesTr/MSWAL_0165_0000.nii.gz', 'label': './labelsTr/MSWAL_0165.nii.gz'}, {'image': './imagesTr/MSWAL_0166_0000.nii.gz', 'label': './labelsTr/MSWAL_0166.nii.gz'}, {'image': './imagesTr/MSWAL_0167_0000.nii.gz', 'label': './labelsTr/MSWAL_0167.nii.gz'}, {'image': './imagesTr/MSWAL_0168_0000.nii.gz', 'label': './labelsTr/MSWAL_0168.nii.gz'}, {'image': './imagesTr/MSWAL_0169_0000.nii.gz', 'label': './labelsTr/MSWAL_0169.nii.gz'}, {'image': './imagesTr/MSWAL_0170_0000.nii.gz', 'label': './labelsTr/MSWAL_0170.nii.gz'}, {'image': './imagesTr/MSWAL_0171_0000.nii.gz', 'label': './labelsTr/MSWAL_0171.nii.gz'}, {'image': './imagesTr/MSWAL_0172_0000.nii.gz', 'label': './labelsTr/MSWAL_0172.nii.gz'}, {'image': './imagesTr/MSWAL_0173_0000.nii.gz', 'label': './labelsTr/MSWAL_0173.nii.gz'}, {'image': './imagesTr/MSWAL_0174_0000.nii.gz', 'label': './labelsTr/MSWAL_0174.nii.gz'}, {'image': './imagesTr/MSWAL_0175_0000.nii.gz', 'label': './labelsTr/MSWAL_0175.nii.gz'}, {'image': './imagesTr/MSWAL_0176_0000.nii.gz', 'label': './labelsTr/MSWAL_0176.nii.gz'}, {'image': './imagesTr/MSWAL_0177_0000.nii.gz', 'label': './labelsTr/MSWAL_0177.nii.gz'}, {'image': './imagesTr/MSWAL_0178_0000.nii.gz', 
'label': './labelsTr/MSWAL_0178.nii.gz'}, {'image': './imagesTr/MSWAL_0179_0000.nii.gz', 'label': './labelsTr/MSWAL_0179.nii.gz'}, {'image': './imagesTr/MSWAL_0180_0000.nii.gz', 'label': './labelsTr/MSWAL_0180.nii.gz'}, {'image': './imagesTr/MSWAL_0182_0000.nii.gz', 'label': './labelsTr/MSWAL_0182.nii.gz'}, {'image': './imagesTr/MSWAL_0183_0000.nii.gz', 'label': './labelsTr/MSWAL_0183.nii.gz'}, {'image': './imagesTr/MSWAL_0184_0000.nii.gz', 'label': './labelsTr/MSWAL_0184.nii.gz'}, {'image': './imagesTr/MSWAL_0185_0000.nii.gz', 'label': './labelsTr/MSWAL_0185.nii.gz'}, {'image': './imagesTr/MSWAL_0186_0000.nii.gz', 'label': './labelsTr/MSWAL_0186.nii.gz'}, {'image': './imagesTr/MSWAL_0187_0000.nii.gz', 'label': './labelsTr/MSWAL_0187.nii.gz'}, {'image': './imagesTr/MSWAL_0188_0000.nii.gz', 'label': './labelsTr/MSWAL_0188.nii.gz'}, {'image': './imagesTr/MSWAL_0189_0000.nii.gz', 'label': './labelsTr/MSWAL_0189.nii.gz'}, {'image': './imagesTr/MSWAL_0193_0000.nii.gz', 'label': './labelsTr/MSWAL_0193.nii.gz'}, {'image': './imagesTr/MSWAL_0194_0000.nii.gz', 'label': './labelsTr/MSWAL_0194.nii.gz'}, {'image': './imagesTr/MSWAL_0195_0000.nii.gz', 'label': './labelsTr/MSWAL_0195.nii.gz'}, {'image': './imagesTr/MSWAL_0199_0000.nii.gz', 'label': './labelsTr/MSWAL_0199.nii.gz'}, {'image': './imagesTr/MSWAL_0201_0000.nii.gz', 'label': './labelsTr/MSWAL_0201.nii.gz'}, {'image': './imagesTr/MSWAL_0202_0000.nii.gz', 'label': './labelsTr/MSWAL_0202.nii.gz'}, {'image': './imagesTr/MSWAL_0203_0000.nii.gz', 'label': './labelsTr/MSWAL_0203.nii.gz'}, {'image': './imagesTr/MSWAL_0204_0000.nii.gz', 'label': './labelsTr/MSWAL_0204.nii.gz'}, {'image': './imagesTr/MSWAL_0207_0000.nii.gz', 'label': './labelsTr/MSWAL_0207.nii.gz'}, {'image': './imagesTr/MSWAL_0208_0000.nii.gz', 'label': './labelsTr/MSWAL_0208.nii.gz'}, {'image': './imagesTr/MSWAL_0209_0000.nii.gz', 'label': './labelsTr/MSWAL_0209.nii.gz'}, {'image': './imagesTr/MSWAL_0214_0000.nii.gz', 'label': './labelsTr/MSWAL_0214.nii.gz'}, 
{'image': './imagesTr/MSWAL_0217_0000.nii.gz', 'label': './labelsTr/MSWAL_0217.nii.gz'}, {'image': './imagesTr/MSWAL_0218_0000.nii.gz', 'label': './labelsTr/MSWAL_0218.nii.gz'}, {'image': './imagesTr/MSWAL_0219_0000.nii.gz', 'label': './labelsTr/MSWAL_0219.nii.gz'}, {'image': './imagesTr/MSWAL_0220_0000.nii.gz', 'label': './labelsTr/MSWAL_0220.nii.gz'}, {'image': './imagesTr/MSWAL_0221_0000.nii.gz', 'label': './labelsTr/MSWAL_0221.nii.gz'}, {'image': './imagesTr/MSWAL_0222_0000.nii.gz', 'label': './labelsTr/MSWAL_0222.nii.gz'}, {'image': './imagesTr/MSWAL_0223_0000.nii.gz', 'label': './labelsTr/MSWAL_0223.nii.gz'}, {'image': './imagesTr/MSWAL_0224_0000.nii.gz', 'label': './labelsTr/MSWAL_0224.nii.gz'}, {'image': './imagesTr/MSWAL_0225_0000.nii.gz', 'label': './labelsTr/MSWAL_0225.nii.gz'}, {'image': './imagesTr/MSWAL_0226_0000.nii.gz', 'label': './labelsTr/MSWAL_0226.nii.gz'}, {'image': './imagesTr/MSWAL_0227_0000.nii.gz', 'label': './labelsTr/MSWAL_0227.nii.gz'}, {'image': './imagesTr/MSWAL_0228_0000.nii.gz', 'label': './labelsTr/MSWAL_0228.nii.gz'}, {'image': './imagesTr/MSWAL_0229_0000.nii.gz', 'label': './labelsTr/MSWAL_0229.nii.gz'}, {'image': './imagesTr/MSWAL_0230_0000.nii.gz', 'label': './labelsTr/MSWAL_0230.nii.gz'}, {'image': './imagesTr/MSWAL_0233_0000.nii.gz', 'label': './labelsTr/MSWAL_0233.nii.gz'}, {'image': './imagesTr/MSWAL_0234_0000.nii.gz', 'label': './labelsTr/MSWAL_0234.nii.gz'}, {'image': './imagesTr/MSWAL_0238_0000.nii.gz', 'label': './labelsTr/MSWAL_0238.nii.gz'}, {'image': './imagesTr/MSWAL_0241_0000.nii.gz', 'label': './labelsTr/MSWAL_0241.nii.gz'}, {'image': './imagesTr/MSWAL_0242_0000.nii.gz', 'label': './labelsTr/MSWAL_0242.nii.gz'}, {'image': './imagesTr/MSWAL_0243_0000.nii.gz', 'label': './labelsTr/MSWAL_0243.nii.gz'}, {'image': './imagesTr/MSWAL_0245_0000.nii.gz', 'label': './labelsTr/MSWAL_0245.nii.gz'}, {'image': './imagesTr/MSWAL_0246_0000.nii.gz', 'label': './labelsTr/MSWAL_0246.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0247_0000.nii.gz', 'label': './labelsTr/MSWAL_0247.nii.gz'}, {'image': './imagesTr/MSWAL_0248_0000.nii.gz', 'label': './labelsTr/MSWAL_0248.nii.gz'}, {'image': './imagesTr/MSWAL_0251_0000.nii.gz', 'label': './labelsTr/MSWAL_0251.nii.gz'}, {'image': './imagesTr/MSWAL_0252_0000.nii.gz', 'label': './labelsTr/MSWAL_0252.nii.gz'}, {'image': './imagesTr/MSWAL_0253_0000.nii.gz', 'label': './labelsTr/MSWAL_0253.nii.gz'}, {'image': './imagesTr/MSWAL_0254_0000.nii.gz', 'label': './labelsTr/MSWAL_0254.nii.gz'}, {'image': './imagesTr/MSWAL_0255_0000.nii.gz', 'label': './labelsTr/MSWAL_0255.nii.gz'}, {'image': './imagesTr/MSWAL_0256_0000.nii.gz', 'label': './labelsTr/MSWAL_0256.nii.gz'}, {'image': './imagesTr/MSWAL_0257_0000.nii.gz', 'label': './labelsTr/MSWAL_0257.nii.gz'}, {'image': './imagesTr/MSWAL_0258_0000.nii.gz', 'label': './labelsTr/MSWAL_0258.nii.gz'}, {'image': './imagesTr/MSWAL_0259_0000.nii.gz', 'label': './labelsTr/MSWAL_0259.nii.gz'}, {'image': './imagesTr/MSWAL_0260_0000.nii.gz', 'label': './labelsTr/MSWAL_0260.nii.gz'}, {'image': './imagesTr/MSWAL_0261_0000.nii.gz', 'label': './labelsTr/MSWAL_0261.nii.gz'}, {'image': './imagesTr/MSWAL_0262_0000.nii.gz', 'label': './labelsTr/MSWAL_0262.nii.gz'}, {'image': './imagesTr/MSWAL_0263_0000.nii.gz', 'label': './labelsTr/MSWAL_0263.nii.gz'}, {'image': './imagesTr/MSWAL_0264_0000.nii.gz', 'label': './labelsTr/MSWAL_0264.nii.gz'}, {'image': './imagesTr/MSWAL_0265_0000.nii.gz', 'label': './labelsTr/MSWAL_0265.nii.gz'}, {'image': './imagesTr/MSWAL_0267_0000.nii.gz', 'label': './labelsTr/MSWAL_0267.nii.gz'}, {'image': './imagesTr/MSWAL_0270_0000.nii.gz', 'label': './labelsTr/MSWAL_0270.nii.gz'}, {'image': './imagesTr/MSWAL_0271_0000.nii.gz', 'label': './labelsTr/MSWAL_0271.nii.gz'}, {'image': './imagesTr/MSWAL_0272_0000.nii.gz', 'label': './labelsTr/MSWAL_0272.nii.gz'}, {'image': './imagesTr/MSWAL_0273_0000.nii.gz', 'label': './labelsTr/MSWAL_0273.nii.gz'}, {'image': './imagesTr/MSWAL_0274_0000.nii.gz', 
'label': './labelsTr/MSWAL_0274.nii.gz'}, {'image': './imagesTr/MSWAL_0275_0000.nii.gz', 'label': './labelsTr/MSWAL_0275.nii.gz'}, {'image': './imagesTr/MSWAL_0276_0000.nii.gz', 'label': './labelsTr/MSWAL_0276.nii.gz'}, {'image': './imagesTr/MSWAL_0277_0000.nii.gz', 'label': './labelsTr/MSWAL_0277.nii.gz'}, {'image': './imagesTr/MSWAL_0278_0000.nii.gz', 'label': './labelsTr/MSWAL_0278.nii.gz'}, {'image': './imagesTr/MSWAL_0279_0000.nii.gz', 'label': './labelsTr/MSWAL_0279.nii.gz'}, {'image': './imagesTr/MSWAL_0281_0000.nii.gz', 'label': './labelsTr/MSWAL_0281.nii.gz'}, {'image': './imagesTr/MSWAL_0282_0000.nii.gz', 'label': './labelsTr/MSWAL_0282.nii.gz'}, {'image': './imagesTr/MSWAL_0283_0000.nii.gz', 'label': './labelsTr/MSWAL_0283.nii.gz'}, {'image': './imagesTr/MSWAL_0284_0000.nii.gz', 'label': './labelsTr/MSWAL_0284.nii.gz'}, {'image': './imagesTr/MSWAL_0285_0000.nii.gz', 'label': './labelsTr/MSWAL_0285.nii.gz'}, {'image': './imagesTr/MSWAL_0288_0000.nii.gz', 'label': './labelsTr/MSWAL_0288.nii.gz'}, {'image': './imagesTr/MSWAL_0289_0000.nii.gz', 'label': './labelsTr/MSWAL_0289.nii.gz'}, {'image': './imagesTr/MSWAL_0290_0000.nii.gz', 'label': './labelsTr/MSWAL_0290.nii.gz'}, {'image': './imagesTr/MSWAL_0293_0000.nii.gz', 'label': './labelsTr/MSWAL_0293.nii.gz'}, {'image': './imagesTr/MSWAL_0296_0000.nii.gz', 'label': './labelsTr/MSWAL_0296.nii.gz'}, {'image': './imagesTr/MSWAL_0297_0000.nii.gz', 'label': './labelsTr/MSWAL_0297.nii.gz'}, {'image': './imagesTr/MSWAL_0301_0000.nii.gz', 'label': './labelsTr/MSWAL_0301.nii.gz'}, {'image': './imagesTr/MSWAL_0302_0000.nii.gz', 'label': './labelsTr/MSWAL_0302.nii.gz'}, {'image': './imagesTr/MSWAL_0303_0000.nii.gz', 'label': './labelsTr/MSWAL_0303.nii.gz'}, {'image': './imagesTr/MSWAL_0306_0000.nii.gz', 'label': './labelsTr/MSWAL_0306.nii.gz'}, {'image': './imagesTr/MSWAL_0307_0000.nii.gz', 'label': './labelsTr/MSWAL_0307.nii.gz'}, {'image': './imagesTr/MSWAL_0308_0000.nii.gz', 'label': './labelsTr/MSWAL_0308.nii.gz'}, 
{'image': './imagesTr/MSWAL_0311_0000.nii.gz', 'label': './labelsTr/MSWAL_0311.nii.gz'}, {'image': './imagesTr/MSWAL_0312_0000.nii.gz', 'label': './labelsTr/MSWAL_0312.nii.gz'}, {'image': './imagesTr/MSWAL_0313_0000.nii.gz', 'label': './labelsTr/MSWAL_0313.nii.gz'}, {'image': './imagesTr/MSWAL_0314_0000.nii.gz', 'label': './labelsTr/MSWAL_0314.nii.gz'}, {'image': './imagesTr/MSWAL_0316_0000.nii.gz', 'label': './labelsTr/MSWAL_0316.nii.gz'}, {'image': './imagesTr/MSWAL_0317_0000.nii.gz', 'label': './labelsTr/MSWAL_0317.nii.gz'}, {'image': './imagesTr/MSWAL_0318_0000.nii.gz', 'label': './labelsTr/MSWAL_0318.nii.gz'}, {'image': './imagesTr/MSWAL_0320_0000.nii.gz', 'label': './labelsTr/MSWAL_0320.nii.gz'}, {'image': './imagesTr/MSWAL_0323_0000.nii.gz', 'label': './labelsTr/MSWAL_0323.nii.gz'}, {'image': './imagesTr/MSWAL_0324_0000.nii.gz', 'label': './labelsTr/MSWAL_0324.nii.gz'}, {'image': './imagesTr/MSWAL_0326_0000.nii.gz', 'label': './labelsTr/MSWAL_0326.nii.gz'}, {'image': './imagesTr/MSWAL_0327_0000.nii.gz', 'label': './labelsTr/MSWAL_0327.nii.gz'}, {'image': './imagesTr/MSWAL_0328_0000.nii.gz', 'label': './labelsTr/MSWAL_0328.nii.gz'}, {'image': './imagesTr/MSWAL_0330_0000.nii.gz', 'label': './labelsTr/MSWAL_0330.nii.gz'}, {'image': './imagesTr/MSWAL_0331_0000.nii.gz', 'label': './labelsTr/MSWAL_0331.nii.gz'}, {'image': './imagesTr/MSWAL_0332_0000.nii.gz', 'label': './labelsTr/MSWAL_0332.nii.gz'}, {'image': './imagesTr/MSWAL_0333_0000.nii.gz', 'label': './labelsTr/MSWAL_0333.nii.gz'}, {'image': './imagesTr/MSWAL_0334_0000.nii.gz', 'label': './labelsTr/MSWAL_0334.nii.gz'}, {'image': './imagesTr/MSWAL_0335_0000.nii.gz', 'label': './labelsTr/MSWAL_0335.nii.gz'}, {'image': './imagesTr/MSWAL_0336_0000.nii.gz', 'label': './labelsTr/MSWAL_0336.nii.gz'}, {'image': './imagesTr/MSWAL_0337_0000.nii.gz', 'label': './labelsTr/MSWAL_0337.nii.gz'}, {'image': './imagesTr/MSWAL_0338_0000.nii.gz', 'label': './labelsTr/MSWAL_0338.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0341_0000.nii.gz', 'label': './labelsTr/MSWAL_0341.nii.gz'}, {'image': './imagesTr/MSWAL_0342_0000.nii.gz', 'label': './labelsTr/MSWAL_0342.nii.gz'}, {'image': './imagesTr/MSWAL_0343_0000.nii.gz', 'label': './labelsTr/MSWAL_0343.nii.gz'}, {'image': './imagesTr/MSWAL_0344_0000.nii.gz', 'label': './labelsTr/MSWAL_0344.nii.gz'}, {'image': './imagesTr/MSWAL_0345_0000.nii.gz', 'label': './labelsTr/MSWAL_0345.nii.gz'}, {'image': './imagesTr/MSWAL_0346_0000.nii.gz', 'label': './labelsTr/MSWAL_0346.nii.gz'}, {'image': './imagesTr/MSWAL_0348_0000.nii.gz', 'label': './labelsTr/MSWAL_0348.nii.gz'}, {'image': './imagesTr/MSWAL_0353_0000.nii.gz', 'label': './labelsTr/MSWAL_0353.nii.gz'}, {'image': './imagesTr/MSWAL_0354_0000.nii.gz', 'label': './labelsTr/MSWAL_0354.nii.gz'}, {'image': './imagesTr/MSWAL_0355_0000.nii.gz', 'label': './labelsTr/MSWAL_0355.nii.gz'}, {'image': './imagesTr/MSWAL_0356_0000.nii.gz', 'label': './labelsTr/MSWAL_0356.nii.gz'}, {'image': './imagesTr/MSWAL_0357_0000.nii.gz', 'label': './labelsTr/MSWAL_0357.nii.gz'}, {'image': './imagesTr/MSWAL_0360_0000.nii.gz', 'label': './labelsTr/MSWAL_0360.nii.gz'}, {'image': './imagesTr/MSWAL_0361_0000.nii.gz', 'label': './labelsTr/MSWAL_0361.nii.gz'}, {'image': './imagesTr/MSWAL_0362_0000.nii.gz', 'label': './labelsTr/MSWAL_0362.nii.gz'}, {'image': './imagesTr/MSWAL_0363_0000.nii.gz', 'label': './labelsTr/MSWAL_0363.nii.gz'}, {'image': './imagesTr/MSWAL_0365_0000.nii.gz', 'label': './labelsTr/MSWAL_0365.nii.gz'}, {'image': './imagesTr/MSWAL_0366_0000.nii.gz', 'label': './labelsTr/MSWAL_0366.nii.gz'}, {'image': './imagesTr/MSWAL_0369_0000.nii.gz', 'label': './labelsTr/MSWAL_0369.nii.gz'}, {'image': './imagesTr/MSWAL_0370_0000.nii.gz', 'label': './labelsTr/MSWAL_0370.nii.gz'}, {'image': './imagesTr/MSWAL_0373_0000.nii.gz', 'label': './labelsTr/MSWAL_0373.nii.gz'}, {'image': './imagesTr/MSWAL_0374_0000.nii.gz', 'label': './labelsTr/MSWAL_0374.nii.gz'}, {'image': './imagesTr/MSWAL_0375_0000.nii.gz', 
'label': './labelsTr/MSWAL_0375.nii.gz'}, {'image': './imagesTr/MSWAL_0376_0000.nii.gz', 'label': './labelsTr/MSWAL_0376.nii.gz'}, {'image': './imagesTr/MSWAL_0378_0000.nii.gz', 'label': './labelsTr/MSWAL_0378.nii.gz'}, {'image': './imagesTr/MSWAL_0379_0000.nii.gz', 'label': './labelsTr/MSWAL_0379.nii.gz'}, {'image': './imagesTr/MSWAL_0380_0000.nii.gz', 'label': './labelsTr/MSWAL_0380.nii.gz'}, {'image': './imagesTr/MSWAL_0381_0000.nii.gz', 'label': './labelsTr/MSWAL_0381.nii.gz'}, {'image': './imagesTr/MSWAL_0382_0000.nii.gz', 'label': './labelsTr/MSWAL_0382.nii.gz'}, {'image': './imagesTr/MSWAL_0387_0000.nii.gz', 'label': './labelsTr/MSWAL_0387.nii.gz'}, {'image': './imagesTr/MSWAL_0388_0000.nii.gz', 'label': './labelsTr/MSWAL_0388.nii.gz'}, {'image': './imagesTr/MSWAL_0389_0000.nii.gz', 'label': './labelsTr/MSWAL_0389.nii.gz'}, {'image': './imagesTr/MSWAL_0390_0000.nii.gz', 'label': './labelsTr/MSWAL_0390.nii.gz'}, {'image': './imagesTr/MSWAL_0391_0000.nii.gz', 'label': './labelsTr/MSWAL_0391.nii.gz'}, {'image': './imagesTr/MSWAL_0392_0000.nii.gz', 'label': './labelsTr/MSWAL_0392.nii.gz'}, {'image': './imagesTr/MSWAL_0393_0000.nii.gz', 'label': './labelsTr/MSWAL_0393.nii.gz'}, {'image': './imagesTr/MSWAL_0397_0000.nii.gz', 'label': './labelsTr/MSWAL_0397.nii.gz'}, {'image': './imagesTr/MSWAL_0398_0000.nii.gz', 'label': './labelsTr/MSWAL_0398.nii.gz'}, {'image': './imagesTr/MSWAL_0399_0000.nii.gz', 'label': './labelsTr/MSWAL_0399.nii.gz'}, {'image': './imagesTr/MSWAL_0400_0000.nii.gz', 'label': './labelsTr/MSWAL_0400.nii.gz'}, {'image': './imagesTr/MSWAL_0402_0000.nii.gz', 'label': './labelsTr/MSWAL_0402.nii.gz'}, {'image': './imagesTr/MSWAL_0403_0000.nii.gz', 'label': './labelsTr/MSWAL_0403.nii.gz'}, {'image': './imagesTr/MSWAL_0407_0000.nii.gz', 'label': './labelsTr/MSWAL_0407.nii.gz'}, {'image': './imagesTr/MSWAL_0409_0000.nii.gz', 'label': './labelsTr/MSWAL_0409.nii.gz'}, {'image': './imagesTr/MSWAL_0410_0000.nii.gz', 'label': './labelsTr/MSWAL_0410.nii.gz'}, 
{'image': './imagesTr/MSWAL_0411_0000.nii.gz', 'label': './labelsTr/MSWAL_0411.nii.gz'}, {'image': './imagesTr/MSWAL_0412_0000.nii.gz', 'label': './labelsTr/MSWAL_0412.nii.gz'}, {'image': './imagesTr/MSWAL_0414_0000.nii.gz', 'label': './labelsTr/MSWAL_0414.nii.gz'}, {'image': './imagesTr/MSWAL_0415_0000.nii.gz', 'label': './labelsTr/MSWAL_0415.nii.gz'}, {'image': './imagesTr/MSWAL_0416_0000.nii.gz', 'label': './labelsTr/MSWAL_0416.nii.gz'}, {'image': './imagesTr/MSWAL_0417_0000.nii.gz', 'label': './labelsTr/MSWAL_0417.nii.gz'}, {'image': './imagesTr/MSWAL_0418_0000.nii.gz', 'label': './labelsTr/MSWAL_0418.nii.gz'}, {'image': './imagesTr/MSWAL_0419_0000.nii.gz', 'label': './labelsTr/MSWAL_0419.nii.gz'}, {'image': './imagesTr/MSWAL_0420_0000.nii.gz', 'label': './labelsTr/MSWAL_0420.nii.gz'}, {'image': './imagesTr/MSWAL_0421_0000.nii.gz', 'label': './labelsTr/MSWAL_0421.nii.gz'}, {'image': './imagesTr/MSWAL_0422_0000.nii.gz', 'label': './labelsTr/MSWAL_0422.nii.gz'}, {'image': './imagesTr/MSWAL_0423_0000.nii.gz', 'label': './labelsTr/MSWAL_0423.nii.gz'}, {'image': './imagesTr/MSWAL_0425_0000.nii.gz', 'label': './labelsTr/MSWAL_0425.nii.gz'}, {'image': './imagesTr/MSWAL_0426_0000.nii.gz', 'label': './labelsTr/MSWAL_0426.nii.gz'}, {'image': './imagesTr/MSWAL_0427_0000.nii.gz', 'label': './labelsTr/MSWAL_0427.nii.gz'}, {'image': './imagesTr/MSWAL_0428_0000.nii.gz', 'label': './labelsTr/MSWAL_0428.nii.gz'}, {'image': './imagesTr/MSWAL_0429_0000.nii.gz', 'label': './labelsTr/MSWAL_0429.nii.gz'}, {'image': './imagesTr/MSWAL_0430_0000.nii.gz', 'label': './labelsTr/MSWAL_0430.nii.gz'}, {'image': './imagesTr/MSWAL_0431_0000.nii.gz', 'label': './labelsTr/MSWAL_0431.nii.gz'}, {'image': './imagesTr/MSWAL_0432_0000.nii.gz', 'label': './labelsTr/MSWAL_0432.nii.gz'}, {'image': './imagesTr/MSWAL_0434_0000.nii.gz', 'label': './labelsTr/MSWAL_0434.nii.gz'}, {'image': './imagesTr/MSWAL_0435_0000.nii.gz', 'label': './labelsTr/MSWAL_0435.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0436_0000.nii.gz', 'label': './labelsTr/MSWAL_0436.nii.gz'}, {'image': './imagesTr/MSWAL_0437_0000.nii.gz', 'label': './labelsTr/MSWAL_0437.nii.gz'}, {'image': './imagesTr/MSWAL_0438_0000.nii.gz', 'label': './labelsTr/MSWAL_0438.nii.gz'}, {'image': './imagesTr/MSWAL_0439_0000.nii.gz', 'label': './labelsTr/MSWAL_0439.nii.gz'}, {'image': './imagesTr/MSWAL_0440_0000.nii.gz', 'label': './labelsTr/MSWAL_0440.nii.gz'}, {'image': './imagesTr/MSWAL_0442_0000.nii.gz', 'label': './labelsTr/MSWAL_0442.nii.gz'}, {'image': './imagesTr/MSWAL_0446_0000.nii.gz', 'label': './labelsTr/MSWAL_0446.nii.gz'}, {'image': './imagesTr/MSWAL_0447_0000.nii.gz', 'label': './labelsTr/MSWAL_0447.nii.gz'}, {'image': './imagesTr/MSWAL_0452_0000.nii.gz', 'label': './labelsTr/MSWAL_0452.nii.gz'}, {'image': './imagesTr/MSWAL_0453_0000.nii.gz', 'label': './labelsTr/MSWAL_0453.nii.gz'}, {'image': './imagesTr/MSWAL_0455_0000.nii.gz', 'label': './labelsTr/MSWAL_0455.nii.gz'}, {'image': './imagesTr/MSWAL_0457_0000.nii.gz', 'label': './labelsTr/MSWAL_0457.nii.gz'}, {'image': './imagesTr/MSWAL_0460_0000.nii.gz', 'label': './labelsTr/MSWAL_0460.nii.gz'}, {'image': './imagesTr/MSWAL_0461_0000.nii.gz', 'label': './labelsTr/MSWAL_0461.nii.gz'}, {'image': './imagesTr/MSWAL_0463_0000.nii.gz', 'label': './labelsTr/MSWAL_0463.nii.gz'}, {'image': './imagesTr/MSWAL_0464_0000.nii.gz', 'label': './labelsTr/MSWAL_0464.nii.gz'}, {'image': './imagesTr/MSWAL_0465_0000.nii.gz', 'label': './labelsTr/MSWAL_0465.nii.gz'}, {'image': './imagesTr/MSWAL_0466_0000.nii.gz', 'label': './labelsTr/MSWAL_0466.nii.gz'}, {'image': './imagesTr/MSWAL_0468_0000.nii.gz', 'label': './labelsTr/MSWAL_0468.nii.gz'}, {'image': './imagesTr/MSWAL_0470_0000.nii.gz', 'label': './labelsTr/MSWAL_0470.nii.gz'}, {'image': './imagesTr/MSWAL_0471_0000.nii.gz', 'label': './labelsTr/MSWAL_0471.nii.gz'}, {'image': './imagesTr/MSWAL_0473_0000.nii.gz', 'label': './labelsTr/MSWAL_0473.nii.gz'}, {'image': './imagesTr/MSWAL_0474_0000.nii.gz', 
'label': './labelsTr/MSWAL_0474.nii.gz'}, {'image': './imagesTr/MSWAL_0475_0000.nii.gz', 'label': './labelsTr/MSWAL_0475.nii.gz'}, {'image': './imagesTr/MSWAL_0476_0000.nii.gz', 'label': './labelsTr/MSWAL_0476.nii.gz'}, {'image': './imagesTr/MSWAL_0477_0000.nii.gz', 'label': './labelsTr/MSWAL_0477.nii.gz'}, {'image': './imagesTr/MSWAL_0479_0000.nii.gz', 'label': './labelsTr/MSWAL_0479.nii.gz'}, {'image': './imagesTr/MSWAL_0480_0000.nii.gz', 'label': './labelsTr/MSWAL_0480.nii.gz'}, {'image': './imagesTr/MSWAL_0482_0000.nii.gz', 'label': './labelsTr/MSWAL_0482.nii.gz'}, {'image': './imagesTr/MSWAL_0483_0000.nii.gz', 'label': './labelsTr/MSWAL_0483.nii.gz'}, {'image': './imagesTr/MSWAL_0484_0000.nii.gz', 'label': './labelsTr/MSWAL_0484.nii.gz'}, {'image': './imagesTr/MSWAL_0485_0000.nii.gz', 'label': './labelsTr/MSWAL_0485.nii.gz'}, {'image': './imagesTr/MSWAL_0486_0000.nii.gz', 'label': './labelsTr/MSWAL_0486.nii.gz'}, {'image': './imagesTr/MSWAL_0487_0000.nii.gz', 'label': './labelsTr/MSWAL_0487.nii.gz'}, {'image': './imagesTr/MSWAL_0488_0000.nii.gz', 'label': './labelsTr/MSWAL_0488.nii.gz'}, {'image': './imagesTr/MSWAL_0489_0000.nii.gz', 'label': './labelsTr/MSWAL_0489.nii.gz'}, {'image': './imagesTr/MSWAL_0490_0000.nii.gz', 'label': './labelsTr/MSWAL_0490.nii.gz'}, {'image': './imagesTr/MSWAL_0491_0000.nii.gz', 'label': './labelsTr/MSWAL_0491.nii.gz'}, {'image': './imagesTr/MSWAL_0492_0000.nii.gz', 'label': './labelsTr/MSWAL_0492.nii.gz'}, {'image': './imagesTr/MSWAL_0493_0000.nii.gz', 'label': './labelsTr/MSWAL_0493.nii.gz'}, {'image': './imagesTr/MSWAL_0495_0000.nii.gz', 'label': './labelsTr/MSWAL_0495.nii.gz'}, {'image': './imagesTr/MSWAL_0497_0000.nii.gz', 'label': './labelsTr/MSWAL_0497.nii.gz'}, {'image': './imagesTr/MSWAL_0498_0000.nii.gz', 'label': './labelsTr/MSWAL_0498.nii.gz'}, {'image': './imagesTr/MSWAL_0500_0000.nii.gz', 'label': './labelsTr/MSWAL_0500.nii.gz'}, {'image': './imagesTr/MSWAL_0501_0000.nii.gz', 'label': './labelsTr/MSWAL_0501.nii.gz'}, 
{'image': './imagesTr/MSWAL_0504_0000.nii.gz', 'label': './labelsTr/MSWAL_0504.nii.gz'}, {'image': './imagesTr/MSWAL_0505_0000.nii.gz', 'label': './labelsTr/MSWAL_0505.nii.gz'}, {'image': './imagesTr/MSWAL_0506_0000.nii.gz', 'label': './labelsTr/MSWAL_0506.nii.gz'}, {'image': './imagesTr/MSWAL_0507_0000.nii.gz', 'label': './labelsTr/MSWAL_0507.nii.gz'}, {'image': './imagesTr/MSWAL_0508_0000.nii.gz', 'label': './labelsTr/MSWAL_0508.nii.gz'}, {'image': './imagesTr/MSWAL_0509_0000.nii.gz', 'label': './labelsTr/MSWAL_0509.nii.gz'}, {'image': './imagesTr/MSWAL_0510_0000.nii.gz', 'label': './labelsTr/MSWAL_0510.nii.gz'}, {'image': './imagesTr/MSWAL_0512_0000.nii.gz', 'label': './labelsTr/MSWAL_0512.nii.gz'}, {'image': './imagesTr/MSWAL_0516_0000.nii.gz', 'label': './labelsTr/MSWAL_0516.nii.gz'}, {'image': './imagesTr/MSWAL_0518_0000.nii.gz', 'label': './labelsTr/MSWAL_0518.nii.gz'}, {'image': './imagesTr/MSWAL_0519_0000.nii.gz', 'label': './labelsTr/MSWAL_0519.nii.gz'}, {'image': './imagesTr/MSWAL_0521_0000.nii.gz', 'label': './labelsTr/MSWAL_0521.nii.gz'}, {'image': './imagesTr/MSWAL_0522_0000.nii.gz', 'label': './labelsTr/MSWAL_0522.nii.gz'}, {'image': './imagesTr/MSWAL_0523_0000.nii.gz', 'label': './labelsTr/MSWAL_0523.nii.gz'}, {'image': './imagesTr/MSWAL_0524_0000.nii.gz', 'label': './labelsTr/MSWAL_0524.nii.gz'}, {'image': './imagesTr/MSWAL_0526_0000.nii.gz', 'label': './labelsTr/MSWAL_0526.nii.gz'}, {'image': './imagesTr/MSWAL_0527_0000.nii.gz', 'label': './labelsTr/MSWAL_0527.nii.gz'}, {'image': './imagesTr/MSWAL_0530_0000.nii.gz', 'label': './labelsTr/MSWAL_0530.nii.gz'}, {'image': './imagesTr/MSWAL_0531_0000.nii.gz', 'label': './labelsTr/MSWAL_0531.nii.gz'}, {'image': './imagesTr/MSWAL_0534_0000.nii.gz', 'label': './labelsTr/MSWAL_0534.nii.gz'}, {'image': './imagesTr/MSWAL_0535_0000.nii.gz', 'label': './labelsTr/MSWAL_0535.nii.gz'}, {'image': './imagesTr/MSWAL_0536_0000.nii.gz', 'label': './labelsTr/MSWAL_0536.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0538_0000.nii.gz', 'label': './labelsTr/MSWAL_0538.nii.gz'}, {'image': './imagesTr/MSWAL_0539_0000.nii.gz', 'label': './labelsTr/MSWAL_0539.nii.gz'}, {'image': './imagesTr/MSWAL_0540_0000.nii.gz', 'label': './labelsTr/MSWAL_0540.nii.gz'}, {'image': './imagesTr/MSWAL_0542_0000.nii.gz', 'label': './labelsTr/MSWAL_0542.nii.gz'}, {'image': './imagesTr/MSWAL_0544_0000.nii.gz', 'label': './labelsTr/MSWAL_0544.nii.gz'}, {'image': './imagesTr/MSWAL_0545_0000.nii.gz', 'label': './labelsTr/MSWAL_0545.nii.gz'}, {'image': './imagesTr/MSWAL_0546_0000.nii.gz', 'label': './labelsTr/MSWAL_0546.nii.gz'}, {'image': './imagesTr/MSWAL_0547_0000.nii.gz', 'label': './labelsTr/MSWAL_0547.nii.gz'}, {'image': './imagesTr/MSWAL_0548_0000.nii.gz', 'label': './labelsTr/MSWAL_0548.nii.gz'}, {'image': './imagesTr/MSWAL_0549_0000.nii.gz', 'label': './labelsTr/MSWAL_0549.nii.gz'}, {'image': './imagesTr/MSWAL_0550_0000.nii.gz', 'label': './labelsTr/MSWAL_0550.nii.gz'}, {'image': './imagesTr/MSWAL_0551_0000.nii.gz', 'label': './labelsTr/MSWAL_0551.nii.gz'}, {'image': './imagesTr/MSWAL_0552_0000.nii.gz', 'label': './labelsTr/MSWAL_0552.nii.gz'}, {'image': './imagesTr/MSWAL_0553_0000.nii.gz', 'label': './labelsTr/MSWAL_0553.nii.gz'}, {'image': './imagesTr/MSWAL_0554_0000.nii.gz', 'label': './labelsTr/MSWAL_0554.nii.gz'}, {'image': './imagesTr/MSWAL_0555_0000.nii.gz', 'label': './labelsTr/MSWAL_0555.nii.gz'}, {'image': './imagesTr/MSWAL_0556_0000.nii.gz', 'label': './labelsTr/MSWAL_0556.nii.gz'}, {'image': './imagesTr/MSWAL_0557_0000.nii.gz', 'label': './labelsTr/MSWAL_0557.nii.gz'}, {'image': './imagesTr/MSWAL_0558_0000.nii.gz', 'label': './labelsTr/MSWAL_0558.nii.gz'}, {'image': './imagesTr/MSWAL_0559_0000.nii.gz', 'label': './labelsTr/MSWAL_0559.nii.gz'}, {'image': './imagesTr/MSWAL_0561_0000.nii.gz', 'label': './labelsTr/MSWAL_0561.nii.gz'}, {'image': './imagesTr/MSWAL_0562_0000.nii.gz', 'label': './labelsTr/MSWAL_0562.nii.gz'}, {'image': './imagesTr/MSWAL_0563_0000.nii.gz', 
'label': './labelsTr/MSWAL_0563.nii.gz'}, {'image': './imagesTr/MSWAL_0564_0000.nii.gz', 'label': './labelsTr/MSWAL_0564.nii.gz'}, {'image': './imagesTr/MSWAL_0566_0000.nii.gz', 'label': './labelsTr/MSWAL_0566.nii.gz'}, {'image': './imagesTr/MSWAL_0567_0000.nii.gz', 'label': './labelsTr/MSWAL_0567.nii.gz'}, {'image': './imagesTr/MSWAL_0568_0000.nii.gz', 'label': './labelsTr/MSWAL_0568.nii.gz'}, {'image': './imagesTr/MSWAL_0571_0000.nii.gz', 'label': './labelsTr/MSWAL_0571.nii.gz'}, {'image': './imagesTr/MSWAL_0573_0000.nii.gz', 'label': './labelsTr/MSWAL_0573.nii.gz'}, {'image': './imagesTr/MSWAL_0574_0000.nii.gz', 'label': './labelsTr/MSWAL_0574.nii.gz'}, {'image': './imagesTr/MSWAL_0575_0000.nii.gz', 'label': './labelsTr/MSWAL_0575.nii.gz'}, {'image': './imagesTr/MSWAL_0577_0000.nii.gz', 'label': './labelsTr/MSWAL_0577.nii.gz'}, {'image': './imagesTr/MSWAL_0578_0000.nii.gz', 'label': './labelsTr/MSWAL_0578.nii.gz'}, {'image': './imagesTr/MSWAL_0579_0000.nii.gz', 'label': './labelsTr/MSWAL_0579.nii.gz'}, {'image': './imagesTr/MSWAL_0580_0000.nii.gz', 'label': './labelsTr/MSWAL_0580.nii.gz'}, {'image': './imagesTr/MSWAL_0581_0000.nii.gz', 'label': './labelsTr/MSWAL_0581.nii.gz'}, {'image': './imagesTr/MSWAL_0582_0000.nii.gz', 'label': './labelsTr/MSWAL_0582.nii.gz'}, {'image': './imagesTr/MSWAL_0583_0000.nii.gz', 'label': './labelsTr/MSWAL_0583.nii.gz'}, {'image': './imagesTr/MSWAL_0584_0000.nii.gz', 'label': './labelsTr/MSWAL_0584.nii.gz'}, {'image': './imagesTr/MSWAL_0586_0000.nii.gz', 'label': './labelsTr/MSWAL_0586.nii.gz'}, {'image': './imagesTr/MSWAL_0590_0000.nii.gz', 'label': './labelsTr/MSWAL_0590.nii.gz'}, {'image': './imagesTr/MSWAL_0591_0000.nii.gz', 'label': './labelsTr/MSWAL_0591.nii.gz'}, {'image': './imagesTr/MSWAL_0592_0000.nii.gz', 'label': './labelsTr/MSWAL_0592.nii.gz'}, {'image': './imagesTr/MSWAL_0593_0000.nii.gz', 'label': './labelsTr/MSWAL_0593.nii.gz'}, {'image': './imagesTr/MSWAL_0595_0000.nii.gz', 'label': './labelsTr/MSWAL_0595.nii.gz'}, 
{'image': './imagesTr/MSWAL_0596_0000.nii.gz', 'label': './labelsTr/MSWAL_0596.nii.gz'}, {'image': './imagesTr/MSWAL_0597_0000.nii.gz', 'label': './labelsTr/MSWAL_0597.nii.gz'}, {'image': './imagesTr/MSWAL_0598_0000.nii.gz', 'label': './labelsTr/MSWAL_0598.nii.gz'}, {'image': './imagesTr/MSWAL_0599_0000.nii.gz', 'label': './labelsTr/MSWAL_0599.nii.gz'}, {'image': './imagesTr/MSWAL_0600_0000.nii.gz', 'label': './labelsTr/MSWAL_0600.nii.gz'}, {'image': './imagesTr/MSWAL_0601_0000.nii.gz', 'label': './labelsTr/MSWAL_0601.nii.gz'}, {'image': './imagesTr/MSWAL_0602_0000.nii.gz', 'label': './labelsTr/MSWAL_0602.nii.gz'}, {'image': './imagesTr/MSWAL_0604_0000.nii.gz', 'label': './labelsTr/MSWAL_0604.nii.gz'}, {'image': './imagesTr/MSWAL_0605_0000.nii.gz', 'label': './labelsTr/MSWAL_0605.nii.gz'}, {'image': './imagesTr/MSWAL_0608_0000.nii.gz', 'label': './labelsTr/MSWAL_0608.nii.gz'}, {'image': './imagesTr/MSWAL_0612_0000.nii.gz', 'label': './labelsTr/MSWAL_0612.nii.gz'}, {'image': './imagesTr/MSWAL_0614_0000.nii.gz', 'label': './labelsTr/MSWAL_0614.nii.gz'}, {'image': './imagesTr/MSWAL_0615_0000.nii.gz', 'label': './labelsTr/MSWAL_0615.nii.gz'}, {'image': './imagesTr/MSWAL_0616_0000.nii.gz', 'label': './labelsTr/MSWAL_0616.nii.gz'}, {'image': './imagesTr/MSWAL_0617_0000.nii.gz', 'label': './labelsTr/MSWAL_0617.nii.gz'}, {'image': './imagesTr/MSWAL_0621_0000.nii.gz', 'label': './labelsTr/MSWAL_0621.nii.gz'}, {'image': './imagesTr/MSWAL_0623_0000.nii.gz', 'label': './labelsTr/MSWAL_0623.nii.gz'}, {'image': './imagesTr/MSWAL_0625_0000.nii.gz', 'label': './labelsTr/MSWAL_0625.nii.gz'}, {'image': './imagesTr/MSWAL_0626_0000.nii.gz', 'label': './labelsTr/MSWAL_0626.nii.gz'}, {'image': './imagesTr/MSWAL_0627_0000.nii.gz', 'label': './labelsTr/MSWAL_0627.nii.gz'}, {'image': './imagesTr/MSWAL_0628_0000.nii.gz', 'label': './labelsTr/MSWAL_0628.nii.gz'}, {'image': './imagesTr/MSWAL_0629_0000.nii.gz', 'label': './labelsTr/MSWAL_0629.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0630_0000.nii.gz', 'label': './labelsTr/MSWAL_0630.nii.gz'}, {'image': './imagesTr/MSWAL_0632_0000.nii.gz', 'label': './labelsTr/MSWAL_0632.nii.gz'}, {'image': './imagesTr/MSWAL_0635_0000.nii.gz', 'label': './labelsTr/MSWAL_0635.nii.gz'}, {'image': './imagesTr/MSWAL_0636_0000.nii.gz', 'label': './labelsTr/MSWAL_0636.nii.gz'}, {'image': './imagesTr/MSWAL_0638_0000.nii.gz', 'label': './labelsTr/MSWAL_0638.nii.gz'}, {'image': './imagesTr/MSWAL_0640_0000.nii.gz', 'label': './labelsTr/MSWAL_0640.nii.gz'}, {'image': './imagesTr/MSWAL_0641_0000.nii.gz', 'label': './labelsTr/MSWAL_0641.nii.gz'}, {'image': './imagesTr/MSWAL_0643_0000.nii.gz', 'label': './labelsTr/MSWAL_0643.nii.gz'}, {'image': './imagesTr/MSWAL_0644_0000.nii.gz', 'label': './labelsTr/MSWAL_0644.nii.gz'}, {'image': './imagesTr/MSWAL_0646_0000.nii.gz', 'label': './labelsTr/MSWAL_0646.nii.gz'}, {'image': './imagesTr/MSWAL_0648_0000.nii.gz', 'label': './labelsTr/MSWAL_0648.nii.gz'}, {'image': './imagesTr/MSWAL_0649_0000.nii.gz', 'label': './labelsTr/MSWAL_0649.nii.gz'}, {'image': './imagesTr/MSWAL_0650_0000.nii.gz', 'label': './labelsTr/MSWAL_0650.nii.gz'}, {'image': './imagesTr/MSWAL_0651_0000.nii.gz', 'label': './labelsTr/MSWAL_0651.nii.gz'}, {'image': './imagesTr/MSWAL_0653_0000.nii.gz', 'label': './labelsTr/MSWAL_0653.nii.gz'}, {'image': './imagesTr/MSWAL_0654_0000.nii.gz', 'label': './labelsTr/MSWAL_0654.nii.gz'}, {'image': './imagesTr/MSWAL_0655_0000.nii.gz', 'label': './labelsTr/MSWAL_0655.nii.gz'}, {'image': './imagesTr/MSWAL_0656_0000.nii.gz', 'label': './labelsTr/MSWAL_0656.nii.gz'}, {'image': './imagesTr/MSWAL_0658_0000.nii.gz', 'label': './labelsTr/MSWAL_0658.nii.gz'}, {'image': './imagesTr/MSWAL_0660_0000.nii.gz', 'label': './labelsTr/MSWAL_0660.nii.gz'}, {'image': './imagesTr/MSWAL_0661_0000.nii.gz', 'label': './labelsTr/MSWAL_0661.nii.gz'}, {'image': './imagesTr/MSWAL_0662_0000.nii.gz', 'label': './labelsTr/MSWAL_0662.nii.gz'}, {'image': './imagesTr/MSWAL_0663_0000.nii.gz', 
'label': './labelsTr/MSWAL_0663.nii.gz'}, {'image': './imagesTr/MSWAL_0666_0000.nii.gz', 'label': './labelsTr/MSWAL_0666.nii.gz'}, {'image': './imagesTr/MSWAL_0667_0000.nii.gz', 'label': './labelsTr/MSWAL_0667.nii.gz'}, {'image': './imagesTr/MSWAL_0668_0000.nii.gz', 'label': './labelsTr/MSWAL_0668.nii.gz'}, {'image': './imagesTr/MSWAL_0669_0000.nii.gz', 'label': './labelsTr/MSWAL_0669.nii.gz'}, {'image': './imagesTr/MSWAL_0670_0000.nii.gz', 'label': './labelsTr/MSWAL_0670.nii.gz'}, {'image': './imagesTr/MSWAL_0671_0000.nii.gz', 'label': './labelsTr/MSWAL_0671.nii.gz'}, {'image': './imagesTr/MSWAL_0673_0000.nii.gz', 'label': './labelsTr/MSWAL_0673.nii.gz'}, {'image': './imagesTr/MSWAL_0674_0000.nii.gz', 'label': './labelsTr/MSWAL_0674.nii.gz'}, {'image': './imagesTr/MSWAL_0675_0000.nii.gz', 'label': './labelsTr/MSWAL_0675.nii.gz'}, {'image': './imagesTr/MSWAL_0676_0000.nii.gz', 'label': './labelsTr/MSWAL_0676.nii.gz'}, {'image': './imagesTr/MSWAL_0677_0000.nii.gz', 'label': './labelsTr/MSWAL_0677.nii.gz'}, {'image': './imagesTr/MSWAL_0679_0000.nii.gz', 'label': './labelsTr/MSWAL_0679.nii.gz'}, {'image': './imagesTr/MSWAL_0680_0000.nii.gz', 'label': './labelsTr/MSWAL_0680.nii.gz'}, {'image': './imagesTr/MSWAL_0681_0000.nii.gz', 'label': './labelsTr/MSWAL_0681.nii.gz'}, {'image': './imagesTr/MSWAL_0682_0000.nii.gz', 'label': './labelsTr/MSWAL_0682.nii.gz'}, {'image': './imagesTr/MSWAL_0685_0000.nii.gz', 'label': './labelsTr/MSWAL_0685.nii.gz'}, {'image': './imagesTr/MSWAL_0686_0000.nii.gz', 'label': './labelsTr/MSWAL_0686.nii.gz'}, {'image': './imagesTr/MSWAL_0687_0000.nii.gz', 'label': './labelsTr/MSWAL_0687.nii.gz'}, {'image': './imagesTr/MSWAL_0688_0000.nii.gz', 'label': './labelsTr/MSWAL_0688.nii.gz'}, {'image': './imagesTr/MSWAL_0690_0000.nii.gz', 'label': './labelsTr/MSWAL_0690.nii.gz'}, {'image': './imagesTr/MSWAL_0692_0000.nii.gz', 'label': './labelsTr/MSWAL_0692.nii.gz'}, {'image': './imagesTr/MSWAL_0693_0000.nii.gz', 'label': './labelsTr/MSWAL_0693.nii.gz'}, 
{'image': './imagesTr/MSWAL_0694_0000.nii.gz', 'label': './labelsTr/MSWAL_0694.nii.gz'}], 'test': [{'image': './imagesTs/MSWAL_0004_0000.nii.gz', 'label': './labelsTs/MSWAL_0004.nii.gz'}, {'image': './imagesTs/MSWAL_0005_0000.nii.gz', 'label': './labelsTs/MSWAL_0005.nii.gz'}, {'image': './imagesTs/MSWAL_0006_0000.nii.gz', 'label': './labelsTs/MSWAL_0006.nii.gz'}, {'image': './imagesTs/MSWAL_0007_0000.nii.gz', 'label': './labelsTs/MSWAL_0007.nii.gz'}, {'image': './imagesTs/MSWAL_0010_0000.nii.gz', 'label': './labelsTs/MSWAL_0010.nii.gz'}, {'image': './imagesTs/MSWAL_0012_0000.nii.gz', 'label': './labelsTs/MSWAL_0012.nii.gz'}, {'image': './imagesTs/MSWAL_0016_0000.nii.gz', 'label': './labelsTs/MSWAL_0016.nii.gz'}, {'image': './imagesTs/MSWAL_0019_0000.nii.gz', 'label': './labelsTs/MSWAL_0019.nii.gz'}, {'image': './imagesTs/MSWAL_0023_0000.nii.gz', 'label': './labelsTs/MSWAL_0023.nii.gz'}, {'image': './imagesTs/MSWAL_0025_0000.nii.gz', 'label': './labelsTs/MSWAL_0025.nii.gz'}, {'image': './imagesTs/MSWAL_0030_0000.nii.gz', 'label': './labelsTs/MSWAL_0030.nii.gz'}, {'image': './imagesTs/MSWAL_0036_0000.nii.gz', 'label': './labelsTs/MSWAL_0036.nii.gz'}, {'image': './imagesTs/MSWAL_0043_0000.nii.gz', 'label': './labelsTs/MSWAL_0043.nii.gz'}, {'image': './imagesTs/MSWAL_0044_0000.nii.gz', 'label': './labelsTs/MSWAL_0044.nii.gz'}, {'image': './imagesTs/MSWAL_0047_0000.nii.gz', 'label': './labelsTs/MSWAL_0047.nii.gz'}, {'image': './imagesTs/MSWAL_0048_0000.nii.gz', 'label': './labelsTs/MSWAL_0048.nii.gz'}, {'image': './imagesTs/MSWAL_0053_0000.nii.gz', 'label': './labelsTs/MSWAL_0053.nii.gz'}, {'image': './imagesTs/MSWAL_0058_0000.nii.gz', 'label': './labelsTs/MSWAL_0058.nii.gz'}, {'image': './imagesTs/MSWAL_0062_0000.nii.gz', 'label': './labelsTs/MSWAL_0062.nii.gz'}, {'image': './imagesTs/MSWAL_0068_0000.nii.gz', 'label': './labelsTs/MSWAL_0068.nii.gz'}, {'image': './imagesTs/MSWAL_0070_0000.nii.gz', 'label': './labelsTs/MSWAL_0070.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0071_0000.nii.gz', 'label': './labelsTs/MSWAL_0071.nii.gz'}, {'image': './imagesTs/MSWAL_0073_0000.nii.gz', 'label': './labelsTs/MSWAL_0073.nii.gz'}, {'image': './imagesTs/MSWAL_0074_0000.nii.gz', 'label': './labelsTs/MSWAL_0074.nii.gz'}, {'image': './imagesTs/MSWAL_0076_0000.nii.gz', 'label': './labelsTs/MSWAL_0076.nii.gz'}, {'image': './imagesTs/MSWAL_0078_0000.nii.gz', 'label': './labelsTs/MSWAL_0078.nii.gz'}, {'image': './imagesTs/MSWAL_0079_0000.nii.gz', 'label': './labelsTs/MSWAL_0079.nii.gz'}, {'image': './imagesTs/MSWAL_0081_0000.nii.gz', 'label': './labelsTs/MSWAL_0081.nii.gz'}, {'image': './imagesTs/MSWAL_0087_0000.nii.gz', 'label': './labelsTs/MSWAL_0087.nii.gz'}, {'image': './imagesTs/MSWAL_0090_0000.nii.gz', 'label': './labelsTs/MSWAL_0090.nii.gz'}, {'image': './imagesTs/MSWAL_0091_0000.nii.gz', 'label': './labelsTs/MSWAL_0091.nii.gz'}, {'image': './imagesTs/MSWAL_0097_0000.nii.gz', 'label': './labelsTs/MSWAL_0097.nii.gz'}, {'image': './imagesTs/MSWAL_0100_0000.nii.gz', 'label': './labelsTs/MSWAL_0100.nii.gz'}, {'image': './imagesTs/MSWAL_0107_0000.nii.gz', 'label': './labelsTs/MSWAL_0107.nii.gz'}, {'image': './imagesTs/MSWAL_0115_0000.nii.gz', 'label': './labelsTs/MSWAL_0115.nii.gz'}, {'image': './imagesTs/MSWAL_0116_0000.nii.gz', 'label': './labelsTs/MSWAL_0116.nii.gz'}, {'image': './imagesTs/MSWAL_0118_0000.nii.gz', 'label': './labelsTs/MSWAL_0118.nii.gz'}, {'image': './imagesTs/MSWAL_0121_0000.nii.gz', 'label': './labelsTs/MSWAL_0121.nii.gz'}, {'image': './imagesTs/MSWAL_0123_0000.nii.gz', 'label': './labelsTs/MSWAL_0123.nii.gz'}, {'image': './imagesTs/MSWAL_0131_0000.nii.gz', 'label': './labelsTs/MSWAL_0131.nii.gz'}, {'image': './imagesTs/MSWAL_0135_0000.nii.gz', 'label': './labelsTs/MSWAL_0135.nii.gz'}, {'image': './imagesTs/MSWAL_0137_0000.nii.gz', 'label': './labelsTs/MSWAL_0137.nii.gz'}, {'image': './imagesTs/MSWAL_0144_0000.nii.gz', 'label': './labelsTs/MSWAL_0144.nii.gz'}, {'image': './imagesTs/MSWAL_0146_0000.nii.gz', 
'label': './labelsTs/MSWAL_0146.nii.gz'}, {'image': './imagesTs/MSWAL_0153_0000.nii.gz', 'label': './labelsTs/MSWAL_0153.nii.gz'}, {'image': './imagesTs/MSWAL_0154_0000.nii.gz', 'label': './labelsTs/MSWAL_0154.nii.gz'}, {'image': './imagesTs/MSWAL_0155_0000.nii.gz', 'label': './labelsTs/MSWAL_0155.nii.gz'}, {'image': './imagesTs/MSWAL_0156_0000.nii.gz', 'label': './labelsTs/MSWAL_0156.nii.gz'}, {'image': './imagesTs/MSWAL_0158_0000.nii.gz', 'label': './labelsTs/MSWAL_0158.nii.gz'}, {'image': './imagesTs/MSWAL_0160_0000.nii.gz', 'label': './labelsTs/MSWAL_0160.nii.gz'}, {'image': './imagesTs/MSWAL_0161_0000.nii.gz', 'label': './labelsTs/MSWAL_0161.nii.gz'}, {'image': './imagesTs/MSWAL_0164_0000.nii.gz', 'label': './labelsTs/MSWAL_0164.nii.gz'}, {'image': './imagesTs/MSWAL_0181_0000.nii.gz', 'label': './labelsTs/MSWAL_0181.nii.gz'}, {'image': './imagesTs/MSWAL_0190_0000.nii.gz', 'label': './labelsTs/MSWAL_0190.nii.gz'}, {'image': './imagesTs/MSWAL_0191_0000.nii.gz', 'label': './labelsTs/MSWAL_0191.nii.gz'}, {'image': './imagesTs/MSWAL_0192_0000.nii.gz', 'label': './labelsTs/MSWAL_0192.nii.gz'}, {'image': './imagesTs/MSWAL_0196_0000.nii.gz', 'label': './labelsTs/MSWAL_0196.nii.gz'}, {'image': './imagesTs/MSWAL_0197_0000.nii.gz', 'label': './labelsTs/MSWAL_0197.nii.gz'}, {'image': './imagesTs/MSWAL_0198_0000.nii.gz', 'label': './labelsTs/MSWAL_0198.nii.gz'}, {'image': './imagesTs/MSWAL_0200_0000.nii.gz', 'label': './labelsTs/MSWAL_0200.nii.gz'}, {'image': './imagesTs/MSWAL_0205_0000.nii.gz', 'label': './labelsTs/MSWAL_0205.nii.gz'}, {'image': './imagesTs/MSWAL_0206_0000.nii.gz', 'label': './labelsTs/MSWAL_0206.nii.gz'}, {'image': './imagesTs/MSWAL_0210_0000.nii.gz', 'label': './labelsTs/MSWAL_0210.nii.gz'}, {'image': './imagesTs/MSWAL_0211_0000.nii.gz', 'label': './labelsTs/MSWAL_0211.nii.gz'}, {'image': './imagesTs/MSWAL_0212_0000.nii.gz', 'label': './labelsTs/MSWAL_0212.nii.gz'}, {'image': './imagesTs/MSWAL_0213_0000.nii.gz', 'label': './labelsTs/MSWAL_0213.nii.gz'}, 
{'image': './imagesTs/MSWAL_0215_0000.nii.gz', 'label': './labelsTs/MSWAL_0215.nii.gz'}, {'image': './imagesTs/MSWAL_0216_0000.nii.gz', 'label': './labelsTs/MSWAL_0216.nii.gz'}, {'image': './imagesTs/MSWAL_0231_0000.nii.gz', 'label': './labelsTs/MSWAL_0231.nii.gz'}, {'image': './imagesTs/MSWAL_0232_0000.nii.gz', 'label': './labelsTs/MSWAL_0232.nii.gz'}, {'image': './imagesTs/MSWAL_0235_0000.nii.gz', 'label': './labelsTs/MSWAL_0235.nii.gz'}, {'image': './imagesTs/MSWAL_0236_0000.nii.gz', 'label': './labelsTs/MSWAL_0236.nii.gz'}, {'image': './imagesTs/MSWAL_0237_0000.nii.gz', 'label': './labelsTs/MSWAL_0237.nii.gz'}, {'image': './imagesTs/MSWAL_0239_0000.nii.gz', 'label': './labelsTs/MSWAL_0239.nii.gz'}, {'image': './imagesTs/MSWAL_0240_0000.nii.gz', 'label': './labelsTs/MSWAL_0240.nii.gz'}, {'image': './imagesTs/MSWAL_0244_0000.nii.gz', 'label': './labelsTs/MSWAL_0244.nii.gz'}, {'image': './imagesTs/MSWAL_0249_0000.nii.gz', 'label': './labelsTs/MSWAL_0249.nii.gz'}, {'image': './imagesTs/MSWAL_0250_0000.nii.gz', 'label': './labelsTs/MSWAL_0250.nii.gz'}, {'image': './imagesTs/MSWAL_0266_0000.nii.gz', 'label': './labelsTs/MSWAL_0266.nii.gz'}, {'image': './imagesTs/MSWAL_0268_0000.nii.gz', 'label': './labelsTs/MSWAL_0268.nii.gz'}, {'image': './imagesTs/MSWAL_0269_0000.nii.gz', 'label': './labelsTs/MSWAL_0269.nii.gz'}, {'image': './imagesTs/MSWAL_0280_0000.nii.gz', 'label': './labelsTs/MSWAL_0280.nii.gz'}, {'image': './imagesTs/MSWAL_0286_0000.nii.gz', 'label': './labelsTs/MSWAL_0286.nii.gz'}, {'image': './imagesTs/MSWAL_0287_0000.nii.gz', 'label': './labelsTs/MSWAL_0287.nii.gz'}, {'image': './imagesTs/MSWAL_0291_0000.nii.gz', 'label': './labelsTs/MSWAL_0291.nii.gz'}, {'image': './imagesTs/MSWAL_0292_0000.nii.gz', 'label': './labelsTs/MSWAL_0292.nii.gz'}, {'image': './imagesTs/MSWAL_0294_0000.nii.gz', 'label': './labelsTs/MSWAL_0294.nii.gz'}, {'image': './imagesTs/MSWAL_0295_0000.nii.gz', 'label': './labelsTs/MSWAL_0295.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0298_0000.nii.gz', 'label': './labelsTs/MSWAL_0298.nii.gz'}, {'image': './imagesTs/MSWAL_0299_0000.nii.gz', 'label': './labelsTs/MSWAL_0299.nii.gz'}, {'image': './imagesTs/MSWAL_0300_0000.nii.gz', 'label': './labelsTs/MSWAL_0300.nii.gz'}, {'image': './imagesTs/MSWAL_0304_0000.nii.gz', 'label': './labelsTs/MSWAL_0304.nii.gz'}, {'image': './imagesTs/MSWAL_0305_0000.nii.gz', 'label': './labelsTs/MSWAL_0305.nii.gz'}, {'image': './imagesTs/MSWAL_0309_0000.nii.gz', 'label': './labelsTs/MSWAL_0309.nii.gz'}, {'image': './imagesTs/MSWAL_0310_0000.nii.gz', 'label': './labelsTs/MSWAL_0310.nii.gz'}, {'image': './imagesTs/MSWAL_0315_0000.nii.gz', 'label': './labelsTs/MSWAL_0315.nii.gz'}, {'image': './imagesTs/MSWAL_0319_0000.nii.gz', 'label': './labelsTs/MSWAL_0319.nii.gz'}, {'image': './imagesTs/MSWAL_0321_0000.nii.gz', 'label': './labelsTs/MSWAL_0321.nii.gz'}, {'image': './imagesTs/MSWAL_0322_0000.nii.gz', 'label': './labelsTs/MSWAL_0322.nii.gz'}, {'image': './imagesTs/MSWAL_0325_0000.nii.gz', 'label': './labelsTs/MSWAL_0325.nii.gz'}, {'image': './imagesTs/MSWAL_0329_0000.nii.gz', 'label': './labelsTs/MSWAL_0329.nii.gz'}, {'image': './imagesTs/MSWAL_0339_0000.nii.gz', 'label': './labelsTs/MSWAL_0339.nii.gz'}, {'image': './imagesTs/MSWAL_0340_0000.nii.gz', 'label': './labelsTs/MSWAL_0340.nii.gz'}, {'image': './imagesTs/MSWAL_0347_0000.nii.gz', 'label': './labelsTs/MSWAL_0347.nii.gz'}, {'image': './imagesTs/MSWAL_0349_0000.nii.gz', 'label': './labelsTs/MSWAL_0349.nii.gz'}, {'image': './imagesTs/MSWAL_0350_0000.nii.gz', 'label': './labelsTs/MSWAL_0350.nii.gz'}, {'image': './imagesTs/MSWAL_0351_0000.nii.gz', 'label': './labelsTs/MSWAL_0351.nii.gz'}, {'image': './imagesTs/MSWAL_0352_0000.nii.gz', 'label': './labelsTs/MSWAL_0352.nii.gz'}, {'image': './imagesTs/MSWAL_0358_0000.nii.gz', 'label': './labelsTs/MSWAL_0358.nii.gz'}, {'image': './imagesTs/MSWAL_0359_0000.nii.gz', 'label': './labelsTs/MSWAL_0359.nii.gz'}, {'image': './imagesTs/MSWAL_0364_0000.nii.gz', 
'label': './labelsTs/MSWAL_0364.nii.gz'}, {'image': './imagesTs/MSWAL_0367_0000.nii.gz', 'label': './labelsTs/MSWAL_0367.nii.gz'}, {'image': './imagesTs/MSWAL_0368_0000.nii.gz', 'label': './labelsTs/MSWAL_0368.nii.gz'}, {'image': './imagesTs/MSWAL_0371_0000.nii.gz', 'label': './labelsTs/MSWAL_0371.nii.gz'}, {'image': './imagesTs/MSWAL_0372_0000.nii.gz', 'label': './labelsTs/MSWAL_0372.nii.gz'}, {'image': './imagesTs/MSWAL_0377_0000.nii.gz', 'label': './labelsTs/MSWAL_0377.nii.gz'}, {'image': './imagesTs/MSWAL_0383_0000.nii.gz', 'label': './labelsTs/MSWAL_0383.nii.gz'}, {'image': './imagesTs/MSWAL_0384_0000.nii.gz', 'label': './labelsTs/MSWAL_0384.nii.gz'}, {'image': './imagesTs/MSWAL_0385_0000.nii.gz', 'label': './labelsTs/MSWAL_0385.nii.gz'}, {'image': './imagesTs/MSWAL_0386_0000.nii.gz', 'label': './labelsTs/MSWAL_0386.nii.gz'}, {'image': './imagesTs/MSWAL_0394_0000.nii.gz', 'label': './labelsTs/MSWAL_0394.nii.gz'}, {'image': './imagesTs/MSWAL_0395_0000.nii.gz', 'label': './labelsTs/MSWAL_0395.nii.gz'}, {'image': './imagesTs/MSWAL_0396_0000.nii.gz', 'label': './labelsTs/MSWAL_0396.nii.gz'}, {'image': './imagesTs/MSWAL_0401_0000.nii.gz', 'label': './labelsTs/MSWAL_0401.nii.gz'}, {'image': './imagesTs/MSWAL_0404_0000.nii.gz', 'label': './labelsTs/MSWAL_0404.nii.gz'}, {'image': './imagesTs/MSWAL_0405_0000.nii.gz', 'label': './labelsTs/MSWAL_0405.nii.gz'}, {'image': './imagesTs/MSWAL_0406_0000.nii.gz', 'label': './labelsTs/MSWAL_0406.nii.gz'}, {'image': './imagesTs/MSWAL_0408_0000.nii.gz', 'label': './labelsTs/MSWAL_0408.nii.gz'}, {'image': './imagesTs/MSWAL_0413_0000.nii.gz', 'label': './labelsTs/MSWAL_0413.nii.gz'}, {'image': './imagesTs/MSWAL_0424_0000.nii.gz', 'label': './labelsTs/MSWAL_0424.nii.gz'}, {'image': './imagesTs/MSWAL_0433_0000.nii.gz', 'label': './labelsTs/MSWAL_0433.nii.gz'}, {'image': './imagesTs/MSWAL_0441_0000.nii.gz', 'label': './labelsTs/MSWAL_0441.nii.gz'}, {'image': './imagesTs/MSWAL_0443_0000.nii.gz', 'label': './labelsTs/MSWAL_0443.nii.gz'}, 
{'image': './imagesTs/MSWAL_0444_0000.nii.gz', 'label': './labelsTs/MSWAL_0444.nii.gz'}, {'image': './imagesTs/MSWAL_0445_0000.nii.gz', 'label': './labelsTs/MSWAL_0445.nii.gz'}, {'image': './imagesTs/MSWAL_0448_0000.nii.gz', 'label': './labelsTs/MSWAL_0448.nii.gz'}, {'image': './imagesTs/MSWAL_0449_0000.nii.gz', 'label': './labelsTs/MSWAL_0449.nii.gz'}, {'image': './imagesTs/MSWAL_0450_0000.nii.gz', 'label': './labelsTs/MSWAL_0450.nii.gz'}, {'image': './imagesTs/MSWAL_0451_0000.nii.gz', 'label': './labelsTs/MSWAL_0451.nii.gz'}, {'image': './imagesTs/MSWAL_0454_0000.nii.gz', 'label': './labelsTs/MSWAL_0454.nii.gz'}, {'image': './imagesTs/MSWAL_0456_0000.nii.gz', 'label': './labelsTs/MSWAL_0456.nii.gz'}, {'image': './imagesTs/MSWAL_0458_0000.nii.gz', 'label': './labelsTs/MSWAL_0458.nii.gz'}, {'image': './imagesTs/MSWAL_0459_0000.nii.gz', 'label': './labelsTs/MSWAL_0459.nii.gz'}, {'image': './imagesTs/MSWAL_0462_0000.nii.gz', 'label': './labelsTs/MSWAL_0462.nii.gz'}, {'image': './imagesTs/MSWAL_0467_0000.nii.gz', 'label': './labelsTs/MSWAL_0467.nii.gz'}, {'image': './imagesTs/MSWAL_0469_0000.nii.gz', 'label': './labelsTs/MSWAL_0469.nii.gz'}, {'image': './imagesTs/MSWAL_0472_0000.nii.gz', 'label': './labelsTs/MSWAL_0472.nii.gz'}, {'image': './imagesTs/MSWAL_0478_0000.nii.gz', 'label': './labelsTs/MSWAL_0478.nii.gz'}, {'image': './imagesTs/MSWAL_0481_0000.nii.gz', 'label': './labelsTs/MSWAL_0481.nii.gz'}, {'image': './imagesTs/MSWAL_0494_0000.nii.gz', 'label': './labelsTs/MSWAL_0494.nii.gz'}, {'image': './imagesTs/MSWAL_0496_0000.nii.gz', 'label': './labelsTs/MSWAL_0496.nii.gz'}, {'image': './imagesTs/MSWAL_0499_0000.nii.gz', 'label': './labelsTs/MSWAL_0499.nii.gz'}, {'image': './imagesTs/MSWAL_0502_0000.nii.gz', 'label': './labelsTs/MSWAL_0502.nii.gz'}, {'image': './imagesTs/MSWAL_0503_0000.nii.gz', 'label': './labelsTs/MSWAL_0503.nii.gz'}, {'image': './imagesTs/MSWAL_0511_0000.nii.gz', 'label': './labelsTs/MSWAL_0511.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0513_0000.nii.gz', 'label': './labelsTs/MSWAL_0513.nii.gz'}, {'image': './imagesTs/MSWAL_0514_0000.nii.gz', 'label': './labelsTs/MSWAL_0514.nii.gz'}, {'image': './imagesTs/MSWAL_0515_0000.nii.gz', 'label': './labelsTs/MSWAL_0515.nii.gz'}, {'image': './imagesTs/MSWAL_0517_0000.nii.gz', 'label': './labelsTs/MSWAL_0517.nii.gz'}, {'image': './imagesTs/MSWAL_0520_0000.nii.gz', 'label': './labelsTs/MSWAL_0520.nii.gz'}, {'image': './imagesTs/MSWAL_0525_0000.nii.gz', 'label': './labelsTs/MSWAL_0525.nii.gz'}, {'image': './imagesTs/MSWAL_0528_0000.nii.gz', 'label': './labelsTs/MSWAL_0528.nii.gz'}, {'image': './imagesTs/MSWAL_0529_0000.nii.gz', 'label': './labelsTs/MSWAL_0529.nii.gz'}, {'image': './imagesTs/MSWAL_0532_0000.nii.gz', 'label': './labelsTs/MSWAL_0532.nii.gz'}, {'image': './imagesTs/MSWAL_0533_0000.nii.gz', 'label': './labelsTs/MSWAL_0533.nii.gz'}, {'image': './imagesTs/MSWAL_0537_0000.nii.gz', 'label': './labelsTs/MSWAL_0537.nii.gz'}, {'image': './imagesTs/MSWAL_0541_0000.nii.gz', 'label': './labelsTs/MSWAL_0541.nii.gz'}, {'image': './imagesTs/MSWAL_0543_0000.nii.gz', 'label': './labelsTs/MSWAL_0543.nii.gz'}, {'image': './imagesTs/MSWAL_0560_0000.nii.gz', 'label': './labelsTs/MSWAL_0560.nii.gz'}, {'image': './imagesTs/MSWAL_0565_0000.nii.gz', 'label': './labelsTs/MSWAL_0565.nii.gz'}, {'image': './imagesTs/MSWAL_0569_0000.nii.gz', 'label': './labelsTs/MSWAL_0569.nii.gz'}, {'image': './imagesTs/MSWAL_0570_0000.nii.gz', 'label': './labelsTs/MSWAL_0570.nii.gz'}, {'image': './imagesTs/MSWAL_0572_0000.nii.gz', 'label': './labelsTs/MSWAL_0572.nii.gz'}, {'image': './imagesTs/MSWAL_0576_0000.nii.gz', 'label': './labelsTs/MSWAL_0576.nii.gz'}, {'image': './imagesTs/MSWAL_0585_0000.nii.gz', 'label': './labelsTs/MSWAL_0585.nii.gz'}, {'image': './imagesTs/MSWAL_0587_0000.nii.gz', 'label': './labelsTs/MSWAL_0587.nii.gz'}, {'image': './imagesTs/MSWAL_0588_0000.nii.gz', 'label': './labelsTs/MSWAL_0588.nii.gz'}, {'image': './imagesTs/MSWAL_0589_0000.nii.gz', 
'label': './labelsTs/MSWAL_0589.nii.gz'}, {'image': './imagesTs/MSWAL_0594_0000.nii.gz', 'label': './labelsTs/MSWAL_0594.nii.gz'}, {'image': './imagesTs/MSWAL_0603_0000.nii.gz', 'label': './labelsTs/MSWAL_0603.nii.gz'}, {'image': './imagesTs/MSWAL_0606_0000.nii.gz', 'label': './labelsTs/MSWAL_0606.nii.gz'}, {'image': './imagesTs/MSWAL_0607_0000.nii.gz', 'label': './labelsTs/MSWAL_0607.nii.gz'}, {'image': './imagesTs/MSWAL_0609_0000.nii.gz', 'label': './labelsTs/MSWAL_0609.nii.gz'}, {'image': './imagesTs/MSWAL_0610_0000.nii.gz', 'label': './labelsTs/MSWAL_0610.nii.gz'}, {'image': './imagesTs/MSWAL_0611_0000.nii.gz', 'label': './labelsTs/MSWAL_0611.nii.gz'}, {'image': './imagesTs/MSWAL_0613_0000.nii.gz', 'label': './labelsTs/MSWAL_0613.nii.gz'}, {'image': './imagesTs/MSWAL_0618_0000.nii.gz', 'label': './labelsTs/MSWAL_0618.nii.gz'}, {'image': './imagesTs/MSWAL_0619_0000.nii.gz', 'label': './labelsTs/MSWAL_0619.nii.gz'}, {'image': './imagesTs/MSWAL_0620_0000.nii.gz', 'label': './labelsTs/MSWAL_0620.nii.gz'}, {'image': './imagesTs/MSWAL_0622_0000.nii.gz', 'label': './labelsTs/MSWAL_0622.nii.gz'}, {'image': './imagesTs/MSWAL_0624_0000.nii.gz', 'label': './labelsTs/MSWAL_0624.nii.gz'}, {'image': './imagesTs/MSWAL_0631_0000.nii.gz', 'label': './labelsTs/MSWAL_0631.nii.gz'}, {'image': './imagesTs/MSWAL_0633_0000.nii.gz', 'label': './labelsTs/MSWAL_0633.nii.gz'}, {'image': './imagesTs/MSWAL_0634_0000.nii.gz', 'label': './labelsTs/MSWAL_0634.nii.gz'}, {'image': './imagesTs/MSWAL_0637_0000.nii.gz', 'label': './labelsTs/MSWAL_0637.nii.gz'}, {'image': './imagesTs/MSWAL_0639_0000.nii.gz', 'label': './labelsTs/MSWAL_0639.nii.gz'}, {'image': './imagesTs/MSWAL_0642_0000.nii.gz', 'label': './labelsTs/MSWAL_0642.nii.gz'}, {'image': './imagesTs/MSWAL_0645_0000.nii.gz', 'label': './labelsTs/MSWAL_0645.nii.gz'}, {'image': './imagesTs/MSWAL_0647_0000.nii.gz', 'label': './labelsTs/MSWAL_0647.nii.gz'}, {'image': './imagesTs/MSWAL_0652_0000.nii.gz', 'label': './labelsTs/MSWAL_0652.nii.gz'}, 
{'image': './imagesTs/MSWAL_0657_0000.nii.gz', 'label': './labelsTs/MSWAL_0657.nii.gz'}, {'image': './imagesTs/MSWAL_0659_0000.nii.gz', 'label': './labelsTs/MSWAL_0659.nii.gz'}, {'image': './imagesTs/MSWAL_0664_0000.nii.gz', 'label': './labelsTs/MSWAL_0664.nii.gz'}, {'image': './imagesTs/MSWAL_0665_0000.nii.gz', 'label': './labelsTs/MSWAL_0665.nii.gz'}, {'image': './imagesTs/MSWAL_0672_0000.nii.gz', 'label': './labelsTs/MSWAL_0672.nii.gz'}, {'image': './imagesTs/MSWAL_0678_0000.nii.gz', 'label': './labelsTs/MSWAL_0678.nii.gz'}, {'image': './imagesTs/MSWAL_0683_0000.nii.gz', 'label': './labelsTs/MSWAL_0683.nii.gz'}, {'image': './imagesTs/MSWAL_0684_0000.nii.gz', 'label': './labelsTs/MSWAL_0684.nii.gz'}, {'image': './imagesTs/MSWAL_0689_0000.nii.gz', 'label': './labelsTs/MSWAL_0689.nii.gz'}, {'image': './imagesTs/MSWAL_0691_0000.nii.gz', 'label': './labelsTs/MSWAL_0691.nii.gz'}]}", + "device": "cuda:0", + "disable_checkpointing": "False", + "enable_deep_supervision": "True", + "fold": "0", + "folder_with_segs_from_previous_stage": "None", + "gpu_name": "NVIDIA A100-SXM4-80GB", + "grad_scaler": "", + "hostname": "cn1105", + "inference_allowed_mirroring_axes": "(0, 1, 2)", + "initial_lr": "0.01", + "is_cascaded": "False", + "is_ddp": "False", + "label_manager": "", + "local_rank": "0", + "log_file": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/training_log_2026_4_8_15_43_26.txt", + "logger": "", + "loss": "DeepSupervisionWrapper(\n (loss): DC_and_CE_loss(\n (ce): RobustCrossEntropyLoss()\n (dc): OptimizedModule(\n (_orig_mod): MemoryEfficientSoftDiceLoss()\n )\n )\n)", + "lr_scheduler": "", + "my_init_kwargs": "{'plans': {'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 
1, 2], 'configurations': {'2d': {'data_identifier': 'nnUNetPlans_2d', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 35, 'patch_size': [512, 512], 'median_image_size_in_voxels': [512.0, 512.0], 'spacing': [0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 8, 'features_per_stage': [32, 64, 128, 256, 512, 512, 512, 512], 'conv_op': 'torch.nn.modules.conv.Conv2d', 'kernel_sizes': [[3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3]], 'strides': [[1, 1], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm2d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_lowres': {'data_identifier': 'nnUNetResEncUNetLPlans_3d_lowres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [190, 381, 381], 'spacing': [1.6798954741801528, 1.0079372845080916, 1.0079372845080916], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 
'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': False, 'next_stage': '3d_cascade_fullres'}, '3d_fullres': {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 
'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_cascade_fullres': {'inherits_from': '3d_fullres', 'previous_stage': '3d_lowres'}}, 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}}, 'configuration': '3d_fullres', 'fold': 0, 'dataset_json': {'name': 'MSWAL', 'description': ' 3D Multi-class Segmentation of Whole Abdominal Lesions Dataset', 'licence': 'CC BY-NC 4.0', 'relase': 'July 8, 2025', 'tensorImageSize': '3D', 'file_ending': '.nii.gz', 'channel_names': {'0': 'CT'}, 'labels': {'background': 0, 'gallstone': 1, 'kidney stone': 2, 'liver tumor': 3, 'kidney tumor': 4, 'pancreatic cancer': 5, 'liver cyst': 6, 'kidney cyst': 7}, 'numTraining': 484, 'numTest': 210, 'training': [{'image': './imagesTr/MSWAL_0001_0000.nii.gz', 'label': './labelsTr/MSWAL_0001.nii.gz'}, 
{'image': './imagesTr/MSWAL_0002_0000.nii.gz', 'label': './labelsTr/MSWAL_0002.nii.gz'}, {'image': './imagesTr/MSWAL_0003_0000.nii.gz', 'label': './labelsTr/MSWAL_0003.nii.gz'}, {'image': './imagesTr/MSWAL_0008_0000.nii.gz', 'label': './labelsTr/MSWAL_0008.nii.gz'}, {'image': './imagesTr/MSWAL_0009_0000.nii.gz', 'label': './labelsTr/MSWAL_0009.nii.gz'}, {'image': './imagesTr/MSWAL_0011_0000.nii.gz', 'label': './labelsTr/MSWAL_0011.nii.gz'}, {'image': './imagesTr/MSWAL_0013_0000.nii.gz', 'label': './labelsTr/MSWAL_0013.nii.gz'}, {'image': './imagesTr/MSWAL_0014_0000.nii.gz', 'label': './labelsTr/MSWAL_0014.nii.gz'}, {'image': './imagesTr/MSWAL_0015_0000.nii.gz', 'label': './labelsTr/MSWAL_0015.nii.gz'}, {'image': './imagesTr/MSWAL_0017_0000.nii.gz', 'label': './labelsTr/MSWAL_0017.nii.gz'}, {'image': './imagesTr/MSWAL_0018_0000.nii.gz', 'label': './labelsTr/MSWAL_0018.nii.gz'}, {'image': './imagesTr/MSWAL_0020_0000.nii.gz', 'label': './labelsTr/MSWAL_0020.nii.gz'}, {'image': './imagesTr/MSWAL_0021_0000.nii.gz', 'label': './labelsTr/MSWAL_0021.nii.gz'}, {'image': './imagesTr/MSWAL_0022_0000.nii.gz', 'label': './labelsTr/MSWAL_0022.nii.gz'}, {'image': './imagesTr/MSWAL_0024_0000.nii.gz', 'label': './labelsTr/MSWAL_0024.nii.gz'}, {'image': './imagesTr/MSWAL_0026_0000.nii.gz', 'label': './labelsTr/MSWAL_0026.nii.gz'}, {'image': './imagesTr/MSWAL_0027_0000.nii.gz', 'label': './labelsTr/MSWAL_0027.nii.gz'}, {'image': './imagesTr/MSWAL_0028_0000.nii.gz', 'label': './labelsTr/MSWAL_0028.nii.gz'}, {'image': './imagesTr/MSWAL_0029_0000.nii.gz', 'label': './labelsTr/MSWAL_0029.nii.gz'}, {'image': './imagesTr/MSWAL_0031_0000.nii.gz', 'label': './labelsTr/MSWAL_0031.nii.gz'}, {'image': './imagesTr/MSWAL_0032_0000.nii.gz', 'label': './labelsTr/MSWAL_0032.nii.gz'}, {'image': './imagesTr/MSWAL_0033_0000.nii.gz', 'label': './labelsTr/MSWAL_0033.nii.gz'}, {'image': './imagesTr/MSWAL_0034_0000.nii.gz', 'label': './labelsTr/MSWAL_0034.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0035_0000.nii.gz', 'label': './labelsTr/MSWAL_0035.nii.gz'}, {'image': './imagesTr/MSWAL_0037_0000.nii.gz', 'label': './labelsTr/MSWAL_0037.nii.gz'}, {'image': './imagesTr/MSWAL_0038_0000.nii.gz', 'label': './labelsTr/MSWAL_0038.nii.gz'}, {'image': './imagesTr/MSWAL_0039_0000.nii.gz', 'label': './labelsTr/MSWAL_0039.nii.gz'}, {'image': './imagesTr/MSWAL_0040_0000.nii.gz', 'label': './labelsTr/MSWAL_0040.nii.gz'}, {'image': './imagesTr/MSWAL_0041_0000.nii.gz', 'label': './labelsTr/MSWAL_0041.nii.gz'}, {'image': './imagesTr/MSWAL_0042_0000.nii.gz', 'label': './labelsTr/MSWAL_0042.nii.gz'}, {'image': './imagesTr/MSWAL_0045_0000.nii.gz', 'label': './labelsTr/MSWAL_0045.nii.gz'}, {'image': './imagesTr/MSWAL_0046_0000.nii.gz', 'label': './labelsTr/MSWAL_0046.nii.gz'}, {'image': './imagesTr/MSWAL_0049_0000.nii.gz', 'label': './labelsTr/MSWAL_0049.nii.gz'}, {'image': './imagesTr/MSWAL_0050_0000.nii.gz', 'label': './labelsTr/MSWAL_0050.nii.gz'}, {'image': './imagesTr/MSWAL_0051_0000.nii.gz', 'label': './labelsTr/MSWAL_0051.nii.gz'}, {'image': './imagesTr/MSWAL_0052_0000.nii.gz', 'label': './labelsTr/MSWAL_0052.nii.gz'}, {'image': './imagesTr/MSWAL_0054_0000.nii.gz', 'label': './labelsTr/MSWAL_0054.nii.gz'}, {'image': './imagesTr/MSWAL_0055_0000.nii.gz', 'label': './labelsTr/MSWAL_0055.nii.gz'}, {'image': './imagesTr/MSWAL_0056_0000.nii.gz', 'label': './labelsTr/MSWAL_0056.nii.gz'}, {'image': './imagesTr/MSWAL_0057_0000.nii.gz', 'label': './labelsTr/MSWAL_0057.nii.gz'}, {'image': './imagesTr/MSWAL_0059_0000.nii.gz', 'label': './labelsTr/MSWAL_0059.nii.gz'}, {'image': './imagesTr/MSWAL_0060_0000.nii.gz', 'label': './labelsTr/MSWAL_0060.nii.gz'}, {'image': './imagesTr/MSWAL_0061_0000.nii.gz', 'label': './labelsTr/MSWAL_0061.nii.gz'}, {'image': './imagesTr/MSWAL_0063_0000.nii.gz', 'label': './labelsTr/MSWAL_0063.nii.gz'}, {'image': './imagesTr/MSWAL_0064_0000.nii.gz', 'label': './labelsTr/MSWAL_0064.nii.gz'}, {'image': './imagesTr/MSWAL_0065_0000.nii.gz', 
'label': './labelsTr/MSWAL_0065.nii.gz'}, {'image': './imagesTr/MSWAL_0066_0000.nii.gz', 'label': './labelsTr/MSWAL_0066.nii.gz'}, {'image': './imagesTr/MSWAL_0067_0000.nii.gz', 'label': './labelsTr/MSWAL_0067.nii.gz'}, {'image': './imagesTr/MSWAL_0069_0000.nii.gz', 'label': './labelsTr/MSWAL_0069.nii.gz'}, {'image': './imagesTr/MSWAL_0072_0000.nii.gz', 'label': './labelsTr/MSWAL_0072.nii.gz'}, {'image': './imagesTr/MSWAL_0075_0000.nii.gz', 'label': './labelsTr/MSWAL_0075.nii.gz'}, {'image': './imagesTr/MSWAL_0077_0000.nii.gz', 'label': './labelsTr/MSWAL_0077.nii.gz'}, {'image': './imagesTr/MSWAL_0080_0000.nii.gz', 'label': './labelsTr/MSWAL_0080.nii.gz'}, {'image': './imagesTr/MSWAL_0082_0000.nii.gz', 'label': './labelsTr/MSWAL_0082.nii.gz'}, {'image': './imagesTr/MSWAL_0083_0000.nii.gz', 'label': './labelsTr/MSWAL_0083.nii.gz'}, {'image': './imagesTr/MSWAL_0084_0000.nii.gz', 'label': './labelsTr/MSWAL_0084.nii.gz'}, {'image': './imagesTr/MSWAL_0085_0000.nii.gz', 'label': './labelsTr/MSWAL_0085.nii.gz'}, {'image': './imagesTr/MSWAL_0086_0000.nii.gz', 'label': './labelsTr/MSWAL_0086.nii.gz'}, {'image': './imagesTr/MSWAL_0088_0000.nii.gz', 'label': './labelsTr/MSWAL_0088.nii.gz'}, {'image': './imagesTr/MSWAL_0089_0000.nii.gz', 'label': './labelsTr/MSWAL_0089.nii.gz'}, {'image': './imagesTr/MSWAL_0092_0000.nii.gz', 'label': './labelsTr/MSWAL_0092.nii.gz'}, {'image': './imagesTr/MSWAL_0093_0000.nii.gz', 'label': './labelsTr/MSWAL_0093.nii.gz'}, {'image': './imagesTr/MSWAL_0094_0000.nii.gz', 'label': './labelsTr/MSWAL_0094.nii.gz'}, {'image': './imagesTr/MSWAL_0095_0000.nii.gz', 'label': './labelsTr/MSWAL_0095.nii.gz'}, {'image': './imagesTr/MSWAL_0096_0000.nii.gz', 'label': './labelsTr/MSWAL_0096.nii.gz'}, {'image': './imagesTr/MSWAL_0098_0000.nii.gz', 'label': './labelsTr/MSWAL_0098.nii.gz'}, {'image': './imagesTr/MSWAL_0099_0000.nii.gz', 'label': './labelsTr/MSWAL_0099.nii.gz'}, {'image': './imagesTr/MSWAL_0101_0000.nii.gz', 'label': './labelsTr/MSWAL_0101.nii.gz'}, 
{'image': './imagesTr/MSWAL_0102_0000.nii.gz', 'label': './labelsTr/MSWAL_0102.nii.gz'}, {'image': './imagesTr/MSWAL_0103_0000.nii.gz', 'label': './labelsTr/MSWAL_0103.nii.gz'}, {'image': './imagesTr/MSWAL_0104_0000.nii.gz', 'label': './labelsTr/MSWAL_0104.nii.gz'}, {'image': './imagesTr/MSWAL_0105_0000.nii.gz', 'label': './labelsTr/MSWAL_0105.nii.gz'}, {'image': './imagesTr/MSWAL_0106_0000.nii.gz', 'label': './labelsTr/MSWAL_0106.nii.gz'}, {'image': './imagesTr/MSWAL_0108_0000.nii.gz', 'label': './labelsTr/MSWAL_0108.nii.gz'}, {'image': './imagesTr/MSWAL_0109_0000.nii.gz', 'label': './labelsTr/MSWAL_0109.nii.gz'}, {'image': './imagesTr/MSWAL_0110_0000.nii.gz', 'label': './labelsTr/MSWAL_0110.nii.gz'}, {'image': './imagesTr/MSWAL_0111_0000.nii.gz', 'label': './labelsTr/MSWAL_0111.nii.gz'}, {'image': './imagesTr/MSWAL_0112_0000.nii.gz', 'label': './labelsTr/MSWAL_0112.nii.gz'}, {'image': './imagesTr/MSWAL_0113_0000.nii.gz', 'label': './labelsTr/MSWAL_0113.nii.gz'}, {'image': './imagesTr/MSWAL_0114_0000.nii.gz', 'label': './labelsTr/MSWAL_0114.nii.gz'}, {'image': './imagesTr/MSWAL_0117_0000.nii.gz', 'label': './labelsTr/MSWAL_0117.nii.gz'}, {'image': './imagesTr/MSWAL_0119_0000.nii.gz', 'label': './labelsTr/MSWAL_0119.nii.gz'}, {'image': './imagesTr/MSWAL_0120_0000.nii.gz', 'label': './labelsTr/MSWAL_0120.nii.gz'}, {'image': './imagesTr/MSWAL_0122_0000.nii.gz', 'label': './labelsTr/MSWAL_0122.nii.gz'}, {'image': './imagesTr/MSWAL_0124_0000.nii.gz', 'label': './labelsTr/MSWAL_0124.nii.gz'}, {'image': './imagesTr/MSWAL_0125_0000.nii.gz', 'label': './labelsTr/MSWAL_0125.nii.gz'}, {'image': './imagesTr/MSWAL_0126_0000.nii.gz', 'label': './labelsTr/MSWAL_0126.nii.gz'}, {'image': './imagesTr/MSWAL_0127_0000.nii.gz', 'label': './labelsTr/MSWAL_0127.nii.gz'}, {'image': './imagesTr/MSWAL_0128_0000.nii.gz', 'label': './labelsTr/MSWAL_0128.nii.gz'}, {'image': './imagesTr/MSWAL_0129_0000.nii.gz', 'label': './labelsTr/MSWAL_0129.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0130_0000.nii.gz', 'label': './labelsTr/MSWAL_0130.nii.gz'}, {'image': './imagesTr/MSWAL_0132_0000.nii.gz', 'label': './labelsTr/MSWAL_0132.nii.gz'}, {'image': './imagesTr/MSWAL_0133_0000.nii.gz', 'label': './labelsTr/MSWAL_0133.nii.gz'}, {'image': './imagesTr/MSWAL_0134_0000.nii.gz', 'label': './labelsTr/MSWAL_0134.nii.gz'}, {'image': './imagesTr/MSWAL_0136_0000.nii.gz', 'label': './labelsTr/MSWAL_0136.nii.gz'}, {'image': './imagesTr/MSWAL_0138_0000.nii.gz', 'label': './labelsTr/MSWAL_0138.nii.gz'}, {'image': './imagesTr/MSWAL_0139_0000.nii.gz', 'label': './labelsTr/MSWAL_0139.nii.gz'}, {'image': './imagesTr/MSWAL_0140_0000.nii.gz', 'label': './labelsTr/MSWAL_0140.nii.gz'}, {'image': './imagesTr/MSWAL_0141_0000.nii.gz', 'label': './labelsTr/MSWAL_0141.nii.gz'}, {'image': './imagesTr/MSWAL_0142_0000.nii.gz', 'label': './labelsTr/MSWAL_0142.nii.gz'}, {'image': './imagesTr/MSWAL_0143_0000.nii.gz', 'label': './labelsTr/MSWAL_0143.nii.gz'}, {'image': './imagesTr/MSWAL_0145_0000.nii.gz', 'label': './labelsTr/MSWAL_0145.nii.gz'}, {'image': './imagesTr/MSWAL_0147_0000.nii.gz', 'label': './labelsTr/MSWAL_0147.nii.gz'}, {'image': './imagesTr/MSWAL_0148_0000.nii.gz', 'label': './labelsTr/MSWAL_0148.nii.gz'}, {'image': './imagesTr/MSWAL_0149_0000.nii.gz', 'label': './labelsTr/MSWAL_0149.nii.gz'}, {'image': './imagesTr/MSWAL_0150_0000.nii.gz', 'label': './labelsTr/MSWAL_0150.nii.gz'}, {'image': './imagesTr/MSWAL_0151_0000.nii.gz', 'label': './labelsTr/MSWAL_0151.nii.gz'}, {'image': './imagesTr/MSWAL_0152_0000.nii.gz', 'label': './labelsTr/MSWAL_0152.nii.gz'}, {'image': './imagesTr/MSWAL_0157_0000.nii.gz', 'label': './labelsTr/MSWAL_0157.nii.gz'}, {'image': './imagesTr/MSWAL_0159_0000.nii.gz', 'label': './labelsTr/MSWAL_0159.nii.gz'}, {'image': './imagesTr/MSWAL_0162_0000.nii.gz', 'label': './labelsTr/MSWAL_0162.nii.gz'}, {'image': './imagesTr/MSWAL_0163_0000.nii.gz', 'label': './labelsTr/MSWAL_0163.nii.gz'}, {'image': './imagesTr/MSWAL_0165_0000.nii.gz', 
'label': './labelsTr/MSWAL_0165.nii.gz'}, {'image': './imagesTr/MSWAL_0166_0000.nii.gz', 'label': './labelsTr/MSWAL_0166.nii.gz'}, {'image': './imagesTr/MSWAL_0167_0000.nii.gz', 'label': './labelsTr/MSWAL_0167.nii.gz'}, {'image': './imagesTr/MSWAL_0168_0000.nii.gz', 'label': './labelsTr/MSWAL_0168.nii.gz'}, {'image': './imagesTr/MSWAL_0169_0000.nii.gz', 'label': './labelsTr/MSWAL_0169.nii.gz'}, {'image': './imagesTr/MSWAL_0170_0000.nii.gz', 'label': './labelsTr/MSWAL_0170.nii.gz'}, {'image': './imagesTr/MSWAL_0171_0000.nii.gz', 'label': './labelsTr/MSWAL_0171.nii.gz'}, {'image': './imagesTr/MSWAL_0172_0000.nii.gz', 'label': './labelsTr/MSWAL_0172.nii.gz'}, {'image': './imagesTr/MSWAL_0173_0000.nii.gz', 'label': './labelsTr/MSWAL_0173.nii.gz'}, {'image': './imagesTr/MSWAL_0174_0000.nii.gz', 'label': './labelsTr/MSWAL_0174.nii.gz'}, {'image': './imagesTr/MSWAL_0175_0000.nii.gz', 'label': './labelsTr/MSWAL_0175.nii.gz'}, {'image': './imagesTr/MSWAL_0176_0000.nii.gz', 'label': './labelsTr/MSWAL_0176.nii.gz'}, {'image': './imagesTr/MSWAL_0177_0000.nii.gz', 'label': './labelsTr/MSWAL_0177.nii.gz'}, {'image': './imagesTr/MSWAL_0178_0000.nii.gz', 'label': './labelsTr/MSWAL_0178.nii.gz'}, {'image': './imagesTr/MSWAL_0179_0000.nii.gz', 'label': './labelsTr/MSWAL_0179.nii.gz'}, {'image': './imagesTr/MSWAL_0180_0000.nii.gz', 'label': './labelsTr/MSWAL_0180.nii.gz'}, {'image': './imagesTr/MSWAL_0182_0000.nii.gz', 'label': './labelsTr/MSWAL_0182.nii.gz'}, {'image': './imagesTr/MSWAL_0183_0000.nii.gz', 'label': './labelsTr/MSWAL_0183.nii.gz'}, {'image': './imagesTr/MSWAL_0184_0000.nii.gz', 'label': './labelsTr/MSWAL_0184.nii.gz'}, {'image': './imagesTr/MSWAL_0185_0000.nii.gz', 'label': './labelsTr/MSWAL_0185.nii.gz'}, {'image': './imagesTr/MSWAL_0186_0000.nii.gz', 'label': './labelsTr/MSWAL_0186.nii.gz'}, {'image': './imagesTr/MSWAL_0187_0000.nii.gz', 'label': './labelsTr/MSWAL_0187.nii.gz'}, {'image': './imagesTr/MSWAL_0188_0000.nii.gz', 'label': './labelsTr/MSWAL_0188.nii.gz'}, 
{'image': './imagesTr/MSWAL_0189_0000.nii.gz', 'label': './labelsTr/MSWAL_0189.nii.gz'}, {'image': './imagesTr/MSWAL_0193_0000.nii.gz', 'label': './labelsTr/MSWAL_0193.nii.gz'}, {'image': './imagesTr/MSWAL_0194_0000.nii.gz', 'label': './labelsTr/MSWAL_0194.nii.gz'}, {'image': './imagesTr/MSWAL_0195_0000.nii.gz', 'label': './labelsTr/MSWAL_0195.nii.gz'}, {'image': './imagesTr/MSWAL_0199_0000.nii.gz', 'label': './labelsTr/MSWAL_0199.nii.gz'}, {'image': './imagesTr/MSWAL_0201_0000.nii.gz', 'label': './labelsTr/MSWAL_0201.nii.gz'}, {'image': './imagesTr/MSWAL_0202_0000.nii.gz', 'label': './labelsTr/MSWAL_0202.nii.gz'}, {'image': './imagesTr/MSWAL_0203_0000.nii.gz', 'label': './labelsTr/MSWAL_0203.nii.gz'}, {'image': './imagesTr/MSWAL_0204_0000.nii.gz', 'label': './labelsTr/MSWAL_0204.nii.gz'}, {'image': './imagesTr/MSWAL_0207_0000.nii.gz', 'label': './labelsTr/MSWAL_0207.nii.gz'}, {'image': './imagesTr/MSWAL_0208_0000.nii.gz', 'label': './labelsTr/MSWAL_0208.nii.gz'}, {'image': './imagesTr/MSWAL_0209_0000.nii.gz', 'label': './labelsTr/MSWAL_0209.nii.gz'}, {'image': './imagesTr/MSWAL_0214_0000.nii.gz', 'label': './labelsTr/MSWAL_0214.nii.gz'}, {'image': './imagesTr/MSWAL_0217_0000.nii.gz', 'label': './labelsTr/MSWAL_0217.nii.gz'}, {'image': './imagesTr/MSWAL_0218_0000.nii.gz', 'label': './labelsTr/MSWAL_0218.nii.gz'}, {'image': './imagesTr/MSWAL_0219_0000.nii.gz', 'label': './labelsTr/MSWAL_0219.nii.gz'}, {'image': './imagesTr/MSWAL_0220_0000.nii.gz', 'label': './labelsTr/MSWAL_0220.nii.gz'}, {'image': './imagesTr/MSWAL_0221_0000.nii.gz', 'label': './labelsTr/MSWAL_0221.nii.gz'}, {'image': './imagesTr/MSWAL_0222_0000.nii.gz', 'label': './labelsTr/MSWAL_0222.nii.gz'}, {'image': './imagesTr/MSWAL_0223_0000.nii.gz', 'label': './labelsTr/MSWAL_0223.nii.gz'}, {'image': './imagesTr/MSWAL_0224_0000.nii.gz', 'label': './labelsTr/MSWAL_0224.nii.gz'}, {'image': './imagesTr/MSWAL_0225_0000.nii.gz', 'label': './labelsTr/MSWAL_0225.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0226_0000.nii.gz', 'label': './labelsTr/MSWAL_0226.nii.gz'}, {'image': './imagesTr/MSWAL_0227_0000.nii.gz', 'label': './labelsTr/MSWAL_0227.nii.gz'}, {'image': './imagesTr/MSWAL_0228_0000.nii.gz', 'label': './labelsTr/MSWAL_0228.nii.gz'}, {'image': './imagesTr/MSWAL_0229_0000.nii.gz', 'label': './labelsTr/MSWAL_0229.nii.gz'}, {'image': './imagesTr/MSWAL_0230_0000.nii.gz', 'label': './labelsTr/MSWAL_0230.nii.gz'}, {'image': './imagesTr/MSWAL_0233_0000.nii.gz', 'label': './labelsTr/MSWAL_0233.nii.gz'}, {'image': './imagesTr/MSWAL_0234_0000.nii.gz', 'label': './labelsTr/MSWAL_0234.nii.gz'}, {'image': './imagesTr/MSWAL_0238_0000.nii.gz', 'label': './labelsTr/MSWAL_0238.nii.gz'}, {'image': './imagesTr/MSWAL_0241_0000.nii.gz', 'label': './labelsTr/MSWAL_0241.nii.gz'}, {'image': './imagesTr/MSWAL_0242_0000.nii.gz', 'label': './labelsTr/MSWAL_0242.nii.gz'}, {'image': './imagesTr/MSWAL_0243_0000.nii.gz', 'label': './labelsTr/MSWAL_0243.nii.gz'}, {'image': './imagesTr/MSWAL_0245_0000.nii.gz', 'label': './labelsTr/MSWAL_0245.nii.gz'}, {'image': './imagesTr/MSWAL_0246_0000.nii.gz', 'label': './labelsTr/MSWAL_0246.nii.gz'}, {'image': './imagesTr/MSWAL_0247_0000.nii.gz', 'label': './labelsTr/MSWAL_0247.nii.gz'}, {'image': './imagesTr/MSWAL_0248_0000.nii.gz', 'label': './labelsTr/MSWAL_0248.nii.gz'}, {'image': './imagesTr/MSWAL_0251_0000.nii.gz', 'label': './labelsTr/MSWAL_0251.nii.gz'}, {'image': './imagesTr/MSWAL_0252_0000.nii.gz', 'label': './labelsTr/MSWAL_0252.nii.gz'}, {'image': './imagesTr/MSWAL_0253_0000.nii.gz', 'label': './labelsTr/MSWAL_0253.nii.gz'}, {'image': './imagesTr/MSWAL_0254_0000.nii.gz', 'label': './labelsTr/MSWAL_0254.nii.gz'}, {'image': './imagesTr/MSWAL_0255_0000.nii.gz', 'label': './labelsTr/MSWAL_0255.nii.gz'}, {'image': './imagesTr/MSWAL_0256_0000.nii.gz', 'label': './labelsTr/MSWAL_0256.nii.gz'}, {'image': './imagesTr/MSWAL_0257_0000.nii.gz', 'label': './labelsTr/MSWAL_0257.nii.gz'}, {'image': './imagesTr/MSWAL_0258_0000.nii.gz', 
'label': './labelsTr/MSWAL_0258.nii.gz'}, {'image': './imagesTr/MSWAL_0259_0000.nii.gz', 'label': './labelsTr/MSWAL_0259.nii.gz'}, {'image': './imagesTr/MSWAL_0260_0000.nii.gz', 'label': './labelsTr/MSWAL_0260.nii.gz'}, {'image': './imagesTr/MSWAL_0261_0000.nii.gz', 'label': './labelsTr/MSWAL_0261.nii.gz'}, {'image': './imagesTr/MSWAL_0262_0000.nii.gz', 'label': './labelsTr/MSWAL_0262.nii.gz'}, {'image': './imagesTr/MSWAL_0263_0000.nii.gz', 'label': './labelsTr/MSWAL_0263.nii.gz'}, {'image': './imagesTr/MSWAL_0264_0000.nii.gz', 'label': './labelsTr/MSWAL_0264.nii.gz'}, {'image': './imagesTr/MSWAL_0265_0000.nii.gz', 'label': './labelsTr/MSWAL_0265.nii.gz'}, {'image': './imagesTr/MSWAL_0267_0000.nii.gz', 'label': './labelsTr/MSWAL_0267.nii.gz'}, {'image': './imagesTr/MSWAL_0270_0000.nii.gz', 'label': './labelsTr/MSWAL_0270.nii.gz'}, {'image': './imagesTr/MSWAL_0271_0000.nii.gz', 'label': './labelsTr/MSWAL_0271.nii.gz'}, {'image': './imagesTr/MSWAL_0272_0000.nii.gz', 'label': './labelsTr/MSWAL_0272.nii.gz'}, {'image': './imagesTr/MSWAL_0273_0000.nii.gz', 'label': './labelsTr/MSWAL_0273.nii.gz'}, {'image': './imagesTr/MSWAL_0274_0000.nii.gz', 'label': './labelsTr/MSWAL_0274.nii.gz'}, {'image': './imagesTr/MSWAL_0275_0000.nii.gz', 'label': './labelsTr/MSWAL_0275.nii.gz'}, {'image': './imagesTr/MSWAL_0276_0000.nii.gz', 'label': './labelsTr/MSWAL_0276.nii.gz'}, {'image': './imagesTr/MSWAL_0277_0000.nii.gz', 'label': './labelsTr/MSWAL_0277.nii.gz'}, {'image': './imagesTr/MSWAL_0278_0000.nii.gz', 'label': './labelsTr/MSWAL_0278.nii.gz'}, {'image': './imagesTr/MSWAL_0279_0000.nii.gz', 'label': './labelsTr/MSWAL_0279.nii.gz'}, {'image': './imagesTr/MSWAL_0281_0000.nii.gz', 'label': './labelsTr/MSWAL_0281.nii.gz'}, {'image': './imagesTr/MSWAL_0282_0000.nii.gz', 'label': './labelsTr/MSWAL_0282.nii.gz'}, {'image': './imagesTr/MSWAL_0283_0000.nii.gz', 'label': './labelsTr/MSWAL_0283.nii.gz'}, {'image': './imagesTr/MSWAL_0284_0000.nii.gz', 'label': './labelsTr/MSWAL_0284.nii.gz'}, 
{'image': './imagesTr/MSWAL_0285_0000.nii.gz', 'label': './labelsTr/MSWAL_0285.nii.gz'}, {'image': './imagesTr/MSWAL_0288_0000.nii.gz', 'label': './labelsTr/MSWAL_0288.nii.gz'}, {'image': './imagesTr/MSWAL_0289_0000.nii.gz', 'label': './labelsTr/MSWAL_0289.nii.gz'}, {'image': './imagesTr/MSWAL_0290_0000.nii.gz', 'label': './labelsTr/MSWAL_0290.nii.gz'}, {'image': './imagesTr/MSWAL_0293_0000.nii.gz', 'label': './labelsTr/MSWAL_0293.nii.gz'}, {'image': './imagesTr/MSWAL_0296_0000.nii.gz', 'label': './labelsTr/MSWAL_0296.nii.gz'}, {'image': './imagesTr/MSWAL_0297_0000.nii.gz', 'label': './labelsTr/MSWAL_0297.nii.gz'}, {'image': './imagesTr/MSWAL_0301_0000.nii.gz', 'label': './labelsTr/MSWAL_0301.nii.gz'}, {'image': './imagesTr/MSWAL_0302_0000.nii.gz', 'label': './labelsTr/MSWAL_0302.nii.gz'}, {'image': './imagesTr/MSWAL_0303_0000.nii.gz', 'label': './labelsTr/MSWAL_0303.nii.gz'}, {'image': './imagesTr/MSWAL_0306_0000.nii.gz', 'label': './labelsTr/MSWAL_0306.nii.gz'}, {'image': './imagesTr/MSWAL_0307_0000.nii.gz', 'label': './labelsTr/MSWAL_0307.nii.gz'}, {'image': './imagesTr/MSWAL_0308_0000.nii.gz', 'label': './labelsTr/MSWAL_0308.nii.gz'}, {'image': './imagesTr/MSWAL_0311_0000.nii.gz', 'label': './labelsTr/MSWAL_0311.nii.gz'}, {'image': './imagesTr/MSWAL_0312_0000.nii.gz', 'label': './labelsTr/MSWAL_0312.nii.gz'}, {'image': './imagesTr/MSWAL_0313_0000.nii.gz', 'label': './labelsTr/MSWAL_0313.nii.gz'}, {'image': './imagesTr/MSWAL_0314_0000.nii.gz', 'label': './labelsTr/MSWAL_0314.nii.gz'}, {'image': './imagesTr/MSWAL_0316_0000.nii.gz', 'label': './labelsTr/MSWAL_0316.nii.gz'}, {'image': './imagesTr/MSWAL_0317_0000.nii.gz', 'label': './labelsTr/MSWAL_0317.nii.gz'}, {'image': './imagesTr/MSWAL_0318_0000.nii.gz', 'label': './labelsTr/MSWAL_0318.nii.gz'}, {'image': './imagesTr/MSWAL_0320_0000.nii.gz', 'label': './labelsTr/MSWAL_0320.nii.gz'}, {'image': './imagesTr/MSWAL_0323_0000.nii.gz', 'label': './labelsTr/MSWAL_0323.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0324_0000.nii.gz', 'label': './labelsTr/MSWAL_0324.nii.gz'}, {'image': './imagesTr/MSWAL_0326_0000.nii.gz', 'label': './labelsTr/MSWAL_0326.nii.gz'}, {'image': './imagesTr/MSWAL_0327_0000.nii.gz', 'label': './labelsTr/MSWAL_0327.nii.gz'}, {'image': './imagesTr/MSWAL_0328_0000.nii.gz', 'label': './labelsTr/MSWAL_0328.nii.gz'}, {'image': './imagesTr/MSWAL_0330_0000.nii.gz', 'label': './labelsTr/MSWAL_0330.nii.gz'}, {'image': './imagesTr/MSWAL_0331_0000.nii.gz', 'label': './labelsTr/MSWAL_0331.nii.gz'}, {'image': './imagesTr/MSWAL_0332_0000.nii.gz', 'label': './labelsTr/MSWAL_0332.nii.gz'}, {'image': './imagesTr/MSWAL_0333_0000.nii.gz', 'label': './labelsTr/MSWAL_0333.nii.gz'}, {'image': './imagesTr/MSWAL_0334_0000.nii.gz', 'label': './labelsTr/MSWAL_0334.nii.gz'}, {'image': './imagesTr/MSWAL_0335_0000.nii.gz', 'label': './labelsTr/MSWAL_0335.nii.gz'}, {'image': './imagesTr/MSWAL_0336_0000.nii.gz', 'label': './labelsTr/MSWAL_0336.nii.gz'}, {'image': './imagesTr/MSWAL_0337_0000.nii.gz', 'label': './labelsTr/MSWAL_0337.nii.gz'}, {'image': './imagesTr/MSWAL_0338_0000.nii.gz', 'label': './labelsTr/MSWAL_0338.nii.gz'}, {'image': './imagesTr/MSWAL_0341_0000.nii.gz', 'label': './labelsTr/MSWAL_0341.nii.gz'}, {'image': './imagesTr/MSWAL_0342_0000.nii.gz', 'label': './labelsTr/MSWAL_0342.nii.gz'}, {'image': './imagesTr/MSWAL_0343_0000.nii.gz', 'label': './labelsTr/MSWAL_0343.nii.gz'}, {'image': './imagesTr/MSWAL_0344_0000.nii.gz', 'label': './labelsTr/MSWAL_0344.nii.gz'}, {'image': './imagesTr/MSWAL_0345_0000.nii.gz', 'label': './labelsTr/MSWAL_0345.nii.gz'}, {'image': './imagesTr/MSWAL_0346_0000.nii.gz', 'label': './labelsTr/MSWAL_0346.nii.gz'}, {'image': './imagesTr/MSWAL_0348_0000.nii.gz', 'label': './labelsTr/MSWAL_0348.nii.gz'}, {'image': './imagesTr/MSWAL_0353_0000.nii.gz', 'label': './labelsTr/MSWAL_0353.nii.gz'}, {'image': './imagesTr/MSWAL_0354_0000.nii.gz', 'label': './labelsTr/MSWAL_0354.nii.gz'}, {'image': './imagesTr/MSWAL_0355_0000.nii.gz', 
'label': './labelsTr/MSWAL_0355.nii.gz'}, {'image': './imagesTr/MSWAL_0356_0000.nii.gz', 'label': './labelsTr/MSWAL_0356.nii.gz'}, {'image': './imagesTr/MSWAL_0357_0000.nii.gz', 'label': './labelsTr/MSWAL_0357.nii.gz'}, {'image': './imagesTr/MSWAL_0360_0000.nii.gz', 'label': './labelsTr/MSWAL_0360.nii.gz'}, {'image': './imagesTr/MSWAL_0361_0000.nii.gz', 'label': './labelsTr/MSWAL_0361.nii.gz'}, {'image': './imagesTr/MSWAL_0362_0000.nii.gz', 'label': './labelsTr/MSWAL_0362.nii.gz'}, {'image': './imagesTr/MSWAL_0363_0000.nii.gz', 'label': './labelsTr/MSWAL_0363.nii.gz'}, {'image': './imagesTr/MSWAL_0365_0000.nii.gz', 'label': './labelsTr/MSWAL_0365.nii.gz'}, {'image': './imagesTr/MSWAL_0366_0000.nii.gz', 'label': './labelsTr/MSWAL_0366.nii.gz'}, {'image': './imagesTr/MSWAL_0369_0000.nii.gz', 'label': './labelsTr/MSWAL_0369.nii.gz'}, {'image': './imagesTr/MSWAL_0370_0000.nii.gz', 'label': './labelsTr/MSWAL_0370.nii.gz'}, {'image': './imagesTr/MSWAL_0373_0000.nii.gz', 'label': './labelsTr/MSWAL_0373.nii.gz'}, {'image': './imagesTr/MSWAL_0374_0000.nii.gz', 'label': './labelsTr/MSWAL_0374.nii.gz'}, {'image': './imagesTr/MSWAL_0375_0000.nii.gz', 'label': './labelsTr/MSWAL_0375.nii.gz'}, {'image': './imagesTr/MSWAL_0376_0000.nii.gz', 'label': './labelsTr/MSWAL_0376.nii.gz'}, {'image': './imagesTr/MSWAL_0378_0000.nii.gz', 'label': './labelsTr/MSWAL_0378.nii.gz'}, {'image': './imagesTr/MSWAL_0379_0000.nii.gz', 'label': './labelsTr/MSWAL_0379.nii.gz'}, {'image': './imagesTr/MSWAL_0380_0000.nii.gz', 'label': './labelsTr/MSWAL_0380.nii.gz'}, {'image': './imagesTr/MSWAL_0381_0000.nii.gz', 'label': './labelsTr/MSWAL_0381.nii.gz'}, {'image': './imagesTr/MSWAL_0382_0000.nii.gz', 'label': './labelsTr/MSWAL_0382.nii.gz'}, {'image': './imagesTr/MSWAL_0387_0000.nii.gz', 'label': './labelsTr/MSWAL_0387.nii.gz'}, {'image': './imagesTr/MSWAL_0388_0000.nii.gz', 'label': './labelsTr/MSWAL_0388.nii.gz'}, {'image': './imagesTr/MSWAL_0389_0000.nii.gz', 'label': './labelsTr/MSWAL_0389.nii.gz'}, 
{'image': './imagesTr/MSWAL_0390_0000.nii.gz', 'label': './labelsTr/MSWAL_0390.nii.gz'}, {'image': './imagesTr/MSWAL_0391_0000.nii.gz', 'label': './labelsTr/MSWAL_0391.nii.gz'}, {'image': './imagesTr/MSWAL_0392_0000.nii.gz', 'label': './labelsTr/MSWAL_0392.nii.gz'}, {'image': './imagesTr/MSWAL_0393_0000.nii.gz', 'label': './labelsTr/MSWAL_0393.nii.gz'}, {'image': './imagesTr/MSWAL_0397_0000.nii.gz', 'label': './labelsTr/MSWAL_0397.nii.gz'}, {'image': './imagesTr/MSWAL_0398_0000.nii.gz', 'label': './labelsTr/MSWAL_0398.nii.gz'}, {'image': './imagesTr/MSWAL_0399_0000.nii.gz', 'label': './labelsTr/MSWAL_0399.nii.gz'}, {'image': './imagesTr/MSWAL_0400_0000.nii.gz', 'label': './labelsTr/MSWAL_0400.nii.gz'}, {'image': './imagesTr/MSWAL_0402_0000.nii.gz', 'label': './labelsTr/MSWAL_0402.nii.gz'}, {'image': './imagesTr/MSWAL_0403_0000.nii.gz', 'label': './labelsTr/MSWAL_0403.nii.gz'}, {'image': './imagesTr/MSWAL_0407_0000.nii.gz', 'label': './labelsTr/MSWAL_0407.nii.gz'}, {'image': './imagesTr/MSWAL_0409_0000.nii.gz', 'label': './labelsTr/MSWAL_0409.nii.gz'}, {'image': './imagesTr/MSWAL_0410_0000.nii.gz', 'label': './labelsTr/MSWAL_0410.nii.gz'}, {'image': './imagesTr/MSWAL_0411_0000.nii.gz', 'label': './labelsTr/MSWAL_0411.nii.gz'}, {'image': './imagesTr/MSWAL_0412_0000.nii.gz', 'label': './labelsTr/MSWAL_0412.nii.gz'}, {'image': './imagesTr/MSWAL_0414_0000.nii.gz', 'label': './labelsTr/MSWAL_0414.nii.gz'}, {'image': './imagesTr/MSWAL_0415_0000.nii.gz', 'label': './labelsTr/MSWAL_0415.nii.gz'}, {'image': './imagesTr/MSWAL_0416_0000.nii.gz', 'label': './labelsTr/MSWAL_0416.nii.gz'}, {'image': './imagesTr/MSWAL_0417_0000.nii.gz', 'label': './labelsTr/MSWAL_0417.nii.gz'}, {'image': './imagesTr/MSWAL_0418_0000.nii.gz', 'label': './labelsTr/MSWAL_0418.nii.gz'}, {'image': './imagesTr/MSWAL_0419_0000.nii.gz', 'label': './labelsTr/MSWAL_0419.nii.gz'}, {'image': './imagesTr/MSWAL_0420_0000.nii.gz', 'label': './labelsTr/MSWAL_0420.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0421_0000.nii.gz', 'label': './labelsTr/MSWAL_0421.nii.gz'}, {'image': './imagesTr/MSWAL_0422_0000.nii.gz', 'label': './labelsTr/MSWAL_0422.nii.gz'}, {'image': './imagesTr/MSWAL_0423_0000.nii.gz', 'label': './labelsTr/MSWAL_0423.nii.gz'}, {'image': './imagesTr/MSWAL_0425_0000.nii.gz', 'label': './labelsTr/MSWAL_0425.nii.gz'}, {'image': './imagesTr/MSWAL_0426_0000.nii.gz', 'label': './labelsTr/MSWAL_0426.nii.gz'}, {'image': './imagesTr/MSWAL_0427_0000.nii.gz', 'label': './labelsTr/MSWAL_0427.nii.gz'}, {'image': './imagesTr/MSWAL_0428_0000.nii.gz', 'label': './labelsTr/MSWAL_0428.nii.gz'}, {'image': './imagesTr/MSWAL_0429_0000.nii.gz', 'label': './labelsTr/MSWAL_0429.nii.gz'}, {'image': './imagesTr/MSWAL_0430_0000.nii.gz', 'label': './labelsTr/MSWAL_0430.nii.gz'}, {'image': './imagesTr/MSWAL_0431_0000.nii.gz', 'label': './labelsTr/MSWAL_0431.nii.gz'}, {'image': './imagesTr/MSWAL_0432_0000.nii.gz', 'label': './labelsTr/MSWAL_0432.nii.gz'}, {'image': './imagesTr/MSWAL_0434_0000.nii.gz', 'label': './labelsTr/MSWAL_0434.nii.gz'}, {'image': './imagesTr/MSWAL_0435_0000.nii.gz', 'label': './labelsTr/MSWAL_0435.nii.gz'}, {'image': './imagesTr/MSWAL_0436_0000.nii.gz', 'label': './labelsTr/MSWAL_0436.nii.gz'}, {'image': './imagesTr/MSWAL_0437_0000.nii.gz', 'label': './labelsTr/MSWAL_0437.nii.gz'}, {'image': './imagesTr/MSWAL_0438_0000.nii.gz', 'label': './labelsTr/MSWAL_0438.nii.gz'}, {'image': './imagesTr/MSWAL_0439_0000.nii.gz', 'label': './labelsTr/MSWAL_0439.nii.gz'}, {'image': './imagesTr/MSWAL_0440_0000.nii.gz', 'label': './labelsTr/MSWAL_0440.nii.gz'}, {'image': './imagesTr/MSWAL_0442_0000.nii.gz', 'label': './labelsTr/MSWAL_0442.nii.gz'}, {'image': './imagesTr/MSWAL_0446_0000.nii.gz', 'label': './labelsTr/MSWAL_0446.nii.gz'}, {'image': './imagesTr/MSWAL_0447_0000.nii.gz', 'label': './labelsTr/MSWAL_0447.nii.gz'}, {'image': './imagesTr/MSWAL_0452_0000.nii.gz', 'label': './labelsTr/MSWAL_0452.nii.gz'}, {'image': './imagesTr/MSWAL_0453_0000.nii.gz', 
'label': './labelsTr/MSWAL_0453.nii.gz'}, {'image': './imagesTr/MSWAL_0455_0000.nii.gz', 'label': './labelsTr/MSWAL_0455.nii.gz'}, {'image': './imagesTr/MSWAL_0457_0000.nii.gz', 'label': './labelsTr/MSWAL_0457.nii.gz'}, {'image': './imagesTr/MSWAL_0460_0000.nii.gz', 'label': './labelsTr/MSWAL_0460.nii.gz'}, {'image': './imagesTr/MSWAL_0461_0000.nii.gz', 'label': './labelsTr/MSWAL_0461.nii.gz'}, {'image': './imagesTr/MSWAL_0463_0000.nii.gz', 'label': './labelsTr/MSWAL_0463.nii.gz'}, {'image': './imagesTr/MSWAL_0464_0000.nii.gz', 'label': './labelsTr/MSWAL_0464.nii.gz'}, {'image': './imagesTr/MSWAL_0465_0000.nii.gz', 'label': './labelsTr/MSWAL_0465.nii.gz'}, {'image': './imagesTr/MSWAL_0466_0000.nii.gz', 'label': './labelsTr/MSWAL_0466.nii.gz'}, {'image': './imagesTr/MSWAL_0468_0000.nii.gz', 'label': './labelsTr/MSWAL_0468.nii.gz'}, {'image': './imagesTr/MSWAL_0470_0000.nii.gz', 'label': './labelsTr/MSWAL_0470.nii.gz'}, {'image': './imagesTr/MSWAL_0471_0000.nii.gz', 'label': './labelsTr/MSWAL_0471.nii.gz'}, {'image': './imagesTr/MSWAL_0473_0000.nii.gz', 'label': './labelsTr/MSWAL_0473.nii.gz'}, {'image': './imagesTr/MSWAL_0474_0000.nii.gz', 'label': './labelsTr/MSWAL_0474.nii.gz'}, {'image': './imagesTr/MSWAL_0475_0000.nii.gz', 'label': './labelsTr/MSWAL_0475.nii.gz'}, {'image': './imagesTr/MSWAL_0476_0000.nii.gz', 'label': './labelsTr/MSWAL_0476.nii.gz'}, {'image': './imagesTr/MSWAL_0477_0000.nii.gz', 'label': './labelsTr/MSWAL_0477.nii.gz'}, {'image': './imagesTr/MSWAL_0479_0000.nii.gz', 'label': './labelsTr/MSWAL_0479.nii.gz'}, {'image': './imagesTr/MSWAL_0480_0000.nii.gz', 'label': './labelsTr/MSWAL_0480.nii.gz'}, {'image': './imagesTr/MSWAL_0482_0000.nii.gz', 'label': './labelsTr/MSWAL_0482.nii.gz'}, {'image': './imagesTr/MSWAL_0483_0000.nii.gz', 'label': './labelsTr/MSWAL_0483.nii.gz'}, {'image': './imagesTr/MSWAL_0484_0000.nii.gz', 'label': './labelsTr/MSWAL_0484.nii.gz'}, {'image': './imagesTr/MSWAL_0485_0000.nii.gz', 'label': './labelsTr/MSWAL_0485.nii.gz'}, 
{'image': './imagesTr/MSWAL_0486_0000.nii.gz', 'label': './labelsTr/MSWAL_0486.nii.gz'}, {'image': './imagesTr/MSWAL_0487_0000.nii.gz', 'label': './labelsTr/MSWAL_0487.nii.gz'}, {'image': './imagesTr/MSWAL_0488_0000.nii.gz', 'label': './labelsTr/MSWAL_0488.nii.gz'}, {'image': './imagesTr/MSWAL_0489_0000.nii.gz', 'label': './labelsTr/MSWAL_0489.nii.gz'}, {'image': './imagesTr/MSWAL_0490_0000.nii.gz', 'label': './labelsTr/MSWAL_0490.nii.gz'}, {'image': './imagesTr/MSWAL_0491_0000.nii.gz', 'label': './labelsTr/MSWAL_0491.nii.gz'}, {'image': './imagesTr/MSWAL_0492_0000.nii.gz', 'label': './labelsTr/MSWAL_0492.nii.gz'}, {'image': './imagesTr/MSWAL_0493_0000.nii.gz', 'label': './labelsTr/MSWAL_0493.nii.gz'}, {'image': './imagesTr/MSWAL_0495_0000.nii.gz', 'label': './labelsTr/MSWAL_0495.nii.gz'}, {'image': './imagesTr/MSWAL_0497_0000.nii.gz', 'label': './labelsTr/MSWAL_0497.nii.gz'}, {'image': './imagesTr/MSWAL_0498_0000.nii.gz', 'label': './labelsTr/MSWAL_0498.nii.gz'}, {'image': './imagesTr/MSWAL_0500_0000.nii.gz', 'label': './labelsTr/MSWAL_0500.nii.gz'}, {'image': './imagesTr/MSWAL_0501_0000.nii.gz', 'label': './labelsTr/MSWAL_0501.nii.gz'}, {'image': './imagesTr/MSWAL_0504_0000.nii.gz', 'label': './labelsTr/MSWAL_0504.nii.gz'}, {'image': './imagesTr/MSWAL_0505_0000.nii.gz', 'label': './labelsTr/MSWAL_0505.nii.gz'}, {'image': './imagesTr/MSWAL_0506_0000.nii.gz', 'label': './labelsTr/MSWAL_0506.nii.gz'}, {'image': './imagesTr/MSWAL_0507_0000.nii.gz', 'label': './labelsTr/MSWAL_0507.nii.gz'}, {'image': './imagesTr/MSWAL_0508_0000.nii.gz', 'label': './labelsTr/MSWAL_0508.nii.gz'}, {'image': './imagesTr/MSWAL_0509_0000.nii.gz', 'label': './labelsTr/MSWAL_0509.nii.gz'}, {'image': './imagesTr/MSWAL_0510_0000.nii.gz', 'label': './labelsTr/MSWAL_0510.nii.gz'}, {'image': './imagesTr/MSWAL_0512_0000.nii.gz', 'label': './labelsTr/MSWAL_0512.nii.gz'}, {'image': './imagesTr/MSWAL_0516_0000.nii.gz', 'label': './labelsTr/MSWAL_0516.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0518_0000.nii.gz', 'label': './labelsTr/MSWAL_0518.nii.gz'}, {'image': './imagesTr/MSWAL_0519_0000.nii.gz', 'label': './labelsTr/MSWAL_0519.nii.gz'}, {'image': './imagesTr/MSWAL_0521_0000.nii.gz', 'label': './labelsTr/MSWAL_0521.nii.gz'}, {'image': './imagesTr/MSWAL_0522_0000.nii.gz', 'label': './labelsTr/MSWAL_0522.nii.gz'}, {'image': './imagesTr/MSWAL_0523_0000.nii.gz', 'label': './labelsTr/MSWAL_0523.nii.gz'}, {'image': './imagesTr/MSWAL_0524_0000.nii.gz', 'label': './labelsTr/MSWAL_0524.nii.gz'}, {'image': './imagesTr/MSWAL_0526_0000.nii.gz', 'label': './labelsTr/MSWAL_0526.nii.gz'}, {'image': './imagesTr/MSWAL_0527_0000.nii.gz', 'label': './labelsTr/MSWAL_0527.nii.gz'}, {'image': './imagesTr/MSWAL_0530_0000.nii.gz', 'label': './labelsTr/MSWAL_0530.nii.gz'}, {'image': './imagesTr/MSWAL_0531_0000.nii.gz', 'label': './labelsTr/MSWAL_0531.nii.gz'}, {'image': './imagesTr/MSWAL_0534_0000.nii.gz', 'label': './labelsTr/MSWAL_0534.nii.gz'}, {'image': './imagesTr/MSWAL_0535_0000.nii.gz', 'label': './labelsTr/MSWAL_0535.nii.gz'}, {'image': './imagesTr/MSWAL_0536_0000.nii.gz', 'label': './labelsTr/MSWAL_0536.nii.gz'}, {'image': './imagesTr/MSWAL_0538_0000.nii.gz', 'label': './labelsTr/MSWAL_0538.nii.gz'}, {'image': './imagesTr/MSWAL_0539_0000.nii.gz', 'label': './labelsTr/MSWAL_0539.nii.gz'}, {'image': './imagesTr/MSWAL_0540_0000.nii.gz', 'label': './labelsTr/MSWAL_0540.nii.gz'}, {'image': './imagesTr/MSWAL_0542_0000.nii.gz', 'label': './labelsTr/MSWAL_0542.nii.gz'}, {'image': './imagesTr/MSWAL_0544_0000.nii.gz', 'label': './labelsTr/MSWAL_0544.nii.gz'}, {'image': './imagesTr/MSWAL_0545_0000.nii.gz', 'label': './labelsTr/MSWAL_0545.nii.gz'}, {'image': './imagesTr/MSWAL_0546_0000.nii.gz', 'label': './labelsTr/MSWAL_0546.nii.gz'}, {'image': './imagesTr/MSWAL_0547_0000.nii.gz', 'label': './labelsTr/MSWAL_0547.nii.gz'}, {'image': './imagesTr/MSWAL_0548_0000.nii.gz', 'label': './labelsTr/MSWAL_0548.nii.gz'}, {'image': './imagesTr/MSWAL_0549_0000.nii.gz', 
'label': './labelsTr/MSWAL_0549.nii.gz'}, {'image': './imagesTr/MSWAL_0550_0000.nii.gz', 'label': './labelsTr/MSWAL_0550.nii.gz'}, {'image': './imagesTr/MSWAL_0551_0000.nii.gz', 'label': './labelsTr/MSWAL_0551.nii.gz'}, {'image': './imagesTr/MSWAL_0552_0000.nii.gz', 'label': './labelsTr/MSWAL_0552.nii.gz'}, {'image': './imagesTr/MSWAL_0553_0000.nii.gz', 'label': './labelsTr/MSWAL_0553.nii.gz'}, {'image': './imagesTr/MSWAL_0554_0000.nii.gz', 'label': './labelsTr/MSWAL_0554.nii.gz'}, {'image': './imagesTr/MSWAL_0555_0000.nii.gz', 'label': './labelsTr/MSWAL_0555.nii.gz'}, {'image': './imagesTr/MSWAL_0556_0000.nii.gz', 'label': './labelsTr/MSWAL_0556.nii.gz'}, {'image': './imagesTr/MSWAL_0557_0000.nii.gz', 'label': './labelsTr/MSWAL_0557.nii.gz'}, {'image': './imagesTr/MSWAL_0558_0000.nii.gz', 'label': './labelsTr/MSWAL_0558.nii.gz'}, {'image': './imagesTr/MSWAL_0559_0000.nii.gz', 'label': './labelsTr/MSWAL_0559.nii.gz'}, {'image': './imagesTr/MSWAL_0561_0000.nii.gz', 'label': './labelsTr/MSWAL_0561.nii.gz'}, {'image': './imagesTr/MSWAL_0562_0000.nii.gz', 'label': './labelsTr/MSWAL_0562.nii.gz'}, {'image': './imagesTr/MSWAL_0563_0000.nii.gz', 'label': './labelsTr/MSWAL_0563.nii.gz'}, {'image': './imagesTr/MSWAL_0564_0000.nii.gz', 'label': './labelsTr/MSWAL_0564.nii.gz'}, {'image': './imagesTr/MSWAL_0566_0000.nii.gz', 'label': './labelsTr/MSWAL_0566.nii.gz'}, {'image': './imagesTr/MSWAL_0567_0000.nii.gz', 'label': './labelsTr/MSWAL_0567.nii.gz'}, {'image': './imagesTr/MSWAL_0568_0000.nii.gz', 'label': './labelsTr/MSWAL_0568.nii.gz'}, {'image': './imagesTr/MSWAL_0571_0000.nii.gz', 'label': './labelsTr/MSWAL_0571.nii.gz'}, {'image': './imagesTr/MSWAL_0573_0000.nii.gz', 'label': './labelsTr/MSWAL_0573.nii.gz'}, {'image': './imagesTr/MSWAL_0574_0000.nii.gz', 'label': './labelsTr/MSWAL_0574.nii.gz'}, {'image': './imagesTr/MSWAL_0575_0000.nii.gz', 'label': './labelsTr/MSWAL_0575.nii.gz'}, {'image': './imagesTr/MSWAL_0577_0000.nii.gz', 'label': './labelsTr/MSWAL_0577.nii.gz'}, 
{'image': './imagesTr/MSWAL_0578_0000.nii.gz', 'label': './labelsTr/MSWAL_0578.nii.gz'}, {'image': './imagesTr/MSWAL_0579_0000.nii.gz', 'label': './labelsTr/MSWAL_0579.nii.gz'}, {'image': './imagesTr/MSWAL_0580_0000.nii.gz', 'label': './labelsTr/MSWAL_0580.nii.gz'}, {'image': './imagesTr/MSWAL_0581_0000.nii.gz', 'label': './labelsTr/MSWAL_0581.nii.gz'}, {'image': './imagesTr/MSWAL_0582_0000.nii.gz', 'label': './labelsTr/MSWAL_0582.nii.gz'}, {'image': './imagesTr/MSWAL_0583_0000.nii.gz', 'label': './labelsTr/MSWAL_0583.nii.gz'}, {'image': './imagesTr/MSWAL_0584_0000.nii.gz', 'label': './labelsTr/MSWAL_0584.nii.gz'}, {'image': './imagesTr/MSWAL_0586_0000.nii.gz', 'label': './labelsTr/MSWAL_0586.nii.gz'}, {'image': './imagesTr/MSWAL_0590_0000.nii.gz', 'label': './labelsTr/MSWAL_0590.nii.gz'}, {'image': './imagesTr/MSWAL_0591_0000.nii.gz', 'label': './labelsTr/MSWAL_0591.nii.gz'}, {'image': './imagesTr/MSWAL_0592_0000.nii.gz', 'label': './labelsTr/MSWAL_0592.nii.gz'}, {'image': './imagesTr/MSWAL_0593_0000.nii.gz', 'label': './labelsTr/MSWAL_0593.nii.gz'}, {'image': './imagesTr/MSWAL_0595_0000.nii.gz', 'label': './labelsTr/MSWAL_0595.nii.gz'}, {'image': './imagesTr/MSWAL_0596_0000.nii.gz', 'label': './labelsTr/MSWAL_0596.nii.gz'}, {'image': './imagesTr/MSWAL_0597_0000.nii.gz', 'label': './labelsTr/MSWAL_0597.nii.gz'}, {'image': './imagesTr/MSWAL_0598_0000.nii.gz', 'label': './labelsTr/MSWAL_0598.nii.gz'}, {'image': './imagesTr/MSWAL_0599_0000.nii.gz', 'label': './labelsTr/MSWAL_0599.nii.gz'}, {'image': './imagesTr/MSWAL_0600_0000.nii.gz', 'label': './labelsTr/MSWAL_0600.nii.gz'}, {'image': './imagesTr/MSWAL_0601_0000.nii.gz', 'label': './labelsTr/MSWAL_0601.nii.gz'}, {'image': './imagesTr/MSWAL_0602_0000.nii.gz', 'label': './labelsTr/MSWAL_0602.nii.gz'}, {'image': './imagesTr/MSWAL_0604_0000.nii.gz', 'label': './labelsTr/MSWAL_0604.nii.gz'}, {'image': './imagesTr/MSWAL_0605_0000.nii.gz', 'label': './labelsTr/MSWAL_0605.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0608_0000.nii.gz', 'label': './labelsTr/MSWAL_0608.nii.gz'}, {'image': './imagesTr/MSWAL_0612_0000.nii.gz', 'label': './labelsTr/MSWAL_0612.nii.gz'}, {'image': './imagesTr/MSWAL_0614_0000.nii.gz', 'label': './labelsTr/MSWAL_0614.nii.gz'}, {'image': './imagesTr/MSWAL_0615_0000.nii.gz', 'label': './labelsTr/MSWAL_0615.nii.gz'}, {'image': './imagesTr/MSWAL_0616_0000.nii.gz', 'label': './labelsTr/MSWAL_0616.nii.gz'}, {'image': './imagesTr/MSWAL_0617_0000.nii.gz', 'label': './labelsTr/MSWAL_0617.nii.gz'}, {'image': './imagesTr/MSWAL_0621_0000.nii.gz', 'label': './labelsTr/MSWAL_0621.nii.gz'}, {'image': './imagesTr/MSWAL_0623_0000.nii.gz', 'label': './labelsTr/MSWAL_0623.nii.gz'}, {'image': './imagesTr/MSWAL_0625_0000.nii.gz', 'label': './labelsTr/MSWAL_0625.nii.gz'}, {'image': './imagesTr/MSWAL_0626_0000.nii.gz', 'label': './labelsTr/MSWAL_0626.nii.gz'}, {'image': './imagesTr/MSWAL_0627_0000.nii.gz', 'label': './labelsTr/MSWAL_0627.nii.gz'}, {'image': './imagesTr/MSWAL_0628_0000.nii.gz', 'label': './labelsTr/MSWAL_0628.nii.gz'}, {'image': './imagesTr/MSWAL_0629_0000.nii.gz', 'label': './labelsTr/MSWAL_0629.nii.gz'}, {'image': './imagesTr/MSWAL_0630_0000.nii.gz', 'label': './labelsTr/MSWAL_0630.nii.gz'}, {'image': './imagesTr/MSWAL_0632_0000.nii.gz', 'label': './labelsTr/MSWAL_0632.nii.gz'}, {'image': './imagesTr/MSWAL_0635_0000.nii.gz', 'label': './labelsTr/MSWAL_0635.nii.gz'}, {'image': './imagesTr/MSWAL_0636_0000.nii.gz', 'label': './labelsTr/MSWAL_0636.nii.gz'}, {'image': './imagesTr/MSWAL_0638_0000.nii.gz', 'label': './labelsTr/MSWAL_0638.nii.gz'}, {'image': './imagesTr/MSWAL_0640_0000.nii.gz', 'label': './labelsTr/MSWAL_0640.nii.gz'}, {'image': './imagesTr/MSWAL_0641_0000.nii.gz', 'label': './labelsTr/MSWAL_0641.nii.gz'}, {'image': './imagesTr/MSWAL_0643_0000.nii.gz', 'label': './labelsTr/MSWAL_0643.nii.gz'}, {'image': './imagesTr/MSWAL_0644_0000.nii.gz', 'label': './labelsTr/MSWAL_0644.nii.gz'}, {'image': './imagesTr/MSWAL_0646_0000.nii.gz', 
'label': './labelsTr/MSWAL_0646.nii.gz'}, {'image': './imagesTr/MSWAL_0648_0000.nii.gz', 'label': './labelsTr/MSWAL_0648.nii.gz'}, {'image': './imagesTr/MSWAL_0649_0000.nii.gz', 'label': './labelsTr/MSWAL_0649.nii.gz'}, {'image': './imagesTr/MSWAL_0650_0000.nii.gz', 'label': './labelsTr/MSWAL_0650.nii.gz'}, {'image': './imagesTr/MSWAL_0651_0000.nii.gz', 'label': './labelsTr/MSWAL_0651.nii.gz'}, {'image': './imagesTr/MSWAL_0653_0000.nii.gz', 'label': './labelsTr/MSWAL_0653.nii.gz'}, {'image': './imagesTr/MSWAL_0654_0000.nii.gz', 'label': './labelsTr/MSWAL_0654.nii.gz'}, {'image': './imagesTr/MSWAL_0655_0000.nii.gz', 'label': './labelsTr/MSWAL_0655.nii.gz'}, {'image': './imagesTr/MSWAL_0656_0000.nii.gz', 'label': './labelsTr/MSWAL_0656.nii.gz'}, {'image': './imagesTr/MSWAL_0658_0000.nii.gz', 'label': './labelsTr/MSWAL_0658.nii.gz'}, {'image': './imagesTr/MSWAL_0660_0000.nii.gz', 'label': './labelsTr/MSWAL_0660.nii.gz'}, {'image': './imagesTr/MSWAL_0661_0000.nii.gz', 'label': './labelsTr/MSWAL_0661.nii.gz'}, {'image': './imagesTr/MSWAL_0662_0000.nii.gz', 'label': './labelsTr/MSWAL_0662.nii.gz'}, {'image': './imagesTr/MSWAL_0663_0000.nii.gz', 'label': './labelsTr/MSWAL_0663.nii.gz'}, {'image': './imagesTr/MSWAL_0666_0000.nii.gz', 'label': './labelsTr/MSWAL_0666.nii.gz'}, {'image': './imagesTr/MSWAL_0667_0000.nii.gz', 'label': './labelsTr/MSWAL_0667.nii.gz'}, {'image': './imagesTr/MSWAL_0668_0000.nii.gz', 'label': './labelsTr/MSWAL_0668.nii.gz'}, {'image': './imagesTr/MSWAL_0669_0000.nii.gz', 'label': './labelsTr/MSWAL_0669.nii.gz'}, {'image': './imagesTr/MSWAL_0670_0000.nii.gz', 'label': './labelsTr/MSWAL_0670.nii.gz'}, {'image': './imagesTr/MSWAL_0671_0000.nii.gz', 'label': './labelsTr/MSWAL_0671.nii.gz'}, {'image': './imagesTr/MSWAL_0673_0000.nii.gz', 'label': './labelsTr/MSWAL_0673.nii.gz'}, {'image': './imagesTr/MSWAL_0674_0000.nii.gz', 'label': './labelsTr/MSWAL_0674.nii.gz'}, {'image': './imagesTr/MSWAL_0675_0000.nii.gz', 'label': './labelsTr/MSWAL_0675.nii.gz'}, 
{'image': './imagesTr/MSWAL_0676_0000.nii.gz', 'label': './labelsTr/MSWAL_0676.nii.gz'}, {'image': './imagesTr/MSWAL_0677_0000.nii.gz', 'label': './labelsTr/MSWAL_0677.nii.gz'}, {'image': './imagesTr/MSWAL_0679_0000.nii.gz', 'label': './labelsTr/MSWAL_0679.nii.gz'}, {'image': './imagesTr/MSWAL_0680_0000.nii.gz', 'label': './labelsTr/MSWAL_0680.nii.gz'}, {'image': './imagesTr/MSWAL_0681_0000.nii.gz', 'label': './labelsTr/MSWAL_0681.nii.gz'}, {'image': './imagesTr/MSWAL_0682_0000.nii.gz', 'label': './labelsTr/MSWAL_0682.nii.gz'}, {'image': './imagesTr/MSWAL_0685_0000.nii.gz', 'label': './labelsTr/MSWAL_0685.nii.gz'}, {'image': './imagesTr/MSWAL_0686_0000.nii.gz', 'label': './labelsTr/MSWAL_0686.nii.gz'}, {'image': './imagesTr/MSWAL_0687_0000.nii.gz', 'label': './labelsTr/MSWAL_0687.nii.gz'}, {'image': './imagesTr/MSWAL_0688_0000.nii.gz', 'label': './labelsTr/MSWAL_0688.nii.gz'}, {'image': './imagesTr/MSWAL_0690_0000.nii.gz', 'label': './labelsTr/MSWAL_0690.nii.gz'}, {'image': './imagesTr/MSWAL_0692_0000.nii.gz', 'label': './labelsTr/MSWAL_0692.nii.gz'}, {'image': './imagesTr/MSWAL_0693_0000.nii.gz', 'label': './labelsTr/MSWAL_0693.nii.gz'}, {'image': './imagesTr/MSWAL_0694_0000.nii.gz', 'label': './labelsTr/MSWAL_0694.nii.gz'}], 'test': [{'image': './imagesTs/MSWAL_0004_0000.nii.gz', 'label': './labelsTs/MSWAL_0004.nii.gz'}, {'image': './imagesTs/MSWAL_0005_0000.nii.gz', 'label': './labelsTs/MSWAL_0005.nii.gz'}, {'image': './imagesTs/MSWAL_0006_0000.nii.gz', 'label': './labelsTs/MSWAL_0006.nii.gz'}, {'image': './imagesTs/MSWAL_0007_0000.nii.gz', 'label': './labelsTs/MSWAL_0007.nii.gz'}, {'image': './imagesTs/MSWAL_0010_0000.nii.gz', 'label': './labelsTs/MSWAL_0010.nii.gz'}, {'image': './imagesTs/MSWAL_0012_0000.nii.gz', 'label': './labelsTs/MSWAL_0012.nii.gz'}, {'image': './imagesTs/MSWAL_0016_0000.nii.gz', 'label': './labelsTs/MSWAL_0016.nii.gz'}, {'image': './imagesTs/MSWAL_0019_0000.nii.gz', 'label': './labelsTs/MSWAL_0019.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0023_0000.nii.gz', 'label': './labelsTs/MSWAL_0023.nii.gz'}, {'image': './imagesTs/MSWAL_0025_0000.nii.gz', 'label': './labelsTs/MSWAL_0025.nii.gz'}, {'image': './imagesTs/MSWAL_0030_0000.nii.gz', 'label': './labelsTs/MSWAL_0030.nii.gz'}, {'image': './imagesTs/MSWAL_0036_0000.nii.gz', 'label': './labelsTs/MSWAL_0036.nii.gz'}, {'image': './imagesTs/MSWAL_0043_0000.nii.gz', 'label': './labelsTs/MSWAL_0043.nii.gz'}, {'image': './imagesTs/MSWAL_0044_0000.nii.gz', 'label': './labelsTs/MSWAL_0044.nii.gz'}, {'image': './imagesTs/MSWAL_0047_0000.nii.gz', 'label': './labelsTs/MSWAL_0047.nii.gz'}, {'image': './imagesTs/MSWAL_0048_0000.nii.gz', 'label': './labelsTs/MSWAL_0048.nii.gz'}, {'image': './imagesTs/MSWAL_0053_0000.nii.gz', 'label': './labelsTs/MSWAL_0053.nii.gz'}, {'image': './imagesTs/MSWAL_0058_0000.nii.gz', 'label': './labelsTs/MSWAL_0058.nii.gz'}, {'image': './imagesTs/MSWAL_0062_0000.nii.gz', 'label': './labelsTs/MSWAL_0062.nii.gz'}, {'image': './imagesTs/MSWAL_0068_0000.nii.gz', 'label': './labelsTs/MSWAL_0068.nii.gz'}, {'image': './imagesTs/MSWAL_0070_0000.nii.gz', 'label': './labelsTs/MSWAL_0070.nii.gz'}, {'image': './imagesTs/MSWAL_0071_0000.nii.gz', 'label': './labelsTs/MSWAL_0071.nii.gz'}, {'image': './imagesTs/MSWAL_0073_0000.nii.gz', 'label': './labelsTs/MSWAL_0073.nii.gz'}, {'image': './imagesTs/MSWAL_0074_0000.nii.gz', 'label': './labelsTs/MSWAL_0074.nii.gz'}, {'image': './imagesTs/MSWAL_0076_0000.nii.gz', 'label': './labelsTs/MSWAL_0076.nii.gz'}, {'image': './imagesTs/MSWAL_0078_0000.nii.gz', 'label': './labelsTs/MSWAL_0078.nii.gz'}, {'image': './imagesTs/MSWAL_0079_0000.nii.gz', 'label': './labelsTs/MSWAL_0079.nii.gz'}, {'image': './imagesTs/MSWAL_0081_0000.nii.gz', 'label': './labelsTs/MSWAL_0081.nii.gz'}, {'image': './imagesTs/MSWAL_0087_0000.nii.gz', 'label': './labelsTs/MSWAL_0087.nii.gz'}, {'image': './imagesTs/MSWAL_0090_0000.nii.gz', 'label': './labelsTs/MSWAL_0090.nii.gz'}, {'image': './imagesTs/MSWAL_0091_0000.nii.gz', 
'label': './labelsTs/MSWAL_0091.nii.gz'}, {'image': './imagesTs/MSWAL_0097_0000.nii.gz', 'label': './labelsTs/MSWAL_0097.nii.gz'}, {'image': './imagesTs/MSWAL_0100_0000.nii.gz', 'label': './labelsTs/MSWAL_0100.nii.gz'}, {'image': './imagesTs/MSWAL_0107_0000.nii.gz', 'label': './labelsTs/MSWAL_0107.nii.gz'}, {'image': './imagesTs/MSWAL_0115_0000.nii.gz', 'label': './labelsTs/MSWAL_0115.nii.gz'}, {'image': './imagesTs/MSWAL_0116_0000.nii.gz', 'label': './labelsTs/MSWAL_0116.nii.gz'}, {'image': './imagesTs/MSWAL_0118_0000.nii.gz', 'label': './labelsTs/MSWAL_0118.nii.gz'}, {'image': './imagesTs/MSWAL_0121_0000.nii.gz', 'label': './labelsTs/MSWAL_0121.nii.gz'}, {'image': './imagesTs/MSWAL_0123_0000.nii.gz', 'label': './labelsTs/MSWAL_0123.nii.gz'}, {'image': './imagesTs/MSWAL_0131_0000.nii.gz', 'label': './labelsTs/MSWAL_0131.nii.gz'}, {'image': './imagesTs/MSWAL_0135_0000.nii.gz', 'label': './labelsTs/MSWAL_0135.nii.gz'}, {'image': './imagesTs/MSWAL_0137_0000.nii.gz', 'label': './labelsTs/MSWAL_0137.nii.gz'}, {'image': './imagesTs/MSWAL_0144_0000.nii.gz', 'label': './labelsTs/MSWAL_0144.nii.gz'}, {'image': './imagesTs/MSWAL_0146_0000.nii.gz', 'label': './labelsTs/MSWAL_0146.nii.gz'}, {'image': './imagesTs/MSWAL_0153_0000.nii.gz', 'label': './labelsTs/MSWAL_0153.nii.gz'}, {'image': './imagesTs/MSWAL_0154_0000.nii.gz', 'label': './labelsTs/MSWAL_0154.nii.gz'}, {'image': './imagesTs/MSWAL_0155_0000.nii.gz', 'label': './labelsTs/MSWAL_0155.nii.gz'}, {'image': './imagesTs/MSWAL_0156_0000.nii.gz', 'label': './labelsTs/MSWAL_0156.nii.gz'}, {'image': './imagesTs/MSWAL_0158_0000.nii.gz', 'label': './labelsTs/MSWAL_0158.nii.gz'}, {'image': './imagesTs/MSWAL_0160_0000.nii.gz', 'label': './labelsTs/MSWAL_0160.nii.gz'}, {'image': './imagesTs/MSWAL_0161_0000.nii.gz', 'label': './labelsTs/MSWAL_0161.nii.gz'}, {'image': './imagesTs/MSWAL_0164_0000.nii.gz', 'label': './labelsTs/MSWAL_0164.nii.gz'}, {'image': './imagesTs/MSWAL_0181_0000.nii.gz', 'label': './labelsTs/MSWAL_0181.nii.gz'}, 
{'image': './imagesTs/MSWAL_0190_0000.nii.gz', 'label': './labelsTs/MSWAL_0190.nii.gz'}, {'image': './imagesTs/MSWAL_0191_0000.nii.gz', 'label': './labelsTs/MSWAL_0191.nii.gz'}, {'image': './imagesTs/MSWAL_0192_0000.nii.gz', 'label': './labelsTs/MSWAL_0192.nii.gz'}, {'image': './imagesTs/MSWAL_0196_0000.nii.gz', 'label': './labelsTs/MSWAL_0196.nii.gz'}, {'image': './imagesTs/MSWAL_0197_0000.nii.gz', 'label': './labelsTs/MSWAL_0197.nii.gz'}, {'image': './imagesTs/MSWAL_0198_0000.nii.gz', 'label': './labelsTs/MSWAL_0198.nii.gz'}, {'image': './imagesTs/MSWAL_0200_0000.nii.gz', 'label': './labelsTs/MSWAL_0200.nii.gz'}, {'image': './imagesTs/MSWAL_0205_0000.nii.gz', 'label': './labelsTs/MSWAL_0205.nii.gz'}, {'image': './imagesTs/MSWAL_0206_0000.nii.gz', 'label': './labelsTs/MSWAL_0206.nii.gz'}, {'image': './imagesTs/MSWAL_0210_0000.nii.gz', 'label': './labelsTs/MSWAL_0210.nii.gz'}, {'image': './imagesTs/MSWAL_0211_0000.nii.gz', 'label': './labelsTs/MSWAL_0211.nii.gz'}, {'image': './imagesTs/MSWAL_0212_0000.nii.gz', 'label': './labelsTs/MSWAL_0212.nii.gz'}, {'image': './imagesTs/MSWAL_0213_0000.nii.gz', 'label': './labelsTs/MSWAL_0213.nii.gz'}, {'image': './imagesTs/MSWAL_0215_0000.nii.gz', 'label': './labelsTs/MSWAL_0215.nii.gz'}, {'image': './imagesTs/MSWAL_0216_0000.nii.gz', 'label': './labelsTs/MSWAL_0216.nii.gz'}, {'image': './imagesTs/MSWAL_0231_0000.nii.gz', 'label': './labelsTs/MSWAL_0231.nii.gz'}, {'image': './imagesTs/MSWAL_0232_0000.nii.gz', 'label': './labelsTs/MSWAL_0232.nii.gz'}, {'image': './imagesTs/MSWAL_0235_0000.nii.gz', 'label': './labelsTs/MSWAL_0235.nii.gz'}, {'image': './imagesTs/MSWAL_0236_0000.nii.gz', 'label': './labelsTs/MSWAL_0236.nii.gz'}, {'image': './imagesTs/MSWAL_0237_0000.nii.gz', 'label': './labelsTs/MSWAL_0237.nii.gz'}, {'image': './imagesTs/MSWAL_0239_0000.nii.gz', 'label': './labelsTs/MSWAL_0239.nii.gz'}, {'image': './imagesTs/MSWAL_0240_0000.nii.gz', 'label': './labelsTs/MSWAL_0240.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0244_0000.nii.gz', 'label': './labelsTs/MSWAL_0244.nii.gz'}, {'image': './imagesTs/MSWAL_0249_0000.nii.gz', 'label': './labelsTs/MSWAL_0249.nii.gz'}, {'image': './imagesTs/MSWAL_0250_0000.nii.gz', 'label': './labelsTs/MSWAL_0250.nii.gz'}, {'image': './imagesTs/MSWAL_0266_0000.nii.gz', 'label': './labelsTs/MSWAL_0266.nii.gz'}, {'image': './imagesTs/MSWAL_0268_0000.nii.gz', 'label': './labelsTs/MSWAL_0268.nii.gz'}, {'image': './imagesTs/MSWAL_0269_0000.nii.gz', 'label': './labelsTs/MSWAL_0269.nii.gz'}, {'image': './imagesTs/MSWAL_0280_0000.nii.gz', 'label': './labelsTs/MSWAL_0280.nii.gz'}, {'image': './imagesTs/MSWAL_0286_0000.nii.gz', 'label': './labelsTs/MSWAL_0286.nii.gz'}, {'image': './imagesTs/MSWAL_0287_0000.nii.gz', 'label': './labelsTs/MSWAL_0287.nii.gz'}, {'image': './imagesTs/MSWAL_0291_0000.nii.gz', 'label': './labelsTs/MSWAL_0291.nii.gz'}, {'image': './imagesTs/MSWAL_0292_0000.nii.gz', 'label': './labelsTs/MSWAL_0292.nii.gz'}, {'image': './imagesTs/MSWAL_0294_0000.nii.gz', 'label': './labelsTs/MSWAL_0294.nii.gz'}, {'image': './imagesTs/MSWAL_0295_0000.nii.gz', 'label': './labelsTs/MSWAL_0295.nii.gz'}, {'image': './imagesTs/MSWAL_0298_0000.nii.gz', 'label': './labelsTs/MSWAL_0298.nii.gz'}, {'image': './imagesTs/MSWAL_0299_0000.nii.gz', 'label': './labelsTs/MSWAL_0299.nii.gz'}, {'image': './imagesTs/MSWAL_0300_0000.nii.gz', 'label': './labelsTs/MSWAL_0300.nii.gz'}, {'image': './imagesTs/MSWAL_0304_0000.nii.gz', 'label': './labelsTs/MSWAL_0304.nii.gz'}, {'image': './imagesTs/MSWAL_0305_0000.nii.gz', 'label': './labelsTs/MSWAL_0305.nii.gz'}, {'image': './imagesTs/MSWAL_0309_0000.nii.gz', 'label': './labelsTs/MSWAL_0309.nii.gz'}, {'image': './imagesTs/MSWAL_0310_0000.nii.gz', 'label': './labelsTs/MSWAL_0310.nii.gz'}, {'image': './imagesTs/MSWAL_0315_0000.nii.gz', 'label': './labelsTs/MSWAL_0315.nii.gz'}, {'image': './imagesTs/MSWAL_0319_0000.nii.gz', 'label': './labelsTs/MSWAL_0319.nii.gz'}, {'image': './imagesTs/MSWAL_0321_0000.nii.gz', 
'label': './labelsTs/MSWAL_0321.nii.gz'}, {'image': './imagesTs/MSWAL_0322_0000.nii.gz', 'label': './labelsTs/MSWAL_0322.nii.gz'}, {'image': './imagesTs/MSWAL_0325_0000.nii.gz', 'label': './labelsTs/MSWAL_0325.nii.gz'}, {'image': './imagesTs/MSWAL_0329_0000.nii.gz', 'label': './labelsTs/MSWAL_0329.nii.gz'}, {'image': './imagesTs/MSWAL_0339_0000.nii.gz', 'label': './labelsTs/MSWAL_0339.nii.gz'}, {'image': './imagesTs/MSWAL_0340_0000.nii.gz', 'label': './labelsTs/MSWAL_0340.nii.gz'}, {'image': './imagesTs/MSWAL_0347_0000.nii.gz', 'label': './labelsTs/MSWAL_0347.nii.gz'}, {'image': './imagesTs/MSWAL_0349_0000.nii.gz', 'label': './labelsTs/MSWAL_0349.nii.gz'}, {'image': './imagesTs/MSWAL_0350_0000.nii.gz', 'label': './labelsTs/MSWAL_0350.nii.gz'}, {'image': './imagesTs/MSWAL_0351_0000.nii.gz', 'label': './labelsTs/MSWAL_0351.nii.gz'}, {'image': './imagesTs/MSWAL_0352_0000.nii.gz', 'label': './labelsTs/MSWAL_0352.nii.gz'}, {'image': './imagesTs/MSWAL_0358_0000.nii.gz', 'label': './labelsTs/MSWAL_0358.nii.gz'}, {'image': './imagesTs/MSWAL_0359_0000.nii.gz', 'label': './labelsTs/MSWAL_0359.nii.gz'}, {'image': './imagesTs/MSWAL_0364_0000.nii.gz', 'label': './labelsTs/MSWAL_0364.nii.gz'}, {'image': './imagesTs/MSWAL_0367_0000.nii.gz', 'label': './labelsTs/MSWAL_0367.nii.gz'}, {'image': './imagesTs/MSWAL_0368_0000.nii.gz', 'label': './labelsTs/MSWAL_0368.nii.gz'}, {'image': './imagesTs/MSWAL_0371_0000.nii.gz', 'label': './labelsTs/MSWAL_0371.nii.gz'}, {'image': './imagesTs/MSWAL_0372_0000.nii.gz', 'label': './labelsTs/MSWAL_0372.nii.gz'}, {'image': './imagesTs/MSWAL_0377_0000.nii.gz', 'label': './labelsTs/MSWAL_0377.nii.gz'}, {'image': './imagesTs/MSWAL_0383_0000.nii.gz', 'label': './labelsTs/MSWAL_0383.nii.gz'}, {'image': './imagesTs/MSWAL_0384_0000.nii.gz', 'label': './labelsTs/MSWAL_0384.nii.gz'}, {'image': './imagesTs/MSWAL_0385_0000.nii.gz', 'label': './labelsTs/MSWAL_0385.nii.gz'}, {'image': './imagesTs/MSWAL_0386_0000.nii.gz', 'label': './labelsTs/MSWAL_0386.nii.gz'}, 
{'image': './imagesTs/MSWAL_0394_0000.nii.gz', 'label': './labelsTs/MSWAL_0394.nii.gz'}, {'image': './imagesTs/MSWAL_0395_0000.nii.gz', 'label': './labelsTs/MSWAL_0395.nii.gz'}, {'image': './imagesTs/MSWAL_0396_0000.nii.gz', 'label': './labelsTs/MSWAL_0396.nii.gz'}, {'image': './imagesTs/MSWAL_0401_0000.nii.gz', 'label': './labelsTs/MSWAL_0401.nii.gz'}, {'image': './imagesTs/MSWAL_0404_0000.nii.gz', 'label': './labelsTs/MSWAL_0404.nii.gz'}, {'image': './imagesTs/MSWAL_0405_0000.nii.gz', 'label': './labelsTs/MSWAL_0405.nii.gz'}, {'image': './imagesTs/MSWAL_0406_0000.nii.gz', 'label': './labelsTs/MSWAL_0406.nii.gz'}, {'image': './imagesTs/MSWAL_0408_0000.nii.gz', 'label': './labelsTs/MSWAL_0408.nii.gz'}, {'image': './imagesTs/MSWAL_0413_0000.nii.gz', 'label': './labelsTs/MSWAL_0413.nii.gz'}, {'image': './imagesTs/MSWAL_0424_0000.nii.gz', 'label': './labelsTs/MSWAL_0424.nii.gz'}, {'image': './imagesTs/MSWAL_0433_0000.nii.gz', 'label': './labelsTs/MSWAL_0433.nii.gz'}, {'image': './imagesTs/MSWAL_0441_0000.nii.gz', 'label': './labelsTs/MSWAL_0441.nii.gz'}, {'image': './imagesTs/MSWAL_0443_0000.nii.gz', 'label': './labelsTs/MSWAL_0443.nii.gz'}, {'image': './imagesTs/MSWAL_0444_0000.nii.gz', 'label': './labelsTs/MSWAL_0444.nii.gz'}, {'image': './imagesTs/MSWAL_0445_0000.nii.gz', 'label': './labelsTs/MSWAL_0445.nii.gz'}, {'image': './imagesTs/MSWAL_0448_0000.nii.gz', 'label': './labelsTs/MSWAL_0448.nii.gz'}, {'image': './imagesTs/MSWAL_0449_0000.nii.gz', 'label': './labelsTs/MSWAL_0449.nii.gz'}, {'image': './imagesTs/MSWAL_0450_0000.nii.gz', 'label': './labelsTs/MSWAL_0450.nii.gz'}, {'image': './imagesTs/MSWAL_0451_0000.nii.gz', 'label': './labelsTs/MSWAL_0451.nii.gz'}, {'image': './imagesTs/MSWAL_0454_0000.nii.gz', 'label': './labelsTs/MSWAL_0454.nii.gz'}, {'image': './imagesTs/MSWAL_0456_0000.nii.gz', 'label': './labelsTs/MSWAL_0456.nii.gz'}, {'image': './imagesTs/MSWAL_0458_0000.nii.gz', 'label': './labelsTs/MSWAL_0458.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0459_0000.nii.gz', 'label': './labelsTs/MSWAL_0459.nii.gz'}, {'image': './imagesTs/MSWAL_0462_0000.nii.gz', 'label': './labelsTs/MSWAL_0462.nii.gz'}, {'image': './imagesTs/MSWAL_0467_0000.nii.gz', 'label': './labelsTs/MSWAL_0467.nii.gz'}, {'image': './imagesTs/MSWAL_0469_0000.nii.gz', 'label': './labelsTs/MSWAL_0469.nii.gz'}, {'image': './imagesTs/MSWAL_0472_0000.nii.gz', 'label': './labelsTs/MSWAL_0472.nii.gz'}, {'image': './imagesTs/MSWAL_0478_0000.nii.gz', 'label': './labelsTs/MSWAL_0478.nii.gz'}, {'image': './imagesTs/MSWAL_0481_0000.nii.gz', 'label': './labelsTs/MSWAL_0481.nii.gz'}, {'image': './imagesTs/MSWAL_0494_0000.nii.gz', 'label': './labelsTs/MSWAL_0494.nii.gz'}, {'image': './imagesTs/MSWAL_0496_0000.nii.gz', 'label': './labelsTs/MSWAL_0496.nii.gz'}, {'image': './imagesTs/MSWAL_0499_0000.nii.gz', 'label': './labelsTs/MSWAL_0499.nii.gz'}, {'image': './imagesTs/MSWAL_0502_0000.nii.gz', 'label': './labelsTs/MSWAL_0502.nii.gz'}, {'image': './imagesTs/MSWAL_0503_0000.nii.gz', 'label': './labelsTs/MSWAL_0503.nii.gz'}, {'image': './imagesTs/MSWAL_0511_0000.nii.gz', 'label': './labelsTs/MSWAL_0511.nii.gz'}, {'image': './imagesTs/MSWAL_0513_0000.nii.gz', 'label': './labelsTs/MSWAL_0513.nii.gz'}, {'image': './imagesTs/MSWAL_0514_0000.nii.gz', 'label': './labelsTs/MSWAL_0514.nii.gz'}, {'image': './imagesTs/MSWAL_0515_0000.nii.gz', 'label': './labelsTs/MSWAL_0515.nii.gz'}, {'image': './imagesTs/MSWAL_0517_0000.nii.gz', 'label': './labelsTs/MSWAL_0517.nii.gz'}, {'image': './imagesTs/MSWAL_0520_0000.nii.gz', 'label': './labelsTs/MSWAL_0520.nii.gz'}, {'image': './imagesTs/MSWAL_0525_0000.nii.gz', 'label': './labelsTs/MSWAL_0525.nii.gz'}, {'image': './imagesTs/MSWAL_0528_0000.nii.gz', 'label': './labelsTs/MSWAL_0528.nii.gz'}, {'image': './imagesTs/MSWAL_0529_0000.nii.gz', 'label': './labelsTs/MSWAL_0529.nii.gz'}, {'image': './imagesTs/MSWAL_0532_0000.nii.gz', 'label': './labelsTs/MSWAL_0532.nii.gz'}, {'image': './imagesTs/MSWAL_0533_0000.nii.gz', 
'label': './labelsTs/MSWAL_0533.nii.gz'}, {'image': './imagesTs/MSWAL_0537_0000.nii.gz', 'label': './labelsTs/MSWAL_0537.nii.gz'}, {'image': './imagesTs/MSWAL_0541_0000.nii.gz', 'label': './labelsTs/MSWAL_0541.nii.gz'}, {'image': './imagesTs/MSWAL_0543_0000.nii.gz', 'label': './labelsTs/MSWAL_0543.nii.gz'}, {'image': './imagesTs/MSWAL_0560_0000.nii.gz', 'label': './labelsTs/MSWAL_0560.nii.gz'}, {'image': './imagesTs/MSWAL_0565_0000.nii.gz', 'label': './labelsTs/MSWAL_0565.nii.gz'}, {'image': './imagesTs/MSWAL_0569_0000.nii.gz', 'label': './labelsTs/MSWAL_0569.nii.gz'}, {'image': './imagesTs/MSWAL_0570_0000.nii.gz', 'label': './labelsTs/MSWAL_0570.nii.gz'}, {'image': './imagesTs/MSWAL_0572_0000.nii.gz', 'label': './labelsTs/MSWAL_0572.nii.gz'}, {'image': './imagesTs/MSWAL_0576_0000.nii.gz', 'label': './labelsTs/MSWAL_0576.nii.gz'}, {'image': './imagesTs/MSWAL_0585_0000.nii.gz', 'label': './labelsTs/MSWAL_0585.nii.gz'}, {'image': './imagesTs/MSWAL_0587_0000.nii.gz', 'label': './labelsTs/MSWAL_0587.nii.gz'}, {'image': './imagesTs/MSWAL_0588_0000.nii.gz', 'label': './labelsTs/MSWAL_0588.nii.gz'}, {'image': './imagesTs/MSWAL_0589_0000.nii.gz', 'label': './labelsTs/MSWAL_0589.nii.gz'}, {'image': './imagesTs/MSWAL_0594_0000.nii.gz', 'label': './labelsTs/MSWAL_0594.nii.gz'}, {'image': './imagesTs/MSWAL_0603_0000.nii.gz', 'label': './labelsTs/MSWAL_0603.nii.gz'}, {'image': './imagesTs/MSWAL_0606_0000.nii.gz', 'label': './labelsTs/MSWAL_0606.nii.gz'}, {'image': './imagesTs/MSWAL_0607_0000.nii.gz', 'label': './labelsTs/MSWAL_0607.nii.gz'}, {'image': './imagesTs/MSWAL_0609_0000.nii.gz', 'label': './labelsTs/MSWAL_0609.nii.gz'}, {'image': './imagesTs/MSWAL_0610_0000.nii.gz', 'label': './labelsTs/MSWAL_0610.nii.gz'}, {'image': './imagesTs/MSWAL_0611_0000.nii.gz', 'label': './labelsTs/MSWAL_0611.nii.gz'}, {'image': './imagesTs/MSWAL_0613_0000.nii.gz', 'label': './labelsTs/MSWAL_0613.nii.gz'}, {'image': './imagesTs/MSWAL_0618_0000.nii.gz', 'label': './labelsTs/MSWAL_0618.nii.gz'}, 
{'image': './imagesTs/MSWAL_0619_0000.nii.gz', 'label': './labelsTs/MSWAL_0619.nii.gz'}, {'image': './imagesTs/MSWAL_0620_0000.nii.gz', 'label': './labelsTs/MSWAL_0620.nii.gz'}, {'image': './imagesTs/MSWAL_0622_0000.nii.gz', 'label': './labelsTs/MSWAL_0622.nii.gz'}, {'image': './imagesTs/MSWAL_0624_0000.nii.gz', 'label': './labelsTs/MSWAL_0624.nii.gz'}, {'image': './imagesTs/MSWAL_0631_0000.nii.gz', 'label': './labelsTs/MSWAL_0631.nii.gz'}, {'image': './imagesTs/MSWAL_0633_0000.nii.gz', 'label': './labelsTs/MSWAL_0633.nii.gz'}, {'image': './imagesTs/MSWAL_0634_0000.nii.gz', 'label': './labelsTs/MSWAL_0634.nii.gz'}, {'image': './imagesTs/MSWAL_0637_0000.nii.gz', 'label': './labelsTs/MSWAL_0637.nii.gz'}, {'image': './imagesTs/MSWAL_0639_0000.nii.gz', 'label': './labelsTs/MSWAL_0639.nii.gz'}, {'image': './imagesTs/MSWAL_0642_0000.nii.gz', 'label': './labelsTs/MSWAL_0642.nii.gz'}, {'image': './imagesTs/MSWAL_0645_0000.nii.gz', 'label': './labelsTs/MSWAL_0645.nii.gz'}, {'image': './imagesTs/MSWAL_0647_0000.nii.gz', 'label': './labelsTs/MSWAL_0647.nii.gz'}, {'image': './imagesTs/MSWAL_0652_0000.nii.gz', 'label': './labelsTs/MSWAL_0652.nii.gz'}, {'image': './imagesTs/MSWAL_0657_0000.nii.gz', 'label': './labelsTs/MSWAL_0657.nii.gz'}, {'image': './imagesTs/MSWAL_0659_0000.nii.gz', 'label': './labelsTs/MSWAL_0659.nii.gz'}, {'image': './imagesTs/MSWAL_0664_0000.nii.gz', 'label': './labelsTs/MSWAL_0664.nii.gz'}, {'image': './imagesTs/MSWAL_0665_0000.nii.gz', 'label': './labelsTs/MSWAL_0665.nii.gz'}, {'image': './imagesTs/MSWAL_0672_0000.nii.gz', 'label': './labelsTs/MSWAL_0672.nii.gz'}, {'image': './imagesTs/MSWAL_0678_0000.nii.gz', 'label': './labelsTs/MSWAL_0678.nii.gz'}, {'image': './imagesTs/MSWAL_0683_0000.nii.gz', 'label': './labelsTs/MSWAL_0683.nii.gz'}, {'image': './imagesTs/MSWAL_0684_0000.nii.gz', 'label': './labelsTs/MSWAL_0684.nii.gz'}, {'image': './imagesTs/MSWAL_0689_0000.nii.gz', 'label': './labelsTs/MSWAL_0689.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0691_0000.nii.gz', 'label': './labelsTs/MSWAL_0691.nii.gz'}]}, 'unpack_dataset': True, 'device': device(type='cuda')}", + "network": "OptimizedModule", + "num_epochs": "1000", + "num_input_channels": "1", + "num_iterations_per_epoch": "250", + "num_val_iterations_per_epoch": "50", + "optimizer": "SGD (\nParameter Group 0\n dampening: 0\n differentiable: False\n foreach: None\n fused: None\n initial_lr: 0.01\n lr: 0.01\n maximize: False\n momentum: 0.99\n nesterov: True\n weight_decay: 3e-05\n)", + "output_folder": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0", + "output_folder_base": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres", + "oversample_foreground_percent": "0.33", + "plans_manager": "{'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'configurations': {'2d': {'data_identifier': 'nnUNetPlans_2d', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 35, 'patch_size': [512, 512], 'median_image_size_in_voxels': [512.0, 512.0], 'spacing': [0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 
'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 8, 'features_per_stage': [32, 64, 128, 256, 512, 512, 512, 512], 'conv_op': 'torch.nn.modules.conv.Conv2d', 'kernel_sizes': [[3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3]], 'strides': [[1, 1], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm2d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_lowres': {'data_identifier': 'nnUNetResEncUNetLPlans_3d_lowres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [190, 381, 381], 'spacing': [1.6798954741801528, 1.0079372845080916, 1.0079372845080916], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 
1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': False, 'next_stage': '3d_cascade_fullres'}, '3d_fullres': {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 
'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_cascade_fullres': {'inherits_from': '3d_fullres', 'previous_stage': '3d_lowres'}}, 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}}", + "preprocessed_dataset_folder": "/data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/nnUNetPlans_3d_fullres", + "preprocessed_dataset_folder_base": "/data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL", + "save_every": "50", + "torch_version": "2.5.0+cu121", + "unpack_dataset": "True", + "was_initialized": "True", + "weight_decay": "3e-05" +} \ No newline at end of file diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/progress.png b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/progress.png new file mode 100644 index 0000000000000000000000000000000000000000..988a0f3d37f3a22e3db58e25ab50843d6fe49dd3 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/progress.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:45c6ab0cf5dd9f78219bbf470eb5aed3788dd5b3425ca2d0b41dae3ca7c2aac0 +size 1204396 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/training_log_2026_4_8_15_26_21.txt b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/training_log_2026_4_8_15_26_21.txt new file mode 100644 index 0000000000000000000000000000000000000000..9b7ae01afeb0177e222ed3daebdaf42c4e53eeb5 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/training_log_2026_4_8_15_26_21.txt @@ -0,0 +1,10 
@@ + +####################################################################### +Please cite the following paper when using nnU-Net: +Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. Nature methods, 18(2), 203-211. +####################################################################### + +2026-04-08 15:26:21.098278: do_dummy_2d_data_aug: False +2026-04-08 15:26:21.143579: Creating new 5-fold cross-validation split... +2026-04-08 15:26:21.152699: Desired fold for training: 0 +2026-04-08 15:26:21.155444: This split has 387 training and 97 validation cases. diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/training_log_2026_4_8_15_43_26.txt b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/training_log_2026_4_8_15_43_26.txt new file mode 100644 index 0000000000000000000000000000000000000000..2a62afff8ecb3a179738d43fe15dae5588892d14 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_0/training_log_2026_4_8_15_43_26.txt @@ -0,0 +1,7364 @@ + +####################################################################### +Please cite the following paper when using nnU-Net: +Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. Nature methods, 18(2), 203-211. +####################################################################### + +2026-04-08 15:43:26.163102: do_dummy_2d_data_aug: False +2026-04-08 15:43:26.224650: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-08 15:43:26.228125: The split file contains 5 splits. +2026-04-08 15:43:26.229509: Desired fold for training: 0 +2026-04-08 15:43:26.230805: This split has 387 training and 97 validation cases. 
+2026-04-08 15:43:36.784825: Using torch.compile... + +This is the configuration used by this training: +Configuration name: 3d_fullres + {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True} + +These are the global plan.json settings: + {'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 
512, 512], 'image_reader_writer': 'SimpleITKIO', 'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}} + +2026-04-08 15:43:38.465393: unpacking dataset... +2026-04-08 15:46:18.147562: unpacking done... +2026-04-08 15:46:18.169112: Unable to plot network architecture: nnUNet_compile is enabled! +2026-04-08 15:46:18.227520: +2026-04-08 15:46:18.229061: Epoch 0 +2026-04-08 15:46:18.230612: Current learning rate: 0.01 +2026-04-08 15:50:19.809936: train_loss 0.205 +2026-04-08 15:50:19.819860: val_loss 0.0628 +2026-04-08 15:50:19.821753: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 15:50:19.823586: Epoch time: 241.58 s +2026-04-08 15:50:19.825661: Yayy! New best EMA pseudo Dice: 0.0 +2026-04-08 15:50:22.600119: +2026-04-08 15:50:22.601827: Epoch 1 +2026-04-08 15:50:22.603858: Current learning rate: 0.00999 +2026-04-08 15:52:02.889454: train_loss 0.0693 +2026-04-08 15:52:02.898987: val_loss 0.0913 +2026-04-08 15:52:02.900627: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 15:52:02.902768: Epoch time: 100.29 s +2026-04-08 15:52:03.921074: +2026-04-08 15:52:03.922636: Epoch 2 +2026-04-08 15:52:03.924007: Current learning rate: 0.00998 +2026-04-08 15:53:44.275629: train_loss 0.0707 +2026-04-08 15:53:44.282332: val_loss 0.0502 +2026-04-08 15:53:44.284968: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 15:53:44.287132: Epoch time: 100.36 s +2026-04-08 15:53:45.370403: +2026-04-08 15:53:45.376404: Epoch 3 +2026-04-08 15:53:45.379536: Current learning rate: 0.00997 +2026-04-08 15:55:26.165115: train_loss 0.057 +2026-04-08 15:55:26.171821: val_loss 0.0449 +2026-04-08 15:55:26.173749: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 
15:55:26.175970: Epoch time: 100.8 s +2026-04-08 15:55:27.233744: +2026-04-08 15:55:27.235650: Epoch 4 +2026-04-08 15:55:27.237808: Current learning rate: 0.00996 +2026-04-08 15:57:09.519225: train_loss 0.0531 +2026-04-08 15:57:09.527230: val_loss 0.0659 +2026-04-08 15:57:09.530177: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 15:57:09.532659: Epoch time: 102.29 s +2026-04-08 15:57:10.643427: +2026-04-08 15:57:10.645480: Epoch 5 +2026-04-08 15:57:10.647208: Current learning rate: 0.00995 +2026-04-08 15:58:51.608056: train_loss 0.0518 +2026-04-08 15:58:51.613941: val_loss 0.0617 +2026-04-08 15:58:51.615789: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 15:58:51.617555: Epoch time: 100.97 s +2026-04-08 15:58:52.633038: +2026-04-08 15:58:52.635169: Epoch 6 +2026-04-08 15:58:52.638523: Current learning rate: 0.00995 +2026-04-08 16:00:33.513490: train_loss 0.0417 +2026-04-08 16:00:33.520413: val_loss 0.0553 +2026-04-08 16:00:33.522333: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:00:33.525342: Epoch time: 100.88 s +2026-04-08 16:00:34.585019: +2026-04-08 16:00:34.589948: Epoch 7 +2026-04-08 16:00:34.605192: Current learning rate: 0.00994 +2026-04-08 16:02:15.362061: train_loss 0.0526 +2026-04-08 16:02:15.370011: val_loss 0.0366 +2026-04-08 16:02:15.371991: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:02:15.374101: Epoch time: 100.78 s +2026-04-08 16:02:16.442625: +2026-04-08 16:02:16.444471: Epoch 8 +2026-04-08 16:02:16.447155: Current learning rate: 0.00993 +2026-04-08 16:03:57.116926: train_loss 0.0485 +2026-04-08 16:03:57.126067: val_loss 0.0555 +2026-04-08 16:03:57.128035: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:03:57.130636: Epoch time: 100.68 s +2026-04-08 16:03:58.201278: +2026-04-08 16:03:58.210974: Epoch 9 +2026-04-08 16:03:58.213166: Current learning rate: 0.00992 +2026-04-08 16:05:39.675490: train_loss 0.0442 +2026-04-08 16:05:39.685900: val_loss 0.0714 +2026-04-08 
16:05:39.689674: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:05:39.692913: Epoch time: 101.48 s +2026-04-08 16:05:40.708256: +2026-04-08 16:05:40.709917: Epoch 10 +2026-04-08 16:05:40.711603: Current learning rate: 0.00991 +2026-04-08 16:07:21.833243: train_loss 0.0611 +2026-04-08 16:07:21.841047: val_loss 0.0504 +2026-04-08 16:07:21.842968: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:07:21.845817: Epoch time: 101.13 s +2026-04-08 16:07:22.892314: +2026-04-08 16:07:22.895626: Epoch 11 +2026-04-08 16:07:22.899025: Current learning rate: 0.0099 +2026-04-08 16:09:05.196230: train_loss 0.0521 +2026-04-08 16:09:05.205541: val_loss 0.0461 +2026-04-08 16:09:05.208588: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:09:05.212707: Epoch time: 102.31 s +2026-04-08 16:09:06.230799: +2026-04-08 16:09:06.232853: Epoch 12 +2026-04-08 16:09:06.234739: Current learning rate: 0.00989 +2026-04-08 16:10:49.347576: train_loss 0.0552 +2026-04-08 16:10:49.365734: val_loss 0.0711 +2026-04-08 16:10:49.368710: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:10:49.374022: Epoch time: 103.12 s +2026-04-08 16:10:50.414410: +2026-04-08 16:10:50.419227: Epoch 13 +2026-04-08 16:10:50.424306: Current learning rate: 0.00988 +2026-04-08 16:12:31.896683: train_loss 0.0551 +2026-04-08 16:12:31.904126: val_loss 0.0249 +2026-04-08 16:12:31.906716: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:12:31.909690: Epoch time: 101.49 s +2026-04-08 16:12:32.962350: +2026-04-08 16:12:32.964669: Epoch 14 +2026-04-08 16:12:32.966596: Current learning rate: 0.00987 +2026-04-08 16:14:16.250224: train_loss 0.0517 +2026-04-08 16:14:16.258598: val_loss 0.0591 +2026-04-08 16:14:16.261332: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:14:16.263278: Epoch time: 103.29 s +2026-04-08 16:14:17.346020: +2026-04-08 16:14:17.349616: Epoch 15 +2026-04-08 16:14:17.353153: Current learning rate: 0.00986 +2026-04-08 16:15:59.255392: 
train_loss 0.0514 +2026-04-08 16:15:59.262652: val_loss 0.0315 +2026-04-08 16:15:59.265444: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:15:59.268672: Epoch time: 101.91 s +2026-04-08 16:16:00.361626: +2026-04-08 16:16:00.364668: Epoch 16 +2026-04-08 16:16:00.366720: Current learning rate: 0.00986 +2026-04-08 16:17:42.792712: train_loss 0.0516 +2026-04-08 16:17:42.798587: val_loss 0.0489 +2026-04-08 16:17:42.800540: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:17:42.802995: Epoch time: 102.43 s +2026-04-08 16:17:43.900693: +2026-04-08 16:17:43.902285: Epoch 17 +2026-04-08 16:17:43.903783: Current learning rate: 0.00985 +2026-04-08 16:19:26.054237: train_loss 0.0508 +2026-04-08 16:19:26.062818: val_loss 0.0594 +2026-04-08 16:19:26.065729: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:19:26.069127: Epoch time: 102.16 s +2026-04-08 16:19:28.386492: +2026-04-08 16:19:28.389152: Epoch 18 +2026-04-08 16:19:28.391647: Current learning rate: 0.00984 +2026-04-08 16:21:13.151056: train_loss 0.0405 +2026-04-08 16:21:13.160015: val_loss 0.0258 +2026-04-08 16:21:13.162508: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:21:13.166212: Epoch time: 104.77 s +2026-04-08 16:21:14.258193: +2026-04-08 16:21:14.260624: Epoch 19 +2026-04-08 16:21:14.264352: Current learning rate: 0.00983 +2026-04-08 16:23:00.007244: train_loss 0.046 +2026-04-08 16:23:00.018238: val_loss 0.0608 +2026-04-08 16:23:00.020254: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:23:00.022694: Epoch time: 105.75 s +2026-04-08 16:23:01.098995: +2026-04-08 16:23:01.101423: Epoch 20 +2026-04-08 16:23:01.103847: Current learning rate: 0.00982 +2026-04-08 16:24:44.754318: train_loss 0.0542 +2026-04-08 16:24:44.776266: val_loss 0.0374 +2026-04-08 16:24:44.785036: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:24:44.800312: Epoch time: 103.66 s +2026-04-08 16:24:45.899694: +2026-04-08 16:24:45.904335: Epoch 21 +2026-04-08 
16:24:45.909817: Current learning rate: 0.00981 +2026-04-08 16:26:28.749631: train_loss 0.0465 +2026-04-08 16:26:28.759333: val_loss 0.0389 +2026-04-08 16:26:28.762600: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:26:28.766630: Epoch time: 102.85 s +2026-04-08 16:26:29.794530: +2026-04-08 16:26:29.796950: Epoch 22 +2026-04-08 16:26:29.799083: Current learning rate: 0.0098 +2026-04-08 16:28:15.345833: train_loss 0.0572 +2026-04-08 16:28:15.356926: val_loss 0.0329 +2026-04-08 16:28:15.359098: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:28:15.362140: Epoch time: 105.55 s +2026-04-08 16:28:16.400555: +2026-04-08 16:28:16.402810: Epoch 23 +2026-04-08 16:28:16.405114: Current learning rate: 0.00979 +2026-04-08 16:30:00.079214: train_loss 0.0412 +2026-04-08 16:30:00.088068: val_loss 0.062 +2026-04-08 16:30:00.090744: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:30:00.094065: Epoch time: 103.68 s +2026-04-08 16:30:01.098631: +2026-04-08 16:30:01.100892: Epoch 24 +2026-04-08 16:30:01.103081: Current learning rate: 0.00978 +2026-04-08 16:31:45.353158: train_loss 0.0387 +2026-04-08 16:31:45.360665: val_loss 0.0419 +2026-04-08 16:31:45.362422: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:31:45.365227: Epoch time: 104.26 s +2026-04-08 16:31:46.389310: +2026-04-08 16:31:46.391079: Epoch 25 +2026-04-08 16:31:46.393190: Current learning rate: 0.00977 +2026-04-08 16:33:29.269307: train_loss 0.0519 +2026-04-08 16:33:29.278076: val_loss 0.026 +2026-04-08 16:33:29.280294: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:33:29.285376: Epoch time: 102.88 s +2026-04-08 16:33:30.306954: +2026-04-08 16:33:30.310202: Epoch 26 +2026-04-08 16:33:30.313152: Current learning rate: 0.00977 +2026-04-08 16:35:18.055636: train_loss 0.0406 +2026-04-08 16:35:18.064755: val_loss 0.038 +2026-04-08 16:35:18.067157: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:35:18.069522: Epoch time: 107.75 s 
+2026-04-08 16:35:19.089874: +2026-04-08 16:35:19.094473: Epoch 27 +2026-04-08 16:35:19.097711: Current learning rate: 0.00976 +2026-04-08 16:37:01.463831: train_loss 0.0412 +2026-04-08 16:37:01.472002: val_loss 0.0252 +2026-04-08 16:37:01.474420: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:37:01.477186: Epoch time: 102.38 s +2026-04-08 16:37:02.496564: +2026-04-08 16:37:02.499766: Epoch 28 +2026-04-08 16:37:02.503251: Current learning rate: 0.00975 +2026-04-08 16:38:44.921684: train_loss 0.0405 +2026-04-08 16:38:44.927925: val_loss 0.0371 +2026-04-08 16:38:44.930371: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:38:44.932516: Epoch time: 102.43 s +2026-04-08 16:38:45.970824: +2026-04-08 16:38:45.973207: Epoch 29 +2026-04-08 16:38:45.975747: Current learning rate: 0.00974 +2026-04-08 16:40:28.568464: train_loss 0.0365 +2026-04-08 16:40:28.581607: val_loss 0.0297 +2026-04-08 16:40:28.584810: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:40:28.587486: Epoch time: 102.6 s +2026-04-08 16:40:29.637922: +2026-04-08 16:40:29.653003: Epoch 30 +2026-04-08 16:40:29.655064: Current learning rate: 0.00973 +2026-04-08 16:42:11.793077: train_loss 0.0368 +2026-04-08 16:42:11.800225: val_loss 0.0747 +2026-04-08 16:42:11.802166: Pseudo dice [0.0, 0.0, 0.0001, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:42:11.805259: Epoch time: 102.16 s +2026-04-08 16:42:11.807339: Yayy! 
New best EMA pseudo Dice: 0.0 +2026-04-08 16:42:14.645254: +2026-04-08 16:42:14.648011: Epoch 31 +2026-04-08 16:42:14.649804: Current learning rate: 0.00972 +2026-04-08 16:43:57.792114: train_loss 0.0491 +2026-04-08 16:43:57.800450: val_loss 0.0325 +2026-04-08 16:43:57.803191: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:43:57.807325: Epoch time: 103.15 s +2026-04-08 16:43:58.859165: +2026-04-08 16:43:58.861307: Epoch 32 +2026-04-08 16:43:58.863301: Current learning rate: 0.00971 +2026-04-08 16:45:41.466569: train_loss 0.0392 +2026-04-08 16:45:41.474079: val_loss 0.029 +2026-04-08 16:45:41.476560: Pseudo dice [0.0, 0.0, 0.0017, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:45:41.479124: Epoch time: 102.61 s +2026-04-08 16:45:41.481414: Yayy! New best EMA pseudo Dice: 0.0 +2026-04-08 16:45:44.365162: +2026-04-08 16:45:44.369887: Epoch 33 +2026-04-08 16:45:44.373842: Current learning rate: 0.0097 +2026-04-08 16:47:26.888544: train_loss 0.0424 +2026-04-08 16:47:26.898399: val_loss 0.0478 +2026-04-08 16:47:26.900728: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:47:26.904091: Epoch time: 102.53 s +2026-04-08 16:47:27.992554: +2026-04-08 16:47:27.994841: Epoch 34 +2026-04-08 16:47:27.996915: Current learning rate: 0.00969 +2026-04-08 16:49:10.033239: train_loss 0.036 +2026-04-08 16:49:10.038802: val_loss 0.0243 +2026-04-08 16:49:10.041110: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:49:10.043932: Epoch time: 102.04 s +2026-04-08 16:49:11.111479: +2026-04-08 16:49:11.113935: Epoch 35 +2026-04-08 16:49:11.116241: Current learning rate: 0.00968 +2026-04-08 16:50:53.844751: train_loss 0.0415 +2026-04-08 16:50:53.851571: val_loss 0.037 +2026-04-08 16:50:53.853810: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:50:53.856929: Epoch time: 102.74 s +2026-04-08 16:50:54.954095: +2026-04-08 16:50:54.959716: Epoch 36 +2026-04-08 16:50:54.962556: Current learning rate: 0.00968 +2026-04-08 16:52:38.020869: train_loss 0.0386 
+2026-04-08 16:52:38.029095: val_loss 0.0287 +2026-04-08 16:52:38.031542: Pseudo dice [0.0, 0.0, 0.0005, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:52:38.037341: Epoch time: 103.07 s +2026-04-08 16:52:39.118583: +2026-04-08 16:52:39.121908: Epoch 37 +2026-04-08 16:52:39.123708: Current learning rate: 0.00967 +2026-04-08 16:54:22.041379: train_loss 0.0402 +2026-04-08 16:54:22.047878: val_loss 0.015 +2026-04-08 16:54:22.049881: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:54:22.053681: Epoch time: 102.93 s +2026-04-08 16:54:24.377509: +2026-04-08 16:54:24.379405: Epoch 38 +2026-04-08 16:54:24.381987: Current learning rate: 0.00966 +2026-04-08 16:56:09.825570: train_loss 0.0254 +2026-04-08 16:56:09.833490: val_loss 0.0119 +2026-04-08 16:56:09.836492: Pseudo dice [0.0, 0.0, 0.0352, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:56:09.840038: Epoch time: 105.45 s +2026-04-08 16:56:09.842201: Yayy! New best EMA pseudo Dice: 0.0005 +2026-04-08 16:56:12.732334: +2026-04-08 16:56:12.734167: Epoch 39 +2026-04-08 16:56:12.735602: Current learning rate: 0.00965 +2026-04-08 16:57:55.387996: train_loss 0.0362 +2026-04-08 16:57:55.396616: val_loss 0.0274 +2026-04-08 16:57:55.398815: Pseudo dice [0.0, 0.0, 0.0824, 0.0, 0.0, 0.0, 0.0167] +2026-04-08 16:57:55.401345: Epoch time: 102.66 s +2026-04-08 16:57:55.403399: Yayy! New best EMA pseudo Dice: 0.0019 +2026-04-08 16:57:58.235033: +2026-04-08 16:57:58.248964: Epoch 40 +2026-04-08 16:57:58.261998: Current learning rate: 0.00964 +2026-04-08 16:59:39.996492: train_loss 0.0372 +2026-04-08 16:59:40.004777: val_loss 0.0236 +2026-04-08 16:59:40.009138: Pseudo dice [0.0, 0.0, 0.0212, 0.0, 0.0, 0.0, 0.0003] +2026-04-08 16:59:40.013171: Epoch time: 101.76 s +2026-04-08 16:59:40.016577: Yayy! 
New best EMA pseudo Dice: 0.002 +2026-04-08 16:59:42.868799: +2026-04-08 16:59:42.871054: Epoch 41 +2026-04-08 16:59:42.873908: Current learning rate: 0.00963 +2026-04-08 17:01:25.094814: train_loss 0.0304 +2026-04-08 17:01:25.102716: val_loss 0.0296 +2026-04-08 17:01:25.105493: Pseudo dice [0.0, 0.0, 0.0426, 0.0, 0.0, 0.0, 0.0069] +2026-04-08 17:01:25.108229: Epoch time: 102.23 s +2026-04-08 17:01:25.110844: Yayy! New best EMA pseudo Dice: 0.0025 +2026-04-08 17:01:27.837623: +2026-04-08 17:01:27.839335: Epoch 42 +2026-04-08 17:01:27.840992: Current learning rate: 0.00962 +2026-04-08 17:03:10.367761: train_loss 0.0319 +2026-04-08 17:03:10.374397: val_loss 0.07 +2026-04-08 17:03:10.377244: Pseudo dice [0.0, 0.0, 0.0596, 0.0, 0.0, 0.0, 0.0551] +2026-04-08 17:03:10.380399: Epoch time: 102.53 s +2026-04-08 17:03:10.382989: Yayy! New best EMA pseudo Dice: 0.0039 +2026-04-08 17:03:13.211416: +2026-04-08 17:03:13.213024: Epoch 43 +2026-04-08 17:03:13.214719: Current learning rate: 0.00961 +2026-04-08 17:04:55.319291: train_loss 0.0326 +2026-04-08 17:04:55.325243: val_loss 0.0185 +2026-04-08 17:04:55.327337: Pseudo dice [0.0, 0.0, 0.1436, 0.0, 0.0, 0.0, 0.0194] +2026-04-08 17:04:55.329548: Epoch time: 102.11 s +2026-04-08 17:04:55.331861: Yayy! New best EMA pseudo Dice: 0.0058 +2026-04-08 17:04:58.111144: +2026-04-08 17:04:58.113670: Epoch 44 +2026-04-08 17:04:58.115232: Current learning rate: 0.0096 +2026-04-08 17:06:39.675329: train_loss 0.0258 +2026-04-08 17:06:39.682277: val_loss 0.0032 +2026-04-08 17:06:39.684692: Pseudo dice [0.0, 0.0, 0.0201, 0.0, 0.0, 0.0, 0.261] +2026-04-08 17:06:39.687369: Epoch time: 101.57 s +2026-04-08 17:06:39.689830: Yayy! 
New best EMA pseudo Dice: 0.0093 +2026-04-08 17:06:42.471605: +2026-04-08 17:06:42.476430: Epoch 45 +2026-04-08 17:06:42.479062: Current learning rate: 0.00959 +2026-04-08 17:08:24.338189: train_loss 0.0392 +2026-04-08 17:08:24.346498: val_loss 0.0291 +2026-04-08 17:08:24.348707: Pseudo dice [0.0, 0.0, 0.0013, 0.0, 0.0, 0.0, 0.18] +2026-04-08 17:08:24.351309: Epoch time: 101.87 s +2026-04-08 17:08:24.353004: Yayy! New best EMA pseudo Dice: 0.0109 +2026-04-08 17:08:27.087022: +2026-04-08 17:08:27.088854: Epoch 46 +2026-04-08 17:08:27.090252: Current learning rate: 0.00959 +2026-04-08 17:10:08.700289: train_loss 0.0298 +2026-04-08 17:10:08.716861: val_loss 0.014 +2026-04-08 17:10:08.721994: Pseudo dice [0.0, 0.0, 0.0027, 0.0, 0.0, 0.0, 0.2988] +2026-04-08 17:10:08.727974: Epoch time: 101.62 s +2026-04-08 17:10:08.733049: Yayy! New best EMA pseudo Dice: 0.0141 +2026-04-08 17:10:11.444925: +2026-04-08 17:10:11.446422: Epoch 47 +2026-04-08 17:10:11.448257: Current learning rate: 0.00958 +2026-04-08 17:11:52.914473: train_loss 0.0301 +2026-04-08 17:11:52.926960: val_loss 0.026 +2026-04-08 17:11:52.929860: Pseudo dice [0.0, 0.0, 0.112, 0.0, 0.0, 0.0, 0.0346] +2026-04-08 17:11:52.933426: Epoch time: 101.47 s +2026-04-08 17:11:52.935853: Yayy! New best EMA pseudo Dice: 0.0148 +2026-04-08 17:11:55.783463: +2026-04-08 17:11:55.785216: Epoch 48 +2026-04-08 17:11:55.786762: Current learning rate: 0.00957 +2026-04-08 17:13:37.442189: train_loss 0.0275 +2026-04-08 17:13:37.450618: val_loss 0.0349 +2026-04-08 17:13:37.453558: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.347] +2026-04-08 17:13:37.458146: Epoch time: 101.66 s +2026-04-08 17:13:37.460607: Yayy! 
New best EMA pseudo Dice: 0.0183 +2026-04-08 17:13:40.260313: +2026-04-08 17:13:40.262542: Epoch 49 +2026-04-08 17:13:40.265243: Current learning rate: 0.00956 +2026-04-08 17:15:23.058102: train_loss 0.0372 +2026-04-08 17:15:23.064433: val_loss 0.0013 +2026-04-08 17:15:23.067248: Pseudo dice [0.0, 0.0, 0.1853, 0.0, 0.0, 0.0, 0.2593] +2026-04-08 17:15:23.069356: Epoch time: 102.8 s +2026-04-08 17:15:24.986711: Yayy! New best EMA pseudo Dice: 0.0228 +2026-04-08 17:15:27.719552: +2026-04-08 17:15:27.721305: Epoch 50 +2026-04-08 17:15:27.722950: Current learning rate: 0.00955 +2026-04-08 17:17:09.674777: train_loss 0.0231 +2026-04-08 17:17:09.680212: val_loss 0.0155 +2026-04-08 17:17:09.682243: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3153] +2026-04-08 17:17:09.686055: Epoch time: 101.96 s +2026-04-08 17:17:09.701772: Yayy! New best EMA pseudo Dice: 0.025 +2026-04-08 17:17:12.439277: +2026-04-08 17:17:12.442118: Epoch 51 +2026-04-08 17:17:12.446243: Current learning rate: 0.00954 +2026-04-08 17:18:54.134461: train_loss 0.0163 +2026-04-08 17:18:54.144024: val_loss 0.0459 +2026-04-08 17:18:54.146374: Pseudo dice [0.0, 0.0, 0.001, 0.0, 0.0, 0.0106, 0.2727] +2026-04-08 17:18:54.148699: Epoch time: 101.7 s +2026-04-08 17:18:54.150675: Yayy! New best EMA pseudo Dice: 0.0266 +2026-04-08 17:18:56.939535: +2026-04-08 17:18:56.941288: Epoch 52 +2026-04-08 17:18:56.943236: Current learning rate: 0.00953 +2026-04-08 17:20:40.141639: train_loss 0.0076 +2026-04-08 17:20:40.154029: val_loss 0.0082 +2026-04-08 17:20:40.157615: Pseudo dice [0.0, 0.0, 0.3207, 0.0, 0.0, 0.0014, 0.4937] +2026-04-08 17:20:40.160968: Epoch time: 103.21 s +2026-04-08 17:20:40.164032: Yayy! 
New best EMA pseudo Dice: 0.0356 +2026-04-08 17:20:42.962752: +2026-04-08 17:20:42.964579: Epoch 53 +2026-04-08 17:20:42.966252: Current learning rate: 0.00952 +2026-04-08 17:22:24.491732: train_loss 0.0109 +2026-04-08 17:22:24.498927: val_loss 0.0157 +2026-04-08 17:22:24.500781: Pseudo dice [0.0, 0.0, 0.0332, 0.0, 0.0, 0.0003, 0.2689] +2026-04-08 17:22:24.503339: Epoch time: 101.53 s +2026-04-08 17:22:24.505270: Yayy! New best EMA pseudo Dice: 0.0364 +2026-04-08 17:22:28.238443: +2026-04-08 17:22:28.240512: Epoch 54 +2026-04-08 17:22:28.241891: Current learning rate: 0.00951 +2026-04-08 17:24:11.159224: train_loss 0.0286 +2026-04-08 17:24:11.167434: val_loss 0.0293 +2026-04-08 17:24:11.169269: Pseudo dice [0.0, 0.0, 0.2768, 0.0, 0.0, 0.0, 0.4351] +2026-04-08 17:24:11.171578: Epoch time: 102.92 s +2026-04-08 17:24:11.174706: Yayy! New best EMA pseudo Dice: 0.0429 +2026-04-08 17:24:14.023471: +2026-04-08 17:24:14.025749: Epoch 55 +2026-04-08 17:24:14.028241: Current learning rate: 0.0095 +2026-04-08 17:25:56.320458: train_loss 0.0179 +2026-04-08 17:25:56.353783: val_loss 0.0297 +2026-04-08 17:25:56.357940: Pseudo dice [0.0, 0.0, 0.1665, 0.0, 0.0, 0.0063, 0.2212] +2026-04-08 17:25:56.361058: Epoch time: 102.3 s +2026-04-08 17:25:56.363106: Yayy! New best EMA pseudo Dice: 0.0442 +2026-04-08 17:25:59.160573: +2026-04-08 17:25:59.162452: Epoch 56 +2026-04-08 17:25:59.163872: Current learning rate: 0.00949 +2026-04-08 17:27:41.124787: train_loss 0.0176 +2026-04-08 17:27:41.132995: val_loss -0.0075 +2026-04-08 17:27:41.135576: Pseudo dice [0.0, 0.0, 0.3836, 0.0, 0.0, 0.0, 0.5132] +2026-04-08 17:27:41.138899: Epoch time: 101.97 s +2026-04-08 17:27:41.141382: Yayy! 
New best EMA pseudo Dice: 0.0526 +2026-04-08 17:27:43.983771: +2026-04-08 17:27:43.985721: Epoch 57 +2026-04-08 17:27:43.987272: Current learning rate: 0.00949 +2026-04-08 17:29:25.771890: train_loss 0.0039 +2026-04-08 17:29:25.779072: val_loss -0.0146 +2026-04-08 17:29:25.780805: Pseudo dice [0.0, 0.0, 0.2677, 0.0, 0.0, 0.0002, 0.4534] +2026-04-08 17:29:25.783193: Epoch time: 101.79 s +2026-04-08 17:29:25.785957: Yayy! New best EMA pseudo Dice: 0.0577 +2026-04-08 17:29:28.521427: +2026-04-08 17:29:28.523009: Epoch 58 +2026-04-08 17:29:28.524606: Current learning rate: 0.00948 +2026-04-08 17:31:10.471332: train_loss 0.0133 +2026-04-08 17:31:10.479770: val_loss 0.0073 +2026-04-08 17:31:10.482198: Pseudo dice [0.0, 0.0, 0.2015, 0.0, 0.0, 0.0, 0.4483] +2026-04-08 17:31:10.484601: Epoch time: 101.95 s +2026-04-08 17:31:10.486609: Yayy! New best EMA pseudo Dice: 0.0612 +2026-04-08 17:31:13.364707: +2026-04-08 17:31:13.366323: Epoch 59 +2026-04-08 17:31:13.367843: Current learning rate: 0.00947 +2026-04-08 17:32:55.548460: train_loss 0.0167 +2026-04-08 17:32:55.559162: val_loss 0.016 +2026-04-08 17:32:55.561863: Pseudo dice [0.0, 0.0, 0.311, 0.0, 0.0, 0.0636, 0.3141] +2026-04-08 17:32:55.564525: Epoch time: 102.19 s +2026-04-08 17:32:55.567374: Yayy! New best EMA pseudo Dice: 0.0649 +2026-04-08 17:32:58.408326: +2026-04-08 17:32:58.412895: Epoch 60 +2026-04-08 17:32:58.427952: Current learning rate: 0.00946 +2026-04-08 17:34:39.938740: train_loss -0.0034 +2026-04-08 17:34:39.947352: val_loss -0.0157 +2026-04-08 17:34:39.950005: Pseudo dice [0.0, 0.0, 0.3423, 0.0, 0.0, 0.2114, 0.325] +2026-04-08 17:34:39.954892: Epoch time: 101.53 s +2026-04-08 17:34:39.957729: Yayy! 
New best EMA pseudo Dice: 0.071 +2026-04-08 17:34:42.952966: +2026-04-08 17:34:42.956127: Epoch 61 +2026-04-08 17:34:42.957831: Current learning rate: 0.00945 +2026-04-08 17:36:25.932744: train_loss -0.0012 +2026-04-08 17:36:25.939419: val_loss 0.0214 +2026-04-08 17:36:25.941669: Pseudo dice [0.0, 0.0, 0.0078, 0.0, 0.0, 0.1401, 0.3067] +2026-04-08 17:36:25.943679: Epoch time: 102.98 s +2026-04-08 17:36:27.039883: +2026-04-08 17:36:27.041865: Epoch 62 +2026-04-08 17:36:27.043580: Current learning rate: 0.00944 +2026-04-08 17:38:09.122062: train_loss 0.0091 +2026-04-08 17:38:09.128187: val_loss 0.001 +2026-04-08 17:38:09.130618: Pseudo dice [0.0, 0.0, 0.3719, 0.0, 0.0, 0.2054, 0.3339] +2026-04-08 17:38:09.133813: Epoch time: 102.09 s +2026-04-08 17:38:09.136070: Yayy! New best EMA pseudo Dice: 0.0763 +2026-04-08 17:38:12.008690: +2026-04-08 17:38:12.011104: Epoch 63 +2026-04-08 17:38:12.014577: Current learning rate: 0.00943 +2026-04-08 17:39:54.374996: train_loss -0.0124 +2026-04-08 17:39:54.381380: val_loss -0.0318 +2026-04-08 17:39:54.383663: Pseudo dice [0.0, 0.0, 0.3602, 0.0, 0.0, 0.4781, 0.5507] +2026-04-08 17:39:54.386701: Epoch time: 102.37 s +2026-04-08 17:39:54.389815: Yayy! New best EMA pseudo Dice: 0.0886 +2026-04-08 17:39:57.169569: +2026-04-08 17:39:57.171576: Epoch 64 +2026-04-08 17:39:57.173086: Current learning rate: 0.00942 +2026-04-08 17:41:38.592255: train_loss 0.0072 +2026-04-08 17:41:38.608995: val_loss 0.0102 +2026-04-08 17:41:38.614096: Pseudo dice [0.0, 0.0, 0.4297, 0.0, 0.0, 0.3124, 0.5244] +2026-04-08 17:41:38.618907: Epoch time: 101.43 s +2026-04-08 17:41:38.624403: Yayy! 
New best EMA pseudo Dice: 0.0978 +2026-04-08 17:41:41.391924: +2026-04-08 17:41:41.393655: Epoch 65 +2026-04-08 17:41:41.395106: Current learning rate: 0.00941 +2026-04-08 17:43:24.031890: train_loss -0.0115 +2026-04-08 17:43:24.050629: val_loss -0.0239 +2026-04-08 17:43:24.057055: Pseudo dice [0.0, 0.0, 0.4786, 0.0, 0.0, 0.3941, 0.4867] +2026-04-08 17:43:24.062041: Epoch time: 102.64 s +2026-04-08 17:43:24.067925: Yayy! New best EMA pseudo Dice: 0.1074 +2026-04-08 17:43:27.002058: +2026-04-08 17:43:27.007228: Epoch 66 +2026-04-08 17:43:27.009720: Current learning rate: 0.0094 +2026-04-08 17:45:09.433917: train_loss -0.002 +2026-04-08 17:45:09.441776: val_loss 0.0092 +2026-04-08 17:45:09.443443: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.1298, 0.3528] +2026-04-08 17:45:09.445546: Epoch time: 102.43 s +2026-04-08 17:45:10.512703: +2026-04-08 17:45:10.515611: Epoch 67 +2026-04-08 17:45:10.517620: Current learning rate: 0.00939 +2026-04-08 17:46:52.051537: train_loss -0.0125 +2026-04-08 17:46:52.056882: val_loss -0.031 +2026-04-08 17:46:52.059111: Pseudo dice [0.0, 0.0, 0.339, 0.0, 0.0, 0.525, 0.5281] +2026-04-08 17:46:52.061256: Epoch time: 101.54 s +2026-04-08 17:46:52.063094: Yayy! New best EMA pseudo Dice: 0.1131 +2026-04-08 17:46:54.857457: +2026-04-08 17:46:54.859436: Epoch 68 +2026-04-08 17:46:54.861158: Current learning rate: 0.00939 +2026-04-08 17:48:36.895464: train_loss -0.012 +2026-04-08 17:48:36.901050: val_loss -0.0441 +2026-04-08 17:48:36.902722: Pseudo dice [0.0, 0.0, 0.5072, 0.0, 0.0, 0.5171, 0.5197] +2026-04-08 17:48:36.905082: Epoch time: 102.04 s +2026-04-08 17:48:36.907257: Yayy! 
New best EMA pseudo Dice: 0.1239 +2026-04-08 17:48:39.648359: +2026-04-08 17:48:39.650589: Epoch 69 +2026-04-08 17:48:39.652499: Current learning rate: 0.00938 +2026-04-08 17:50:21.088219: train_loss -0.0088 +2026-04-08 17:50:21.093571: val_loss -0.0147 +2026-04-08 17:50:21.095499: Pseudo dice [0.0, 0.0, 0.4786, 0.0, 0.0, 0.3492, 0.3354] +2026-04-08 17:50:21.098604: Epoch time: 101.44 s +2026-04-08 17:50:21.100543: Yayy! New best EMA pseudo Dice: 0.1281 +2026-04-08 17:50:24.927303: +2026-04-08 17:50:24.928741: Epoch 70 +2026-04-08 17:50:24.930138: Current learning rate: 0.00937 +2026-04-08 17:52:06.162316: train_loss -0.0182 +2026-04-08 17:52:06.168339: val_loss -0.0369 +2026-04-08 17:52:06.170940: Pseudo dice [0.0, 0.0, 0.3823, 0.0, 0.0, 0.341, 0.2396] +2026-04-08 17:52:06.173079: Epoch time: 101.24 s +2026-04-08 17:52:06.176562: Yayy! New best EMA pseudo Dice: 0.129 +2026-04-08 17:52:08.982588: +2026-04-08 17:52:09.007370: Epoch 71 +2026-04-08 17:52:09.016555: Current learning rate: 0.00936 +2026-04-08 17:53:50.630161: train_loss -0.0202 +2026-04-08 17:53:50.638921: val_loss -0.0347 +2026-04-08 17:53:50.641081: Pseudo dice [0.0, 0.0, 0.4098, 0.0, 0.0, 0.3242, 0.7223] +2026-04-08 17:53:50.643362: Epoch time: 101.65 s +2026-04-08 17:53:50.645807: Yayy! New best EMA pseudo Dice: 0.1369 +2026-04-08 17:53:53.395269: +2026-04-08 17:53:53.396888: Epoch 72 +2026-04-08 17:53:53.398499: Current learning rate: 0.00935 +2026-04-08 17:55:35.366957: train_loss -0.0217 +2026-04-08 17:55:35.373438: val_loss -0.0358 +2026-04-08 17:55:35.375417: Pseudo dice [0.0, 0.0, 0.1286, 0.0, 0.0, 0.3786, 0.5717] +2026-04-08 17:55:35.377687: Epoch time: 101.97 s +2026-04-08 17:55:35.379446: Yayy! 
New best EMA pseudo Dice: 0.1387 +2026-04-08 17:55:38.138053: +2026-04-08 17:55:38.139601: Epoch 73 +2026-04-08 17:55:38.141039: Current learning rate: 0.00934 +2026-04-08 17:57:19.942906: train_loss -0.0263 +2026-04-08 17:57:19.949323: val_loss -0.0243 +2026-04-08 17:57:19.952130: Pseudo dice [0.0, 0.0, 0.4465, 0.0, 0.0, 0.4052, 0.5731] +2026-04-08 17:57:19.954443: Epoch time: 101.81 s +2026-04-08 17:57:19.956732: Yayy! New best EMA pseudo Dice: 0.1451 +2026-04-08 17:57:22.874663: +2026-04-08 17:57:22.876460: Epoch 74 +2026-04-08 17:57:22.877786: Current learning rate: 0.00933 +2026-04-08 17:59:04.401538: train_loss -0.0262 +2026-04-08 17:59:04.408434: val_loss -0.0349 +2026-04-08 17:59:04.410513: Pseudo dice [0.0, 0.0, 0.3433, 0.0, 0.0, 0.5881, 0.4628] +2026-04-08 17:59:04.414383: Epoch time: 101.53 s +2026-04-08 17:59:04.416823: Yayy! New best EMA pseudo Dice: 0.1505 +2026-04-08 17:59:07.153059: +2026-04-08 17:59:07.154647: Epoch 75 +2026-04-08 17:59:07.155973: Current learning rate: 0.00932 +2026-04-08 18:00:48.883345: train_loss -0.023 +2026-04-08 18:00:48.888333: val_loss -0.0573 +2026-04-08 18:00:48.889770: Pseudo dice [0.0, 0.0, 0.4489, 0.0, 0.0, 0.5095, 0.6224] +2026-04-08 18:00:48.892255: Epoch time: 101.73 s +2026-04-08 18:00:48.893742: Yayy! New best EMA pseudo Dice: 0.1581 +2026-04-08 18:00:51.630872: +2026-04-08 18:00:51.632845: Epoch 76 +2026-04-08 18:00:51.634432: Current learning rate: 0.00931 +2026-04-08 18:02:33.379456: train_loss -0.0054 +2026-04-08 18:02:33.384423: val_loss -0.0301 +2026-04-08 18:02:33.389482: Pseudo dice [0.0, 0.0, 0.5166, 0.0, 0.0, 0.4117, 0.4694] +2026-04-08 18:02:33.393485: Epoch time: 101.75 s +2026-04-08 18:02:33.395272: Yayy! 
New best EMA pseudo Dice: 0.1622 +2026-04-08 18:02:36.191538: +2026-04-08 18:02:36.193521: Epoch 77 +2026-04-08 18:02:36.195323: Current learning rate: 0.0093 +2026-04-08 18:04:17.818743: train_loss -0.042 +2026-04-08 18:04:17.835005: val_loss -0.0297 +2026-04-08 18:04:17.840321: Pseudo dice [0.0, 0.0, 0.4386, 0.0, 0.0, 0.4551, 0.5407] +2026-04-08 18:04:17.844932: Epoch time: 101.63 s +2026-04-08 18:04:17.849972: Yayy! New best EMA pseudo Dice: 0.1665 +2026-04-08 18:04:20.700598: +2026-04-08 18:04:20.702490: Epoch 78 +2026-04-08 18:04:20.704149: Current learning rate: 0.0093 +2026-04-08 18:06:02.749268: train_loss -0.0288 +2026-04-08 18:06:02.755734: val_loss -0.0265 +2026-04-08 18:06:02.757692: Pseudo dice [0.0, 0.0, 0.3269, 0.0, 0.0, 0.5571, 0.4506] +2026-04-08 18:06:02.760211: Epoch time: 102.05 s +2026-04-08 18:06:02.762437: Yayy! New best EMA pseudo Dice: 0.1689 +2026-04-08 18:06:05.543269: +2026-04-08 18:06:05.545679: Epoch 79 +2026-04-08 18:06:05.547241: Current learning rate: 0.00929 +2026-04-08 18:07:47.348207: train_loss -0.0255 +2026-04-08 18:07:47.353504: val_loss -0.032 +2026-04-08 18:07:47.355262: Pseudo dice [0.0, 0.0, 0.3382, 0.0, 0.0, 0.6358, 0.4339] +2026-04-08 18:07:47.357786: Epoch time: 101.81 s +2026-04-08 18:07:47.360081: Yayy! New best EMA pseudo Dice: 0.1721 +2026-04-08 18:07:50.156142: +2026-04-08 18:07:50.158259: Epoch 80 +2026-04-08 18:07:50.159597: Current learning rate: 0.00928 +2026-04-08 18:09:32.350627: train_loss -0.0237 +2026-04-08 18:09:32.357564: val_loss -0.0622 +2026-04-08 18:09:32.360183: Pseudo dice [0.0, 0.0, 0.4275, 0.0, 0.0, 0.4113, 0.6595] +2026-04-08 18:09:32.363253: Epoch time: 102.2 s +2026-04-08 18:09:32.366020: Yayy! 
New best EMA pseudo Dice: 0.1763 +2026-04-08 18:09:35.188880: +2026-04-08 18:09:35.190835: Epoch 81 +2026-04-08 18:09:35.192528: Current learning rate: 0.00927 +2026-04-08 18:11:17.377122: train_loss -0.041 +2026-04-08 18:11:17.382925: val_loss -0.061 +2026-04-08 18:11:17.388566: Pseudo dice [0.0, 0.0, 0.5989, 0.0, 0.0, 0.5804, 0.5473] +2026-04-08 18:11:17.392575: Epoch time: 102.19 s +2026-04-08 18:11:17.394906: Yayy! New best EMA pseudo Dice: 0.1834 +2026-04-08 18:11:20.216820: +2026-04-08 18:11:20.218405: Epoch 82 +2026-04-08 18:11:20.219753: Current learning rate: 0.00926 +2026-04-08 18:13:02.476831: train_loss -0.0319 +2026-04-08 18:13:02.483353: val_loss -0.0188 +2026-04-08 18:13:02.487123: Pseudo dice [0.0, 0.0, 0.5964, 0.0, 0.0, 0.2231, 0.2504] +2026-04-08 18:13:02.490768: Epoch time: 102.26 s +2026-04-08 18:13:03.528356: +2026-04-08 18:13:03.529934: Epoch 83 +2026-04-08 18:13:03.531591: Current learning rate: 0.00925 +2026-04-08 18:14:44.547656: train_loss -0.0283 +2026-04-08 18:14:44.553064: val_loss -0.0607 +2026-04-08 18:14:44.554538: Pseudo dice [0.0, 0.0, 0.4531, 0.0, 0.0, 0.5513, 0.41] +2026-04-08 18:14:44.556890: Epoch time: 101.02 s +2026-04-08 18:14:45.592664: +2026-04-08 18:14:45.595093: Epoch 84 +2026-04-08 18:14:45.597139: Current learning rate: 0.00924 +2026-04-08 18:16:28.833857: train_loss -0.0237 +2026-04-08 18:16:28.839058: val_loss -0.0667 +2026-04-08 18:16:28.840900: Pseudo dice [0.0, 0.0, 0.2651, 0.0, 0.0, 0.593, 0.7233] +2026-04-08 18:16:28.842948: Epoch time: 103.24 s +2026-04-08 18:16:28.844678: Yayy! New best EMA pseudo Dice: 0.1868 +2026-04-08 18:16:31.546066: +2026-04-08 18:16:31.548052: Epoch 85 +2026-04-08 18:16:31.549721: Current learning rate: 0.00923 +2026-04-08 18:18:13.045745: train_loss -0.0381 +2026-04-08 18:18:13.051593: val_loss -0.063 +2026-04-08 18:18:13.053770: Pseudo dice [0.0, 0.0, 0.2749, 0.0, 0.0, 0.6114, 0.7652] +2026-04-08 18:18:13.055830: Epoch time: 101.5 s +2026-04-08 18:18:13.058451: Yayy! 
New best EMA pseudo Dice: 0.1917 +2026-04-08 18:18:15.760204: +2026-04-08 18:18:15.761825: Epoch 86 +2026-04-08 18:18:15.763347: Current learning rate: 0.00922 +2026-04-08 18:19:58.264987: train_loss -0.0395 +2026-04-08 18:19:58.270561: val_loss -0.0569 +2026-04-08 18:19:58.272473: Pseudo dice [0.0, 0.0, 0.5326, 0.0001, 0.0, 0.4395, 0.5013] +2026-04-08 18:19:58.274766: Epoch time: 102.51 s +2026-04-08 18:19:58.276434: Yayy! New best EMA pseudo Dice: 0.1936 +2026-04-08 18:20:01.062289: +2026-04-08 18:20:01.063802: Epoch 87 +2026-04-08 18:20:01.065395: Current learning rate: 0.00921 +2026-04-08 18:21:42.044588: train_loss -0.05 +2026-04-08 18:21:42.051985: val_loss -0.0496 +2026-04-08 18:21:42.054497: Pseudo dice [0.0, 0.0, 0.4797, 0.0, 0.0, 0.3597, 0.702] +2026-04-08 18:21:42.056585: Epoch time: 100.99 s +2026-04-08 18:21:42.058846: Yayy! New best EMA pseudo Dice: 0.1963 +2026-04-08 18:21:44.922665: +2026-04-08 18:21:44.924088: Epoch 88 +2026-04-08 18:21:44.925587: Current learning rate: 0.0092 +2026-04-08 18:23:26.319498: train_loss -0.0475 +2026-04-08 18:23:26.326319: val_loss -0.0644 +2026-04-08 18:23:26.330507: Pseudo dice [0.0, 0.0, 0.4312, 0.0, 0.0, 0.5898, 0.6676] +2026-04-08 18:23:26.335450: Epoch time: 101.4 s +2026-04-08 18:23:26.337468: Yayy! 
New best EMA pseudo Dice: 0.2008 +2026-04-08 18:23:29.055183: +2026-04-08 18:23:29.057208: Epoch 89 +2026-04-08 18:23:29.060061: Current learning rate: 0.0092 +2026-04-08 18:25:10.146816: train_loss -0.0477 +2026-04-08 18:25:10.152183: val_loss -0.0194 +2026-04-08 18:25:10.153979: Pseudo dice [0.0, 0.0, 0.3212, 0.0, 0.0, 0.3737, 0.4072] +2026-04-08 18:25:10.156547: Epoch time: 101.09 s +2026-04-08 18:25:11.188799: +2026-04-08 18:25:11.190309: Epoch 90 +2026-04-08 18:25:11.192000: Current learning rate: 0.00919 +2026-04-08 18:26:52.232572: train_loss -0.041 +2026-04-08 18:26:52.238440: val_loss -0.0504 +2026-04-08 18:26:52.240282: Pseudo dice [0.0, 0.0, 0.6193, 0.0, 0.0, 0.5927, 0.5976] +2026-04-08 18:26:52.242092: Epoch time: 101.05 s +2026-04-08 18:26:52.244197: Yayy! New best EMA pseudo Dice: 0.2026 +2026-04-08 18:26:54.937208: +2026-04-08 18:26:54.938847: Epoch 91 +2026-04-08 18:26:54.940339: Current learning rate: 0.00918 +2026-04-08 18:28:36.470866: train_loss -0.0533 +2026-04-08 18:28:36.477245: val_loss -0.0599 +2026-04-08 18:28:36.479469: Pseudo dice [0.0, 0.0, 0.4588, 0.0001, 0.0, 0.6488, 0.6279] +2026-04-08 18:28:36.481545: Epoch time: 101.54 s +2026-04-08 18:28:36.483356: Yayy! New best EMA pseudo Dice: 0.2072 +2026-04-08 18:28:39.368605: +2026-04-08 18:28:39.370292: Epoch 92 +2026-04-08 18:28:39.371768: Current learning rate: 0.00917 +2026-04-08 18:30:21.207503: train_loss -0.0516 +2026-04-08 18:30:21.213396: val_loss -0.0486 +2026-04-08 18:30:21.215333: Pseudo dice [0.0, 0.0, 0.1243, 0.0, 0.0, 0.5313, 0.7116] +2026-04-08 18:30:21.217590: Epoch time: 101.84 s +2026-04-08 18:30:22.313013: +2026-04-08 18:30:22.314888: Epoch 93 +2026-04-08 18:30:22.316814: Current learning rate: 0.00916 +2026-04-08 18:32:03.847288: train_loss -0.0256 +2026-04-08 18:32:03.852731: val_loss -0.0539 +2026-04-08 18:32:03.854793: Pseudo dice [0.1458, 0.0, 0.5467, 0.0016, 0.0, 0.6768, 0.5688] +2026-04-08 18:32:03.856992: Epoch time: 101.54 s +2026-04-08 18:32:03.858671: Yayy! 
New best EMA pseudo Dice: 0.2131 +2026-04-08 18:32:06.638424: +2026-04-08 18:32:06.640431: Epoch 94 +2026-04-08 18:32:06.641789: Current learning rate: 0.00915 +2026-04-08 18:33:47.573721: train_loss -0.054 +2026-04-08 18:33:47.580920: val_loss -0.061 +2026-04-08 18:33:47.583021: Pseudo dice [0.468, 0.0, 0.4739, 0.0, 0.0, 0.216, 0.5885] +2026-04-08 18:33:47.585885: Epoch time: 100.94 s +2026-04-08 18:33:47.587904: Yayy! New best EMA pseudo Dice: 0.2167 +2026-04-08 18:33:50.305318: +2026-04-08 18:33:50.306789: Epoch 95 +2026-04-08 18:33:50.308390: Current learning rate: 0.00914 +2026-04-08 18:35:33.398937: train_loss -0.0535 +2026-04-08 18:35:33.403730: val_loss -0.0945 +2026-04-08 18:35:33.405535: Pseudo dice [0.2569, 0.0, 0.6246, 0.0263, 0.0, 0.6826, 0.7281] +2026-04-08 18:35:33.407529: Epoch time: 103.1 s +2026-04-08 18:35:33.409739: Yayy! New best EMA pseudo Dice: 0.2282 +2026-04-08 18:35:36.110025: +2026-04-08 18:35:36.111854: Epoch 96 +2026-04-08 18:35:36.113520: Current learning rate: 0.00913 +2026-04-08 18:37:19.105974: train_loss -0.064 +2026-04-08 18:37:19.112446: val_loss -0.0525 +2026-04-08 18:37:19.113955: Pseudo dice [0.1719, 0.0373, 0.4116, 0.0, 0.0, 0.6342, 0.5853] +2026-04-08 18:37:19.116020: Epoch time: 103.0 s +2026-04-08 18:37:19.118271: Yayy! New best EMA pseudo Dice: 0.2317 +2026-04-08 18:37:21.905549: +2026-04-08 18:37:21.907388: Epoch 97 +2026-04-08 18:37:21.909241: Current learning rate: 0.00912 +2026-04-08 18:39:04.148240: train_loss -0.0652 +2026-04-08 18:39:04.155710: val_loss -0.0773 +2026-04-08 18:39:04.157470: Pseudo dice [0.3936, 0.1575, 0.6209, 0.0001, 0.0, 0.6293, 0.6758] +2026-04-08 18:39:04.159831: Epoch time: 102.25 s +2026-04-08 18:39:04.162031: Yayy! 
New best EMA pseudo Dice: 0.2439 +2026-04-08 18:39:06.859586: +2026-04-08 18:39:06.861154: Epoch 98 +2026-04-08 18:39:06.862543: Current learning rate: 0.00911 +2026-04-08 18:40:48.385658: train_loss -0.0649 +2026-04-08 18:40:48.391357: val_loss -0.0462 +2026-04-08 18:40:48.393801: Pseudo dice [0.0592, 0.0797, 0.3777, 0.0001, 0.0, 0.6812, 0.4231] +2026-04-08 18:40:48.396214: Epoch time: 101.53 s +2026-04-08 18:40:49.448200: +2026-04-08 18:40:49.449729: Epoch 99 +2026-04-08 18:40:49.451354: Current learning rate: 0.0091 +2026-04-08 18:42:31.628254: train_loss -0.0466 +2026-04-08 18:42:31.636729: val_loss -0.0588 +2026-04-08 18:42:31.646681: Pseudo dice [0.4174, 0.409, 0.2553, 0.0, 0.0, 0.6846, 0.719] +2026-04-08 18:42:31.656238: Epoch time: 102.18 s +2026-04-08 18:42:33.401049: Yayy! New best EMA pseudo Dice: 0.2539 +2026-04-08 18:42:36.330254: +2026-04-08 18:42:36.331783: Epoch 100 +2026-04-08 18:42:36.333165: Current learning rate: 0.0091 +2026-04-08 18:44:18.413764: train_loss -0.0557 +2026-04-08 18:44:18.420615: val_loss -0.0106 +2026-04-08 18:44:18.422403: Pseudo dice [0.0867, 0.0496, 0.4178, 0.0001, 0.0, 0.6894, 0.686] +2026-04-08 18:44:18.424408: Epoch time: 102.09 s +2026-04-08 18:44:18.426103: Yayy! 
New best EMA pseudo Dice: 0.2561 +2026-04-08 18:44:21.295574: +2026-04-08 18:44:21.297003: Epoch 101 +2026-04-08 18:44:21.298638: Current learning rate: 0.00909 +2026-04-08 18:46:03.447795: train_loss -0.0533 +2026-04-08 18:46:03.452575: val_loss -0.0253 +2026-04-08 18:46:03.455503: Pseudo dice [0.0749, 0.132, 0.2011, 0.0, 0.0, 0.6006, 0.3998] +2026-04-08 18:46:03.457893: Epoch time: 102.16 s +2026-04-08 18:46:04.502683: +2026-04-08 18:46:04.504601: Epoch 102 +2026-04-08 18:46:04.507138: Current learning rate: 0.00908 +2026-04-08 18:47:46.756335: train_loss -0.0489 +2026-04-08 18:47:46.769300: val_loss -0.0638 +2026-04-08 18:47:46.787639: Pseudo dice [0.2966, 0.1396, 0.5605, 0.0003, 0.0, 0.2187, 0.6115] +2026-04-08 18:47:46.806112: Epoch time: 102.26 s +2026-04-08 18:47:47.867814: +2026-04-08 18:47:47.869532: Epoch 103 +2026-04-08 18:47:47.871421: Current learning rate: 0.00907 +2026-04-08 18:49:29.567786: train_loss -0.0549 +2026-04-08 18:49:29.575618: val_loss -0.0552 +2026-04-08 18:49:29.579074: Pseudo dice [0.2749, 0.1826, 0.2153, 0.0058, 0.0, 0.6707, 0.543] +2026-04-08 18:49:29.581301: Epoch time: 101.7 s +2026-04-08 18:49:31.809747: +2026-04-08 18:49:31.811928: Epoch 104 +2026-04-08 18:49:31.813311: Current learning rate: 0.00906 +2026-04-08 18:51:13.460477: train_loss -0.0727 +2026-04-08 18:51:13.467171: val_loss -0.0798 +2026-04-08 18:51:13.469350: Pseudo dice [0.1832, 0.0832, 0.6158, 0.0007, 0.0, 0.4957, 0.6428] +2026-04-08 18:51:13.471558: Epoch time: 101.65 s +2026-04-08 18:51:13.473382: Yayy! New best EMA pseudo Dice: 0.257 +2026-04-08 18:51:16.225858: +2026-04-08 18:51:16.227585: Epoch 105 +2026-04-08 18:51:16.228913: Current learning rate: 0.00905 +2026-04-08 18:52:57.952701: train_loss -0.0813 +2026-04-08 18:52:57.965740: val_loss -0.0617 +2026-04-08 18:52:57.969276: Pseudo dice [0.1863, 0.2739, 0.2609, 0.0057, 0.0, 0.6554, 0.688] +2026-04-08 18:52:57.973960: Epoch time: 101.73 s +2026-04-08 18:52:57.979309: Yayy! 
New best EMA pseudo Dice: 0.2609 +2026-04-08 18:53:00.924152: +2026-04-08 18:53:00.925906: Epoch 106 +2026-04-08 18:53:00.927860: Current learning rate: 0.00904 +2026-04-08 18:54:43.616107: train_loss -0.0778 +2026-04-08 18:54:43.621266: val_loss -0.0887 +2026-04-08 18:54:43.622667: Pseudo dice [0.1895, 0.371, 0.3791, 0.0, 0.0, 0.7541, 0.673] +2026-04-08 18:54:43.628266: Epoch time: 102.7 s +2026-04-08 18:54:43.630039: Yayy! New best EMA pseudo Dice: 0.2686 +2026-04-08 18:54:46.540094: +2026-04-08 18:54:46.544683: Epoch 107 +2026-04-08 18:54:46.548284: Current learning rate: 0.00903 +2026-04-08 18:56:28.013728: train_loss -0.0812 +2026-04-08 18:56:28.019803: val_loss -0.0752 +2026-04-08 18:56:28.022587: Pseudo dice [0.242, 0.3841, 0.4375, 0.0, 0.0, 0.6266, 0.6851] +2026-04-08 18:56:28.024518: Epoch time: 101.48 s +2026-04-08 18:56:28.026234: Yayy! New best EMA pseudo Dice: 0.2757 +2026-04-08 18:56:30.739408: +2026-04-08 18:56:30.741167: Epoch 108 +2026-04-08 18:56:30.742455: Current learning rate: 0.00902 +2026-04-08 18:58:12.432919: train_loss -0.0731 +2026-04-08 18:58:12.438747: val_loss -0.092 +2026-04-08 18:58:12.440976: Pseudo dice [0.1518, 0.3448, 0.7075, 0.0314, 0.0458, 0.5084, 0.7459] +2026-04-08 18:58:12.442687: Epoch time: 101.7 s +2026-04-08 18:58:12.444479: Yayy! New best EMA pseudo Dice: 0.2843 +2026-04-08 18:58:15.135478: +2026-04-08 18:58:15.137178: Epoch 109 +2026-04-08 18:58:15.138487: Current learning rate: 0.00901 +2026-04-08 18:59:57.032747: train_loss -0.0806 +2026-04-08 18:59:57.038038: val_loss -0.0427 +2026-04-08 18:59:57.040138: Pseudo dice [0.274, 0.3748, 0.491, 0.0179, 0.0223, 0.66, 0.4945] +2026-04-08 18:59:57.042260: Epoch time: 101.9 s +2026-04-08 18:59:57.044244: Yayy! 
New best EMA pseudo Dice: 0.2893 +2026-04-08 18:59:59.713484: +2026-04-08 18:59:59.715082: Epoch 110 +2026-04-08 18:59:59.716617: Current learning rate: 0.009 +2026-04-08 19:01:42.039760: train_loss -0.0801 +2026-04-08 19:01:42.044807: val_loss -0.0805 +2026-04-08 19:01:42.046323: Pseudo dice [0.2443, 0.4652, 0.5642, 0.0025, 0.0406, 0.574, 0.7658] +2026-04-08 19:01:42.048624: Epoch time: 102.33 s +2026-04-08 19:01:42.050384: Yayy! New best EMA pseudo Dice: 0.2983 +2026-04-08 19:01:44.772571: +2026-04-08 19:01:44.774062: Epoch 111 +2026-04-08 19:01:44.775740: Current learning rate: 0.009 +2026-04-08 19:03:28.202238: train_loss -0.0771 +2026-04-08 19:03:28.208727: val_loss -0.0402 +2026-04-08 19:03:28.211074: Pseudo dice [0.1274, 0.3348, 0.4715, 0.0, 0.1231, 0.6471, 0.5326] +2026-04-08 19:03:28.213389: Epoch time: 103.43 s +2026-04-08 19:03:28.215386: Yayy! New best EMA pseudo Dice: 0.3004 +2026-04-08 19:03:31.011180: +2026-04-08 19:03:31.012760: Epoch 112 +2026-04-08 19:03:31.014034: Current learning rate: 0.00899 +2026-04-08 19:05:12.547426: train_loss -0.0789 +2026-04-08 19:05:12.554178: val_loss -0.0759 +2026-04-08 19:05:12.556322: Pseudo dice [0.3184, 0.4599, 0.5554, 0.0002, 0.1693, 0.597, 0.5826] +2026-04-08 19:05:12.558942: Epoch time: 101.54 s +2026-04-08 19:05:12.560876: Yayy! New best EMA pseudo Dice: 0.3087 +2026-04-08 19:05:15.272531: +2026-04-08 19:05:15.274224: Epoch 113 +2026-04-08 19:05:15.276112: Current learning rate: 0.00898 +2026-04-08 19:06:56.757644: train_loss -0.0755 +2026-04-08 19:06:56.764544: val_loss -0.0652 +2026-04-08 19:06:56.766854: Pseudo dice [0.4189, 0.3891, 0.628, 0.0, 0.1241, 0.6375, 0.5111] +2026-04-08 19:06:56.769162: Epoch time: 101.49 s +2026-04-08 19:06:56.771082: Yayy! 
New best EMA pseudo Dice: 0.3165 +2026-04-08 19:06:59.736917: +2026-04-08 19:06:59.738732: Epoch 114 +2026-04-08 19:06:59.740132: Current learning rate: 0.00897 +2026-04-08 19:08:41.559974: train_loss -0.0841 +2026-04-08 19:08:41.565961: val_loss -0.0671 +2026-04-08 19:08:41.567464: Pseudo dice [0.1284, 0.2064, 0.6887, 0.0353, 0.1613, 0.6022, 0.6551] +2026-04-08 19:08:41.569583: Epoch time: 101.83 s +2026-04-08 19:08:41.571493: Yayy! New best EMA pseudo Dice: 0.3203 +2026-04-08 19:08:44.294085: +2026-04-08 19:08:44.295712: Epoch 115 +2026-04-08 19:08:44.297106: Current learning rate: 0.00896 +2026-04-08 19:10:25.745990: train_loss -0.067 +2026-04-08 19:10:25.754764: val_loss -0.0607 +2026-04-08 19:10:25.756431: Pseudo dice [0.0404, 0.1545, 0.3431, 0.0001, 0.0976, 0.4008, 0.6703] +2026-04-08 19:10:25.760946: Epoch time: 101.46 s +2026-04-08 19:10:26.839148: +2026-04-08 19:10:26.841412: Epoch 116 +2026-04-08 19:10:26.843139: Current learning rate: 0.00895 +2026-04-08 19:12:10.243880: train_loss -0.0813 +2026-04-08 19:12:10.249377: val_loss -0.0775 +2026-04-08 19:12:10.251515: Pseudo dice [0.4105, 0.307, 0.3555, 0.0, 0.1131, 0.5313, 0.7011] +2026-04-08 19:12:10.254609: Epoch time: 103.41 s +2026-04-08 19:12:11.349449: +2026-04-08 19:12:11.351355: Epoch 117 +2026-04-08 19:12:11.352781: Current learning rate: 0.00894 +2026-04-08 19:13:53.271650: train_loss -0.0941 +2026-04-08 19:13:53.277489: val_loss -0.0744 +2026-04-08 19:13:53.279428: Pseudo dice [0.0002, 0.4848, 0.449, 0.0833, 0.1638, 0.6599, 0.264] +2026-04-08 19:13:53.281289: Epoch time: 101.93 s +2026-04-08 19:13:54.347417: +2026-04-08 19:13:54.352479: Epoch 118 +2026-04-08 19:13:54.361050: Current learning rate: 0.00893 +2026-04-08 19:15:35.734427: train_loss -0.0875 +2026-04-08 19:15:35.740546: val_loss -0.0899 +2026-04-08 19:15:35.742568: Pseudo dice [0.4061, 0.3111, 0.5984, 0.0, 0.178, 0.6493, 0.7223] +2026-04-08 19:15:35.744673: Epoch time: 101.39 s +2026-04-08 19:15:35.747206: Yayy! 
New best EMA pseudo Dice: 0.3239 +2026-04-08 19:15:38.462348: +2026-04-08 19:15:38.464035: Epoch 119 +2026-04-08 19:15:38.465409: Current learning rate: 0.00892 +2026-04-08 19:17:19.595438: train_loss -0.0859 +2026-04-08 19:17:19.605115: val_loss -0.0617 +2026-04-08 19:17:19.607179: Pseudo dice [0.2352, 0.2619, 0.6453, 0.0383, 0.0981, 0.6419, 0.4812] +2026-04-08 19:17:19.610022: Epoch time: 101.14 s +2026-04-08 19:17:19.613367: Yayy! New best EMA pseudo Dice: 0.3258 +2026-04-08 19:17:22.336509: +2026-04-08 19:17:22.337929: Epoch 120 +2026-04-08 19:17:22.340055: Current learning rate: 0.00891 +2026-04-08 19:19:07.349015: train_loss -0.0845 +2026-04-08 19:19:07.355513: val_loss -0.085 +2026-04-08 19:19:07.357324: Pseudo dice [0.3886, 0.4853, 0.4458, 0.0, 0.1112, 0.585, 0.6557] +2026-04-08 19:19:07.359522: Epoch time: 105.02 s +2026-04-08 19:19:07.361462: Yayy! New best EMA pseudo Dice: 0.3314 +2026-04-08 19:19:10.200987: +2026-04-08 19:19:10.205017: Epoch 121 +2026-04-08 19:19:10.207919: Current learning rate: 0.0089 +2026-04-08 19:20:51.433354: train_loss -0.0952 +2026-04-08 19:20:51.442103: val_loss -0.0685 +2026-04-08 19:20:51.443736: Pseudo dice [0.3297, 0.1905, 0.3187, 0.0, 0.2356, 0.5194, 0.5319] +2026-04-08 19:20:51.445780: Epoch time: 101.24 s +2026-04-08 19:20:52.509548: +2026-04-08 19:20:52.512005: Epoch 122 +2026-04-08 19:20:52.514806: Current learning rate: 0.00889 +2026-04-08 19:22:33.677887: train_loss -0.095 +2026-04-08 19:22:33.682992: val_loss -0.0784 +2026-04-08 19:22:33.686285: Pseudo dice [0.1194, 0.0621, 0.5354, 0.0666, 0.215, 0.5614, 0.6628] +2026-04-08 19:22:33.688990: Epoch time: 101.17 s +2026-04-08 19:22:34.773618: +2026-04-08 19:22:34.775567: Epoch 123 +2026-04-08 19:22:34.777373: Current learning rate: 0.00889 +2026-04-08 19:24:16.480820: train_loss -0.0933 +2026-04-08 19:24:16.488267: val_loss -0.0858 +2026-04-08 19:24:16.489831: Pseudo dice [0.1557, 0.5554, 0.5847, 0.0005, 0.284, 0.4355, 0.5126] +2026-04-08 19:24:16.492169: Epoch time: 
101.71 s +2026-04-08 19:24:17.560669: +2026-04-08 19:24:17.562236: Epoch 124 +2026-04-08 19:24:17.563708: Current learning rate: 0.00888 +2026-04-08 19:25:59.096027: train_loss -0.0881 +2026-04-08 19:25:59.102865: val_loss -0.0784 +2026-04-08 19:25:59.105108: Pseudo dice [0.1094, 0.3948, 0.4867, 0.0113, 0.2599, 0.5464, 0.4615] +2026-04-08 19:25:59.107473: Epoch time: 101.54 s +2026-04-08 19:26:00.181647: +2026-04-08 19:26:00.183522: Epoch 125 +2026-04-08 19:26:00.185282: Current learning rate: 0.00887 +2026-04-08 19:27:42.165817: train_loss -0.1008 +2026-04-08 19:27:42.173821: val_loss -0.0849 +2026-04-08 19:27:42.176488: Pseudo dice [0.2224, 0.0282, 0.6426, 0.2572, 0.2191, 0.5135, 0.2911] +2026-04-08 19:27:42.179491: Epoch time: 101.99 s +2026-04-08 19:27:43.269090: +2026-04-08 19:27:43.270702: Epoch 126 +2026-04-08 19:27:43.272481: Current learning rate: 0.00886 +2026-04-08 19:29:26.766957: train_loss -0.097 +2026-04-08 19:29:26.774598: val_loss -0.11 +2026-04-08 19:29:26.776597: Pseudo dice [0.5228, 0.4889, 0.5208, 0.3518, 0.2108, 0.5202, 0.6247] +2026-04-08 19:29:26.779489: Epoch time: 103.5 s +2026-04-08 19:29:26.781889: Yayy! New best EMA pseudo Dice: 0.3417 +2026-04-08 19:29:29.703467: +2026-04-08 19:29:29.705455: Epoch 127 +2026-04-08 19:29:29.707257: Current learning rate: 0.00885 +2026-04-08 19:31:10.969628: train_loss -0.0879 +2026-04-08 19:31:10.976429: val_loss -0.053 +2026-04-08 19:31:10.978207: Pseudo dice [0.0542, 0.0509, 0.5387, 0.0, 0.0996, 0.3853, 0.6295] +2026-04-08 19:31:10.981094: Epoch time: 101.27 s +2026-04-08 19:31:12.055045: +2026-04-08 19:31:12.056802: Epoch 128 +2026-04-08 19:31:12.058395: Current learning rate: 0.00884 +2026-04-08 19:32:53.558158: train_loss -0.0788 +2026-04-08 19:32:53.564685: val_loss -0.1035 +2026-04-08 19:32:53.567123: Pseudo dice [0.3394, 0.5025, 0.702, 0.0007, 0.281, 0.6091, 0.7552] +2026-04-08 19:32:53.569428: Epoch time: 101.51 s +2026-04-08 19:32:53.571370: Yayy! 
New best EMA pseudo Dice: 0.345 +2026-04-08 19:32:56.426134: +2026-04-08 19:32:56.428631: Epoch 129 +2026-04-08 19:32:56.431734: Current learning rate: 0.00883 +2026-04-08 19:34:39.134993: train_loss -0.1094 +2026-04-08 19:34:39.141810: val_loss -0.0695 +2026-04-08 19:34:39.143721: Pseudo dice [0.4399, 0.2376, 0.5976, 0.0678, 0.0741, 0.7585, 0.6756] +2026-04-08 19:34:39.147218: Epoch time: 102.71 s +2026-04-08 19:34:39.149044: Yayy! New best EMA pseudo Dice: 0.3512 +2026-04-08 19:34:41.974284: +2026-04-08 19:34:41.975922: Epoch 130 +2026-04-08 19:34:41.977573: Current learning rate: 0.00882 +2026-04-08 19:36:23.950158: train_loss -0.0866 +2026-04-08 19:36:23.957358: val_loss -0.0468 +2026-04-08 19:36:23.970693: Pseudo dice [0.4417, 0.2843, 0.4212, 0.0002, 0.2079, 0.3944, 0.5401] +2026-04-08 19:36:23.978242: Epoch time: 101.98 s +2026-04-08 19:36:25.036607: +2026-04-08 19:36:25.038495: Epoch 131 +2026-04-08 19:36:25.040400: Current learning rate: 0.00881 +2026-04-08 19:38:06.745717: train_loss -0.1133 +2026-04-08 19:38:06.750904: val_loss -0.0858 +2026-04-08 19:38:06.752693: Pseudo dice [0.4702, 0.2017, 0.6163, 0.2915, 0.0959, 0.5408, 0.7917] +2026-04-08 19:38:06.754838: Epoch time: 101.71 s +2026-04-08 19:38:06.756699: Yayy! New best EMA pseudo Dice: 0.3569 +2026-04-08 19:38:09.476056: +2026-04-08 19:38:09.477925: Epoch 132 +2026-04-08 19:38:09.479406: Current learning rate: 0.0088 +2026-04-08 19:39:50.632919: train_loss -0.1014 +2026-04-08 19:39:50.639712: val_loss -0.0941 +2026-04-08 19:39:50.641586: Pseudo dice [0.5032, 0.3949, 0.6334, 0.0, 0.1893, 0.2429, 0.7693] +2026-04-08 19:39:50.646850: Epoch time: 101.16 s +2026-04-08 19:39:50.648937: Yayy! 
New best EMA pseudo Dice: 0.3602 +2026-04-08 19:39:53.402630: +2026-04-08 19:39:53.405073: Epoch 133 +2026-04-08 19:39:53.406549: Current learning rate: 0.00879 +2026-04-08 19:41:35.806674: train_loss -0.1092 +2026-04-08 19:41:35.814828: val_loss -0.109 +2026-04-08 19:41:35.817787: Pseudo dice [0.4465, 0.2093, 0.7121, 0.1154, 0.13, 0.5957, 0.6666] +2026-04-08 19:41:35.820998: Epoch time: 102.41 s +2026-04-08 19:41:35.823046: Yayy! New best EMA pseudo Dice: 0.3653 +2026-04-08 19:41:38.536078: +2026-04-08 19:41:38.538274: Epoch 134 +2026-04-08 19:41:38.539912: Current learning rate: 0.00879 +2026-04-08 19:43:20.624409: train_loss -0.1053 +2026-04-08 19:43:20.629507: val_loss -0.1019 +2026-04-08 19:43:20.631215: Pseudo dice [0.5471, 0.2429, 0.7011, 0.1009, 0.253, 0.5413, 0.6029] +2026-04-08 19:43:20.633606: Epoch time: 102.09 s +2026-04-08 19:43:20.635128: Yayy! New best EMA pseudo Dice: 0.3715 +2026-04-08 19:43:23.390442: +2026-04-08 19:43:23.392146: Epoch 135 +2026-04-08 19:43:23.393569: Current learning rate: 0.00878 +2026-04-08 19:45:05.317781: train_loss -0.1171 +2026-04-08 19:45:05.325352: val_loss -0.0922 +2026-04-08 19:45:05.329252: Pseudo dice [0.1624, 0.0731, 0.4984, 0.1076, 0.3261, 0.6553, 0.6916] +2026-04-08 19:45:05.333597: Epoch time: 101.93 s +2026-04-08 19:45:06.418355: +2026-04-08 19:45:06.420866: Epoch 136 +2026-04-08 19:45:06.424016: Current learning rate: 0.00877 +2026-04-08 19:46:47.471354: train_loss -0.1013 +2026-04-08 19:46:47.476812: val_loss -0.1044 +2026-04-08 19:46:47.478887: Pseudo dice [0.3774, 0.3997, 0.7104, 0.4431, 0.1305, 0.361, 0.6733] +2026-04-08 19:46:47.482079: Epoch time: 101.06 s +2026-04-08 19:46:47.483945: Yayy! 
New best EMA pseudo Dice: 0.3774 +2026-04-08 19:46:50.291921: +2026-04-08 19:46:50.295577: Epoch 137 +2026-04-08 19:46:50.297616: Current learning rate: 0.00876 +2026-04-08 19:48:32.085474: train_loss -0.1122 +2026-04-08 19:48:32.095531: val_loss -0.0769 +2026-04-08 19:48:32.100539: Pseudo dice [0.1922, 0.2793, 0.5066, 0.0013, 0.1558, 0.3007, 0.6916] +2026-04-08 19:48:32.104898: Epoch time: 101.8 s +2026-04-08 19:48:34.309582: +2026-04-08 19:48:34.319017: Epoch 138 +2026-04-08 19:48:34.330019: Current learning rate: 0.00875 +2026-04-08 19:50:16.830013: train_loss -0.1038 +2026-04-08 19:50:16.835488: val_loss -0.1112 +2026-04-08 19:50:16.838115: Pseudo dice [0.448, 0.5519, 0.6855, 0.034, 0.3521, 0.7575, 0.8024] +2026-04-08 19:50:16.840314: Epoch time: 102.52 s +2026-04-08 19:50:16.843712: Yayy! New best EMA pseudo Dice: 0.385 +2026-04-08 19:50:19.637750: +2026-04-08 19:50:19.639492: Epoch 139 +2026-04-08 19:50:19.641260: Current learning rate: 0.00874 +2026-04-08 19:52:01.563654: train_loss -0.118 +2026-04-08 19:52:01.569651: val_loss -0.1248 +2026-04-08 19:52:01.571325: Pseudo dice [0.1097, 0.1812, 0.5637, 0.0017, 0.1826, 0.5206, 0.8114] +2026-04-08 19:52:01.576874: Epoch time: 101.93 s +2026-04-08 19:52:02.653636: +2026-04-08 19:52:02.655501: Epoch 140 +2026-04-08 19:52:02.657369: Current learning rate: 0.00873 +2026-04-08 19:53:45.851449: train_loss -0.1215 +2026-04-08 19:53:45.858707: val_loss -0.0967 +2026-04-08 19:53:45.861052: Pseudo dice [0.4697, 0.5029, 0.544, 0.0027, 0.2006, 0.7777, 0.5761] +2026-04-08 19:53:45.863917: Epoch time: 103.2 s +2026-04-08 19:53:45.865833: Yayy! 
New best EMA pseudo Dice: 0.3862 +2026-04-08 19:53:48.658213: +2026-04-08 19:53:48.660576: Epoch 141 +2026-04-08 19:53:48.662415: Current learning rate: 0.00872 +2026-04-08 19:55:30.950187: train_loss -0.1173 +2026-04-08 19:55:30.956745: val_loss -0.1289 +2026-04-08 19:55:30.958414: Pseudo dice [0.5651, 0.5231, 0.6869, 0.0007, 0.1909, 0.7474, 0.6735] +2026-04-08 19:55:30.962385: Epoch time: 102.3 s +2026-04-08 19:55:30.964063: Yayy! New best EMA pseudo Dice: 0.396 +2026-04-08 19:55:33.837643: +2026-04-08 19:55:33.840002: Epoch 142 +2026-04-08 19:55:33.842086: Current learning rate: 0.00871 +2026-04-08 19:57:17.280801: train_loss -0.1262 +2026-04-08 19:57:17.287034: val_loss -0.1075 +2026-04-08 19:57:17.288648: Pseudo dice [0.2575, 0.128, 0.6134, 0.2783, 0.1245, 0.7388, 0.3936] +2026-04-08 19:57:17.293727: Epoch time: 103.45 s +2026-04-08 19:57:18.401332: +2026-04-08 19:57:18.403162: Epoch 143 +2026-04-08 19:57:18.405101: Current learning rate: 0.0087 +2026-04-08 19:59:01.427444: train_loss -0.1164 +2026-04-08 19:59:01.433333: val_loss -0.0841 +2026-04-08 19:59:01.436132: Pseudo dice [0.2189, 0.3675, 0.5122, 0.0178, 0.0565, 0.4859, 0.6818] +2026-04-08 19:59:01.440176: Epoch time: 103.03 s +2026-04-08 19:59:02.515525: +2026-04-08 19:59:02.517531: Epoch 144 +2026-04-08 19:59:02.519384: Current learning rate: 0.00869 +2026-04-08 20:00:44.010010: train_loss -0.1083 +2026-04-08 20:00:44.015472: val_loss -0.1012 +2026-04-08 20:00:44.017672: Pseudo dice [0.3778, 0.4412, 0.5747, 0.0008, 0.2563, 0.3378, 0.7956] +2026-04-08 20:00:44.019655: Epoch time: 101.5 s +2026-04-08 20:00:45.123496: +2026-04-08 20:00:45.125229: Epoch 145 +2026-04-08 20:00:45.127010: Current learning rate: 0.00868 +2026-04-08 20:02:27.163217: train_loss -0.1144 +2026-04-08 20:02:27.169949: val_loss -0.0819 +2026-04-08 20:02:27.172335: Pseudo dice [0.4872, 0.3072, 0.7496, 0.0072, 0.2747, 0.6454, 0.4887] +2026-04-08 20:02:27.176876: Epoch time: 102.04 s +2026-04-08 20:02:28.263852: +2026-04-08 
20:02:28.265681: Epoch 146 +2026-04-08 20:02:28.267196: Current learning rate: 0.00868 +2026-04-08 20:04:10.369293: train_loss -0.1187 +2026-04-08 20:04:10.375803: val_loss -0.1263 +2026-04-08 20:04:10.379279: Pseudo dice [0.6237, 0.324, 0.6082, 0.0013, 0.2778, 0.6382, 0.6278] +2026-04-08 20:04:10.382033: Epoch time: 102.11 s +2026-04-08 20:04:10.384372: Yayy! New best EMA pseudo Dice: 0.3965 +2026-04-08 20:04:13.238534: +2026-04-08 20:04:13.240291: Epoch 147 +2026-04-08 20:04:13.242714: Current learning rate: 0.00867 +2026-04-08 20:05:55.139239: train_loss -0.1092 +2026-04-08 20:05:55.145514: val_loss -0.0974 +2026-04-08 20:05:55.147958: Pseudo dice [0.2693, 0.2396, 0.6683, 0.0008, 0.1046, 0.5214, 0.7753] +2026-04-08 20:05:55.150971: Epoch time: 101.9 s +2026-04-08 20:05:56.253699: +2026-04-08 20:05:56.255370: Epoch 148 +2026-04-08 20:05:56.256985: Current learning rate: 0.00866 +2026-04-08 20:07:38.695701: train_loss -0.1229 +2026-04-08 20:07:38.702751: val_loss -0.1141 +2026-04-08 20:07:38.704731: Pseudo dice [0.4208, 0.4481, 0.6039, 0.001, 0.2136, 0.669, 0.488] +2026-04-08 20:07:38.707413: Epoch time: 102.45 s +2026-04-08 20:07:39.817646: +2026-04-08 20:07:39.819562: Epoch 149 +2026-04-08 20:07:39.821069: Current learning rate: 0.00865 +2026-04-08 20:09:21.435072: train_loss -0.1082 +2026-04-08 20:09:21.441904: val_loss -0.0744 +2026-04-08 20:09:21.444126: Pseudo dice [0.5455, 0.0707, 0.4667, 0.0106, 0.1742, 0.6594, 0.6014] +2026-04-08 20:09:21.448248: Epoch time: 101.62 s +2026-04-08 20:09:24.262099: +2026-04-08 20:09:24.263719: Epoch 150 +2026-04-08 20:09:24.265066: Current learning rate: 0.00864 +2026-04-08 20:11:06.123811: train_loss -0.1208 +2026-04-08 20:11:06.132223: val_loss -0.0865 +2026-04-08 20:11:06.136448: Pseudo dice [0.5326, 0.4596, 0.5164, 0.0, 0.2587, 0.6598, 0.7029] +2026-04-08 20:11:06.138667: Epoch time: 101.86 s +2026-04-08 20:11:06.141227: Yayy! 
New best EMA pseudo Dice: 0.3972 +2026-04-08 20:11:09.060822: +2026-04-08 20:11:09.062399: Epoch 151 +2026-04-08 20:11:09.064262: Current learning rate: 0.00863 +2026-04-08 20:12:52.176716: train_loss -0.1203 +2026-04-08 20:12:52.185030: val_loss -0.0903 +2026-04-08 20:12:52.187656: Pseudo dice [0.2682, 0.5212, 0.5897, 0.0, 0.3956, 0.7618, 0.7891] +2026-04-08 20:12:52.194884: Epoch time: 103.12 s +2026-04-08 20:12:52.198582: Yayy! New best EMA pseudo Dice: 0.405 +2026-04-08 20:12:55.173297: +2026-04-08 20:12:55.177314: Epoch 152 +2026-04-08 20:12:55.180109: Current learning rate: 0.00862 +2026-04-08 20:14:37.053072: train_loss -0.1361 +2026-04-08 20:14:37.064891: val_loss -0.1053 +2026-04-08 20:14:37.067207: Pseudo dice [0.3139, 0.2328, 0.6051, 0.0, 0.2928, 0.6069, 0.489] +2026-04-08 20:14:37.069692: Epoch time: 101.88 s +2026-04-08 20:14:38.147994: +2026-04-08 20:14:38.149488: Epoch 153 +2026-04-08 20:14:38.150902: Current learning rate: 0.00861 +2026-04-08 20:16:19.237872: train_loss -0.1211 +2026-04-08 20:16:19.243844: val_loss -0.1163 +2026-04-08 20:16:19.245677: Pseudo dice [0.4559, 0.3877, 0.5653, 0.0, 0.3671, 0.616, 0.7012] +2026-04-08 20:16:19.248696: Epoch time: 101.09 s +2026-04-08 20:16:20.367111: +2026-04-08 20:16:20.369497: Epoch 154 +2026-04-08 20:16:20.371506: Current learning rate: 0.0086 +2026-04-08 20:18:02.034476: train_loss -0.0961 +2026-04-08 20:18:02.042374: val_loss -0.0941 +2026-04-08 20:18:02.046171: Pseudo dice [0.3752, 0.114, 0.5627, 0.0992, 0.3653, 0.6605, 0.7163] +2026-04-08 20:18:02.048971: Epoch time: 101.67 s +2026-04-08 20:18:02.051727: Yayy! 
New best EMA pseudo Dice: 0.4057 +2026-04-08 20:18:06.021291: +2026-04-08 20:18:06.023851: Epoch 155 +2026-04-08 20:18:06.025497: Current learning rate: 0.00859 +2026-04-08 20:19:47.734964: train_loss -0.1115 +2026-04-08 20:19:47.740581: val_loss -0.1201 +2026-04-08 20:19:47.742410: Pseudo dice [0.528, 0.0969, 0.6907, 0.3846, 0.3699, 0.7097, 0.7133] +2026-04-08 20:19:47.744726: Epoch time: 101.72 s +2026-04-08 20:19:47.746208: Yayy! New best EMA pseudo Dice: 0.415 +2026-04-08 20:19:50.534492: +2026-04-08 20:19:50.536431: Epoch 156 +2026-04-08 20:19:50.537868: Current learning rate: 0.00858 +2026-04-08 20:21:33.045314: train_loss -0.1262 +2026-04-08 20:21:33.052540: val_loss -0.1391 +2026-04-08 20:21:33.054916: Pseudo dice [0.7506, 0.1353, 0.5448, 0.0011, 0.134, 0.7283, 0.6331] +2026-04-08 20:21:33.059509: Epoch time: 102.51 s +2026-04-08 20:21:33.061719: Yayy! New best EMA pseudo Dice: 0.4154 +2026-04-08 20:21:36.005842: +2026-04-08 20:21:36.007747: Epoch 157 +2026-04-08 20:21:36.009111: Current learning rate: 0.00858 +2026-04-08 20:23:18.504391: train_loss -0.1267 +2026-04-08 20:23:18.510823: val_loss -0.1053 +2026-04-08 20:23:18.513228: Pseudo dice [0.6597, 0.1418, 0.4357, 0.0172, 0.3868, 0.8135, 0.8009] +2026-04-08 20:23:18.515544: Epoch time: 102.5 s +2026-04-08 20:23:18.517733: Yayy! 
New best EMA pseudo Dice: 0.4203 +2026-04-08 20:23:21.499879: +2026-04-08 20:23:21.501906: Epoch 158 +2026-04-08 20:23:21.503990: Current learning rate: 0.00857 +2026-04-08 20:25:03.781483: train_loss -0.1087 +2026-04-08 20:25:03.787454: val_loss -0.0899 +2026-04-08 20:25:03.790128: Pseudo dice [0.1908, 0.0929, 0.4193, 0.1308, 0.4358, 0.7356, 0.5213] +2026-04-08 20:25:03.794232: Epoch time: 102.28 s +2026-04-08 20:25:04.901438: +2026-04-08 20:25:04.903751: Epoch 159 +2026-04-08 20:25:04.906904: Current learning rate: 0.00856 +2026-04-08 20:26:47.048743: train_loss -0.1185 +2026-04-08 20:26:47.057055: val_loss -0.0992 +2026-04-08 20:26:47.059442: Pseudo dice [0.5133, 0.2895, 0.5933, 0.2163, 0.4011, 0.6852, 0.638] +2026-04-08 20:26:47.063540: Epoch time: 102.15 s +2026-04-08 20:26:47.066206: Yayy! New best EMA pseudo Dice: 0.4206 +2026-04-08 20:26:50.061655: +2026-04-08 20:26:50.064784: Epoch 160 +2026-04-08 20:26:50.067803: Current learning rate: 0.00855 +2026-04-08 20:28:32.369039: train_loss -0.1171 +2026-04-08 20:28:32.375019: val_loss -0.1116 +2026-04-08 20:28:32.377940: Pseudo dice [0.3509, 0.1993, 0.6268, 0.4152, 0.1514, 0.3834, 0.6226] +2026-04-08 20:28:32.380843: Epoch time: 102.31 s +2026-04-08 20:28:33.499068: +2026-04-08 20:28:33.501021: Epoch 161 +2026-04-08 20:28:33.503845: Current learning rate: 0.00854 +2026-04-08 20:30:15.261441: train_loss -0.1192 +2026-04-08 20:30:15.267754: val_loss -0.1077 +2026-04-08 20:30:15.269585: Pseudo dice [0.4215, 0.4939, 0.6049, 0.1662, 0.2727, 0.5922, 0.8115] +2026-04-08 20:30:15.274333: Epoch time: 101.77 s +2026-04-08 20:30:15.276872: Yayy! 
New best EMA pseudo Dice: 0.4241 +2026-04-08 20:30:18.190969: +2026-04-08 20:30:18.192800: Epoch 162 +2026-04-08 20:30:18.194371: Current learning rate: 0.00853 +2026-04-08 20:31:59.829103: train_loss -0.126 +2026-04-08 20:31:59.835765: val_loss -0.0809 +2026-04-08 20:31:59.839148: Pseudo dice [0.2802, 0.3598, 0.491, 0.2555, 0.0721, 0.671, 0.537] +2026-04-08 20:31:59.843434: Epoch time: 101.64 s +2026-04-08 20:32:00.958446: +2026-04-08 20:32:00.960801: Epoch 163 +2026-04-08 20:32:00.972221: Current learning rate: 0.00852 +2026-04-08 20:33:43.932068: train_loss -0.1282 +2026-04-08 20:33:43.940011: val_loss -0.1236 +2026-04-08 20:33:43.950815: Pseudo dice [0.5425, 0.2069, 0.496, 0.5283, 0.2267, 0.7656, 0.4655] +2026-04-08 20:33:43.952853: Epoch time: 102.98 s +2026-04-08 20:33:45.097198: +2026-04-08 20:33:45.099018: Epoch 164 +2026-04-08 20:33:45.101375: Current learning rate: 0.00851 +2026-04-08 20:35:27.262280: train_loss -0.1201 +2026-04-08 20:35:27.269313: val_loss -0.0886 +2026-04-08 20:35:27.272027: Pseudo dice [0.5219, 0.1476, 0.4544, 0.1387, 0.5156, 0.6673, 0.6409] +2026-04-08 20:35:27.276732: Epoch time: 102.17 s +2026-04-08 20:35:27.279249: Yayy! New best EMA pseudo Dice: 0.4257 +2026-04-08 20:35:30.082680: +2026-04-08 20:35:30.084306: Epoch 165 +2026-04-08 20:35:30.085740: Current learning rate: 0.0085 +2026-04-08 20:37:12.071749: train_loss -0.1344 +2026-04-08 20:37:12.076841: val_loss -0.1065 +2026-04-08 20:37:12.079482: Pseudo dice [0.2905, 0.2126, 0.5238, 0.0288, 0.4505, 0.7524, 0.8043] +2026-04-08 20:37:12.083758: Epoch time: 101.99 s +2026-04-08 20:37:12.085365: Yayy! 
New best EMA pseudo Dice: 0.4269 +2026-04-08 20:37:14.889006: +2026-04-08 20:37:14.890953: Epoch 166 +2026-04-08 20:37:14.892513: Current learning rate: 0.00849 +2026-04-08 20:38:57.594439: train_loss -0.1401 +2026-04-08 20:38:57.600486: val_loss -0.1112 +2026-04-08 20:38:57.602235: Pseudo dice [0.7175, 0.3932, 0.6356, 0.2291, 0.2, 0.5932, 0.6867] +2026-04-08 20:38:57.606147: Epoch time: 102.71 s +2026-04-08 20:38:57.607949: Yayy! New best EMA pseudo Dice: 0.4335 +2026-04-08 20:39:00.376631: +2026-04-08 20:39:00.378873: Epoch 167 +2026-04-08 20:39:00.380854: Current learning rate: 0.00848 +2026-04-08 20:40:42.333761: train_loss -0.1288 +2026-04-08 20:40:42.339607: val_loss -0.1136 +2026-04-08 20:40:42.341507: Pseudo dice [0.5483, 0.2278, 0.6826, 0.0022, 0.2866, 0.7546, 0.5064] +2026-04-08 20:40:42.343744: Epoch time: 101.96 s +2026-04-08 20:40:43.431260: +2026-04-08 20:40:43.433059: Epoch 168 +2026-04-08 20:40:43.435488: Current learning rate: 0.00847 +2026-04-08 20:42:25.334424: train_loss -0.1206 +2026-04-08 20:42:25.339911: val_loss -0.1063 +2026-04-08 20:42:25.341862: Pseudo dice [0.4856, 0.3239, 0.7067, 0.0163, 0.1547, 0.5263, 0.8196] +2026-04-08 20:42:25.344035: Epoch time: 101.91 s +2026-04-08 20:42:26.444724: +2026-04-08 20:42:26.447108: Epoch 169 +2026-04-08 20:42:26.449776: Current learning rate: 0.00847 +2026-04-08 20:44:08.425346: train_loss -0.1466 +2026-04-08 20:44:08.430707: val_loss -0.1113 +2026-04-08 20:44:08.432776: Pseudo dice [0.5871, 0.0457, 0.7296, 0.0003, 0.1541, 0.6171, 0.7402] +2026-04-08 20:44:08.435176: Epoch time: 101.98 s +2026-04-08 20:44:09.546387: +2026-04-08 20:44:09.548727: Epoch 170 +2026-04-08 20:44:09.550488: Current learning rate: 0.00846 +2026-04-08 20:45:51.832519: train_loss -0.1405 +2026-04-08 20:45:51.838402: val_loss -0.0892 +2026-04-08 20:45:51.840366: Pseudo dice [0.1324, 0.3294, 0.5564, 0.043, 0.3222, 0.5155, 0.6358] +2026-04-08 20:45:51.843418: Epoch time: 102.29 s +2026-04-08 20:45:52.948962: +2026-04-08 
20:45:52.953305: Epoch 171 +2026-04-08 20:45:52.968379: Current learning rate: 0.00845 +2026-04-08 20:47:35.157758: train_loss -0.1286 +2026-04-08 20:47:35.164139: val_loss -0.1062 +2026-04-08 20:47:35.166501: Pseudo dice [0.5619, 0.192, 0.6859, 0.0002, 0.5216, 0.6578, 0.6086] +2026-04-08 20:47:35.169075: Epoch time: 102.21 s +2026-04-08 20:47:36.263374: +2026-04-08 20:47:36.265340: Epoch 172 +2026-04-08 20:47:36.266849: Current learning rate: 0.00844 +2026-04-08 20:49:20.773376: train_loss -0.141 +2026-04-08 20:49:20.779254: val_loss -0.071 +2026-04-08 20:49:20.781028: Pseudo dice [0.4087, 0.2523, 0.6712, 0.004, 0.1555, 0.5499, 0.562] +2026-04-08 20:49:20.783435: Epoch time: 104.51 s +2026-04-08 20:49:21.886513: +2026-04-08 20:49:21.888407: Epoch 173 +2026-04-08 20:49:21.890592: Current learning rate: 0.00843 +2026-04-08 20:51:07.021657: train_loss -0.1163 +2026-04-08 20:51:07.027921: val_loss -0.1233 +2026-04-08 20:51:07.031455: Pseudo dice [0.4627, 0.3207, 0.7328, 0.3212, 0.5966, 0.5846, 0.7067] +2026-04-08 20:51:07.033627: Epoch time: 105.14 s +2026-04-08 20:51:08.169366: +2026-04-08 20:51:08.171107: Epoch 174 +2026-04-08 20:51:08.172793: Current learning rate: 0.00842 +2026-04-08 20:52:52.186962: train_loss -0.1398 +2026-04-08 20:52:52.197113: val_loss -0.0776 +2026-04-08 20:52:52.201136: Pseudo dice [0.6458, 0.4092, 0.4104, 0.0623, 0.2882, 0.5182, 0.6139] +2026-04-08 20:52:52.206391: Epoch time: 104.02 s +2026-04-08 20:52:53.324027: +2026-04-08 20:52:53.326049: Epoch 175 +2026-04-08 20:52:53.329829: Current learning rate: 0.00841 +2026-04-08 20:54:36.493708: train_loss -0.1436 +2026-04-08 20:54:36.501467: val_loss -0.1467 +2026-04-08 20:54:36.503940: Pseudo dice [0.5358, 0.3429, 0.5853, 0.0, 0.4597, 0.754, 0.7831] +2026-04-08 20:54:36.506479: Epoch time: 103.17 s +2026-04-08 20:54:36.510188: Yayy! 
New best EMA pseudo Dice: 0.4382 +2026-04-08 20:54:39.399373: +2026-04-08 20:54:39.402079: Epoch 176 +2026-04-08 20:54:39.404785: Current learning rate: 0.0084 +2026-04-08 20:56:22.067480: train_loss -0.1405 +2026-04-08 20:56:22.074907: val_loss -0.1118 +2026-04-08 20:56:22.078063: Pseudo dice [0.1687, 0.5045, 0.5186, 0.056, 0.2845, 0.3879, 0.799] +2026-04-08 20:56:22.080509: Epoch time: 102.67 s +2026-04-08 20:56:23.185339: +2026-04-08 20:56:23.187938: Epoch 177 +2026-04-08 20:56:23.190046: Current learning rate: 0.00839 +2026-04-08 20:58:08.990336: train_loss -0.1405 +2026-04-08 20:58:08.996929: val_loss -0.1406 +2026-04-08 20:58:08.999974: Pseudo dice [0.6549, 0.4436, 0.765, 0.4462, 0.2532, 0.6327, 0.8188] +2026-04-08 20:58:09.002822: Epoch time: 105.81 s +2026-04-08 20:58:09.004465: Yayy! New best EMA pseudo Dice: 0.4473 +2026-04-08 20:58:11.994001: +2026-04-08 20:58:11.998220: Epoch 178 +2026-04-08 20:58:12.001747: Current learning rate: 0.00838 +2026-04-08 20:59:54.869543: train_loss -0.1408 +2026-04-08 20:59:54.875769: val_loss -0.1044 +2026-04-08 20:59:54.877819: Pseudo dice [0.6767, 0.1565, 0.7385, 0.0271, 0.1631, 0.7318, 0.8367] +2026-04-08 20:59:54.883358: Epoch time: 102.88 s +2026-04-08 20:59:54.885605: Yayy! 
New best EMA pseudo Dice: 0.4501 +2026-04-08 20:59:57.722096: +2026-04-08 20:59:57.724086: Epoch 179 +2026-04-08 20:59:57.725853: Current learning rate: 0.00837 +2026-04-08 21:01:39.151149: train_loss -0.1206 +2026-04-08 21:01:39.158075: val_loss -0.0955 +2026-04-08 21:01:39.160012: Pseudo dice [0.374, 0.3308, 0.646, 0.0995, 0.3058, 0.267, 0.5655] +2026-04-08 21:01:39.162915: Epoch time: 101.43 s +2026-04-08 21:01:40.245979: +2026-04-08 21:01:40.247679: Epoch 180 +2026-04-08 21:01:40.249461: Current learning rate: 0.00836 +2026-04-08 21:03:21.596875: train_loss -0.1405 +2026-04-08 21:03:21.603306: val_loss -0.1159 +2026-04-08 21:03:21.605636: Pseudo dice [0.4575, 0.0239, 0.6658, 0.0, 0.2445, 0.6471, 0.6191] +2026-04-08 21:03:21.607809: Epoch time: 101.35 s +2026-04-08 21:03:22.738662: +2026-04-08 21:03:22.741053: Epoch 181 +2026-04-08 21:03:22.743252: Current learning rate: 0.00836 +2026-04-08 21:05:04.192341: train_loss -0.1401 +2026-04-08 21:05:04.198384: val_loss -0.1401 +2026-04-08 21:05:04.200361: Pseudo dice [0.7156, 0.2632, 0.6129, 0.0, 0.3643, 0.7555, 0.7847] +2026-04-08 21:05:04.202729: Epoch time: 101.46 s +2026-04-08 21:05:05.297296: +2026-04-08 21:05:05.298863: Epoch 182 +2026-04-08 21:05:05.300777: Current learning rate: 0.00835 +2026-04-08 21:06:47.589766: train_loss -0.1463 +2026-04-08 21:06:47.594779: val_loss -0.123 +2026-04-08 21:06:47.596738: Pseudo dice [0.5953, 0.0834, 0.7395, 0.2082, 0.3477, 0.6819, 0.6859] +2026-04-08 21:06:47.600787: Epoch time: 102.3 s +2026-04-08 21:06:48.759910: +2026-04-08 21:06:48.762066: Epoch 183 +2026-04-08 21:06:48.763691: Current learning rate: 0.00834 +2026-04-08 21:08:30.704328: train_loss -0.1306 +2026-04-08 21:08:30.710970: val_loss -0.1185 +2026-04-08 21:08:30.713936: Pseudo dice [0.5436, 0.1381, 0.6022, 0.4179, 0.2437, 0.7365, 0.5733] +2026-04-08 21:08:30.718194: Epoch time: 101.95 s +2026-04-08 21:08:31.855206: +2026-04-08 21:08:31.860355: Epoch 184 +2026-04-08 21:08:31.862052: Current learning rate: 0.00833 
+2026-04-08 21:10:14.404466: train_loss -0.1427 +2026-04-08 21:10:14.410248: val_loss -0.1306 +2026-04-08 21:10:14.412963: Pseudo dice [0.7346, 0.507, 0.6881, 0.6772, 0.4249, 0.2544, 0.6483] +2026-04-08 21:10:14.418210: Epoch time: 102.55 s +2026-04-08 21:10:14.421365: Yayy! New best EMA pseudo Dice: 0.4591 +2026-04-08 21:10:17.240129: +2026-04-08 21:10:17.241808: Epoch 185 +2026-04-08 21:10:17.243227: Current learning rate: 0.00832 +2026-04-08 21:11:58.424668: train_loss -0.1277 +2026-04-08 21:11:58.430305: val_loss -0.0867 +2026-04-08 21:11:58.432061: Pseudo dice [0.4596, 0.573, 0.6557, 0.1272, 0.197, 0.4002, 0.6175] +2026-04-08 21:11:58.434164: Epoch time: 101.19 s +2026-04-08 21:11:59.537430: +2026-04-08 21:11:59.539595: Epoch 186 +2026-04-08 21:11:59.541315: Current learning rate: 0.00831 +2026-04-08 21:13:41.304684: train_loss -0.1471 +2026-04-08 21:13:41.312900: val_loss -0.1246 +2026-04-08 21:13:41.314538: Pseudo dice [0.6169, 0.4584, 0.4992, 0.2634, 0.3229, 0.8226, 0.8279] +2026-04-08 21:13:41.318377: Epoch time: 101.77 s +2026-04-08 21:13:41.320505: Yayy! 
New best EMA pseudo Dice: 0.4653 +2026-04-08 21:13:44.176113: +2026-04-08 21:13:44.177843: Epoch 187 +2026-04-08 21:13:44.179486: Current learning rate: 0.0083 +2026-04-08 21:15:26.087536: train_loss -0.1378 +2026-04-08 21:15:26.093650: val_loss -0.1027 +2026-04-08 21:15:26.096534: Pseudo dice [0.2787, 0.5912, 0.3097, 0.2385, 0.2813, 0.3836, 0.8057] +2026-04-08 21:15:26.099800: Epoch time: 101.91 s +2026-04-08 21:15:27.217323: +2026-04-08 21:15:27.219396: Epoch 188 +2026-04-08 21:15:27.221235: Current learning rate: 0.00829 +2026-04-08 21:17:09.126516: train_loss -0.1349 +2026-04-08 21:17:09.132851: val_loss -0.108 +2026-04-08 21:17:09.134913: Pseudo dice [0.427, 0.2206, 0.6221, 0.0089, 0.321, 0.7258, 0.6678] +2026-04-08 21:17:09.137337: Epoch time: 101.91 s +2026-04-08 21:17:10.246176: +2026-04-08 21:17:10.247918: Epoch 189 +2026-04-08 21:17:10.249234: Current learning rate: 0.00828 +2026-04-08 21:18:53.373605: train_loss -0.1331 +2026-04-08 21:18:53.380391: val_loss -0.1137 +2026-04-08 21:18:53.384459: Pseudo dice [0.5746, 0.3506, 0.6795, 0.6156, 0.3669, 0.753, 0.8339] +2026-04-08 21:18:53.389595: Epoch time: 103.13 s +2026-04-08 21:18:53.391865: Yayy! New best EMA pseudo Dice: 0.4707 +2026-04-08 21:18:57.521319: +2026-04-08 21:18:57.522955: Epoch 190 +2026-04-08 21:18:57.524449: Current learning rate: 0.00827 +2026-04-08 21:20:38.461777: train_loss -0.1359 +2026-04-08 21:20:38.468834: val_loss -0.1179 +2026-04-08 21:20:38.472320: Pseudo dice [0.7231, 0.6288, 0.4392, 0.015, 0.4757, 0.4913, 0.7633] +2026-04-08 21:20:38.476766: Epoch time: 100.94 s +2026-04-08 21:20:38.478702: Yayy! 
New best EMA pseudo Dice: 0.4742 +2026-04-08 21:20:41.189979: +2026-04-08 21:20:41.191617: Epoch 191 +2026-04-08 21:20:41.193063: Current learning rate: 0.00826 +2026-04-08 21:22:22.800422: train_loss -0.138 +2026-04-08 21:22:22.807987: val_loss -0.1265 +2026-04-08 21:22:22.810128: Pseudo dice [0.5657, 0.1495, 0.6681, 0.0264, 0.2703, 0.655, 0.6363] +2026-04-08 21:22:22.812279: Epoch time: 101.61 s +2026-04-08 21:22:23.934270: +2026-04-08 21:22:23.936308: Epoch 192 +2026-04-08 21:22:23.938872: Current learning rate: 0.00825 +2026-04-08 21:24:04.925380: train_loss -0.1345 +2026-04-08 21:24:04.932649: val_loss -0.1201 +2026-04-08 21:24:04.934410: Pseudo dice [0.5915, 0.4338, 0.6264, 0.2307, 0.3212, 0.726, 0.741] +2026-04-08 21:24:04.938452: Epoch time: 100.99 s +2026-04-08 21:24:04.940142: Yayy! New best EMA pseudo Dice: 0.4747 +2026-04-08 21:24:07.745156: +2026-04-08 21:24:07.746838: Epoch 193 +2026-04-08 21:24:07.748221: Current learning rate: 0.00824 +2026-04-08 21:25:49.749770: train_loss -0.1412 +2026-04-08 21:25:49.766712: val_loss -0.0968 +2026-04-08 21:25:49.771264: Pseudo dice [0.444, 0.1067, 0.7563, 0.003, 0.3787, 0.5649, 0.2845] +2026-04-08 21:25:49.777491: Epoch time: 102.01 s +2026-04-08 21:25:50.904036: +2026-04-08 21:25:50.905658: Epoch 194 +2026-04-08 21:25:50.907377: Current learning rate: 0.00824 +2026-04-08 21:27:33.211385: train_loss -0.1432 +2026-04-08 21:27:33.217346: val_loss -0.1299 +2026-04-08 21:27:33.219526: Pseudo dice [0.6618, 0.299, 0.7447, 0.0, 0.2096, 0.6767, 0.8331] +2026-04-08 21:27:33.222162: Epoch time: 102.31 s +2026-04-08 21:27:34.352626: +2026-04-08 21:27:34.354538: Epoch 195 +2026-04-08 21:27:34.357929: Current learning rate: 0.00823 +2026-04-08 21:29:16.046005: train_loss -0.1284 +2026-04-08 21:29:16.051776: val_loss -0.1079 +2026-04-08 21:29:16.053949: Pseudo dice [0.6009, 0.4105, 0.6507, 0.2531, 0.328, 0.769, 0.7184] +2026-04-08 21:29:16.058903: Epoch time: 101.7 s +2026-04-08 21:29:17.224022: +2026-04-08 21:29:17.225928: 
Epoch 196 +2026-04-08 21:29:17.228049: Current learning rate: 0.00822 +2026-04-08 21:30:59.904247: train_loss -0.1531 +2026-04-08 21:30:59.911068: val_loss -0.1158 +2026-04-08 21:30:59.913206: Pseudo dice [0.6183, 0.6425, 0.2918, 0.1892, 0.4062, 0.7335, 0.7397] +2026-04-08 21:30:59.915975: Epoch time: 102.68 s +2026-04-08 21:30:59.919057: Yayy! New best EMA pseudo Dice: 0.4772 +2026-04-08 21:31:02.809947: +2026-04-08 21:31:02.811887: Epoch 197 +2026-04-08 21:31:02.813396: Current learning rate: 0.00821 +2026-04-08 21:32:44.424785: train_loss -0.1294 +2026-04-08 21:32:44.433176: val_loss -0.1037 +2026-04-08 21:32:44.437193: Pseudo dice [0.8064, 0.3206, 0.4792, 0.0003, 0.3045, 0.4965, 0.8101] +2026-04-08 21:32:44.443051: Epoch time: 101.62 s +2026-04-08 21:32:45.566299: +2026-04-08 21:32:45.568170: Epoch 198 +2026-04-08 21:32:45.569896: Current learning rate: 0.0082 +2026-04-08 21:34:26.620845: train_loss -0.1576 +2026-04-08 21:34:26.627189: val_loss -0.13 +2026-04-08 21:34:26.628957: Pseudo dice [0.3288, 0.5863, 0.38, 0.3684, 0.4923, 0.6859, 0.7162] +2026-04-08 21:34:26.633335: Epoch time: 101.06 s +2026-04-08 21:34:26.635744: Yayy! 
New best EMA pseudo Dice: 0.4787 +2026-04-08 21:34:29.452486: +2026-04-08 21:34:29.455851: Epoch 199 +2026-04-08 21:34:29.458583: Current learning rate: 0.00819 +2026-04-08 21:36:10.982458: train_loss -0.1329 +2026-04-08 21:36:10.989235: val_loss -0.1146 +2026-04-08 21:36:10.992532: Pseudo dice [0.7167, 0.2435, 0.7365, 0.0, 0.1097, 0.8116, 0.6226] +2026-04-08 21:36:10.995765: Epoch time: 101.53 s +2026-04-08 21:36:13.905188: +2026-04-08 21:36:13.907295: Epoch 200 +2026-04-08 21:36:13.909101: Current learning rate: 0.00818 +2026-04-08 21:37:55.760118: train_loss -0.1477 +2026-04-08 21:37:55.768660: val_loss -0.0844 +2026-04-08 21:37:55.771472: Pseudo dice [0.1772, 0.4462, 0.5149, 0.0076, 0.4022, 0.5584, 0.6354] +2026-04-08 21:37:55.775257: Epoch time: 101.86 s +2026-04-08 21:37:56.893689: +2026-04-08 21:37:56.897887: Epoch 201 +2026-04-08 21:37:56.900823: Current learning rate: 0.00817 +2026-04-08 21:39:38.865136: train_loss -0.1386 +2026-04-08 21:39:38.871145: val_loss -0.0894 +2026-04-08 21:39:38.874698: Pseudo dice [0.5629, 0.18, 0.5545, 0.0518, 0.2589, 0.5937, 0.7578] +2026-04-08 21:39:38.878503: Epoch time: 101.97 s +2026-04-08 21:39:40.006558: +2026-04-08 21:39:40.008558: Epoch 202 +2026-04-08 21:39:40.010825: Current learning rate: 0.00816 +2026-04-08 21:41:21.343664: train_loss -0.1417 +2026-04-08 21:41:21.352612: val_loss -0.1069 +2026-04-08 21:41:21.355261: Pseudo dice [0.5521, 0.6092, 0.6419, 0.2188, 0.5669, 0.6161, 0.7268] +2026-04-08 21:41:21.357489: Epoch time: 101.34 s +2026-04-08 21:41:22.491777: +2026-04-08 21:41:22.494000: Epoch 203 +2026-04-08 21:41:22.495481: Current learning rate: 0.00815 +2026-04-08 21:43:05.885036: train_loss -0.1334 +2026-04-08 21:43:05.892112: val_loss -0.1144 +2026-04-08 21:43:05.895715: Pseudo dice [0.6707, 0.3631, 0.5123, 0.1884, 0.2728, 0.3968, 0.5709] +2026-04-08 21:43:05.898372: Epoch time: 103.4 s +2026-04-08 21:43:07.026040: +2026-04-08 21:43:07.027770: Epoch 204 +2026-04-08 21:43:07.029745: Current learning rate: 
0.00814 +2026-04-08 21:44:48.782908: train_loss -0.1488 +2026-04-08 21:44:48.789743: val_loss -0.148 +2026-04-08 21:44:48.791869: Pseudo dice [0.638, 0.4864, 0.7043, 0.2516, 0.2221, 0.4556, 0.8244] +2026-04-08 21:44:48.795146: Epoch time: 101.76 s +2026-04-08 21:44:49.921391: +2026-04-08 21:44:49.923015: Epoch 205 +2026-04-08 21:44:49.924627: Current learning rate: 0.00813 +2026-04-08 21:46:31.341662: train_loss -0.1508 +2026-04-08 21:46:31.348061: val_loss -0.1177 +2026-04-08 21:46:31.350132: Pseudo dice [0.2697, 0.1259, 0.7042, 0.0, 0.5164, 0.5374, 0.7701] +2026-04-08 21:46:31.357055: Epoch time: 101.42 s +2026-04-08 21:46:32.406029: +2026-04-08 21:46:32.432544: Epoch 206 +2026-04-08 21:46:32.434343: Current learning rate: 0.00813 +2026-04-08 21:48:14.292533: train_loss -0.1495 +2026-04-08 21:48:14.297685: val_loss -0.1348 +2026-04-08 21:48:14.299656: Pseudo dice [0.5006, 0.2465, 0.6716, 0.458, 0.411, 0.6018, 0.6377] +2026-04-08 21:48:14.302238: Epoch time: 101.89 s +2026-04-08 21:48:15.360409: +2026-04-08 21:48:15.362386: Epoch 207 +2026-04-08 21:48:15.364369: Current learning rate: 0.00812 +2026-04-08 21:49:59.226155: train_loss -0.1316 +2026-04-08 21:49:59.232263: val_loss -0.106 +2026-04-08 21:49:59.234771: Pseudo dice [0.5109, 0.1866, 0.4748, 0.062, 0.3137, 0.3878, 0.7979] +2026-04-08 21:49:59.237482: Epoch time: 103.87 s +2026-04-08 21:50:00.287893: +2026-04-08 21:50:00.289809: Epoch 208 +2026-04-08 21:50:00.291610: Current learning rate: 0.00811 +2026-04-08 21:51:43.795105: train_loss -0.1532 +2026-04-08 21:51:43.804343: val_loss -0.1129 +2026-04-08 21:51:43.807070: Pseudo dice [0.6721, 0.6223, 0.6967, 0.0421, 0.2611, 0.5611, 0.5961] +2026-04-08 21:51:43.809669: Epoch time: 103.51 s +2026-04-08 21:51:44.864643: +2026-04-08 21:51:44.869467: Epoch 209 +2026-04-08 21:51:44.874028: Current learning rate: 0.0081 +2026-04-08 21:53:27.198216: train_loss -0.159 +2026-04-08 21:53:27.206289: val_loss -0.1331 +2026-04-08 21:53:27.210390: Pseudo dice [0.4738, 0.503, 
0.5508, 0.0003, 0.4476, 0.7019, 0.6456] +2026-04-08 21:53:27.212554: Epoch time: 102.34 s +2026-04-08 21:53:28.263368: +2026-04-08 21:53:28.266515: Epoch 210 +2026-04-08 21:53:28.269792: Current learning rate: 0.00809 +2026-04-08 21:55:10.618688: train_loss -0.1351 +2026-04-08 21:55:10.624151: val_loss -0.1097 +2026-04-08 21:55:10.626140: Pseudo dice [0.5315, 0.367, 0.7785, 0.0034, 0.2632, 0.6878, 0.584] +2026-04-08 21:55:10.628117: Epoch time: 102.36 s +2026-04-08 21:55:11.681754: +2026-04-08 21:55:11.684154: Epoch 211 +2026-04-08 21:55:11.686539: Current learning rate: 0.00808 +2026-04-08 21:56:54.733093: train_loss -0.1313 +2026-04-08 21:56:54.739178: val_loss -0.1054 +2026-04-08 21:56:54.741140: Pseudo dice [0.7394, 0.376, 0.6295, 0.1703, 0.2476, 0.8025, 0.5981] +2026-04-08 21:56:54.743422: Epoch time: 103.05 s +2026-04-08 21:56:55.790656: +2026-04-08 21:56:55.792771: Epoch 212 +2026-04-08 21:56:55.794767: Current learning rate: 0.00807 +2026-04-08 21:58:38.166482: train_loss -0.1555 +2026-04-08 21:58:38.173533: val_loss -0.1115 +2026-04-08 21:58:38.175798: Pseudo dice [0.6522, 0.5908, 0.6209, 0.0219, 0.4367, 0.5662, 0.4901] +2026-04-08 21:58:38.178952: Epoch time: 102.38 s +2026-04-08 21:58:39.266747: +2026-04-08 21:58:39.268853: Epoch 213 +2026-04-08 21:58:39.270848: Current learning rate: 0.00806 +2026-04-08 22:00:23.283998: train_loss -0.1444 +2026-04-08 22:00:23.292131: val_loss -0.138 +2026-04-08 22:00:23.294926: Pseudo dice [0.54, 0.3903, 0.694, 0.5032, 0.4901, 0.7575, 0.6057] +2026-04-08 22:00:23.297959: Epoch time: 104.02 s +2026-04-08 22:00:23.301141: Yayy! 
New best EMA pseudo Dice: 0.4815 +2026-04-08 22:00:26.313579: +2026-04-08 22:00:26.315319: Epoch 214 +2026-04-08 22:00:26.316901: Current learning rate: 0.00805 +2026-04-08 22:02:07.470720: train_loss -0.1501 +2026-04-08 22:02:07.497658: val_loss -0.1266 +2026-04-08 22:02:07.499952: Pseudo dice [0.492, 0.3611, 0.5142, 0.1619, 0.4024, 0.8011, 0.6357] +2026-04-08 22:02:07.502447: Epoch time: 101.16 s +2026-04-08 22:02:08.565750: +2026-04-08 22:02:08.567856: Epoch 215 +2026-04-08 22:02:08.569571: Current learning rate: 0.00804 +2026-04-08 22:03:52.038583: train_loss -0.1468 +2026-04-08 22:03:52.045227: val_loss -0.1192 +2026-04-08 22:03:52.048204: Pseudo dice [0.6312, 0.5669, 0.4933, 0.2465, 0.2622, 0.6806, 0.8676] +2026-04-08 22:03:52.051982: Epoch time: 103.48 s +2026-04-08 22:03:52.054265: Yayy! New best EMA pseudo Dice: 0.4868 +2026-04-08 22:03:54.944423: +2026-04-08 22:03:54.946234: Epoch 216 +2026-04-08 22:03:54.947923: Current learning rate: 0.00803 +2026-04-08 22:05:37.449373: train_loss -0.1529 +2026-04-08 22:05:37.455681: val_loss -0.1319 +2026-04-08 22:05:37.457644: Pseudo dice [0.6317, 0.1436, 0.7401, 0.0253, 0.2497, 0.5371, 0.8592] +2026-04-08 22:05:37.460935: Epoch time: 102.51 s +2026-04-08 22:05:38.517118: +2026-04-08 22:05:38.519952: Epoch 217 +2026-04-08 22:05:38.521978: Current learning rate: 0.00802 +2026-04-08 22:07:20.450495: train_loss -0.146 +2026-04-08 22:07:20.456760: val_loss -0.1424 +2026-04-08 22:07:20.460010: Pseudo dice [0.7588, 0.0565, 0.7656, 0.6404, 0.4462, 0.5852, 0.688] +2026-04-08 22:07:20.462363: Epoch time: 101.94 s +2026-04-08 22:07:20.464861: Yayy! 
New best EMA pseudo Dice: 0.4916 +2026-04-08 22:07:23.273873: +2026-04-08 22:07:23.275395: Epoch 218 +2026-04-08 22:07:23.276860: Current learning rate: 0.00801 +2026-04-08 22:09:06.117303: train_loss -0.1544 +2026-04-08 22:09:06.127003: val_loss -0.1113 +2026-04-08 22:09:06.129208: Pseudo dice [0.3682, 0.5276, 0.6574, 0.5462, 0.3564, 0.5899, 0.6971] +2026-04-08 22:09:06.131854: Epoch time: 102.85 s +2026-04-08 22:09:06.133692: Yayy! New best EMA pseudo Dice: 0.4959 +2026-04-08 22:09:08.902947: +2026-04-08 22:09:08.905392: Epoch 219 +2026-04-08 22:09:08.907543: Current learning rate: 0.00801 +2026-04-08 22:10:50.811588: train_loss -0.1357 +2026-04-08 22:10:50.817851: val_loss -0.1418 +2026-04-08 22:10:50.819767: Pseudo dice [0.4441, 0.5034, 0.6031, 0.3126, 0.4868, 0.6917, 0.7358] +2026-04-08 22:10:50.822483: Epoch time: 101.91 s +2026-04-08 22:10:50.824290: Yayy! New best EMA pseudo Dice: 0.5003 +2026-04-08 22:10:53.559081: +2026-04-08 22:10:53.561736: Epoch 220 +2026-04-08 22:10:53.564422: Current learning rate: 0.008 +2026-04-08 22:12:35.522721: train_loss -0.138 +2026-04-08 22:12:35.529861: val_loss -0.0971 +2026-04-08 22:12:35.532510: Pseudo dice [0.7502, 0.5749, 0.699, 0.05, 0.218, 0.6469, 0.8583] +2026-04-08 22:12:35.537619: Epoch time: 101.97 s +2026-04-08 22:12:35.539923: Yayy! 
New best EMA pseudo Dice: 0.5045 +2026-04-08 22:12:38.318739: +2026-04-08 22:12:38.320334: Epoch 221 +2026-04-08 22:12:38.322226: Current learning rate: 0.00799 +2026-04-08 22:14:20.195652: train_loss -0.1483 +2026-04-08 22:14:20.203570: val_loss -0.1268 +2026-04-08 22:14:20.205712: Pseudo dice [0.4321, 0.1029, 0.7614, 0.0546, 0.3324, 0.722, 0.7574] +2026-04-08 22:14:20.208350: Epoch time: 101.88 s +2026-04-08 22:14:21.274241: +2026-04-08 22:14:21.276816: Epoch 222 +2026-04-08 22:14:21.279199: Current learning rate: 0.00798 +2026-04-08 22:16:04.485222: train_loss -0.1568 +2026-04-08 22:16:04.495325: val_loss -0.1302 +2026-04-08 22:16:04.498580: Pseudo dice [0.628, 0.5158, 0.599, 0.0006, 0.45, 0.6327, 0.6546] +2026-04-08 22:16:04.503968: Epoch time: 103.21 s +2026-04-08 22:16:05.562637: +2026-04-08 22:16:05.565654: Epoch 223 +2026-04-08 22:16:05.569557: Current learning rate: 0.00797 +2026-04-08 22:17:48.525082: train_loss -0.1461 +2026-04-08 22:17:48.534025: val_loss -0.121 +2026-04-08 22:17:48.536289: Pseudo dice [0.5112, 0.6661, 0.7462, 0.5714, 0.2616, 0.6349, 0.5325] +2026-04-08 22:17:48.539039: Epoch time: 102.97 s +2026-04-08 22:17:48.541313: Yayy! 
New best EMA pseudo Dice: 0.5052 +2026-04-08 22:17:51.334350: +2026-04-08 22:17:51.336107: Epoch 224 +2026-04-08 22:17:51.338407: Current learning rate: 0.00796 +2026-04-08 22:19:33.733858: train_loss -0.1468 +2026-04-08 22:19:33.740455: val_loss -0.0724 +2026-04-08 22:19:33.742343: Pseudo dice [0.6029, 0.0562, 0.5343, 0.0009, 0.2277, 0.7742, 0.5477] +2026-04-08 22:19:33.744943: Epoch time: 102.4 s +2026-04-08 22:19:34.793701: +2026-04-08 22:19:34.798393: Epoch 225 +2026-04-08 22:19:34.800212: Current learning rate: 0.00795 +2026-04-08 22:21:16.410600: train_loss -0.1462 +2026-04-08 22:21:16.441128: val_loss -0.1297 +2026-04-08 22:21:16.445932: Pseudo dice [0.1751, 0.5997, 0.7058, 0.6302, 0.359, 0.487, 0.7781] +2026-04-08 22:21:16.450840: Epoch time: 101.62 s +2026-04-08 22:21:17.497377: +2026-04-08 22:21:17.500693: Epoch 226 +2026-04-08 22:21:17.502948: Current learning rate: 0.00794 +2026-04-08 22:23:02.247342: train_loss -0.1437 +2026-04-08 22:23:02.252254: val_loss -0.1353 +2026-04-08 22:23:02.254962: Pseudo dice [0.6399, 0.593, 0.4405, 0.6114, 0.3872, 0.78, 0.6164] +2026-04-08 22:23:02.257241: Epoch time: 104.75 s +2026-04-08 22:23:02.260011: Yayy! 
New best EMA pseudo Dice: 0.5062 +2026-04-08 22:23:05.102045: +2026-04-08 22:23:05.103773: Epoch 227 +2026-04-08 22:23:05.106250: Current learning rate: 0.00793 +2026-04-08 22:24:46.922476: train_loss -0.1585 +2026-04-08 22:24:46.940814: val_loss -0.0875 +2026-04-08 22:24:46.943319: Pseudo dice [0.6656, 0.1964, 0.6786, 0.0063, 0.216, 0.7904, 0.7888] +2026-04-08 22:24:46.946690: Epoch time: 101.82 s +2026-04-08 22:24:48.011702: +2026-04-08 22:24:48.013933: Epoch 228 +2026-04-08 22:24:48.015625: Current learning rate: 0.00792 +2026-04-08 22:26:30.807006: train_loss -0.1589 +2026-04-08 22:26:30.814920: val_loss -0.1323 +2026-04-08 22:26:30.817996: Pseudo dice [0.3111, 0.1028, 0.7668, 0.6124, 0.2712, 0.7085, 0.7491] +2026-04-08 22:26:30.820083: Epoch time: 102.8 s +2026-04-08 22:26:31.866743: +2026-04-08 22:26:31.869901: Epoch 229 +2026-04-08 22:26:31.872052: Current learning rate: 0.00791 +2026-04-08 22:28:14.368363: train_loss -0.148 +2026-04-08 22:28:14.385707: val_loss -0.1161 +2026-04-08 22:28:14.390924: Pseudo dice [0.6639, 0.2411, 0.7789, 0.0011, 0.2779, 0.6238, 0.4013] +2026-04-08 22:28:14.398425: Epoch time: 102.5 s +2026-04-08 22:28:15.461626: +2026-04-08 22:28:15.465245: Epoch 230 +2026-04-08 22:28:15.468354: Current learning rate: 0.0079 +2026-04-08 22:29:56.955218: train_loss -0.1716 +2026-04-08 22:29:56.962505: val_loss -0.1021 +2026-04-08 22:29:56.966378: Pseudo dice [0.5002, 0.3219, 0.4078, 0.002, 0.3282, 0.8421, 0.6788] +2026-04-08 22:29:56.970586: Epoch time: 101.5 s +2026-04-08 22:29:58.049807: +2026-04-08 22:29:58.053228: Epoch 231 +2026-04-08 22:29:58.057317: Current learning rate: 0.00789 +2026-04-08 22:31:40.960942: train_loss -0.1595 +2026-04-08 22:31:40.966663: val_loss -0.1175 +2026-04-08 22:31:40.968465: Pseudo dice [0.4387, 0.3174, 0.5799, 0.1283, 0.2691, 0.3887, 0.8257] +2026-04-08 22:31:40.972491: Epoch time: 102.91 s +2026-04-08 22:31:42.018200: +2026-04-08 22:31:42.020281: Epoch 232 +2026-04-08 22:31:42.022153: Current learning rate: 
0.00789 +2026-04-08 22:33:24.094578: train_loss -0.1373 +2026-04-08 22:33:24.099551: val_loss -0.1094 +2026-04-08 22:33:24.101400: Pseudo dice [0.5076, 0.0717, 0.5108, 0.3097, 0.228, 0.8433, 0.8043] +2026-04-08 22:33:24.103510: Epoch time: 102.08 s +2026-04-08 22:33:25.153785: +2026-04-08 22:33:25.155841: Epoch 233 +2026-04-08 22:33:25.157685: Current learning rate: 0.00788 +2026-04-08 22:35:06.318090: train_loss -0.1497 +2026-04-08 22:35:06.325043: val_loss -0.1487 +2026-04-08 22:35:06.326809: Pseudo dice [0.3504, 0.6375, 0.718, 0.5728, 0.3708, 0.7859, 0.7048] +2026-04-08 22:35:06.329143: Epoch time: 101.17 s +2026-04-08 22:35:07.364865: +2026-04-08 22:35:07.367439: Epoch 234 +2026-04-08 22:35:07.369931: Current learning rate: 0.00787 +2026-04-08 22:36:49.635095: train_loss -0.1451 +2026-04-08 22:36:49.642891: val_loss -0.1353 +2026-04-08 22:36:49.646053: Pseudo dice [0.1417, 0.3454, 0.6788, 0.0633, 0.4539, 0.6619, 0.7933] +2026-04-08 22:36:49.648365: Epoch time: 102.27 s +2026-04-08 22:36:50.713244: +2026-04-08 22:36:50.715657: Epoch 235 +2026-04-08 22:36:50.717606: Current learning rate: 0.00786 +2026-04-08 22:38:32.552090: train_loss -0.1476 +2026-04-08 22:38:32.558979: val_loss -0.1326 +2026-04-08 22:38:32.562525: Pseudo dice [0.4542, 0.1437, 0.7348, 0.2205, 0.2049, 0.4754, 0.8097] +2026-04-08 22:38:32.564909: Epoch time: 101.84 s +2026-04-08 22:38:33.623161: +2026-04-08 22:38:33.624883: Epoch 236 +2026-04-08 22:38:33.626690: Current learning rate: 0.00785 +2026-04-08 22:40:17.813086: train_loss -0.1669 +2026-04-08 22:40:17.820649: val_loss -0.1331 +2026-04-08 22:40:17.822748: Pseudo dice [0.725, 0.1199, 0.7029, 0.1689, 0.2203, 0.8612, 0.8085] +2026-04-08 22:40:17.845017: Epoch time: 104.19 s +2026-04-08 22:40:18.902997: +2026-04-08 22:40:18.904794: Epoch 237 +2026-04-08 22:40:18.906732: Current learning rate: 0.00784 +2026-04-08 22:42:00.604368: train_loss -0.165 +2026-04-08 22:42:00.611310: val_loss -0.1315 +2026-04-08 22:42:00.620111: Pseudo dice [0.1326, 
0.6766, 0.7794, 0.57, 0.1141, 0.623, 0.7725] +2026-04-08 22:42:00.624039: Epoch time: 101.7 s +2026-04-08 22:42:01.695294: +2026-04-08 22:42:01.697416: Epoch 238 +2026-04-08 22:42:01.699694: Current learning rate: 0.00783 +2026-04-08 22:43:43.418735: train_loss -0.1477 +2026-04-08 22:43:43.423743: val_loss -0.0949 +2026-04-08 22:43:43.425602: Pseudo dice [0.7152, 0.3176, 0.7678, 0.0032, 0.2719, 0.7629, 0.6997] +2026-04-08 22:43:43.427343: Epoch time: 101.73 s +2026-04-08 22:43:44.555497: +2026-04-08 22:43:44.561186: Epoch 239 +2026-04-08 22:43:44.562906: Current learning rate: 0.00782 +2026-04-08 22:45:26.002468: train_loss -0.1542 +2026-04-08 22:45:26.009137: val_loss -0.1658 +2026-04-08 22:45:26.011137: Pseudo dice [0.66, 0.2887, 0.5749, 0.1821, 0.3287, 0.8451, 0.6075] +2026-04-08 22:45:26.014011: Epoch time: 101.45 s +2026-04-08 22:45:27.086652: +2026-04-08 22:45:27.089874: Epoch 240 +2026-04-08 22:45:27.091429: Current learning rate: 0.00781 +2026-04-08 22:47:09.558684: train_loss -0.1429 +2026-04-08 22:47:09.563666: val_loss -0.1441 +2026-04-08 22:47:09.566328: Pseudo dice [0.1002, 0.4418, 0.7186, 0.0015, 0.2674, 0.7748, 0.885] +2026-04-08 22:47:09.570807: Epoch time: 102.48 s +2026-04-08 22:47:10.638095: +2026-04-08 22:47:10.640280: Epoch 241 +2026-04-08 22:47:10.642661: Current learning rate: 0.0078 +2026-04-08 22:48:54.070853: train_loss -0.1628 +2026-04-08 22:48:54.078773: val_loss -0.1095 +2026-04-08 22:48:54.080915: Pseudo dice [0.3168, 0.3921, 0.5688, 0.061, 0.3239, 0.4979, 0.7982] +2026-04-08 22:48:54.083976: Epoch time: 103.44 s +2026-04-08 22:48:55.146926: +2026-04-08 22:48:55.148681: Epoch 242 +2026-04-08 22:48:55.150054: Current learning rate: 0.00779 +2026-04-08 22:50:37.917324: train_loss -0.1529 +2026-04-08 22:50:37.927394: val_loss -0.095 +2026-04-08 22:50:37.930245: Pseudo dice [0.6978, 0.5818, 0.6233, 0.0817, 0.2813, 0.1904, 0.781] +2026-04-08 22:50:37.934977: Epoch time: 102.77 s +2026-04-08 22:50:38.991070: +2026-04-08 22:50:38.993100: 
Epoch 243 +2026-04-08 22:50:38.995088: Current learning rate: 0.00778 +2026-04-08 22:52:24.785992: train_loss -0.1485 +2026-04-08 22:52:24.791519: val_loss -0.133 +2026-04-08 22:52:24.793774: Pseudo dice [0.2516, 0.1025, 0.7015, 0.5255, 0.4948, 0.7332, 0.8207] +2026-04-08 22:52:24.796075: Epoch time: 105.8 s +2026-04-08 22:52:25.862044: +2026-04-08 22:52:25.864162: Epoch 244 +2026-04-08 22:52:25.866093: Current learning rate: 0.00777 +2026-04-08 22:54:10.482692: train_loss -0.1619 +2026-04-08 22:54:10.495796: val_loss -0.1328 +2026-04-08 22:54:10.501162: Pseudo dice [0.6761, 0.5967, 0.4911, 0.0774, 0.3522, 0.8037, 0.812] +2026-04-08 22:54:10.509240: Epoch time: 104.62 s +2026-04-08 22:54:11.581578: +2026-04-08 22:54:11.585134: Epoch 245 +2026-04-08 22:54:11.586841: Current learning rate: 0.00777 +2026-04-08 22:55:53.669943: train_loss -0.1545 +2026-04-08 22:55:53.676489: val_loss -0.1401 +2026-04-08 22:55:53.678921: Pseudo dice [0.7054, 0.6034, 0.7632, 0.8118, 0.3847, 0.6701, 0.7814] +2026-04-08 22:55:53.683550: Epoch time: 102.09 s +2026-04-08 22:55:53.685768: Yayy! New best EMA pseudo Dice: 0.5083 +2026-04-08 22:55:56.542100: +2026-04-08 22:55:56.544239: Epoch 246 +2026-04-08 22:55:56.545646: Current learning rate: 0.00776 +2026-04-08 22:57:41.400339: train_loss -0.1572 +2026-04-08 22:57:41.407315: val_loss -0.1285 +2026-04-08 22:57:41.408816: Pseudo dice [0.6567, 0.4942, 0.8116, 0.0, 0.3885, 0.6194, 0.6269] +2026-04-08 22:57:41.410710: Epoch time: 104.86 s +2026-04-08 22:57:41.412604: Yayy! 
New best EMA pseudo Dice: 0.5088 +2026-04-08 22:57:44.268900: +2026-04-08 22:57:44.271347: Epoch 247 +2026-04-08 22:57:44.274227: Current learning rate: 0.00775 +2026-04-08 22:59:26.463563: train_loss -0.1471 +2026-04-08 22:59:26.473098: val_loss -0.1 +2026-04-08 22:59:26.476818: Pseudo dice [0.3346, 0.2696, 0.7873, 0.0723, 0.3352, 0.397, 0.8061] +2026-04-08 22:59:26.480618: Epoch time: 102.2 s +2026-04-08 22:59:27.554687: +2026-04-08 22:59:27.558643: Epoch 248 +2026-04-08 22:59:27.560554: Current learning rate: 0.00774 +2026-04-08 23:01:10.042349: train_loss -0.1665 +2026-04-08 23:01:10.048736: val_loss -0.099 +2026-04-08 23:01:10.050706: Pseudo dice [0.7718, 0.2206, 0.464, 0.0494, 0.3433, 0.7273, 0.7943] +2026-04-08 23:01:10.055001: Epoch time: 102.49 s +2026-04-08 23:01:11.118745: +2026-04-08 23:01:11.120928: Epoch 249 +2026-04-08 23:01:11.123202: Current learning rate: 0.00773 +2026-04-08 23:02:52.627154: train_loss -0.1619 +2026-04-08 23:02:52.633807: val_loss -0.1135 +2026-04-08 23:02:52.636049: Pseudo dice [0.6418, 0.1254, 0.5918, 0.0001, 0.3486, 0.5504, 0.6621] +2026-04-08 23:02:52.639238: Epoch time: 101.51 s +2026-04-08 23:02:55.572203: +2026-04-08 23:02:55.574306: Epoch 250 +2026-04-08 23:02:55.575835: Current learning rate: 0.00772 +2026-04-08 23:04:38.129908: train_loss -0.1524 +2026-04-08 23:04:38.138506: val_loss -0.0816 +2026-04-08 23:04:38.142449: Pseudo dice [0.4751, 0.3245, 0.5896, 0.0001, 0.3096, 0.5313, 0.5742] +2026-04-08 23:04:38.146141: Epoch time: 102.56 s +2026-04-08 23:04:39.222456: +2026-04-08 23:04:39.227516: Epoch 251 +2026-04-08 23:04:39.231277: Current learning rate: 0.00771 +2026-04-08 23:06:20.996822: train_loss -0.1679 +2026-04-08 23:06:21.003526: val_loss -0.1427 +2026-04-08 23:06:21.005450: Pseudo dice [0.57, 0.5378, 0.6256, 0.3561, 0.3073, 0.8572, 0.7757] +2026-04-08 23:06:21.007643: Epoch time: 101.78 s +2026-04-08 23:06:22.088716: +2026-04-08 23:06:22.090857: Epoch 252 +2026-04-08 23:06:22.092893: Current learning rate: 
0.0077 +2026-04-08 23:08:04.591332: train_loss -0.1553 +2026-04-08 23:08:04.598336: val_loss -0.1246 +2026-04-08 23:08:04.600141: Pseudo dice [0.3463, 0.2246, 0.6883, 0.0596, 0.2233, 0.6552, 0.6777] +2026-04-08 23:08:04.603109: Epoch time: 102.51 s +2026-04-08 23:08:05.664819: +2026-04-08 23:08:05.666605: Epoch 253 +2026-04-08 23:08:05.668288: Current learning rate: 0.00769 +2026-04-08 23:09:48.164825: train_loss -0.1629 +2026-04-08 23:09:48.170228: val_loss -0.1321 +2026-04-08 23:09:48.173098: Pseudo dice [0.5101, 0.338, 0.5646, 0.0056, 0.3046, 0.7559, 0.7036] +2026-04-08 23:09:48.175479: Epoch time: 102.5 s +2026-04-08 23:09:49.259928: +2026-04-08 23:09:49.269920: Epoch 254 +2026-04-08 23:09:49.272250: Current learning rate: 0.00768 +2026-04-08 23:11:31.838598: train_loss -0.145 +2026-04-08 23:11:31.845073: val_loss -0.1172 +2026-04-08 23:11:31.847532: Pseudo dice [0.597, 0.3064, 0.5566, 0.0121, 0.295, 0.8307, 0.3975] +2026-04-08 23:11:31.850144: Epoch time: 102.58 s +2026-04-08 23:11:32.918410: +2026-04-08 23:11:32.920874: Epoch 255 +2026-04-08 23:11:32.923050: Current learning rate: 0.00767 +2026-04-08 23:13:15.112267: train_loss -0.1745 +2026-04-08 23:13:15.118328: val_loss -0.1274 +2026-04-08 23:13:15.120357: Pseudo dice [0.5269, 0.3566, 0.6198, 0.4596, 0.4717, 0.5974, 0.6523] +2026-04-08 23:13:15.122860: Epoch time: 102.2 s +2026-04-08 23:13:16.195525: +2026-04-08 23:13:16.197449: Epoch 256 +2026-04-08 23:13:16.199091: Current learning rate: 0.00766 +2026-04-08 23:14:58.456188: train_loss -0.1642 +2026-04-08 23:14:58.462771: val_loss -0.0995 +2026-04-08 23:14:58.465698: Pseudo dice [0.4902, 0.5909, 0.6969, 0.0001, 0.3028, 0.8043, 0.7405] +2026-04-08 23:14:58.467809: Epoch time: 102.26 s +2026-04-08 23:14:59.586089: +2026-04-08 23:14:59.590177: Epoch 257 +2026-04-08 23:14:59.592586: Current learning rate: 0.00765 +2026-04-08 23:16:42.446111: train_loss -0.1556 +2026-04-08 23:16:42.457206: val_loss -0.1391 +2026-04-08 23:16:42.459252: Pseudo dice [0.1408, 
0.6096, 0.6757, 0.6391, 0.4265, 0.6235, 0.8045] +2026-04-08 23:16:42.462531: Epoch time: 102.86 s +2026-04-08 23:16:43.565567: +2026-04-08 23:16:43.570260: Epoch 258 +2026-04-08 23:16:43.573720: Current learning rate: 0.00764 +2026-04-08 23:18:27.980140: train_loss -0.1575 +2026-04-08 23:18:27.987812: val_loss -0.1467 +2026-04-08 23:18:27.992000: Pseudo dice [0.207, 0.3719, 0.6655, 0.3015, 0.3169, 0.7637, 0.7718] +2026-04-08 23:18:27.994553: Epoch time: 104.42 s +2026-04-08 23:18:29.086275: +2026-04-08 23:18:29.090875: Epoch 259 +2026-04-08 23:18:29.093020: Current learning rate: 0.00764 +2026-04-08 23:20:12.259126: train_loss -0.1675 +2026-04-08 23:20:12.265389: val_loss -0.1402 +2026-04-08 23:20:12.268120: Pseudo dice [0.5071, 0.3278, 0.4835, 0.7117, 0.543, 0.4894, 0.6813] +2026-04-08 23:20:12.271366: Epoch time: 103.18 s +2026-04-08 23:20:13.345240: +2026-04-08 23:20:13.346958: Epoch 260 +2026-04-08 23:20:13.348785: Current learning rate: 0.00763 +2026-04-08 23:21:56.590055: train_loss -0.149 +2026-04-08 23:21:56.596303: val_loss -0.0812 +2026-04-08 23:21:56.599279: Pseudo dice [0.5227, 0.438, 0.5705, 0.0634, 0.3699, 0.435, 0.6514] +2026-04-08 23:21:56.603387: Epoch time: 103.25 s +2026-04-08 23:21:57.665818: +2026-04-08 23:21:57.667668: Epoch 261 +2026-04-08 23:21:57.669761: Current learning rate: 0.00762 +2026-04-08 23:23:41.293563: train_loss -0.1643 +2026-04-08 23:23:41.299508: val_loss -0.1101 +2026-04-08 23:23:41.301920: Pseudo dice [0.7643, 0.358, 0.5258, 0.0346, 0.3494, 0.6343, 0.7685] +2026-04-08 23:23:41.304673: Epoch time: 103.63 s +2026-04-08 23:23:42.389524: +2026-04-08 23:23:42.391635: Epoch 262 +2026-04-08 23:23:42.394137: Current learning rate: 0.00761 +2026-04-08 23:25:27.792774: train_loss -0.1652 +2026-04-08 23:25:27.799481: val_loss -0.1505 +2026-04-08 23:25:27.802253: Pseudo dice [0.7144, 0.3441, 0.644, 0.186, 0.4307, 0.836, 0.7665] +2026-04-08 23:25:27.805252: Epoch time: 105.41 s +2026-04-08 23:25:28.891004: +2026-04-08 23:25:28.893338: 
Epoch 263 +2026-04-08 23:25:28.896212: Current learning rate: 0.0076 +2026-04-08 23:27:11.228580: train_loss -0.1709 +2026-04-08 23:27:11.233569: val_loss -0.1386 +2026-04-08 23:27:11.235320: Pseudo dice [0.7193, 0.2746, 0.5153, 0.828, 0.4889, 0.7236, 0.7156] +2026-04-08 23:27:11.237286: Epoch time: 102.34 s +2026-04-08 23:27:12.312597: +2026-04-08 23:27:12.316091: Epoch 264 +2026-04-08 23:27:12.319218: Current learning rate: 0.00759 +2026-04-08 23:28:54.608448: train_loss -0.172 +2026-04-08 23:28:54.614308: val_loss -0.1515 +2026-04-08 23:28:54.616179: Pseudo dice [0.8232, 0.6913, 0.6772, 0.7597, 0.4474, 0.6163, 0.7059] +2026-04-08 23:28:54.619951: Epoch time: 102.3 s +2026-04-08 23:28:54.621701: Yayy! New best EMA pseudo Dice: 0.5245 +2026-04-08 23:28:57.413217: +2026-04-08 23:28:57.414951: Epoch 265 +2026-04-08 23:28:57.416518: Current learning rate: 0.00758 +2026-04-08 23:30:41.385289: train_loss -0.1548 +2026-04-08 23:30:41.391897: val_loss -0.1176 +2026-04-08 23:30:41.394334: Pseudo dice [0.5339, 0.2623, 0.6605, 0.0017, 0.2723, 0.6155, 0.6059] +2026-04-08 23:30:41.396605: Epoch time: 103.98 s +2026-04-08 23:30:42.550568: +2026-04-08 23:30:42.552732: Epoch 266 +2026-04-08 23:30:42.554734: Current learning rate: 0.00757 +2026-04-08 23:32:25.089214: train_loss -0.1511 +2026-04-08 23:32:25.094849: val_loss -0.1512 +2026-04-08 23:32:25.097099: Pseudo dice [0.7741, 0.3501, 0.6021, 0.4562, 0.4888, 0.4338, 0.8268] +2026-04-08 23:32:25.099550: Epoch time: 102.54 s +2026-04-08 23:32:26.167546: +2026-04-08 23:32:26.171299: Epoch 267 +2026-04-08 23:32:26.173396: Current learning rate: 0.00756 +2026-04-08 23:34:08.202274: train_loss -0.162 +2026-04-08 23:34:08.209018: val_loss -0.1565 +2026-04-08 23:34:08.211323: Pseudo dice [0.413, 0.2493, 0.6109, 0.749, 0.4098, 0.764, 0.6889] +2026-04-08 23:34:08.215332: Epoch time: 102.04 s +2026-04-08 23:34:09.303118: +2026-04-08 23:34:09.305180: Epoch 268 +2026-04-08 23:34:09.307268: Current learning rate: 0.00755 +2026-04-08 
23:35:51.671741: train_loss -0.1803 +2026-04-08 23:35:51.678056: val_loss -0.1103 +2026-04-08 23:35:51.680314: Pseudo dice [0.8112, 0.4184, 0.742, 0.0339, 0.3812, 0.549, 0.7793] +2026-04-08 23:35:51.682571: Epoch time: 102.37 s +2026-04-08 23:35:52.755816: +2026-04-08 23:35:52.761195: Epoch 269 +2026-04-08 23:35:52.766759: Current learning rate: 0.00754 +2026-04-08 23:37:34.724149: train_loss -0.1694 +2026-04-08 23:37:34.741854: val_loss -0.1266 +2026-04-08 23:37:34.749247: Pseudo dice [0.179, 0.242, 0.6104, 0.001, 0.3046, 0.7743, 0.8832] +2026-04-08 23:37:34.754914: Epoch time: 101.97 s +2026-04-08 23:37:35.833024: +2026-04-08 23:37:35.836144: Epoch 270 +2026-04-08 23:37:35.839093: Current learning rate: 0.00753 +2026-04-08 23:39:18.777559: train_loss -0.1681 +2026-04-08 23:39:18.783883: val_loss -0.0997 +2026-04-08 23:39:18.786499: Pseudo dice [0.6925, 0.3321, 0.5498, 0.0691, 0.4851, 0.4917, 0.6444] +2026-04-08 23:39:18.789246: Epoch time: 102.95 s +2026-04-08 23:39:19.876528: +2026-04-08 23:39:19.881559: Epoch 271 +2026-04-08 23:39:19.888259: Current learning rate: 0.00752 +2026-04-08 23:41:03.267412: train_loss -0.1569 +2026-04-08 23:41:03.273863: val_loss -0.1092 +2026-04-08 23:41:03.276359: Pseudo dice [0.5737, 0.2416, 0.7223, 0.0487, 0.3809, 0.7975, 0.553] +2026-04-08 23:41:03.280411: Epoch time: 103.39 s +2026-04-08 23:41:04.362849: +2026-04-08 23:41:04.369884: Epoch 272 +2026-04-08 23:41:04.392379: Current learning rate: 0.00751 +2026-04-08 23:42:46.203887: train_loss -0.1579 +2026-04-08 23:42:46.216739: val_loss -0.1284 +2026-04-08 23:42:46.227047: Pseudo dice [0.6831, 0.364, 0.6412, 0.4238, 0.486, 0.6709, 0.6806] +2026-04-08 23:42:46.229593: Epoch time: 101.84 s +2026-04-08 23:42:47.305505: +2026-04-08 23:42:47.307238: Epoch 273 +2026-04-08 23:42:47.309228: Current learning rate: 0.00751 +2026-04-08 23:44:29.752609: train_loss -0.1658 +2026-04-08 23:44:29.759031: val_loss -0.1285 +2026-04-08 23:44:29.763815: Pseudo dice [0.7414, 0.5081, 0.6616, 0.0254, 
0.2999, 0.4752, 0.8651] +2026-04-08 23:44:29.766137: Epoch time: 102.45 s +2026-04-08 23:44:30.849125: +2026-04-08 23:44:30.851053: Epoch 274 +2026-04-08 23:44:30.852939: Current learning rate: 0.0075 +2026-04-08 23:46:13.453978: train_loss -0.166 +2026-04-08 23:46:13.461264: val_loss -0.143 +2026-04-08 23:46:13.463312: Pseudo dice [0.7588, 0.4984, 0.6831, 0.005, 0.3836, 0.2292, 0.7602] +2026-04-08 23:46:13.465789: Epoch time: 102.61 s +2026-04-08 23:46:14.538078: +2026-04-08 23:46:14.540332: Epoch 275 +2026-04-08 23:46:14.542534: Current learning rate: 0.00749 +2026-04-08 23:47:56.309713: train_loss -0.1716 +2026-04-08 23:47:56.316026: val_loss -0.1213 +2026-04-08 23:47:56.318433: Pseudo dice [0.4103, 0.713, 0.7556, 0.1192, 0.2486, 0.8359, 0.6777] +2026-04-08 23:47:56.320786: Epoch time: 101.77 s +2026-04-08 23:47:57.386947: +2026-04-08 23:47:57.389951: Epoch 276 +2026-04-08 23:47:57.394532: Current learning rate: 0.00748 +2026-04-08 23:49:38.754438: train_loss -0.1566 +2026-04-08 23:49:38.760341: val_loss -0.0942 +2026-04-08 23:49:38.762787: Pseudo dice [0.8168, 0.0144, 0.7227, 0.0002, 0.3465, 0.794, 0.7277] +2026-04-08 23:49:38.765100: Epoch time: 101.37 s +2026-04-08 23:49:39.829669: +2026-04-08 23:49:39.832559: Epoch 277 +2026-04-08 23:49:39.835927: Current learning rate: 0.00747 +2026-04-08 23:51:21.498503: train_loss -0.1536 +2026-04-08 23:51:21.504678: val_loss -0.1219 +2026-04-08 23:51:21.507830: Pseudo dice [0.6722, 0.2128, 0.6564, 0.6399, 0.2115, 0.2938, 0.8454] +2026-04-08 23:51:21.511403: Epoch time: 101.67 s +2026-04-08 23:51:22.573458: +2026-04-08 23:51:22.575999: Epoch 278 +2026-04-08 23:51:22.578706: Current learning rate: 0.00746 +2026-04-08 23:53:05.178309: train_loss -0.1617 +2026-04-08 23:53:05.183189: val_loss -0.1363 +2026-04-08 23:53:05.184948: Pseudo dice [0.5018, 0.4808, 0.7041, 0.5367, 0.0956, 0.6696, 0.743] +2026-04-08 23:53:05.187385: Epoch time: 102.61 s +2026-04-08 23:53:06.259540: +2026-04-08 23:53:06.261531: Epoch 279 +2026-04-08 
23:53:06.263572: Current learning rate: 0.00745 +2026-04-08 23:54:47.847927: train_loss -0.1596 +2026-04-08 23:54:47.854295: val_loss -0.1513 +2026-04-08 23:54:47.856714: Pseudo dice [0.6807, 0.7652, 0.5946, 0.652, 0.3667, 0.6819, 0.8635] +2026-04-08 23:54:47.859783: Epoch time: 101.59 s +2026-04-08 23:54:47.861619: Yayy! New best EMA pseudo Dice: 0.5253 +2026-04-08 23:54:50.643100: +2026-04-08 23:54:50.644934: Epoch 280 +2026-04-08 23:54:50.646393: Current learning rate: 0.00744 +2026-04-08 23:56:32.504115: train_loss -0.1708 +2026-04-08 23:56:32.511563: val_loss -0.1241 +2026-04-08 23:56:32.513980: Pseudo dice [0.6095, 0.5752, 0.3647, 0.0935, 0.2921, 0.8468, 0.5243] +2026-04-08 23:56:32.516258: Epoch time: 101.86 s +2026-04-08 23:56:33.590586: +2026-04-08 23:56:33.593358: Epoch 281 +2026-04-08 23:56:33.594924: Current learning rate: 0.00743 +2026-04-08 23:58:15.383419: train_loss -0.1898 +2026-04-08 23:58:15.389658: val_loss -0.1369 +2026-04-08 23:58:15.391960: Pseudo dice [0.6955, 0.1395, 0.7731, 0.0182, 0.436, 0.5899, 0.7832] +2026-04-08 23:58:15.395280: Epoch time: 101.8 s +2026-04-08 23:58:16.470585: +2026-04-08 23:58:16.473706: Epoch 282 +2026-04-08 23:58:16.476117: Current learning rate: 0.00742 +2026-04-08 23:59:57.582050: train_loss -0.1719 +2026-04-08 23:59:57.588474: val_loss -0.092 +2026-04-08 23:59:57.590460: Pseudo dice [0.4845, 0.6333, 0.7114, 0.0196, 0.4376, 0.7421, 0.8818] +2026-04-08 23:59:57.592877: Epoch time: 101.11 s +2026-04-08 23:59:58.661112: +2026-04-08 23:59:58.662939: Epoch 283 +2026-04-08 23:59:58.665468: Current learning rate: 0.00741 +2026-04-09 00:01:45.158392: train_loss -0.1714 +2026-04-09 00:01:45.169308: val_loss -0.1624 +2026-04-09 00:01:45.172593: Pseudo dice [0.7642, 0.6059, 0.7715, 0.6817, 0.2656, 0.7704, 0.856] +2026-04-09 00:01:45.175661: Epoch time: 106.5 s +2026-04-09 00:01:45.180195: Yayy! 
New best EMA pseudo Dice: 0.5365 +2026-04-09 00:01:48.206568: +2026-04-09 00:01:48.209665: Epoch 284 +2026-04-09 00:01:48.211707: Current learning rate: 0.0074 +2026-04-09 00:03:32.892855: train_loss -0.1793 +2026-04-09 00:03:32.899248: val_loss -0.1293 +2026-04-09 00:03:32.902604: Pseudo dice [0.633, 0.496, 0.4182, 0.0407, 0.4263, 0.8024, 0.7796] +2026-04-09 00:03:32.907580: Epoch time: 104.69 s +2026-04-09 00:03:33.985664: +2026-04-09 00:03:33.989221: Epoch 285 +2026-04-09 00:03:33.991311: Current learning rate: 0.00739 +2026-04-09 00:05:18.548463: train_loss -0.1782 +2026-04-09 00:05:18.554829: val_loss -0.1471 +2026-04-09 00:05:18.557809: Pseudo dice [0.6454, 0.3935, 0.6649, 0.3813, 0.3671, 0.8334, 0.7495] +2026-04-09 00:05:18.561481: Epoch time: 104.57 s +2026-04-09 00:05:18.563757: Yayy! New best EMA pseudo Dice: 0.5384 +2026-04-09 00:05:21.574581: +2026-04-09 00:05:21.577268: Epoch 286 +2026-04-09 00:05:21.579000: Current learning rate: 0.00738 +2026-04-09 00:07:08.074717: train_loss -0.1583 +2026-04-09 00:07:08.089867: val_loss -0.0908 +2026-04-09 00:07:08.095700: Pseudo dice [0.673, 0.1013, 0.6766, 0.0156, 0.3522, 0.7283, 0.6807] +2026-04-09 00:07:08.101255: Epoch time: 106.5 s +2026-04-09 00:07:09.206844: +2026-04-09 00:07:09.208842: Epoch 287 +2026-04-09 00:07:09.210669: Current learning rate: 0.00738 +2026-04-09 00:08:50.820420: train_loss -0.1601 +2026-04-09 00:08:50.829846: val_loss -0.1491 +2026-04-09 00:08:50.832877: Pseudo dice [0.755, 0.6671, 0.6915, 0.0008, 0.4016, 0.5521, 0.7298] +2026-04-09 00:08:50.836524: Epoch time: 101.62 s +2026-04-09 00:08:51.933882: +2026-04-09 00:08:51.939843: Epoch 288 +2026-04-09 00:08:51.945261: Current learning rate: 0.00737 +2026-04-09 00:10:34.350762: train_loss -0.1785 +2026-04-09 00:10:34.362743: val_loss -0.135 +2026-04-09 00:10:34.365244: Pseudo dice [0.6744, 0.2291, 0.4896, 0.1071, 0.3313, 0.5787, 0.7648] +2026-04-09 00:10:34.369685: Epoch time: 102.42 s +2026-04-09 00:10:35.466740: +2026-04-09 
00:10:35.468852: Epoch 289 +2026-04-09 00:10:35.473757: Current learning rate: 0.00736 +2026-04-09 00:12:19.774518: train_loss -0.157 +2026-04-09 00:12:19.780166: val_loss -0.1498 +2026-04-09 00:12:19.782790: Pseudo dice [0.5354, 0.5972, 0.7564, 0.1199, 0.428, 0.5101, 0.8307] +2026-04-09 00:12:19.785204: Epoch time: 104.31 s +2026-04-09 00:12:20.871516: +2026-04-09 00:12:20.873528: Epoch 290 +2026-04-09 00:12:20.876089: Current learning rate: 0.00735 +2026-04-09 00:14:02.499370: train_loss -0.1699 +2026-04-09 00:14:02.505997: val_loss -0.1108 +2026-04-09 00:14:02.508304: Pseudo dice [0.4247, 0.7366, 0.7354, 0.2159, 0.3241, 0.5505, 0.5619] +2026-04-09 00:14:02.510975: Epoch time: 101.63 s +2026-04-09 00:14:03.630447: +2026-04-09 00:14:03.633105: Epoch 291 +2026-04-09 00:14:03.635515: Current learning rate: 0.00734 +2026-04-09 00:15:45.449769: train_loss -0.1624 +2026-04-09 00:15:45.470091: val_loss -0.0722 +2026-04-09 00:15:45.472317: Pseudo dice [0.3869, 0.6424, 0.6126, 0.058, 0.2874, 0.4966, 0.5261] +2026-04-09 00:15:45.474825: Epoch time: 101.82 s +2026-04-09 00:15:46.565178: +2026-04-09 00:15:46.568505: Epoch 292 +2026-04-09 00:15:46.571647: Current learning rate: 0.00733 +2026-04-09 00:17:28.413277: train_loss -0.1626 +2026-04-09 00:17:28.420636: val_loss -0.1166 +2026-04-09 00:17:28.422822: Pseudo dice [0.6194, 0.1858, 0.6166, 0.6261, 0.3408, 0.5077, 0.6133] +2026-04-09 00:17:28.425159: Epoch time: 101.85 s +2026-04-09 00:17:29.512309: +2026-04-09 00:17:29.514928: Epoch 293 +2026-04-09 00:17:29.517052: Current learning rate: 0.00732 +2026-04-09 00:19:10.823452: train_loss -0.1689 +2026-04-09 00:19:10.828489: val_loss -0.0772 +2026-04-09 00:19:10.830679: Pseudo dice [0.6098, 0.5686, 0.2814, 0.1121, 0.3888, 0.2708, 0.6401] +2026-04-09 00:19:10.833041: Epoch time: 101.31 s +2026-04-09 00:19:11.930361: +2026-04-09 00:19:11.932899: Epoch 294 +2026-04-09 00:19:11.934728: Current learning rate: 0.00731 +2026-04-09 00:20:53.980071: train_loss -0.1697 +2026-04-09 
00:20:53.985444: val_loss -0.1481 +2026-04-09 00:20:53.987339: Pseudo dice [0.8263, 0.3581, 0.5567, 0.003, 0.412, 0.8578, 0.8869] +2026-04-09 00:20:53.991429: Epoch time: 102.05 s +2026-04-09 00:20:55.079294: +2026-04-09 00:20:55.081030: Epoch 295 +2026-04-09 00:20:55.082726: Current learning rate: 0.0073 +2026-04-09 00:22:37.098644: train_loss -0.1557 +2026-04-09 00:22:37.104932: val_loss -0.1155 +2026-04-09 00:22:37.106874: Pseudo dice [0.1764, 0.7093, 0.5948, 0.0022, 0.2726, 0.6105, 0.5748] +2026-04-09 00:22:37.109189: Epoch time: 102.02 s +2026-04-09 00:22:38.209275: +2026-04-09 00:22:38.211537: Epoch 296 +2026-04-09 00:22:38.214025: Current learning rate: 0.00729 +2026-04-09 00:24:19.464375: train_loss -0.1543 +2026-04-09 00:24:19.472399: val_loss -0.103 +2026-04-09 00:24:19.474738: Pseudo dice [0.4935, 0.6029, 0.5391, 0.0027, 0.2802, 0.5532, 0.5505] +2026-04-09 00:24:19.481010: Epoch time: 101.26 s +2026-04-09 00:24:20.590017: +2026-04-09 00:24:20.592067: Epoch 297 +2026-04-09 00:24:20.594558: Current learning rate: 0.00728 +2026-04-09 00:26:02.299308: train_loss -0.1651 +2026-04-09 00:26:02.305658: val_loss -0.1133 +2026-04-09 00:26:02.307520: Pseudo dice [0.7994, 0.2446, 0.5262, 0.1277, 0.3506, 0.1351, 0.8055] +2026-04-09 00:26:02.309528: Epoch time: 101.71 s +2026-04-09 00:26:03.400227: +2026-04-09 00:26:03.401791: Epoch 298 +2026-04-09 00:26:03.403693: Current learning rate: 0.00727 +2026-04-09 00:27:44.670158: train_loss -0.1758 +2026-04-09 00:27:44.676747: val_loss -0.1048 +2026-04-09 00:27:44.678647: Pseudo dice [0.5623, 0.41, 0.6052, 0.1602, 0.2441, 0.3347, 0.6494] +2026-04-09 00:27:44.683095: Epoch time: 101.27 s +2026-04-09 00:27:45.768298: +2026-04-09 00:27:45.770506: Epoch 299 +2026-04-09 00:27:45.772042: Current learning rate: 0.00726 +2026-04-09 00:29:27.028023: train_loss -0.1611 +2026-04-09 00:29:27.034246: val_loss -0.1172 +2026-04-09 00:29:27.036301: Pseudo dice [0.5272, 0.4009, 0.6852, 0.481, 0.5256, 0.7733, 0.4116] +2026-04-09 
00:29:27.040809: Epoch time: 101.26 s +2026-04-09 00:29:29.798657: +2026-04-09 00:29:29.800610: Epoch 300 +2026-04-09 00:29:29.802290: Current learning rate: 0.00725 +2026-04-09 00:31:11.866545: train_loss -0.1537 +2026-04-09 00:31:11.871953: val_loss -0.1139 +2026-04-09 00:31:11.873371: Pseudo dice [0.5048, 0.4963, 0.4422, 0.7069, 0.234, 0.7412, 0.6949] +2026-04-09 00:31:11.875298: Epoch time: 102.07 s +2026-04-09 00:31:12.968302: +2026-04-09 00:31:12.970062: Epoch 301 +2026-04-09 00:31:12.971738: Current learning rate: 0.00724 +2026-04-09 00:32:54.599200: train_loss -0.164 +2026-04-09 00:32:54.604866: val_loss -0.1267 +2026-04-09 00:32:54.607121: Pseudo dice [0.6388, 0.2171, 0.5677, 0.6076, 0.3593, 0.6818, 0.7736] +2026-04-09 00:32:54.611136: Epoch time: 101.63 s +2026-04-09 00:32:55.707829: +2026-04-09 00:32:55.709659: Epoch 302 +2026-04-09 00:32:55.711798: Current learning rate: 0.00724 +2026-04-09 00:34:37.537571: train_loss -0.1357 +2026-04-09 00:34:37.545048: val_loss -0.1069 +2026-04-09 00:34:37.547862: Pseudo dice [0.3544, 0.4849, 0.6394, 0.0013, 0.5064, 0.6819, 0.4152] +2026-04-09 00:34:37.550830: Epoch time: 101.83 s +2026-04-09 00:34:38.643304: +2026-04-09 00:34:38.646669: Epoch 303 +2026-04-09 00:34:38.649336: Current learning rate: 0.00723 +2026-04-09 00:36:19.712927: train_loss -0.1566 +2026-04-09 00:36:19.735533: val_loss -0.1311 +2026-04-09 00:36:19.739211: Pseudo dice [0.7434, 0.2776, 0.5855, 0.0011, 0.2993, 0.7489, 0.7594] +2026-04-09 00:36:19.742617: Epoch time: 101.07 s +2026-04-09 00:36:20.831202: +2026-04-09 00:36:20.832940: Epoch 304 +2026-04-09 00:36:20.834625: Current learning rate: 0.00722 +2026-04-09 00:38:03.401734: train_loss -0.1695 +2026-04-09 00:38:03.408877: val_loss -0.1619 +2026-04-09 00:38:03.411074: Pseudo dice [0.5528, 0.4867, 0.5641, 0.4646, 0.5295, 0.8066, 0.8858] +2026-04-09 00:38:03.413824: Epoch time: 102.57 s +2026-04-09 00:38:04.483413: +2026-04-09 00:38:04.485245: Epoch 305 +2026-04-09 00:38:04.486805: Current learning 
rate: 0.00721 +2026-04-09 00:39:46.290298: train_loss -0.1724 +2026-04-09 00:39:46.296596: val_loss -0.1377 +2026-04-09 00:39:46.298289: Pseudo dice [0.2552, 0.2533, 0.7679, 0.0893, 0.2586, 0.72, 0.8482] +2026-04-09 00:39:46.301346: Epoch time: 101.81 s +2026-04-09 00:39:47.398118: +2026-04-09 00:39:47.399831: Epoch 306 +2026-04-09 00:39:47.401415: Current learning rate: 0.0072 +2026-04-09 00:41:28.837617: train_loss -0.1599 +2026-04-09 00:41:28.843512: val_loss -0.1119 +2026-04-09 00:41:28.845187: Pseudo dice [0.1516, 0.3929, 0.6664, 0.6676, 0.3165, 0.7324, 0.6271] +2026-04-09 00:41:28.847537: Epoch time: 101.44 s +2026-04-09 00:41:29.944396: +2026-04-09 00:41:29.946094: Epoch 307 +2026-04-09 00:41:29.947677: Current learning rate: 0.00719 +2026-04-09 00:43:11.488822: train_loss -0.1491 +2026-04-09 00:43:11.496290: val_loss -0.1447 +2026-04-09 00:43:11.498709: Pseudo dice [0.7282, 0.3512, 0.8219, 0.3284, 0.317, 0.6995, 0.7945] +2026-04-09 00:43:11.500999: Epoch time: 101.55 s +2026-04-09 00:43:12.598435: +2026-04-09 00:43:12.600513: Epoch 308 +2026-04-09 00:43:12.602315: Current learning rate: 0.00718 +2026-04-09 00:44:53.965608: train_loss -0.1802 +2026-04-09 00:44:53.971369: val_loss -0.1531 +2026-04-09 00:44:53.973745: Pseudo dice [0.7595, 0.3538, 0.4017, 0.7717, 0.5826, 0.8057, 0.6393] +2026-04-09 00:44:53.976245: Epoch time: 101.37 s +2026-04-09 00:44:55.083022: +2026-04-09 00:44:55.085457: Epoch 309 +2026-04-09 00:44:55.087910: Current learning rate: 0.00717 +2026-04-09 00:46:37.203302: train_loss -0.1762 +2026-04-09 00:46:37.209665: val_loss -0.1077 +2026-04-09 00:46:37.212238: Pseudo dice [0.8102, 0.2717, 0.4482, 0.0812, 0.388, 0.406, 0.6622] +2026-04-09 00:46:37.214492: Epoch time: 102.12 s +2026-04-09 00:46:38.332407: +2026-04-09 00:46:38.334118: Epoch 310 +2026-04-09 00:46:38.335649: Current learning rate: 0.00716 +2026-04-09 00:48:19.979776: train_loss -0.1723 +2026-04-09 00:48:19.984635: val_loss -0.138 +2026-04-09 00:48:19.986739: Pseudo dice 
[0.4152, 0.2548, 0.7655, 0.1941, 0.4103, 0.8443, 0.8167] +2026-04-09 00:48:19.989053: Epoch time: 101.65 s +2026-04-09 00:48:21.115032: +2026-04-09 00:48:21.116500: Epoch 311 +2026-04-09 00:48:21.117907: Current learning rate: 0.00715 +2026-04-09 00:50:02.812736: train_loss -0.1592 +2026-04-09 00:50:02.820318: val_loss -0.1328 +2026-04-09 00:50:02.823007: Pseudo dice [0.739, 0.3281, 0.6451, 0.0832, 0.3821, 0.7512, 0.8045] +2026-04-09 00:50:02.825332: Epoch time: 101.7 s +2026-04-09 00:50:03.926237: +2026-04-09 00:50:03.928316: Epoch 312 +2026-04-09 00:50:03.930021: Current learning rate: 0.00714 +2026-04-09 00:51:45.309963: train_loss -0.1639 +2026-04-09 00:51:45.314723: val_loss -0.1074 +2026-04-09 00:51:45.316601: Pseudo dice [0.6915, 0.3463, 0.6435, 0.2681, 0.3606, 0.4786, 0.4977] +2026-04-09 00:51:45.318606: Epoch time: 101.39 s +2026-04-09 00:51:46.417353: +2026-04-09 00:51:46.419310: Epoch 313 +2026-04-09 00:51:46.420856: Current learning rate: 0.00713 +2026-04-09 00:53:28.107690: train_loss -0.163 +2026-04-09 00:53:28.114780: val_loss -0.1416 +2026-04-09 00:53:28.116413: Pseudo dice [0.7494, 0.2232, 0.7959, 0.0059, 0.3201, 0.8073, 0.6451] +2026-04-09 00:53:28.119126: Epoch time: 101.69 s +2026-04-09 00:53:29.221899: +2026-04-09 00:53:29.223738: Epoch 314 +2026-04-09 00:53:29.225081: Current learning rate: 0.00712 +2026-04-09 00:55:11.755452: train_loss -0.1663 +2026-04-09 00:55:11.762496: val_loss -0.1611 +2026-04-09 00:55:11.764711: Pseudo dice [0.6643, 0.2723, 0.7428, 0.4697, 0.5039, 0.653, 0.7439] +2026-04-09 00:55:11.767240: Epoch time: 102.54 s +2026-04-09 00:55:12.874721: +2026-04-09 00:55:12.876364: Epoch 315 +2026-04-09 00:55:12.877836: Current learning rate: 0.00711 +2026-04-09 00:56:54.346753: train_loss -0.1657 +2026-04-09 00:56:54.352507: val_loss -0.1247 +2026-04-09 00:56:54.354310: Pseudo dice [0.8111, 0.091, 0.6393, 0.0143, 0.3387, 0.4676, 0.5661] +2026-04-09 00:56:54.356809: Epoch time: 101.48 s +2026-04-09 00:56:55.453814: +2026-04-09 
00:56:55.456720: Epoch 316 +2026-04-09 00:56:55.459070: Current learning rate: 0.0071 +2026-04-09 00:58:37.612977: train_loss -0.1545 +2026-04-09 00:58:37.618441: val_loss -0.1139 +2026-04-09 00:58:37.620156: Pseudo dice [0.7867, 0.1922, 0.5944, 0.0711, 0.4075, 0.4964, 0.7708] +2026-04-09 00:58:37.622355: Epoch time: 102.16 s +2026-04-09 00:58:38.717942: +2026-04-09 00:58:38.719629: Epoch 317 +2026-04-09 00:58:38.721009: Current learning rate: 0.0071 +2026-04-09 01:00:20.165796: train_loss -0.1536 +2026-04-09 01:00:20.171103: val_loss -0.1381 +2026-04-09 01:00:20.173025: Pseudo dice [0.7189, 0.3323, 0.6164, 0.5454, 0.2729, 0.6949, 0.8486] +2026-04-09 01:00:20.175395: Epoch time: 101.45 s +2026-04-09 01:00:21.272653: +2026-04-09 01:00:21.274649: Epoch 318 +2026-04-09 01:00:21.276602: Current learning rate: 0.00709 +2026-04-09 01:02:04.249690: train_loss -0.1701 +2026-04-09 01:02:04.255531: val_loss -0.1008 +2026-04-09 01:02:04.257332: Pseudo dice [0.111, 0.127, 0.7697, 0.4359, 0.2012, 0.7943, 0.7318] +2026-04-09 01:02:04.261695: Epoch time: 102.98 s +2026-04-09 01:02:05.367701: +2026-04-09 01:02:05.369758: Epoch 319 +2026-04-09 01:02:05.372329: Current learning rate: 0.00708 +2026-04-09 01:03:46.696311: train_loss -0.1596 +2026-04-09 01:03:46.701652: val_loss -0.1302 +2026-04-09 01:03:46.704719: Pseudo dice [0.525, 0.3625, 0.7248, 0.4923, 0.3784, 0.6368, 0.7267] +2026-04-09 01:03:46.706986: Epoch time: 101.33 s +2026-04-09 01:03:47.869584: +2026-04-09 01:03:47.871307: Epoch 320 +2026-04-09 01:03:47.873304: Current learning rate: 0.00707 +2026-04-09 01:05:28.946733: train_loss -0.1663 +2026-04-09 01:05:28.952415: val_loss -0.1635 +2026-04-09 01:05:28.954280: Pseudo dice [0.6078, 0.2511, 0.6913, 0.2124, 0.2832, 0.7477, 0.8381] +2026-04-09 01:05:28.956223: Epoch time: 101.08 s +2026-04-09 01:05:30.060888: +2026-04-09 01:05:30.062614: Epoch 321 +2026-04-09 01:05:30.064317: Current learning rate: 0.00706 +2026-04-09 01:07:11.633934: train_loss -0.1741 +2026-04-09 
01:07:11.639954: val_loss -0.1393 +2026-04-09 01:07:11.642414: Pseudo dice [0.5337, 0.4612, 0.8138, 0.8334, 0.3179, 0.8285, 0.8963] +2026-04-09 01:07:11.646302: Epoch time: 101.58 s +2026-04-09 01:07:12.738220: +2026-04-09 01:07:12.740764: Epoch 322 +2026-04-09 01:07:12.744609: Current learning rate: 0.00705 +2026-04-09 01:08:54.164670: train_loss -0.167 +2026-04-09 01:08:54.172346: val_loss -0.1418 +2026-04-09 01:08:54.174582: Pseudo dice [0.7845, 0.5571, 0.7333, 0.3527, 0.2718, 0.8088, 0.798] +2026-04-09 01:08:54.180029: Epoch time: 101.43 s +2026-04-09 01:08:55.270168: +2026-04-09 01:08:55.271769: Epoch 323 +2026-04-09 01:08:55.274298: Current learning rate: 0.00704 +2026-04-09 01:10:36.388206: train_loss -0.1619 +2026-04-09 01:10:36.396082: val_loss -0.1004 +2026-04-09 01:10:36.398020: Pseudo dice [0.5224, 0.3482, 0.5438, 0.0017, 0.2968, 0.6781, 0.7462] +2026-04-09 01:10:36.400873: Epoch time: 101.12 s +2026-04-09 01:10:38.680168: +2026-04-09 01:10:38.682114: Epoch 324 +2026-04-09 01:10:38.683589: Current learning rate: 0.00703 +2026-04-09 01:12:20.335036: train_loss -0.1574 +2026-04-09 01:12:20.340426: val_loss -0.096 +2026-04-09 01:12:20.342811: Pseudo dice [0.6418, 0.1971, 0.5648, 0.0858, 0.3685, 0.6893, 0.6985] +2026-04-09 01:12:20.347763: Epoch time: 101.66 s +2026-04-09 01:12:21.455157: +2026-04-09 01:12:21.457351: Epoch 325 +2026-04-09 01:12:21.459814: Current learning rate: 0.00702 +2026-04-09 01:14:02.860594: train_loss -0.1673 +2026-04-09 01:14:02.865986: val_loss -0.1365 +2026-04-09 01:14:02.868048: Pseudo dice [0.795, 0.288, 0.5544, 0.0092, 0.3872, 0.6186, 0.4859] +2026-04-09 01:14:02.870227: Epoch time: 101.41 s +2026-04-09 01:14:03.966079: +2026-04-09 01:14:03.968262: Epoch 326 +2026-04-09 01:14:03.969892: Current learning rate: 0.00701 +2026-04-09 01:15:45.108645: train_loss -0.1542 +2026-04-09 01:15:45.114299: val_loss -0.1484 +2026-04-09 01:15:45.116693: Pseudo dice [0.7259, 0.4486, 0.5315, 0.0039, 0.3505, 0.6647, 0.871] +2026-04-09 
01:15:45.119004: Epoch time: 101.15 s +2026-04-09 01:15:46.220131: +2026-04-09 01:15:46.223238: Epoch 327 +2026-04-09 01:15:46.224730: Current learning rate: 0.007 +2026-04-09 01:17:28.301161: train_loss -0.1639 +2026-04-09 01:17:28.307050: val_loss -0.13 +2026-04-09 01:17:28.308632: Pseudo dice [0.5892, 0.3144, 0.6872, 0.1631, 0.3419, 0.782, 0.6874] +2026-04-09 01:17:28.313393: Epoch time: 102.08 s +2026-04-09 01:17:29.422821: +2026-04-09 01:17:29.424596: Epoch 328 +2026-04-09 01:17:29.426473: Current learning rate: 0.00699 +2026-04-09 01:19:11.230089: train_loss -0.1777 +2026-04-09 01:19:11.235454: val_loss -0.1403 +2026-04-09 01:19:11.238868: Pseudo dice [0.6683, 0.5105, 0.6229, 0.0011, 0.3746, 0.4142, 0.7462] +2026-04-09 01:19:11.241399: Epoch time: 101.81 s +2026-04-09 01:19:12.339157: +2026-04-09 01:19:12.340950: Epoch 329 +2026-04-09 01:19:12.343050: Current learning rate: 0.00698 +2026-04-09 01:20:54.034693: train_loss -0.1776 +2026-04-09 01:20:54.039615: val_loss -0.1076 +2026-04-09 01:20:54.041092: Pseudo dice [0.7708, 0.534, 0.5728, 0.0177, 0.5595, 0.6434, 0.4739] +2026-04-09 01:20:54.044569: Epoch time: 101.7 s +2026-04-09 01:20:55.141104: +2026-04-09 01:20:55.143268: Epoch 330 +2026-04-09 01:20:55.145381: Current learning rate: 0.00697 +2026-04-09 01:22:36.584651: train_loss -0.1668 +2026-04-09 01:22:36.591131: val_loss -0.1409 +2026-04-09 01:22:36.593975: Pseudo dice [0.7906, 0.4061, 0.7315, 0.0022, 0.188, 0.8353, 0.8605] +2026-04-09 01:22:36.597482: Epoch time: 101.45 s +2026-04-09 01:22:37.718544: +2026-04-09 01:22:37.721510: Epoch 331 +2026-04-09 01:22:37.723031: Current learning rate: 0.00696 +2026-04-09 01:24:19.999681: train_loss -0.1874 +2026-04-09 01:24:20.006893: val_loss -0.1433 +2026-04-09 01:24:20.009065: Pseudo dice [0.7535, 0.559, 0.6559, 0.0004, 0.5252, 0.8302, 0.8646] +2026-04-09 01:24:20.011282: Epoch time: 102.28 s +2026-04-09 01:24:21.110488: +2026-04-09 01:24:21.112249: Epoch 332 +2026-04-09 01:24:21.113642: Current learning rate: 
0.00696 +2026-04-09 01:26:02.762032: train_loss -0.1819 +2026-04-09 01:26:02.773268: val_loss -0.1255 +2026-04-09 01:26:02.776461: Pseudo dice [0.197, 0.3757, 0.6928, 0.469, 0.3452, 0.5976, 0.856] +2026-04-09 01:26:02.779029: Epoch time: 101.65 s +2026-04-09 01:26:03.909742: +2026-04-09 01:26:03.911693: Epoch 333 +2026-04-09 01:26:03.913348: Current learning rate: 0.00695 +2026-04-09 01:27:45.161067: train_loss -0.1854 +2026-04-09 01:27:45.166563: val_loss -0.1416 +2026-04-09 01:27:45.168578: Pseudo dice [0.6785, 0.4195, 0.7932, 0.6672, 0.3882, 0.7023, 0.5458] +2026-04-09 01:27:45.171618: Epoch time: 101.25 s +2026-04-09 01:27:46.279938: +2026-04-09 01:27:46.281405: Epoch 334 +2026-04-09 01:27:46.282863: Current learning rate: 0.00694 +2026-04-09 01:29:28.125131: train_loss -0.1755 +2026-04-09 01:29:28.130841: val_loss -0.1111 +2026-04-09 01:29:28.132962: Pseudo dice [0.6657, 0.3944, 0.6591, 0.0713, 0.1523, 0.6028, 0.7785] +2026-04-09 01:29:28.137060: Epoch time: 101.85 s +2026-04-09 01:29:29.257276: +2026-04-09 01:29:29.258914: Epoch 335 +2026-04-09 01:29:29.260317: Current learning rate: 0.00693 +2026-04-09 01:31:10.665385: train_loss -0.1737 +2026-04-09 01:31:10.670645: val_loss -0.1333 +2026-04-09 01:31:10.672327: Pseudo dice [0.5055, 0.5682, 0.588, 0.5302, 0.4032, 0.5247, 0.829] +2026-04-09 01:31:10.674317: Epoch time: 101.41 s +2026-04-09 01:31:11.792339: +2026-04-09 01:31:11.794563: Epoch 336 +2026-04-09 01:31:11.796546: Current learning rate: 0.00692 +2026-04-09 01:32:53.171013: train_loss -0.1655 +2026-04-09 01:32:53.176808: val_loss -0.0951 +2026-04-09 01:32:53.179266: Pseudo dice [0.4108, 0.0773, 0.6522, 0.0034, 0.2842, 0.3611, 0.5506] +2026-04-09 01:32:53.181657: Epoch time: 101.38 s +2026-04-09 01:32:54.307125: +2026-04-09 01:32:54.316105: Epoch 337 +2026-04-09 01:32:54.318897: Current learning rate: 0.00691 +2026-04-09 01:34:35.377657: train_loss -0.1721 +2026-04-09 01:34:35.384262: val_loss -0.1394 +2026-04-09 01:34:35.386820: Pseudo dice [0.6416, 
0.5315, 0.5251, 0.339, 0.4097, 0.8646, 0.8138] +2026-04-09 01:34:35.388787: Epoch time: 101.07 s +2026-04-09 01:34:36.511347: +2026-04-09 01:34:36.515587: Epoch 338 +2026-04-09 01:34:36.517946: Current learning rate: 0.0069 +2026-04-09 01:36:17.872568: train_loss -0.1547 +2026-04-09 01:36:17.883785: val_loss -0.1568 +2026-04-09 01:36:17.885967: Pseudo dice [0.5433, 0.0975, 0.6781, 0.3827, 0.3516, 0.6486, 0.8112] +2026-04-09 01:36:17.891778: Epoch time: 101.36 s +2026-04-09 01:36:19.008757: +2026-04-09 01:36:19.010539: Epoch 339 +2026-04-09 01:36:19.011934: Current learning rate: 0.00689 +2026-04-09 01:38:00.784106: train_loss -0.1624 +2026-04-09 01:38:00.792058: val_loss -0.1292 +2026-04-09 01:38:00.794358: Pseudo dice [0.6271, 0.3945, 0.6342, 0.0795, 0.3304, 0.4725, 0.8501] +2026-04-09 01:38:00.797363: Epoch time: 101.78 s +2026-04-09 01:38:01.925531: +2026-04-09 01:38:01.927160: Epoch 340 +2026-04-09 01:38:01.928658: Current learning rate: 0.00688 +2026-04-09 01:39:43.214383: train_loss -0.1654 +2026-04-09 01:39:43.222006: val_loss -0.1365 +2026-04-09 01:39:43.224483: Pseudo dice [0.795, 0.1255, 0.6878, 0.0209, 0.2593, 0.7903, 0.8457] +2026-04-09 01:39:43.228453: Epoch time: 101.29 s +2026-04-09 01:39:44.341856: +2026-04-09 01:39:44.344029: Epoch 341 +2026-04-09 01:39:44.346911: Current learning rate: 0.00687 +2026-04-09 01:41:25.709560: train_loss -0.1751 +2026-04-09 01:41:25.714667: val_loss -0.1383 +2026-04-09 01:41:25.716515: Pseudo dice [0.804, 0.6578, 0.6444, 0.0953, 0.3046, 0.7752, 0.6528] +2026-04-09 01:41:25.719320: Epoch time: 101.37 s +2026-04-09 01:41:26.832013: +2026-04-09 01:41:26.834533: Epoch 342 +2026-04-09 01:41:26.836247: Current learning rate: 0.00686 +2026-04-09 01:43:08.024327: train_loss -0.1599 +2026-04-09 01:43:08.032208: val_loss -0.1086 +2026-04-09 01:43:08.037008: Pseudo dice [0.6693, 0.5247, 0.5824, 0.0033, 0.2635, 0.6896, 0.6653] +2026-04-09 01:43:08.040256: Epoch time: 101.2 s +2026-04-09 01:43:09.154961: +2026-04-09 
01:43:09.157655: Epoch 343 +2026-04-09 01:43:09.159233: Current learning rate: 0.00685 +2026-04-09 01:44:50.843882: train_loss -0.1565 +2026-04-09 01:44:50.850940: val_loss -0.1237 +2026-04-09 01:44:50.853836: Pseudo dice [0.801, 0.63, 0.7147, 0.0015, 0.2918, 0.639, 0.5875] +2026-04-09 01:44:50.856486: Epoch time: 101.69 s +2026-04-09 01:44:53.148142: +2026-04-09 01:44:53.149685: Epoch 344 +2026-04-09 01:44:53.151169: Current learning rate: 0.00684 +2026-04-09 01:46:34.397463: train_loss -0.1785 +2026-04-09 01:46:34.403791: val_loss -0.1222 +2026-04-09 01:46:34.406159: Pseudo dice [0.575, 0.477, 0.6632, 0.0726, 0.4171, 0.7001, 0.5808] +2026-04-09 01:46:34.408359: Epoch time: 101.25 s +2026-04-09 01:46:35.609951: +2026-04-09 01:46:35.611654: Epoch 345 +2026-04-09 01:46:35.613582: Current learning rate: 0.00683 +2026-04-09 01:48:16.491969: train_loss -0.1857 +2026-04-09 01:48:16.497802: val_loss -0.1154 +2026-04-09 01:48:16.500025: Pseudo dice [0.771, 0.4733, 0.7418, 0.0074, 0.2625, 0.7293, 0.6404] +2026-04-09 01:48:16.502395: Epoch time: 100.89 s +2026-04-09 01:48:17.619679: +2026-04-09 01:48:17.621968: Epoch 346 +2026-04-09 01:48:17.623522: Current learning rate: 0.00682 +2026-04-09 01:49:59.331282: train_loss -0.1798 +2026-04-09 01:49:59.336365: val_loss -0.128 +2026-04-09 01:49:59.338074: Pseudo dice [0.6813, 0.2657, 0.6364, 0.6021, 0.5106, 0.7715, 0.7475] +2026-04-09 01:49:59.340140: Epoch time: 101.71 s +2026-04-09 01:50:00.468992: +2026-04-09 01:50:00.470697: Epoch 347 +2026-04-09 01:50:00.472648: Current learning rate: 0.00681 +2026-04-09 01:51:41.970713: train_loss -0.1769 +2026-04-09 01:51:41.976557: val_loss -0.1409 +2026-04-09 01:51:41.978651: Pseudo dice [0.8076, 0.0927, 0.6097, 0.5578, 0.3657, 0.716, 0.6928] +2026-04-09 01:51:41.981337: Epoch time: 101.5 s +2026-04-09 01:51:43.098033: +2026-04-09 01:51:43.099562: Epoch 348 +2026-04-09 01:51:43.100928: Current learning rate: 0.0068 +2026-04-09 01:53:24.884231: train_loss -0.1865 +2026-04-09 
01:53:24.889688: val_loss -0.1267 +2026-04-09 01:53:24.891681: Pseudo dice [0.7659, 0.3413, 0.7883, 0.0, 0.3955, 0.8613, 0.5794] +2026-04-09 01:53:24.893778: Epoch time: 101.79 s +2026-04-09 01:53:26.004695: +2026-04-09 01:53:26.006578: Epoch 349 +2026-04-09 01:53:26.010100: Current learning rate: 0.0068 +2026-04-09 01:55:07.926271: train_loss -0.1752 +2026-04-09 01:55:07.931420: val_loss -0.1187 +2026-04-09 01:55:07.934235: Pseudo dice [0.5353, 0.5517, 0.5667, 0.0047, 0.1808, 0.7674, 0.3467] +2026-04-09 01:55:07.936527: Epoch time: 101.92 s +2026-04-09 01:55:10.756322: +2026-04-09 01:55:10.758195: Epoch 350 +2026-04-09 01:55:10.759732: Current learning rate: 0.00679 +2026-04-09 01:56:52.594915: train_loss -0.1645 +2026-04-09 01:56:52.599321: val_loss -0.1316 +2026-04-09 01:56:52.601060: Pseudo dice [0.6164, 0.6782, 0.7467, 0.4323, 0.2134, 0.6791, 0.6643] +2026-04-09 01:56:52.602894: Epoch time: 101.84 s +2026-04-09 01:56:53.728488: +2026-04-09 01:56:53.731289: Epoch 351 +2026-04-09 01:56:53.733383: Current learning rate: 0.00678 +2026-04-09 01:58:35.591480: train_loss -0.1819 +2026-04-09 01:58:35.597857: val_loss -0.1587 +2026-04-09 01:58:35.601614: Pseudo dice [0.3464, 0.1742, 0.802, 0.8018, 0.2777, 0.7664, 0.8845] +2026-04-09 01:58:35.604342: Epoch time: 101.87 s +2026-04-09 01:58:36.720458: +2026-04-09 01:58:36.722845: Epoch 352 +2026-04-09 01:58:36.725060: Current learning rate: 0.00677 +2026-04-09 02:00:17.707670: train_loss -0.1711 +2026-04-09 02:00:17.714166: val_loss -0.1404 +2026-04-09 02:00:17.716816: Pseudo dice [0.5077, 0.296, 0.6006, 0.4764, 0.3907, 0.7805, 0.5496] +2026-04-09 02:00:17.721076: Epoch time: 100.99 s +2026-04-09 02:00:18.848050: +2026-04-09 02:00:18.850607: Epoch 353 +2026-04-09 02:00:18.852507: Current learning rate: 0.00676 +2026-04-09 02:02:00.185417: train_loss -0.1727 +2026-04-09 02:02:00.192751: val_loss -0.1207 +2026-04-09 02:02:00.194793: Pseudo dice [0.2103, 0.3315, 0.5784, 0.0004, 0.711, 0.7683, 0.7011] +2026-04-09 
02:02:00.197202: Epoch time: 101.34 s +2026-04-09 02:02:01.320496: +2026-04-09 02:02:01.322394: Epoch 354 +2026-04-09 02:02:01.324598: Current learning rate: 0.00675 +2026-04-09 02:03:42.925626: train_loss -0.184 +2026-04-09 02:03:42.945451: val_loss -0.1404 +2026-04-09 02:03:42.948090: Pseudo dice [0.371, 0.4137, 0.624, 0.0004, 0.4409, 0.6058, 0.8626] +2026-04-09 02:03:42.950439: Epoch time: 101.61 s +2026-04-09 02:03:44.059916: +2026-04-09 02:03:44.061841: Epoch 355 +2026-04-09 02:03:44.064665: Current learning rate: 0.00674 +2026-04-09 02:05:25.504288: train_loss -0.1831 +2026-04-09 02:05:25.510967: val_loss -0.1433 +2026-04-09 02:05:25.513133: Pseudo dice [0.6777, 0.2605, 0.6746, 0.8819, 0.3831, 0.5732, 0.7179] +2026-04-09 02:05:25.515858: Epoch time: 101.45 s +2026-04-09 02:05:26.631508: +2026-04-09 02:05:26.633482: Epoch 356 +2026-04-09 02:05:26.634863: Current learning rate: 0.00673 +2026-04-09 02:07:08.046011: train_loss -0.1827 +2026-04-09 02:07:08.052130: val_loss -0.1567 +2026-04-09 02:07:08.054857: Pseudo dice [0.4672, 0.0715, 0.5638, 0.7677, 0.4515, 0.8083, 0.7166] +2026-04-09 02:07:08.058335: Epoch time: 101.42 s +2026-04-09 02:07:09.191772: +2026-04-09 02:07:09.194220: Epoch 357 +2026-04-09 02:07:09.197817: Current learning rate: 0.00672 +2026-04-09 02:08:50.579856: train_loss -0.1845 +2026-04-09 02:08:50.587263: val_loss -0.1414 +2026-04-09 02:08:50.589181: Pseudo dice [0.7929, 0.3579, 0.7265, 0.4959, 0.3674, 0.4489, 0.8719] +2026-04-09 02:08:50.591610: Epoch time: 101.39 s +2026-04-09 02:08:51.709127: +2026-04-09 02:08:51.711076: Epoch 358 +2026-04-09 02:08:51.712678: Current learning rate: 0.00671 +2026-04-09 02:10:34.458656: train_loss -0.1739 +2026-04-09 02:10:34.463900: val_loss -0.1478 +2026-04-09 02:10:34.465954: Pseudo dice [0.6266, 0.1155, 0.6246, 0.4748, 0.5191, 0.6702, 0.8027] +2026-04-09 02:10:34.468468: Epoch time: 102.75 s +2026-04-09 02:10:35.587539: +2026-04-09 02:10:35.589987: Epoch 359 +2026-04-09 02:10:35.592330: Current learning 
rate: 0.0067 +2026-04-09 02:12:17.414573: train_loss -0.1755 +2026-04-09 02:12:17.420668: val_loss -0.1427 +2026-04-09 02:12:17.422942: Pseudo dice [0.714, 0.4707, 0.7099, 0.1209, 0.335, 0.5913, 0.8935] +2026-04-09 02:12:17.427151: Epoch time: 101.83 s +2026-04-09 02:12:18.565255: +2026-04-09 02:12:18.571185: Epoch 360 +2026-04-09 02:12:18.573584: Current learning rate: 0.00669 +2026-04-09 02:13:59.818978: train_loss -0.1724 +2026-04-09 02:13:59.823725: val_loss -0.1328 +2026-04-09 02:13:59.828757: Pseudo dice [0.7878, 0.5618, 0.5754, 0.0142, 0.3238, 0.7645, 0.6959] +2026-04-09 02:13:59.833473: Epoch time: 101.26 s +2026-04-09 02:14:00.951391: +2026-04-09 02:14:00.958080: Epoch 361 +2026-04-09 02:14:00.959841: Current learning rate: 0.00668 +2026-04-09 02:15:42.133948: train_loss -0.1754 +2026-04-09 02:15:42.139249: val_loss -0.1252 +2026-04-09 02:15:42.141260: Pseudo dice [0.678, 0.203, 0.703, 0.0167, 0.3083, 0.8756, 0.8173] +2026-04-09 02:15:42.143589: Epoch time: 101.19 s +2026-04-09 02:15:43.265515: +2026-04-09 02:15:43.268089: Epoch 362 +2026-04-09 02:15:43.269955: Current learning rate: 0.00667 +2026-04-09 02:17:25.175153: train_loss -0.1753 +2026-04-09 02:17:25.182718: val_loss -0.1757 +2026-04-09 02:17:25.184959: Pseudo dice [0.7391, 0.2394, 0.6455, 0.0284, 0.3722, 0.6894, 0.8836] +2026-04-09 02:17:25.189768: Epoch time: 101.91 s +2026-04-09 02:17:26.304225: +2026-04-09 02:17:26.306602: Epoch 363 +2026-04-09 02:17:26.308627: Current learning rate: 0.00666 +2026-04-09 02:19:08.678130: train_loss -0.1929 +2026-04-09 02:19:08.685221: val_loss -0.1431 +2026-04-09 02:19:08.687498: Pseudo dice [0.564, 0.0567, 0.6658, 0.0807, 0.5151, 0.7721, 0.6766] +2026-04-09 02:19:08.691427: Epoch time: 102.38 s +2026-04-09 02:19:09.822601: +2026-04-09 02:19:09.824251: Epoch 364 +2026-04-09 02:19:09.825762: Current learning rate: 0.00665 +2026-04-09 02:20:50.809444: train_loss -0.1739 +2026-04-09 02:20:50.819593: val_loss -0.1321 +2026-04-09 02:20:50.821537: Pseudo dice 
[0.6426, 0.2488, 0.6121, 0.0005, 0.4743, 0.7684, 0.8116] +2026-04-09 02:20:50.823748: Epoch time: 100.99 s +2026-04-09 02:20:51.948864: +2026-04-09 02:20:51.951243: Epoch 365 +2026-04-09 02:20:51.952881: Current learning rate: 0.00665 +2026-04-09 02:22:33.664052: train_loss -0.1787 +2026-04-09 02:22:33.671023: val_loss -0.1581 +2026-04-09 02:22:33.672936: Pseudo dice [0.7885, 0.4829, 0.7317, 0.2563, 0.3438, 0.6785, 0.862] +2026-04-09 02:22:33.675683: Epoch time: 101.72 s +2026-04-09 02:22:34.806711: +2026-04-09 02:22:34.808322: Epoch 366 +2026-04-09 02:22:34.810339: Current learning rate: 0.00664 +2026-04-09 02:24:16.163795: train_loss -0.1732 +2026-04-09 02:24:16.171174: val_loss -0.1051 +2026-04-09 02:24:16.175002: Pseudo dice [0.7227, 0.2616, 0.7057, 0.0022, 0.3756, 0.5524, 0.7687] +2026-04-09 02:24:16.177458: Epoch time: 101.36 s +2026-04-09 02:24:17.346647: +2026-04-09 02:24:17.348832: Epoch 367 +2026-04-09 02:24:17.350756: Current learning rate: 0.00663 +2026-04-09 02:25:58.814117: train_loss -0.1909 +2026-04-09 02:25:58.820444: val_loss -0.1472 +2026-04-09 02:25:58.823636: Pseudo dice [0.768, 0.1592, 0.7002, 0.0191, 0.4806, 0.5662, 0.6676] +2026-04-09 02:25:58.828299: Epoch time: 101.47 s +2026-04-09 02:25:59.957328: +2026-04-09 02:25:59.958982: Epoch 368 +2026-04-09 02:25:59.960913: Current learning rate: 0.00662 +2026-04-09 02:27:40.818725: train_loss -0.1681 +2026-04-09 02:27:40.825661: val_loss -0.1492 +2026-04-09 02:27:40.827723: Pseudo dice [0.5961, 0.6721, 0.6696, 0.0134, 0.3568, 0.7832, 0.7558] +2026-04-09 02:27:40.836627: Epoch time: 100.86 s +2026-04-09 02:27:41.950201: +2026-04-09 02:27:41.952145: Epoch 369 +2026-04-09 02:27:41.953703: Current learning rate: 0.00661 +2026-04-09 02:29:23.423103: train_loss -0.1941 +2026-04-09 02:29:23.427933: val_loss -0.1028 +2026-04-09 02:29:23.430007: Pseudo dice [0.6969, 0.3741, 0.5078, 0.0001, 0.4823, 0.6353, 0.6987] +2026-04-09 02:29:23.434214: Epoch time: 101.48 s +2026-04-09 02:29:24.556119: +2026-04-09 
02:29:24.557674: Epoch 370 +2026-04-09 02:29:24.559535: Current learning rate: 0.0066 +2026-04-09 02:31:06.052827: train_loss -0.1751 +2026-04-09 02:31:06.060146: val_loss -0.1091 +2026-04-09 02:31:06.062394: Pseudo dice [0.3057, 0.2301, 0.5543, 0.4986, 0.3167, 0.777, 0.5391] +2026-04-09 02:31:06.064599: Epoch time: 101.5 s +2026-04-09 02:31:07.185438: +2026-04-09 02:31:07.189839: Epoch 371 +2026-04-09 02:31:07.191962: Current learning rate: 0.00659 +2026-04-09 02:32:49.539521: train_loss -0.1772 +2026-04-09 02:32:49.545699: val_loss -0.1437 +2026-04-09 02:32:49.547608: Pseudo dice [0.7311, 0.3111, 0.5895, 0.6461, 0.5831, 0.8338, 0.7257] +2026-04-09 02:32:49.549662: Epoch time: 102.36 s +2026-04-09 02:32:50.668494: +2026-04-09 02:32:50.670523: Epoch 372 +2026-04-09 02:32:50.672232: Current learning rate: 0.00658 +2026-04-09 02:34:32.345489: train_loss -0.1894 +2026-04-09 02:34:32.351373: val_loss -0.1626 +2026-04-09 02:34:32.353785: Pseudo dice [0.528, 0.3914, 0.7974, 0.1643, 0.4049, 0.8337, 0.8525] +2026-04-09 02:34:32.356126: Epoch time: 101.68 s +2026-04-09 02:34:33.484370: +2026-04-09 02:34:33.487156: Epoch 373 +2026-04-09 02:34:33.489985: Current learning rate: 0.00657 +2026-04-09 02:36:14.940092: train_loss -0.1788 +2026-04-09 02:36:14.945121: val_loss -0.1214 +2026-04-09 02:36:14.947326: Pseudo dice [0.7709, 0.473, 0.6331, 0.5197, 0.2219, 0.7157, 0.5599] +2026-04-09 02:36:14.951561: Epoch time: 101.46 s +2026-04-09 02:36:16.065048: +2026-04-09 02:36:16.066989: Epoch 374 +2026-04-09 02:36:16.068504: Current learning rate: 0.00656 +2026-04-09 02:37:58.694659: train_loss -0.1622 +2026-04-09 02:37:58.700510: val_loss -0.052 +2026-04-09 02:37:58.702640: Pseudo dice [0.6692, 0.14, 0.6187, 0.0608, 0.2963, 0.6032, 0.5706] +2026-04-09 02:37:58.704714: Epoch time: 102.63 s +2026-04-09 02:37:59.842198: +2026-04-09 02:37:59.844158: Epoch 375 +2026-04-09 02:37:59.846016: Current learning rate: 0.00655 +2026-04-09 02:39:41.119495: train_loss -0.1788 +2026-04-09 
02:39:41.124159: val_loss -0.1236 +2026-04-09 02:39:41.125956: Pseudo dice [0.7572, 0.3811, 0.5576, 0.1697, 0.4299, 0.7836, 0.4252] +2026-04-09 02:39:41.128221: Epoch time: 101.28 s +2026-04-09 02:39:42.263517: +2026-04-09 02:39:42.265219: Epoch 376 +2026-04-09 02:39:42.267081: Current learning rate: 0.00654 +2026-04-09 02:41:23.873743: train_loss -0.1733 +2026-04-09 02:41:23.878825: val_loss -0.1224 +2026-04-09 02:41:23.881184: Pseudo dice [0.7804, 0.1418, 0.6501, 0.2547, 0.4423, 0.5308, 0.8516] +2026-04-09 02:41:23.884465: Epoch time: 101.61 s +2026-04-09 02:41:25.008028: +2026-04-09 02:41:25.010005: Epoch 377 +2026-04-09 02:41:25.011410: Current learning rate: 0.00653 +2026-04-09 02:43:06.448756: train_loss -0.1789 +2026-04-09 02:43:06.454014: val_loss -0.1384 +2026-04-09 02:43:06.456030: Pseudo dice [0.7695, 0.3132, 0.7022, 0.3921, 0.4347, 0.8043, 0.8074] +2026-04-09 02:43:06.458129: Epoch time: 101.44 s +2026-04-09 02:43:07.590894: +2026-04-09 02:43:07.592704: Epoch 378 +2026-04-09 02:43:07.594636: Current learning rate: 0.00652 +2026-04-09 02:44:49.018898: train_loss -0.1656 +2026-04-09 02:44:49.026926: val_loss -0.1481 +2026-04-09 02:44:49.031607: Pseudo dice [0.7902, 0.5768, 0.7061, 0.0914, 0.4129, 0.7503, 0.8567] +2026-04-09 02:44:49.034207: Epoch time: 101.43 s +2026-04-09 02:44:50.150888: +2026-04-09 02:44:50.152590: Epoch 379 +2026-04-09 02:44:50.154453: Current learning rate: 0.00651 +2026-04-09 02:46:31.741969: train_loss -0.1702 +2026-04-09 02:46:31.748693: val_loss -0.1481 +2026-04-09 02:46:31.750935: Pseudo dice [0.8032, 0.0573, 0.7011, 0.323, 0.5359, 0.8526, 0.6768] +2026-04-09 02:46:31.753697: Epoch time: 101.59 s +2026-04-09 02:46:32.886289: +2026-04-09 02:46:32.888050: Epoch 380 +2026-04-09 02:46:32.889871: Current learning rate: 0.0065 +2026-04-09 02:48:14.104938: train_loss -0.1784 +2026-04-09 02:48:14.112553: val_loss -0.1498 +2026-04-09 02:48:14.115440: Pseudo dice [0.7346, 0.4415, 0.5138, 0.0022, 0.2457, 0.8047, 0.8798] +2026-04-09 
02:48:14.132159: Epoch time: 101.22 s +2026-04-09 02:48:15.264690: +2026-04-09 02:48:15.268067: Epoch 381 +2026-04-09 02:48:15.270061: Current learning rate: 0.00649 +2026-04-09 02:49:57.566381: train_loss -0.1838 +2026-04-09 02:49:57.572932: val_loss -0.169 +2026-04-09 02:49:57.575055: Pseudo dice [0.6764, 0.3338, 0.5794, 0.3117, 0.4597, 0.7504, 0.7389] +2026-04-09 02:49:57.577305: Epoch time: 102.3 s +2026-04-09 02:49:58.708457: +2026-04-09 02:49:58.709972: Epoch 382 +2026-04-09 02:49:58.711521: Current learning rate: 0.00648 +2026-04-09 02:51:40.344581: train_loss -0.1795 +2026-04-09 02:51:40.351667: val_loss -0.1391 +2026-04-09 02:51:40.353728: Pseudo dice [0.7118, 0.5884, 0.8045, 0.3593, 0.3716, 0.8053, 0.5726] +2026-04-09 02:51:40.356498: Epoch time: 101.64 s +2026-04-09 02:51:40.358685: Yayy! New best EMA pseudo Dice: 0.5438 +2026-04-09 02:51:44.191118: +2026-04-09 02:51:44.192605: Epoch 383 +2026-04-09 02:51:44.194071: Current learning rate: 0.00648 +2026-04-09 02:53:26.583116: train_loss -0.1602 +2026-04-09 02:53:26.590585: val_loss -0.0783 +2026-04-09 02:53:26.593132: Pseudo dice [0.7122, 0.4228, 0.7135, 0.0555, 0.3658, 0.1378, 0.8523] +2026-04-09 02:53:26.595775: Epoch time: 102.4 s +2026-04-09 02:53:27.728083: +2026-04-09 02:53:27.729973: Epoch 384 +2026-04-09 02:53:27.731980: Current learning rate: 0.00647 +2026-04-09 02:55:09.601142: train_loss -0.1557 +2026-04-09 02:55:09.605962: val_loss -0.0888 +2026-04-09 02:55:09.609196: Pseudo dice [0.2161, 0.5286, 0.6479, 0.2045, 0.3603, 0.7899, 0.5723] +2026-04-09 02:55:09.611240: Epoch time: 101.88 s +2026-04-09 02:55:10.754309: +2026-04-09 02:55:10.756341: Epoch 385 +2026-04-09 02:55:10.758405: Current learning rate: 0.00646 +2026-04-09 02:56:53.144582: train_loss -0.1718 +2026-04-09 02:56:53.153494: val_loss -0.1091 +2026-04-09 02:56:53.159066: Pseudo dice [0.6159, 0.53, 0.8086, 0.0776, 0.23, 0.7576, 0.6194] +2026-04-09 02:56:53.162117: Epoch time: 102.39 s +2026-04-09 02:56:54.309108: +2026-04-09 
02:56:54.312661: Epoch 386 +2026-04-09 02:56:54.315135: Current learning rate: 0.00645 +2026-04-09 02:58:35.659837: train_loss -0.187 +2026-04-09 02:58:35.665488: val_loss -0.1451 +2026-04-09 02:58:35.667705: Pseudo dice [0.6035, 0.5459, 0.7477, 0.2614, 0.258, 0.5838, 0.8382] +2026-04-09 02:58:35.670046: Epoch time: 101.35 s +2026-04-09 02:58:36.806723: +2026-04-09 02:58:36.808415: Epoch 387 +2026-04-09 02:58:36.810626: Current learning rate: 0.00644 +2026-04-09 03:00:18.635485: train_loss -0.1825 +2026-04-09 03:00:18.643057: val_loss -0.1111 +2026-04-09 03:00:18.644876: Pseudo dice [0.2196, 0.0319, 0.6808, 0.0063, 0.5384, 0.7114, 0.3827] +2026-04-09 03:00:18.647450: Epoch time: 101.83 s +2026-04-09 03:00:19.800019: +2026-04-09 03:00:19.802538: Epoch 388 +2026-04-09 03:00:19.805268: Current learning rate: 0.00643 +2026-04-09 03:02:01.051292: train_loss -0.1749 +2026-04-09 03:02:01.057820: val_loss -0.0973 +2026-04-09 03:02:01.059479: Pseudo dice [0.7339, 0.3531, 0.5601, 0.062, 0.2367, 0.7707, 0.7854] +2026-04-09 03:02:01.062239: Epoch time: 101.25 s +2026-04-09 03:02:02.211161: +2026-04-09 03:02:02.214752: Epoch 389 +2026-04-09 03:02:02.216918: Current learning rate: 0.00642 +2026-04-09 03:03:43.733178: train_loss -0.1746 +2026-04-09 03:03:43.738856: val_loss -0.1193 +2026-04-09 03:03:43.740594: Pseudo dice [0.8089, 0.109, 0.5984, 0.4693, 0.2748, 0.6431, 0.7244] +2026-04-09 03:03:43.742774: Epoch time: 101.53 s +2026-04-09 03:03:44.891827: +2026-04-09 03:03:44.893962: Epoch 390 +2026-04-09 03:03:44.896843: Current learning rate: 0.00641 +2026-04-09 03:05:27.428064: train_loss -0.1762 +2026-04-09 03:05:27.434686: val_loss -0.1608 +2026-04-09 03:05:27.436691: Pseudo dice [0.3511, 0.2068, 0.7021, 0.2173, 0.4043, 0.8396, 0.7184] +2026-04-09 03:05:27.439025: Epoch time: 102.54 s +2026-04-09 03:05:28.590278: +2026-04-09 03:05:28.593181: Epoch 391 +2026-04-09 03:05:28.595649: Current learning rate: 0.0064 +2026-04-09 03:07:10.333579: train_loss -0.1904 +2026-04-09 
03:07:10.340991: val_loss -0.1697 +2026-04-09 03:07:10.343661: Pseudo dice [0.5177, 0.2229, 0.7463, 0.591, 0.2599, 0.7466, 0.8521] +2026-04-09 03:07:10.346040: Epoch time: 101.75 s +2026-04-09 03:07:11.493530: +2026-04-09 03:07:11.496057: Epoch 392 +2026-04-09 03:07:11.498107: Current learning rate: 0.00639 +2026-04-09 03:08:53.079196: train_loss -0.1919 +2026-04-09 03:08:53.085049: val_loss -0.1543 +2026-04-09 03:08:53.086655: Pseudo dice [0.7369, 0.2425, 0.6815, 0.0361, 0.4328, 0.7746, 0.7378] +2026-04-09 03:08:53.089193: Epoch time: 101.59 s +2026-04-09 03:08:54.248938: +2026-04-09 03:08:54.250581: Epoch 393 +2026-04-09 03:08:54.251938: Current learning rate: 0.00638 +2026-04-09 03:10:35.886018: train_loss -0.1821 +2026-04-09 03:10:35.895351: val_loss -0.144 +2026-04-09 03:10:35.897493: Pseudo dice [0.7233, 0.3073, 0.593, 0.2265, 0.3962, 0.2901, 0.7953] +2026-04-09 03:10:35.901452: Epoch time: 101.64 s +2026-04-09 03:10:37.043333: +2026-04-09 03:10:37.045700: Epoch 394 +2026-04-09 03:10:37.047936: Current learning rate: 0.00637 +2026-04-09 03:12:18.673033: train_loss -0.1779 +2026-04-09 03:12:18.680363: val_loss -0.133 +2026-04-09 03:12:18.682411: Pseudo dice [0.5418, 0.4638, 0.7161, 0.0199, 0.2403, 0.6639, 0.8783] +2026-04-09 03:12:18.684837: Epoch time: 101.63 s +2026-04-09 03:12:19.813633: +2026-04-09 03:12:19.815234: Epoch 395 +2026-04-09 03:12:19.816700: Current learning rate: 0.00636 +2026-04-09 03:14:01.757258: train_loss -0.2 +2026-04-09 03:14:01.763841: val_loss -0.059 +2026-04-09 03:14:01.765706: Pseudo dice [0.7699, 0.2025, 0.7606, 0.0156, 0.4318, 0.1261, 0.7561] +2026-04-09 03:14:01.770028: Epoch time: 101.95 s +2026-04-09 03:14:02.911489: +2026-04-09 03:14:02.913261: Epoch 396 +2026-04-09 03:14:02.914896: Current learning rate: 0.00635 +2026-04-09 03:15:43.730626: train_loss -0.1726 +2026-04-09 03:15:43.739455: val_loss -0.1368 +2026-04-09 03:15:43.742512: Pseudo dice [0.4742, 0.533, 0.7597, 0.1898, 0.5252, 0.8352, 0.8011] +2026-04-09 
03:15:43.746141: Epoch time: 100.82 s +2026-04-09 03:15:44.900735: +2026-04-09 03:15:44.902500: Epoch 397 +2026-04-09 03:15:44.904027: Current learning rate: 0.00634 +2026-04-09 03:17:27.496682: train_loss -0.1837 +2026-04-09 03:17:27.502824: val_loss -0.1488 +2026-04-09 03:17:27.504879: Pseudo dice [0.7699, 0.225, 0.7239, 0.6312, 0.4573, 0.6985, 0.5021] +2026-04-09 03:17:27.508974: Epoch time: 102.6 s +2026-04-09 03:17:28.652749: +2026-04-09 03:17:28.655608: Epoch 398 +2026-04-09 03:17:28.657291: Current learning rate: 0.00633 +2026-04-09 03:19:10.160221: train_loss -0.1826 +2026-04-09 03:19:10.166363: val_loss -0.1512 +2026-04-09 03:19:10.168931: Pseudo dice [0.75, 0.1299, 0.6469, 0.7779, 0.2636, 0.8369, 0.841] +2026-04-09 03:19:10.172013: Epoch time: 101.51 s +2026-04-09 03:19:11.293781: +2026-04-09 03:19:11.295615: Epoch 399 +2026-04-09 03:19:11.297324: Current learning rate: 0.00632 +2026-04-09 03:20:52.781550: train_loss -0.1881 +2026-04-09 03:20:52.786824: val_loss -0.1443 +2026-04-09 03:20:52.788766: Pseudo dice [0.747, 0.1676, 0.7816, 0.3792, 0.5526, 0.6298, 0.7789] +2026-04-09 03:20:52.791489: Epoch time: 101.49 s +2026-04-09 03:20:55.592162: +2026-04-09 03:20:55.595479: Epoch 400 +2026-04-09 03:20:55.598522: Current learning rate: 0.00631 +2026-04-09 03:22:37.452238: train_loss -0.1792 +2026-04-09 03:22:37.458218: val_loss -0.1766 +2026-04-09 03:22:37.459933: Pseudo dice [0.7217, 0.5358, 0.6881, 0.4223, 0.3665, 0.8407, 0.5426] +2026-04-09 03:22:37.463006: Epoch time: 101.86 s +2026-04-09 03:22:38.626764: +2026-04-09 03:22:38.629049: Epoch 401 +2026-04-09 03:22:38.632822: Current learning rate: 0.0063 +2026-04-09 03:24:19.940245: train_loss -0.1881 +2026-04-09 03:24:19.945389: val_loss -0.1789 +2026-04-09 03:24:19.947021: Pseudo dice [0.8325, 0.5191, 0.6978, 0.6737, 0.3568, 0.8224, 0.885] +2026-04-09 03:24:19.949034: Epoch time: 101.32 s +2026-04-09 03:24:19.951481: Yayy! 
New best EMA pseudo Dice: 0.5526 +2026-04-09 03:24:23.880794: +2026-04-09 03:24:23.882737: Epoch 402 +2026-04-09 03:24:23.884238: Current learning rate: 0.0063 +2026-04-09 03:26:06.297803: train_loss -0.1833 +2026-04-09 03:26:06.304847: val_loss -0.1241 +2026-04-09 03:26:06.307331: Pseudo dice [0.493, 0.3077, 0.7364, 0.4079, 0.2977, 0.6107, 0.7682] +2026-04-09 03:26:06.311569: Epoch time: 102.42 s +2026-04-09 03:26:07.459707: +2026-04-09 03:26:07.461602: Epoch 403 +2026-04-09 03:26:07.464014: Current learning rate: 0.00629 +2026-04-09 03:27:51.059734: train_loss -0.1802 +2026-04-09 03:27:51.066703: val_loss -0.1264 +2026-04-09 03:27:51.069166: Pseudo dice [0.741, 0.4852, 0.7042, 0.0728, 0.1691, 0.4501, 0.7712] +2026-04-09 03:27:51.071824: Epoch time: 103.6 s +2026-04-09 03:27:52.199347: +2026-04-09 03:27:52.201075: Epoch 404 +2026-04-09 03:27:52.202973: Current learning rate: 0.00628 +2026-04-09 03:29:33.603141: train_loss -0.1845 +2026-04-09 03:29:33.610592: val_loss -0.146 +2026-04-09 03:29:33.613235: Pseudo dice [0.5553, 0.3851, 0.6325, 0.0025, 0.3331, 0.6578, 0.7538] +2026-04-09 03:29:33.617002: Epoch time: 101.41 s +2026-04-09 03:29:34.775013: +2026-04-09 03:29:34.777372: Epoch 405 +2026-04-09 03:29:34.779367: Current learning rate: 0.00627 +2026-04-09 03:31:16.709159: train_loss -0.1961 +2026-04-09 03:31:16.715571: val_loss -0.162 +2026-04-09 03:31:16.718127: Pseudo dice [0.7362, 0.1891, 0.6624, 0.1082, 0.4674, 0.7773, 0.7259] +2026-04-09 03:31:16.721350: Epoch time: 101.94 s +2026-04-09 03:31:17.892167: +2026-04-09 03:31:17.894027: Epoch 406 +2026-04-09 03:31:17.895821: Current learning rate: 0.00626 +2026-04-09 03:32:59.645272: train_loss -0.1825 +2026-04-09 03:32:59.652052: val_loss -0.1536 +2026-04-09 03:32:59.654009: Pseudo dice [0.4857, 0.6278, 0.7364, 0.5648, 0.5015, 0.7634, 0.6243] +2026-04-09 03:32:59.656353: Epoch time: 101.76 s +2026-04-09 03:33:00.787828: +2026-04-09 03:33:00.790397: Epoch 407 +2026-04-09 03:33:00.803383: Current learning rate: 
0.00625 +2026-04-09 03:34:44.997339: train_loss -0.1852 +2026-04-09 03:34:45.002849: val_loss -0.1408 +2026-04-09 03:34:45.005171: Pseudo dice [0.7659, 0.3862, 0.8052, 0.0066, 0.3728, 0.3822, 0.7967] +2026-04-09 03:34:45.007449: Epoch time: 104.21 s +2026-04-09 03:34:46.154936: +2026-04-09 03:34:46.157016: Epoch 408 +2026-04-09 03:34:46.158854: Current learning rate: 0.00624 +2026-04-09 03:36:27.744532: train_loss -0.1885 +2026-04-09 03:36:27.749650: val_loss -0.1545 +2026-04-09 03:36:27.751881: Pseudo dice [0.6858, 0.0802, 0.6175, 0.4887, 0.2913, 0.6232, 0.8162] +2026-04-09 03:36:27.755501: Epoch time: 101.59 s +2026-04-09 03:36:28.912331: +2026-04-09 03:36:28.914453: Epoch 409 +2026-04-09 03:36:28.917041: Current learning rate: 0.00623 +2026-04-09 03:38:10.571786: train_loss -0.188 +2026-04-09 03:38:10.577426: val_loss -0.1226 +2026-04-09 03:38:10.579030: Pseudo dice [0.3875, 0.5052, 0.6971, 0.0115, 0.1858, 0.8273, 0.6246] +2026-04-09 03:38:10.581353: Epoch time: 101.66 s +2026-04-09 03:38:11.735389: +2026-04-09 03:38:11.737388: Epoch 410 +2026-04-09 03:38:11.738955: Current learning rate: 0.00622 +2026-04-09 03:39:52.996644: train_loss -0.1808 +2026-04-09 03:39:53.004529: val_loss -0.1267 +2026-04-09 03:39:53.006495: Pseudo dice [0.5803, 0.5281, 0.6454, 0.1575, 0.3255, 0.6759, 0.8634] +2026-04-09 03:39:53.010497: Epoch time: 101.26 s +2026-04-09 03:39:54.091723: +2026-04-09 03:39:54.093975: Epoch 411 +2026-04-09 03:39:54.096473: Current learning rate: 0.00621 +2026-04-09 03:41:36.036052: train_loss -0.1733 +2026-04-09 03:41:36.041782: val_loss -0.1474 +2026-04-09 03:41:36.043926: Pseudo dice [0.7159, 0.6203, 0.6966, 0.3962, 0.3607, 0.653, 0.8272] +2026-04-09 03:41:36.046043: Epoch time: 101.95 s +2026-04-09 03:41:37.123564: +2026-04-09 03:41:37.125655: Epoch 412 +2026-04-09 03:41:37.127082: Current learning rate: 0.0062 +2026-04-09 03:43:20.012896: train_loss -0.1681 +2026-04-09 03:43:20.017880: val_loss -0.1303 +2026-04-09 03:43:20.019773: Pseudo dice [0.6314, 
0.597, 0.5494, 0.0724, 0.3122, 0.4302, 0.893] +2026-04-09 03:43:20.023642: Epoch time: 102.89 s +2026-04-09 03:43:21.092303: +2026-04-09 03:43:21.093989: Epoch 413 +2026-04-09 03:43:21.096045: Current learning rate: 0.00619 +2026-04-09 03:45:02.603690: train_loss -0.1927 +2026-04-09 03:45:02.609670: val_loss -0.1403 +2026-04-09 03:45:02.613145: Pseudo dice [0.6961, 0.4541, 0.7182, 0.0537, 0.2943, 0.8196, 0.8086] +2026-04-09 03:45:02.618644: Epoch time: 101.51 s +2026-04-09 03:45:03.700627: +2026-04-09 03:45:03.702389: Epoch 414 +2026-04-09 03:45:03.705233: Current learning rate: 0.00618 +2026-04-09 03:46:45.477980: train_loss -0.1721 +2026-04-09 03:46:45.487146: val_loss -0.1451 +2026-04-09 03:46:45.489230: Pseudo dice [0.4745, 0.1514, 0.6029, 0.0514, 0.1931, 0.8517, 0.7811] +2026-04-09 03:46:45.491785: Epoch time: 101.78 s +2026-04-09 03:46:46.568890: +2026-04-09 03:46:46.571181: Epoch 415 +2026-04-09 03:46:46.573038: Current learning rate: 0.00617 +2026-04-09 03:48:28.256230: train_loss -0.1812 +2026-04-09 03:48:28.261884: val_loss -0.1284 +2026-04-09 03:48:28.264161: Pseudo dice [0.2841, 0.6142, 0.7663, 0.2136, 0.3939, 0.7659, 0.6597] +2026-04-09 03:48:28.268412: Epoch time: 101.69 s +2026-04-09 03:48:29.348970: +2026-04-09 03:48:29.352875: Epoch 416 +2026-04-09 03:48:29.354606: Current learning rate: 0.00616 +2026-04-09 03:50:11.330090: train_loss -0.1481 +2026-04-09 03:50:11.335610: val_loss -0.1296 +2026-04-09 03:50:11.337387: Pseudo dice [0.4812, 0.1196, 0.3198, 0.7651, 0.4161, 0.6638, 0.5602] +2026-04-09 03:50:11.339563: Epoch time: 101.98 s +2026-04-09 03:50:12.424509: +2026-04-09 03:50:12.427111: Epoch 417 +2026-04-09 03:50:12.428956: Current learning rate: 0.00615 +2026-04-09 03:51:55.322695: train_loss -0.1589 +2026-04-09 03:51:55.327470: val_loss -0.1096 +2026-04-09 03:51:55.329066: Pseudo dice [0.611, 0.2408, 0.7983, 0.0228, 0.4693, 0.7067, 0.7681] +2026-04-09 03:51:55.333541: Epoch time: 102.9 s +2026-04-09 03:51:56.410870: +2026-04-09 
03:51:56.412484: Epoch 418 +2026-04-09 03:51:56.413947: Current learning rate: 0.00614 +2026-04-09 03:53:37.676702: train_loss -0.171 +2026-04-09 03:53:37.683639: val_loss -0.1508 +2026-04-09 03:53:37.685770: Pseudo dice [0.6961, 0.6979, 0.7298, 0.7156, 0.4152, 0.7346, 0.522] +2026-04-09 03:53:37.688503: Epoch time: 101.27 s +2026-04-09 03:53:38.772741: +2026-04-09 03:53:38.774851: Epoch 419 +2026-04-09 03:53:38.776403: Current learning rate: 0.00613 +2026-04-09 03:55:20.641155: train_loss -0.1724 +2026-04-09 03:55:20.648276: val_loss -0.0841 +2026-04-09 03:55:20.650400: Pseudo dice [0.7518, 0.4803, 0.6341, 0.0386, 0.2821, 0.1421, 0.4605] +2026-04-09 03:55:20.654983: Epoch time: 101.87 s +2026-04-09 03:55:21.723352: +2026-04-09 03:55:21.725060: Epoch 420 +2026-04-09 03:55:21.726954: Current learning rate: 0.00612 +2026-04-09 03:57:04.635755: train_loss -0.1798 +2026-04-09 03:57:04.640517: val_loss -0.1189 +2026-04-09 03:57:04.642064: Pseudo dice [0.623, 0.6476, 0.6414, 0.0489, 0.3096, 0.5604, 0.8356] +2026-04-09 03:57:04.644019: Epoch time: 102.92 s +2026-04-09 03:57:05.713550: +2026-04-09 03:57:05.716083: Epoch 421 +2026-04-09 03:57:05.719060: Current learning rate: 0.00612 +2026-04-09 03:58:48.287218: train_loss -0.1818 +2026-04-09 03:58:48.292457: val_loss -0.143 +2026-04-09 03:58:48.294286: Pseudo dice [0.7296, 0.5224, 0.7673, 0.003, 0.2894, 0.7473, 0.8504] +2026-04-09 03:58:48.296736: Epoch time: 102.58 s +2026-04-09 03:58:49.368186: +2026-04-09 03:58:49.371058: Epoch 422 +2026-04-09 03:58:49.373201: Current learning rate: 0.00611 +2026-04-09 04:00:31.441986: train_loss -0.1627 +2026-04-09 04:00:31.448654: val_loss -0.084 +2026-04-09 04:00:31.450325: Pseudo dice [0.5659, 0.6859, 0.476, 0.0464, 0.2362, 0.7107, 0.4903] +2026-04-09 04:00:31.454560: Epoch time: 102.08 s +2026-04-09 04:00:32.527755: +2026-04-09 04:00:32.529912: Epoch 423 +2026-04-09 04:00:32.533343: Current learning rate: 0.0061 +2026-04-09 04:02:13.976564: train_loss -0.191 +2026-04-09 
04:02:14.039320: val_loss -0.1206 +2026-04-09 04:02:14.043005: Pseudo dice [0.7254, 0.51, 0.6581, 0.0922, 0.4017, 0.3903, 0.8204] +2026-04-09 04:02:14.047340: Epoch time: 101.45 s +2026-04-09 04:02:15.147851: +2026-04-09 04:02:15.149457: Epoch 424 +2026-04-09 04:02:15.150855: Current learning rate: 0.00609 +2026-04-09 04:03:56.981166: train_loss -0.1869 +2026-04-09 04:03:56.987557: val_loss -0.1572 +2026-04-09 04:03:56.989339: Pseudo dice [0.5064, 0.595, 0.6608, 0.0052, 0.3428, 0.6979, 0.7702] +2026-04-09 04:03:56.992053: Epoch time: 101.84 s +2026-04-09 04:03:58.079831: +2026-04-09 04:03:58.081447: Epoch 425 +2026-04-09 04:03:58.082854: Current learning rate: 0.00608 +2026-04-09 04:05:39.543037: train_loss -0.1785 +2026-04-09 04:05:39.550586: val_loss -0.0902 +2026-04-09 04:05:39.552508: Pseudo dice [0.8116, 0.4738, 0.6558, 0.0207, 0.3912, 0.7346, 0.8709] +2026-04-09 04:05:39.554753: Epoch time: 101.47 s +2026-04-09 04:05:40.635973: +2026-04-09 04:05:40.637824: Epoch 426 +2026-04-09 04:05:40.639350: Current learning rate: 0.00607 +2026-04-09 04:07:23.049662: train_loss -0.1785 +2026-04-09 04:07:23.055757: val_loss -0.1319 +2026-04-09 04:07:23.060289: Pseudo dice [0.5444, 0.2437, 0.8273, 0.0033, 0.4591, 0.7609, 0.766] +2026-04-09 04:07:23.063422: Epoch time: 102.42 s +2026-04-09 04:07:24.154525: +2026-04-09 04:07:24.156455: Epoch 427 +2026-04-09 04:07:24.158535: Current learning rate: 0.00606 +2026-04-09 04:09:05.363626: train_loss -0.1864 +2026-04-09 04:09:05.368520: val_loss -0.1307 +2026-04-09 04:09:05.370429: Pseudo dice [0.2817, 0.0741, 0.6745, 0.0786, 0.3766, 0.7956, 0.8946] +2026-04-09 04:09:05.372676: Epoch time: 101.21 s +2026-04-09 04:09:06.491593: +2026-04-09 04:09:06.493401: Epoch 428 +2026-04-09 04:09:06.494922: Current learning rate: 0.00605 +2026-04-09 04:10:47.731717: train_loss -0.1835 +2026-04-09 04:10:47.737659: val_loss -0.1121 +2026-04-09 04:10:47.740607: Pseudo dice [0.6984, 0.2804, 0.5128, 0.9023, 0.2876, 0.7322, 0.8477] +2026-04-09 
04:10:47.743544: Epoch time: 101.24 s +2026-04-09 04:10:48.829864: +2026-04-09 04:10:48.831875: Epoch 429 +2026-04-09 04:10:48.833425: Current learning rate: 0.00604 +2026-04-09 04:12:30.453923: train_loss -0.168 +2026-04-09 04:12:30.460251: val_loss -0.0724 +2026-04-09 04:12:30.463097: Pseudo dice [0.7164, 0.4591, 0.5437, 0.0251, 0.3716, 0.6485, 0.701] +2026-04-09 04:12:30.465479: Epoch time: 101.63 s +2026-04-09 04:12:31.546758: +2026-04-09 04:12:31.549850: Epoch 430 +2026-04-09 04:12:31.551479: Current learning rate: 0.00603 +2026-04-09 04:14:13.437003: train_loss -0.1933 +2026-04-09 04:14:13.447080: val_loss -0.1369 +2026-04-09 04:14:13.449092: Pseudo dice [0.0, 0.6318, 0.6688, 0.0299, 0.5435, 0.5063, 0.8436] +2026-04-09 04:14:13.452498: Epoch time: 101.89 s +2026-04-09 04:14:14.534455: +2026-04-09 04:14:14.536181: Epoch 431 +2026-04-09 04:14:14.537700: Current learning rate: 0.00602 +2026-04-09 04:15:56.241545: train_loss -0.1874 +2026-04-09 04:15:56.247973: val_loss -0.1244 +2026-04-09 04:15:56.250484: Pseudo dice [0.7501, 0.0682, 0.5364, 0.0485, 0.5628, 0.7218, 0.7515] +2026-04-09 04:15:56.254877: Epoch time: 101.71 s +2026-04-09 04:15:57.369982: +2026-04-09 04:15:57.371754: Epoch 432 +2026-04-09 04:15:57.373451: Current learning rate: 0.00601 +2026-04-09 04:17:39.148929: train_loss -0.1839 +2026-04-09 04:17:39.158676: val_loss -0.1503 +2026-04-09 04:17:39.160990: Pseudo dice [0.5551, 0.2961, 0.7354, 0.3027, 0.3993, 0.8318, 0.8489] +2026-04-09 04:17:39.163380: Epoch time: 101.78 s +2026-04-09 04:17:40.287973: +2026-04-09 04:17:40.290675: Epoch 433 +2026-04-09 04:17:40.292668: Current learning rate: 0.006 +2026-04-09 04:19:21.856333: train_loss -0.1852 +2026-04-09 04:19:21.862814: val_loss -0.1493 +2026-04-09 04:19:21.864790: Pseudo dice [0.8579, 0.524, 0.6172, 0.4965, 0.4909, 0.7354, 0.827] +2026-04-09 04:19:21.867469: Epoch time: 101.57 s +2026-04-09 04:19:23.014915: +2026-04-09 04:19:23.017262: Epoch 434 +2026-04-09 04:19:23.020008: Current learning rate: 
0.00599 +2026-04-09 04:21:04.383817: train_loss -0.1899 +2026-04-09 04:21:04.396078: val_loss -0.1112 +2026-04-09 04:21:04.398716: Pseudo dice [0.8086, 0.2708, 0.7438, 0.0488, 0.2849, 0.6912, 0.7087] +2026-04-09 04:21:04.401030: Epoch time: 101.37 s +2026-04-09 04:21:05.492907: +2026-04-09 04:21:05.495529: Epoch 435 +2026-04-09 04:21:05.497807: Current learning rate: 0.00598 +2026-04-09 04:22:46.751240: train_loss -0.1842 +2026-04-09 04:22:46.756856: val_loss -0.1714 +2026-04-09 04:22:46.758977: Pseudo dice [0.8072, 0.4613, 0.8057, 0.5046, 0.3015, 0.8043, 0.8384] +2026-04-09 04:22:46.760975: Epoch time: 101.26 s +2026-04-09 04:22:47.879324: +2026-04-09 04:22:47.881128: Epoch 436 +2026-04-09 04:22:47.882828: Current learning rate: 0.00597 +2026-04-09 04:24:29.229648: train_loss -0.1914 +2026-04-09 04:24:29.236766: val_loss -0.1014 +2026-04-09 04:24:29.238706: Pseudo dice [0.77, 0.649, 0.6388, 0.0212, 0.5477, 0.7354, 0.6802] +2026-04-09 04:24:29.243142: Epoch time: 101.35 s +2026-04-09 04:24:30.343947: +2026-04-09 04:24:30.345906: Epoch 437 +2026-04-09 04:24:30.347755: Current learning rate: 0.00596 +2026-04-09 04:26:11.898936: train_loss -0.1787 +2026-04-09 04:26:11.906331: val_loss -0.1385 +2026-04-09 04:26:11.908582: Pseudo dice [0.4729, 0.6143, 0.7445, 0.0006, 0.3798, 0.4613, 0.6631] +2026-04-09 04:26:11.911921: Epoch time: 101.56 s +2026-04-09 04:26:13.007497: +2026-04-09 04:26:13.009320: Epoch 438 +2026-04-09 04:26:13.011057: Current learning rate: 0.00595 +2026-04-09 04:27:54.620710: train_loss -0.1969 +2026-04-09 04:27:54.626252: val_loss -0.1431 +2026-04-09 04:27:54.628121: Pseudo dice [0.6633, 0.4733, 0.5426, 0.7687, 0.3603, 0.8199, 0.7198] +2026-04-09 04:27:54.630454: Epoch time: 101.62 s +2026-04-09 04:27:55.719362: +2026-04-09 04:27:55.721147: Epoch 439 +2026-04-09 04:27:55.722783: Current learning rate: 0.00594 +2026-04-09 04:29:37.471047: train_loss -0.1885 +2026-04-09 04:29:37.478114: val_loss -0.1498 +2026-04-09 04:29:37.480305: Pseudo dice [0.7232, 
0.537, 0.7353, 0.0441, 0.3437, 0.7085, 0.6735] +2026-04-09 04:29:37.482742: Epoch time: 101.75 s +2026-04-09 04:29:38.554663: +2026-04-09 04:29:38.556296: Epoch 440 +2026-04-09 04:29:38.557733: Current learning rate: 0.00593 +2026-04-09 04:31:20.809419: train_loss -0.1953 +2026-04-09 04:31:20.816869: val_loss -0.1453 +2026-04-09 04:31:20.820821: Pseudo dice [0.708, 0.5515, 0.7812, 0.0464, 0.4075, 0.6465, 0.8929] +2026-04-09 04:31:20.823516: Epoch time: 102.26 s +2026-04-09 04:31:21.910045: +2026-04-09 04:31:21.912185: Epoch 441 +2026-04-09 04:31:21.913907: Current learning rate: 0.00592 +2026-04-09 04:33:04.205625: train_loss -0.1848 +2026-04-09 04:33:04.214246: val_loss -0.1345 +2026-04-09 04:33:04.216505: Pseudo dice [0.7474, 0.1777, 0.7533, 0.1218, 0.2805, 0.7905, 0.8308] +2026-04-09 04:33:04.218569: Epoch time: 102.3 s +2026-04-09 04:33:06.508313: +2026-04-09 04:33:06.510233: Epoch 442 +2026-04-09 04:33:06.511787: Current learning rate: 0.00592 +2026-04-09 04:34:48.619788: train_loss -0.1839 +2026-04-09 04:34:48.626242: val_loss -0.1218 +2026-04-09 04:34:48.628834: Pseudo dice [0.7955, 0.6711, 0.6211, 0.0043, 0.319, 0.8467, 0.6929] +2026-04-09 04:34:48.631775: Epoch time: 102.11 s +2026-04-09 04:34:49.716655: +2026-04-09 04:34:49.718553: Epoch 443 +2026-04-09 04:34:49.720225: Current learning rate: 0.00591 +2026-04-09 04:36:31.390145: train_loss -0.1726 +2026-04-09 04:36:31.397232: val_loss -0.1193 +2026-04-09 04:36:31.399198: Pseudo dice [0.569, 0.3602, 0.7032, 0.0636, 0.3241, 0.7901, 0.821] +2026-04-09 04:36:31.401418: Epoch time: 101.68 s +2026-04-09 04:36:32.487222: +2026-04-09 04:36:32.489050: Epoch 444 +2026-04-09 04:36:32.491057: Current learning rate: 0.0059 +2026-04-09 04:38:14.199116: train_loss -0.1861 +2026-04-09 04:38:14.204356: val_loss -0.1627 +2026-04-09 04:38:14.206194: Pseudo dice [0.7877, 0.56, 0.7859, 0.5099, 0.3598, 0.8219, 0.8097] +2026-04-09 04:38:14.210125: Epoch time: 101.72 s +2026-04-09 04:38:14.212505: Yayy! 
New best EMA pseudo Dice: 0.5568 +2026-04-09 04:38:17.020269: +2026-04-09 04:38:17.022188: Epoch 445 +2026-04-09 04:38:17.023587: Current learning rate: 0.00589 +2026-04-09 04:39:58.732195: train_loss -0.1791 +2026-04-09 04:39:58.739492: val_loss -0.1419 +2026-04-09 04:39:58.741451: Pseudo dice [0.8865, 0.2184, 0.143, 0.4045, 0.3779, 0.7184, 0.8329] +2026-04-09 04:39:58.747206: Epoch time: 101.72 s +2026-04-09 04:39:59.814179: +2026-04-09 04:39:59.816065: Epoch 446 +2026-04-09 04:39:59.817836: Current learning rate: 0.00588 +2026-04-09 04:41:41.143855: train_loss -0.1867 +2026-04-09 04:41:41.149942: val_loss -0.1608 +2026-04-09 04:41:41.152179: Pseudo dice [0.6837, 0.5196, 0.814, 0.0376, 0.5684, 0.4285, 0.8361] +2026-04-09 04:41:41.154557: Epoch time: 101.33 s +2026-04-09 04:41:42.232772: +2026-04-09 04:41:42.234964: Epoch 447 +2026-04-09 04:41:42.236639: Current learning rate: 0.00587 +2026-04-09 04:43:23.568362: train_loss -0.1869 +2026-04-09 04:43:23.573980: val_loss -0.1087 +2026-04-09 04:43:23.575657: Pseudo dice [0.7611, 0.2928, 0.7481, 0.0004, 0.4812, 0.6367, 0.6874] +2026-04-09 04:43:23.577805: Epoch time: 101.34 s +2026-04-09 04:43:24.647529: +2026-04-09 04:43:24.650012: Epoch 448 +2026-04-09 04:43:24.652811: Current learning rate: 0.00586 +2026-04-09 04:45:05.853774: train_loss -0.1933 +2026-04-09 04:45:05.858343: val_loss -0.1259 +2026-04-09 04:45:05.860023: Pseudo dice [0.83, 0.3349, 0.6624, 0.1014, 0.3936, 0.8306, 0.6307] +2026-04-09 04:45:05.861688: Epoch time: 101.21 s +2026-04-09 04:45:06.943343: +2026-04-09 04:45:06.956026: Epoch 449 +2026-04-09 04:45:06.968855: Current learning rate: 0.00585 +2026-04-09 04:46:48.836412: train_loss -0.178 +2026-04-09 04:46:48.850761: val_loss -0.1732 +2026-04-09 04:46:48.853862: Pseudo dice [0.8225, 0.3864, 0.7711, 0.1179, 0.3798, 0.8549, 0.8756] +2026-04-09 04:46:48.860331: Epoch time: 101.9 s +2026-04-09 04:46:51.669364: +2026-04-09 04:46:51.671441: Epoch 450 +2026-04-09 04:46:51.672846: Current learning rate: 
0.00584 +2026-04-09 04:48:33.313274: train_loss -0.1992 +2026-04-09 04:48:33.323862: val_loss -0.1544 +2026-04-09 04:48:33.326439: Pseudo dice [0.6403, 0.1801, 0.7617, 0.0779, 0.3137, 0.8612, 0.7267] +2026-04-09 04:48:33.329551: Epoch time: 101.65 s +2026-04-09 04:48:34.408488: +2026-04-09 04:48:34.411191: Epoch 451 +2026-04-09 04:48:34.413686: Current learning rate: 0.00583 +2026-04-09 04:50:15.810398: train_loss -0.1995 +2026-04-09 04:50:15.818454: val_loss -0.1519 +2026-04-09 04:50:15.820606: Pseudo dice [0.6209, 0.3702, 0.743, 0.0198, 0.3512, 0.7302, 0.7466] +2026-04-09 04:50:15.824014: Epoch time: 101.41 s +2026-04-09 04:50:16.898696: +2026-04-09 04:50:16.900690: Epoch 452 +2026-04-09 04:50:16.902324: Current learning rate: 0.00582 +2026-04-09 04:51:58.592071: train_loss -0.1594 +2026-04-09 04:51:58.599068: val_loss -0.1414 +2026-04-09 04:51:58.601163: Pseudo dice [0.8286, 0.5535, 0.6549, 0.0048, 0.4195, 0.3018, 0.8429] +2026-04-09 04:51:58.603257: Epoch time: 101.7 s +2026-04-09 04:51:59.676692: +2026-04-09 04:51:59.678966: Epoch 453 +2026-04-09 04:51:59.680550: Current learning rate: 0.00581 +2026-04-09 04:53:41.144185: train_loss -0.1659 +2026-04-09 04:53:41.151165: val_loss -0.1611 +2026-04-09 04:53:41.155579: Pseudo dice [0.5773, 0.4888, 0.6759, 0.0072, 0.5783, 0.7547, 0.6238] +2026-04-09 04:53:41.157926: Epoch time: 101.47 s +2026-04-09 04:53:42.229673: +2026-04-09 04:53:42.233185: Epoch 454 +2026-04-09 04:53:42.235343: Current learning rate: 0.0058 +2026-04-09 04:55:24.436591: train_loss -0.1828 +2026-04-09 04:55:24.449410: val_loss -0.1254 +2026-04-09 04:55:24.454232: Pseudo dice [0.2518, 0.2644, 0.7487, 0.0601, 0.4336, 0.84, 0.7585] +2026-04-09 04:55:24.457390: Epoch time: 102.21 s +2026-04-09 04:55:25.553505: +2026-04-09 04:55:25.555717: Epoch 455 +2026-04-09 04:55:25.557898: Current learning rate: 0.00579 +2026-04-09 04:57:07.212989: train_loss -0.1801 +2026-04-09 04:57:07.220126: val_loss -0.1084 +2026-04-09 04:57:07.222145: Pseudo dice [0.4921, 
0.1726, 0.6559, 0.0688, 0.3209, 0.7193, 0.6696] +2026-04-09 04:57:07.224858: Epoch time: 101.66 s +2026-04-09 04:57:08.343475: +2026-04-09 04:57:08.345526: Epoch 456 +2026-04-09 04:57:08.351400: Current learning rate: 0.00578 +2026-04-09 04:58:50.813358: train_loss -0.2024 +2026-04-09 04:58:50.822675: val_loss -0.105 +2026-04-09 04:58:50.826742: Pseudo dice [0.707, 0.1298, 0.5668, 0.0634, 0.2193, 0.8385, 0.7495] +2026-04-09 04:58:50.829659: Epoch time: 102.47 s +2026-04-09 04:58:51.921718: +2026-04-09 04:58:51.923567: Epoch 457 +2026-04-09 04:58:51.926172: Current learning rate: 0.00577 +2026-04-09 05:00:34.119112: train_loss -0.1883 +2026-04-09 05:00:34.123954: val_loss -0.1202 +2026-04-09 05:00:34.125717: Pseudo dice [0.8096, 0.6218, 0.745, 0.016, 0.6175, 0.5087, 0.7469] +2026-04-09 05:00:34.128796: Epoch time: 102.2 s +2026-04-09 05:00:35.223198: +2026-04-09 05:00:35.225286: Epoch 458 +2026-04-09 05:00:35.227187: Current learning rate: 0.00576 +2026-04-09 05:02:17.266829: train_loss -0.1989 +2026-04-09 05:02:17.273901: val_loss -0.0314 +2026-04-09 05:02:17.275772: Pseudo dice [0.5435, 0.6046, 0.6465, 0.0118, 0.4541, 0.2405, 0.5605] +2026-04-09 05:02:17.281252: Epoch time: 102.05 s +2026-04-09 05:02:18.383583: +2026-04-09 05:02:18.385102: Epoch 459 +2026-04-09 05:02:18.386909: Current learning rate: 0.00575 +2026-04-09 05:04:00.842956: train_loss -0.1846 +2026-04-09 05:04:00.847537: val_loss -0.1281 +2026-04-09 05:04:00.849643: Pseudo dice [0.3878, 0.4493, 0.7305, 0.0004, 0.5218, 0.276, 0.7135] +2026-04-09 05:04:00.851851: Epoch time: 102.46 s +2026-04-09 05:04:01.900381: +2026-04-09 05:04:01.902050: Epoch 460 +2026-04-09 05:04:01.903653: Current learning rate: 0.00574 +2026-04-09 05:05:45.306489: train_loss -0.1869 +2026-04-09 05:05:45.315268: val_loss -0.1398 +2026-04-09 05:05:45.316834: Pseudo dice [0.6793, 0.358, 0.6845, 0.0055, 0.4213, 0.7918, 0.9032] +2026-04-09 05:05:45.318664: Epoch time: 103.41 s +2026-04-09 05:05:46.377627: +2026-04-09 05:05:46.379250: 
Epoch 461 +2026-04-09 05:05:46.380682: Current learning rate: 0.00573 +2026-04-09 05:07:29.005166: train_loss -0.1805 +2026-04-09 05:07:29.013257: val_loss -0.1475 +2026-04-09 05:07:29.016138: Pseudo dice [0.8227, 0.2297, 0.6667, 0.5319, 0.4507, 0.203, 0.5987] +2026-04-09 05:07:29.021078: Epoch time: 102.63 s +2026-04-09 05:07:31.277792: +2026-04-09 05:07:31.279382: Epoch 462 +2026-04-09 05:07:31.280843: Current learning rate: 0.00572 +2026-04-09 05:09:13.550933: train_loss -0.1738 +2026-04-09 05:09:13.556746: val_loss -0.1111 +2026-04-09 05:09:13.558703: Pseudo dice [0.169, 0.6188, 0.6312, 0.0014, 0.5265, 0.7512, 0.7192] +2026-04-09 05:09:13.561211: Epoch time: 102.28 s +2026-04-09 05:09:14.683140: +2026-04-09 05:09:14.684677: Epoch 463 +2026-04-09 05:09:14.691887: Current learning rate: 0.00571 +2026-04-09 05:10:56.278388: train_loss -0.164 +2026-04-09 05:10:56.286915: val_loss -0.145 +2026-04-09 05:10:56.289124: Pseudo dice [0.7734, 0.3629, 0.7426, 0.0078, 0.3834, 0.5644, 0.7618] +2026-04-09 05:10:56.291269: Epoch time: 101.6 s +2026-04-09 05:10:57.773755: +2026-04-09 05:10:57.775438: Epoch 464 +2026-04-09 05:10:57.776810: Current learning rate: 0.0057 +2026-04-09 05:12:40.137198: train_loss -0.1787 +2026-04-09 05:12:40.142212: val_loss -0.041 +2026-04-09 05:12:40.144386: Pseudo dice [0.6824, 0.2057, 0.6317, 0.0494, 0.3255, 0.6624, 0.7605] +2026-04-09 05:12:40.146208: Epoch time: 102.37 s +2026-04-09 05:12:41.220997: +2026-04-09 05:12:41.223745: Epoch 465 +2026-04-09 05:12:41.225561: Current learning rate: 0.0057 +2026-04-09 05:14:23.230448: train_loss -0.1787 +2026-04-09 05:14:23.239148: val_loss -0.1036 +2026-04-09 05:14:23.242193: Pseudo dice [0.3049, 0.4773, 0.7275, 0.0941, 0.3799, 0.7065, 0.8035] +2026-04-09 05:14:23.244702: Epoch time: 102.01 s +2026-04-09 05:14:24.323172: +2026-04-09 05:14:24.325055: Epoch 466 +2026-04-09 05:14:24.326907: Current learning rate: 0.00569 +2026-04-09 05:16:05.835816: train_loss -0.19 +2026-04-09 05:16:05.843500: val_loss 
-0.15 +2026-04-09 05:16:05.845810: Pseudo dice [0.8585, 0.2418, 0.5808, 0.1293, 0.3023, 0.6789, 0.7381] +2026-04-09 05:16:05.848729: Epoch time: 101.52 s +2026-04-09 05:16:06.917720: +2026-04-09 05:16:06.919694: Epoch 467 +2026-04-09 05:16:06.921511: Current learning rate: 0.00568 +2026-04-09 05:17:48.342176: train_loss -0.1736 +2026-04-09 05:17:48.349125: val_loss -0.1325 +2026-04-09 05:17:48.351139: Pseudo dice [0.7728, 0.218, 0.6124, 0.3268, 0.263, 0.7912, 0.6954] +2026-04-09 05:17:48.354833: Epoch time: 101.43 s +2026-04-09 05:17:49.431544: +2026-04-09 05:17:49.433438: Epoch 468 +2026-04-09 05:17:49.435312: Current learning rate: 0.00567 +2026-04-09 05:19:30.824202: train_loss -0.1769 +2026-04-09 05:19:30.835582: val_loss -0.1206 +2026-04-09 05:19:30.837939: Pseudo dice [0.2722, 0.1229, 0.5787, 0.1412, 0.2863, 0.5404, 0.8158] +2026-04-09 05:19:30.840485: Epoch time: 101.4 s +2026-04-09 05:19:31.927487: +2026-04-09 05:19:31.932810: Epoch 469 +2026-04-09 05:19:31.937112: Current learning rate: 0.00566 +2026-04-09 05:21:14.214858: train_loss -0.1933 +2026-04-09 05:21:14.221428: val_loss -0.1265 +2026-04-09 05:21:14.223892: Pseudo dice [0.3971, 0.6288, 0.6544, 0.0995, 0.4685, 0.7617, 0.6278] +2026-04-09 05:21:14.226902: Epoch time: 102.29 s +2026-04-09 05:21:15.285384: +2026-04-09 05:21:15.287704: Epoch 470 +2026-04-09 05:21:15.289427: Current learning rate: 0.00565 +2026-04-09 05:22:57.354965: train_loss -0.1957 +2026-04-09 05:22:57.362161: val_loss -0.1693 +2026-04-09 05:22:57.366834: Pseudo dice [0.8477, 0.5782, 0.851, 0.7474, 0.3478, 0.8367, 0.8585] +2026-04-09 05:22:57.369670: Epoch time: 102.07 s +2026-04-09 05:22:58.450958: +2026-04-09 05:22:58.452605: Epoch 471 +2026-04-09 05:22:58.454147: Current learning rate: 0.00564 +2026-04-09 05:24:40.372322: train_loss -0.186 +2026-04-09 05:24:40.377679: val_loss -0.1384 +2026-04-09 05:24:40.380072: Pseudo dice [0.7948, 0.5871, 0.708, 0.4942, 0.4439, 0.6016, 0.6697] +2026-04-09 05:24:40.382768: Epoch time: 101.92 s 
+2026-04-09 05:24:41.451765: +2026-04-09 05:24:41.453533: Epoch 472 +2026-04-09 05:24:41.455332: Current learning rate: 0.00563 +2026-04-09 05:26:23.011638: train_loss -0.1802 +2026-04-09 05:26:23.016905: val_loss -0.1251 +2026-04-09 05:26:23.018640: Pseudo dice [0.3826, 0.436, 0.4417, 0.0, 0.3022, 0.7897, 0.7667] +2026-04-09 05:26:23.020837: Epoch time: 101.56 s +2026-04-09 05:26:24.083212: +2026-04-09 05:26:24.085013: Epoch 473 +2026-04-09 05:26:24.086960: Current learning rate: 0.00562 +2026-04-09 05:28:06.266527: train_loss -0.1839 +2026-04-09 05:28:06.272522: val_loss -0.1434 +2026-04-09 05:28:06.274316: Pseudo dice [0.6201, 0.2063, 0.7614, 0.0102, 0.5188, 0.7794, 0.5856] +2026-04-09 05:28:06.276243: Epoch time: 102.19 s +2026-04-09 05:28:07.345522: +2026-04-09 05:28:07.347120: Epoch 474 +2026-04-09 05:28:07.349301: Current learning rate: 0.00561 +2026-04-09 05:29:49.078505: train_loss -0.1858 +2026-04-09 05:29:49.084248: val_loss -0.1464 +2026-04-09 05:29:49.086221: Pseudo dice [0.8442, 0.249, 0.6123, 0.0228, 0.4244, 0.4625, 0.6323] +2026-04-09 05:29:49.088625: Epoch time: 101.74 s +2026-04-09 05:29:50.160717: +2026-04-09 05:29:50.163498: Epoch 475 +2026-04-09 05:29:50.166330: Current learning rate: 0.0056 +2026-04-09 05:31:32.308769: train_loss -0.173 +2026-04-09 05:31:32.316716: val_loss -0.1427 +2026-04-09 05:31:32.319502: Pseudo dice [0.431, 0.3321, 0.5928, 0.404, 0.4074, 0.8149, 0.597] +2026-04-09 05:31:32.322137: Epoch time: 102.15 s +2026-04-09 05:31:33.387440: +2026-04-09 05:31:33.389361: Epoch 476 +2026-04-09 05:31:33.391254: Current learning rate: 0.00559 +2026-04-09 05:33:14.509246: train_loss -0.1902 +2026-04-09 05:33:14.517133: val_loss -0.1132 +2026-04-09 05:33:14.519031: Pseudo dice [0.7309, 0.4869, 0.2583, 0.0635, 0.4512, 0.6553, 0.7024] +2026-04-09 05:33:14.522677: Epoch time: 101.12 s +2026-04-09 05:33:15.589752: +2026-04-09 05:33:15.591383: Epoch 477 +2026-04-09 05:33:15.592953: Current learning rate: 0.00558 +2026-04-09 05:34:57.560062: 
train_loss -0.1776 +2026-04-09 05:34:57.572048: val_loss -0.159 +2026-04-09 05:34:57.574331: Pseudo dice [0.7275, 0.2749, 0.8021, 0.1007, 0.4006, 0.8342, 0.8058] +2026-04-09 05:34:57.577764: Epoch time: 101.97 s +2026-04-09 05:34:58.656576: +2026-04-09 05:34:58.659265: Epoch 478 +2026-04-09 05:34:58.661132: Current learning rate: 0.00557 +2026-04-09 05:36:39.803738: train_loss -0.1919 +2026-04-09 05:36:39.809683: val_loss -0.0905 +2026-04-09 05:36:39.811492: Pseudo dice [0.6368, 0.6349, 0.5001, 0.0088, 0.2517, 0.274, 0.8188] +2026-04-09 05:36:39.814390: Epoch time: 101.15 s +2026-04-09 05:36:40.900129: +2026-04-09 05:36:40.901775: Epoch 479 +2026-04-09 05:36:40.903383: Current learning rate: 0.00556 +2026-04-09 05:38:24.367272: train_loss -0.1854 +2026-04-09 05:38:24.372922: val_loss -0.1667 +2026-04-09 05:38:24.374787: Pseudo dice [0.6102, 0.1568, 0.2677, 0.5912, 0.3332, 0.6736, 0.8559] +2026-04-09 05:38:24.377123: Epoch time: 103.47 s +2026-04-09 05:38:25.471789: +2026-04-09 05:38:25.475090: Epoch 480 +2026-04-09 05:38:25.477074: Current learning rate: 0.00555 +2026-04-09 05:40:06.944417: train_loss -0.1986 +2026-04-09 05:40:06.951764: val_loss -0.117 +2026-04-09 05:40:06.954370: Pseudo dice [0.2511, 0.2573, 0.5555, 0.0002, 0.5003, 0.2517, 0.6395] +2026-04-09 05:40:06.957110: Epoch time: 101.48 s +2026-04-09 05:40:08.043000: +2026-04-09 05:40:08.044742: Epoch 481 +2026-04-09 05:40:08.046713: Current learning rate: 0.00554 +2026-04-09 05:41:49.965237: train_loss -0.1938 +2026-04-09 05:41:49.971059: val_loss -0.1402 +2026-04-09 05:41:49.972712: Pseudo dice [0.5945, 0.115, 0.4681, 0.0153, 0.4028, 0.8311, 0.8914] +2026-04-09 05:41:49.975830: Epoch time: 101.93 s +2026-04-09 05:41:51.069050: +2026-04-09 05:41:51.070913: Epoch 482 +2026-04-09 05:41:51.072590: Current learning rate: 0.00553 +2026-04-09 05:43:34.625222: train_loss -0.1662 +2026-04-09 05:43:34.635235: val_loss -0.1409 +2026-04-09 05:43:34.637154: Pseudo dice [0.4594, 0.5652, 0.7434, 0.3009, 0.3859, 
0.6507, 0.6512] +2026-04-09 05:43:34.639652: Epoch time: 103.56 s +2026-04-09 05:43:36.958354: +2026-04-09 05:43:36.960160: Epoch 483 +2026-04-09 05:43:36.961828: Current learning rate: 0.00552 +2026-04-09 05:45:17.949312: train_loss -0.1845 +2026-04-09 05:45:17.955733: val_loss -0.1074 +2026-04-09 05:45:17.957500: Pseudo dice [0.8069, 0.4821, 0.6404, 0.0976, 0.2872, 0.2167, 0.6394] +2026-04-09 05:45:17.959816: Epoch time: 100.99 s +2026-04-09 05:45:19.057486: +2026-04-09 05:45:19.059488: Epoch 484 +2026-04-09 05:45:19.061654: Current learning rate: 0.00551 +2026-04-09 05:47:01.133801: train_loss -0.182 +2026-04-09 05:47:01.140208: val_loss -0.1255 +2026-04-09 05:47:01.142706: Pseudo dice [0.7315, 0.6798, 0.6338, 0.1298, 0.4587, 0.8613, 0.5773] +2026-04-09 05:47:01.145953: Epoch time: 102.08 s +2026-04-09 05:47:02.249906: +2026-04-09 05:47:02.252699: Epoch 485 +2026-04-09 05:47:02.254956: Current learning rate: 0.0055 +2026-04-09 05:48:44.588861: train_loss -0.1714 +2026-04-09 05:48:44.595197: val_loss -0.1554 +2026-04-09 05:48:44.597863: Pseudo dice [0.7591, 0.4939, 0.7757, 0.1376, 0.441, 0.6592, 0.6977] +2026-04-09 05:48:44.600427: Epoch time: 102.34 s +2026-04-09 05:48:45.683510: +2026-04-09 05:48:45.685177: Epoch 486 +2026-04-09 05:48:45.687025: Current learning rate: 0.00549 +2026-04-09 05:50:27.792129: train_loss -0.1899 +2026-04-09 05:50:27.799127: val_loss -0.1149 +2026-04-09 05:50:27.801325: Pseudo dice [0.7206, 0.2128, 0.6383, 0.1094, 0.5113, 0.6314, 0.7681] +2026-04-09 05:50:27.803825: Epoch time: 102.11 s +2026-04-09 05:50:28.897417: +2026-04-09 05:50:28.899318: Epoch 487 +2026-04-09 05:50:28.900971: Current learning rate: 0.00548 +2026-04-09 05:52:10.239944: train_loss -0.1984 +2026-04-09 05:52:10.245057: val_loss -0.1519 +2026-04-09 05:52:10.247103: Pseudo dice [0.8392, 0.4266, 0.6309, 0.1735, 0.4959, 0.7953, 0.7192] +2026-04-09 05:52:10.249566: Epoch time: 101.35 s +2026-04-09 05:52:11.337670: +2026-04-09 05:52:11.341244: Epoch 488 +2026-04-09 
05:52:11.345015: Current learning rate: 0.00547 +2026-04-09 05:53:53.724440: train_loss -0.1986 +2026-04-09 05:53:53.730816: val_loss -0.1434 +2026-04-09 05:53:53.740669: Pseudo dice [0.7789, 0.5455, 0.7991, 0.0003, 0.3319, 0.8143, 0.7665] +2026-04-09 05:53:53.742651: Epoch time: 102.39 s +2026-04-09 05:53:54.835612: +2026-04-09 05:53:54.837152: Epoch 489 +2026-04-09 05:53:54.838523: Current learning rate: 0.00546 +2026-04-09 05:55:35.849548: train_loss -0.1998 +2026-04-09 05:55:35.854846: val_loss -0.0986 +2026-04-09 05:55:35.856541: Pseudo dice [0.6987, 0.2465, 0.6896, 0.0485, 0.4271, 0.7442, 0.8272] +2026-04-09 05:55:35.858618: Epoch time: 101.02 s +2026-04-09 05:55:36.942727: +2026-04-09 05:55:36.944753: Epoch 490 +2026-04-09 05:55:36.946539: Current learning rate: 0.00546 +2026-04-09 05:57:18.713900: train_loss -0.1788 +2026-04-09 05:57:18.722424: val_loss -0.1001 +2026-04-09 05:57:18.731637: Pseudo dice [0.0828, 0.7677, 0.5284, 0.0002, 0.3209, 0.4894, 0.6137] +2026-04-09 05:57:18.734057: Epoch time: 101.77 s +2026-04-09 05:57:19.810668: +2026-04-09 05:57:19.813668: Epoch 491 +2026-04-09 05:57:19.816938: Current learning rate: 0.00545 +2026-04-09 05:59:01.412844: train_loss -0.1858 +2026-04-09 05:59:01.428139: val_loss -0.1251 +2026-04-09 05:59:01.433422: Pseudo dice [0.6706, 0.5045, 0.3797, 0.0002, 0.3171, 0.847, 0.7413] +2026-04-09 05:59:01.439268: Epoch time: 101.61 s +2026-04-09 05:59:02.524662: +2026-04-09 05:59:02.526515: Epoch 492 +2026-04-09 05:59:02.527983: Current learning rate: 0.00544 +2026-04-09 06:00:45.057392: train_loss -0.1895 +2026-04-09 06:00:45.064291: val_loss -0.1194 +2026-04-09 06:00:45.066342: Pseudo dice [0.3552, 0.2657, 0.6741, 0.0111, 0.3732, 0.7538, 0.7047] +2026-04-09 06:00:45.069090: Epoch time: 102.54 s +2026-04-09 06:00:46.164442: +2026-04-09 06:00:46.167359: Epoch 493 +2026-04-09 06:00:46.169397: Current learning rate: 0.00543 +2026-04-09 06:02:27.301784: train_loss -0.2044 +2026-04-09 06:02:27.307517: val_loss -0.1229 
+2026-04-09 06:02:27.309698: Pseudo dice [0.8072, 0.5265, 0.7282, 0.0697, 0.3595, 0.8288, 0.8067] +2026-04-09 06:02:27.312727: Epoch time: 101.14 s +2026-04-09 06:02:28.416904: +2026-04-09 06:02:28.419033: Epoch 494 +2026-04-09 06:02:28.420998: Current learning rate: 0.00542 +2026-04-09 06:04:11.902314: train_loss -0.1877 +2026-04-09 06:04:11.909009: val_loss -0.1509 +2026-04-09 06:04:11.911486: Pseudo dice [0.6936, 0.3451, 0.7608, 0.4935, 0.3463, 0.6827, 0.7602] +2026-04-09 06:04:11.914273: Epoch time: 103.49 s +2026-04-09 06:04:12.993397: +2026-04-09 06:04:12.995030: Epoch 495 +2026-04-09 06:04:12.996614: Current learning rate: 0.00541 +2026-04-09 06:05:54.143231: train_loss -0.1838 +2026-04-09 06:05:54.153710: val_loss -0.1363 +2026-04-09 06:05:54.155653: Pseudo dice [0.7811, 0.503, 0.6289, 0.1149, 0.3997, 0.7782, 0.8903] +2026-04-09 06:05:54.159011: Epoch time: 101.15 s +2026-04-09 06:05:55.244013: +2026-04-09 06:05:55.245659: Epoch 496 +2026-04-09 06:05:55.247041: Current learning rate: 0.0054 +2026-04-09 06:07:36.261294: train_loss -0.1934 +2026-04-09 06:07:36.267848: val_loss -0.161 +2026-04-09 06:07:36.269514: Pseudo dice [0.5176, 0.5381, 0.767, 0.2313, 0.4183, 0.7824, 0.8293] +2026-04-09 06:07:36.272118: Epoch time: 101.02 s +2026-04-09 06:07:37.369458: +2026-04-09 06:07:37.371370: Epoch 497 +2026-04-09 06:07:37.373121: Current learning rate: 0.00539 +2026-04-09 06:09:18.976734: train_loss -0.1747 +2026-04-09 06:09:18.981537: val_loss -0.0537 +2026-04-09 06:09:18.983090: Pseudo dice [0.6326, 0.0966, 0.3498, 0.0171, 0.4218, 0.6746, 0.7561] +2026-04-09 06:09:18.985297: Epoch time: 101.61 s +2026-04-09 06:09:20.079220: +2026-04-09 06:09:20.080605: Epoch 498 +2026-04-09 06:09:20.082335: Current learning rate: 0.00538 +2026-04-09 06:11:01.271875: train_loss -0.179 +2026-04-09 06:11:01.279813: val_loss -0.1082 +2026-04-09 06:11:01.281761: Pseudo dice [0.8034, 0.5218, 0.6487, 0.0401, 0.4668, 0.7647, 0.7854] +2026-04-09 06:11:01.284767: Epoch time: 101.2 s 
+2026-04-09 06:11:02.373760: +2026-04-09 06:11:02.375439: Epoch 499 +2026-04-09 06:11:02.377225: Current learning rate: 0.00537 +2026-04-09 06:12:44.598232: train_loss -0.1975 +2026-04-09 06:12:44.604615: val_loss -0.1327 +2026-04-09 06:12:44.608158: Pseudo dice [0.7613, 0.1008, 0.7123, 0.0247, 0.1709, 0.8757, 0.8035] +2026-04-09 06:12:44.610548: Epoch time: 102.23 s +2026-04-09 06:12:47.330721: +2026-04-09 06:12:47.332123: Epoch 500 +2026-04-09 06:12:47.334191: Current learning rate: 0.00536 +2026-04-09 06:14:30.043987: train_loss -0.1882 +2026-04-09 06:14:30.050066: val_loss -0.1233 +2026-04-09 06:14:30.053220: Pseudo dice [0.7194, 0.4272, 0.5677, 0.255, 0.4397, 0.8209, 0.4756] +2026-04-09 06:14:30.055865: Epoch time: 102.72 s +2026-04-09 06:14:31.119338: +2026-04-09 06:14:31.121339: Epoch 501 +2026-04-09 06:14:31.123432: Current learning rate: 0.00535 +2026-04-09 06:16:12.519139: train_loss -0.2041 +2026-04-09 06:16:12.524905: val_loss -0.11 +2026-04-09 06:16:12.527432: Pseudo dice [0.568, 0.5135, 0.5464, 0.0253, 0.1771, 0.7915, 0.461] +2026-04-09 06:16:12.531960: Epoch time: 101.4 s +2026-04-09 06:16:13.603967: +2026-04-09 06:16:13.605691: Epoch 502 +2026-04-09 06:16:13.607911: Current learning rate: 0.00534 +2026-04-09 06:17:54.855469: train_loss -0.1804 +2026-04-09 06:17:54.861262: val_loss -0.1431 +2026-04-09 06:17:54.863289: Pseudo dice [0.4349, 0.1391, 0.7088, 0.2704, 0.5436, 0.6239, 0.8293] +2026-04-09 06:17:54.865911: Epoch time: 101.25 s +2026-04-09 06:17:57.130524: +2026-04-09 06:17:57.134248: Epoch 503 +2026-04-09 06:17:57.136366: Current learning rate: 0.00533 +2026-04-09 06:19:39.665205: train_loss -0.1891 +2026-04-09 06:19:39.672078: val_loss -0.1427 +2026-04-09 06:19:39.676148: Pseudo dice [0.7276, 0.6521, 0.6329, 0.2556, 0.295, 0.7144, 0.7914] +2026-04-09 06:19:39.687568: Epoch time: 102.54 s +2026-04-09 06:19:40.794499: +2026-04-09 06:19:40.796982: Epoch 504 +2026-04-09 06:19:40.800484: Current learning rate: 0.00532 +2026-04-09 06:21:22.667282: 
train_loss -0.1878 +2026-04-09 06:21:22.673870: val_loss -0.1684 +2026-04-09 06:21:22.676081: Pseudo dice [0.8442, 0.4397, 0.7745, 0.3895, 0.4669, 0.7013, 0.8441] +2026-04-09 06:21:22.678259: Epoch time: 101.88 s +2026-04-09 06:21:23.760399: +2026-04-09 06:21:23.762387: Epoch 505 +2026-04-09 06:21:23.763816: Current learning rate: 0.00531 +2026-04-09 06:23:04.747152: train_loss -0.1807 +2026-04-09 06:23:04.753489: val_loss -0.1072 +2026-04-09 06:23:04.758357: Pseudo dice [0.8034, 0.2143, 0.6281, 0.0855, 0.5629, 0.6406, 0.6638] +2026-04-09 06:23:04.761758: Epoch time: 100.99 s +2026-04-09 06:23:05.837431: +2026-04-09 06:23:05.839128: Epoch 506 +2026-04-09 06:23:05.840803: Current learning rate: 0.0053 +2026-04-09 06:24:46.834180: train_loss -0.182 +2026-04-09 06:24:46.840742: val_loss -0.1595 +2026-04-09 06:24:46.843403: Pseudo dice [0.4844, 0.2819, 0.7592, 0.0328, 0.3712, 0.6102, 0.8104] +2026-04-09 06:24:46.846394: Epoch time: 101.0 s +2026-04-09 06:24:47.936207: +2026-04-09 06:24:47.938667: Epoch 507 +2026-04-09 06:24:47.940592: Current learning rate: 0.00529 +2026-04-09 06:26:30.120079: train_loss -0.1848 +2026-04-09 06:26:30.127985: val_loss -0.1413 +2026-04-09 06:26:30.130230: Pseudo dice [0.7737, 0.1631, 0.67, 0.4805, 0.1751, 0.7854, 0.7888] +2026-04-09 06:26:30.132447: Epoch time: 102.19 s +2026-04-09 06:26:31.209112: +2026-04-09 06:26:31.211748: Epoch 508 +2026-04-09 06:26:31.213598: Current learning rate: 0.00528 +2026-04-09 06:28:12.365796: train_loss -0.1811 +2026-04-09 06:28:12.370958: val_loss -0.1686 +2026-04-09 06:28:12.372925: Pseudo dice [0.7845, 0.4412, 0.5784, 0.0, 0.3866, 0.8557, 0.8536] +2026-04-09 06:28:12.375202: Epoch time: 101.16 s +2026-04-09 06:28:13.448043: +2026-04-09 06:28:13.449525: Epoch 509 +2026-04-09 06:28:13.450935: Current learning rate: 0.00527 +2026-04-09 06:29:55.009127: train_loss -0.1942 +2026-04-09 06:29:55.015273: val_loss -0.1286 +2026-04-09 06:29:55.017904: Pseudo dice [0.727, 0.6052, 0.7332, 0.1264, 0.3134, 0.743, 
0.6835] +2026-04-09 06:29:55.020184: Epoch time: 101.56 s +2026-04-09 06:29:56.139165: +2026-04-09 06:29:56.142384: Epoch 510 +2026-04-09 06:29:56.145642: Current learning rate: 0.00526 +2026-04-09 06:31:37.545331: train_loss -0.203 +2026-04-09 06:31:37.550343: val_loss -0.1441 +2026-04-09 06:31:37.551895: Pseudo dice [0.5091, 0.6417, 0.7517, 0.352, 0.407, 0.7306, 0.5987] +2026-04-09 06:31:37.554105: Epoch time: 101.41 s +2026-04-09 06:31:38.631378: +2026-04-09 06:31:38.632977: Epoch 511 +2026-04-09 06:31:38.635078: Current learning rate: 0.00525 +2026-04-09 06:33:19.697490: train_loss -0.188 +2026-04-09 06:33:19.702362: val_loss -0.1477 +2026-04-09 06:33:19.705904: Pseudo dice [0.7024, 0.4504, 0.7378, 0.4636, 0.3217, 0.7305, 0.6711] +2026-04-09 06:33:19.709276: Epoch time: 101.07 s +2026-04-09 06:33:20.791251: +2026-04-09 06:33:20.793213: Epoch 512 +2026-04-09 06:33:20.798403: Current learning rate: 0.00524 +2026-04-09 06:35:01.954666: train_loss -0.1887 +2026-04-09 06:35:01.960103: val_loss -0.1037 +2026-04-09 06:35:01.962049: Pseudo dice [0.7984, 0.376, 0.7313, 0.0006, 0.5419, 0.8085, 0.4547] +2026-04-09 06:35:01.963847: Epoch time: 101.17 s +2026-04-09 06:35:03.041664: +2026-04-09 06:35:03.043289: Epoch 513 +2026-04-09 06:35:03.044751: Current learning rate: 0.00523 +2026-04-09 06:36:43.995280: train_loss -0.1992 +2026-04-09 06:36:44.000186: val_loss -0.1779 +2026-04-09 06:36:44.001848: Pseudo dice [0.7844, 0.6779, 0.8167, 0.8395, 0.4555, 0.8422, 0.857] +2026-04-09 06:36:44.003799: Epoch time: 100.96 s +2026-04-09 06:36:44.005298: Yayy! 
New best EMA pseudo Dice: 0.5619 +2026-04-09 06:36:46.702391: +2026-04-09 06:36:46.704133: Epoch 514 +2026-04-09 06:36:46.705652: Current learning rate: 0.00522 +2026-04-09 06:38:27.622342: train_loss -0.2032 +2026-04-09 06:38:27.627502: val_loss -0.164 +2026-04-09 06:38:27.629375: Pseudo dice [0.4908, 0.3932, 0.8109, 0.2085, 0.4947, 0.7369, 0.8345] +2026-04-09 06:38:27.631485: Epoch time: 100.92 s +2026-04-09 06:38:27.633401: Yayy! New best EMA pseudo Dice: 0.5624 +2026-04-09 06:38:30.265667: +2026-04-09 06:38:30.267508: Epoch 515 +2026-04-09 06:38:30.269338: Current learning rate: 0.00521 +2026-04-09 06:40:11.666857: train_loss -0.2 +2026-04-09 06:40:11.672561: val_loss -0.1279 +2026-04-09 06:40:11.674890: Pseudo dice [0.5953, 0.2532, 0.7012, 0.0699, 0.3036, 0.645, 0.8504] +2026-04-09 06:40:11.677119: Epoch time: 101.4 s +2026-04-09 06:40:12.760758: +2026-04-09 06:40:12.763248: Epoch 516 +2026-04-09 06:40:12.765160: Current learning rate: 0.0052 +2026-04-09 06:41:54.594870: train_loss -0.1902 +2026-04-09 06:41:54.603696: val_loss -0.1589 +2026-04-09 06:41:54.606723: Pseudo dice [0.7353, 0.412, 0.6577, 0.0017, 0.362, 0.747, 0.6989] +2026-04-09 06:41:54.609419: Epoch time: 101.84 s +2026-04-09 06:41:55.707580: +2026-04-09 06:41:55.711216: Epoch 517 +2026-04-09 06:41:55.715603: Current learning rate: 0.00519 +2026-04-09 06:43:37.170673: train_loss -0.1951 +2026-04-09 06:43:37.178093: val_loss -0.1159 +2026-04-09 06:43:37.180015: Pseudo dice [0.5816, 0.5519, 0.7783, 0.0006, 0.4432, 0.6777, 0.8396] +2026-04-09 06:43:37.182074: Epoch time: 101.47 s +2026-04-09 06:43:38.256542: +2026-04-09 06:43:38.257966: Epoch 518 +2026-04-09 06:43:38.259902: Current learning rate: 0.00518 +2026-04-09 06:45:20.318944: train_loss -0.1998 +2026-04-09 06:45:20.327828: val_loss -0.127 +2026-04-09 06:45:20.329671: Pseudo dice [0.4276, 0.3559, 0.678, 0.0042, 0.5261, 0.8888, 0.539] +2026-04-09 06:45:20.333962: Epoch time: 102.07 s +2026-04-09 06:45:21.409543: +2026-04-09 06:45:21.411717: 
Epoch 519 +2026-04-09 06:45:21.413220: Current learning rate: 0.00518 +2026-04-09 06:47:02.496218: train_loss -0.1953 +2026-04-09 06:47:02.503191: val_loss -0.1491 +2026-04-09 06:47:02.504778: Pseudo dice [0.569, 0.0222, 0.7589, 0.0133, 0.5174, 0.7709, 0.846] +2026-04-09 06:47:02.507211: Epoch time: 101.09 s +2026-04-09 06:47:03.567601: +2026-04-09 06:47:03.569092: Epoch 520 +2026-04-09 06:47:03.570487: Current learning rate: 0.00517 +2026-04-09 06:48:44.685960: train_loss -0.1886 +2026-04-09 06:48:44.695653: val_loss -0.1527 +2026-04-09 06:48:44.697682: Pseudo dice [0.7526, 0.4647, 0.7297, 0.4652, 0.3587, 0.8876, 0.8005] +2026-04-09 06:48:44.699750: Epoch time: 101.12 s +2026-04-09 06:48:45.783303: +2026-04-09 06:48:45.784843: Epoch 521 +2026-04-09 06:48:45.786884: Current learning rate: 0.00516 +2026-04-09 06:50:27.915789: train_loss -0.2186 +2026-04-09 06:50:27.924129: val_loss -0.1904 +2026-04-09 06:50:27.926991: Pseudo dice [0.5964, 0.78, 0.7326, 0.8428, 0.4135, 0.6006, 0.8438] +2026-04-09 06:50:27.929796: Epoch time: 102.14 s +2026-04-09 06:50:27.931543: Yayy! 
New best EMA pseudo Dice: 0.5639 +2026-04-09 06:50:30.648195: +2026-04-09 06:50:30.650242: Epoch 522 +2026-04-09 06:50:30.651787: Current learning rate: 0.00515 +2026-04-09 06:52:11.720496: train_loss -0.2577 +2026-04-09 06:52:11.727672: val_loss -0.1592 +2026-04-09 06:52:11.729731: Pseudo dice [0.3187, 0.6421, 0.7025, 0.0562, 0.2496, 0.5189, 0.8589] +2026-04-09 06:52:11.732380: Epoch time: 101.08 s +2026-04-09 06:52:13.944067: +2026-04-09 06:52:13.945921: Epoch 523 +2026-04-09 06:52:13.948008: Current learning rate: 0.00514 +2026-04-09 06:53:55.945969: train_loss -0.3029 +2026-04-09 06:53:55.953995: val_loss -0.276 +2026-04-09 06:53:55.955423: Pseudo dice [0.7707, 0.0868, 0.7372, 0.0389, 0.3248, 0.8062, 0.7627] +2026-04-09 06:53:55.957377: Epoch time: 102.0 s +2026-04-09 06:53:57.051158: +2026-04-09 06:53:57.053445: Epoch 524 +2026-04-09 06:53:57.055294: Current learning rate: 0.00513 +2026-04-09 06:55:38.652579: train_loss -0.2909 +2026-04-09 06:55:38.662726: val_loss -0.2372 +2026-04-09 06:55:38.665450: Pseudo dice [0.3058, 0.5908, 0.6582, 0.0418, 0.4156, 0.8097, 0.5796] +2026-04-09 06:55:38.667680: Epoch time: 101.6 s +2026-04-09 06:55:39.737450: +2026-04-09 06:55:39.739462: Epoch 525 +2026-04-09 06:55:39.741940: Current learning rate: 0.00512 +2026-04-09 06:57:21.448481: train_loss -0.282 +2026-04-09 06:57:21.457226: val_loss -0.2134 +2026-04-09 06:57:21.459299: Pseudo dice [0.659, 0.0768, 0.5911, 0.0079, 0.2701, 0.7285, 0.7991] +2026-04-09 06:57:21.461802: Epoch time: 101.71 s +2026-04-09 06:57:22.524327: +2026-04-09 06:57:22.528182: Epoch 526 +2026-04-09 06:57:22.532094: Current learning rate: 0.00511 +2026-04-09 06:59:03.864495: train_loss -0.2874 +2026-04-09 06:59:03.875706: val_loss -0.2134 +2026-04-09 06:59:03.878933: Pseudo dice [0.512, 0.6819, 0.5967, 0.0017, 0.2761, 0.5282, 0.7308] +2026-04-09 06:59:03.883816: Epoch time: 101.34 s +2026-04-09 06:59:04.957818: +2026-04-09 06:59:04.959682: Epoch 527 +2026-04-09 06:59:04.961345: Current learning rate: 
0.0051 +2026-04-09 07:00:46.447045: train_loss -0.2809 +2026-04-09 07:00:46.454909: val_loss -0.2283 +2026-04-09 07:00:46.456976: Pseudo dice [0.1205, 0.3047, 0.6803, 0.7123, 0.286, 0.5833, 0.5055] +2026-04-09 07:00:46.460570: Epoch time: 101.49 s +2026-04-09 07:00:47.559499: +2026-04-09 07:00:47.561617: Epoch 528 +2026-04-09 07:00:47.563055: Current learning rate: 0.00509 +2026-04-09 07:02:30.232055: train_loss -0.2729 +2026-04-09 07:02:30.247251: val_loss -0.2126 +2026-04-09 07:02:30.250790: Pseudo dice [0.6266, 0.5907, 0.5469, 0.0003, 0.4702, 0.6825, 0.6261] +2026-04-09 07:02:30.254198: Epoch time: 102.68 s +2026-04-09 07:02:31.360596: +2026-04-09 07:02:31.363200: Epoch 529 +2026-04-09 07:02:31.366040: Current learning rate: 0.00508 +2026-04-09 07:04:12.512769: train_loss -0.2788 +2026-04-09 07:04:12.519247: val_loss -0.1975 +2026-04-09 07:04:12.521285: Pseudo dice [0.7247, 0.2852, 0.4203, 0.0, 0.368, 0.8103, 0.8255] +2026-04-09 07:04:12.524075: Epoch time: 101.16 s +2026-04-09 07:04:13.606766: +2026-04-09 07:04:13.618741: Epoch 530 +2026-04-09 07:04:13.620225: Current learning rate: 0.00507 +2026-04-09 07:05:55.222480: train_loss -0.2892 +2026-04-09 07:05:55.232514: val_loss -0.2455 +2026-04-09 07:05:55.235283: Pseudo dice [0.6912, 0.308, 0.6079, 0.0008, 0.4206, 0.2306, 0.7572] +2026-04-09 07:05:55.237628: Epoch time: 101.62 s +2026-04-09 07:05:56.309748: +2026-04-09 07:05:56.312432: Epoch 531 +2026-04-09 07:05:56.314726: Current learning rate: 0.00506 +2026-04-09 07:07:37.200341: train_loss -0.2818 +2026-04-09 07:07:37.207502: val_loss -0.1678 +2026-04-09 07:07:37.209448: Pseudo dice [0.7178, 0.0921, 0.6133, 0.0, 0.398, 0.8381, 0.663] +2026-04-09 07:07:37.211497: Epoch time: 100.89 s +2026-04-09 07:07:38.292410: +2026-04-09 07:07:38.294176: Epoch 532 +2026-04-09 07:07:38.296395: Current learning rate: 0.00505 +2026-04-09 07:09:19.297736: train_loss -0.2928 +2026-04-09 07:09:19.303789: val_loss -0.2566 +2026-04-09 07:09:19.305770: Pseudo dice [0.8024, 0.377, 
0.6673, 0.6511, 0.2983, 0.534, 0.7496] +2026-04-09 07:09:19.309645: Epoch time: 101.01 s +2026-04-09 07:09:20.391771: +2026-04-09 07:09:20.393723: Epoch 533 +2026-04-09 07:09:20.395781: Current learning rate: 0.00504 +2026-04-09 07:11:01.531650: train_loss -0.2881 +2026-04-09 07:11:01.538232: val_loss -0.2333 +2026-04-09 07:11:01.540321: Pseudo dice [0.4837, 0.2284, 0.6167, 0.0045, 0.2558, 0.2335, 0.8502] +2026-04-09 07:11:01.542684: Epoch time: 101.14 s +2026-04-09 07:11:02.629715: +2026-04-09 07:11:02.631251: Epoch 534 +2026-04-09 07:11:02.633117: Current learning rate: 0.00503 +2026-04-09 07:12:44.524616: train_loss -0.2939 +2026-04-09 07:12:44.530812: val_loss -0.2642 +2026-04-09 07:12:44.533087: Pseudo dice [0.667, 0.1409, 0.6647, 0.0514, 0.3334, 0.58, 0.6963] +2026-04-09 07:12:44.536338: Epoch time: 101.9 s +2026-04-09 07:12:45.620614: +2026-04-09 07:12:45.622502: Epoch 535 +2026-04-09 07:12:45.624348: Current learning rate: 0.00502 +2026-04-09 07:14:26.701426: train_loss -0.2905 +2026-04-09 07:14:26.706291: val_loss -0.1877 +2026-04-09 07:14:26.707964: Pseudo dice [0.4915, 0.123, 0.7703, 0.0076, 0.2038, 0.8829, 0.6416] +2026-04-09 07:14:26.710494: Epoch time: 101.08 s +2026-04-09 07:14:27.816141: +2026-04-09 07:14:27.817886: Epoch 536 +2026-04-09 07:14:27.819728: Current learning rate: 0.00501 +2026-04-09 07:16:09.788510: train_loss -0.293 +2026-04-09 07:16:09.793668: val_loss -0.298 +2026-04-09 07:16:09.795610: Pseudo dice [0.6214, 0.0904, 0.7195, 0.1001, 0.3806, 0.8429, 0.7901] +2026-04-09 07:16:09.797612: Epoch time: 101.98 s +2026-04-09 07:16:10.865857: +2026-04-09 07:16:10.867933: Epoch 537 +2026-04-09 07:16:10.869827: Current learning rate: 0.005 +2026-04-09 07:17:52.430261: train_loss -0.3009 +2026-04-09 07:17:52.436481: val_loss -0.2925 +2026-04-09 07:17:52.438694: Pseudo dice [0.8031, 0.4678, 0.7599, 0.4798, 0.496, 0.7449, 0.8555] +2026-04-09 07:17:52.441226: Epoch time: 101.57 s +2026-04-09 07:17:53.539940: +2026-04-09 07:17:53.543307: Epoch 538 
+2026-04-09 07:17:53.546946: Current learning rate: 0.00499 +2026-04-09 07:19:35.230336: train_loss -0.2974 +2026-04-09 07:19:35.235601: val_loss -0.1643 +2026-04-09 07:19:35.238275: Pseudo dice [0.7133, 0.042, 0.7529, 0.039, 0.2644, 0.8009, 0.5323] +2026-04-09 07:19:35.240580: Epoch time: 101.69 s +2026-04-09 07:19:36.310377: +2026-04-09 07:19:36.311841: Epoch 539 +2026-04-09 07:19:36.313426: Current learning rate: 0.00498 +2026-04-09 07:21:17.276097: train_loss -0.2898 +2026-04-09 07:21:17.281176: val_loss -0.2707 +2026-04-09 07:21:17.282572: Pseudo dice [0.1369, 0.1089, 0.7194, 0.0074, 0.6496, 0.7781, 0.5413] +2026-04-09 07:21:17.284440: Epoch time: 100.97 s +2026-04-09 07:21:18.493802: +2026-04-09 07:21:18.495157: Epoch 540 +2026-04-09 07:21:18.496438: Current learning rate: 0.00497 +2026-04-09 07:22:59.681576: train_loss -0.3016 +2026-04-09 07:22:59.685657: val_loss -0.2508 +2026-04-09 07:22:59.687161: Pseudo dice [0.7289, 0.0392, 0.5914, 0.8065, 0.2197, 0.6709, 0.7947] +2026-04-09 07:22:59.688961: Epoch time: 101.19 s +2026-04-09 07:23:00.750092: +2026-04-09 07:23:00.751629: Epoch 541 +2026-04-09 07:23:00.753017: Current learning rate: 0.00496 +2026-04-09 07:24:42.064600: train_loss -0.2987 +2026-04-09 07:24:42.069706: val_loss -0.2722 +2026-04-09 07:24:42.071591: Pseudo dice [0.3675, 0.5079, 0.8184, 0.1207, 0.3888, 0.7487, 0.8423] +2026-04-09 07:24:42.073775: Epoch time: 101.32 s +2026-04-09 07:24:43.160573: +2026-04-09 07:24:43.162445: Epoch 542 +2026-04-09 07:24:43.164206: Current learning rate: 0.00495 +2026-04-09 07:26:24.120993: train_loss -0.3122 +2026-04-09 07:26:24.127569: val_loss -0.1968 +2026-04-09 07:26:24.129562: Pseudo dice [0.0508, 0.2023, 0.635, 0.0, 0.3109, 0.3735, 0.7641] +2026-04-09 07:26:24.131827: Epoch time: 100.96 s +2026-04-09 07:26:25.225798: +2026-04-09 07:26:25.228794: Epoch 543 +2026-04-09 07:26:25.231639: Current learning rate: 0.00494 +2026-04-09 07:28:07.374847: train_loss -0.2846 +2026-04-09 07:28:07.381044: val_loss -0.2445 
+2026-04-09 07:28:07.382825: Pseudo dice [0.5265, 0.3629, 0.6884, 0.0369, 0.2623, 0.5221, 0.7298] +2026-04-09 07:28:07.387579: Epoch time: 102.15 s +2026-04-09 07:28:09.636627: +2026-04-09 07:28:09.638471: Epoch 544 +2026-04-09 07:28:09.639859: Current learning rate: 0.00493 +2026-04-09 07:29:50.793208: train_loss -0.3005 +2026-04-09 07:29:50.800267: val_loss -0.2671 +2026-04-09 07:29:50.802236: Pseudo dice [0.4359, 0.4442, 0.6105, 0.562, 0.2407, 0.8689, 0.8607] +2026-04-09 07:29:50.804819: Epoch time: 101.16 s +2026-04-09 07:29:51.906753: +2026-04-09 07:29:51.908970: Epoch 545 +2026-04-09 07:29:51.910899: Current learning rate: 0.00492 +2026-04-09 07:31:33.232552: train_loss -0.2742 +2026-04-09 07:31:33.237808: val_loss -0.1666 +2026-04-09 07:31:33.239592: Pseudo dice [0.4394, 0.6568, 0.2376, 0.033, 0.3397, 0.5889, 0.5932] +2026-04-09 07:31:33.242100: Epoch time: 101.33 s +2026-04-09 07:31:34.336452: +2026-04-09 07:31:34.338051: Epoch 546 +2026-04-09 07:31:34.339569: Current learning rate: 0.00491 +2026-04-09 07:33:15.290102: train_loss -0.2743 +2026-04-09 07:33:15.296355: val_loss -0.2411 +2026-04-09 07:33:15.299577: Pseudo dice [0.3519, 0.0502, 0.6638, 0.0516, 0.5229, 0.6894, 0.6276] +2026-04-09 07:33:15.301713: Epoch time: 100.96 s +2026-04-09 07:33:16.401626: +2026-04-09 07:33:16.403478: Epoch 547 +2026-04-09 07:33:16.405464: Current learning rate: 0.0049 +2026-04-09 07:34:57.320849: train_loss -0.2853 +2026-04-09 07:34:57.325464: val_loss -0.2588 +2026-04-09 07:34:57.327777: Pseudo dice [0.1158, 0.2978, 0.6909, 0.0412, 0.2851, 0.8347, 0.7035] +2026-04-09 07:34:57.329834: Epoch time: 100.92 s +2026-04-09 07:34:58.408797: +2026-04-09 07:34:58.410377: Epoch 548 +2026-04-09 07:34:58.411882: Current learning rate: 0.00489 +2026-04-09 07:36:39.327742: train_loss -0.284 +2026-04-09 07:36:39.332243: val_loss -0.2607 +2026-04-09 07:36:39.333758: Pseudo dice [0.8201, 0.4176, 0.5321, 0.3744, 0.5388, 0.7139, 0.6735] +2026-04-09 07:36:39.335961: Epoch time: 100.92 s 
+2026-04-09 07:36:40.408423: +2026-04-09 07:36:40.409936: Epoch 549 +2026-04-09 07:36:40.411548: Current learning rate: 0.00488 +2026-04-09 07:38:21.321333: train_loss -0.2827 +2026-04-09 07:38:21.326340: val_loss -0.1968 +2026-04-09 07:38:21.328368: Pseudo dice [0.6156, 0.4998, 0.4206, 0.4059, 0.1927, 0.5679, 0.6113] +2026-04-09 07:38:21.330611: Epoch time: 100.92 s +2026-04-09 07:38:24.142113: +2026-04-09 07:38:24.144535: Epoch 550 +2026-04-09 07:38:24.146010: Current learning rate: 0.00487 +2026-04-09 07:40:05.033799: train_loss -0.2884 +2026-04-09 07:40:05.041381: val_loss -0.2478 +2026-04-09 07:40:05.044174: Pseudo dice [0.5755, 0.219, 0.6134, 0.4788, 0.4452, 0.3792, 0.8001] +2026-04-09 07:40:05.046781: Epoch time: 100.89 s +2026-04-09 07:40:06.137861: +2026-04-09 07:40:06.139572: Epoch 551 +2026-04-09 07:40:06.141050: Current learning rate: 0.00486 +2026-04-09 07:41:47.478236: train_loss -0.2718 +2026-04-09 07:41:47.484045: val_loss -0.2506 +2026-04-09 07:41:47.485551: Pseudo dice [0.4531, 0.4858, 0.6084, 0.0053, 0.3718, 0.5082, 0.8066] +2026-04-09 07:41:47.487843: Epoch time: 101.34 s +2026-04-09 07:41:48.567810: +2026-04-09 07:41:48.570074: Epoch 552 +2026-04-09 07:41:48.571754: Current learning rate: 0.00485 +2026-04-09 07:43:29.630501: train_loss -0.3088 +2026-04-09 07:43:29.635031: val_loss -0.2691 +2026-04-09 07:43:29.638191: Pseudo dice [0.6517, 0.3836, 0.6003, 0.0498, 0.3438, 0.8711, 0.5453] +2026-04-09 07:43:29.640201: Epoch time: 101.07 s +2026-04-09 07:43:30.728261: +2026-04-09 07:43:30.730227: Epoch 553 +2026-04-09 07:43:30.731754: Current learning rate: 0.00484 +2026-04-09 07:45:12.057577: train_loss -0.2918 +2026-04-09 07:45:12.063545: val_loss -0.2616 +2026-04-09 07:45:12.065555: Pseudo dice [0.8247, 0.8034, 0.6418, 0.0012, 0.4462, 0.7787, 0.5057] +2026-04-09 07:45:12.068650: Epoch time: 101.33 s +2026-04-09 07:45:13.160053: +2026-04-09 07:45:13.162144: Epoch 554 +2026-04-09 07:45:13.163553: Current learning rate: 0.00484 +2026-04-09 
07:46:55.607676: train_loss -0.3164 +2026-04-09 07:46:55.632454: val_loss -0.2755 +2026-04-09 07:46:55.634686: Pseudo dice [0.6558, 0.2941, 0.7511, 0.7949, 0.37, 0.6719, 0.7721] +2026-04-09 07:46:55.636953: Epoch time: 102.45 s +2026-04-09 07:46:56.762594: +2026-04-09 07:46:56.765030: Epoch 555 +2026-04-09 07:46:56.766606: Current learning rate: 0.00483 +2026-04-09 07:48:37.768311: train_loss -0.3043 +2026-04-09 07:48:37.775166: val_loss -0.2686 +2026-04-09 07:48:37.778488: Pseudo dice [0.2397, 0.1797, 0.8256, 0.071, 0.3871, 0.8719, 0.7876] +2026-04-09 07:48:37.780363: Epoch time: 101.01 s +2026-04-09 07:48:38.862119: +2026-04-09 07:48:38.863537: Epoch 556 +2026-04-09 07:48:38.865123: Current learning rate: 0.00482 +2026-04-09 07:50:19.864564: train_loss -0.2735 +2026-04-09 07:50:19.869530: val_loss -0.2184 +2026-04-09 07:50:19.871573: Pseudo dice [0.4599, 0.5595, 0.6697, 0.0004, 0.2397, 0.8379, 0.6083] +2026-04-09 07:50:19.873604: Epoch time: 101.01 s +2026-04-09 07:50:20.959920: +2026-04-09 07:50:20.962603: Epoch 557 +2026-04-09 07:50:20.965052: Current learning rate: 0.00481 +2026-04-09 07:52:02.600865: train_loss -0.2924 +2026-04-09 07:52:02.606185: val_loss -0.1045 +2026-04-09 07:52:02.607869: Pseudo dice [0.7237, 0.215, 0.402, 0.0393, 0.3889, 0.89, 0.7769] +2026-04-09 07:52:02.609833: Epoch time: 101.64 s +2026-04-09 07:52:03.701342: +2026-04-09 07:52:03.703253: Epoch 558 +2026-04-09 07:52:03.705249: Current learning rate: 0.0048 +2026-04-09 07:53:44.807757: train_loss -0.3156 +2026-04-09 07:53:44.823273: val_loss -0.2844 +2026-04-09 07:53:44.828421: Pseudo dice [0.5376, 0.6039, 0.7618, 0.6877, 0.2106, 0.7064, 0.7727] +2026-04-09 07:53:44.833930: Epoch time: 101.11 s +2026-04-09 07:53:45.962767: +2026-04-09 07:53:45.968828: Epoch 559 +2026-04-09 07:53:45.974605: Current learning rate: 0.00479 +2026-04-09 07:55:27.364414: train_loss -0.3247 +2026-04-09 07:55:27.370283: val_loss -0.2777 +2026-04-09 07:55:27.372816: Pseudo dice [0.6521, 0.2735, 0.6074, 0.1469, 
0.4384, 0.7734, 0.76] +2026-04-09 07:55:27.375279: Epoch time: 101.4 s +2026-04-09 07:55:28.449500: +2026-04-09 07:55:28.451524: Epoch 560 +2026-04-09 07:55:28.453363: Current learning rate: 0.00478 +2026-04-09 07:57:09.834338: train_loss -0.3157 +2026-04-09 07:57:09.850469: val_loss -0.241 +2026-04-09 07:57:09.854901: Pseudo dice [0.5508, 0.3907, 0.7126, 0.0662, 0.3749, 0.6108, 0.8711] +2026-04-09 07:57:09.859843: Epoch time: 101.39 s +2026-04-09 07:57:10.973955: +2026-04-09 07:57:10.979261: Epoch 561 +2026-04-09 07:57:10.983636: Current learning rate: 0.00477 +2026-04-09 07:58:52.104817: train_loss -0.3068 +2026-04-09 07:58:52.121660: val_loss -0.2122 +2026-04-09 07:58:52.131747: Pseudo dice [0.7488, 0.215, 0.7112, 0.0708, 0.244, 0.8481, 0.4788] +2026-04-09 07:58:52.143018: Epoch time: 101.13 s +2026-04-09 07:58:53.214987: +2026-04-09 07:58:53.217026: Epoch 562 +2026-04-09 07:58:53.218369: Current learning rate: 0.00476 +2026-04-09 08:00:34.287838: train_loss -0.3049 +2026-04-09 08:00:34.293118: val_loss -0.2833 +2026-04-09 08:00:34.294808: Pseudo dice [0.3887, 0.1066, 0.6457, 0.7039, 0.4132, 0.8049, 0.8377] +2026-04-09 08:00:34.297683: Epoch time: 101.08 s +2026-04-09 08:00:35.387151: +2026-04-09 08:00:35.389161: Epoch 563 +2026-04-09 08:00:35.390872: Current learning rate: 0.00475 +2026-04-09 08:02:16.868911: train_loss -0.285 +2026-04-09 08:02:16.874640: val_loss -0.203 +2026-04-09 08:02:16.876387: Pseudo dice [0.7271, 0.1, 0.5216, 0.0554, 0.2555, 0.6796, 0.5226] +2026-04-09 08:02:16.878531: Epoch time: 101.48 s +2026-04-09 08:02:17.981606: +2026-04-09 08:02:17.984346: Epoch 564 +2026-04-09 08:02:17.987212: Current learning rate: 0.00474 +2026-04-09 08:04:00.903130: train_loss -0.2763 +2026-04-09 08:04:00.909151: val_loss -0.1867 +2026-04-09 08:04:00.910968: Pseudo dice [0.1417, 0.3679, 0.6118, 0.1442, 0.2737, 0.3662, 0.6757] +2026-04-09 08:04:00.913132: Epoch time: 102.92 s +2026-04-09 08:04:01.995691: +2026-04-09 08:04:01.997743: Epoch 565 +2026-04-09 
08:04:01.999750: Current learning rate: 0.00473 +2026-04-09 08:05:43.115191: train_loss -0.2981 +2026-04-09 08:05:43.122095: val_loss -0.2796 +2026-04-09 08:05:43.123837: Pseudo dice [0.8343, 0.5225, 0.6634, 0.1092, 0.3288, 0.6019, 0.6819] +2026-04-09 08:05:43.126464: Epoch time: 101.12 s +2026-04-09 08:05:44.195665: +2026-04-09 08:05:44.197994: Epoch 566 +2026-04-09 08:05:44.199533: Current learning rate: 0.00472 +2026-04-09 08:07:25.371507: train_loss -0.309 +2026-04-09 08:07:25.376482: val_loss -0.2631 +2026-04-09 08:07:25.378274: Pseudo dice [0.7788, 0.0894, 0.6753, 0.3282, 0.264, 0.5414, 0.7641] +2026-04-09 08:07:25.381061: Epoch time: 101.18 s +2026-04-09 08:07:26.461230: +2026-04-09 08:07:26.462847: Epoch 567 +2026-04-09 08:07:26.464370: Current learning rate: 0.00471 +2026-04-09 08:09:07.733780: train_loss -0.3032 +2026-04-09 08:09:07.748956: val_loss -0.2609 +2026-04-09 08:09:07.754140: Pseudo dice [0.4517, 0.2278, 0.7357, 0.4455, 0.3182, 0.8408, 0.8806] +2026-04-09 08:09:07.757906: Epoch time: 101.28 s +2026-04-09 08:09:08.843984: +2026-04-09 08:09:08.845519: Epoch 568 +2026-04-09 08:09:08.847375: Current learning rate: 0.0047 +2026-04-09 08:10:50.929795: train_loss -0.2968 +2026-04-09 08:10:50.935370: val_loss -0.2557 +2026-04-09 08:10:50.936897: Pseudo dice [0.509, 0.1981, 0.5449, 0.0956, 0.4082, 0.6078, 0.8339] +2026-04-09 08:10:50.939181: Epoch time: 102.09 s +2026-04-09 08:10:52.027250: +2026-04-09 08:10:52.028891: Epoch 569 +2026-04-09 08:10:52.030429: Current learning rate: 0.00469 +2026-04-09 08:12:33.979228: train_loss -0.3045 +2026-04-09 08:12:33.990461: val_loss -0.2273 +2026-04-09 08:12:33.993634: Pseudo dice [0.6741, 0.6095, 0.4423, 0.0144, 0.3321, 0.6833, 0.7994] +2026-04-09 08:12:33.997360: Epoch time: 101.96 s +2026-04-09 08:12:35.099295: +2026-04-09 08:12:35.102175: Epoch 570 +2026-04-09 08:12:35.104348: Current learning rate: 0.00468 +2026-04-09 08:14:16.457775: train_loss -0.3126 +2026-04-09 08:14:16.464082: val_loss -0.2875 +2026-04-09 
08:14:16.466749: Pseudo dice [0.1394, 0.2267, 0.8182, 0.8045, 0.1536, 0.6586, 0.8469] +2026-04-09 08:14:16.469383: Epoch time: 101.36 s +2026-04-09 08:14:17.559670: +2026-04-09 08:14:17.562201: Epoch 571 +2026-04-09 08:14:17.564211: Current learning rate: 0.00467 +2026-04-09 08:15:59.462684: train_loss -0.3037 +2026-04-09 08:15:59.467993: val_loss -0.2952 +2026-04-09 08:15:59.469392: Pseudo dice [0.723, 0.6499, 0.6271, 0.0026, 0.5829, 0.8391, 0.7144] +2026-04-09 08:15:59.472018: Epoch time: 101.91 s +2026-04-09 08:16:00.580949: +2026-04-09 08:16:00.582678: Epoch 572 +2026-04-09 08:16:00.584279: Current learning rate: 0.00466 +2026-04-09 08:17:42.317251: train_loss -0.291 +2026-04-09 08:17:42.324814: val_loss -0.1972 +2026-04-09 08:17:42.327491: Pseudo dice [0.3826, 0.38, 0.6285, 0.0543, 0.1749, 0.7045, 0.4788] +2026-04-09 08:17:42.329959: Epoch time: 101.74 s +2026-04-09 08:17:43.431628: +2026-04-09 08:17:43.434474: Epoch 573 +2026-04-09 08:17:43.436500: Current learning rate: 0.00465 +2026-04-09 08:19:24.946952: train_loss -0.3223 +2026-04-09 08:19:24.952153: val_loss -0.289 +2026-04-09 08:19:24.953951: Pseudo dice [0.592, 0.4497, 0.6748, 0.6866, 0.1106, 0.8126, 0.7386] +2026-04-09 08:19:24.957890: Epoch time: 101.52 s +2026-04-09 08:19:26.060003: +2026-04-09 08:19:26.061533: Epoch 574 +2026-04-09 08:19:26.063491: Current learning rate: 0.00464 +2026-04-09 08:21:07.547689: train_loss -0.3111 +2026-04-09 08:21:07.554664: val_loss -0.2663 +2026-04-09 08:21:07.556713: Pseudo dice [0.5791, 0.4916, 0.7094, 0.052, 0.2897, 0.7788, 0.6884] +2026-04-09 08:21:07.559082: Epoch time: 101.49 s +2026-04-09 08:21:08.681670: +2026-04-09 08:21:08.683271: Epoch 575 +2026-04-09 08:21:08.684681: Current learning rate: 0.00463 +2026-04-09 08:22:49.937171: train_loss -0.3187 +2026-04-09 08:22:49.943112: val_loss -0.2583 +2026-04-09 08:22:49.945053: Pseudo dice [0.3784, 0.527, 0.6358, 0.287, 0.3989, 0.7972, 0.4336] +2026-04-09 08:22:49.948789: Epoch time: 101.26 s +2026-04-09 
08:22:51.107949: +2026-04-09 08:22:51.113322: Epoch 576 +2026-04-09 08:22:51.115218: Current learning rate: 0.00462 +2026-04-09 08:24:32.157074: train_loss -0.3107 +2026-04-09 08:24:32.163862: val_loss -0.2941 +2026-04-09 08:24:32.172077: Pseudo dice [0.8094, 0.5405, 0.7596, 0.8301, 0.4793, 0.5445, 0.9157] +2026-04-09 08:24:32.177748: Epoch time: 101.05 s +2026-04-09 08:24:33.284328: +2026-04-09 08:24:33.287134: Epoch 577 +2026-04-09 08:24:33.289244: Current learning rate: 0.00461 +2026-04-09 08:26:14.585211: train_loss -0.3123 +2026-04-09 08:26:14.590060: val_loss -0.2947 +2026-04-09 08:26:14.591817: Pseudo dice [0.8506, 0.36, 0.6807, 0.3032, 0.4046, 0.8651, 0.7342] +2026-04-09 08:26:14.593814: Epoch time: 101.3 s +2026-04-09 08:26:15.709109: +2026-04-09 08:26:15.711298: Epoch 578 +2026-04-09 08:26:15.713172: Current learning rate: 0.0046 +2026-04-09 08:27:57.403110: train_loss -0.3168 +2026-04-09 08:27:57.408910: val_loss -0.2759 +2026-04-09 08:27:57.411022: Pseudo dice [0.7536, 0.6093, 0.6845, 0.0111, 0.2513, 0.8046, 0.8745] +2026-04-09 08:27:57.414068: Epoch time: 101.7 s +2026-04-09 08:27:58.548873: +2026-04-09 08:27:58.552252: Epoch 579 +2026-04-09 08:27:58.555448: Current learning rate: 0.00459 +2026-04-09 08:29:40.423869: train_loss -0.3095 +2026-04-09 08:29:40.429349: val_loss -0.2787 +2026-04-09 08:29:40.431885: Pseudo dice [0.6704, 0.5058, 0.7009, 0.0059, 0.4948, 0.7715, 0.8169] +2026-04-09 08:29:40.434196: Epoch time: 101.88 s +2026-04-09 08:29:41.539873: +2026-04-09 08:29:41.541650: Epoch 580 +2026-04-09 08:29:41.543423: Current learning rate: 0.00458 +2026-04-09 08:31:23.695063: train_loss -0.3172 +2026-04-09 08:31:23.704523: val_loss -0.2563 +2026-04-09 08:31:23.707052: Pseudo dice [0.875, 0.2357, 0.7136, 0.2102, 0.3068, 0.8165, 0.8655] +2026-04-09 08:31:23.712608: Epoch time: 102.16 s +2026-04-09 08:31:24.838796: +2026-04-09 08:31:24.840718: Epoch 581 +2026-04-09 08:31:24.842724: Current learning rate: 0.00457 +2026-04-09 08:33:06.850406: train_loss 
-0.3173 +2026-04-09 08:33:06.855517: val_loss -0.2716 +2026-04-09 08:33:06.857170: Pseudo dice [0.7867, 0.4675, 0.5821, 0.019, 0.4845, 0.7691, 0.6164] +2026-04-09 08:33:06.858898: Epoch time: 102.02 s +2026-04-09 08:33:07.956706: +2026-04-09 08:33:07.958326: Epoch 582 +2026-04-09 08:33:07.959949: Current learning rate: 0.00456 +2026-04-09 08:34:48.949284: train_loss -0.3104 +2026-04-09 08:34:48.954141: val_loss -0.25 +2026-04-09 08:34:48.956221: Pseudo dice [0.601, 0.1555, 0.6563, 0.1269, 0.4467, 0.5424, 0.8113] +2026-04-09 08:34:48.958500: Epoch time: 101.0 s +2026-04-09 08:34:50.065970: +2026-04-09 08:34:50.067592: Epoch 583 +2026-04-09 08:34:50.069031: Current learning rate: 0.00455 +2026-04-09 08:36:33.034089: train_loss -0.3036 +2026-04-09 08:36:33.039645: val_loss -0.2331 +2026-04-09 08:36:33.041242: Pseudo dice [0.8108, 0.5584, 0.5227, 0.0069, 0.3178, 0.6915, 0.5933] +2026-04-09 08:36:33.043343: Epoch time: 102.97 s +2026-04-09 08:36:34.176773: +2026-04-09 08:36:34.179574: Epoch 584 +2026-04-09 08:36:34.181364: Current learning rate: 0.00454 +2026-04-09 08:38:15.429796: train_loss -0.2808 +2026-04-09 08:38:15.434746: val_loss -0.2611 +2026-04-09 08:38:15.436609: Pseudo dice [0.7628, 0.1491, 0.7893, 0.3155, 0.3271, 0.7031, 0.7315] +2026-04-09 08:38:15.439064: Epoch time: 101.26 s +2026-04-09 08:38:17.756607: +2026-04-09 08:38:17.758103: Epoch 585 +2026-04-09 08:38:17.759465: Current learning rate: 0.00453 +2026-04-09 08:39:59.731436: train_loss -0.3098 +2026-04-09 08:39:59.737797: val_loss -0.2543 +2026-04-09 08:39:59.739949: Pseudo dice [0.6347, 0.571, 0.4967, 0.1738, 0.3832, 0.8403, 0.5067] +2026-04-09 08:39:59.743987: Epoch time: 101.98 s +2026-04-09 08:40:00.848959: +2026-04-09 08:40:00.850636: Epoch 586 +2026-04-09 08:40:00.852231: Current learning rate: 0.00452 +2026-04-09 08:41:41.940341: train_loss -0.2988 +2026-04-09 08:41:41.945003: val_loss -0.2562 +2026-04-09 08:41:41.946970: Pseudo dice [0.6382, 0.4539, 0.7505, 0.0394, 0.4559, 0.6637, 0.8483] 
+2026-04-09 08:41:41.949256: Epoch time: 101.09 s +2026-04-09 08:41:43.063803: +2026-04-09 08:41:43.065410: Epoch 587 +2026-04-09 08:41:43.066999: Current learning rate: 0.00451 +2026-04-09 08:43:24.572371: train_loss -0.2965 +2026-04-09 08:43:24.577466: val_loss -0.2189 +2026-04-09 08:43:24.579354: Pseudo dice [0.7639, 0.441, 0.6108, 0.044, 0.3748, 0.7405, 0.767] +2026-04-09 08:43:24.581391: Epoch time: 101.51 s +2026-04-09 08:43:25.696315: +2026-04-09 08:43:25.698512: Epoch 588 +2026-04-09 08:43:25.700280: Current learning rate: 0.0045 +2026-04-09 08:45:07.164553: train_loss -0.3163 +2026-04-09 08:45:07.170831: val_loss -0.228 +2026-04-09 08:45:07.172776: Pseudo dice [0.5647, 0.2809, 0.5972, 0.3047, 0.23, 0.4997, 0.6913] +2026-04-09 08:45:07.175370: Epoch time: 101.47 s +2026-04-09 08:45:08.315544: +2026-04-09 08:45:08.318128: Epoch 589 +2026-04-09 08:45:08.320388: Current learning rate: 0.00449 +2026-04-09 08:46:50.008002: train_loss -0.3083 +2026-04-09 08:46:50.013088: val_loss -0.2541 +2026-04-09 08:46:50.014958: Pseudo dice [0.6648, 0.3948, 0.5388, 0.0262, 0.2108, 0.7264, 0.8881] +2026-04-09 08:46:50.019099: Epoch time: 101.7 s +2026-04-09 08:46:51.127080: +2026-04-09 08:46:51.128814: Epoch 590 +2026-04-09 08:46:51.131847: Current learning rate: 0.00448 +2026-04-09 08:48:32.149922: train_loss -0.3255 +2026-04-09 08:48:32.157718: val_loss -0.3031 +2026-04-09 08:48:32.159822: Pseudo dice [0.8384, 0.5993, 0.7143, 0.6055, 0.5109, 0.8038, 0.8634] +2026-04-09 08:48:32.162606: Epoch time: 101.03 s +2026-04-09 08:48:33.275821: +2026-04-09 08:48:33.278015: Epoch 591 +2026-04-09 08:48:33.280266: Current learning rate: 0.00447 +2026-04-09 08:50:15.155730: train_loss -0.3046 +2026-04-09 08:50:15.160630: val_loss -0.2965 +2026-04-09 08:50:15.163283: Pseudo dice [0.724, 0.6458, 0.7551, 0.4536, 0.3958, 0.8274, 0.8243] +2026-04-09 08:50:15.165552: Epoch time: 101.88 s +2026-04-09 08:50:16.276987: +2026-04-09 08:50:16.281545: Epoch 592 +2026-04-09 08:50:16.283205: Current 
learning rate: 0.00446 +2026-04-09 08:51:58.156323: train_loss -0.3232 +2026-04-09 08:51:58.161164: val_loss -0.2606 +2026-04-09 08:51:58.163375: Pseudo dice [0.7665, 0.5844, 0.7016, 0.1647, 0.3588, 0.8636, 0.6885] +2026-04-09 08:51:58.165469: Epoch time: 101.88 s +2026-04-09 08:51:59.263728: +2026-04-09 08:51:59.265158: Epoch 593 +2026-04-09 08:51:59.266454: Current learning rate: 0.00445 +2026-04-09 08:53:40.434783: train_loss -0.326 +2026-04-09 08:53:40.440635: val_loss -0.2962 +2026-04-09 08:53:40.444209: Pseudo dice [0.1479, 0.5522, 0.7393, 0.0005, 0.2951, 0.7768, 0.7334] +2026-04-09 08:53:40.446719: Epoch time: 101.17 s +2026-04-09 08:53:41.544926: +2026-04-09 08:53:41.546376: Epoch 594 +2026-04-09 08:53:41.547828: Current learning rate: 0.00444 +2026-04-09 08:55:23.412043: train_loss -0.3378 +2026-04-09 08:55:23.418359: val_loss -0.2967 +2026-04-09 08:55:23.420545: Pseudo dice [0.8566, 0.5739, 0.7259, 0.0008, 0.2507, 0.7409, 0.7562] +2026-04-09 08:55:23.422891: Epoch time: 101.87 s +2026-04-09 08:55:24.547028: +2026-04-09 08:55:24.548733: Epoch 595 +2026-04-09 08:55:24.550476: Current learning rate: 0.00443 +2026-04-09 08:57:06.396721: train_loss -0.3315 +2026-04-09 08:57:06.401275: val_loss -0.2006 +2026-04-09 08:57:06.402671: Pseudo dice [0.7855, 0.4451, 0.77, 0.0396, 0.1608, 0.5397, 0.758] +2026-04-09 08:57:06.404667: Epoch time: 101.85 s +2026-04-09 08:57:07.525610: +2026-04-09 08:57:07.528506: Epoch 596 +2026-04-09 08:57:07.530229: Current learning rate: 0.00442 +2026-04-09 08:58:48.823261: train_loss -0.332 +2026-04-09 08:58:48.828055: val_loss -0.2551 +2026-04-09 08:58:48.829882: Pseudo dice [0.6847, 0.6722, 0.5893, 0.0223, 0.4305, 0.4755, 0.675] +2026-04-09 08:58:48.832136: Epoch time: 101.3 s +2026-04-09 08:58:49.966871: +2026-04-09 08:58:49.969214: Epoch 597 +2026-04-09 08:58:49.971262: Current learning rate: 0.00441 +2026-04-09 09:00:31.988197: train_loss -0.3211 +2026-04-09 09:00:31.995118: val_loss -0.2881 +2026-04-09 09:00:31.996881: Pseudo 
dice [0.814, 0.0963, 0.514, 0.005, 0.5061, 0.7551, 0.7306] +2026-04-09 09:00:31.999207: Epoch time: 102.02 s +2026-04-09 09:00:33.095461: +2026-04-09 09:00:33.097119: Epoch 598 +2026-04-09 09:00:33.098542: Current learning rate: 0.0044 +2026-04-09 09:02:14.032712: train_loss -0.3277 +2026-04-09 09:02:14.048771: val_loss -0.2157 +2026-04-09 09:02:14.054677: Pseudo dice [0.5165, 0.1654, 0.7161, 0.052, 0.3982, 0.6411, 0.8558] +2026-04-09 09:02:14.061052: Epoch time: 100.94 s +2026-04-09 09:02:15.183368: +2026-04-09 09:02:15.184974: Epoch 599 +2026-04-09 09:02:15.186348: Current learning rate: 0.00439 +2026-04-09 09:03:58.139221: train_loss -0.3216 +2026-04-09 09:03:58.146253: val_loss -0.2579 +2026-04-09 09:03:58.149487: Pseudo dice [0.7884, 0.2644, 0.6811, 0.0524, 0.4324, 0.8526, 0.7421] +2026-04-09 09:03:58.154139: Epoch time: 102.96 s +2026-04-09 09:04:00.946437: +2026-04-09 09:04:00.950428: Epoch 600 +2026-04-09 09:04:00.954046: Current learning rate: 0.00438 +2026-04-09 09:05:42.252927: train_loss -0.327 +2026-04-09 09:05:42.257722: val_loss -0.2836 +2026-04-09 09:05:42.259580: Pseudo dice [0.53, 0.5363, 0.6965, 0.3259, 0.4741, 0.7298, 0.8007] +2026-04-09 09:05:42.261495: Epoch time: 101.31 s +2026-04-09 09:05:43.369612: +2026-04-09 09:05:43.371617: Epoch 601 +2026-04-09 09:05:43.373224: Current learning rate: 0.00437 +2026-04-09 09:07:24.176149: train_loss -0.3299 +2026-04-09 09:07:24.182561: val_loss -0.2893 +2026-04-09 09:07:24.190093: Pseudo dice [0.429, 0.1546, 0.7966, 0.0032, 0.4668, 0.8611, 0.6681] +2026-04-09 09:07:24.192214: Epoch time: 100.81 s +2026-04-09 09:07:25.309577: +2026-04-09 09:07:25.311242: Epoch 602 +2026-04-09 09:07:25.312778: Current learning rate: 0.00436 +2026-04-09 09:09:07.390478: train_loss -0.3304 +2026-04-09 09:09:07.400196: val_loss -0.2422 +2026-04-09 09:09:07.402261: Pseudo dice [0.5576, 0.5878, 0.5886, 0.0924, 0.3418, 0.8135, 0.8029] +2026-04-09 09:09:07.404979: Epoch time: 102.08 s +2026-04-09 09:09:08.520438: +2026-04-09 
09:09:08.522696: Epoch 603 +2026-04-09 09:09:08.524657: Current learning rate: 0.00435 +2026-04-09 09:10:50.918619: train_loss -0.3282 +2026-04-09 09:10:50.926881: val_loss -0.28 +2026-04-09 09:10:50.932863: Pseudo dice [0.7525, 0.1351, 0.7285, 0.3375, 0.4524, 0.7696, 0.7274] +2026-04-09 09:10:50.936217: Epoch time: 102.4 s +2026-04-09 09:10:52.035742: +2026-04-09 09:10:52.037542: Epoch 604 +2026-04-09 09:10:52.039024: Current learning rate: 0.00434 +2026-04-09 09:12:33.300970: train_loss -0.3364 +2026-04-09 09:12:33.307907: val_loss -0.3075 +2026-04-09 09:12:33.309906: Pseudo dice [0.5633, 0.453, 0.8293, 0.6554, 0.5565, 0.6499, 0.7487] +2026-04-09 09:12:33.312020: Epoch time: 101.27 s +2026-04-09 09:12:35.748003: +2026-04-09 09:12:35.749502: Epoch 605 +2026-04-09 09:12:35.750885: Current learning rate: 0.00433 +2026-04-09 09:14:17.164869: train_loss -0.3554 +2026-04-09 09:14:17.170385: val_loss -0.3146 +2026-04-09 09:14:17.172772: Pseudo dice [0.5714, 0.2746, 0.7944, 0.3501, 0.5687, 0.7555, 0.7595] +2026-04-09 09:14:17.174852: Epoch time: 101.42 s +2026-04-09 09:14:18.276581: +2026-04-09 09:14:18.278628: Epoch 606 +2026-04-09 09:14:18.280509: Current learning rate: 0.00432 +2026-04-09 09:16:00.667475: train_loss -0.3554 +2026-04-09 09:16:00.677762: val_loss -0.237 +2026-04-09 09:16:00.681570: Pseudo dice [0.7182, 0.3224, 0.8031, 0.079, 0.4654, 0.7021, 0.8035] +2026-04-09 09:16:00.686441: Epoch time: 102.39 s +2026-04-09 09:16:01.806081: +2026-04-09 09:16:01.808467: Epoch 607 +2026-04-09 09:16:01.812078: Current learning rate: 0.00431 +2026-04-09 09:17:42.981813: train_loss -0.3053 +2026-04-09 09:17:42.986501: val_loss -0.2742 +2026-04-09 09:17:42.988234: Pseudo dice [0.4721, 0.5749, 0.6211, 0.4267, 0.3469, 0.7978, 0.5963] +2026-04-09 09:17:42.990049: Epoch time: 101.18 s +2026-04-09 09:17:44.101967: +2026-04-09 09:17:44.103784: Epoch 608 +2026-04-09 09:17:44.105131: Current learning rate: 0.0043 +2026-04-09 09:19:26.580094: train_loss -0.2984 +2026-04-09 
09:19:26.586930: val_loss -0.2861 +2026-04-09 09:19:26.588905: Pseudo dice [0.5962, 0.536, 0.8046, 0.1782, 0.3346, 0.8743, 0.7938] +2026-04-09 09:19:26.590657: Epoch time: 102.48 s +2026-04-09 09:19:27.717806: +2026-04-09 09:19:27.719337: Epoch 609 +2026-04-09 09:19:27.720692: Current learning rate: 0.00429 +2026-04-09 09:21:09.044800: train_loss -0.3286 +2026-04-09 09:21:09.051615: val_loss -0.2619 +2026-04-09 09:21:09.053817: Pseudo dice [0.387, 0.424, 0.7172, 0.0004, 0.2014, 0.6677, 0.8512] +2026-04-09 09:21:09.056176: Epoch time: 101.33 s +2026-04-09 09:21:10.155567: +2026-04-09 09:21:10.157187: Epoch 610 +2026-04-09 09:21:10.158597: Current learning rate: 0.00429 +2026-04-09 09:22:51.513265: train_loss -0.3143 +2026-04-09 09:22:51.518433: val_loss -0.2384 +2026-04-09 09:22:51.519997: Pseudo dice [0.7231, 0.2941, 0.6491, 0.0152, 0.2044, 0.8083, 0.72] +2026-04-09 09:22:51.522255: Epoch time: 101.36 s +2026-04-09 09:22:52.702233: +2026-04-09 09:22:52.703740: Epoch 611 +2026-04-09 09:22:52.705141: Current learning rate: 0.00428 +2026-04-09 09:24:33.930659: train_loss -0.3204 +2026-04-09 09:24:33.935151: val_loss -0.269 +2026-04-09 09:24:33.936831: Pseudo dice [0.6016, 0.1991, 0.492, 0.2546, 0.3488, 0.6348, 0.8387] +2026-04-09 09:24:33.938625: Epoch time: 101.23 s +2026-04-09 09:24:35.058395: +2026-04-09 09:24:35.065112: Epoch 612 +2026-04-09 09:24:35.066719: Current learning rate: 0.00427 +2026-04-09 09:26:16.413624: train_loss -0.3125 +2026-04-09 09:26:16.418125: val_loss -0.2646 +2026-04-09 09:26:16.420285: Pseudo dice [0.7569, 0.5479, 0.604, 0.2378, 0.4322, 0.8189, 0.7786] +2026-04-09 09:26:16.422527: Epoch time: 101.36 s +2026-04-09 09:26:17.522598: +2026-04-09 09:26:17.524119: Epoch 613 +2026-04-09 09:26:17.525549: Current learning rate: 0.00426 +2026-04-09 09:27:58.910640: train_loss -0.3148 +2026-04-09 09:27:58.917285: val_loss -0.2442 +2026-04-09 09:27:58.920171: Pseudo dice [0.1905, 0.3322, 0.6629, 0.3609, 0.3354, 0.322, 0.8452] +2026-04-09 
09:27:58.925075: Epoch time: 101.39 s +2026-04-09 09:28:00.038166: +2026-04-09 09:28:00.040136: Epoch 614 +2026-04-09 09:28:00.041882: Current learning rate: 0.00425 +2026-04-09 09:29:41.758394: train_loss -0.328 +2026-04-09 09:29:41.764350: val_loss -0.2432 +2026-04-09 09:29:41.766214: Pseudo dice [0.6791, 0.199, 0.539, 0.068, 0.4425, 0.6809, 0.602] +2026-04-09 09:29:41.768319: Epoch time: 101.72 s +2026-04-09 09:29:42.896992: +2026-04-09 09:29:42.898522: Epoch 615 +2026-04-09 09:29:42.899995: Current learning rate: 0.00424 +2026-04-09 09:31:24.555877: train_loss -0.3291 +2026-04-09 09:31:24.560579: val_loss -0.3108 +2026-04-09 09:31:24.562296: Pseudo dice [0.7687, 0.3912, 0.8687, 0.0064, 0.4409, 0.7811, 0.8617] +2026-04-09 09:31:24.566317: Epoch time: 101.66 s +2026-04-09 09:31:25.676140: +2026-04-09 09:31:25.678609: Epoch 616 +2026-04-09 09:31:25.680225: Current learning rate: 0.00423 +2026-04-09 09:33:07.256655: train_loss -0.3451 +2026-04-09 09:33:07.262112: val_loss -0.2121 +2026-04-09 09:33:07.263962: Pseudo dice [0.2861, 0.1617, 0.7519, 0.0229, 0.4623, 0.7985, 0.7483] +2026-04-09 09:33:07.265847: Epoch time: 101.58 s +2026-04-09 09:33:08.411256: +2026-04-09 09:33:08.412774: Epoch 617 +2026-04-09 09:33:08.414194: Current learning rate: 0.00422 +2026-04-09 09:34:49.952209: train_loss -0.3339 +2026-04-09 09:34:49.961761: val_loss -0.2353 +2026-04-09 09:34:49.964194: Pseudo dice [0.5907, 0.3978, 0.6366, 0.0618, 0.3207, 0.8216, 0.7846] +2026-04-09 09:34:49.966381: Epoch time: 101.54 s +2026-04-09 09:34:51.080966: +2026-04-09 09:34:51.082516: Epoch 618 +2026-04-09 09:34:51.084438: Current learning rate: 0.00421 +2026-04-09 09:36:32.284055: train_loss -0.3297 +2026-04-09 09:36:32.294396: val_loss -0.2853 +2026-04-09 09:36:32.296148: Pseudo dice [0.8165, 0.1591, 0.6888, 0.0039, 0.3878, 0.5126, 0.8584] +2026-04-09 09:36:32.298270: Epoch time: 101.21 s +2026-04-09 09:36:33.399018: +2026-04-09 09:36:33.400822: Epoch 619 +2026-04-09 09:36:33.403648: Current learning 
rate: 0.0042 +2026-04-09 09:38:14.881746: train_loss -0.3264 +2026-04-09 09:38:14.887841: val_loss -0.2987 +2026-04-09 09:38:14.889862: Pseudo dice [0.7685, 0.2172, 0.7617, 0.3488, 0.3739, 0.4892, 0.7915] +2026-04-09 09:38:14.897637: Epoch time: 101.49 s +2026-04-09 09:38:16.072673: +2026-04-09 09:38:16.074345: Epoch 620 +2026-04-09 09:38:16.075861: Current learning rate: 0.00419 +2026-04-09 09:39:57.261332: train_loss -0.3077 +2026-04-09 09:39:57.268516: val_loss -0.2797 +2026-04-09 09:39:57.270751: Pseudo dice [0.7767, 0.0693, 0.74, 0.0046, 0.2828, 0.8074, 0.7579] +2026-04-09 09:39:57.273343: Epoch time: 101.19 s +2026-04-09 09:39:58.385983: +2026-04-09 09:39:58.387877: Epoch 621 +2026-04-09 09:39:58.389391: Current learning rate: 0.00418 +2026-04-09 09:41:39.672223: train_loss -0.3151 +2026-04-09 09:41:39.677492: val_loss -0.2934 +2026-04-09 09:41:39.679193: Pseudo dice [0.5958, 0.4171, 0.7788, 0.061, 0.3937, 0.7573, 0.7975] +2026-04-09 09:41:39.681740: Epoch time: 101.29 s +2026-04-09 09:41:40.792014: +2026-04-09 09:41:40.793504: Epoch 622 +2026-04-09 09:41:40.794993: Current learning rate: 0.00417 +2026-04-09 09:43:21.887227: train_loss -0.3162 +2026-04-09 09:43:21.893090: val_loss -0.278 +2026-04-09 09:43:21.894688: Pseudo dice [0.702, 0.4141, 0.6849, 0.3054, 0.4225, 0.6484, 0.6521] +2026-04-09 09:43:21.896938: Epoch time: 101.1 s +2026-04-09 09:43:23.007759: +2026-04-09 09:43:23.009415: Epoch 623 +2026-04-09 09:43:23.011235: Current learning rate: 0.00416 +2026-04-09 09:45:04.549120: train_loss -0.3373 +2026-04-09 09:45:04.554405: val_loss -0.164 +2026-04-09 09:45:04.556105: Pseudo dice [0.8194, 0.4924, 0.8046, 0.0012, 0.2869, 0.7536, 0.7541] +2026-04-09 09:45:04.558366: Epoch time: 101.54 s +2026-04-09 09:45:05.688504: +2026-04-09 09:45:05.693550: Epoch 624 +2026-04-09 09:45:05.695131: Current learning rate: 0.00415 +2026-04-09 09:46:47.105831: train_loss -0.3259 +2026-04-09 09:46:47.111650: val_loss -0.2131 +2026-04-09 09:46:47.113589: Pseudo dice [0.7962, 
0.11, 0.5699, 0.0165, 0.5251, 0.8204, 0.6874] +2026-04-09 09:46:47.116014: Epoch time: 101.42 s +2026-04-09 09:46:48.254966: +2026-04-09 09:46:48.258811: Epoch 625 +2026-04-09 09:46:48.261594: Current learning rate: 0.00414 +2026-04-09 09:48:30.757824: train_loss -0.3383 +2026-04-09 09:48:30.763694: val_loss -0.2353 +2026-04-09 09:48:30.766326: Pseudo dice [0.6019, 0.5155, 0.6717, 0.0635, 0.6289, 0.811, 0.7007] +2026-04-09 09:48:30.770547: Epoch time: 102.51 s +2026-04-09 09:48:31.871218: +2026-04-09 09:48:31.872749: Epoch 626 +2026-04-09 09:48:31.874342: Current learning rate: 0.00413 +2026-04-09 09:50:13.539963: train_loss -0.3401 +2026-04-09 09:50:13.545387: val_loss -0.1469 +2026-04-09 09:50:13.547313: Pseudo dice [0.8143, 0.371, 0.551, 0.042, 0.4285, 0.7415, 0.5294] +2026-04-09 09:50:13.549352: Epoch time: 101.67 s +2026-04-09 09:50:14.672227: +2026-04-09 09:50:14.679716: Epoch 627 +2026-04-09 09:50:14.681564: Current learning rate: 0.00412 +2026-04-09 09:52:00.491187: train_loss -0.322 +2026-04-09 09:52:00.502544: val_loss -0.2872 +2026-04-09 09:52:00.507298: Pseudo dice [0.6709, 0.1363, 0.8017, 0.2647, 0.4341, 0.7768, 0.8246] +2026-04-09 09:52:00.510150: Epoch time: 105.82 s +2026-04-09 09:52:01.629596: +2026-04-09 09:52:01.631644: Epoch 628 +2026-04-09 09:52:01.633566: Current learning rate: 0.00411 +2026-04-09 09:53:42.724797: train_loss -0.3298 +2026-04-09 09:53:42.740545: val_loss -0.3128 +2026-04-09 09:53:42.754210: Pseudo dice [0.4245, 0.5569, 0.8198, 0.0017, 0.4713, 0.8571, 0.8452] +2026-04-09 09:53:42.764975: Epoch time: 101.1 s +2026-04-09 09:53:43.894201: +2026-04-09 09:53:43.896405: Epoch 629 +2026-04-09 09:53:43.897992: Current learning rate: 0.0041 +2026-04-09 09:55:25.457086: train_loss -0.3155 +2026-04-09 09:55:25.467097: val_loss -0.2076 +2026-04-09 09:55:25.471129: Pseudo dice [0.4577, 0.231, 0.4945, 0.0416, 0.2153, 0.833, 0.6208] +2026-04-09 09:55:25.474325: Epoch time: 101.57 s +2026-04-09 09:55:26.605941: +2026-04-09 09:55:26.607559: 
Epoch 630 +2026-04-09 09:55:26.609059: Current learning rate: 0.00409 +2026-04-09 09:57:07.705302: train_loss -0.312 +2026-04-09 09:57:07.714502: val_loss -0.2782 +2026-04-09 09:57:07.716702: Pseudo dice [0.3482, 0.4653, 0.7573, 0.3482, 0.3802, 0.8907, 0.7268] +2026-04-09 09:57:07.719504: Epoch time: 101.1 s +2026-04-09 09:57:08.832100: +2026-04-09 09:57:08.836557: Epoch 631 +2026-04-09 09:57:08.840050: Current learning rate: 0.00408 +2026-04-09 09:58:50.348965: train_loss -0.3271 +2026-04-09 09:58:50.355149: val_loss -0.2261 +2026-04-09 09:58:50.356935: Pseudo dice [0.7278, 0.454, 0.6336, 0.0942, 0.275, 0.8419, 0.726] +2026-04-09 09:58:50.359467: Epoch time: 101.52 s +2026-04-09 09:58:51.476051: +2026-04-09 09:58:51.478531: Epoch 632 +2026-04-09 09:58:51.480182: Current learning rate: 0.00407 +2026-04-09 10:00:33.356322: train_loss -0.3404 +2026-04-09 10:00:33.362124: val_loss -0.2716 +2026-04-09 10:00:33.364202: Pseudo dice [0.1099, 0.4597, 0.7491, 0.7108, 0.2146, 0.6161, 0.6977] +2026-04-09 10:00:33.367075: Epoch time: 101.88 s +2026-04-09 10:00:34.488867: +2026-04-09 10:00:34.490478: Epoch 633 +2026-04-09 10:00:34.492337: Current learning rate: 0.00406 +2026-04-09 10:02:16.257767: train_loss -0.3429 +2026-04-09 10:02:16.265441: val_loss -0.2514 +2026-04-09 10:02:16.267090: Pseudo dice [0.685, 0.1195, 0.7919, 0.1832, 0.1929, 0.7579, 0.7666] +2026-04-09 10:02:16.269232: Epoch time: 101.77 s +2026-04-09 10:02:17.385005: +2026-04-09 10:02:17.389205: Epoch 634 +2026-04-09 10:02:17.391812: Current learning rate: 0.00405 +2026-04-09 10:03:58.638086: train_loss -0.3286 +2026-04-09 10:03:58.643263: val_loss -0.2394 +2026-04-09 10:03:58.645410: Pseudo dice [0.7313, 0.614, 0.6539, 0.0663, 0.4872, 0.6494, 0.5246] +2026-04-09 10:03:58.648103: Epoch time: 101.26 s +2026-04-09 10:03:59.779732: +2026-04-09 10:03:59.782534: Epoch 635 +2026-04-09 10:03:59.784791: Current learning rate: 0.00404 +2026-04-09 10:05:41.185263: train_loss -0.3278 +2026-04-09 10:05:41.191587: val_loss 
-0.3103 +2026-04-09 10:05:41.193496: Pseudo dice [0.5933, 0.2476, 0.7012, 0.4312, 0.2883, 0.8969, 0.9184] +2026-04-09 10:05:41.198393: Epoch time: 101.41 s +2026-04-09 10:05:42.302210: +2026-04-09 10:05:42.303917: Epoch 636 +2026-04-09 10:05:42.305486: Current learning rate: 0.00403 +2026-04-09 10:07:23.484000: train_loss -0.331 +2026-04-09 10:07:23.489546: val_loss -0.2746 +2026-04-09 10:07:23.491634: Pseudo dice [0.8444, 0.3478, 0.726, 0.2921, 0.5131, 0.8127, 0.8895] +2026-04-09 10:07:23.494220: Epoch time: 101.18 s +2026-04-09 10:07:24.608428: +2026-04-09 10:07:24.610706: Epoch 637 +2026-04-09 10:07:24.613354: Current learning rate: 0.00402 +2026-04-09 10:09:06.540710: train_loss -0.3382 +2026-04-09 10:09:06.545843: val_loss -0.2979 +2026-04-09 10:09:06.547858: Pseudo dice [0.7771, 0.3314, 0.7047, 0.6094, 0.3565, 0.7644, 0.9022] +2026-04-09 10:09:06.550671: Epoch time: 101.94 s +2026-04-09 10:09:07.675725: +2026-04-09 10:09:07.678121: Epoch 638 +2026-04-09 10:09:07.679481: Current learning rate: 0.00401 +2026-04-09 10:10:52.039215: train_loss -0.328 +2026-04-09 10:10:52.046129: val_loss -0.2848 +2026-04-09 10:10:52.049233: Pseudo dice [0.6355, 0.6058, 0.7532, 0.7084, 0.4504, 0.8667, 0.458] +2026-04-09 10:10:52.051893: Epoch time: 104.37 s +2026-04-09 10:10:53.157772: +2026-04-09 10:10:53.160333: Epoch 639 +2026-04-09 10:10:53.162961: Current learning rate: 0.004 +2026-04-09 10:12:37.671024: train_loss -0.3259 +2026-04-09 10:12:37.676848: val_loss -0.2909 +2026-04-09 10:12:37.680105: Pseudo dice [0.3808, 0.6416, 0.6893, 0.7819, 0.2785, 0.3998, 0.8185] +2026-04-09 10:12:37.683037: Epoch time: 104.52 s +2026-04-09 10:12:38.794484: +2026-04-09 10:12:38.797265: Epoch 640 +2026-04-09 10:12:38.800919: Current learning rate: 0.00399 +2026-04-09 10:14:23.215785: train_loss -0.3289 +2026-04-09 10:14:23.221041: val_loss -0.2196 +2026-04-09 10:14:23.222966: Pseudo dice [0.6164, 0.1494, 0.7568, 0.0228, 0.2192, 0.7944, 0.6867] +2026-04-09 10:14:23.224959: Epoch time: 104.42 s 
+2026-04-09 10:14:24.334867: +2026-04-09 10:14:24.337235: Epoch 641 +2026-04-09 10:14:24.339029: Current learning rate: 0.00398 +2026-04-09 10:16:06.905135: train_loss -0.3414 +2026-04-09 10:16:06.918252: val_loss -0.2832 +2026-04-09 10:16:06.921503: Pseudo dice [0.7164, 0.4075, 0.5572, 0.2457, 0.411, 0.7921, 0.8136] +2026-04-09 10:16:06.925301: Epoch time: 102.57 s +2026-04-09 10:16:08.051944: +2026-04-09 10:16:08.056657: Epoch 642 +2026-04-09 10:16:08.059653: Current learning rate: 0.00397 +2026-04-09 10:17:52.390694: train_loss -0.352 +2026-04-09 10:17:52.409235: val_loss -0.2699 +2026-04-09 10:17:52.416479: Pseudo dice [0.6186, 0.491, 0.8001, 0.0053, 0.317, 0.887, 0.763] +2026-04-09 10:17:52.422649: Epoch time: 104.34 s +2026-04-09 10:17:53.654475: +2026-04-09 10:17:53.661999: Epoch 643 +2026-04-09 10:17:53.664802: Current learning rate: 0.00396 +2026-04-09 10:19:36.804413: train_loss -0.3414 +2026-04-09 10:19:36.812365: val_loss -0.2515 +2026-04-09 10:19:36.816554: Pseudo dice [0.467, 0.3691, 0.7261, 0.0003, 0.362, 0.6821, 0.8169] +2026-04-09 10:19:36.819524: Epoch time: 103.15 s +2026-04-09 10:19:37.933094: +2026-04-09 10:19:37.936014: Epoch 644 +2026-04-09 10:19:37.939308: Current learning rate: 0.00395 +2026-04-09 10:21:21.324154: train_loss -0.3357 +2026-04-09 10:21:21.332219: val_loss -0.2817 +2026-04-09 10:21:21.333905: Pseudo dice [0.745, 0.0502, 0.6859, 0.0345, 0.3367, 0.3481, 0.7591] +2026-04-09 10:21:21.336077: Epoch time: 103.39 s +2026-04-09 10:21:22.431264: +2026-04-09 10:21:22.434470: Epoch 645 +2026-04-09 10:21:22.436185: Current learning rate: 0.00394 +2026-04-09 10:23:06.201909: train_loss -0.3342 +2026-04-09 10:23:06.208580: val_loss -0.2592 +2026-04-09 10:23:06.210843: Pseudo dice [0.797, 0.4357, 0.6808, 0.0204, 0.3913, 0.5593, 0.795] +2026-04-09 10:23:06.214407: Epoch time: 103.77 s +2026-04-09 10:23:07.354703: +2026-04-09 10:23:07.362311: Epoch 646 +2026-04-09 10:23:07.364097: Current learning rate: 0.00393 +2026-04-09 10:24:50.214372: 
train_loss -0.3369 +2026-04-09 10:24:50.223217: val_loss -0.2214 +2026-04-09 10:24:50.227000: Pseudo dice [0.8561, 0.5192, 0.6508, 0.0005, 0.2263, 0.8315, 0.6719] +2026-04-09 10:24:50.230551: Epoch time: 102.86 s +2026-04-09 10:24:51.359980: +2026-04-09 10:24:51.362456: Epoch 647 +2026-04-09 10:24:51.364411: Current learning rate: 0.00392 +2026-04-09 10:26:33.565022: train_loss -0.3322 +2026-04-09 10:26:33.572373: val_loss -0.3079 +2026-04-09 10:26:33.577307: Pseudo dice [0.4563, 0.1369, 0.6541, 0.707, 0.3482, 0.5328, 0.8908] +2026-04-09 10:26:33.581134: Epoch time: 102.21 s +2026-04-09 10:26:34.719994: +2026-04-09 10:26:34.721760: Epoch 648 +2026-04-09 10:26:34.723313: Current learning rate: 0.00391 +2026-04-09 10:28:17.676883: train_loss -0.3387 +2026-04-09 10:28:17.684065: val_loss -0.2431 +2026-04-09 10:28:17.687509: Pseudo dice [0.8072, 0.5305, 0.4901, 0.0003, 0.1619, 0.7208, 0.5306] +2026-04-09 10:28:17.690073: Epoch time: 102.96 s +2026-04-09 10:28:18.824900: +2026-04-09 10:28:18.827094: Epoch 649 +2026-04-09 10:28:18.828784: Current learning rate: 0.0039 +2026-04-09 10:30:00.500611: train_loss -0.3474 +2026-04-09 10:30:00.508478: val_loss -0.2718 +2026-04-09 10:30:00.511335: Pseudo dice [0.5765, 0.3731, 0.4999, 0.9048, 0.2715, 0.707, 0.8141] +2026-04-09 10:30:00.516770: Epoch time: 101.68 s +2026-04-09 10:30:03.235291: +2026-04-09 10:30:03.236745: Epoch 650 +2026-04-09 10:30:03.238196: Current learning rate: 0.00389 +2026-04-09 10:31:44.640333: train_loss -0.3311 +2026-04-09 10:31:44.645319: val_loss -0.1883 +2026-04-09 10:31:44.646943: Pseudo dice [0.7211, 0.3773, 0.4397, 0.0394, 0.2759, 0.7153, 0.8644] +2026-04-09 10:31:44.650046: Epoch time: 101.41 s +2026-04-09 10:31:45.770478: +2026-04-09 10:31:45.771866: Epoch 651 +2026-04-09 10:31:45.773132: Current learning rate: 0.00388 +2026-04-09 10:33:27.639187: train_loss -0.3254 +2026-04-09 10:33:27.645236: val_loss -0.2651 +2026-04-09 10:33:27.647549: Pseudo dice [0.3953, 0.3352, 0.6878, 0.1768, 0.5336, 
0.2914, 0.8603] +2026-04-09 10:33:27.649900: Epoch time: 101.87 s +2026-04-09 10:33:28.768945: +2026-04-09 10:33:28.770958: Epoch 652 +2026-04-09 10:33:28.772453: Current learning rate: 0.00387 +2026-04-09 10:35:09.833287: train_loss -0.3257 +2026-04-09 10:35:09.838010: val_loss -0.2382 +2026-04-09 10:35:09.839599: Pseudo dice [0.1437, 0.5947, 0.5785, 0.1231, 0.3412, 0.3941, 0.778] +2026-04-09 10:35:09.841262: Epoch time: 101.07 s +2026-04-09 10:35:10.972394: +2026-04-09 10:35:10.974033: Epoch 653 +2026-04-09 10:35:10.975585: Current learning rate: 0.00386 +2026-04-09 10:36:52.163806: train_loss -0.3245 +2026-04-09 10:36:52.170561: val_loss -0.2622 +2026-04-09 10:36:52.172467: Pseudo dice [0.841, 0.062, 0.6589, 0.0523, 0.4408, 0.8283, 0.6633] +2026-04-09 10:36:52.177004: Epoch time: 101.19 s +2026-04-09 10:36:53.286273: +2026-04-09 10:36:53.288017: Epoch 654 +2026-04-09 10:36:53.289700: Current learning rate: 0.00385 +2026-04-09 10:38:34.671241: train_loss -0.3143 +2026-04-09 10:38:34.677865: val_loss -0.2358 +2026-04-09 10:38:34.679756: Pseudo dice [0.4436, 0.5518, 0.7922, 0.0002, 0.4752, 0.6831, 0.8093] +2026-04-09 10:38:34.681823: Epoch time: 101.39 s +2026-04-09 10:38:35.808374: +2026-04-09 10:38:35.809943: Epoch 655 +2026-04-09 10:38:35.812069: Current learning rate: 0.00384 +2026-04-09 10:40:17.484546: train_loss -0.3305 +2026-04-09 10:40:17.491761: val_loss -0.15 +2026-04-09 10:40:17.493883: Pseudo dice [0.5964, 0.3954, 0.6505, 0.002, 0.4853, 0.8113, 0.8116] +2026-04-09 10:40:17.497126: Epoch time: 101.68 s +2026-04-09 10:40:18.606587: +2026-04-09 10:40:18.608445: Epoch 656 +2026-04-09 10:40:18.610620: Current learning rate: 0.00383 +2026-04-09 10:42:00.167792: train_loss -0.3398 +2026-04-09 10:42:00.173245: val_loss -0.22 +2026-04-09 10:42:00.175504: Pseudo dice [0.7968, 0.3795, 0.7503, 0.1057, 0.276, 0.3402, 0.7567] +2026-04-09 10:42:00.178678: Epoch time: 101.56 s +2026-04-09 10:42:01.291455: +2026-04-09 10:42:01.293167: Epoch 657 +2026-04-09 
10:42:01.295327: Current learning rate: 0.00382 +2026-04-09 10:43:42.633087: train_loss -0.3335 +2026-04-09 10:43:42.638268: val_loss -0.2693 +2026-04-09 10:43:42.640311: Pseudo dice [0.6417, 0.0789, 0.8124, 0.31, 0.3343, 0.8773, 0.7949] +2026-04-09 10:43:42.642548: Epoch time: 101.34 s +2026-04-09 10:43:43.754883: +2026-04-09 10:43:43.756928: Epoch 658 +2026-04-09 10:43:43.759120: Current learning rate: 0.00381 +2026-04-09 10:45:25.524335: train_loss -0.3308 +2026-04-09 10:45:25.529151: val_loss -0.3139 +2026-04-09 10:45:25.531291: Pseudo dice [0.7126, 0.6364, 0.7147, 0.6785, 0.3717, 0.7342, 0.8515] +2026-04-09 10:45:25.533498: Epoch time: 101.77 s +2026-04-09 10:45:26.651278: +2026-04-09 10:45:26.653048: Epoch 659 +2026-04-09 10:45:26.654460: Current learning rate: 0.0038 +2026-04-09 10:47:08.265863: train_loss -0.3366 +2026-04-09 10:47:08.273693: val_loss -0.2545 +2026-04-09 10:47:08.275671: Pseudo dice [0.7453, 0.4366, 0.8203, 0.0039, 0.4791, 0.6199, 0.82] +2026-04-09 10:47:08.278308: Epoch time: 101.62 s +2026-04-09 10:47:09.381636: +2026-04-09 10:47:09.383600: Epoch 660 +2026-04-09 10:47:09.385645: Current learning rate: 0.00379 +2026-04-09 10:48:52.080618: train_loss -0.321 +2026-04-09 10:48:52.088929: val_loss -0.261 +2026-04-09 10:48:52.091207: Pseudo dice [0.712, 0.4425, 0.7928, 0.6809, 0.4232, 0.3529, 0.6797] +2026-04-09 10:48:52.093841: Epoch time: 102.7 s +2026-04-09 10:48:53.207033: +2026-04-09 10:48:53.208554: Epoch 661 +2026-04-09 10:48:53.210524: Current learning rate: 0.00378 +2026-04-09 10:50:34.476279: train_loss -0.2922 +2026-04-09 10:50:34.482111: val_loss -0.2859 +2026-04-09 10:50:34.483989: Pseudo dice [0.7527, 0.5536, 0.7257, 0.6583, 0.5091, 0.7821, 0.7924] +2026-04-09 10:50:34.486507: Epoch time: 101.27 s +2026-04-09 10:50:35.602116: +2026-04-09 10:50:35.603949: Epoch 662 +2026-04-09 10:50:35.606155: Current learning rate: 0.00377 +2026-04-09 10:52:17.362119: train_loss -0.3172 +2026-04-09 10:52:17.370924: val_loss -0.2186 +2026-04-09 
10:52:17.372993: Pseudo dice [0.3491, 0.547, 0.6307, 0.0676, 0.4634, 0.2748, 0.6743] +2026-04-09 10:52:17.374958: Epoch time: 101.76 s +2026-04-09 10:52:18.479229: +2026-04-09 10:52:18.480869: Epoch 663 +2026-04-09 10:52:18.482297: Current learning rate: 0.00376 +2026-04-09 10:53:59.901727: train_loss -0.3185 +2026-04-09 10:53:59.910272: val_loss -0.2871 +2026-04-09 10:53:59.912351: Pseudo dice [0.8462, 0.239, 0.6164, 0.4965, 0.481, 0.7666, 0.832] +2026-04-09 10:53:59.914660: Epoch time: 101.43 s +2026-04-09 10:54:01.035494: +2026-04-09 10:54:01.037440: Epoch 664 +2026-04-09 10:54:01.039105: Current learning rate: 0.00375 +2026-04-09 10:55:42.532936: train_loss -0.3401 +2026-04-09 10:55:42.538674: val_loss -0.2782 +2026-04-09 10:55:42.540664: Pseudo dice [0.7635, 0.0762, 0.7622, 0.0036, 0.3496, 0.7596, 0.8592] +2026-04-09 10:55:42.543310: Epoch time: 101.5 s +2026-04-09 10:55:44.909524: +2026-04-09 10:55:44.911849: Epoch 665 +2026-04-09 10:55:44.913684: Current learning rate: 0.00374 +2026-04-09 10:57:26.031678: train_loss -0.3395 +2026-04-09 10:57:26.040658: val_loss -0.2667 +2026-04-09 10:57:26.044122: Pseudo dice [0.6652, 0.3337, 0.7243, 0.0708, 0.531, 0.76, 0.8755] +2026-04-09 10:57:26.046912: Epoch time: 101.13 s +2026-04-09 10:57:27.180420: +2026-04-09 10:57:27.190009: Epoch 666 +2026-04-09 10:57:27.191797: Current learning rate: 0.00373 +2026-04-09 10:59:08.798262: train_loss -0.345 +2026-04-09 10:59:08.803683: val_loss -0.2903 +2026-04-09 10:59:08.805488: Pseudo dice [0.0229, 0.2123, 0.7843, 0.009, 0.5826, 0.6683, 0.6801] +2026-04-09 10:59:08.808419: Epoch time: 101.62 s +2026-04-09 10:59:09.922712: +2026-04-09 10:59:09.924378: Epoch 667 +2026-04-09 10:59:09.925835: Current learning rate: 0.00372 +2026-04-09 11:00:51.403839: train_loss -0.3484 +2026-04-09 11:00:51.411156: val_loss -0.2714 +2026-04-09 11:00:51.413107: Pseudo dice [0.6281, 0.6742, 0.5888, 0.0002, 0.4819, 0.8161, 0.8569] +2026-04-09 11:00:51.415772: Epoch time: 101.48 s +2026-04-09 
11:00:52.545548: +2026-04-09 11:00:52.547181: Epoch 668 +2026-04-09 11:00:52.548618: Current learning rate: 0.00371 +2026-04-09 11:02:34.069897: train_loss -0.3392 +2026-04-09 11:02:34.078089: val_loss -0.2575 +2026-04-09 11:02:34.090990: Pseudo dice [0.6661, 0.191, 0.5082, 0.3512, 0.3981, 0.6897, 0.6638] +2026-04-09 11:02:34.094965: Epoch time: 101.53 s +2026-04-09 11:02:35.236215: +2026-04-09 11:02:35.238061: Epoch 669 +2026-04-09 11:02:35.239542: Current learning rate: 0.0037 +2026-04-09 11:04:17.103853: train_loss -0.338 +2026-04-09 11:04:17.110813: val_loss -0.1792 +2026-04-09 11:04:17.114599: Pseudo dice [0.8048, 0.2305, 0.7059, 0.0117, 0.3925, 0.8316, 0.7449] +2026-04-09 11:04:17.116667: Epoch time: 101.87 s +2026-04-09 11:04:18.248715: +2026-04-09 11:04:18.251087: Epoch 670 +2026-04-09 11:04:18.253209: Current learning rate: 0.00369 +2026-04-09 11:05:59.731183: train_loss -0.3391 +2026-04-09 11:05:59.740272: val_loss -0.2535 +2026-04-09 11:05:59.743575: Pseudo dice [0.6672, 0.4556, 0.7522, 0.313, 0.3639, 0.7971, 0.3647] +2026-04-09 11:05:59.746314: Epoch time: 101.49 s +2026-04-09 11:06:00.889411: +2026-04-09 11:06:00.892755: Epoch 671 +2026-04-09 11:06:00.895203: Current learning rate: 0.00368 +2026-04-09 11:07:43.752480: train_loss -0.3285 +2026-04-09 11:07:43.758138: val_loss -0.3052 +2026-04-09 11:07:43.760374: Pseudo dice [0.7637, 0.5105, 0.7204, 0.8296, 0.4526, 0.7542, 0.8641] +2026-04-09 11:07:43.763096: Epoch time: 102.87 s +2026-04-09 11:07:44.890564: +2026-04-09 11:07:44.892740: Epoch 672 +2026-04-09 11:07:44.895746: Current learning rate: 0.00367 +2026-04-09 11:09:27.435718: train_loss -0.3355 +2026-04-09 11:09:27.453277: val_loss -0.3011 +2026-04-09 11:09:27.461057: Pseudo dice [0.5525, 0.3082, 0.7574, 0.3195, 0.519, 0.8604, 0.4933] +2026-04-09 11:09:27.466686: Epoch time: 102.55 s +2026-04-09 11:09:28.637444: +2026-04-09 11:09:28.640689: Epoch 673 +2026-04-09 11:09:28.644068: Current learning rate: 0.00366 +2026-04-09 11:11:10.227039: 
train_loss -0.333 +2026-04-09 11:11:10.231652: val_loss -0.2756 +2026-04-09 11:11:10.233479: Pseudo dice [0.7672, 0.5794, 0.6229, 0.0389, 0.439, 0.7962, 0.5861] +2026-04-09 11:11:10.235389: Epoch time: 101.59 s +2026-04-09 11:11:11.400412: +2026-04-09 11:11:11.402000: Epoch 674 +2026-04-09 11:11:11.403476: Current learning rate: 0.00365 +2026-04-09 11:12:53.339909: train_loss -0.3275 +2026-04-09 11:12:53.347585: val_loss -0.2319 +2026-04-09 11:12:53.350239: Pseudo dice [0.4764, 0.7009, 0.3805, 0.0437, 0.4855, 0.8576, 0.6923] +2026-04-09 11:12:53.352871: Epoch time: 101.94 s +2026-04-09 11:12:54.544054: +2026-04-09 11:12:54.545779: Epoch 675 +2026-04-09 11:12:54.547536: Current learning rate: 0.00364 +2026-04-09 11:14:36.344656: train_loss -0.3306 +2026-04-09 11:14:36.349432: val_loss -0.2659 +2026-04-09 11:14:36.351568: Pseudo dice [0.8229, 0.2404, 0.5958, 0.0293, 0.4669, 0.8779, 0.7819] +2026-04-09 11:14:36.353812: Epoch time: 101.8 s +2026-04-09 11:14:37.476206: +2026-04-09 11:14:37.479534: Epoch 676 +2026-04-09 11:14:37.481284: Current learning rate: 0.00363 +2026-04-09 11:16:18.969105: train_loss -0.3112 +2026-04-09 11:16:18.975516: val_loss -0.2916 +2026-04-09 11:16:18.978006: Pseudo dice [0.8166, 0.0892, 0.7733, 0.0993, 0.4832, 0.8299, 0.7935] +2026-04-09 11:16:18.980691: Epoch time: 101.5 s +2026-04-09 11:16:20.097948: +2026-04-09 11:16:20.099776: Epoch 677 +2026-04-09 11:16:20.101392: Current learning rate: 0.00362 +2026-04-09 11:18:02.154456: train_loss -0.3196 +2026-04-09 11:18:02.164917: val_loss -0.2766 +2026-04-09 11:18:02.167050: Pseudo dice [0.6937, 0.3396, 0.6485, 0.8938, 0.4506, 0.3069, 0.6705] +2026-04-09 11:18:02.169698: Epoch time: 102.06 s +2026-04-09 11:18:03.295268: +2026-04-09 11:18:03.297475: Epoch 678 +2026-04-09 11:18:03.298949: Current learning rate: 0.00361 +2026-04-09 11:19:44.802580: train_loss -0.3394 +2026-04-09 11:19:44.810784: val_loss -0.228 +2026-04-09 11:19:44.812978: Pseudo dice [0.375, 0.5239, 0.7332, 0.0003, 0.3449, 0.2496, 
0.8117] +2026-04-09 11:19:44.815329: Epoch time: 101.51 s +2026-04-09 11:19:45.949968: +2026-04-09 11:19:45.951643: Epoch 679 +2026-04-09 11:19:45.953053: Current learning rate: 0.0036 +2026-04-09 11:21:27.487570: train_loss -0.3272 +2026-04-09 11:21:27.503563: val_loss -0.297 +2026-04-09 11:21:27.509094: Pseudo dice [0.7947, 0.5496, 0.7046, 0.7329, 0.3116, 0.7539, 0.7203] +2026-04-09 11:21:27.514906: Epoch time: 101.54 s +2026-04-09 11:21:28.667640: +2026-04-09 11:21:28.669342: Epoch 680 +2026-04-09 11:21:28.670801: Current learning rate: 0.00359 +2026-04-09 11:23:10.682855: train_loss -0.3226 +2026-04-09 11:23:10.690453: val_loss -0.2913 +2026-04-09 11:23:10.696464: Pseudo dice [0.5608, 0.0666, 0.6143, 0.8255, 0.5911, 0.8075, 0.7182] +2026-04-09 11:23:10.699254: Epoch time: 102.02 s +2026-04-09 11:23:11.855925: +2026-04-09 11:23:11.860307: Epoch 681 +2026-04-09 11:23:11.865119: Current learning rate: 0.00358 +2026-04-09 11:24:54.561827: train_loss -0.3252 +2026-04-09 11:24:54.571314: val_loss -0.1962 +2026-04-09 11:24:54.574713: Pseudo dice [0.7357, 0.0939, 0.7604, 0.018, 0.4384, 0.1519, 0.8965] +2026-04-09 11:24:54.577191: Epoch time: 102.71 s +2026-04-09 11:24:55.704500: +2026-04-09 11:24:55.706507: Epoch 682 +2026-04-09 11:24:55.708954: Current learning rate: 0.00357 +2026-04-09 11:26:38.000024: train_loss -0.3304 +2026-04-09 11:26:38.007440: val_loss -0.2151 +2026-04-09 11:26:38.009567: Pseudo dice [0.7642, 0.1057, 0.6618, 0.1168, 0.5312, 0.7699, 0.8167] +2026-04-09 11:26:38.013089: Epoch time: 102.3 s +2026-04-09 11:26:39.176755: +2026-04-09 11:26:39.184673: Epoch 683 +2026-04-09 11:26:39.187630: Current learning rate: 0.00356 +2026-04-09 11:28:20.943556: train_loss -0.3447 +2026-04-09 11:28:20.952022: val_loss -0.2874 +2026-04-09 11:28:20.953830: Pseudo dice [0.7551, 0.3728, 0.7255, 0.2447, 0.4017, 0.6567, 0.7918] +2026-04-09 11:28:20.956584: Epoch time: 101.77 s +2026-04-09 11:28:22.167376: +2026-04-09 11:28:22.170340: Epoch 684 +2026-04-09 
11:28:22.172253: Current learning rate: 0.00355 +2026-04-09 11:30:04.895767: train_loss -0.3573 +2026-04-09 11:30:04.902018: val_loss -0.297 +2026-04-09 11:30:04.905462: Pseudo dice [0.7778, 0.5155, 0.7584, 0.7344, 0.4728, 0.7667, 0.787] +2026-04-09 11:30:04.909071: Epoch time: 102.73 s +2026-04-09 11:30:06.054618: +2026-04-09 11:30:06.057412: Epoch 685 +2026-04-09 11:30:06.059738: Current learning rate: 0.00354 +2026-04-09 11:31:49.409688: train_loss -0.3325 +2026-04-09 11:31:49.415995: val_loss -0.2605 +2026-04-09 11:31:49.418871: Pseudo dice [0.8007, 0.1771, 0.8004, 0.1868, 0.5281, 0.5275, 0.7195] +2026-04-09 11:31:49.421334: Epoch time: 103.36 s +2026-04-09 11:31:50.584467: +2026-04-09 11:31:50.586355: Epoch 686 +2026-04-09 11:31:50.588444: Current learning rate: 0.00353 +2026-04-09 11:33:32.300273: train_loss -0.3421 +2026-04-09 11:33:32.304996: val_loss -0.2086 +2026-04-09 11:33:32.306884: Pseudo dice [0.7864, 0.2087, 0.7552, 0.0139, 0.3416, 0.7972, 0.5492] +2026-04-09 11:33:32.308838: Epoch time: 101.72 s +2026-04-09 11:33:33.435297: +2026-04-09 11:33:33.437037: Epoch 687 +2026-04-09 11:33:33.438407: Current learning rate: 0.00352 +2026-04-09 11:35:15.106046: train_loss -0.35 +2026-04-09 11:35:15.111766: val_loss -0.2576 +2026-04-09 11:35:15.113703: Pseudo dice [0.8035, 0.5361, 0.6767, 0.1954, 0.113, 0.7521, 0.7631] +2026-04-09 11:35:15.116424: Epoch time: 101.67 s +2026-04-09 11:35:16.254486: +2026-04-09 11:35:16.256113: Epoch 688 +2026-04-09 11:35:16.257784: Current learning rate: 0.00351 +2026-04-09 11:36:57.702990: train_loss -0.3473 +2026-04-09 11:36:57.708827: val_loss -0.2763 +2026-04-09 11:36:57.710709: Pseudo dice [0.3177, 0.3985, 0.7775, 0.1794, 0.3568, 0.7737, 0.7768] +2026-04-09 11:36:57.714472: Epoch time: 101.45 s +2026-04-09 11:36:58.851796: +2026-04-09 11:36:58.854499: Epoch 689 +2026-04-09 11:36:58.857322: Current learning rate: 0.0035 +2026-04-09 11:38:40.917943: train_loss -0.3459 +2026-04-09 11:38:40.923084: val_loss -0.2606 +2026-04-09 
11:38:40.924994: Pseudo dice [0.7798, 0.5, 0.4987, 0.0003, 0.5295, 0.6559, 0.8976] +2026-04-09 11:38:40.927054: Epoch time: 102.07 s +2026-04-09 11:38:42.072064: +2026-04-09 11:38:42.074221: Epoch 690 +2026-04-09 11:38:42.076077: Current learning rate: 0.00349 +2026-04-09 11:40:23.394176: train_loss -0.3678 +2026-04-09 11:40:23.401591: val_loss -0.3087 +2026-04-09 11:40:23.403736: Pseudo dice [0.8259, 0.4545, 0.8571, 0.6041, 0.3145, 0.5134, 0.8529] +2026-04-09 11:40:23.406096: Epoch time: 101.33 s +2026-04-09 11:40:24.549568: +2026-04-09 11:40:24.551745: Epoch 691 +2026-04-09 11:40:24.553775: Current learning rate: 0.00348 +2026-04-09 11:42:06.029700: train_loss -0.3502 +2026-04-09 11:42:06.035722: val_loss -0.2904 +2026-04-09 11:42:06.037371: Pseudo dice [0.4628, 0.5012, 0.765, 0.0672, 0.5438, 0.7977, 0.6543] +2026-04-09 11:42:06.039836: Epoch time: 101.48 s +2026-04-09 11:42:07.190676: +2026-04-09 11:42:07.192514: Epoch 692 +2026-04-09 11:42:07.194200: Current learning rate: 0.00346 +2026-04-09 11:43:49.330620: train_loss -0.349 +2026-04-09 11:43:49.335338: val_loss -0.3048 +2026-04-09 11:43:49.337140: Pseudo dice [0.6388, 0.5469, 0.6788, 0.8163, 0.5617, 0.7993, 0.8281] +2026-04-09 11:43:49.339180: Epoch time: 102.14 s +2026-04-09 11:43:49.340704: Yayy! New best EMA pseudo Dice: 0.5681 +2026-04-09 11:43:52.249252: +2026-04-09 11:43:52.251935: Epoch 693 +2026-04-09 11:43:52.254777: Current learning rate: 0.00345 +2026-04-09 11:45:33.986559: train_loss -0.3547 +2026-04-09 11:45:33.993043: val_loss -0.2803 +2026-04-09 11:45:33.995132: Pseudo dice [0.7988, 0.3933, 0.7275, 0.5269, 0.4653, 0.7288, 0.4679] +2026-04-09 11:45:33.998505: Epoch time: 101.74 s +2026-04-09 11:45:34.000291: Yayy! 
New best EMA pseudo Dice: 0.57 +2026-04-09 11:45:36.800825: +2026-04-09 11:45:36.804193: Epoch 694 +2026-04-09 11:45:36.805778: Current learning rate: 0.00344 +2026-04-09 11:47:18.248203: train_loss -0.3336 +2026-04-09 11:47:18.253441: val_loss -0.2677 +2026-04-09 11:47:18.264287: Pseudo dice [0.6583, 0.358, 0.7608, 0.3064, 0.2886, 0.4019, 0.8302] +2026-04-09 11:47:18.266346: Epoch time: 101.45 s +2026-04-09 11:47:19.387250: +2026-04-09 11:47:19.389275: Epoch 695 +2026-04-09 11:47:19.391005: Current learning rate: 0.00343 +2026-04-09 11:49:01.947844: train_loss -0.35 +2026-04-09 11:49:01.973910: val_loss -0.3062 +2026-04-09 11:49:01.975917: Pseudo dice [0.5533, 0.4921, 0.6716, 0.7334, 0.4068, 0.8043, 0.9224] +2026-04-09 11:49:01.977681: Epoch time: 102.56 s +2026-04-09 11:49:01.979313: Yayy! New best EMA pseudo Dice: 0.5735 +2026-04-09 11:49:04.877942: +2026-04-09 11:49:04.893587: Epoch 696 +2026-04-09 11:49:04.895030: Current learning rate: 0.00342 +2026-04-09 11:50:47.164233: train_loss -0.3408 +2026-04-09 11:50:47.170297: val_loss -0.2713 +2026-04-09 11:50:47.172478: Pseudo dice [0.5501, 0.4453, 0.726, 0.1304, 0.4264, 0.6641, 0.8258] +2026-04-09 11:50:47.174738: Epoch time: 102.29 s +2026-04-09 11:50:48.332153: +2026-04-09 11:50:48.356137: Epoch 697 +2026-04-09 11:50:48.357853: Current learning rate: 0.00341 +2026-04-09 11:52:29.895442: train_loss -0.348 +2026-04-09 11:52:29.900900: val_loss -0.2721 +2026-04-09 11:52:29.902917: Pseudo dice [0.7959, 0.5962, 0.6488, 0.1091, 0.4398, 0.7716, 0.8181] +2026-04-09 11:52:29.905355: Epoch time: 101.57 s +2026-04-09 11:52:31.040972: +2026-04-09 11:52:31.042364: Epoch 698 +2026-04-09 11:52:31.044013: Current learning rate: 0.0034 +2026-04-09 11:54:12.344376: train_loss -0.3403 +2026-04-09 11:54:12.350333: val_loss -0.2439 +2026-04-09 11:54:12.352285: Pseudo dice [0.2908, 0.361, 0.6711, 0.0241, 0.2607, 0.7385, 0.6836] +2026-04-09 11:54:12.354905: Epoch time: 101.31 s +2026-04-09 11:54:13.515260: +2026-04-09 11:54:13.517378: 
Epoch 699 +2026-04-09 11:54:13.518909: Current learning rate: 0.00339 +2026-04-09 11:55:55.240330: train_loss -0.3449 +2026-04-09 11:55:55.246084: val_loss -0.2388 +2026-04-09 11:55:55.247657: Pseudo dice [0.8221, 0.0384, 0.5524, 0.1387, 0.4913, 0.7601, 0.7412] +2026-04-09 11:55:55.249907: Epoch time: 101.73 s +2026-04-09 11:55:58.011955: +2026-04-09 11:55:58.013503: Epoch 700 +2026-04-09 11:55:58.014785: Current learning rate: 0.00338 +2026-04-09 11:57:40.148856: train_loss -0.3395 +2026-04-09 11:57:40.155279: val_loss -0.2907 +2026-04-09 11:57:40.158586: Pseudo dice [0.8055, 0.3683, 0.8065, 0.4828, 0.4388, 0.7204, 0.8952] +2026-04-09 11:57:40.163227: Epoch time: 102.14 s +2026-04-09 11:57:41.289357: +2026-04-09 11:57:41.294506: Epoch 701 +2026-04-09 11:57:41.323677: Current learning rate: 0.00337 +2026-04-09 11:59:22.543727: train_loss -0.354 +2026-04-09 11:59:22.548899: val_loss -0.2428 +2026-04-09 11:59:22.550481: Pseudo dice [0.8417, 0.548, 0.6891, 0.0615, 0.3823, 0.4366, 0.804] +2026-04-09 11:59:22.552167: Epoch time: 101.26 s +2026-04-09 11:59:23.665277: +2026-04-09 11:59:23.667207: Epoch 702 +2026-04-09 11:59:23.668987: Current learning rate: 0.00336 +2026-04-09 12:01:06.337609: train_loss -0.3489 +2026-04-09 12:01:06.346925: val_loss -0.2948 +2026-04-09 12:01:06.349011: Pseudo dice [0.6765, 0.1442, 0.7431, 0.0485, 0.5019, 0.7195, 0.7374] +2026-04-09 12:01:06.352004: Epoch time: 102.68 s +2026-04-09 12:01:07.489842: +2026-04-09 12:01:07.491812: Epoch 703 +2026-04-09 12:01:07.494057: Current learning rate: 0.00335 +2026-04-09 12:02:50.298129: train_loss -0.3419 +2026-04-09 12:02:50.303131: val_loss -0.2041 +2026-04-09 12:02:50.305285: Pseudo dice [0.1458, 0.3326, 0.6456, 0.0792, 0.5255, 0.8663, 0.6396] +2026-04-09 12:02:50.307949: Epoch time: 102.81 s +2026-04-09 12:02:51.453269: +2026-04-09 12:02:51.455107: Epoch 704 +2026-04-09 12:02:51.456951: Current learning rate: 0.00334 +2026-04-09 12:04:32.826838: train_loss -0.3409 +2026-04-09 12:04:32.832469: 
val_loss -0.3106 +2026-04-09 12:04:32.834253: Pseudo dice [0.7356, 0.6628, 0.7659, 0.8048, 0.36, 0.5668, 0.8294] +2026-04-09 12:04:32.836207: Epoch time: 101.38 s +2026-04-09 12:04:33.957528: +2026-04-09 12:04:33.966620: Epoch 705 +2026-04-09 12:04:33.978439: Current learning rate: 0.00333 +2026-04-09 12:06:15.431267: train_loss -0.34 +2026-04-09 12:06:15.438565: val_loss -0.2806 +2026-04-09 12:06:15.440637: Pseudo dice [0.7707, 0.3695, 0.7362, 0.6385, 0.473, 0.6103, 0.6817] +2026-04-09 12:06:15.443087: Epoch time: 101.48 s +2026-04-09 12:06:16.576698: +2026-04-09 12:06:16.578413: Epoch 706 +2026-04-09 12:06:16.581258: Current learning rate: 0.00332 +2026-04-09 12:07:58.210129: train_loss -0.3317 +2026-04-09 12:07:58.215877: val_loss -0.2822 +2026-04-09 12:07:58.217546: Pseudo dice [0.6403, 0.4136, 0.658, 0.51, 0.3594, 0.7932, 0.7081] +2026-04-09 12:07:58.220432: Epoch time: 101.64 s +2026-04-09 12:07:59.364388: +2026-04-09 12:07:59.366564: Epoch 707 +2026-04-09 12:07:59.370329: Current learning rate: 0.00331 +2026-04-09 12:09:41.064414: train_loss -0.3295 +2026-04-09 12:09:41.069820: val_loss -0.2601 +2026-04-09 12:09:41.071935: Pseudo dice [0.6141, 0.3012, 0.6005, 0.9145, 0.2538, 0.4292, 0.7097] +2026-04-09 12:09:41.074363: Epoch time: 101.7 s +2026-04-09 12:09:42.245832: +2026-04-09 12:09:42.248228: Epoch 708 +2026-04-09 12:09:42.250357: Current learning rate: 0.0033 +2026-04-09 12:11:23.956709: train_loss -0.3139 +2026-04-09 12:11:23.967776: val_loss -0.2941 +2026-04-09 12:11:23.972821: Pseudo dice [0.6228, 0.1798, 0.6802, 0.4981, 0.5125, 0.7308, 0.8303] +2026-04-09 12:11:23.976668: Epoch time: 101.71 s +2026-04-09 12:11:25.142594: +2026-04-09 12:11:25.147441: Epoch 709 +2026-04-09 12:11:25.152958: Current learning rate: 0.00329 +2026-04-09 12:13:06.903289: train_loss -0.3413 +2026-04-09 12:13:06.910369: val_loss -0.2716 +2026-04-09 12:13:06.912483: Pseudo dice [0.4656, 0.29, 0.7677, 0.0003, 0.4396, 0.8719, 0.7275] +2026-04-09 12:13:06.915045: Epoch time: 
101.76 s +2026-04-09 12:13:08.077918: +2026-04-09 12:13:08.081277: Epoch 710 +2026-04-09 12:13:08.083202: Current learning rate: 0.00328 +2026-04-09 12:14:50.456127: train_loss -0.3372 +2026-04-09 12:14:50.463796: val_loss -0.3085 +2026-04-09 12:14:50.466079: Pseudo dice [0.8031, 0.5586, 0.6616, 0.4897, 0.4579, 0.8169, 0.8497] +2026-04-09 12:14:50.471053: Epoch time: 102.38 s +2026-04-09 12:14:51.617779: +2026-04-09 12:14:51.619813: Epoch 711 +2026-04-09 12:14:51.621574: Current learning rate: 0.00327 +2026-04-09 12:16:33.286327: train_loss -0.3369 +2026-04-09 12:16:33.302405: val_loss -0.2498 +2026-04-09 12:16:33.309374: Pseudo dice [0.7804, 0.4529, 0.7135, 0.0032, 0.4264, 0.5317, 0.5876] +2026-04-09 12:16:33.312171: Epoch time: 101.67 s +2026-04-09 12:16:34.468906: +2026-04-09 12:16:34.470413: Epoch 712 +2026-04-09 12:16:34.472121: Current learning rate: 0.00326 +2026-04-09 12:18:16.213570: train_loss -0.3518 +2026-04-09 12:18:16.219949: val_loss -0.3061 +2026-04-09 12:18:16.223093: Pseudo dice [0.6566, 0.4726, 0.7795, 0.1858, 0.5685, 0.8863, 0.796] +2026-04-09 12:18:16.227201: Epoch time: 101.75 s +2026-04-09 12:18:17.362411: +2026-04-09 12:18:17.365174: Epoch 713 +2026-04-09 12:18:17.367401: Current learning rate: 0.00325 +2026-04-09 12:19:59.139107: train_loss -0.3442 +2026-04-09 12:19:59.146060: val_loss -0.272 +2026-04-09 12:19:59.148520: Pseudo dice [0.8149, 0.5052, 0.7184, 0.4337, 0.3262, 0.7762, 0.529] +2026-04-09 12:19:59.151090: Epoch time: 101.78 s +2026-04-09 12:20:00.267315: +2026-04-09 12:20:00.269471: Epoch 714 +2026-04-09 12:20:00.271924: Current learning rate: 0.00324 +2026-04-09 12:21:43.553685: train_loss -0.35 +2026-04-09 12:21:43.564863: val_loss -0.2979 +2026-04-09 12:21:43.566869: Pseudo dice [0.3924, 0.4643, 0.782, 0.7298, 0.3345, 0.8911, 0.7589] +2026-04-09 12:21:43.568808: Epoch time: 103.29 s +2026-04-09 12:21:43.572433: Yayy! 
New best EMA pseudo Dice: 0.5757 +2026-04-09 12:21:46.444924: +2026-04-09 12:21:46.446944: Epoch 715 +2026-04-09 12:21:46.448505: Current learning rate: 0.00323 +2026-04-09 12:23:29.051078: train_loss -0.3481 +2026-04-09 12:23:29.066731: val_loss -0.2976 +2026-04-09 12:23:29.068705: Pseudo dice [0.7611, 0.5167, 0.7435, 0.025, 0.3894, 0.7999, 0.7749] +2026-04-09 12:23:29.071359: Epoch time: 102.61 s +2026-04-09 12:23:30.226069: +2026-04-09 12:23:30.228125: Epoch 716 +2026-04-09 12:23:30.229690: Current learning rate: 0.00322 +2026-04-09 12:25:12.055368: train_loss -0.3442 +2026-04-09 12:25:12.061368: val_loss -0.292 +2026-04-09 12:25:12.063158: Pseudo dice [0.7089, 0.4666, 0.8591, 0.0008, 0.3184, 0.6905, 0.8355] +2026-04-09 12:25:12.065554: Epoch time: 101.83 s +2026-04-09 12:25:13.204078: +2026-04-09 12:25:13.206388: Epoch 717 +2026-04-09 12:25:13.207932: Current learning rate: 0.00321 +2026-04-09 12:26:55.283070: train_loss -0.3488 +2026-04-09 12:26:55.289706: val_loss -0.2981 +2026-04-09 12:26:55.292728: Pseudo dice [0.8325, 0.5321, 0.751, 0.8529, 0.3435, 0.6988, 0.8779] +2026-04-09 12:26:55.295677: Epoch time: 102.08 s +2026-04-09 12:26:55.298290: Yayy! 
New best EMA pseudo Dice: 0.5858 +2026-04-09 12:26:58.161072: +2026-04-09 12:26:58.162933: Epoch 718 +2026-04-09 12:26:58.164356: Current learning rate: 0.0032 +2026-04-09 12:28:40.822615: train_loss -0.3502 +2026-04-09 12:28:40.831401: val_loss -0.2514 +2026-04-09 12:28:40.835009: Pseudo dice [0.7355, 0.4782, 0.7299, 0.103, 0.3591, 0.8505, 0.6704] +2026-04-09 12:28:40.838429: Epoch time: 102.66 s +2026-04-09 12:28:41.973322: +2026-04-09 12:28:41.975891: Epoch 719 +2026-04-09 12:28:41.977748: Current learning rate: 0.00319 +2026-04-09 12:30:24.145459: train_loss -0.3455 +2026-04-09 12:30:24.151497: val_loss -0.2974 +2026-04-09 12:30:24.153328: Pseudo dice [0.4044, 0.4686, 0.7435, 0.653, 0.4239, 0.5398, 0.7515] +2026-04-09 12:30:24.155496: Epoch time: 102.18 s +2026-04-09 12:30:25.283015: +2026-04-09 12:30:25.287006: Epoch 720 +2026-04-09 12:30:25.291148: Current learning rate: 0.00318 +2026-04-09 12:32:07.259793: train_loss -0.3375 +2026-04-09 12:32:07.264461: val_loss -0.2926 +2026-04-09 12:32:07.266413: Pseudo dice [0.7876, 0.357, 0.7394, 0.0012, 0.4835, 0.8073, 0.7033] +2026-04-09 12:32:07.269356: Epoch time: 101.98 s +2026-04-09 12:32:08.409814: +2026-04-09 12:32:08.413528: Epoch 721 +2026-04-09 12:32:08.415179: Current learning rate: 0.00317 +2026-04-09 12:33:50.506155: train_loss -0.3485 +2026-04-09 12:33:50.512648: val_loss -0.3114 +2026-04-09 12:33:50.515203: Pseudo dice [0.5962, 0.4953, 0.8, 0.8453, 0.431, 0.7348, 0.8445] +2026-04-09 12:33:50.517462: Epoch time: 102.1 s +2026-04-09 12:33:50.519921: Yayy! New best EMA pseudo Dice: 0.589 +2026-04-09 12:33:53.379569: +2026-04-09 12:33:53.381594: Epoch 722 +2026-04-09 12:33:53.383061: Current learning rate: 0.00316 +2026-04-09 12:35:37.445756: train_loss -0.3509 +2026-04-09 12:35:37.451519: val_loss -0.2972 +2026-04-09 12:35:37.453736: Pseudo dice [0.8028, 0.5823, 0.7445, 0.0055, 0.5023, 0.6905, 0.8378] +2026-04-09 12:35:37.455910: Epoch time: 104.07 s +2026-04-09 12:35:37.457909: Yayy! 
New best EMA pseudo Dice: 0.5896 +2026-04-09 12:35:40.314017: +2026-04-09 12:35:40.315472: Epoch 723 +2026-04-09 12:35:40.316839: Current learning rate: 0.00315 +2026-04-09 12:37:22.923246: train_loss -0.3572 +2026-04-09 12:37:22.928833: val_loss -0.2196 +2026-04-09 12:37:22.931961: Pseudo dice [0.3634, 0.4625, 0.7578, 0.0148, 0.3341, 0.9136, 0.886] +2026-04-09 12:37:22.934249: Epoch time: 102.61 s +2026-04-09 12:37:24.115137: +2026-04-09 12:37:24.118875: Epoch 724 +2026-04-09 12:37:24.122473: Current learning rate: 0.00314 +2026-04-09 12:39:06.602849: train_loss -0.3403 +2026-04-09 12:39:06.609770: val_loss -0.2715 +2026-04-09 12:39:06.611921: Pseudo dice [0.8581, 0.0327, 0.6957, 0.6891, 0.4483, 0.4601, 0.6147] +2026-04-09 12:39:06.614813: Epoch time: 102.49 s +2026-04-09 12:39:07.764297: +2026-04-09 12:39:07.766113: Epoch 725 +2026-04-09 12:39:07.768315: Current learning rate: 0.00313 +2026-04-09 12:40:50.489496: train_loss -0.3305 +2026-04-09 12:40:50.496693: val_loss -0.2998 +2026-04-09 12:40:50.506998: Pseudo dice [0.6188, 0.2845, 0.7985, 0.0801, 0.4476, 0.8435, 0.8502] +2026-04-09 12:40:50.510374: Epoch time: 102.73 s +2026-04-09 12:40:51.670356: +2026-04-09 12:40:51.678574: Epoch 726 +2026-04-09 12:40:51.680758: Current learning rate: 0.00312 +2026-04-09 12:42:34.638061: train_loss -0.3506 +2026-04-09 12:42:34.662866: val_loss -0.2044 +2026-04-09 12:42:34.664995: Pseudo dice [0.8604, 0.2532, 0.4746, 0.0161, 0.4399, 0.7596, 0.7616] +2026-04-09 12:42:34.673893: Epoch time: 102.97 s +2026-04-09 12:42:35.825927: +2026-04-09 12:42:35.829546: Epoch 727 +2026-04-09 12:42:35.831558: Current learning rate: 0.00311 +2026-04-09 12:44:17.661034: train_loss -0.3506 +2026-04-09 12:44:17.688364: val_loss -0.2056 +2026-04-09 12:44:17.694306: Pseudo dice [0.6498, 0.3208, 0.767, 0.082, 0.3232, 0.7881, 0.5974] +2026-04-09 12:44:17.704252: Epoch time: 101.84 s +2026-04-09 12:44:18.815252: +2026-04-09 12:44:18.817125: Epoch 728 +2026-04-09 12:44:18.818740: Current learning rate: 
0.0031 +2026-04-09 12:46:00.340390: train_loss -0.346 +2026-04-09 12:46:00.346847: val_loss -0.3196 +2026-04-09 12:46:00.350216: Pseudo dice [0.834, 0.1829, 0.7752, 0.693, 0.2814, 0.7917, 0.867] +2026-04-09 12:46:00.353050: Epoch time: 101.53 s +2026-04-09 12:46:01.494066: +2026-04-09 12:46:01.496620: Epoch 729 +2026-04-09 12:46:01.498608: Current learning rate: 0.00309 +2026-04-09 12:47:44.295692: train_loss -0.3239 +2026-04-09 12:47:44.304824: val_loss -0.3205 +2026-04-09 12:47:44.306610: Pseudo dice [0.8426, 0.5157, 0.7874, 0.5995, 0.5265, 0.7578, 0.8816] +2026-04-09 12:47:44.310898: Epoch time: 102.8 s +2026-04-09 12:47:45.445702: +2026-04-09 12:47:45.461335: Epoch 730 +2026-04-09 12:47:45.464928: Current learning rate: 0.00308 +2026-04-09 12:49:29.555596: train_loss -0.3172 +2026-04-09 12:49:29.580805: val_loss -0.2236 +2026-04-09 12:49:29.583564: Pseudo dice [0.4558, 0.5041, 0.7963, 0.037, 0.3562, 0.5098, 0.7772] +2026-04-09 12:49:29.585802: Epoch time: 104.11 s +2026-04-09 12:49:30.721176: +2026-04-09 12:49:30.724659: Epoch 731 +2026-04-09 12:49:30.726995: Current learning rate: 0.00307 +2026-04-09 12:51:13.209823: train_loss -0.3348 +2026-04-09 12:51:13.215207: val_loss -0.2878 +2026-04-09 12:51:13.217658: Pseudo dice [0.7214, 0.5626, 0.7026, 0.4819, 0.4868, 0.6801, 0.7928] +2026-04-09 12:51:13.220250: Epoch time: 102.49 s +2026-04-09 12:51:14.391596: +2026-04-09 12:51:14.393337: Epoch 732 +2026-04-09 12:51:14.394947: Current learning rate: 0.00306 +2026-04-09 12:52:56.093605: train_loss -0.3588 +2026-04-09 12:52:56.099058: val_loss -0.2963 +2026-04-09 12:52:56.101499: Pseudo dice [0.8443, 0.1217, 0.7306, 0.8676, 0.2681, 0.7947, 0.8825] +2026-04-09 12:52:56.103414: Epoch time: 101.71 s +2026-04-09 12:52:57.240734: +2026-04-09 12:52:57.249505: Epoch 733 +2026-04-09 12:52:57.254087: Current learning rate: 0.00305 +2026-04-09 12:54:38.910736: train_loss -0.3528 +2026-04-09 12:54:38.917198: val_loss -0.3129 +2026-04-09 12:54:38.919189: Pseudo dice [0.3456, 
0.0753, 0.7204, 0.8479, 0.3926, 0.7459, 0.8617] +2026-04-09 12:54:38.921302: Epoch time: 101.67 s +2026-04-09 12:54:40.052982: +2026-04-09 12:54:40.054593: Epoch 734 +2026-04-09 12:54:40.056257: Current learning rate: 0.00304 +2026-04-09 12:56:21.794277: train_loss -0.3499 +2026-04-09 12:56:21.800580: val_loss -0.3041 +2026-04-09 12:56:21.802623: Pseudo dice [0.8367, 0.4965, 0.6466, 0.3687, 0.3839, 0.437, 0.8586] +2026-04-09 12:56:21.805879: Epoch time: 101.74 s +2026-04-09 12:56:22.953692: +2026-04-09 12:56:22.955423: Epoch 735 +2026-04-09 12:56:22.957698: Current learning rate: 0.00303 +2026-04-09 12:58:04.316458: train_loss -0.3554 +2026-04-09 12:58:04.323398: val_loss -0.2191 +2026-04-09 12:58:04.325388: Pseudo dice [0.7328, 0.3445, 0.7071, 0.0936, 0.3069, 0.5627, 0.8444] +2026-04-09 12:58:04.327799: Epoch time: 101.37 s +2026-04-09 12:58:05.461504: +2026-04-09 12:58:05.463096: Epoch 736 +2026-04-09 12:58:05.464774: Current learning rate: 0.00302 +2026-04-09 12:59:47.219700: train_loss -0.3499 +2026-04-09 12:59:47.225884: val_loss -0.2877 +2026-04-09 12:59:47.228993: Pseudo dice [0.5531, 0.1978, 0.7965, 0.1666, 0.2612, 0.5023, 0.8502] +2026-04-09 12:59:47.231204: Epoch time: 101.76 s +2026-04-09 12:59:48.383702: +2026-04-09 12:59:48.385731: Epoch 737 +2026-04-09 12:59:48.387434: Current learning rate: 0.00301 +2026-04-09 13:01:30.218404: train_loss -0.3434 +2026-04-09 13:01:30.225687: val_loss -0.3058 +2026-04-09 13:01:30.227453: Pseudo dice [0.8001, 0.4369, 0.7814, 0.0028, 0.3684, 0.6898, 0.881] +2026-04-09 13:01:30.229977: Epoch time: 101.84 s +2026-04-09 13:01:31.357889: +2026-04-09 13:01:31.359628: Epoch 738 +2026-04-09 13:01:31.361005: Current learning rate: 0.003 +2026-04-09 13:03:13.330184: train_loss -0.3504 +2026-04-09 13:03:13.337729: val_loss -0.2568 +2026-04-09 13:03:13.340199: Pseudo dice [0.9194, 0.0638, 0.7239, 0.0895, 0.5652, 0.8534, 0.779] +2026-04-09 13:03:13.343509: Epoch time: 101.98 s +2026-04-09 13:03:14.465692: +2026-04-09 
13:03:14.468130: Epoch 739 +2026-04-09 13:03:14.470159: Current learning rate: 0.00299 +2026-04-09 13:04:55.843274: train_loss -0.3628 +2026-04-09 13:04:55.849720: val_loss -0.292 +2026-04-09 13:04:55.854089: Pseudo dice [0.42, 0.5924, 0.8225, 0.4293, 0.5247, 0.7526, 0.5475] +2026-04-09 13:04:55.856750: Epoch time: 101.38 s +2026-04-09 13:04:56.983239: +2026-04-09 13:04:56.984953: Epoch 740 +2026-04-09 13:04:56.987056: Current learning rate: 0.00297 +2026-04-09 13:06:39.058033: train_loss -0.3609 +2026-04-09 13:06:39.065133: val_loss -0.2875 +2026-04-09 13:06:39.067013: Pseudo dice [0.6075, 0.4534, 0.8432, 0.2492, 0.3109, 0.7733, 0.4233] +2026-04-09 13:06:39.069062: Epoch time: 102.08 s +2026-04-09 13:06:40.220690: +2026-04-09 13:06:40.223966: Epoch 741 +2026-04-09 13:06:40.227048: Current learning rate: 0.00296 +2026-04-09 13:08:21.520376: train_loss -0.3547 +2026-04-09 13:08:21.525517: val_loss -0.239 +2026-04-09 13:08:21.527371: Pseudo dice [0.2084, 0.0224, 0.511, 0.0805, 0.3889, 0.8047, 0.5483] +2026-04-09 13:08:21.529630: Epoch time: 101.3 s +2026-04-09 13:08:23.802529: +2026-04-09 13:08:23.804727: Epoch 742 +2026-04-09 13:08:23.806274: Current learning rate: 0.00295 +2026-04-09 13:10:09.431950: train_loss -0.3506 +2026-04-09 13:10:09.438451: val_loss -0.2851 +2026-04-09 13:10:09.440557: Pseudo dice [0.4052, 0.5811, 0.817, 0.2058, 0.4263, 0.8618, 0.7769] +2026-04-09 13:10:09.443557: Epoch time: 105.63 s +2026-04-09 13:10:10.580297: +2026-04-09 13:10:10.582773: Epoch 743 +2026-04-09 13:10:10.585389: Current learning rate: 0.00294 +2026-04-09 13:11:52.166194: train_loss -0.3564 +2026-04-09 13:11:52.172695: val_loss -0.2703 +2026-04-09 13:11:52.174722: Pseudo dice [0.2915, 0.1445, 0.6569, 0.2715, 0.3044, 0.4545, 0.8583] +2026-04-09 13:11:52.177038: Epoch time: 101.59 s +2026-04-09 13:11:53.339890: +2026-04-09 13:11:53.345009: Epoch 744 +2026-04-09 13:11:53.348122: Current learning rate: 0.00293 +2026-04-09 13:13:35.022725: train_loss -0.3519 +2026-04-09 
13:13:35.031796: val_loss -0.2254 +2026-04-09 13:13:35.034693: Pseudo dice [0.7101, 0.5515, 0.7869, 0.0322, 0.4378, 0.7051, 0.7422] +2026-04-09 13:13:35.037293: Epoch time: 101.69 s +2026-04-09 13:13:36.206073: +2026-04-09 13:13:36.207706: Epoch 745 +2026-04-09 13:13:36.210076: Current learning rate: 0.00292 +2026-04-09 13:15:17.582109: train_loss -0.357 +2026-04-09 13:15:17.588348: val_loss -0.2755 +2026-04-09 13:15:17.590083: Pseudo dice [0.422, 0.4964, 0.7326, 0.7247, 0.4818, 0.8088, 0.6063] +2026-04-09 13:15:17.594267: Epoch time: 101.38 s +2026-04-09 13:15:18.735376: +2026-04-09 13:15:18.737366: Epoch 746 +2026-04-09 13:15:18.740039: Current learning rate: 0.00291 +2026-04-09 13:17:00.874658: train_loss -0.3425 +2026-04-09 13:17:00.882164: val_loss -0.2981 +2026-04-09 13:17:00.884692: Pseudo dice [0.1874, 0.1886, 0.7523, 0.0112, 0.4174, 0.7266, 0.7749] +2026-04-09 13:17:00.887975: Epoch time: 102.14 s +2026-04-09 13:17:02.047331: +2026-04-09 13:17:02.049518: Epoch 747 +2026-04-09 13:17:02.051514: Current learning rate: 0.0029 +2026-04-09 13:18:43.973418: train_loss -0.3386 +2026-04-09 13:18:43.979448: val_loss -0.3258 +2026-04-09 13:18:43.982833: Pseudo dice [0.8937, 0.3739, 0.6931, 0.872, 0.4636, 0.7476, 0.9007] +2026-04-09 13:18:43.985904: Epoch time: 101.93 s +2026-04-09 13:18:45.163584: +2026-04-09 13:18:45.165823: Epoch 748 +2026-04-09 13:18:45.167764: Current learning rate: 0.00289 +2026-04-09 13:20:26.742428: train_loss -0.3616 +2026-04-09 13:20:26.748080: val_loss -0.3123 +2026-04-09 13:20:26.750346: Pseudo dice [0.8478, 0.1341, 0.6768, 0.8441, 0.361, 0.7414, 0.8256] +2026-04-09 13:20:26.753051: Epoch time: 101.58 s +2026-04-09 13:20:27.914317: +2026-04-09 13:20:27.916252: Epoch 749 +2026-04-09 13:20:27.919456: Current learning rate: 0.00288 +2026-04-09 13:22:10.772110: train_loss -0.3589 +2026-04-09 13:22:10.778506: val_loss -0.2307 +2026-04-09 13:22:10.780919: Pseudo dice [0.8096, 0.478, 0.5722, 0.0907, 0.2632, 0.1899, 0.8262] +2026-04-09 
13:22:10.783731: Epoch time: 102.86 s +2026-04-09 13:22:13.587660: +2026-04-09 13:22:13.589295: Epoch 750 +2026-04-09 13:22:13.590661: Current learning rate: 0.00287 +2026-04-09 13:23:54.841231: train_loss -0.349 +2026-04-09 13:23:54.846228: val_loss -0.2692 +2026-04-09 13:23:54.848281: Pseudo dice [0.8171, 0.1555, 0.7361, 0.1446, 0.4988, 0.752, 0.8102] +2026-04-09 13:23:54.851083: Epoch time: 101.26 s +2026-04-09 13:23:56.008070: +2026-04-09 13:23:56.010282: Epoch 751 +2026-04-09 13:23:56.012135: Current learning rate: 0.00286 +2026-04-09 13:25:37.717054: train_loss -0.336 +2026-04-09 13:25:37.723271: val_loss -0.2941 +2026-04-09 13:25:37.725410: Pseudo dice [0.2476, 0.45, 0.734, 0.0298, 0.295, 0.7872, 0.8086] +2026-04-09 13:25:37.728130: Epoch time: 101.71 s +2026-04-09 13:25:38.892134: +2026-04-09 13:25:38.894224: Epoch 752 +2026-04-09 13:25:38.896300: Current learning rate: 0.00285 +2026-04-09 13:27:20.385890: train_loss -0.3525 +2026-04-09 13:27:20.391502: val_loss -0.3111 +2026-04-09 13:27:20.393506: Pseudo dice [0.8619, 0.2235, 0.8158, 0.4329, 0.4266, 0.8497, 0.8968] +2026-04-09 13:27:20.395517: Epoch time: 101.5 s +2026-04-09 13:27:21.540613: +2026-04-09 13:27:21.543513: Epoch 753 +2026-04-09 13:27:21.545732: Current learning rate: 0.00284 +2026-04-09 13:29:05.290934: train_loss -0.3623 +2026-04-09 13:29:05.298734: val_loss -0.2759 +2026-04-09 13:29:05.302136: Pseudo dice [0.823, 0.2722, 0.7699, 0.0005, 0.45, 0.7008, 0.8644] +2026-04-09 13:29:05.304979: Epoch time: 103.75 s +2026-04-09 13:29:06.457165: +2026-04-09 13:29:06.459302: Epoch 754 +2026-04-09 13:29:06.462447: Current learning rate: 0.00283 +2026-04-09 13:30:48.750335: train_loss -0.3687 +2026-04-09 13:30:48.789009: val_loss -0.2776 +2026-04-09 13:30:48.791494: Pseudo dice [0.6544, 0.6442, 0.7149, 0.0712, 0.357, 0.8961, 0.8508] +2026-04-09 13:30:48.794336: Epoch time: 102.3 s +2026-04-09 13:30:50.010270: +2026-04-09 13:30:50.013295: Epoch 755 +2026-04-09 13:30:50.020649: Current learning rate: 
0.00282 +2026-04-09 13:32:32.432908: train_loss -0.3485 +2026-04-09 13:32:32.440954: val_loss -0.2897 +2026-04-09 13:32:32.443499: Pseudo dice [0.6243, 0.1326, 0.7759, 0.8431, 0.25, 0.8187, 0.8401] +2026-04-09 13:32:32.446043: Epoch time: 102.43 s +2026-04-09 13:32:33.577163: +2026-04-09 13:32:33.580225: Epoch 756 +2026-04-09 13:32:33.582474: Current learning rate: 0.00281 +2026-04-09 13:34:16.543590: train_loss -0.3472 +2026-04-09 13:34:16.551491: val_loss -0.2979 +2026-04-09 13:34:16.553515: Pseudo dice [0.7882, 0.6324, 0.6753, 0.4237, 0.2857, 0.7626, 0.6247] +2026-04-09 13:34:16.556481: Epoch time: 102.97 s +2026-04-09 13:34:17.699402: +2026-04-09 13:34:17.702175: Epoch 757 +2026-04-09 13:34:17.704130: Current learning rate: 0.0028 +2026-04-09 13:36:00.941497: train_loss -0.3396 +2026-04-09 13:36:00.960318: val_loss -0.2869 +2026-04-09 13:36:00.963535: Pseudo dice [0.3493, 0.4744, 0.8229, 0.3228, 0.3003, 0.8471, 0.785] +2026-04-09 13:36:00.967173: Epoch time: 103.25 s +2026-04-09 13:36:02.109455: +2026-04-09 13:36:02.111598: Epoch 758 +2026-04-09 13:36:02.113509: Current learning rate: 0.00279 +2026-04-09 13:37:45.781592: train_loss -0.3493 +2026-04-09 13:37:45.788501: val_loss -0.2523 +2026-04-09 13:37:45.791419: Pseudo dice [0.7438, 0.1163, 0.7097, 0.0949, 0.4134, 0.8158, 0.8556] +2026-04-09 13:37:45.793699: Epoch time: 103.68 s +2026-04-09 13:37:46.971023: +2026-04-09 13:37:46.973825: Epoch 759 +2026-04-09 13:37:46.977339: Current learning rate: 0.00278 +2026-04-09 13:39:29.977992: train_loss -0.3574 +2026-04-09 13:39:29.991208: val_loss -0.299 +2026-04-09 13:39:29.993748: Pseudo dice [0.7353, 0.545, 0.697, 0.7958, 0.4122, 0.7004, 0.65] +2026-04-09 13:39:29.997527: Epoch time: 103.01 s +2026-04-09 13:39:31.151594: +2026-04-09 13:39:31.153175: Epoch 760 +2026-04-09 13:39:31.154672: Current learning rate: 0.00277 +2026-04-09 13:41:15.347449: train_loss -0.3537 +2026-04-09 13:41:15.362408: val_loss -0.2923 +2026-04-09 13:41:15.379569: Pseudo dice [0.8363, 
0.4378, 0.7557, 0.8256, 0.2484, 0.7793, 0.7402] +2026-04-09 13:41:15.384109: Epoch time: 104.2 s +2026-04-09 13:41:16.566217: +2026-04-09 13:41:16.574480: Epoch 761 +2026-04-09 13:41:16.578468: Current learning rate: 0.00276 +2026-04-09 13:43:00.038751: train_loss -0.3613 +2026-04-09 13:43:00.046718: val_loss -0.3338 +2026-04-09 13:43:00.048651: Pseudo dice [0.8376, 0.7549, 0.7619, 0.8407, 0.3956, 0.8729, 0.9096] +2026-04-09 13:43:00.051008: Epoch time: 103.48 s +2026-04-09 13:43:00.052840: Yayy! New best EMA pseudo Dice: 0.5994 +2026-04-09 13:43:03.045311: +2026-04-09 13:43:03.047768: Epoch 762 +2026-04-09 13:43:03.049259: Current learning rate: 0.00275 +2026-04-09 13:44:44.836886: train_loss -0.3543 +2026-04-09 13:44:44.845115: val_loss -0.2762 +2026-04-09 13:44:44.847331: Pseudo dice [0.2321, 0.5471, 0.8151, 0.4204, 0.392, 0.6825, 0.7106] +2026-04-09 13:44:44.849905: Epoch time: 101.79 s +2026-04-09 13:44:46.006601: +2026-04-09 13:44:46.008659: Epoch 763 +2026-04-09 13:44:46.010315: Current learning rate: 0.00274 +2026-04-09 13:46:27.633048: train_loss -0.3411 +2026-04-09 13:46:27.639773: val_loss -0.2954 +2026-04-09 13:46:27.642015: Pseudo dice [0.8736, 0.4647, 0.6415, 0.1811, 0.3496, 0.773, 0.8952] +2026-04-09 13:46:27.644783: Epoch time: 101.63 s +2026-04-09 13:46:28.830588: +2026-04-09 13:46:28.832649: Epoch 764 +2026-04-09 13:46:28.834108: Current learning rate: 0.00273 +2026-04-09 13:48:10.556016: train_loss -0.3569 +2026-04-09 13:48:10.561416: val_loss -0.2196 +2026-04-09 13:48:10.563607: Pseudo dice [0.552, 0.3847, 0.6004, 0.0481, 0.3631, 0.8912, 0.6971] +2026-04-09 13:48:10.565834: Epoch time: 101.73 s +2026-04-09 13:48:11.779478: +2026-04-09 13:48:11.781232: Epoch 765 +2026-04-09 13:48:11.782707: Current learning rate: 0.00272 +2026-04-09 13:49:54.961540: train_loss -0.3492 +2026-04-09 13:49:54.998365: val_loss -0.1778 +2026-04-09 13:49:55.003191: Pseudo dice [0.3329, 0.2636, 0.6115, 0.1047, 0.2772, 0.8551, 0.5821] +2026-04-09 13:49:55.006521: Epoch 
time: 103.19 s +2026-04-09 13:49:56.209920: +2026-04-09 13:49:56.214725: Epoch 766 +2026-04-09 13:49:56.220577: Current learning rate: 0.00271 +2026-04-09 13:51:38.168793: train_loss -0.3569 +2026-04-09 13:51:38.177012: val_loss -0.3078 +2026-04-09 13:51:38.178904: Pseudo dice [0.6928, 0.5569, 0.8474, 0.1849, 0.3829, 0.8707, 0.7791] +2026-04-09 13:51:38.181501: Epoch time: 101.96 s +2026-04-09 13:51:39.370412: +2026-04-09 13:51:39.372017: Epoch 767 +2026-04-09 13:51:39.373397: Current learning rate: 0.0027 +2026-04-09 13:53:20.878674: train_loss -0.3668 +2026-04-09 13:53:20.885865: val_loss -0.2933 +2026-04-09 13:53:20.888597: Pseudo dice [0.644, 0.0755, 0.8536, 0.0001, 0.433, 0.8461, 0.8026] +2026-04-09 13:53:20.895171: Epoch time: 101.51 s +2026-04-09 13:53:22.078033: +2026-04-09 13:53:22.079686: Epoch 768 +2026-04-09 13:53:22.081204: Current learning rate: 0.00268 +2026-04-09 13:55:06.066347: train_loss -0.3499 +2026-04-09 13:55:06.073833: val_loss -0.3075 +2026-04-09 13:55:06.075897: Pseudo dice [0.7973, 0.3342, 0.7025, 0.6779, 0.4107, 0.8023, 0.8277] +2026-04-09 13:55:06.078980: Epoch time: 103.99 s +2026-04-09 13:55:07.277214: +2026-04-09 13:55:07.279743: Epoch 769 +2026-04-09 13:55:07.281569: Current learning rate: 0.00267 +2026-04-09 13:56:48.558043: train_loss -0.3594 +2026-04-09 13:56:48.563291: val_loss -0.2583 +2026-04-09 13:56:48.564883: Pseudo dice [0.751, 0.3412, 0.7714, 0.1664, 0.4024, 0.6418, 0.5788] +2026-04-09 13:56:48.567248: Epoch time: 101.28 s +2026-04-09 13:56:49.731532: +2026-04-09 13:56:49.733705: Epoch 770 +2026-04-09 13:56:49.735733: Current learning rate: 0.00266 +2026-04-09 13:58:32.226130: train_loss -0.349 +2026-04-09 13:58:32.236503: val_loss -0.2949 +2026-04-09 13:58:32.238983: Pseudo dice [0.2295, 0.4644, 0.85, 0.0417, 0.3617, 0.848, 0.8086] +2026-04-09 13:58:32.241656: Epoch time: 102.5 s +2026-04-09 13:58:33.459706: +2026-04-09 13:58:33.461312: Epoch 771 +2026-04-09 13:58:33.462988: Current learning rate: 0.00265 +2026-04-09 
14:00:14.949118: train_loss -0.3577 +2026-04-09 14:00:14.955279: val_loss -0.2995 +2026-04-09 14:00:14.957138: Pseudo dice [0.506, 0.5377, 0.8185, 0.0192, 0.3785, 0.7462, 0.8582] +2026-04-09 14:00:14.959307: Epoch time: 101.49 s +2026-04-09 14:00:16.110641: +2026-04-09 14:00:16.112627: Epoch 772 +2026-04-09 14:00:16.114431: Current learning rate: 0.00264 +2026-04-09 14:01:58.301966: train_loss -0.3551 +2026-04-09 14:01:58.309614: val_loss -0.2364 +2026-04-09 14:01:58.311489: Pseudo dice [0.2545, 0.5549, 0.6923, 0.0112, 0.1853, 0.5876, 0.8319] +2026-04-09 14:01:58.313889: Epoch time: 102.19 s +2026-04-09 14:01:59.457454: +2026-04-09 14:01:59.459139: Epoch 773 +2026-04-09 14:01:59.460735: Current learning rate: 0.00263 +2026-04-09 14:03:41.899537: train_loss -0.3584 +2026-04-09 14:03:41.911072: val_loss -0.2846 +2026-04-09 14:03:41.914293: Pseudo dice [0.653, 0.1349, 0.7896, 0.0592, 0.4619, 0.6518, 0.8084] +2026-04-09 14:03:41.918053: Epoch time: 102.45 s +2026-04-09 14:03:43.077691: +2026-04-09 14:03:43.080052: Epoch 774 +2026-04-09 14:03:43.082298: Current learning rate: 0.00262 +2026-04-09 14:05:24.617726: train_loss -0.3554 +2026-04-09 14:05:24.624135: val_loss -0.2341 +2026-04-09 14:05:24.625956: Pseudo dice [0.8335, 0.1346, 0.7876, 0.078, 0.3603, 0.7407, 0.8699] +2026-04-09 14:05:24.628165: Epoch time: 101.54 s +2026-04-09 14:05:25.798650: +2026-04-09 14:05:25.800163: Epoch 775 +2026-04-09 14:05:25.801911: Current learning rate: 0.00261 +2026-04-09 14:07:07.762633: train_loss -0.362 +2026-04-09 14:07:07.779402: val_loss -0.2953 +2026-04-09 14:07:07.783973: Pseudo dice [0.6033, 0.2857, 0.7478, 0.602, 0.2828, 0.6976, 0.6485] +2026-04-09 14:07:07.788344: Epoch time: 101.97 s +2026-04-09 14:07:08.959526: +2026-04-09 14:07:08.962795: Epoch 776 +2026-04-09 14:07:08.966543: Current learning rate: 0.0026 +2026-04-09 14:08:50.359398: train_loss -0.3601 +2026-04-09 14:08:50.363956: val_loss -0.2974 +2026-04-09 14:08:50.365707: Pseudo dice [0.7662, 0.2016, 0.8282, 0.5054, 
0.4504, 0.7662, 0.7823] +2026-04-09 14:08:50.367458: Epoch time: 101.4 s +2026-04-09 14:08:51.520822: +2026-04-09 14:08:51.522375: Epoch 777 +2026-04-09 14:08:51.523786: Current learning rate: 0.00259 +2026-04-09 14:10:33.265244: train_loss -0.369 +2026-04-09 14:10:33.272571: val_loss -0.2117 +2026-04-09 14:10:33.274632: Pseudo dice [0.8082, 0.3292, 0.7619, 0.0904, 0.2586, 0.8898, 0.6858] +2026-04-09 14:10:33.277092: Epoch time: 101.75 s +2026-04-09 14:10:34.433804: +2026-04-09 14:10:34.437330: Epoch 778 +2026-04-09 14:10:34.439134: Current learning rate: 0.00258 +2026-04-09 14:12:16.287065: train_loss -0.3551 +2026-04-09 14:12:16.294421: val_loss -0.2965 +2026-04-09 14:12:16.296071: Pseudo dice [0.508, 0.1555, 0.7262, 0.4484, 0.3656, 0.6949, 0.8548] +2026-04-09 14:12:16.299353: Epoch time: 101.86 s +2026-04-09 14:12:17.482342: +2026-04-09 14:12:17.484653: Epoch 779 +2026-04-09 14:12:17.487066: Current learning rate: 0.00257 +2026-04-09 14:13:59.129908: train_loss -0.3559 +2026-04-09 14:13:59.148366: val_loss -0.2822 +2026-04-09 14:13:59.153744: Pseudo dice [0.7318, 0.0604, 0.6768, 0.4886, 0.2656, 0.7534, 0.6078] +2026-04-09 14:13:59.156452: Epoch time: 101.65 s +2026-04-09 14:14:00.331971: +2026-04-09 14:14:00.333436: Epoch 780 +2026-04-09 14:14:00.335053: Current learning rate: 0.00256 +2026-04-09 14:15:43.159253: train_loss -0.3682 +2026-04-09 14:15:43.164959: val_loss -0.3096 +2026-04-09 14:15:43.167037: Pseudo dice [0.431, 0.1714, 0.8098, 0.3002, 0.3613, 0.8789, 0.8596] +2026-04-09 14:15:43.169223: Epoch time: 102.83 s +2026-04-09 14:15:44.312507: +2026-04-09 14:15:44.314608: Epoch 781 +2026-04-09 14:15:44.316035: Current learning rate: 0.00255 +2026-04-09 14:17:26.118487: train_loss -0.3533 +2026-04-09 14:17:26.125350: val_loss -0.2839 +2026-04-09 14:17:26.127050: Pseudo dice [0.5902, 0.1698, 0.7819, 0.3478, 0.4683, 0.8667, 0.7245] +2026-04-09 14:17:26.129519: Epoch time: 101.81 s +2026-04-09 14:17:27.292781: +2026-04-09 14:17:27.294332: Epoch 782 +2026-04-09 
14:17:27.295841: Current learning rate: 0.00254 +2026-04-09 14:19:09.190068: train_loss -0.3558 +2026-04-09 14:19:09.196155: val_loss -0.2861 +2026-04-09 14:19:09.198987: Pseudo dice [0.3528, 0.4987, 0.6381, 0.2516, 0.5502, 0.3599, 0.889] +2026-04-09 14:19:09.202031: Epoch time: 101.9 s +2026-04-09 14:19:10.378925: +2026-04-09 14:19:10.380635: Epoch 783 +2026-04-09 14:19:10.384605: Current learning rate: 0.00253 +2026-04-09 14:20:51.894797: train_loss -0.3545 +2026-04-09 14:20:51.899580: val_loss -0.2107 +2026-04-09 14:20:51.901392: Pseudo dice [0.7972, 0.5157, 0.7154, 0.0001, 0.2793, 0.7945, 0.8292] +2026-04-09 14:20:51.903571: Epoch time: 101.52 s +2026-04-09 14:20:53.057457: +2026-04-09 14:20:53.059040: Epoch 784 +2026-04-09 14:20:53.060431: Current learning rate: 0.00252 +2026-04-09 14:22:35.534085: train_loss -0.3637 +2026-04-09 14:22:35.538726: val_loss -0.2838 +2026-04-09 14:22:35.540745: Pseudo dice [0.5797, 0.4608, 0.7553, 0.0032, 0.3507, 0.7242, 0.6564] +2026-04-09 14:22:35.542796: Epoch time: 102.48 s +2026-04-09 14:22:36.759732: +2026-04-09 14:22:36.761356: Epoch 785 +2026-04-09 14:22:36.762816: Current learning rate: 0.00251 +2026-04-09 14:24:18.152511: train_loss -0.3522 +2026-04-09 14:24:18.157489: val_loss -0.2751 +2026-04-09 14:24:18.159479: Pseudo dice [0.6353, 0.1164, 0.4062, 0.4376, 0.2187, 0.8519, 0.8683] +2026-04-09 14:24:18.162232: Epoch time: 101.4 s +2026-04-09 14:24:19.297050: +2026-04-09 14:24:19.298955: Epoch 786 +2026-04-09 14:24:19.300579: Current learning rate: 0.0025 +2026-04-09 14:26:00.870184: train_loss -0.3341 +2026-04-09 14:26:00.875718: val_loss -0.209 +2026-04-09 14:26:00.877825: Pseudo dice [0.7838, 0.1793, 0.658, 0.0492, 0.1759, 0.7082, 0.7733] +2026-04-09 14:26:00.880184: Epoch time: 101.58 s +2026-04-09 14:26:02.062987: +2026-04-09 14:26:02.065221: Epoch 787 +2026-04-09 14:26:02.068087: Current learning rate: 0.00249 +2026-04-09 14:27:43.853984: train_loss -0.3566 +2026-04-09 14:27:43.860999: val_loss -0.2651 +2026-04-09 
14:27:43.862988: Pseudo dice [0.7906, 0.4339, 0.624, 0.042, 0.277, 0.8184, 0.9243] +2026-04-09 14:27:43.865256: Epoch time: 101.79 s +2026-04-09 14:27:45.102789: +2026-04-09 14:27:45.104588: Epoch 788 +2026-04-09 14:27:45.106286: Current learning rate: 0.00248 +2026-04-09 14:29:26.857665: train_loss -0.3609 +2026-04-09 14:29:26.862757: val_loss -0.2939 +2026-04-09 14:29:26.864444: Pseudo dice [0.8954, 0.1663, 0.7171, 0.0123, 0.2684, 0.7605, 0.7326] +2026-04-09 14:29:26.866615: Epoch time: 101.76 s +2026-04-09 14:29:28.022017: +2026-04-09 14:29:28.023627: Epoch 789 +2026-04-09 14:29:28.025016: Current learning rate: 0.00247 +2026-04-09 14:31:10.810819: train_loss -0.3529 +2026-04-09 14:31:10.816265: val_loss -0.2847 +2026-04-09 14:31:10.818934: Pseudo dice [0.4898, 0.1251, 0.8075, 0.683, 0.3896, 0.4668, 0.7212] +2026-04-09 14:31:10.822008: Epoch time: 102.79 s +2026-04-09 14:31:11.977519: +2026-04-09 14:31:11.979747: Epoch 790 +2026-04-09 14:31:11.981643: Current learning rate: 0.00245 +2026-04-09 14:32:53.488303: train_loss -0.3636 +2026-04-09 14:32:53.493945: val_loss -0.2723 +2026-04-09 14:32:53.496046: Pseudo dice [0.5467, 0.6586, 0.6753, 0.0453, 0.3595, 0.8603, 0.7973] +2026-04-09 14:32:53.498437: Epoch time: 101.51 s +2026-04-09 14:32:54.656566: +2026-04-09 14:32:54.658213: Epoch 791 +2026-04-09 14:32:54.660245: Current learning rate: 0.00244 +2026-04-09 14:34:36.136926: train_loss -0.3578 +2026-04-09 14:34:36.142255: val_loss -0.2955 +2026-04-09 14:34:36.145751: Pseudo dice [0.5341, 0.5769, 0.797, 0.655, 0.3305, 0.7791, 0.7874] +2026-04-09 14:34:36.148535: Epoch time: 101.48 s +2026-04-09 14:34:37.295111: +2026-04-09 14:34:37.297205: Epoch 792 +2026-04-09 14:34:37.299013: Current learning rate: 0.00243 +2026-04-09 14:36:19.066921: train_loss -0.3715 +2026-04-09 14:36:19.076419: val_loss -0.3258 +2026-04-09 14:36:19.078739: Pseudo dice [0.4652, 0.6154, 0.7952, 0.9261, 0.2176, 0.8266, 0.9054] +2026-04-09 14:36:19.081218: Epoch time: 101.77 s +2026-04-09 
14:36:20.243833: +2026-04-09 14:36:20.245540: Epoch 793 +2026-04-09 14:36:20.247261: Current learning rate: 0.00242 +2026-04-09 14:38:02.912443: train_loss -0.3714 +2026-04-09 14:38:02.918452: val_loss -0.2406 +2026-04-09 14:38:02.920584: Pseudo dice [0.4505, 0.5934, 0.6849, 0.1171, 0.4029, 0.8917, 0.8665] +2026-04-09 14:38:02.923133: Epoch time: 102.67 s +2026-04-09 14:38:04.084836: +2026-04-09 14:38:04.086565: Epoch 794 +2026-04-09 14:38:04.088720: Current learning rate: 0.00241 +2026-04-09 14:39:45.917029: train_loss -0.3699 +2026-04-09 14:39:45.922333: val_loss -0.2934 +2026-04-09 14:39:45.924102: Pseudo dice [0.5515, 0.6844, 0.7971, 0.0134, 0.2126, 0.8433, 0.8173] +2026-04-09 14:39:45.926472: Epoch time: 101.84 s +2026-04-09 14:39:47.091635: +2026-04-09 14:39:47.093195: Epoch 795 +2026-04-09 14:39:47.094948: Current learning rate: 0.0024 +2026-04-09 14:41:28.651328: train_loss -0.3575 +2026-04-09 14:41:28.656808: val_loss -0.3202 +2026-04-09 14:41:28.658247: Pseudo dice [0.4935, 0.4654, 0.7426, 0.6322, 0.4928, 0.8091, 0.6588] +2026-04-09 14:41:28.659962: Epoch time: 101.56 s +2026-04-09 14:41:29.813455: +2026-04-09 14:41:29.815056: Epoch 796 +2026-04-09 14:41:29.816453: Current learning rate: 0.00239 +2026-04-09 14:43:11.171629: train_loss -0.3572 +2026-04-09 14:43:11.177801: val_loss -0.2165 +2026-04-09 14:43:11.179511: Pseudo dice [0.5139, 0.584, 0.6265, 0.0003, 0.3165, 0.8534, 0.6699] +2026-04-09 14:43:11.181478: Epoch time: 101.36 s +2026-04-09 14:43:12.337014: +2026-04-09 14:43:12.338881: Epoch 797 +2026-04-09 14:43:12.340807: Current learning rate: 0.00238 +2026-04-09 14:44:54.206556: train_loss -0.361 +2026-04-09 14:44:54.211807: val_loss -0.3061 +2026-04-09 14:44:54.213967: Pseudo dice [0.3738, 0.2996, 0.7729, 0.776, 0.6327, 0.8982, 0.7353] +2026-04-09 14:44:54.216535: Epoch time: 101.87 s +2026-04-09 14:44:55.373611: +2026-04-09 14:44:55.375517: Epoch 798 +2026-04-09 14:44:55.377250: Current learning rate: 0.00237 +2026-04-09 14:46:37.396508: 
train_loss -0.3709 +2026-04-09 14:46:37.402927: val_loss -0.285 +2026-04-09 14:46:37.407331: Pseudo dice [0.5149, 0.3291, 0.8365, 0.2504, 0.4534, 0.8385, 0.7396] +2026-04-09 14:46:37.410165: Epoch time: 102.03 s +2026-04-09 14:46:38.564662: +2026-04-09 14:46:38.566696: Epoch 799 +2026-04-09 14:46:38.568310: Current learning rate: 0.00236 +2026-04-09 14:48:20.162282: train_loss -0.3714 +2026-04-09 14:48:20.167796: val_loss -0.3306 +2026-04-09 14:48:20.169765: Pseudo dice [0.8291, 0.5993, 0.8556, 0.2664, 0.6194, 0.872, 0.7411] +2026-04-09 14:48:20.172009: Epoch time: 101.6 s +2026-04-09 14:48:23.829994: +2026-04-09 14:48:23.833505: Epoch 800 +2026-04-09 14:48:23.835995: Current learning rate: 0.00235 +2026-04-09 14:50:05.841542: train_loss -0.3689 +2026-04-09 14:50:05.857312: val_loss -0.3107 +2026-04-09 14:50:05.859056: Pseudo dice [0.8538, 0.3746, 0.8031, 0.7021, 0.2653, 0.7199, 0.8151] +2026-04-09 14:50:05.861567: Epoch time: 102.01 s +2026-04-09 14:50:07.015583: +2026-04-09 14:50:07.017588: Epoch 801 +2026-04-09 14:50:07.019591: Current learning rate: 0.00234 +2026-04-09 14:51:48.935586: train_loss -0.3826 +2026-04-09 14:51:48.944302: val_loss -0.311 +2026-04-09 14:51:48.946915: Pseudo dice [0.858, 0.6589, 0.7974, 0.5328, 0.4972, 0.6443, 0.7272] +2026-04-09 14:51:48.949620: Epoch time: 101.92 s +2026-04-09 14:51:50.109325: +2026-04-09 14:51:50.111517: Epoch 802 +2026-04-09 14:51:50.113003: Current learning rate: 0.00233 +2026-04-09 14:53:31.674671: train_loss -0.3719 +2026-04-09 14:53:31.682171: val_loss -0.2944 +2026-04-09 14:53:31.684562: Pseudo dice [0.7538, 0.4886, 0.5198, 0.0082, 0.342, 0.8498, 0.7424] +2026-04-09 14:53:31.687646: Epoch time: 101.57 s +2026-04-09 14:53:32.843067: +2026-04-09 14:53:32.846477: Epoch 803 +2026-04-09 14:53:32.849306: Current learning rate: 0.00232 +2026-04-09 14:55:14.972289: train_loss -0.3647 +2026-04-09 14:55:14.978461: val_loss -0.2983 +2026-04-09 14:55:14.980436: Pseudo dice [0.2855, 0.6243, 0.7449, 0.0033, 0.4936, 0.8701, 
0.6667] +2026-04-09 14:55:14.982362: Epoch time: 102.13 s +2026-04-09 14:55:16.138942: +2026-04-09 14:55:16.140911: Epoch 804 +2026-04-09 14:55:16.142442: Current learning rate: 0.00231 +2026-04-09 14:56:57.990515: train_loss -0.3657 +2026-04-09 14:56:58.006130: val_loss -0.3046 +2026-04-09 14:56:58.010181: Pseudo dice [0.1751, 0.702, 0.8062, 0.5259, 0.2628, 0.7487, 0.9071] +2026-04-09 14:56:58.015130: Epoch time: 101.85 s +2026-04-09 14:56:59.184656: +2026-04-09 14:56:59.186501: Epoch 805 +2026-04-09 14:56:59.187973: Current learning rate: 0.0023 +2026-04-09 14:58:42.029595: train_loss -0.3618 +2026-04-09 14:58:42.036313: val_loss -0.1683 +2026-04-09 14:58:42.038085: Pseudo dice [0.7604, 0.5349, 0.4267, 0.0204, 0.3761, 0.8679, 0.7193] +2026-04-09 14:58:42.040174: Epoch time: 102.85 s +2026-04-09 14:58:43.209939: +2026-04-09 14:58:43.212668: Epoch 806 +2026-04-09 14:58:43.214467: Current learning rate: 0.00229 +2026-04-09 15:00:25.254983: train_loss -0.3542 +2026-04-09 15:00:25.267327: val_loss -0.3223 +2026-04-09 15:00:25.271414: Pseudo dice [0.7999, 0.6321, 0.7111, 0.3082, 0.417, 0.7978, 0.8944] +2026-04-09 15:00:25.275183: Epoch time: 102.05 s +2026-04-09 15:00:26.450278: +2026-04-09 15:00:26.451971: Epoch 807 +2026-04-09 15:00:26.453382: Current learning rate: 0.00228 +2026-04-09 15:02:10.102149: train_loss -0.3645 +2026-04-09 15:02:10.109401: val_loss -0.3029 +2026-04-09 15:02:10.111555: Pseudo dice [0.4755, 0.715, 0.7954, 0.4509, 0.4262, 0.8352, 0.6865] +2026-04-09 15:02:10.113512: Epoch time: 103.65 s +2026-04-09 15:02:11.281819: +2026-04-09 15:02:11.283531: Epoch 808 +2026-04-09 15:02:11.285871: Current learning rate: 0.00226 +2026-04-09 15:03:53.160574: train_loss -0.3662 +2026-04-09 15:03:53.166867: val_loss -0.2844 +2026-04-09 15:03:53.168505: Pseudo dice [0.3229, 0.5911, 0.8076, 0.4983, 0.455, 0.8266, 0.6209] +2026-04-09 15:03:53.170782: Epoch time: 101.88 s +2026-04-09 15:03:54.354731: +2026-04-09 15:03:54.356297: Epoch 809 +2026-04-09 15:03:54.357817: 
Current learning rate: 0.00225 +2026-04-09 15:05:36.221639: train_loss -0.362 +2026-04-09 15:05:36.230178: val_loss -0.3295 +2026-04-09 15:05:36.232611: Pseudo dice [0.7475, 0.7347, 0.8229, 0.6514, 0.3129, 0.741, 0.7721] +2026-04-09 15:05:36.236418: Epoch time: 101.87 s +2026-04-09 15:05:37.407167: +2026-04-09 15:05:37.409345: Epoch 810 +2026-04-09 15:05:37.410951: Current learning rate: 0.00224 +2026-04-09 15:07:20.743434: train_loss -0.3619 +2026-04-09 15:07:20.759326: val_loss -0.297 +2026-04-09 15:07:20.763759: Pseudo dice [0.8449, 0.1433, 0.8037, 0.776, 0.3794, 0.8299, 0.6607] +2026-04-09 15:07:20.769150: Epoch time: 103.34 s +2026-04-09 15:07:20.773267: Yayy! New best EMA pseudo Dice: 0.602 +2026-04-09 15:07:23.665361: +2026-04-09 15:07:23.667314: Epoch 811 +2026-04-09 15:07:23.668982: Current learning rate: 0.00223 +2026-04-09 15:09:06.251741: train_loss -0.3818 +2026-04-09 15:09:06.264066: val_loss -0.3241 +2026-04-09 15:09:06.266120: Pseudo dice [0.5587, 0.6088, 0.8276, 0.7058, 0.3481, 0.9109, 0.7119] +2026-04-09 15:09:06.269324: Epoch time: 102.59 s +2026-04-09 15:09:06.271484: Yayy! 
New best EMA pseudo Dice: 0.6085 +2026-04-09 15:09:09.146595: +2026-04-09 15:09:09.147998: Epoch 812 +2026-04-09 15:09:09.149774: Current learning rate: 0.00222 +2026-04-09 15:10:51.534070: train_loss -0.3731 +2026-04-09 15:10:51.541399: val_loss -0.2891 +2026-04-09 15:10:51.543567: Pseudo dice [0.8121, 0.2144, 0.7487, 0.1721, 0.2242, 0.8683, 0.7528] +2026-04-09 15:10:51.547775: Epoch time: 102.39 s +2026-04-09 15:10:52.705139: +2026-04-09 15:10:52.706759: Epoch 813 +2026-04-09 15:10:52.708192: Current learning rate: 0.00221 +2026-04-09 15:12:35.289070: train_loss -0.368 +2026-04-09 15:12:35.300829: val_loss -0.2821 +2026-04-09 15:12:35.304550: Pseudo dice [0.5763, 0.6794, 0.6839, 0.0725, 0.3856, 0.8787, 0.7468] +2026-04-09 15:12:35.308040: Epoch time: 102.59 s +2026-04-09 15:12:36.499657: +2026-04-09 15:12:36.504629: Epoch 814 +2026-04-09 15:12:36.507704: Current learning rate: 0.0022 +2026-04-09 15:14:20.087675: train_loss -0.3605 +2026-04-09 15:14:20.094486: val_loss -0.3149 +2026-04-09 15:14:20.096688: Pseudo dice [0.0691, 0.4477, 0.7854, 0.6264, 0.3732, 0.8716, 0.8518] +2026-04-09 15:14:20.099182: Epoch time: 103.59 s +2026-04-09 15:14:21.297149: +2026-04-09 15:14:21.300544: Epoch 815 +2026-04-09 15:14:21.302519: Current learning rate: 0.00219 +2026-04-09 15:16:04.012151: train_loss -0.3708 +2026-04-09 15:16:04.018793: val_loss -0.176 +2026-04-09 15:16:04.020677: Pseudo dice [0.7074, 0.3224, 0.4325, 0.0002, 0.4119, 0.881, 0.7307] +2026-04-09 15:16:04.023456: Epoch time: 102.72 s +2026-04-09 15:16:05.197109: +2026-04-09 15:16:05.199216: Epoch 816 +2026-04-09 15:16:05.201033: Current learning rate: 0.00218 +2026-04-09 15:17:46.828780: train_loss -0.3525 +2026-04-09 15:17:46.834309: val_loss -0.2676 +2026-04-09 15:17:46.836230: Pseudo dice [0.8248, 0.4333, 0.7797, 0.0004, 0.375, 0.6834, 0.6856] +2026-04-09 15:17:46.838276: Epoch time: 101.63 s +2026-04-09 15:17:48.036557: +2026-04-09 15:17:48.039397: Epoch 817 +2026-04-09 15:17:48.040788: Current learning rate: 
0.00217 +2026-04-09 15:19:29.563333: train_loss -0.3455 +2026-04-09 15:19:29.571791: val_loss -0.2511 +2026-04-09 15:19:29.574327: Pseudo dice [0.6054, 0.2091, 0.6289, 0.1235, 0.2124, 0.7325, 0.4082] +2026-04-09 15:19:29.576799: Epoch time: 101.53 s +2026-04-09 15:19:31.888777: +2026-04-09 15:19:31.891341: Epoch 818 +2026-04-09 15:19:31.892915: Current learning rate: 0.00216 +2026-04-09 15:21:13.897889: train_loss -0.3436 +2026-04-09 15:21:13.911018: val_loss -0.3502 +2026-04-09 15:21:13.919317: Pseudo dice [0.8332, 0.4825, 0.8438, 0.0468, 0.5444, 0.7975, 0.8543] +2026-04-09 15:21:13.922170: Epoch time: 102.01 s +2026-04-09 15:21:15.112471: +2026-04-09 15:21:15.114399: Epoch 819 +2026-04-09 15:21:15.116322: Current learning rate: 0.00215 +2026-04-09 15:22:56.537414: train_loss -0.3605 +2026-04-09 15:22:56.542971: val_loss -0.2936 +2026-04-09 15:22:56.544706: Pseudo dice [0.5116, 0.5557, 0.5586, 0.6538, 0.4488, 0.8946, 0.5074] +2026-04-09 15:22:56.547462: Epoch time: 101.43 s +2026-04-09 15:22:57.655630: +2026-04-09 15:22:57.657489: Epoch 820 +2026-04-09 15:22:57.659515: Current learning rate: 0.00214 +2026-04-09 15:24:40.482949: train_loss -0.3759 +2026-04-09 15:24:40.489640: val_loss -0.3095 +2026-04-09 15:24:40.491646: Pseudo dice [0.7629, 0.6701, 0.8169, 0.7601, 0.2579, 0.8, 0.2172] +2026-04-09 15:24:40.494366: Epoch time: 102.83 s +2026-04-09 15:24:41.602055: +2026-04-09 15:24:41.605250: Epoch 821 +2026-04-09 15:24:41.607049: Current learning rate: 0.00213 +2026-04-09 15:26:23.301363: train_loss -0.3678 +2026-04-09 15:26:23.307606: val_loss -0.2677 +2026-04-09 15:26:23.310105: Pseudo dice [0.5095, 0.476, 0.6884, 0.319, 0.1235, 0.8472, 0.6333] +2026-04-09 15:26:23.313307: Epoch time: 101.7 s +2026-04-09 15:26:24.457693: +2026-04-09 15:26:24.459990: Epoch 822 +2026-04-09 15:26:24.461608: Current learning rate: 0.00212 +2026-04-09 15:28:07.773624: train_loss -0.3706 +2026-04-09 15:28:07.779283: val_loss -0.3029 +2026-04-09 15:28:07.780887: Pseudo dice [0.3818, 
0.3622, 0.7303, 0.8686, 0.2988, 0.856, 0.9194] +2026-04-09 15:28:07.783119: Epoch time: 103.32 s +2026-04-09 15:28:08.907694: +2026-04-09 15:28:08.910184: Epoch 823 +2026-04-09 15:28:08.911635: Current learning rate: 0.0021 +2026-04-09 15:29:50.414687: train_loss -0.3637 +2026-04-09 15:29:50.424298: val_loss -0.3016 +2026-04-09 15:29:50.427173: Pseudo dice [0.4576, 0.4979, 0.6919, 0.255, 0.5114, 0.8435, 0.6992] +2026-04-09 15:29:50.431482: Epoch time: 101.51 s +2026-04-09 15:29:51.555229: +2026-04-09 15:29:51.557146: Epoch 824 +2026-04-09 15:29:51.558847: Current learning rate: 0.00209 +2026-04-09 15:31:33.755726: train_loss -0.3626 +2026-04-09 15:31:33.762049: val_loss -0.2346 +2026-04-09 15:31:33.763895: Pseudo dice [0.4527, 0.1241, 0.7313, 0.0009, 0.3829, 0.7016, 0.7885] +2026-04-09 15:31:33.766407: Epoch time: 102.2 s +2026-04-09 15:31:34.888826: +2026-04-09 15:31:34.890731: Epoch 825 +2026-04-09 15:31:34.893109: Current learning rate: 0.00208 +2026-04-09 15:33:17.518973: train_loss -0.3532 +2026-04-09 15:33:17.525030: val_loss -0.2473 +2026-04-09 15:33:17.527004: Pseudo dice [0.5976, 0.5681, 0.7, 0.0825, 0.4708, 0.8482, 0.7861] +2026-04-09 15:33:17.529417: Epoch time: 102.63 s +2026-04-09 15:33:18.617720: +2026-04-09 15:33:18.619265: Epoch 826 +2026-04-09 15:33:18.620905: Current learning rate: 0.00207 +2026-04-09 15:35:00.679435: train_loss -0.3572 +2026-04-09 15:35:00.686409: val_loss -0.3033 +2026-04-09 15:35:00.689249: Pseudo dice [0.4305, 0.2067, 0.8025, 0.2514, 0.4833, 0.7243, 0.8162] +2026-04-09 15:35:00.691782: Epoch time: 102.06 s +2026-04-09 15:35:01.795034: +2026-04-09 15:35:01.811523: Epoch 827 +2026-04-09 15:35:01.815158: Current learning rate: 0.00206 +2026-04-09 15:36:44.105303: train_loss -0.3548 +2026-04-09 15:36:44.114470: val_loss -0.2846 +2026-04-09 15:36:44.118927: Pseudo dice [0.5154, 0.4327, 0.8186, 0.354, 0.3509, 0.8499, 0.3672] +2026-04-09 15:36:44.121444: Epoch time: 102.31 s +2026-04-09 15:36:45.243028: +2026-04-09 15:36:45.245296: 
Epoch 828 +2026-04-09 15:36:45.248057: Current learning rate: 0.00205 +2026-04-09 15:38:27.775536: train_loss -0.3617 +2026-04-09 15:38:27.782683: val_loss -0.3082 +2026-04-09 15:38:27.784448: Pseudo dice [0.4376, 0.6562, 0.7707, 0.0012, 0.4723, 0.8226, 0.8053] +2026-04-09 15:38:27.787609: Epoch time: 102.54 s +2026-04-09 15:38:28.901831: +2026-04-09 15:38:28.903650: Epoch 829 +2026-04-09 15:38:28.905476: Current learning rate: 0.00204 +2026-04-09 15:40:10.365701: train_loss -0.369 +2026-04-09 15:40:10.372600: val_loss -0.1746 +2026-04-09 15:40:10.374645: Pseudo dice [0.3492, 0.4813, 0.495, 0.0021, 0.3026, 0.8842, 0.8129] +2026-04-09 15:40:10.377874: Epoch time: 101.47 s +2026-04-09 15:40:11.481781: +2026-04-09 15:40:11.483370: Epoch 830 +2026-04-09 15:40:11.484725: Current learning rate: 0.00203 +2026-04-09 15:41:53.099044: train_loss -0.3607 +2026-04-09 15:41:53.105410: val_loss -0.2598 +2026-04-09 15:41:53.108890: Pseudo dice [0.5698, 0.4884, 0.7358, 0.0024, 0.4508, 0.8924, 0.84] +2026-04-09 15:41:53.111552: Epoch time: 101.62 s +2026-04-09 15:41:54.205376: +2026-04-09 15:41:54.207092: Epoch 831 +2026-04-09 15:41:54.208596: Current learning rate: 0.00202 +2026-04-09 15:43:42.806859: train_loss -0.3687 +2026-04-09 15:43:42.815907: val_loss -0.3196 +2026-04-09 15:43:42.818375: Pseudo dice [0.5716, 0.517, 0.723, 0.6177, 0.4822, 0.8307, 0.7223] +2026-04-09 15:43:42.821270: Epoch time: 108.6 s +2026-04-09 15:43:43.919837: +2026-04-09 15:43:43.921862: Epoch 832 +2026-04-09 15:43:43.923759: Current learning rate: 0.00201 +2026-04-09 15:45:25.685005: train_loss -0.3717 +2026-04-09 15:45:25.700603: val_loss -0.2212 +2026-04-09 15:45:25.705121: Pseudo dice [0.7724, 0.3079, 0.8106, 0.0337, 0.3444, 0.7982, 0.8751] +2026-04-09 15:45:25.709391: Epoch time: 101.77 s +2026-04-09 15:45:26.823796: +2026-04-09 15:45:26.825550: Epoch 833 +2026-04-09 15:45:26.827196: Current learning rate: 0.002 +2026-04-09 15:47:08.491975: train_loss -0.3669 +2026-04-09 15:47:08.498049: val_loss 
-0.3228 +2026-04-09 15:47:08.500480: Pseudo dice [0.6817, 0.637, 0.7811, 0.0017, 0.2833, 0.8183, 0.8834] +2026-04-09 15:47:08.503512: Epoch time: 101.67 s +2026-04-09 15:47:09.618216: +2026-04-09 15:47:09.620209: Epoch 834 +2026-04-09 15:47:09.621888: Current learning rate: 0.00199 +2026-04-09 15:48:52.057706: train_loss -0.3665 +2026-04-09 15:48:52.064006: val_loss -0.2901 +2026-04-09 15:48:52.066428: Pseudo dice [0.4903, 0.5282, 0.8258, 0.3819, 0.369, 0.7808, 0.7653] +2026-04-09 15:48:52.069288: Epoch time: 102.44 s +2026-04-09 15:48:53.186787: +2026-04-09 15:48:53.188440: Epoch 835 +2026-04-09 15:48:53.189915: Current learning rate: 0.00198 +2026-04-09 15:50:34.820003: train_loss -0.3644 +2026-04-09 15:50:34.827775: val_loss -0.3111 +2026-04-09 15:50:34.830157: Pseudo dice [0.4587, 0.6072, 0.7592, 0.8606, 0.3262, 0.7051, 0.8383] +2026-04-09 15:50:34.832522: Epoch time: 101.64 s +2026-04-09 15:50:35.917937: +2026-04-09 15:50:35.920240: Epoch 836 +2026-04-09 15:50:35.921757: Current learning rate: 0.00196 +2026-04-09 15:52:17.493072: train_loss -0.3623 +2026-04-09 15:52:17.498827: val_loss -0.304 +2026-04-09 15:52:17.500617: Pseudo dice [0.606, 0.7121, 0.7777, 0.8373, 0.2423, 0.832, 0.9083] +2026-04-09 15:52:17.502665: Epoch time: 101.58 s +2026-04-09 15:52:18.585207: +2026-04-09 15:52:18.587037: Epoch 837 +2026-04-09 15:52:18.588554: Current learning rate: 0.00195 +2026-04-09 15:54:00.290708: train_loss -0.3639 +2026-04-09 15:54:00.297266: val_loss -0.3002 +2026-04-09 15:54:00.299238: Pseudo dice [0.7265, 0.3311, 0.7544, 0.0651, 0.3863, 0.8177, 0.8488] +2026-04-09 15:54:00.301895: Epoch time: 101.71 s +2026-04-09 15:54:01.394280: +2026-04-09 15:54:01.396028: Epoch 838 +2026-04-09 15:54:01.398547: Current learning rate: 0.00194 +2026-04-09 15:55:46.205268: train_loss -0.3564 +2026-04-09 15:55:46.212260: val_loss -0.2964 +2026-04-09 15:55:46.214890: Pseudo dice [0.6522, 0.5479, 0.7023, 0.1708, 0.4225, 0.8316, 0.8805] +2026-04-09 15:55:46.218029: Epoch time: 104.81 
s +2026-04-09 15:55:47.342722: +2026-04-09 15:55:47.344949: Epoch 839 +2026-04-09 15:55:47.347142: Current learning rate: 0.00193 +2026-04-09 15:57:29.361516: train_loss -0.374 +2026-04-09 15:57:29.369396: val_loss -0.2548 +2026-04-09 15:57:29.372049: Pseudo dice [0.5228, 0.4583, 0.6426, 0.181, 0.3953, 0.7994, 0.8699] +2026-04-09 15:57:29.375245: Epoch time: 102.02 s +2026-04-09 15:57:30.469975: +2026-04-09 15:57:30.472470: Epoch 840 +2026-04-09 15:57:30.475113: Current learning rate: 0.00192 +2026-04-09 15:59:14.352438: train_loss -0.3581 +2026-04-09 15:59:14.359026: val_loss -0.2777 +2026-04-09 15:59:14.361717: Pseudo dice [0.7864, 0.3458, 0.6085, 0.0007, 0.5103, 0.8214, 0.7513] +2026-04-09 15:59:14.364724: Epoch time: 103.89 s +2026-04-09 15:59:15.467121: +2026-04-09 15:59:15.468797: Epoch 841 +2026-04-09 15:59:15.470314: Current learning rate: 0.00191 +2026-04-09 16:00:58.001250: train_loss -0.3654 +2026-04-09 16:00:58.006974: val_loss -0.3132 +2026-04-09 16:00:58.008988: Pseudo dice [0.8114, 0.607, 0.7705, 0.3566, 0.5119, 0.7894, 0.8705] +2026-04-09 16:00:58.011496: Epoch time: 102.54 s +2026-04-09 16:00:59.111289: +2026-04-09 16:00:59.113905: Epoch 842 +2026-04-09 16:00:59.117501: Current learning rate: 0.0019 +2026-04-09 16:02:42.087825: train_loss -0.3757 +2026-04-09 16:02:42.094466: val_loss -0.2841 +2026-04-09 16:02:42.097439: Pseudo dice [0.8744, 0.4382, 0.2532, 0.0324, 0.4389, 0.8545, 0.7894] +2026-04-09 16:02:42.100516: Epoch time: 102.98 s +2026-04-09 16:02:43.187469: +2026-04-09 16:02:43.189336: Epoch 843 +2026-04-09 16:02:43.191283: Current learning rate: 0.00189 +2026-04-09 16:04:26.390754: train_loss -0.3747 +2026-04-09 16:04:26.397200: val_loss -0.3116 +2026-04-09 16:04:26.399160: Pseudo dice [0.5648, 0.5971, 0.7359, 0.4843, 0.6641, 0.7711, 0.8801] +2026-04-09 16:04:26.402953: Epoch time: 103.21 s +2026-04-09 16:04:27.527300: +2026-04-09 16:04:27.530258: Epoch 844 +2026-04-09 16:04:27.534410: Current learning rate: 0.00188 +2026-04-09 
16:06:09.471556: train_loss -0.3755 +2026-04-09 16:06:09.481963: val_loss -0.2666 +2026-04-09 16:06:09.484955: Pseudo dice [0.7577, 0.458, 0.7923, 0.0013, 0.3685, 0.795, 0.8758] +2026-04-09 16:06:09.489406: Epoch time: 101.95 s +2026-04-09 16:06:10.612089: +2026-04-09 16:06:10.614193: Epoch 845 +2026-04-09 16:06:10.619076: Current learning rate: 0.00187 +2026-04-09 16:07:53.899592: train_loss -0.3719 +2026-04-09 16:07:53.907182: val_loss -0.3287 +2026-04-09 16:07:53.909228: Pseudo dice [0.8521, 0.1181, 0.7953, 0.7323, 0.4518, 0.8837, 0.8363] +2026-04-09 16:07:53.911585: Epoch time: 103.29 s +2026-04-09 16:07:55.013392: +2026-04-09 16:07:55.016119: Epoch 846 +2026-04-09 16:07:55.018054: Current learning rate: 0.00186 +2026-04-09 16:09:36.718355: train_loss -0.3821 +2026-04-09 16:09:36.724000: val_loss -0.3029 +2026-04-09 16:09:36.725710: Pseudo dice [0.8115, 0.443, 0.7205, 0.3196, 0.4529, 0.8912, 0.7126] +2026-04-09 16:09:36.727978: Epoch time: 101.71 s +2026-04-09 16:09:37.833408: +2026-04-09 16:09:37.834937: Epoch 847 +2026-04-09 16:09:37.836439: Current learning rate: 0.00185 +2026-04-09 16:11:20.018193: train_loss -0.3719 +2026-04-09 16:11:20.025492: val_loss -0.2319 +2026-04-09 16:11:20.027986: Pseudo dice [0.4769, 0.5729, 0.6818, 0.0001, 0.4086, 0.8937, 0.7726] +2026-04-09 16:11:20.030752: Epoch time: 102.19 s +2026-04-09 16:11:21.138500: +2026-04-09 16:11:21.140852: Epoch 848 +2026-04-09 16:11:21.143812: Current learning rate: 0.00184 +2026-04-09 16:13:02.663142: train_loss -0.3776 +2026-04-09 16:13:02.669751: val_loss -0.3217 +2026-04-09 16:13:02.672033: Pseudo dice [0.5141, 0.6692, 0.8219, 0.1249, 0.3624, 0.8743, 0.8273] +2026-04-09 16:13:02.674429: Epoch time: 101.53 s +2026-04-09 16:13:03.778761: +2026-04-09 16:13:03.780473: Epoch 849 +2026-04-09 16:13:03.781961: Current learning rate: 0.00182 +2026-04-09 16:14:46.889217: train_loss -0.3748 +2026-04-09 16:14:46.896303: val_loss -0.3274 +2026-04-09 16:14:46.898020: Pseudo dice [0.8483, 0.3672, 0.7825, 
0.2038, 0.3922, 0.8309, 0.9137] +2026-04-09 16:14:46.900234: Epoch time: 103.11 s +2026-04-09 16:14:49.741194: +2026-04-09 16:14:49.742955: Epoch 850 +2026-04-09 16:14:49.744339: Current learning rate: 0.00181 +2026-04-09 16:16:32.089908: train_loss -0.3614 +2026-04-09 16:16:32.096372: val_loss -0.296 +2026-04-09 16:16:32.098069: Pseudo dice [0.5373, 0.209, 0.7918, 0.8717, 0.3802, 0.7977, 0.711] +2026-04-09 16:16:32.100327: Epoch time: 102.35 s +2026-04-09 16:16:33.194593: +2026-04-09 16:16:33.196187: Epoch 851 +2026-04-09 16:16:33.198050: Current learning rate: 0.0018 +2026-04-09 16:18:15.085383: train_loss -0.3789 +2026-04-09 16:18:15.093138: val_loss -0.3316 +2026-04-09 16:18:15.096156: Pseudo dice [0.7144, 0.5507, 0.7861, 0.0865, 0.4681, 0.8124, 0.917] +2026-04-09 16:18:15.098770: Epoch time: 101.89 s +2026-04-09 16:18:16.180155: +2026-04-09 16:18:16.182136: Epoch 852 +2026-04-09 16:18:16.186058: Current learning rate: 0.00179 +2026-04-09 16:19:58.496450: train_loss -0.3821 +2026-04-09 16:19:58.504386: val_loss -0.3007 +2026-04-09 16:19:58.506641: Pseudo dice [0.3217, 0.4924, 0.6923, 0.2849, 0.4512, 0.8344, 0.8996] +2026-04-09 16:19:58.509029: Epoch time: 102.32 s +2026-04-09 16:19:59.584751: +2026-04-09 16:19:59.586238: Epoch 853 +2026-04-09 16:19:59.587616: Current learning rate: 0.00178 +2026-04-09 16:21:41.703677: train_loss -0.3743 +2026-04-09 16:21:41.711243: val_loss -0.3323 +2026-04-09 16:21:41.713448: Pseudo dice [0.8135, 0.608, 0.7749, 0.8469, 0.4598, 0.7903, 0.8976] +2026-04-09 16:21:41.716821: Epoch time: 102.12 s +2026-04-09 16:21:41.718686: Yayy! 
New best EMA pseudo Dice: 0.6123 +2026-04-09 16:21:44.712174: +2026-04-09 16:21:44.714431: Epoch 854 +2026-04-09 16:21:44.716744: Current learning rate: 0.00177 +2026-04-09 16:23:26.231915: train_loss -0.3686 +2026-04-09 16:23:26.238954: val_loss -0.3452 +2026-04-09 16:23:26.240963: Pseudo dice [0.6711, 0.5242, 0.7494, 0.8221, 0.5815, 0.8953, 0.7661] +2026-04-09 16:23:26.243296: Epoch time: 101.52 s +2026-04-09 16:23:26.245368: Yayy! New best EMA pseudo Dice: 0.6226 +2026-04-09 16:23:29.087596: +2026-04-09 16:23:29.090633: Epoch 855 +2026-04-09 16:23:29.093033: Current learning rate: 0.00176 +2026-04-09 16:25:10.475212: train_loss -0.3755 +2026-04-09 16:25:10.480822: val_loss -0.2744 +2026-04-09 16:25:10.482723: Pseudo dice [0.8637, 0.6937, 0.7709, 0.0002, 0.3317, 0.9008, 0.8447] +2026-04-09 16:25:10.485562: Epoch time: 101.39 s +2026-04-09 16:25:10.487437: Yayy! New best EMA pseudo Dice: 0.6233 +2026-04-09 16:25:13.313898: +2026-04-09 16:25:13.316803: Epoch 856 +2026-04-09 16:25:13.319689: Current learning rate: 0.00175 +2026-04-09 16:26:55.102810: train_loss -0.3814 +2026-04-09 16:26:55.107687: val_loss -0.2403 +2026-04-09 16:26:55.109415: Pseudo dice [0.8667, 0.6084, 0.694, 0.0077, 0.4243, 0.8306, 0.7639] +2026-04-09 16:26:55.111777: Epoch time: 101.79 s +2026-04-09 16:26:56.218160: +2026-04-09 16:26:56.221017: Epoch 857 +2026-04-09 16:26:56.224046: Current learning rate: 0.00174 +2026-04-09 16:28:39.717093: train_loss -0.376 +2026-04-09 16:28:39.725770: val_loss -0.3251 +2026-04-09 16:28:39.727780: Pseudo dice [0.8271, 0.4464, 0.8004, 0.789, 0.3254, 0.7858, 0.8805] +2026-04-09 16:28:39.729886: Epoch time: 103.5 s +2026-04-09 16:28:39.732136: Yayy! 
New best EMA pseudo Dice: 0.6281 +2026-04-09 16:28:42.537583: +2026-04-09 16:28:42.539334: Epoch 858 +2026-04-09 16:28:42.540915: Current learning rate: 0.00173 +2026-04-09 16:30:24.492110: train_loss -0.3752 +2026-04-09 16:30:24.499903: val_loss -0.2764 +2026-04-09 16:30:24.503258: Pseudo dice [0.7772, 0.7134, 0.5343, 0.0294, 0.3758, 0.8743, 0.8174] +2026-04-09 16:30:24.505586: Epoch time: 101.96 s +2026-04-09 16:30:25.638406: +2026-04-09 16:30:25.644953: Epoch 859 +2026-04-09 16:30:25.647865: Current learning rate: 0.00172 +2026-04-09 16:32:08.116175: train_loss -0.4098 +2026-04-09 16:32:08.123311: val_loss -0.3484 +2026-04-09 16:32:08.125386: Pseudo dice [0.5858, 0.3307, 0.7834, 0.2016, 0.4413, 0.7986, 0.6906] +2026-04-09 16:32:08.128046: Epoch time: 102.48 s +2026-04-09 16:32:09.213018: +2026-04-09 16:32:09.214847: Epoch 860 +2026-04-09 16:32:09.216396: Current learning rate: 0.0017 +2026-04-09 16:33:50.767957: train_loss -0.4248 +2026-04-09 16:33:50.773539: val_loss -0.3159 +2026-04-09 16:33:50.775243: Pseudo dice [0.8372, 0.012, 0.8157, 0.0936, 0.5657, 0.7985, 0.7691] +2026-04-09 16:33:50.777113: Epoch time: 101.56 s +2026-04-09 16:33:51.890827: +2026-04-09 16:33:51.892786: Epoch 861 +2026-04-09 16:33:51.894819: Current learning rate: 0.00169 +2026-04-09 16:35:33.834950: train_loss -0.4477 +2026-04-09 16:35:33.841125: val_loss -0.389 +2026-04-09 16:35:33.844124: Pseudo dice [0.7872, 0.168, 0.669, 0.5349, 0.4724, 0.8828, 0.8263] +2026-04-09 16:35:33.846625: Epoch time: 101.95 s +2026-04-09 16:35:34.947304: +2026-04-09 16:35:34.949370: Epoch 862 +2026-04-09 16:35:34.950800: Current learning rate: 0.00168 +2026-04-09 16:37:17.070394: train_loss -0.5326 +2026-04-09 16:37:17.076576: val_loss -0.4772 +2026-04-09 16:37:17.078580: Pseudo dice [0.143, 0.0533, 0.3314, 0.2593, 0.1564, 0.4741, 0.2806] +2026-04-09 16:37:17.080803: Epoch time: 102.13 s +2026-04-09 16:37:18.199682: +2026-04-09 16:37:18.202111: Epoch 863 +2026-04-09 16:37:18.204195: Current learning rate: 
0.00167 +2026-04-09 16:39:00.016825: train_loss -0.6146 +2026-04-09 16:39:00.021680: val_loss -0.5298 +2026-04-09 16:39:00.023249: Pseudo dice [0.3022, 0.1607, 0.7234, 0.0, 0.2227, 0.8324, 0.7784] +2026-04-09 16:39:00.026143: Epoch time: 101.82 s +2026-04-09 16:39:01.092154: +2026-04-09 16:39:01.093677: Epoch 864 +2026-04-09 16:39:01.095058: Current learning rate: 0.00166 +2026-04-09 16:40:43.562798: train_loss -0.6177 +2026-04-09 16:40:43.568197: val_loss -0.5821 +2026-04-09 16:40:43.570153: Pseudo dice [0.6492, 0.0505, 0.7632, 0.7398, 0.4108, 0.8311, 0.8531] +2026-04-09 16:40:43.572281: Epoch time: 102.47 s +2026-04-09 16:40:44.655155: +2026-04-09 16:40:44.657015: Epoch 865 +2026-04-09 16:40:44.659388: Current learning rate: 0.00165 +2026-04-09 16:42:26.289796: train_loss -0.6492 +2026-04-09 16:42:26.310497: val_loss -0.6039 +2026-04-09 16:42:26.316843: Pseudo dice [0.3599, 0.0625, 0.6116, 0.5995, 0.5214, 0.5705, 0.8585] +2026-04-09 16:42:26.319033: Epoch time: 101.64 s +2026-04-09 16:42:27.404662: +2026-04-09 16:42:27.406517: Epoch 866 +2026-04-09 16:42:27.408267: Current learning rate: 0.00164 +2026-04-09 16:44:09.570894: train_loss -0.6433 +2026-04-09 16:44:09.579716: val_loss -0.5402 +2026-04-09 16:44:09.582385: Pseudo dice [0.3175, 0.7265, 0.2981, 0.3102, 0.506, 0.8596, 0.6334] +2026-04-09 16:44:09.585019: Epoch time: 102.17 s +2026-04-09 16:44:10.660803: +2026-04-09 16:44:10.664148: Epoch 867 +2026-04-09 16:44:10.666425: Current learning rate: 0.00163 +2026-04-09 16:45:52.656811: train_loss -0.6102 +2026-04-09 16:45:52.664067: val_loss -0.5531 +2026-04-09 16:45:52.665997: Pseudo dice [0.6271, 0.0, 0.5828, 0.4774, 0.3149, 0.791, 0.8414] +2026-04-09 16:45:52.667764: Epoch time: 102.0 s +2026-04-09 16:45:53.755594: +2026-04-09 16:45:53.757890: Epoch 868 +2026-04-09 16:45:53.759534: Current learning rate: 0.00162 +2026-04-09 16:47:35.989059: train_loss -0.6247 +2026-04-09 16:47:35.994199: val_loss -0.3775 +2026-04-09 16:47:35.995978: Pseudo dice [0.2921, 0.0, 
0.6981, 0.0068, 0.3811, 0.8176, 0.7967] +2026-04-09 16:47:35.998072: Epoch time: 102.24 s +2026-04-09 16:47:37.074719: +2026-04-09 16:47:37.076578: Epoch 869 +2026-04-09 16:47:37.078272: Current learning rate: 0.00161 +2026-04-09 16:49:19.780874: train_loss -0.6256 +2026-04-09 16:49:19.786523: val_loss -0.5222 +2026-04-09 16:49:19.789040: Pseudo dice [0.5921, 0.2577, 0.6826, 0.0013, 0.246, 0.7168, 0.6968] +2026-04-09 16:49:19.791147: Epoch time: 102.71 s +2026-04-09 16:49:20.873077: +2026-04-09 16:49:20.874732: Epoch 870 +2026-04-09 16:49:20.876305: Current learning rate: 0.00159 +2026-04-09 16:51:02.565673: train_loss -0.6424 +2026-04-09 16:51:02.571306: val_loss -0.5439 +2026-04-09 16:51:02.573029: Pseudo dice [0.6472, 0.4481, 0.5495, 0.8638, 0.2671, 0.7943, 0.5833] +2026-04-09 16:51:02.574835: Epoch time: 101.7 s +2026-04-09 16:51:03.663220: +2026-04-09 16:51:03.664930: Epoch 871 +2026-04-09 16:51:03.666436: Current learning rate: 0.00158 +2026-04-09 16:52:45.763464: train_loss -0.6212 +2026-04-09 16:52:45.769717: val_loss -0.5594 +2026-04-09 16:52:45.771320: Pseudo dice [0.2675, 0.2489, 0.7009, 0.7981, 0.4545, 0.678, 0.4475] +2026-04-09 16:52:45.773421: Epoch time: 102.1 s +2026-04-09 16:52:46.847535: +2026-04-09 16:52:46.851836: Epoch 872 +2026-04-09 16:52:46.856710: Current learning rate: 0.00157 +2026-04-09 16:54:29.909286: train_loss -0.608 +2026-04-09 16:54:29.914528: val_loss -0.6213 +2026-04-09 16:54:29.916048: Pseudo dice [0.1092, 0.4833, 0.8126, 0.7947, 0.4333, 0.7867, 0.8315] +2026-04-09 16:54:29.918178: Epoch time: 103.06 s +2026-04-09 16:54:30.988027: +2026-04-09 16:54:30.989534: Epoch 873 +2026-04-09 16:54:30.991853: Current learning rate: 0.00156 +2026-04-09 16:56:12.838515: train_loss -0.6388 +2026-04-09 16:56:12.845074: val_loss -0.5858 +2026-04-09 16:56:12.847637: Pseudo dice [0.5008, 0.128, 0.6437, 0.6621, 0.3416, 0.8529, 0.7892] +2026-04-09 16:56:12.849544: Epoch time: 101.85 s +2026-04-09 16:56:13.937949: +2026-04-09 16:56:13.941758: Epoch 
874 +2026-04-09 16:56:13.946833: Current learning rate: 0.00155 +2026-04-09 16:57:56.109799: train_loss -0.6336 +2026-04-09 16:57:56.115769: val_loss -0.5332 +2026-04-09 16:57:56.117836: Pseudo dice [0.6477, 0.6862, 0.7334, 0.0944, 0.3235, 0.7623, 0.8029] +2026-04-09 16:57:56.119548: Epoch time: 102.18 s +2026-04-09 16:57:57.210436: +2026-04-09 16:57:57.214531: Epoch 875 +2026-04-09 16:57:57.216260: Current learning rate: 0.00154 +2026-04-09 16:59:39.807961: train_loss -0.6381 +2026-04-09 16:59:39.815130: val_loss -0.5137 +2026-04-09 16:59:39.817149: Pseudo dice [0.7598, 0.5086, 0.4755, 0.1738, 0.3446, 0.8332, 0.791] +2026-04-09 16:59:39.818970: Epoch time: 102.6 s +2026-04-09 16:59:40.920391: +2026-04-09 16:59:40.922043: Epoch 876 +2026-04-09 16:59:40.923594: Current learning rate: 0.00153 +2026-04-09 17:01:22.874227: train_loss -0.6368 +2026-04-09 17:01:22.892029: val_loss -0.5353 +2026-04-09 17:01:22.897716: Pseudo dice [0.6925, 0.5718, 0.723, 0.0, 0.2758, 0.5296, 0.8015] +2026-04-09 17:01:22.902610: Epoch time: 101.96 s +2026-04-09 17:01:23.988275: +2026-04-09 17:01:23.992911: Epoch 877 +2026-04-09 17:01:23.998518: Current learning rate: 0.00152 +2026-04-09 17:03:06.627274: train_loss -0.6306 +2026-04-09 17:03:06.634930: val_loss -0.5022 +2026-04-09 17:03:06.637093: Pseudo dice [0.4283, 0.6796, 0.7366, 0.7408, 0.3929, 0.7645, 0.225] +2026-04-09 17:03:06.638964: Epoch time: 102.64 s +2026-04-09 17:03:09.006441: +2026-04-09 17:03:09.008016: Epoch 878 +2026-04-09 17:03:09.010793: Current learning rate: 0.00151 +2026-04-09 17:04:51.093534: train_loss -0.6321 +2026-04-09 17:04:51.098398: val_loss -0.5407 +2026-04-09 17:04:51.100742: Pseudo dice [0.2697, 0.0029, 0.7061, 0.0412, 0.4728, 0.584, 0.8124] +2026-04-09 17:04:51.102690: Epoch time: 102.09 s +2026-04-09 17:04:52.188139: +2026-04-09 17:04:52.189629: Epoch 879 +2026-04-09 17:04:52.191074: Current learning rate: 0.00149 +2026-04-09 17:06:33.972797: train_loss -0.6376 +2026-04-09 17:06:33.977627: val_loss -0.5023 
+2026-04-09 17:06:33.979116: Pseudo dice [0.7822, 0.6401, 0.777, 0.2105, 0.4327, 0.5521, 0.7681] +2026-04-09 17:06:33.980862: Epoch time: 101.79 s +2026-04-09 17:06:35.067882: +2026-04-09 17:06:35.069933: Epoch 880 +2026-04-09 17:06:35.071932: Current learning rate: 0.00148 +2026-04-09 17:08:17.125815: train_loss -0.6428 +2026-04-09 17:08:17.131896: val_loss -0.4585 +2026-04-09 17:08:17.133608: Pseudo dice [0.6955, 0.7161, 0.5815, 0.0493, 0.4811, 0.758, 0.6512] +2026-04-09 17:08:17.135834: Epoch time: 102.06 s +2026-04-09 17:08:18.226264: +2026-04-09 17:08:18.228684: Epoch 881 +2026-04-09 17:08:18.231381: Current learning rate: 0.00147 +2026-04-09 17:10:00.037464: train_loss -0.6036 +2026-04-09 17:10:00.044879: val_loss -0.5654 +2026-04-09 17:10:00.047104: Pseudo dice [0.3633, 0.5843, 0.7039, 0.7161, 0.3414, 0.4385, 0.8021] +2026-04-09 17:10:00.049466: Epoch time: 101.81 s +2026-04-09 17:10:01.189453: +2026-04-09 17:10:01.191217: Epoch 882 +2026-04-09 17:10:01.193519: Current learning rate: 0.00146 +2026-04-09 17:11:43.798225: train_loss -0.6181 +2026-04-09 17:11:43.804834: val_loss -0.5596 +2026-04-09 17:11:43.807202: Pseudo dice [0.7252, 0.4499, 0.7012, 0.0, 0.3413, 0.7992, 0.6799] +2026-04-09 17:11:43.810224: Epoch time: 102.61 s +2026-04-09 17:11:44.935316: +2026-04-09 17:11:44.937567: Epoch 883 +2026-04-09 17:11:44.939396: Current learning rate: 0.00145 +2026-04-09 17:13:26.427556: train_loss -0.6443 +2026-04-09 17:13:26.433754: val_loss -0.563 +2026-04-09 17:13:26.435726: Pseudo dice [0.6125, 0.0888, 0.6011, 0.8585, 0.1756, 0.4031, 0.8009] +2026-04-09 17:13:26.439221: Epoch time: 101.5 s +2026-04-09 17:13:27.542073: +2026-04-09 17:13:27.544003: Epoch 884 +2026-04-09 17:13:27.545835: Current learning rate: 0.00144 +2026-04-09 17:15:10.430039: train_loss -0.6216 +2026-04-09 17:15:10.436666: val_loss -0.5123 +2026-04-09 17:15:10.439019: Pseudo dice [0.0706, 0.6191, 0.6795, 0.0111, 0.3087, 0.3953, 0.9064] +2026-04-09 17:15:10.441244: Epoch time: 102.89 s 
+2026-04-09 17:15:11.572773: +2026-04-09 17:15:11.577563: Epoch 885 +2026-04-09 17:15:11.580113: Current learning rate: 0.00143 +2026-04-09 17:16:53.690892: train_loss -0.6472 +2026-04-09 17:16:53.698033: val_loss -0.5503 +2026-04-09 17:16:53.701393: Pseudo dice [0.6602, 0.2921, 0.6512, 0.0, 0.4521, 0.661, 0.8772] +2026-04-09 17:16:53.703235: Epoch time: 102.12 s +2026-04-09 17:16:54.784512: +2026-04-09 17:16:54.786501: Epoch 886 +2026-04-09 17:16:54.788400: Current learning rate: 0.00142 +2026-04-09 17:18:37.057139: train_loss -0.6399 +2026-04-09 17:18:37.064013: val_loss -0.4777 +2026-04-09 17:18:37.066715: Pseudo dice [0.61, 0.3698, 0.5175, 0.0747, 0.5252, 0.761, 0.5522] +2026-04-09 17:18:37.068776: Epoch time: 102.28 s +2026-04-09 17:18:38.169112: +2026-04-09 17:18:38.170894: Epoch 887 +2026-04-09 17:18:38.173783: Current learning rate: 0.00141 +2026-04-09 17:20:20.830895: train_loss -0.6205 +2026-04-09 17:20:20.839822: val_loss -0.5508 +2026-04-09 17:20:20.843490: Pseudo dice [0.7955, 0.2177, 0.8408, 0.0001, 0.5334, 0.3073, 0.6317] +2026-04-09 17:20:20.845416: Epoch time: 102.66 s +2026-04-09 17:20:21.928075: +2026-04-09 17:20:21.930136: Epoch 888 +2026-04-09 17:20:21.932189: Current learning rate: 0.00139 +2026-04-09 17:22:04.244818: train_loss -0.609 +2026-04-09 17:22:04.249652: val_loss -0.5558 +2026-04-09 17:22:04.251514: Pseudo dice [0.8092, 0.51, 0.6816, 0.1286, 0.4642, 0.7252, 0.8434] +2026-04-09 17:22:04.253057: Epoch time: 102.32 s +2026-04-09 17:22:05.319892: +2026-04-09 17:22:05.321624: Epoch 889 +2026-04-09 17:22:05.323421: Current learning rate: 0.00138 +2026-04-09 17:23:48.617436: train_loss -0.6136 +2026-04-09 17:23:48.624848: val_loss -0.5955 +2026-04-09 17:23:48.627823: Pseudo dice [0.7355, 0.2243, 0.7706, 0.0, 0.4108, 0.8806, 0.8079] +2026-04-09 17:23:48.631210: Epoch time: 103.3 s +2026-04-09 17:23:49.709144: +2026-04-09 17:23:49.711659: Epoch 890 +2026-04-09 17:23:49.714312: Current learning rate: 0.00137 +2026-04-09 17:25:31.665024: 
train_loss -0.6292 +2026-04-09 17:25:31.671473: val_loss -0.5833 +2026-04-09 17:25:31.673319: Pseudo dice [0.3522, 0.0184, 0.7008, 0.4661, 0.4319, 0.8319, 0.8892] +2026-04-09 17:25:31.675362: Epoch time: 101.96 s +2026-04-09 17:25:32.762038: +2026-04-09 17:25:32.764189: Epoch 891 +2026-04-09 17:25:32.765970: Current learning rate: 0.00136 +2026-04-09 17:27:15.690757: train_loss -0.6264 +2026-04-09 17:27:15.697217: val_loss -0.5634 +2026-04-09 17:27:15.699001: Pseudo dice [0.7934, 0.1901, 0.7185, 0.0, 0.4986, 0.7672, 0.5751] +2026-04-09 17:27:15.700659: Epoch time: 102.93 s +2026-04-09 17:27:16.797209: +2026-04-09 17:27:16.801539: Epoch 892 +2026-04-09 17:27:16.802984: Current learning rate: 0.00135 +2026-04-09 17:28:59.430261: train_loss -0.6474 +2026-04-09 17:28:59.436352: val_loss -0.6084 +2026-04-09 17:28:59.438962: Pseudo dice [0.3031, 0.7215, 0.7229, 0.5233, 0.4711, 0.7779, 0.8614] +2026-04-09 17:28:59.440940: Epoch time: 102.64 s +2026-04-09 17:29:00.527947: +2026-04-09 17:29:00.529905: Epoch 893 +2026-04-09 17:29:00.532059: Current learning rate: 0.00134 +2026-04-09 17:30:43.089578: train_loss -0.6498 +2026-04-09 17:30:43.097085: val_loss -0.5295 +2026-04-09 17:30:43.099082: Pseudo dice [0.8103, 0.077, 0.7404, 0.0533, 0.5108, 0.835, 0.7745] +2026-04-09 17:30:43.102750: Epoch time: 102.56 s +2026-04-09 17:30:44.185994: +2026-04-09 17:30:44.188212: Epoch 894 +2026-04-09 17:30:44.191092: Current learning rate: 0.00133 +2026-04-09 17:32:26.482579: train_loss -0.6406 +2026-04-09 17:32:26.489413: val_loss -0.6088 +2026-04-09 17:32:26.491205: Pseudo dice [0.2672, 0.164, 0.7248, 0.0, 0.4854, 0.8551, 0.7635] +2026-04-09 17:32:26.493409: Epoch time: 102.3 s +2026-04-09 17:32:27.611569: +2026-04-09 17:32:27.613522: Epoch 895 +2026-04-09 17:32:27.615402: Current learning rate: 0.00132 +2026-04-09 17:34:09.606903: train_loss -0.6378 +2026-04-09 17:34:09.611860: val_loss -0.6143 +2026-04-09 17:34:09.613531: Pseudo dice [0.6212, 0.0, 0.7131, 0.674, 0.6017, 0.7495, 0.732] 
+2026-04-09 17:34:09.615113: Epoch time: 102.0 s +2026-04-09 17:34:10.701588: +2026-04-09 17:34:10.704001: Epoch 896 +2026-04-09 17:34:10.705470: Current learning rate: 0.0013 +2026-04-09 17:35:53.715108: train_loss -0.623 +2026-04-09 17:35:53.719850: val_loss -0.5933 +2026-04-09 17:35:53.723068: Pseudo dice [0.5757, 0.5948, 0.7502, 0.7778, 0.3979, 0.7219, 0.7587] +2026-04-09 17:35:53.724759: Epoch time: 103.02 s +2026-04-09 17:35:54.816983: +2026-04-09 17:35:54.819362: Epoch 897 +2026-04-09 17:35:54.821312: Current learning rate: 0.00129 +2026-04-09 17:37:37.241574: train_loss -0.6541 +2026-04-09 17:37:37.247530: val_loss -0.5892 +2026-04-09 17:37:37.249470: Pseudo dice [0.6589, 0.5924, 0.6785, 0.54, 0.1792, 0.8546, 0.7127] +2026-04-09 17:37:37.251242: Epoch time: 102.43 s +2026-04-09 17:37:38.318346: +2026-04-09 17:37:38.320402: Epoch 898 +2026-04-09 17:37:38.322105: Current learning rate: 0.00128 +2026-04-09 17:39:19.884802: train_loss -0.6336 +2026-04-09 17:39:19.890370: val_loss -0.6379 +2026-04-09 17:39:19.892257: Pseudo dice [0.7846, 0.1483, 0.7637, 0.0904, 0.5054, 0.8645, 0.8705] +2026-04-09 17:39:19.894476: Epoch time: 101.57 s +2026-04-09 17:39:20.973647: +2026-04-09 17:39:20.975661: Epoch 899 +2026-04-09 17:39:20.979085: Current learning rate: 0.00127 +2026-04-09 17:41:04.013172: train_loss -0.6601 +2026-04-09 17:41:04.076433: val_loss -0.6292 +2026-04-09 17:41:04.079691: Pseudo dice [0.7862, 0.4071, 0.5665, 0.422, 0.5919, 0.7465, 0.8535] +2026-04-09 17:41:04.081635: Epoch time: 103.04 s +2026-04-09 17:41:06.976142: +2026-04-09 17:41:06.978507: Epoch 900 +2026-04-09 17:41:06.980063: Current learning rate: 0.00126 +2026-04-09 17:42:48.358979: train_loss -0.6574 +2026-04-09 17:42:48.367129: val_loss -0.629 +2026-04-09 17:42:48.370779: Pseudo dice [0.7669, 0.4084, 0.7982, 0.4925, 0.4359, 0.7691, 0.7691] +2026-04-09 17:42:48.373792: Epoch time: 101.39 s +2026-04-09 17:42:49.448133: +2026-04-09 17:42:49.450153: Epoch 901 +2026-04-09 17:42:49.451823: Current 
learning rate: 0.00125 +2026-04-09 17:44:32.111582: train_loss -0.6628 +2026-04-09 17:44:32.117764: val_loss -0.4697 +2026-04-09 17:44:32.120291: Pseudo dice [0.7115, 0.706, 0.6887, 0.0193, 0.3076, 0.8414, 0.8113] +2026-04-09 17:44:32.122673: Epoch time: 102.67 s +2026-04-09 17:44:33.210385: +2026-04-09 17:44:33.215510: Epoch 902 +2026-04-09 17:44:33.219011: Current learning rate: 0.00124 +2026-04-09 17:46:16.553199: train_loss -0.6696 +2026-04-09 17:46:16.566754: val_loss -0.5295 +2026-04-09 17:46:16.571761: Pseudo dice [0.0278, 0.0629, 0.7758, 0.0497, 0.3243, 0.6066, 0.8044] +2026-04-09 17:46:16.576381: Epoch time: 103.35 s +2026-04-09 17:46:17.673001: +2026-04-09 17:46:17.675329: Epoch 903 +2026-04-09 17:46:17.679777: Current learning rate: 0.00122 +2026-04-09 17:48:00.029948: train_loss -0.6572 +2026-04-09 17:48:00.036553: val_loss -0.5989 +2026-04-09 17:48:00.038826: Pseudo dice [0.8354, 0.1351, 0.7685, 0.0, 0.3718, 0.8178, 0.6659] +2026-04-09 17:48:00.041270: Epoch time: 102.36 s +2026-04-09 17:48:01.129050: +2026-04-09 17:48:01.131987: Epoch 904 +2026-04-09 17:48:01.135292: Current learning rate: 0.00121 +2026-04-09 17:49:43.701890: train_loss -0.6507 +2026-04-09 17:49:43.710132: val_loss -0.6018 +2026-04-09 17:49:43.712076: Pseudo dice [0.8086, 0.6813, 0.7503, 0.8513, 0.4861, 0.6475, 0.763] +2026-04-09 17:49:43.713787: Epoch time: 102.58 s +2026-04-09 17:49:44.781382: +2026-04-09 17:49:44.783254: Epoch 905 +2026-04-09 17:49:44.785118: Current learning rate: 0.0012 +2026-04-09 17:51:26.388644: train_loss -0.6513 +2026-04-09 17:51:26.394878: val_loss -0.5015 +2026-04-09 17:51:26.397050: Pseudo dice [0.6304, 0.1402, 0.7689, 0.0, 0.5972, 0.7784, 0.6368] +2026-04-09 17:51:26.399088: Epoch time: 101.61 s +2026-04-09 17:51:27.483681: +2026-04-09 17:51:27.486860: Epoch 906 +2026-04-09 17:51:27.488808: Current learning rate: 0.00119 +2026-04-09 17:53:09.292355: train_loss -0.6537 +2026-04-09 17:53:09.298172: val_loss -0.6169 +2026-04-09 17:53:09.299758: Pseudo dice 
[0.6902, 0.1349, 0.7673, 0.6622, 0.4628, 0.5586, 0.8362] +2026-04-09 17:53:09.301592: Epoch time: 101.81 s +2026-04-09 17:53:10.386224: +2026-04-09 17:53:10.388381: Epoch 907 +2026-04-09 17:53:10.390489: Current learning rate: 0.00118 +2026-04-09 17:54:51.936922: train_loss -0.6437 +2026-04-09 17:54:51.942677: val_loss -0.4483 +2026-04-09 17:54:51.945615: Pseudo dice [0.7038, 0.4292, 0.6329, 0.0031, 0.3712, 0.6434, 0.7353] +2026-04-09 17:54:51.947908: Epoch time: 101.55 s +2026-04-09 17:54:53.031699: +2026-04-09 17:54:53.034609: Epoch 908 +2026-04-09 17:54:53.037561: Current learning rate: 0.00117 +2026-04-09 17:56:34.624377: train_loss -0.6612 +2026-04-09 17:56:34.629720: val_loss -0.5824 +2026-04-09 17:56:34.631691: Pseudo dice [0.5169, 0.085, 0.6678, 0.4511, 0.5377, 0.7763, 0.745] +2026-04-09 17:56:34.635742: Epoch time: 101.6 s +2026-04-09 17:56:35.765702: +2026-04-09 17:56:35.768145: Epoch 909 +2026-04-09 17:56:35.771790: Current learning rate: 0.00116 +2026-04-09 17:58:18.921724: train_loss -0.6373 +2026-04-09 17:58:18.929365: val_loss -0.5988 +2026-04-09 17:58:18.931312: Pseudo dice [0.6659, 0.364, 0.8235, 0.6158, 0.3046, 0.654, 0.8325] +2026-04-09 17:58:18.934246: Epoch time: 103.16 s +2026-04-09 17:58:20.019988: +2026-04-09 17:58:20.024500: Epoch 910 +2026-04-09 17:58:20.026527: Current learning rate: 0.00115 +2026-04-09 18:00:01.941763: train_loss -0.6749 +2026-04-09 18:00:01.948018: val_loss -0.5682 +2026-04-09 18:00:01.950242: Pseudo dice [0.466, 0.0668, 0.83, 0.0093, 0.4444, 0.5901, 0.6447] +2026-04-09 18:00:01.951981: Epoch time: 101.92 s +2026-04-09 18:00:03.056897: +2026-04-09 18:00:03.059207: Epoch 911 +2026-04-09 18:00:03.060832: Current learning rate: 0.00113 +2026-04-09 18:01:44.497517: train_loss -0.6727 +2026-04-09 18:01:44.503851: val_loss -0.6261 +2026-04-09 18:01:44.506021: Pseudo dice [0.7021, 0.4027, 0.7902, 0.8789, 0.3495, 0.8477, 0.8894] +2026-04-09 18:01:44.508196: Epoch time: 101.44 s +2026-04-09 18:01:45.586035: +2026-04-09 
18:01:45.587692: Epoch 912 +2026-04-09 18:01:45.589534: Current learning rate: 0.00112 +2026-04-09 18:03:27.785095: train_loss -0.6661 +2026-04-09 18:03:27.800094: val_loss -0.6091 +2026-04-09 18:03:27.804462: Pseudo dice [0.8403, 0.2819, 0.8238, 0.7109, 0.37, 0.8706, 0.9136] +2026-04-09 18:03:27.808743: Epoch time: 102.2 s +2026-04-09 18:03:28.919450: +2026-04-09 18:03:28.921247: Epoch 913 +2026-04-09 18:03:28.922680: Current learning rate: 0.00111 +2026-04-09 18:05:10.936756: train_loss -0.6704 +2026-04-09 18:05:10.942497: val_loss -0.6158 +2026-04-09 18:05:10.944570: Pseudo dice [0.8051, 0.6463, 0.8271, 0.678, 0.5527, 0.8657, 0.7795] +2026-04-09 18:05:10.947240: Epoch time: 102.02 s +2026-04-09 18:05:12.026003: +2026-04-09 18:05:12.027969: Epoch 914 +2026-04-09 18:05:12.029771: Current learning rate: 0.0011 +2026-04-09 18:06:53.949465: train_loss -0.6496 +2026-04-09 18:06:53.956352: val_loss -0.6078 +2026-04-09 18:06:53.958202: Pseudo dice [0.64, 0.267, 0.6981, 0.0185, 0.4509, 0.8283, 0.8055] +2026-04-09 18:06:53.959965: Epoch time: 101.93 s +2026-04-09 18:06:55.085615: +2026-04-09 18:06:55.088141: Epoch 915 +2026-04-09 18:06:55.092094: Current learning rate: 0.00109 +2026-04-09 18:08:37.236237: train_loss -0.6568 +2026-04-09 18:08:37.242949: val_loss -0.6182 +2026-04-09 18:08:37.245190: Pseudo dice [0.4883, 0.5758, 0.7431, 0.9109, 0.3503, 0.7331, 0.7626] +2026-04-09 18:08:37.248137: Epoch time: 102.15 s +2026-04-09 18:08:38.333358: +2026-04-09 18:08:38.335559: Epoch 916 +2026-04-09 18:08:38.338583: Current learning rate: 0.00108 +2026-04-09 18:10:22.491752: train_loss -0.6563 +2026-04-09 18:10:22.500164: val_loss -0.5705 +2026-04-09 18:10:22.502513: Pseudo dice [0.2365, 0.0446, 0.7135, 0.8491, 0.1267, 0.8616, 0.6869] +2026-04-09 18:10:22.505097: Epoch time: 104.16 s +2026-04-09 18:10:23.603004: +2026-04-09 18:10:23.604876: Epoch 917 +2026-04-09 18:10:23.608221: Current learning rate: 0.00106 +2026-04-09 18:12:08.269081: train_loss -0.6585 +2026-04-09 
18:12:08.275754: val_loss -0.6178 +2026-04-09 18:12:08.279259: Pseudo dice [0.6459, 0.5431, 0.8435, 0.1748, 0.3847, 0.7969, 0.8824] +2026-04-09 18:12:08.281587: Epoch time: 104.67 s +2026-04-09 18:12:09.377049: +2026-04-09 18:12:09.380197: Epoch 918 +2026-04-09 18:12:09.382498: Current learning rate: 0.00105 +2026-04-09 18:13:55.815120: train_loss -0.6333 +2026-04-09 18:13:55.827114: val_loss -0.4909 +2026-04-09 18:13:55.829918: Pseudo dice [0.8611, 0.6786, 0.4602, 0.0, 0.1825, 0.2147, 0.7992] +2026-04-09 18:13:55.832816: Epoch time: 106.44 s +2026-04-09 18:13:56.961525: +2026-04-09 18:13:56.967180: Epoch 919 +2026-04-09 18:13:56.971601: Current learning rate: 0.00104 +2026-04-09 18:15:45.086820: train_loss -0.6186 +2026-04-09 18:15:45.096545: val_loss -0.5212 +2026-04-09 18:15:45.099266: Pseudo dice [0.3775, 0.1796, 0.5538, 0.0, 0.3674, 0.9013, 0.6863] +2026-04-09 18:15:45.101815: Epoch time: 108.13 s +2026-04-09 18:15:47.414814: +2026-04-09 18:15:47.417554: Epoch 920 +2026-04-09 18:15:47.419257: Current learning rate: 0.00103 +2026-04-09 18:17:32.877624: train_loss -0.6631 +2026-04-09 18:17:32.887757: val_loss -0.6092 +2026-04-09 18:17:32.891052: Pseudo dice [0.8377, 0.5519, 0.7428, 0.7629, 0.3281, 0.8324, 0.6121] +2026-04-09 18:17:32.893502: Epoch time: 105.47 s +2026-04-09 18:17:33.988449: +2026-04-09 18:17:33.993441: Epoch 921 +2026-04-09 18:17:34.002209: Current learning rate: 0.00102 +2026-04-09 18:19:18.115440: train_loss -0.6512 +2026-04-09 18:19:18.126289: val_loss -0.5428 +2026-04-09 18:19:18.129395: Pseudo dice [0.6997, 0.2954, 0.6588, 0.9139, 0.4157, 0.8642, 0.7437] +2026-04-09 18:19:18.131999: Epoch time: 104.13 s +2026-04-09 18:19:19.203160: +2026-04-09 18:19:19.207466: Epoch 922 +2026-04-09 18:19:19.211314: Current learning rate: 0.00101 +2026-04-09 18:21:08.855236: train_loss -0.6728 +2026-04-09 18:21:08.865443: val_loss -0.4201 +2026-04-09 18:21:08.870014: Pseudo dice [0.826, 0.2529, 0.6953, 0.0016, 0.4717, 0.4489, 0.6091] +2026-04-09 
18:21:08.873106: Epoch time: 109.66 s +2026-04-09 18:21:09.967870: +2026-04-09 18:21:09.970408: Epoch 923 +2026-04-09 18:21:09.974637: Current learning rate: 0.001 +2026-04-09 18:22:53.274513: train_loss -0.6721 +2026-04-09 18:22:53.281142: val_loss -0.5628 +2026-04-09 18:22:53.283430: Pseudo dice [0.8161, 0.3281, 0.6765, 0.4051, 0.2458, 0.8672, 0.7988] +2026-04-09 18:22:53.285481: Epoch time: 103.31 s +2026-04-09 18:22:54.395985: +2026-04-09 18:22:54.398731: Epoch 924 +2026-04-09 18:22:54.400911: Current learning rate: 0.00098 +2026-04-09 18:24:36.997173: train_loss -0.6658 +2026-04-09 18:24:37.010719: val_loss -0.569 +2026-04-09 18:24:37.015927: Pseudo dice [0.3001, 0.1183, 0.7693, 0.0383, 0.3971, 0.7119, 0.574] +2026-04-09 18:24:37.020229: Epoch time: 102.6 s +2026-04-09 18:24:38.109720: +2026-04-09 18:24:38.117520: Epoch 925 +2026-04-09 18:24:38.122547: Current learning rate: 0.00097 +2026-04-09 18:26:25.832420: train_loss -0.6721 +2026-04-09 18:26:25.841353: val_loss -0.5096 +2026-04-09 18:26:25.843970: Pseudo dice [0.6867, 0.0911, 0.6902, 0.1029, 0.3425, 0.7089, 0.5851] +2026-04-09 18:26:25.848854: Epoch time: 107.73 s +2026-04-09 18:26:26.941296: +2026-04-09 18:26:26.943548: Epoch 926 +2026-04-09 18:26:26.947207: Current learning rate: 0.00096 +2026-04-09 18:28:13.289983: train_loss -0.6636 +2026-04-09 18:28:13.296618: val_loss -0.501 +2026-04-09 18:28:13.299579: Pseudo dice [0.7886, 0.1646, 0.7105, 0.0002, 0.4701, 0.8426, 0.7852] +2026-04-09 18:28:13.302693: Epoch time: 106.35 s +2026-04-09 18:28:14.381225: +2026-04-09 18:28:14.383627: Epoch 927 +2026-04-09 18:28:14.388418: Current learning rate: 0.00095 +2026-04-09 18:29:56.390297: train_loss -0.6575 +2026-04-09 18:29:56.396795: val_loss -0.5236 +2026-04-09 18:29:56.399889: Pseudo dice [0.7595, 0.0592, 0.5612, 0.0045, 0.4341, 0.5939, 0.7295] +2026-04-09 18:29:56.402485: Epoch time: 102.01 s +2026-04-09 18:29:57.508239: +2026-04-09 18:29:57.510272: Epoch 928 +2026-04-09 18:29:57.512499: Current learning 
rate: 0.00094 +2026-04-09 18:31:41.967859: train_loss -0.6574 +2026-04-09 18:31:41.977498: val_loss -0.5357 +2026-04-09 18:31:41.981514: Pseudo dice [0.8822, 0.6873, 0.5634, 0.3378, 0.1738, 0.7729, 0.8582] +2026-04-09 18:31:41.985020: Epoch time: 104.46 s +2026-04-09 18:31:43.077633: +2026-04-09 18:31:43.080288: Epoch 929 +2026-04-09 18:31:43.082541: Current learning rate: 0.00092 +2026-04-09 18:33:25.937498: train_loss -0.6679 +2026-04-09 18:33:25.943938: val_loss -0.5909 +2026-04-09 18:33:25.946201: Pseudo dice [0.6083, 0.3565, 0.7514, 0.7947, 0.5258, 0.828, 0.6515] +2026-04-09 18:33:25.948451: Epoch time: 102.86 s +2026-04-09 18:33:27.038133: +2026-04-09 18:33:27.042384: Epoch 930 +2026-04-09 18:33:27.046720: Current learning rate: 0.00091 +2026-04-09 18:35:10.745307: train_loss -0.657 +2026-04-09 18:35:10.763292: val_loss -0.5954 +2026-04-09 18:35:10.768366: Pseudo dice [0.7732, 0.6714, 0.802, 0.0, 0.3501, 0.8161, 0.8562] +2026-04-09 18:35:10.774355: Epoch time: 103.71 s +2026-04-09 18:35:11.870115: +2026-04-09 18:35:11.873153: Epoch 931 +2026-04-09 18:35:11.875311: Current learning rate: 0.0009 +2026-04-09 18:36:55.365112: train_loss -0.6757 +2026-04-09 18:36:55.395993: val_loss -0.5303 +2026-04-09 18:36:55.399662: Pseudo dice [0.392, 0.5001, 0.8546, 0.0634, 0.3102, 0.7555, 0.8266] +2026-04-09 18:36:55.401771: Epoch time: 103.5 s +2026-04-09 18:36:56.486519: +2026-04-09 18:36:56.488486: Epoch 932 +2026-04-09 18:36:56.490631: Current learning rate: 0.00089 +2026-04-09 18:38:40.263583: train_loss -0.6722 +2026-04-09 18:38:40.269730: val_loss -0.5035 +2026-04-09 18:38:40.272056: Pseudo dice [0.8096, 0.2648, 0.7155, 0.102, 0.2922, 0.8198, 0.8652] +2026-04-09 18:38:40.274558: Epoch time: 103.78 s +2026-04-09 18:38:41.458466: +2026-04-09 18:38:41.460225: Epoch 933 +2026-04-09 18:38:41.462058: Current learning rate: 0.00088 +2026-04-09 18:40:23.357399: train_loss -0.6647 +2026-04-09 18:40:23.364795: val_loss -0.6204 +2026-04-09 18:40:23.366486: Pseudo dice [0.7738, 
0.6217, 0.7399, 0.6965, 0.3353, 0.8348, 0.8578] +2026-04-09 18:40:23.368922: Epoch time: 101.9 s +2026-04-09 18:40:24.455972: +2026-04-09 18:40:24.458095: Epoch 934 +2026-04-09 18:40:24.460154: Current learning rate: 0.00087 +2026-04-09 18:42:09.878879: train_loss -0.6763 +2026-04-09 18:42:09.885448: val_loss -0.563 +2026-04-09 18:42:09.889243: Pseudo dice [0.2209, 0.5957, 0.8504, 0.0363, 0.4287, 0.7633, 0.7724] +2026-04-09 18:42:09.892900: Epoch time: 105.43 s +2026-04-09 18:42:11.007040: +2026-04-09 18:42:11.010051: Epoch 935 +2026-04-09 18:42:11.012205: Current learning rate: 0.00085 +2026-04-09 18:43:54.110355: train_loss -0.6661 +2026-04-09 18:43:54.118301: val_loss -0.5778 +2026-04-09 18:43:54.121068: Pseudo dice [0.7777, 0.5726, 0.7575, 0.7, 0.3288, 0.6625, 0.9006] +2026-04-09 18:43:54.123517: Epoch time: 103.11 s +2026-04-09 18:43:55.208198: +2026-04-09 18:43:55.210365: Epoch 936 +2026-04-09 18:43:55.212381: Current learning rate: 0.00084 +2026-04-09 18:45:38.025717: train_loss -0.6798 +2026-04-09 18:45:38.033574: val_loss -0.5892 +2026-04-09 18:45:38.036028: Pseudo dice [0.7, 0.5905, 0.744, 0.48, 0.3758, 0.8104, 0.7766] +2026-04-09 18:45:38.038280: Epoch time: 102.82 s +2026-04-09 18:45:39.135767: +2026-04-09 18:45:39.137817: Epoch 937 +2026-04-09 18:45:39.140110: Current learning rate: 0.00083 +2026-04-09 18:47:27.621919: train_loss -0.6933 +2026-04-09 18:47:27.628867: val_loss -0.6232 +2026-04-09 18:47:27.631486: Pseudo dice [0.7423, 0.5601, 0.7102, 0.6364, 0.2072, 0.6295, 0.6623] +2026-04-09 18:47:27.633780: Epoch time: 108.49 s +2026-04-09 18:47:28.736768: +2026-04-09 18:47:28.739078: Epoch 938 +2026-04-09 18:47:28.741012: Current learning rate: 0.00082 +2026-04-09 18:49:12.042120: train_loss -0.6784 +2026-04-09 18:49:12.049424: val_loss -0.599 +2026-04-09 18:49:12.051366: Pseudo dice [0.8246, 0.0402, 0.7786, 0.8463, 0.2252, 0.8417, 0.7441] +2026-04-09 18:49:12.053104: Epoch time: 103.31 s +2026-04-09 18:49:13.121825: +2026-04-09 18:49:13.123718: Epoch 
939 +2026-04-09 18:49:13.125458: Current learning rate: 0.00081 +2026-04-09 18:50:54.822760: train_loss -0.6722 +2026-04-09 18:50:54.828855: val_loss -0.5209 +2026-04-09 18:50:54.830956: Pseudo dice [0.4517, 0.379, 0.785, 0.0005, 0.1576, 0.7535, 0.6065] +2026-04-09 18:50:54.833065: Epoch time: 101.7 s +2026-04-09 18:50:55.926823: +2026-04-09 18:50:55.928499: Epoch 940 +2026-04-09 18:50:55.930362: Current learning rate: 0.00079 +2026-04-09 18:52:38.096364: train_loss -0.6783 +2026-04-09 18:52:38.105157: val_loss -0.554 +2026-04-09 18:52:38.107771: Pseudo dice [0.5497, 0.1901, 0.7169, 0.082, 0.463, 0.7259, 0.7089] +2026-04-09 18:52:38.110688: Epoch time: 102.17 s +2026-04-09 18:52:40.424920: +2026-04-09 18:52:40.427172: Epoch 941 +2026-04-09 18:52:40.428969: Current learning rate: 0.00078 +2026-04-09 18:54:23.568418: train_loss -0.6779 +2026-04-09 18:54:23.575852: val_loss -0.6136 +2026-04-09 18:54:23.577850: Pseudo dice [0.3784, 0.4446, 0.7644, 0.1361, 0.1794, 0.7996, 0.5327] +2026-04-09 18:54:23.581195: Epoch time: 103.15 s +2026-04-09 18:54:24.661869: +2026-04-09 18:54:24.663799: Epoch 942 +2026-04-09 18:54:24.665765: Current learning rate: 0.00077 +2026-04-09 18:56:07.032352: train_loss -0.6815 +2026-04-09 18:56:07.039129: val_loss -0.5797 +2026-04-09 18:56:07.041170: Pseudo dice [0.8143, 0.5935, 0.7219, 0.3836, 0.4255, 0.8435, 0.8834] +2026-04-09 18:56:07.043475: Epoch time: 102.37 s +2026-04-09 18:56:08.134406: +2026-04-09 18:56:08.136786: Epoch 943 +2026-04-09 18:56:08.138885: Current learning rate: 0.00076 +2026-04-09 18:57:50.391707: train_loss -0.6906 +2026-04-09 18:57:50.397428: val_loss -0.6386 +2026-04-09 18:57:50.399206: Pseudo dice [0.5152, 0.2709, 0.7778, 0.8281, 0.3406, 0.7921, 0.868] +2026-04-09 18:57:50.400842: Epoch time: 102.26 s +2026-04-09 18:57:51.466902: +2026-04-09 18:57:51.469890: Epoch 944 +2026-04-09 18:57:51.472205: Current learning rate: 0.00075 +2026-04-09 18:59:35.822193: train_loss -0.695 +2026-04-09 18:59:35.830547: val_loss -0.5952 
+2026-04-09 18:59:35.832706: Pseudo dice [0.7859, 0.2995, 0.7828, 0.0005, 0.4538, 0.7187, 0.7612] +2026-04-09 18:59:35.834856: Epoch time: 104.36 s +2026-04-09 18:59:36.943518: +2026-04-09 18:59:36.946984: Epoch 945 +2026-04-09 18:59:36.948980: Current learning rate: 0.00074 +2026-04-09 19:01:19.374437: train_loss -0.6905 +2026-04-09 19:01:19.380162: val_loss -0.5687 +2026-04-09 19:01:19.381893: Pseudo dice [0.5443, 0.2561, 0.8119, 0.0191, 0.4583, 0.7948, 0.6263] +2026-04-09 19:01:19.383718: Epoch time: 102.43 s +2026-04-09 19:01:20.487090: +2026-04-09 19:01:20.489496: Epoch 946 +2026-04-09 19:01:20.491329: Current learning rate: 0.00072 +2026-04-09 19:03:02.915804: train_loss -0.6925 +2026-04-09 19:03:02.921552: val_loss -0.6182 +2026-04-09 19:03:02.923613: Pseudo dice [0.6668, 0.225, 0.8001, 0.5225, 0.5842, 0.8133, 0.4608] +2026-04-09 19:03:02.925439: Epoch time: 102.43 s +2026-04-09 19:03:04.013087: +2026-04-09 19:03:04.014884: Epoch 947 +2026-04-09 19:03:04.016675: Current learning rate: 0.00071 +2026-04-09 19:04:48.515134: train_loss -0.6878 +2026-04-09 19:04:48.531924: val_loss -0.5853 +2026-04-09 19:04:48.539035: Pseudo dice [0.8021, 0.119, 0.6717, 0.2873, 0.458, 0.8404, 0.8069] +2026-04-09 19:04:48.544025: Epoch time: 104.51 s +2026-04-09 19:04:49.643601: +2026-04-09 19:04:49.647211: Epoch 948 +2026-04-09 19:04:49.653086: Current learning rate: 0.0007 +2026-04-09 19:06:32.978053: train_loss -0.6868 +2026-04-09 19:06:32.992470: val_loss -0.5421 +2026-04-09 19:06:32.997066: Pseudo dice [0.8198, 0.1599, 0.7737, 0.0, 0.365, 0.8427, 0.7118] +2026-04-09 19:06:32.999921: Epoch time: 103.34 s +2026-04-09 19:06:34.113341: +2026-04-09 19:06:34.115526: Epoch 949 +2026-04-09 19:06:34.117366: Current learning rate: 0.00069 +2026-04-09 19:08:15.675600: train_loss -0.6916 +2026-04-09 19:08:15.686999: val_loss -0.6167 +2026-04-09 19:08:15.689100: Pseudo dice [0.5671, 0.4229, 0.8089, 0.4803, 0.5046, 0.7918, 0.7025] +2026-04-09 19:08:15.691247: Epoch time: 101.57 s 
+2026-04-09 19:08:18.583023: +2026-04-09 19:08:18.584673: Epoch 950 +2026-04-09 19:08:18.586221: Current learning rate: 0.00067 +2026-04-09 19:10:07.310677: train_loss -0.702 +2026-04-09 19:10:07.320796: val_loss -0.5876 +2026-04-09 19:10:07.323947: Pseudo dice [0.2998, 0.3466, 0.6064, 0.2005, 0.5843, 0.8459, 0.8343] +2026-04-09 19:10:07.325758: Epoch time: 108.73 s +2026-04-09 19:10:08.437592: +2026-04-09 19:10:08.441094: Epoch 951 +2026-04-09 19:10:08.443752: Current learning rate: 0.00066 +2026-04-09 19:11:50.221938: train_loss -0.688 +2026-04-09 19:11:50.228348: val_loss -0.6105 +2026-04-09 19:11:50.230221: Pseudo dice [0.855, 0.4315, 0.7139, 0.4519, 0.5794, 0.87, 0.7085] +2026-04-09 19:11:50.233808: Epoch time: 101.79 s +2026-04-09 19:11:51.310771: +2026-04-09 19:11:51.312632: Epoch 952 +2026-04-09 19:11:51.314196: Current learning rate: 0.00065 +2026-04-09 19:13:33.957771: train_loss -0.6994 +2026-04-09 19:13:33.966761: val_loss -0.5668 +2026-04-09 19:13:33.969332: Pseudo dice [0.8247, 0.6223, 0.7312, 0.733, 0.2988, 0.8016, 0.7163] +2026-04-09 19:13:33.972813: Epoch time: 102.65 s +2026-04-09 19:13:35.076948: +2026-04-09 19:13:35.079331: Epoch 953 +2026-04-09 19:13:35.081890: Current learning rate: 0.00064 +2026-04-09 19:15:18.315599: train_loss -0.6902 +2026-04-09 19:15:18.331473: val_loss -0.5112 +2026-04-09 19:15:18.336076: Pseudo dice [0.7262, 0.5269, 0.8145, 0.0591, 0.4039, 0.8538, 0.6653] +2026-04-09 19:15:18.340751: Epoch time: 103.24 s +2026-04-09 19:15:19.465147: +2026-04-09 19:15:19.467703: Epoch 954 +2026-04-09 19:15:19.469587: Current learning rate: 0.00063 +2026-04-09 19:17:01.237286: train_loss -0.6859 +2026-04-09 19:17:01.243546: val_loss -0.6203 +2026-04-09 19:17:01.245937: Pseudo dice [0.3909, 0.2589, 0.8562, 0.7288, 0.5635, 0.8015, 0.8599] +2026-04-09 19:17:01.247979: Epoch time: 101.78 s +2026-04-09 19:17:02.395422: +2026-04-09 19:17:02.397443: Epoch 955 +2026-04-09 19:17:02.399405: Current learning rate: 0.00061 +2026-04-09 
19:18:44.731643: train_loss -0.6964 +2026-04-09 19:18:44.737657: val_loss -0.406 +2026-04-09 19:18:44.740210: Pseudo dice [0.6906, 0.4656, 0.5949, 0.0, 0.5013, 0.8608, 0.7447] +2026-04-09 19:18:44.742643: Epoch time: 102.34 s +2026-04-09 19:18:45.872726: +2026-04-09 19:18:45.876065: Epoch 956 +2026-04-09 19:18:45.878550: Current learning rate: 0.0006 +2026-04-09 19:20:27.611827: train_loss -0.7041 +2026-04-09 19:20:27.618860: val_loss -0.6073 +2026-04-09 19:20:27.620527: Pseudo dice [0.6368, 0.2062, 0.651, 0.8546, 0.4536, 0.7895, 0.8262] +2026-04-09 19:20:27.622809: Epoch time: 101.74 s +2026-04-09 19:20:28.731214: +2026-04-09 19:20:28.734171: Epoch 957 +2026-04-09 19:20:28.742795: Current learning rate: 0.00059 +2026-04-09 19:22:13.191683: train_loss -0.6978 +2026-04-09 19:22:13.199029: val_loss -0.5313 +2026-04-09 19:22:13.201688: Pseudo dice [0.6012, 0.3622, 0.8621, 0.1303, 0.3776, 0.7966, 0.6398] +2026-04-09 19:22:13.204192: Epoch time: 104.46 s +2026-04-09 19:22:14.317398: +2026-04-09 19:22:14.319937: Epoch 958 +2026-04-09 19:22:14.321815: Current learning rate: 0.00058 +2026-04-09 19:23:58.177570: train_loss -0.696 +2026-04-09 19:23:58.184257: val_loss -0.618 +2026-04-09 19:23:58.186830: Pseudo dice [0.303, 0.2324, 0.7884, 0.8086, 0.422, 0.8518, 0.705] +2026-04-09 19:23:58.188933: Epoch time: 103.86 s +2026-04-09 19:23:59.340647: +2026-04-09 19:23:59.342942: Epoch 959 +2026-04-09 19:23:59.344825: Current learning rate: 0.00056 +2026-04-09 19:25:41.154613: train_loss -0.698 +2026-04-09 19:25:41.162435: val_loss -0.5583 +2026-04-09 19:25:41.164681: Pseudo dice [0.7138, 0.2181, 0.8288, 0.1353, 0.444, 0.872, 0.7377] +2026-04-09 19:25:41.166725: Epoch time: 101.82 s +2026-04-09 19:25:42.278409: +2026-04-09 19:25:42.281118: Epoch 960 +2026-04-09 19:25:42.282728: Current learning rate: 0.00055 +2026-04-09 19:27:27.186269: train_loss -0.7064 +2026-04-09 19:27:27.195310: val_loss -0.5972 +2026-04-09 19:27:27.198946: Pseudo dice [0.2298, 0.3969, 0.7563, 0.8979, 0.4801, 
0.6399, 0.8401] +2026-04-09 19:27:27.201021: Epoch time: 104.91 s +2026-04-09 19:27:29.535413: +2026-04-09 19:27:29.537581: Epoch 961 +2026-04-09 19:27:29.540096: Current learning rate: 0.00054 +2026-04-09 19:29:12.956080: train_loss -0.6888 +2026-04-09 19:29:12.963545: val_loss -0.5942 +2026-04-09 19:29:12.966310: Pseudo dice [0.7233, 0.561, 0.7356, 0.14, 0.4572, 0.8597, 0.8791] +2026-04-09 19:29:12.968703: Epoch time: 103.42 s +2026-04-09 19:29:14.067876: +2026-04-09 19:29:14.070025: Epoch 962 +2026-04-09 19:29:14.071560: Current learning rate: 0.00053 +2026-04-09 19:30:56.435082: train_loss -0.7065 +2026-04-09 19:30:56.443042: val_loss -0.4726 +2026-04-09 19:30:56.445937: Pseudo dice [0.7511, 0.6911, 0.7082, 0.0, 0.5536, 0.8105, 0.8019] +2026-04-09 19:30:56.448421: Epoch time: 102.37 s +2026-04-09 19:30:57.593759: +2026-04-09 19:30:57.597105: Epoch 963 +2026-04-09 19:30:57.601391: Current learning rate: 0.00051 +2026-04-09 19:32:40.669480: train_loss -0.7073 +2026-04-09 19:32:40.676591: val_loss -0.4887 +2026-04-09 19:32:40.678636: Pseudo dice [0.6481, 0.28, 0.8192, 0.0, 0.4555, 0.6043, 0.8669] +2026-04-09 19:32:40.681167: Epoch time: 103.08 s +2026-04-09 19:32:41.793746: +2026-04-09 19:32:41.801735: Epoch 964 +2026-04-09 19:32:41.804052: Current learning rate: 0.0005 +2026-04-09 19:34:24.659737: train_loss -0.6996 +2026-04-09 19:34:24.666666: val_loss -0.6059 +2026-04-09 19:34:24.668677: Pseudo dice [0.5722, 0.6214, 0.6257, 0.7612, 0.4724, 0.8419, 0.7678] +2026-04-09 19:34:24.670903: Epoch time: 102.87 s +2026-04-09 19:34:25.782651: +2026-04-09 19:34:25.785015: Epoch 965 +2026-04-09 19:34:25.786685: Current learning rate: 0.00049 +2026-04-09 19:36:07.916308: train_loss -0.6972 +2026-04-09 19:36:07.921964: val_loss -0.5471 +2026-04-09 19:36:07.924386: Pseudo dice [0.6537, 0.0041, 0.6403, 0.5971, 0.4056, 0.7965, 0.7445] +2026-04-09 19:36:07.926372: Epoch time: 102.14 s +2026-04-09 19:36:09.050150: +2026-04-09 19:36:09.052060: Epoch 966 +2026-04-09 
19:36:09.054852: Current learning rate: 0.00048 +2026-04-09 19:37:51.488827: train_loss -0.7003 +2026-04-09 19:37:51.495637: val_loss -0.6197 +2026-04-09 19:37:51.498166: Pseudo dice [0.2937, 0.7834, 0.7933, 0.597, 0.4141, 0.7373, 0.898] +2026-04-09 19:37:51.501288: Epoch time: 102.44 s +2026-04-09 19:37:52.628126: +2026-04-09 19:37:52.630589: Epoch 967 +2026-04-09 19:37:52.632339: Current learning rate: 0.00046 +2026-04-09 19:39:36.875554: train_loss -0.6885 +2026-04-09 19:39:36.883440: val_loss -0.5204 +2026-04-09 19:39:36.886454: Pseudo dice [0.8293, 0.4692, 0.7868, 0.0377, 0.1246, 0.8003, 0.5597] +2026-04-09 19:39:36.888894: Epoch time: 104.25 s +2026-04-09 19:39:37.999803: +2026-04-09 19:39:38.001678: Epoch 968 +2026-04-09 19:39:38.004531: Current learning rate: 0.00045 +2026-04-09 19:41:20.479260: train_loss -0.6908 +2026-04-09 19:41:20.486043: val_loss -0.5089 +2026-04-09 19:41:20.488679: Pseudo dice [0.7647, 0.0662, 0.852, 0.0002, 0.3883, 0.8796, 0.7556] +2026-04-09 19:41:20.490183: Epoch time: 102.48 s +2026-04-09 19:41:21.602127: +2026-04-09 19:41:21.604306: Epoch 969 +2026-04-09 19:41:21.606305: Current learning rate: 0.00044 +2026-04-09 19:43:04.156276: train_loss -0.6977 +2026-04-09 19:43:04.161700: val_loss -0.6171 +2026-04-09 19:43:04.163630: Pseudo dice [0.8734, 0.7165, 0.6853, 0.8851, 0.4759, 0.6664, 0.8194] +2026-04-09 19:43:04.165609: Epoch time: 102.56 s +2026-04-09 19:43:05.282210: +2026-04-09 19:43:05.284599: Epoch 970 +2026-04-09 19:43:05.286454: Current learning rate: 0.00043 +2026-04-09 19:44:47.935544: train_loss -0.7034 +2026-04-09 19:44:47.943987: val_loss -0.598 +2026-04-09 19:44:47.946190: Pseudo dice [0.6464, 0.0222, 0.5181, 0.0803, 0.3418, 0.7922, 0.9136] +2026-04-09 19:44:47.949277: Epoch time: 102.66 s +2026-04-09 19:44:49.078172: +2026-04-09 19:44:49.080102: Epoch 971 +2026-04-09 19:44:49.081944: Current learning rate: 0.00041 +2026-04-09 19:46:31.574091: train_loss -0.6975 +2026-04-09 19:46:31.580769: val_loss -0.5604 +2026-04-09 
19:46:31.582608: Pseudo dice [0.5184, 0.4864, 0.5974, 0.1146, 0.3796, 0.8465, 0.9022] +2026-04-09 19:46:31.584499: Epoch time: 102.5 s +2026-04-09 19:46:32.697552: +2026-04-09 19:46:32.700954: Epoch 972 +2026-04-09 19:46:32.703325: Current learning rate: 0.0004 +2026-04-09 19:48:14.480034: train_loss -0.7045 +2026-04-09 19:48:14.486366: val_loss -0.6681 +2026-04-09 19:48:14.488431: Pseudo dice [0.8146, 0.2755, 0.8176, 0.0176, 0.7032, 0.8851, 0.7765] +2026-04-09 19:48:14.490754: Epoch time: 101.79 s +2026-04-09 19:48:15.603760: +2026-04-09 19:48:15.605427: Epoch 973 +2026-04-09 19:48:15.607418: Current learning rate: 0.00039 +2026-04-09 19:49:59.181993: train_loss -0.7006 +2026-04-09 19:49:59.189232: val_loss -0.4171 +2026-04-09 19:49:59.191722: Pseudo dice [0.4064, 0.714, 0.6814, 0.0236, 0.36, 0.7236, 0.6754] +2026-04-09 19:49:59.194931: Epoch time: 103.58 s +2026-04-09 19:50:00.323423: +2026-04-09 19:50:00.326095: Epoch 974 +2026-04-09 19:50:00.327990: Current learning rate: 0.00037 +2026-04-09 19:51:42.675872: train_loss -0.6961 +2026-04-09 19:51:42.682286: val_loss -0.398 +2026-04-09 19:51:42.684288: Pseudo dice [0.8007, 0.1867, 0.7053, 0.0457, 0.367, 0.7511, 0.7507] +2026-04-09 19:51:42.686249: Epoch time: 102.36 s +2026-04-09 19:51:43.799291: +2026-04-09 19:51:43.812708: Epoch 975 +2026-04-09 19:51:43.814735: Current learning rate: 0.00036 +2026-04-09 19:53:25.919189: train_loss -0.704 +2026-04-09 19:53:25.926057: val_loss -0.5795 +2026-04-09 19:53:25.931313: Pseudo dice [0.8392, 0.44, 0.6992, 0.1566, 0.5196, 0.7284, 0.8833] +2026-04-09 19:53:25.933654: Epoch time: 102.12 s +2026-04-09 19:53:27.032669: +2026-04-09 19:53:27.038045: Epoch 976 +2026-04-09 19:53:27.039748: Current learning rate: 0.00035 +2026-04-09 19:55:10.438113: train_loss -0.6847 +2026-04-09 19:55:10.444583: val_loss -0.5713 +2026-04-09 19:55:10.448024: Pseudo dice [0.8229, 0.2316, 0.7574, 0.0008, 0.2926, 0.7988, 0.6981] +2026-04-09 19:55:10.449979: Epoch time: 103.41 s +2026-04-09 
19:55:11.564522: +2026-04-09 19:55:11.566897: Epoch 977 +2026-04-09 19:55:11.569871: Current learning rate: 0.00034 +2026-04-09 19:56:53.436375: train_loss -0.7046 +2026-04-09 19:56:53.445612: val_loss -0.6061 +2026-04-09 19:56:53.448547: Pseudo dice [0.7387, 0.0, 0.7809, 0.0844, 0.296, 0.8573, 0.8274] +2026-04-09 19:56:53.450395: Epoch time: 101.88 s +2026-04-09 19:56:54.559986: +2026-04-09 19:56:54.561891: Epoch 978 +2026-04-09 19:56:54.563601: Current learning rate: 0.00032 +2026-04-09 19:58:36.079263: train_loss -0.7025 +2026-04-09 19:58:36.084232: val_loss -0.6391 +2026-04-09 19:58:36.085943: Pseudo dice [0.2742, 0.062, 0.8755, 0.087, 0.4143, 0.8131, 0.6865] +2026-04-09 19:58:36.087500: Epoch time: 101.52 s +2026-04-09 19:58:37.184851: +2026-04-09 19:58:37.186528: Epoch 979 +2026-04-09 19:58:37.188264: Current learning rate: 0.00031 +2026-04-09 20:00:19.638749: train_loss -0.6976 +2026-04-09 20:00:19.655234: val_loss -0.5575 +2026-04-09 20:00:19.660436: Pseudo dice [0.3967, 0.7215, 0.7962, 0.1772, 0.3338, 0.8359, 0.8214] +2026-04-09 20:00:19.664912: Epoch time: 102.46 s +2026-04-09 20:00:20.782690: +2026-04-09 20:00:20.794355: Epoch 980 +2026-04-09 20:00:20.800291: Current learning rate: 0.0003 +2026-04-09 20:02:05.562682: train_loss -0.704 +2026-04-09 20:02:05.569047: val_loss -0.6189 +2026-04-09 20:02:05.571330: Pseudo dice [0.8407, 0.1024, 0.7107, 0.7384, 0.2427, 0.7859, 0.9182] +2026-04-09 20:02:05.572871: Epoch time: 104.78 s +2026-04-09 20:02:06.695857: +2026-04-09 20:02:06.697508: Epoch 981 +2026-04-09 20:02:06.699295: Current learning rate: 0.00028 +2026-04-09 20:03:49.577243: train_loss -0.7 +2026-04-09 20:03:49.584074: val_loss -0.6263 +2026-04-09 20:03:49.585876: Pseudo dice [0.3921, 0.8103, 0.6802, 0.6145, 0.4322, 0.8119, 0.8018] +2026-04-09 20:03:49.588150: Epoch time: 102.88 s +2026-04-09 20:03:52.039813: +2026-04-09 20:03:52.044215: Epoch 982 +2026-04-09 20:03:52.048480: Current learning rate: 0.00027 +2026-04-09 20:05:34.373267: train_loss 
-0.7079 +2026-04-09 20:05:34.381612: val_loss -0.6446 +2026-04-09 20:05:34.384489: Pseudo dice [0.4263, 0.5109, 0.7674, 0.0, 0.4416, 0.9047, 0.8746] +2026-04-09 20:05:34.386985: Epoch time: 102.34 s +2026-04-09 20:05:35.515781: +2026-04-09 20:05:35.518158: Epoch 983 +2026-04-09 20:05:35.519953: Current learning rate: 0.00026 +2026-04-09 20:07:18.792682: train_loss -0.7029 +2026-04-09 20:07:18.807771: val_loss -0.6476 +2026-04-09 20:07:18.813182: Pseudo dice [0.7604, 0.5548, 0.7586, 0.9145, 0.3941, 0.8672, 0.7367] +2026-04-09 20:07:18.818060: Epoch time: 103.28 s +2026-04-09 20:07:19.942342: +2026-04-09 20:07:19.944862: Epoch 984 +2026-04-09 20:07:19.949330: Current learning rate: 0.00024 +2026-04-09 20:09:02.251055: train_loss -0.707 +2026-04-09 20:09:02.257611: val_loss -0.6122 +2026-04-09 20:09:02.259444: Pseudo dice [0.7072, 0.457, 0.8192, 0.7612, 0.3753, 0.7266, 0.7605] +2026-04-09 20:09:02.261659: Epoch time: 102.31 s +2026-04-09 20:09:03.492026: +2026-04-09 20:09:03.493750: Epoch 985 +2026-04-09 20:09:03.495436: Current learning rate: 0.00023 +2026-04-09 20:10:45.808801: train_loss -0.7032 +2026-04-09 20:10:45.816083: val_loss -0.4397 +2026-04-09 20:10:45.818544: Pseudo dice [0.0, 0.5201, 0.7065, 0.0364, 0.2793, 0.5141, 0.6867] +2026-04-09 20:10:45.820390: Epoch time: 102.32 s +2026-04-09 20:10:46.963913: +2026-04-09 20:10:46.965485: Epoch 986 +2026-04-09 20:10:46.967457: Current learning rate: 0.00021 +2026-04-09 20:12:34.511248: train_loss -0.7085 +2026-04-09 20:12:34.518605: val_loss -0.6357 +2026-04-09 20:12:34.521124: Pseudo dice [0.2448, 0.7053, 0.7737, 0.723, 0.4972, 0.7354, 0.9054] +2026-04-09 20:12:34.523784: Epoch time: 107.55 s +2026-04-09 20:12:35.631683: +2026-04-09 20:12:35.634679: Epoch 987 +2026-04-09 20:12:35.636744: Current learning rate: 0.0002 +2026-04-09 20:14:18.291924: train_loss -0.7054 +2026-04-09 20:14:18.299073: val_loss -0.5298 +2026-04-09 20:14:18.300918: Pseudo dice [0.7359, 0.6672, 0.6652, 0.1539, 0.4001, 0.7985, 0.7669] 
+2026-04-09 20:14:18.302600: Epoch time: 102.66 s +2026-04-09 20:14:19.405952: +2026-04-09 20:14:19.409001: Epoch 988 +2026-04-09 20:14:19.410725: Current learning rate: 0.00019 +2026-04-09 20:16:01.821221: train_loss -0.7108 +2026-04-09 20:16:01.833382: val_loss -0.4521 +2026-04-09 20:16:01.837346: Pseudo dice [0.6334, 0.7305, 0.7565, 0.0382, 0.3491, 0.7037, 0.6899] +2026-04-09 20:16:01.840390: Epoch time: 102.42 s +2026-04-09 20:16:02.961516: +2026-04-09 20:16:02.964394: Epoch 989 +2026-04-09 20:16:02.966517: Current learning rate: 0.00017 +2026-04-09 20:17:46.766303: train_loss -0.7028 +2026-04-09 20:17:46.773497: val_loss -0.6414 +2026-04-09 20:17:46.777155: Pseudo dice [0.8088, 0.2474, 0.7903, 0.0787, 0.5393, 0.8767, 0.8171] +2026-04-09 20:17:46.780097: Epoch time: 103.81 s +2026-04-09 20:17:47.908497: +2026-04-09 20:17:47.913227: Epoch 990 +2026-04-09 20:17:47.919946: Current learning rate: 0.00016 +2026-04-09 20:19:30.824306: train_loss -0.7049 +2026-04-09 20:19:30.830254: val_loss -0.6485 +2026-04-09 20:19:30.832316: Pseudo dice [0.8361, 0.6981, 0.77, 0.5032, 0.3724, 0.7334, 0.8015] +2026-04-09 20:19:30.834402: Epoch time: 102.92 s +2026-04-09 20:19:31.939935: +2026-04-09 20:19:31.942381: Epoch 991 +2026-04-09 20:19:31.946368: Current learning rate: 0.00014 +2026-04-09 20:21:14.100300: train_loss -0.7045 +2026-04-09 20:21:14.107730: val_loss -0.5867 +2026-04-09 20:21:14.109672: Pseudo dice [0.8493, 0.6278, 0.749, 0.1146, 0.5063, 0.9154, 0.6175] +2026-04-09 20:21:14.111573: Epoch time: 102.16 s +2026-04-09 20:21:15.221647: +2026-04-09 20:21:15.223759: Epoch 992 +2026-04-09 20:21:15.225613: Current learning rate: 0.00013 +2026-04-09 20:22:58.989954: train_loss -0.7052 +2026-04-09 20:22:58.997957: val_loss -0.6318 +2026-04-09 20:22:59.001050: Pseudo dice [0.843, 0.6159, 0.7614, 0.4489, 0.2686, 0.7645, 0.8836] +2026-04-09 20:22:59.003093: Epoch time: 103.77 s +2026-04-09 20:23:00.131948: +2026-04-09 20:23:00.134356: Epoch 993 +2026-04-09 20:23:00.136352: 
Current learning rate: 0.00011 +2026-04-09 20:24:45.158235: train_loss -0.7072 +2026-04-09 20:24:45.163992: val_loss -0.6085 +2026-04-09 20:24:45.166156: Pseudo dice [0.8404, 0.6248, 0.813, 0.7536, 0.2504, 0.893, 0.587] +2026-04-09 20:24:45.168446: Epoch time: 105.03 s +2026-04-09 20:24:46.298077: +2026-04-09 20:24:46.300248: Epoch 994 +2026-04-09 20:24:46.302200: Current learning rate: 0.0001 +2026-04-09 20:26:29.081748: train_loss -0.6914 +2026-04-09 20:26:29.088190: val_loss -0.5384 +2026-04-09 20:26:29.090125: Pseudo dice [0.8098, 0.777, 0.7941, 0.0659, 0.4282, 0.9084, 0.5081] +2026-04-09 20:26:29.092199: Epoch time: 102.79 s +2026-04-09 20:26:30.210352: +2026-04-09 20:26:30.213309: Epoch 995 +2026-04-09 20:26:30.215741: Current learning rate: 8e-05 +2026-04-09 20:28:12.076208: train_loss -0.6944 +2026-04-09 20:28:12.083363: val_loss -0.6303 +2026-04-09 20:28:12.085853: Pseudo dice [0.8263, 0.576, 0.8397, 0.2163, 0.4414, 0.9072, 0.7864] +2026-04-09 20:28:12.088288: Epoch time: 101.87 s +2026-04-09 20:28:13.189760: +2026-04-09 20:28:13.191556: Epoch 996 +2026-04-09 20:28:13.194013: Current learning rate: 7e-05 +2026-04-09 20:29:58.628554: train_loss -0.7074 +2026-04-09 20:29:58.634733: val_loss -0.646 +2026-04-09 20:29:58.636794: Pseudo dice [0.8719, 0.5852, 0.8401, 0.2761, 0.2452, 0.838, 0.9077] +2026-04-09 20:29:58.639022: Epoch time: 105.44 s +2026-04-09 20:29:59.758933: +2026-04-09 20:29:59.761504: Epoch 997 +2026-04-09 20:29:59.764304: Current learning rate: 5e-05 +2026-04-09 20:31:42.325144: train_loss -0.7079 +2026-04-09 20:31:42.330774: val_loss -0.5753 +2026-04-09 20:31:42.332881: Pseudo dice [0.6696, 0.6544, 0.8487, 0.1705, 0.5106, 0.6465, 0.7806] +2026-04-09 20:31:42.335747: Epoch time: 102.57 s +2026-04-09 20:31:43.448949: +2026-04-09 20:31:43.450868: Epoch 998 +2026-04-09 20:31:43.452659: Current learning rate: 4e-05 +2026-04-09 20:33:24.780898: train_loss -0.7112 +2026-04-09 20:33:24.787617: val_loss -0.5645 +2026-04-09 20:33:24.789779: Pseudo dice 
[0.5696, 0.5579, 0.631, 0.2422, 0.4158, 0.8781, 0.8072] +2026-04-09 20:33:24.791437: Epoch time: 101.34 s +2026-04-09 20:33:25.893669: +2026-04-09 20:33:25.895484: Epoch 999 +2026-04-09 20:33:25.898581: Current learning rate: 2e-05 +2026-04-09 20:35:12.150044: train_loss -0.711 +2026-04-09 20:35:12.156508: val_loss -0.5817 +2026-04-09 20:35:12.158624: Pseudo dice [0.5018, 0.6104, 0.7575, 0.181, 0.4915, 0.7836, 0.6565] +2026-04-09 20:35:12.160801: Epoch time: 106.26 s +2026-04-09 20:35:15.146359: Training done. +2026-04-09 20:35:15.736932: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-09 20:35:15.744144: The split file contains 5 splits. +2026-04-09 20:35:15.745638: Desired fold for training: 0 +2026-04-09 20:35:15.747163: This split has 387 training and 97 validation cases. +2026-04-09 20:35:15.749385: predicting MSWAL_0017 +2026-04-09 20:35:15.759811: MSWAL_0017, shape torch.Size([1, 177, 532, 532]), rank 0 +2026-04-09 20:36:18.443364: predicting MSWAL_0018 +2026-04-09 20:36:18.472111: MSWAL_0018, shape torch.Size([1, 285, 507, 507]), rank 0 +2026-04-09 20:36:39.461335: predicting MSWAL_0020 +2026-04-09 20:36:39.486710: MSWAL_0020, shape torch.Size([1, 433, 595, 595]), rank 0 +2026-04-09 20:37:31.771586: predicting MSWAL_0028 +2026-04-09 20:37:31.812181: MSWAL_0028, shape torch.Size([1, 137, 507, 507]), rank 0 +2026-04-09 20:37:40.976159: predicting MSWAL_0031 +2026-04-09 20:37:40.990937: MSWAL_0031, shape torch.Size([1, 217, 507, 507]), rank 0 +2026-04-09 20:37:55.649189: predicting MSWAL_0040 +2026-04-09 20:37:55.675290: MSWAL_0040, shape torch.Size([1, 189, 551, 551]), rank 0 +2026-04-09 20:38:19.855614: predicting MSWAL_0041 +2026-04-09 20:38:19.875998: MSWAL_0041, shape torch.Size([1, 157, 507, 507]), rank 0 +2026-04-09 20:38:30.278680: predicting MSWAL_0046 +2026-04-09 20:38:30.292641: MSWAL_0046, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 20:38:47.040021: predicting 
MSWAL_0050 +2026-04-09 20:38:47.111170: MSWAL_0050, shape torch.Size([1, 157, 507, 507]), rank 0 +2026-04-09 20:38:57.417983: predicting MSWAL_0059 +2026-04-09 20:38:57.449261: MSWAL_0059, shape torch.Size([1, 189, 565, 565]), rank 0 +2026-04-09 20:39:29.891292: predicting MSWAL_0060 +2026-04-09 20:39:29.910175: MSWAL_0060, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 20:39:54.118518: predicting MSWAL_0066 +2026-04-09 20:39:54.156564: MSWAL_0066, shape torch.Size([1, 157, 507, 507]), rank 0 +2026-04-09 20:40:13.098097: predicting MSWAL_0069 +2026-04-09 20:40:13.124876: MSWAL_0069, shape torch.Size([1, 177, 569, 569]), rank 0 +2026-04-09 20:41:14.974001: predicting MSWAL_0080 +2026-04-09 20:41:15.686790: MSWAL_0080, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 20:42:18.034130: predicting MSWAL_0084 +2026-04-09 20:42:18.092467: MSWAL_0084, shape torch.Size([1, 145, 507, 507]), rank 0 +2026-04-09 20:42:32.845515: predicting MSWAL_0085 +2026-04-09 20:42:32.869905: MSWAL_0085, shape torch.Size([1, 177, 563, 563]), rank 0 +2026-04-09 20:42:59.965673: predicting MSWAL_0099 +2026-04-09 20:42:59.997889: MSWAL_0099, shape torch.Size([1, 193, 489, 489]), rank 0 +2026-04-09 20:43:24.367362: predicting MSWAL_0102 +2026-04-09 20:43:24.416464: MSWAL_0102, shape torch.Size([1, 194, 463, 463]), rank 0 +2026-04-09 20:43:44.354681: predicting MSWAL_0124 +2026-04-09 20:43:44.389040: MSWAL_0124, shape torch.Size([1, 317, 599, 599]), rank 0 +2026-04-09 20:44:55.961440: predicting MSWAL_0125 +2026-04-09 20:44:56.109873: MSWAL_0125, shape torch.Size([1, 169, 507, 507]), rank 0 +2026-04-09 20:45:31.200987: predicting MSWAL_0127 +2026-04-09 20:45:31.233126: MSWAL_0127, shape torch.Size([1, 169, 507, 507]), rank 0 +2026-04-09 20:46:10.226916: predicting MSWAL_0130 +2026-04-09 20:46:10.305209: MSWAL_0130, shape torch.Size([1, 205, 507, 507]), rank 0 +2026-04-09 20:46:33.526303: predicting MSWAL_0140 +2026-04-09 20:46:33.551577: MSWAL_0140, shape torch.Size([1, 458, 
573, 573]), rank 0 +2026-04-09 20:48:17.839656: predicting MSWAL_0142 +2026-04-09 20:48:17.875458: MSWAL_0142, shape torch.Size([1, 274, 480, 480]), rank 0 +2026-04-09 20:48:36.973164: predicting MSWAL_0143 +2026-04-09 20:48:37.003096: MSWAL_0143, shape torch.Size([1, 466, 615, 615]), rank 0 +2026-04-09 20:49:37.770267: predicting MSWAL_0145 +2026-04-09 20:49:37.821694: MSWAL_0145, shape torch.Size([1, 373, 631, 631]), rank 0 +2026-04-09 20:50:22.820650: predicting MSWAL_0148 +2026-04-09 20:50:22.860089: MSWAL_0148, shape torch.Size([1, 270, 480, 480]), rank 0 +2026-04-09 20:50:39.969929: predicting MSWAL_0162 +2026-04-09 20:50:40.002913: MSWAL_0162, shape torch.Size([1, 326, 533, 533]), rank 0 +2026-04-09 20:51:17.294566: predicting MSWAL_0168 +2026-04-09 20:51:17.320986: MSWAL_0168, shape torch.Size([1, 134, 529, 529]), rank 0 +2026-04-09 20:51:32.843127: predicting MSWAL_0188 +2026-04-09 20:51:32.865855: MSWAL_0188, shape torch.Size([1, 478, 623, 623]), rank 0 +2026-04-09 20:52:37.654926: predicting MSWAL_0189 +2026-04-09 20:52:37.697953: MSWAL_0189, shape torch.Size([1, 313, 507, 507]), rank 0 +2026-04-09 20:53:04.437489: predicting MSWAL_0194 +2026-04-09 20:53:04.467048: MSWAL_0194, shape torch.Size([1, 330, 529, 529]), rank 0 +2026-04-09 20:53:51.304507: predicting MSWAL_0207 +2026-04-09 20:53:51.346456: MSWAL_0207, shape torch.Size([1, 298, 480, 480]), rank 0 +2026-04-09 20:54:18.782392: predicting MSWAL_0217 +2026-04-09 20:54:18.803197: MSWAL_0217, shape torch.Size([1, 185, 507, 507]), rank 0 +2026-04-09 20:54:37.029758: predicting MSWAL_0223 +2026-04-09 20:54:37.080889: MSWAL_0223, shape torch.Size([1, 201, 580, 580]), rank 0 +2026-04-09 20:55:14.414013: predicting MSWAL_0225 +2026-04-09 20:55:14.443141: MSWAL_0225, shape torch.Size([1, 193, 507, 507]), rank 0 +2026-04-09 20:55:42.325162: predicting MSWAL_0226 +2026-04-09 20:55:42.350232: MSWAL_0226, shape torch.Size([1, 485, 548, 548]), rank 0 +2026-04-09 20:58:57.319866: predicting MSWAL_0227 +2026-04-09 
20:58:57.345398: MSWAL_0227, shape torch.Size([1, 309, 507, 507]), rank 0 +2026-04-09 20:59:33.384335: predicting MSWAL_0233 +2026-04-09 20:59:33.492486: MSWAL_0233, shape torch.Size([1, 296, 556, 556]), rank 0 +2026-04-09 21:00:11.410578: predicting MSWAL_0234 +2026-04-09 21:00:11.460725: MSWAL_0234, shape torch.Size([1, 553, 572, 572]), rank 0 +2026-04-09 21:01:18.618278: predicting MSWAL_0245 +2026-04-09 21:01:18.693192: MSWAL_0245, shape torch.Size([1, 286, 452, 452]), rank 0 +2026-04-09 21:01:40.682406: predicting MSWAL_0254 +2026-04-09 21:01:40.712983: MSWAL_0254, shape torch.Size([1, 322, 496, 496]), rank 0 +2026-04-09 21:02:02.004962: predicting MSWAL_0259 +2026-04-09 21:02:02.037740: MSWAL_0259, shape torch.Size([1, 305, 507, 507]), rank 0 +2026-04-09 21:02:23.266172: predicting MSWAL_0261 +2026-04-09 21:02:23.295713: MSWAL_0261, shape torch.Size([1, 176, 555, 555]), rank 0 +2026-04-09 21:02:46.101202: predicting MSWAL_0262 +2026-04-09 21:02:46.129063: MSWAL_0262, shape torch.Size([1, 355, 515, 515]), rank 0 +2026-04-09 21:03:30.455443: predicting MSWAL_0265 +2026-04-09 21:03:30.491095: MSWAL_0265, shape torch.Size([1, 182, 480, 480]), rank 0 +2026-04-09 21:03:43.137811: predicting MSWAL_0273 +2026-04-09 21:03:43.161151: MSWAL_0273, shape torch.Size([1, 367, 507, 507]), rank 0 +2026-04-09 21:04:08.729980: predicting MSWAL_0279 +2026-04-09 21:04:08.764280: MSWAL_0279, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:04:21.684303: predicting MSWAL_0293 +2026-04-09 21:04:21.708267: MSWAL_0293, shape torch.Size([1, 174, 507, 507]), rank 0 +2026-04-09 21:04:34.474417: predicting MSWAL_0296 +2026-04-09 21:04:34.508100: MSWAL_0296, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:04:47.320826: predicting MSWAL_0313 +2026-04-09 21:04:47.351597: MSWAL_0313, shape torch.Size([1, 157, 507, 507]), rank 0 +2026-04-09 21:04:56.215017: predicting MSWAL_0316 +2026-04-09 21:04:56.243111: MSWAL_0316, shape torch.Size([1, 177, 432, 432]), rank 0 
+2026-04-09 21:05:09.038962: predicting MSWAL_0323 +2026-04-09 21:05:09.061321: MSWAL_0323, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:05:21.905480: predicting MSWAL_0331 +2026-04-09 21:05:21.935879: MSWAL_0331, shape torch.Size([1, 189, 480, 480]), rank 0 +2026-04-09 21:05:34.621684: predicting MSWAL_0391 +2026-04-09 21:05:34.662497: MSWAL_0391, shape torch.Size([1, 369, 537, 537]), rank 0 +2026-04-09 21:06:18.881754: predicting MSWAL_0410 +2026-04-09 21:06:18.924105: MSWAL_0410, shape torch.Size([1, 177, 512, 512]), rank 0 +2026-04-09 21:06:31.913143: predicting MSWAL_0412 +2026-04-09 21:06:31.939616: MSWAL_0412, shape torch.Size([1, 197, 507, 507]), rank 0 +2026-04-09 21:06:45.309192: predicting MSWAL_0416 +2026-04-09 21:06:45.342741: MSWAL_0416, shape torch.Size([1, 197, 564, 564]), rank 0 +2026-04-09 21:07:07.795879: predicting MSWAL_0419 +2026-04-09 21:07:07.827441: MSWAL_0419, shape torch.Size([1, 295, 552, 552]), rank 0 +2026-04-09 21:07:44.646656: predicting MSWAL_0420 +2026-04-09 21:07:44.682062: MSWAL_0420, shape torch.Size([1, 165, 579, 579]), rank 0 +2026-04-09 21:07:59.828973: predicting MSWAL_0421 +2026-04-09 21:07:59.855083: MSWAL_0421, shape torch.Size([1, 377, 556, 556]), rank 0 +2026-04-09 21:08:44.804643: predicting MSWAL_0422 +2026-04-09 21:08:44.846661: MSWAL_0422, shape torch.Size([1, 379, 583, 583]), rank 0 +2026-04-09 21:09:29.451886: predicting MSWAL_0430 +2026-04-09 21:09:29.490148: MSWAL_0430, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:09:42.493477: predicting MSWAL_0436 +2026-04-09 21:09:42.515013: MSWAL_0436, shape torch.Size([1, 349, 536, 536]), rank 0 +2026-04-09 21:10:27.077791: predicting MSWAL_0446 +2026-04-09 21:10:27.112551: MSWAL_0446, shape torch.Size([1, 333, 531, 531]), rank 0 +2026-04-09 21:11:04.376353: predicting MSWAL_0453 +2026-04-09 21:11:04.410028: MSWAL_0453, shape torch.Size([1, 164, 444, 444]), rank 0 +2026-04-09 21:11:13.027496: predicting MSWAL_0465 +2026-04-09 21:11:13.053752: 
MSWAL_0465, shape torch.Size([1, 301, 543, 543]), rank 0 +2026-04-09 21:11:51.076766: predicting MSWAL_0475 +2026-04-09 21:11:51.109122: MSWAL_0475, shape torch.Size([1, 184, 507, 507]), rank 0 +2026-04-09 21:12:03.974002: predicting MSWAL_0476 +2026-04-09 21:12:04.000101: MSWAL_0476, shape torch.Size([1, 185, 507, 507]), rank 0 +2026-04-09 21:12:16.854957: predicting MSWAL_0484 +2026-04-09 21:12:16.889790: MSWAL_0484, shape torch.Size([1, 197, 507, 507]), rank 0 +2026-04-09 21:12:30.057227: predicting MSWAL_0492 +2026-04-09 21:12:30.079746: MSWAL_0492, shape torch.Size([1, 237, 507, 507]), rank 0 +2026-04-09 21:12:47.099094: predicting MSWAL_0504 +2026-04-09 21:12:47.180554: MSWAL_0504, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:13:00.385093: predicting MSWAL_0505 +2026-04-09 21:13:00.410301: MSWAL_0505, shape torch.Size([1, 277, 531, 531]), rank 0 +2026-04-09 21:13:30.196900: predicting MSWAL_0510 +2026-04-09 21:13:30.228942: MSWAL_0510, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:13:43.133753: predicting MSWAL_0527 +2026-04-09 21:13:43.171674: MSWAL_0527, shape torch.Size([1, 189, 524, 524]), rank 0 +2026-04-09 21:14:05.531676: predicting MSWAL_0530 +2026-04-09 21:14:05.555477: MSWAL_0530, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:14:18.316779: predicting MSWAL_0540 +2026-04-09 21:14:18.339246: MSWAL_0540, shape torch.Size([1, 329, 615, 615]), rank 0 +2026-04-09 21:14:55.569952: predicting MSWAL_0544 +2026-04-09 21:14:55.600834: MSWAL_0544, shape torch.Size([1, 264, 605, 605]), rank 0 +2026-04-09 21:15:25.447992: predicting MSWAL_0545 +2026-04-09 21:15:25.499366: MSWAL_0545, shape torch.Size([1, 185, 539, 539]), rank 0 +2026-04-09 21:15:48.169212: predicting MSWAL_0546 +2026-04-09 21:15:48.204895: MSWAL_0546, shape torch.Size([1, 273, 543, 543]), rank 0 +2026-04-09 21:16:17.888944: predicting MSWAL_0547 +2026-04-09 21:16:17.924180: MSWAL_0547, shape torch.Size([1, 285, 537, 537]), rank 0 +2026-04-09 
21:16:54.811932: predicting MSWAL_0552 +2026-04-09 21:16:54.832446: MSWAL_0552, shape torch.Size([1, 297, 617, 617]), rank 0 +2026-04-09 21:17:32.284848: predicting MSWAL_0554 +2026-04-09 21:17:32.335743: MSWAL_0554, shape torch.Size([1, 177, 555, 555]), rank 0 +2026-04-09 21:17:55.568768: predicting MSWAL_0563 +2026-04-09 21:17:55.593982: MSWAL_0563, shape torch.Size([1, 284, 561, 561]), rank 0 +2026-04-09 21:18:32.547870: predicting MSWAL_0568 +2026-04-09 21:18:32.589804: MSWAL_0568, shape torch.Size([1, 323, 529, 529]), rank 0 +2026-04-09 21:19:09.671229: predicting MSWAL_0573 +2026-04-09 21:19:09.698811: MSWAL_0573, shape torch.Size([1, 189, 507, 507]), rank 0 +2026-04-09 21:19:22.624308: predicting MSWAL_0621 +2026-04-09 21:19:22.650449: MSWAL_0621, shape torch.Size([1, 208, 351, 351]), rank 0 +2026-04-09 21:19:28.358879: predicting MSWAL_0628 +2026-04-09 21:19:28.380585: MSWAL_0628, shape torch.Size([1, 258, 543, 543]), rank 0 +2026-04-09 21:19:58.098927: predicting MSWAL_0641 +2026-04-09 21:19:58.121834: MSWAL_0641, shape torch.Size([1, 328, 559, 559]), rank 0 +2026-04-09 21:20:35.546705: predicting MSWAL_0643 +2026-04-09 21:20:35.571045: MSWAL_0643, shape torch.Size([1, 402, 536, 536]), rank 0 +2026-04-09 21:21:28.206171: predicting MSWAL_0649 +2026-04-09 21:21:28.236086: MSWAL_0649, shape torch.Size([1, 225, 507, 507]), rank 0 +2026-04-09 21:21:45.193662: predicting MSWAL_0651 +2026-04-09 21:21:45.226925: MSWAL_0651, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:21:58.449810: predicting MSWAL_0662 +2026-04-09 21:21:58.476506: MSWAL_0662, shape torch.Size([1, 338, 496, 496]), rank 0 +2026-04-09 21:22:23.860850: predicting MSWAL_0675 +2026-04-09 21:22:23.880762: MSWAL_0675, shape torch.Size([1, 340, 597, 597]), rank 0 +2026-04-09 21:23:08.407837: predicting MSWAL_0677 +2026-04-09 21:23:08.434611: MSWAL_0677, shape torch.Size([1, 280, 593, 593]), rank 0 +2026-04-09 21:23:38.808768: predicting MSWAL_0679 +2026-04-09 21:23:38.831518: MSWAL_0679, 
shape torch.Size([1, 502, 480, 480]), rank 0 +2026-04-09 21:24:12.471174: predicting MSWAL_0692 +2026-04-09 21:24:12.505482: MSWAL_0692, shape torch.Size([1, 293, 540, 540]), rank 0 +2026-04-09 21:26:21.946182: Validation complete +2026-04-09 21:26:21.948803: Mean Validation Dice: 0.4436371730114793 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/checkpoint_best.pth b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..bfe9332f94aba25878ca7e961daab97664a864fd --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:77df7dfcce21ab0d9ac0087cf0ca07c7d91f7ee1f7e7f83422028c137e08ef68 +size 1129335698 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/checkpoint_final.pth b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/checkpoint_final.pth new file mode 100644 index 0000000000000000000000000000000000000000..a7a13f346007d71d321f756d351e5447b2b64f83 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/checkpoint_final.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c5d7bc3fad5294c4f4260e12c24b20d5b4aedda5be1aac03fc6b33d4be4ce6dc +size 1129848726 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/debug.json b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/debug.json new file mode 100644 index 0000000000000000000000000000000000000000..30e164164d3ea607db2eaee2e66813d93ee85ea4 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/debug.json @@ -0,0 +1,53 @@ +{ + "_best_ema": "None", + "batch_size": "2", + "configuration_manager": "{'data_identifier': 
'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}", + "configuration_name": "3d_fullres", + "cudnn_version": 90100, + "current_epoch": "0", + "dataloader_train": "", + "dataloader_train.generator": "", + "dataloader_train.num_processes": "12", + "dataloader_train.transform": "None", + "dataloader_val": "", + "dataloader_val.generator": "", + "dataloader_val.num_processes": "6", + "dataloader_val.transform": "None", + 
"dataset_json": "{'name': 'MSWAL', 'description': ' 3D Multi-class Segmentation of Whole Abdominal Lesions Dataset', 'licence': 'CC BY-NC 4.0', 'relase': 'July 8, 2025', 'tensorImageSize': '3D', 'file_ending': '.nii.gz', 'channel_names': {'0': 'CT'}, 'labels': {'background': 0, 'gallstone': 1, 'kidney stone': 2, 'liver tumor': 3, 'kidney tumor': 4, 'pancreatic cancer': 5, 'liver cyst': 6, 'kidney cyst': 7}, 'numTraining': 484, 'numTest': 210, 'training': [{'image': './imagesTr/MSWAL_0001_0000.nii.gz', 'label': './labelsTr/MSWAL_0001.nii.gz'}, {'image': './imagesTr/MSWAL_0002_0000.nii.gz', 'label': './labelsTr/MSWAL_0002.nii.gz'}, {'image': './imagesTr/MSWAL_0003_0000.nii.gz', 'label': './labelsTr/MSWAL_0003.nii.gz'}, {'image': './imagesTr/MSWAL_0008_0000.nii.gz', 'label': './labelsTr/MSWAL_0008.nii.gz'}, {'image': './imagesTr/MSWAL_0009_0000.nii.gz', 'label': './labelsTr/MSWAL_0009.nii.gz'}, {'image': './imagesTr/MSWAL_0011_0000.nii.gz', 'label': './labelsTr/MSWAL_0011.nii.gz'}, {'image': './imagesTr/MSWAL_0013_0000.nii.gz', 'label': './labelsTr/MSWAL_0013.nii.gz'}, {'image': './imagesTr/MSWAL_0014_0000.nii.gz', 'label': './labelsTr/MSWAL_0014.nii.gz'}, {'image': './imagesTr/MSWAL_0015_0000.nii.gz', 'label': './labelsTr/MSWAL_0015.nii.gz'}, {'image': './imagesTr/MSWAL_0017_0000.nii.gz', 'label': './labelsTr/MSWAL_0017.nii.gz'}, {'image': './imagesTr/MSWAL_0018_0000.nii.gz', 'label': './labelsTr/MSWAL_0018.nii.gz'}, {'image': './imagesTr/MSWAL_0020_0000.nii.gz', 'label': './labelsTr/MSWAL_0020.nii.gz'}, {'image': './imagesTr/MSWAL_0021_0000.nii.gz', 'label': './labelsTr/MSWAL_0021.nii.gz'}, {'image': './imagesTr/MSWAL_0022_0000.nii.gz', 'label': './labelsTr/MSWAL_0022.nii.gz'}, {'image': './imagesTr/MSWAL_0024_0000.nii.gz', 'label': './labelsTr/MSWAL_0024.nii.gz'}, {'image': './imagesTr/MSWAL_0026_0000.nii.gz', 'label': './labelsTr/MSWAL_0026.nii.gz'}, {'image': './imagesTr/MSWAL_0027_0000.nii.gz', 'label': './labelsTr/MSWAL_0027.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0028_0000.nii.gz', 'label': './labelsTr/MSWAL_0028.nii.gz'}, {'image': './imagesTr/MSWAL_0029_0000.nii.gz', 'label': './labelsTr/MSWAL_0029.nii.gz'}, {'image': './imagesTr/MSWAL_0031_0000.nii.gz', 'label': './labelsTr/MSWAL_0031.nii.gz'}, {'image': './imagesTr/MSWAL_0032_0000.nii.gz', 'label': './labelsTr/MSWAL_0032.nii.gz'}, {'image': './imagesTr/MSWAL_0033_0000.nii.gz', 'label': './labelsTr/MSWAL_0033.nii.gz'}, {'image': './imagesTr/MSWAL_0034_0000.nii.gz', 'label': './labelsTr/MSWAL_0034.nii.gz'}, {'image': './imagesTr/MSWAL_0035_0000.nii.gz', 'label': './labelsTr/MSWAL_0035.nii.gz'}, {'image': './imagesTr/MSWAL_0037_0000.nii.gz', 'label': './labelsTr/MSWAL_0037.nii.gz'}, {'image': './imagesTr/MSWAL_0038_0000.nii.gz', 'label': './labelsTr/MSWAL_0038.nii.gz'}, {'image': './imagesTr/MSWAL_0039_0000.nii.gz', 'label': './labelsTr/MSWAL_0039.nii.gz'}, {'image': './imagesTr/MSWAL_0040_0000.nii.gz', 'label': './labelsTr/MSWAL_0040.nii.gz'}, {'image': './imagesTr/MSWAL_0041_0000.nii.gz', 'label': './labelsTr/MSWAL_0041.nii.gz'}, {'image': './imagesTr/MSWAL_0042_0000.nii.gz', 'label': './labelsTr/MSWAL_0042.nii.gz'}, {'image': './imagesTr/MSWAL_0045_0000.nii.gz', 'label': './labelsTr/MSWAL_0045.nii.gz'}, {'image': './imagesTr/MSWAL_0046_0000.nii.gz', 'label': './labelsTr/MSWAL_0046.nii.gz'}, {'image': './imagesTr/MSWAL_0049_0000.nii.gz', 'label': './labelsTr/MSWAL_0049.nii.gz'}, {'image': './imagesTr/MSWAL_0050_0000.nii.gz', 'label': './labelsTr/MSWAL_0050.nii.gz'}, {'image': './imagesTr/MSWAL_0051_0000.nii.gz', 'label': './labelsTr/MSWAL_0051.nii.gz'}, {'image': './imagesTr/MSWAL_0052_0000.nii.gz', 'label': './labelsTr/MSWAL_0052.nii.gz'}, {'image': './imagesTr/MSWAL_0054_0000.nii.gz', 'label': './labelsTr/MSWAL_0054.nii.gz'}, {'image': './imagesTr/MSWAL_0055_0000.nii.gz', 'label': './labelsTr/MSWAL_0055.nii.gz'}, {'image': './imagesTr/MSWAL_0056_0000.nii.gz', 'label': './labelsTr/MSWAL_0056.nii.gz'}, {'image': './imagesTr/MSWAL_0057_0000.nii.gz', 
'label': './labelsTr/MSWAL_0057.nii.gz'}, {'image': './imagesTr/MSWAL_0059_0000.nii.gz', 'label': './labelsTr/MSWAL_0059.nii.gz'}, {'image': './imagesTr/MSWAL_0060_0000.nii.gz', 'label': './labelsTr/MSWAL_0060.nii.gz'}, {'image': './imagesTr/MSWAL_0061_0000.nii.gz', 'label': './labelsTr/MSWAL_0061.nii.gz'}, {'image': './imagesTr/MSWAL_0063_0000.nii.gz', 'label': './labelsTr/MSWAL_0063.nii.gz'}, {'image': './imagesTr/MSWAL_0064_0000.nii.gz', 'label': './labelsTr/MSWAL_0064.nii.gz'}, {'image': './imagesTr/MSWAL_0065_0000.nii.gz', 'label': './labelsTr/MSWAL_0065.nii.gz'}, {'image': './imagesTr/MSWAL_0066_0000.nii.gz', 'label': './labelsTr/MSWAL_0066.nii.gz'}, {'image': './imagesTr/MSWAL_0067_0000.nii.gz', 'label': './labelsTr/MSWAL_0067.nii.gz'}, {'image': './imagesTr/MSWAL_0069_0000.nii.gz', 'label': './labelsTr/MSWAL_0069.nii.gz'}, {'image': './imagesTr/MSWAL_0072_0000.nii.gz', 'label': './labelsTr/MSWAL_0072.nii.gz'}, {'image': './imagesTr/MSWAL_0075_0000.nii.gz', 'label': './labelsTr/MSWAL_0075.nii.gz'}, {'image': './imagesTr/MSWAL_0077_0000.nii.gz', 'label': './labelsTr/MSWAL_0077.nii.gz'}, {'image': './imagesTr/MSWAL_0080_0000.nii.gz', 'label': './labelsTr/MSWAL_0080.nii.gz'}, {'image': './imagesTr/MSWAL_0082_0000.nii.gz', 'label': './labelsTr/MSWAL_0082.nii.gz'}, {'image': './imagesTr/MSWAL_0083_0000.nii.gz', 'label': './labelsTr/MSWAL_0083.nii.gz'}, {'image': './imagesTr/MSWAL_0084_0000.nii.gz', 'label': './labelsTr/MSWAL_0084.nii.gz'}, {'image': './imagesTr/MSWAL_0085_0000.nii.gz', 'label': './labelsTr/MSWAL_0085.nii.gz'}, {'image': './imagesTr/MSWAL_0086_0000.nii.gz', 'label': './labelsTr/MSWAL_0086.nii.gz'}, {'image': './imagesTr/MSWAL_0088_0000.nii.gz', 'label': './labelsTr/MSWAL_0088.nii.gz'}, {'image': './imagesTr/MSWAL_0089_0000.nii.gz', 'label': './labelsTr/MSWAL_0089.nii.gz'}, {'image': './imagesTr/MSWAL_0092_0000.nii.gz', 'label': './labelsTr/MSWAL_0092.nii.gz'}, {'image': './imagesTr/MSWAL_0093_0000.nii.gz', 'label': './labelsTr/MSWAL_0093.nii.gz'}, 
{'image': './imagesTr/MSWAL_0094_0000.nii.gz', 'label': './labelsTr/MSWAL_0094.nii.gz'}, {'image': './imagesTr/MSWAL_0095_0000.nii.gz', 'label': './labelsTr/MSWAL_0095.nii.gz'}, {'image': './imagesTr/MSWAL_0096_0000.nii.gz', 'label': './labelsTr/MSWAL_0096.nii.gz'}, {'image': './imagesTr/MSWAL_0098_0000.nii.gz', 'label': './labelsTr/MSWAL_0098.nii.gz'}, {'image': './imagesTr/MSWAL_0099_0000.nii.gz', 'label': './labelsTr/MSWAL_0099.nii.gz'}, {'image': './imagesTr/MSWAL_0101_0000.nii.gz', 'label': './labelsTr/MSWAL_0101.nii.gz'}, {'image': './imagesTr/MSWAL_0102_0000.nii.gz', 'label': './labelsTr/MSWAL_0102.nii.gz'}, {'image': './imagesTr/MSWAL_0103_0000.nii.gz', 'label': './labelsTr/MSWAL_0103.nii.gz'}, {'image': './imagesTr/MSWAL_0104_0000.nii.gz', 'label': './labelsTr/MSWAL_0104.nii.gz'}, {'image': './imagesTr/MSWAL_0105_0000.nii.gz', 'label': './labelsTr/MSWAL_0105.nii.gz'}, {'image': './imagesTr/MSWAL_0106_0000.nii.gz', 'label': './labelsTr/MSWAL_0106.nii.gz'}, {'image': './imagesTr/MSWAL_0108_0000.nii.gz', 'label': './labelsTr/MSWAL_0108.nii.gz'}, {'image': './imagesTr/MSWAL_0109_0000.nii.gz', 'label': './labelsTr/MSWAL_0109.nii.gz'}, {'image': './imagesTr/MSWAL_0110_0000.nii.gz', 'label': './labelsTr/MSWAL_0110.nii.gz'}, {'image': './imagesTr/MSWAL_0111_0000.nii.gz', 'label': './labelsTr/MSWAL_0111.nii.gz'}, {'image': './imagesTr/MSWAL_0112_0000.nii.gz', 'label': './labelsTr/MSWAL_0112.nii.gz'}, {'image': './imagesTr/MSWAL_0113_0000.nii.gz', 'label': './labelsTr/MSWAL_0113.nii.gz'}, {'image': './imagesTr/MSWAL_0114_0000.nii.gz', 'label': './labelsTr/MSWAL_0114.nii.gz'}, {'image': './imagesTr/MSWAL_0117_0000.nii.gz', 'label': './labelsTr/MSWAL_0117.nii.gz'}, {'image': './imagesTr/MSWAL_0119_0000.nii.gz', 'label': './labelsTr/MSWAL_0119.nii.gz'}, {'image': './imagesTr/MSWAL_0120_0000.nii.gz', 'label': './labelsTr/MSWAL_0120.nii.gz'}, {'image': './imagesTr/MSWAL_0122_0000.nii.gz', 'label': './labelsTr/MSWAL_0122.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0124_0000.nii.gz', 'label': './labelsTr/MSWAL_0124.nii.gz'}, {'image': './imagesTr/MSWAL_0125_0000.nii.gz', 'label': './labelsTr/MSWAL_0125.nii.gz'}, {'image': './imagesTr/MSWAL_0126_0000.nii.gz', 'label': './labelsTr/MSWAL_0126.nii.gz'}, {'image': './imagesTr/MSWAL_0127_0000.nii.gz', 'label': './labelsTr/MSWAL_0127.nii.gz'}, {'image': './imagesTr/MSWAL_0128_0000.nii.gz', 'label': './labelsTr/MSWAL_0128.nii.gz'}, {'image': './imagesTr/MSWAL_0129_0000.nii.gz', 'label': './labelsTr/MSWAL_0129.nii.gz'}, {'image': './imagesTr/MSWAL_0130_0000.nii.gz', 'label': './labelsTr/MSWAL_0130.nii.gz'}, {'image': './imagesTr/MSWAL_0132_0000.nii.gz', 'label': './labelsTr/MSWAL_0132.nii.gz'}, {'image': './imagesTr/MSWAL_0133_0000.nii.gz', 'label': './labelsTr/MSWAL_0133.nii.gz'}, {'image': './imagesTr/MSWAL_0134_0000.nii.gz', 'label': './labelsTr/MSWAL_0134.nii.gz'}, {'image': './imagesTr/MSWAL_0136_0000.nii.gz', 'label': './labelsTr/MSWAL_0136.nii.gz'}, {'image': './imagesTr/MSWAL_0138_0000.nii.gz', 'label': './labelsTr/MSWAL_0138.nii.gz'}, {'image': './imagesTr/MSWAL_0139_0000.nii.gz', 'label': './labelsTr/MSWAL_0139.nii.gz'}, {'image': './imagesTr/MSWAL_0140_0000.nii.gz', 'label': './labelsTr/MSWAL_0140.nii.gz'}, {'image': './imagesTr/MSWAL_0141_0000.nii.gz', 'label': './labelsTr/MSWAL_0141.nii.gz'}, {'image': './imagesTr/MSWAL_0142_0000.nii.gz', 'label': './labelsTr/MSWAL_0142.nii.gz'}, {'image': './imagesTr/MSWAL_0143_0000.nii.gz', 'label': './labelsTr/MSWAL_0143.nii.gz'}, {'image': './imagesTr/MSWAL_0145_0000.nii.gz', 'label': './labelsTr/MSWAL_0145.nii.gz'}, {'image': './imagesTr/MSWAL_0147_0000.nii.gz', 'label': './labelsTr/MSWAL_0147.nii.gz'}, {'image': './imagesTr/MSWAL_0148_0000.nii.gz', 'label': './labelsTr/MSWAL_0148.nii.gz'}, {'image': './imagesTr/MSWAL_0149_0000.nii.gz', 'label': './labelsTr/MSWAL_0149.nii.gz'}, {'image': './imagesTr/MSWAL_0150_0000.nii.gz', 'label': './labelsTr/MSWAL_0150.nii.gz'}, {'image': './imagesTr/MSWAL_0151_0000.nii.gz', 
'label': './labelsTr/MSWAL_0151.nii.gz'}, {'image': './imagesTr/MSWAL_0152_0000.nii.gz', 'label': './labelsTr/MSWAL_0152.nii.gz'}, {'image': './imagesTr/MSWAL_0157_0000.nii.gz', 'label': './labelsTr/MSWAL_0157.nii.gz'}, {'image': './imagesTr/MSWAL_0159_0000.nii.gz', 'label': './labelsTr/MSWAL_0159.nii.gz'}, {'image': './imagesTr/MSWAL_0162_0000.nii.gz', 'label': './labelsTr/MSWAL_0162.nii.gz'}, {'image': './imagesTr/MSWAL_0163_0000.nii.gz', 'label': './labelsTr/MSWAL_0163.nii.gz'}, {'image': './imagesTr/MSWAL_0165_0000.nii.gz', 'label': './labelsTr/MSWAL_0165.nii.gz'}, {'image': './imagesTr/MSWAL_0166_0000.nii.gz', 'label': './labelsTr/MSWAL_0166.nii.gz'}, {'image': './imagesTr/MSWAL_0167_0000.nii.gz', 'label': './labelsTr/MSWAL_0167.nii.gz'}, {'image': './imagesTr/MSWAL_0168_0000.nii.gz', 'label': './labelsTr/MSWAL_0168.nii.gz'}, {'image': './imagesTr/MSWAL_0169_0000.nii.gz', 'label': './labelsTr/MSWAL_0169.nii.gz'}, {'image': './imagesTr/MSWAL_0170_0000.nii.gz', 'label': './labelsTr/MSWAL_0170.nii.gz'}, {'image': './imagesTr/MSWAL_0171_0000.nii.gz', 'label': './labelsTr/MSWAL_0171.nii.gz'}, {'image': './imagesTr/MSWAL_0172_0000.nii.gz', 'label': './labelsTr/MSWAL_0172.nii.gz'}, {'image': './imagesTr/MSWAL_0173_0000.nii.gz', 'label': './labelsTr/MSWAL_0173.nii.gz'}, {'image': './imagesTr/MSWAL_0174_0000.nii.gz', 'label': './labelsTr/MSWAL_0174.nii.gz'}, {'image': './imagesTr/MSWAL_0175_0000.nii.gz', 'label': './labelsTr/MSWAL_0175.nii.gz'}, {'image': './imagesTr/MSWAL_0176_0000.nii.gz', 'label': './labelsTr/MSWAL_0176.nii.gz'}, {'image': './imagesTr/MSWAL_0177_0000.nii.gz', 'label': './labelsTr/MSWAL_0177.nii.gz'}, {'image': './imagesTr/MSWAL_0178_0000.nii.gz', 'label': './labelsTr/MSWAL_0178.nii.gz'}, {'image': './imagesTr/MSWAL_0179_0000.nii.gz', 'label': './labelsTr/MSWAL_0179.nii.gz'}, {'image': './imagesTr/MSWAL_0180_0000.nii.gz', 'label': './labelsTr/MSWAL_0180.nii.gz'}, {'image': './imagesTr/MSWAL_0182_0000.nii.gz', 'label': './labelsTr/MSWAL_0182.nii.gz'}, 
        {"image": "./imagesTr/MSWAL_0183_0000.nii.gz", "label": "./labelsTr/MSWAL_0183.nii.gz"},
        {"image": "./imagesTr/MSWAL_0184_0000.nii.gz", "label": "./labelsTr/MSWAL_0184.nii.gz"},
        {"image": "./imagesTr/MSWAL_0185_0000.nii.gz", "label": "./labelsTr/MSWAL_0185.nii.gz"},
        {"image": "./imagesTr/MSWAL_0186_0000.nii.gz", "label": "./labelsTr/MSWAL_0186.nii.gz"},
        {"image": "./imagesTr/MSWAL_0187_0000.nii.gz", "label": "./labelsTr/MSWAL_0187.nii.gz"},
        {"image": "./imagesTr/MSWAL_0188_0000.nii.gz", "label": "./labelsTr/MSWAL_0188.nii.gz"},
        {"image": "./imagesTr/MSWAL_0189_0000.nii.gz", "label": "./labelsTr/MSWAL_0189.nii.gz"},
        {"image": "./imagesTr/MSWAL_0193_0000.nii.gz", "label": "./labelsTr/MSWAL_0193.nii.gz"},
        {"image": "./imagesTr/MSWAL_0194_0000.nii.gz", "label": "./labelsTr/MSWAL_0194.nii.gz"},
        {"image": "./imagesTr/MSWAL_0195_0000.nii.gz", "label": "./labelsTr/MSWAL_0195.nii.gz"},
        {"image": "./imagesTr/MSWAL_0199_0000.nii.gz", "label": "./labelsTr/MSWAL_0199.nii.gz"},
        {"image": "./imagesTr/MSWAL_0201_0000.nii.gz", "label": "./labelsTr/MSWAL_0201.nii.gz"},
        {"image": "./imagesTr/MSWAL_0202_0000.nii.gz", "label": "./labelsTr/MSWAL_0202.nii.gz"},
        {"image": "./imagesTr/MSWAL_0203_0000.nii.gz", "label": "./labelsTr/MSWAL_0203.nii.gz"},
        {"image": "./imagesTr/MSWAL_0204_0000.nii.gz", "label": "./labelsTr/MSWAL_0204.nii.gz"},
        {"image": "./imagesTr/MSWAL_0207_0000.nii.gz", "label": "./labelsTr/MSWAL_0207.nii.gz"},
        {"image": "./imagesTr/MSWAL_0208_0000.nii.gz", "label": "./labelsTr/MSWAL_0208.nii.gz"},
        {"image": "./imagesTr/MSWAL_0209_0000.nii.gz", "label": "./labelsTr/MSWAL_0209.nii.gz"},
        {"image": "./imagesTr/MSWAL_0214_0000.nii.gz", "label": "./labelsTr/MSWAL_0214.nii.gz"},
        {"image": "./imagesTr/MSWAL_0217_0000.nii.gz", "label": "./labelsTr/MSWAL_0217.nii.gz"},
        {"image": "./imagesTr/MSWAL_0218_0000.nii.gz", "label": "./labelsTr/MSWAL_0218.nii.gz"},
        {"image": "./imagesTr/MSWAL_0219_0000.nii.gz", "label": "./labelsTr/MSWAL_0219.nii.gz"},
        {"image": "./imagesTr/MSWAL_0220_0000.nii.gz", "label": "./labelsTr/MSWAL_0220.nii.gz"},
        {"image": "./imagesTr/MSWAL_0221_0000.nii.gz", "label": "./labelsTr/MSWAL_0221.nii.gz"},
        {"image": "./imagesTr/MSWAL_0222_0000.nii.gz", "label": "./labelsTr/MSWAL_0222.nii.gz"},
        {"image": "./imagesTr/MSWAL_0223_0000.nii.gz", "label": "./labelsTr/MSWAL_0223.nii.gz"},
        {"image": "./imagesTr/MSWAL_0224_0000.nii.gz", "label": "./labelsTr/MSWAL_0224.nii.gz"},
        {"image": "./imagesTr/MSWAL_0225_0000.nii.gz", "label": "./labelsTr/MSWAL_0225.nii.gz"},
        {"image": "./imagesTr/MSWAL_0226_0000.nii.gz", "label": "./labelsTr/MSWAL_0226.nii.gz"},
        {"image": "./imagesTr/MSWAL_0227_0000.nii.gz", "label": "./labelsTr/MSWAL_0227.nii.gz"},
        {"image": "./imagesTr/MSWAL_0228_0000.nii.gz", "label": "./labelsTr/MSWAL_0228.nii.gz"},
        {"image": "./imagesTr/MSWAL_0229_0000.nii.gz", "label": "./labelsTr/MSWAL_0229.nii.gz"},
        {"image": "./imagesTr/MSWAL_0230_0000.nii.gz", "label": "./labelsTr/MSWAL_0230.nii.gz"},
        {"image": "./imagesTr/MSWAL_0233_0000.nii.gz", "label": "./labelsTr/MSWAL_0233.nii.gz"},
        {"image": "./imagesTr/MSWAL_0234_0000.nii.gz", "label": "./labelsTr/MSWAL_0234.nii.gz"},
        {"image": "./imagesTr/MSWAL_0238_0000.nii.gz", "label": "./labelsTr/MSWAL_0238.nii.gz"},
        {"image": "./imagesTr/MSWAL_0241_0000.nii.gz", "label": "./labelsTr/MSWAL_0241.nii.gz"},
        {"image": "./imagesTr/MSWAL_0242_0000.nii.gz", "label": "./labelsTr/MSWAL_0242.nii.gz"},
        {"image": "./imagesTr/MSWAL_0243_0000.nii.gz", "label": "./labelsTr/MSWAL_0243.nii.gz"},
        {"image": "./imagesTr/MSWAL_0245_0000.nii.gz", "label": "./labelsTr/MSWAL_0245.nii.gz"},
        {"image": "./imagesTr/MSWAL_0246_0000.nii.gz", "label": "./labelsTr/MSWAL_0246.nii.gz"},
        {"image": "./imagesTr/MSWAL_0247_0000.nii.gz", "label": "./labelsTr/MSWAL_0247.nii.gz"},
        {"image": "./imagesTr/MSWAL_0248_0000.nii.gz", "label": "./labelsTr/MSWAL_0248.nii.gz"},
        {"image": "./imagesTr/MSWAL_0251_0000.nii.gz", "label": "./labelsTr/MSWAL_0251.nii.gz"},
        {"image": "./imagesTr/MSWAL_0252_0000.nii.gz", "label": "./labelsTr/MSWAL_0252.nii.gz"},
        {"image": "./imagesTr/MSWAL_0253_0000.nii.gz", "label": "./labelsTr/MSWAL_0253.nii.gz"},
        {"image": "./imagesTr/MSWAL_0254_0000.nii.gz", "label": "./labelsTr/MSWAL_0254.nii.gz"},
        {"image": "./imagesTr/MSWAL_0255_0000.nii.gz", "label": "./labelsTr/MSWAL_0255.nii.gz"},
        {"image": "./imagesTr/MSWAL_0256_0000.nii.gz", "label": "./labelsTr/MSWAL_0256.nii.gz"},
        {"image": "./imagesTr/MSWAL_0257_0000.nii.gz", "label": "./labelsTr/MSWAL_0257.nii.gz"},
        {"image": "./imagesTr/MSWAL_0258_0000.nii.gz", "label": "./labelsTr/MSWAL_0258.nii.gz"},
        {"image": "./imagesTr/MSWAL_0259_0000.nii.gz", "label": "./labelsTr/MSWAL_0259.nii.gz"},
        {"image": "./imagesTr/MSWAL_0260_0000.nii.gz", "label": "./labelsTr/MSWAL_0260.nii.gz"},
        {"image": "./imagesTr/MSWAL_0261_0000.nii.gz", "label": "./labelsTr/MSWAL_0261.nii.gz"},
        {"image": "./imagesTr/MSWAL_0262_0000.nii.gz", "label": "./labelsTr/MSWAL_0262.nii.gz"},
        {"image": "./imagesTr/MSWAL_0263_0000.nii.gz", "label": "./labelsTr/MSWAL_0263.nii.gz"},
        {"image": "./imagesTr/MSWAL_0264_0000.nii.gz", "label": "./labelsTr/MSWAL_0264.nii.gz"},
        {"image": "./imagesTr/MSWAL_0265_0000.nii.gz", "label": "./labelsTr/MSWAL_0265.nii.gz"},
        {"image": "./imagesTr/MSWAL_0267_0000.nii.gz", "label": "./labelsTr/MSWAL_0267.nii.gz"},
        {"image": "./imagesTr/MSWAL_0270_0000.nii.gz", "label": "./labelsTr/MSWAL_0270.nii.gz"},
        {"image": "./imagesTr/MSWAL_0271_0000.nii.gz", "label": "./labelsTr/MSWAL_0271.nii.gz"},
        {"image": "./imagesTr/MSWAL_0272_0000.nii.gz", "label": "./labelsTr/MSWAL_0272.nii.gz"},
        {"image": "./imagesTr/MSWAL_0273_0000.nii.gz", "label": "./labelsTr/MSWAL_0273.nii.gz"},
        {"image": "./imagesTr/MSWAL_0274_0000.nii.gz", "label": "./labelsTr/MSWAL_0274.nii.gz"},
        {"image": "./imagesTr/MSWAL_0275_0000.nii.gz", "label": "./labelsTr/MSWAL_0275.nii.gz"},
        {"image": "./imagesTr/MSWAL_0276_0000.nii.gz", "label": "./labelsTr/MSWAL_0276.nii.gz"},
        {"image": "./imagesTr/MSWAL_0277_0000.nii.gz", "label": "./labelsTr/MSWAL_0277.nii.gz"},
        {"image": "./imagesTr/MSWAL_0278_0000.nii.gz", "label": "./labelsTr/MSWAL_0278.nii.gz"},
        {"image": "./imagesTr/MSWAL_0279_0000.nii.gz", "label": "./labelsTr/MSWAL_0279.nii.gz"},
        {"image": "./imagesTr/MSWAL_0281_0000.nii.gz", "label": "./labelsTr/MSWAL_0281.nii.gz"},
        {"image": "./imagesTr/MSWAL_0282_0000.nii.gz", "label": "./labelsTr/MSWAL_0282.nii.gz"},
        {"image": "./imagesTr/MSWAL_0283_0000.nii.gz", "label": "./labelsTr/MSWAL_0283.nii.gz"},
        {"image": "./imagesTr/MSWAL_0284_0000.nii.gz", "label": "./labelsTr/MSWAL_0284.nii.gz"},
        {"image": "./imagesTr/MSWAL_0285_0000.nii.gz", "label": "./labelsTr/MSWAL_0285.nii.gz"},
        {"image": "./imagesTr/MSWAL_0288_0000.nii.gz", "label": "./labelsTr/MSWAL_0288.nii.gz"},
        {"image": "./imagesTr/MSWAL_0289_0000.nii.gz", "label": "./labelsTr/MSWAL_0289.nii.gz"},
        {"image": "./imagesTr/MSWAL_0290_0000.nii.gz", "label": "./labelsTr/MSWAL_0290.nii.gz"},
        {"image": "./imagesTr/MSWAL_0293_0000.nii.gz", "label": "./labelsTr/MSWAL_0293.nii.gz"},
        {"image": "./imagesTr/MSWAL_0296_0000.nii.gz", "label": "./labelsTr/MSWAL_0296.nii.gz"},
        {"image": "./imagesTr/MSWAL_0297_0000.nii.gz", "label": "./labelsTr/MSWAL_0297.nii.gz"},
        {"image": "./imagesTr/MSWAL_0301_0000.nii.gz", "label": "./labelsTr/MSWAL_0301.nii.gz"},
        {"image": "./imagesTr/MSWAL_0302_0000.nii.gz", "label": "./labelsTr/MSWAL_0302.nii.gz"},
        {"image": "./imagesTr/MSWAL_0303_0000.nii.gz", "label": "./labelsTr/MSWAL_0303.nii.gz"},
        {"image": "./imagesTr/MSWAL_0306_0000.nii.gz", "label": "./labelsTr/MSWAL_0306.nii.gz"},
        {"image": "./imagesTr/MSWAL_0307_0000.nii.gz", "label": "./labelsTr/MSWAL_0307.nii.gz"},
        {"image": "./imagesTr/MSWAL_0308_0000.nii.gz", "label": "./labelsTr/MSWAL_0308.nii.gz"},
        {"image": "./imagesTr/MSWAL_0311_0000.nii.gz", "label": "./labelsTr/MSWAL_0311.nii.gz"},
        {"image": "./imagesTr/MSWAL_0312_0000.nii.gz", "label": "./labelsTr/MSWAL_0312.nii.gz"},
        {"image": "./imagesTr/MSWAL_0313_0000.nii.gz", "label": "./labelsTr/MSWAL_0313.nii.gz"},
        {"image": "./imagesTr/MSWAL_0314_0000.nii.gz", "label": "./labelsTr/MSWAL_0314.nii.gz"},
        {"image": "./imagesTr/MSWAL_0316_0000.nii.gz", "label": "./labelsTr/MSWAL_0316.nii.gz"},
        {"image": "./imagesTr/MSWAL_0317_0000.nii.gz", "label": "./labelsTr/MSWAL_0317.nii.gz"},
        {"image": "./imagesTr/MSWAL_0318_0000.nii.gz", "label": "./labelsTr/MSWAL_0318.nii.gz"},
        {"image": "./imagesTr/MSWAL_0320_0000.nii.gz", "label": "./labelsTr/MSWAL_0320.nii.gz"},
        {"image": "./imagesTr/MSWAL_0323_0000.nii.gz", "label": "./labelsTr/MSWAL_0323.nii.gz"},
        {"image": "./imagesTr/MSWAL_0324_0000.nii.gz", "label": "./labelsTr/MSWAL_0324.nii.gz"},
        {"image": "./imagesTr/MSWAL_0326_0000.nii.gz", "label": "./labelsTr/MSWAL_0326.nii.gz"},
        {"image": "./imagesTr/MSWAL_0327_0000.nii.gz", "label": "./labelsTr/MSWAL_0327.nii.gz"},
        {"image": "./imagesTr/MSWAL_0328_0000.nii.gz", "label": "./labelsTr/MSWAL_0328.nii.gz"},
        {"image": "./imagesTr/MSWAL_0330_0000.nii.gz", "label": "./labelsTr/MSWAL_0330.nii.gz"},
        {"image": "./imagesTr/MSWAL_0331_0000.nii.gz", "label": "./labelsTr/MSWAL_0331.nii.gz"},
        {"image": "./imagesTr/MSWAL_0332_0000.nii.gz", "label": "./labelsTr/MSWAL_0332.nii.gz"},
        {"image": "./imagesTr/MSWAL_0333_0000.nii.gz", "label": "./labelsTr/MSWAL_0333.nii.gz"},
        {"image": "./imagesTr/MSWAL_0334_0000.nii.gz", "label": "./labelsTr/MSWAL_0334.nii.gz"},
        {"image": "./imagesTr/MSWAL_0335_0000.nii.gz", "label": "./labelsTr/MSWAL_0335.nii.gz"},
        {"image": "./imagesTr/MSWAL_0336_0000.nii.gz", "label": "./labelsTr/MSWAL_0336.nii.gz"},
        {"image": "./imagesTr/MSWAL_0337_0000.nii.gz", "label": "./labelsTr/MSWAL_0337.nii.gz"},
        {"image": "./imagesTr/MSWAL_0338_0000.nii.gz", "label": "./labelsTr/MSWAL_0338.nii.gz"},
        {"image": "./imagesTr/MSWAL_0341_0000.nii.gz", "label": "./labelsTr/MSWAL_0341.nii.gz"},
        {"image": "./imagesTr/MSWAL_0342_0000.nii.gz", "label": "./labelsTr/MSWAL_0342.nii.gz"},
        {"image": "./imagesTr/MSWAL_0343_0000.nii.gz", "label": "./labelsTr/MSWAL_0343.nii.gz"},
        {"image": "./imagesTr/MSWAL_0344_0000.nii.gz", "label": "./labelsTr/MSWAL_0344.nii.gz"},
        {"image": "./imagesTr/MSWAL_0345_0000.nii.gz", "label": "./labelsTr/MSWAL_0345.nii.gz"},
        {"image": "./imagesTr/MSWAL_0346_0000.nii.gz", "label": "./labelsTr/MSWAL_0346.nii.gz"},
        {"image": "./imagesTr/MSWAL_0348_0000.nii.gz", "label": "./labelsTr/MSWAL_0348.nii.gz"},
        {"image": "./imagesTr/MSWAL_0353_0000.nii.gz", "label": "./labelsTr/MSWAL_0353.nii.gz"},
        {"image": "./imagesTr/MSWAL_0354_0000.nii.gz", "label": "./labelsTr/MSWAL_0354.nii.gz"},
        {"image": "./imagesTr/MSWAL_0355_0000.nii.gz", "label": "./labelsTr/MSWAL_0355.nii.gz"},
        {"image": "./imagesTr/MSWAL_0356_0000.nii.gz", "label": "./labelsTr/MSWAL_0356.nii.gz"},
        {"image": "./imagesTr/MSWAL_0357_0000.nii.gz", "label": "./labelsTr/MSWAL_0357.nii.gz"},
        {"image": "./imagesTr/MSWAL_0360_0000.nii.gz", "label": "./labelsTr/MSWAL_0360.nii.gz"},
        {"image": "./imagesTr/MSWAL_0361_0000.nii.gz", "label": "./labelsTr/MSWAL_0361.nii.gz"},
        {"image": "./imagesTr/MSWAL_0362_0000.nii.gz", "label": "./labelsTr/MSWAL_0362.nii.gz"},
        {"image": "./imagesTr/MSWAL_0363_0000.nii.gz", "label": "./labelsTr/MSWAL_0363.nii.gz"},
        {"image": "./imagesTr/MSWAL_0365_0000.nii.gz", "label": "./labelsTr/MSWAL_0365.nii.gz"},
        {"image": "./imagesTr/MSWAL_0366_0000.nii.gz", "label": "./labelsTr/MSWAL_0366.nii.gz"},
        {"image": "./imagesTr/MSWAL_0369_0000.nii.gz", "label": "./labelsTr/MSWAL_0369.nii.gz"},
        {"image": "./imagesTr/MSWAL_0370_0000.nii.gz", "label": "./labelsTr/MSWAL_0370.nii.gz"},
        {"image": "./imagesTr/MSWAL_0373_0000.nii.gz", "label": "./labelsTr/MSWAL_0373.nii.gz"},
        {"image": "./imagesTr/MSWAL_0374_0000.nii.gz", "label": "./labelsTr/MSWAL_0374.nii.gz"},
        {"image": "./imagesTr/MSWAL_0375_0000.nii.gz", "label": "./labelsTr/MSWAL_0375.nii.gz"},
        {"image": "./imagesTr/MSWAL_0376_0000.nii.gz", "label": "./labelsTr/MSWAL_0376.nii.gz"},
        {"image": "./imagesTr/MSWAL_0378_0000.nii.gz", "label": "./labelsTr/MSWAL_0378.nii.gz"},
        {"image": "./imagesTr/MSWAL_0379_0000.nii.gz", "label": "./labelsTr/MSWAL_0379.nii.gz"},
        {"image": "./imagesTr/MSWAL_0380_0000.nii.gz", "label": "./labelsTr/MSWAL_0380.nii.gz"},
        {"image": "./imagesTr/MSWAL_0381_0000.nii.gz", "label": "./labelsTr/MSWAL_0381.nii.gz"},
        {"image": "./imagesTr/MSWAL_0382_0000.nii.gz", "label": "./labelsTr/MSWAL_0382.nii.gz"},
        {"image": "./imagesTr/MSWAL_0387_0000.nii.gz", "label": "./labelsTr/MSWAL_0387.nii.gz"},
        {"image": "./imagesTr/MSWAL_0388_0000.nii.gz", "label": "./labelsTr/MSWAL_0388.nii.gz"},
        {"image": "./imagesTr/MSWAL_0389_0000.nii.gz", "label": "./labelsTr/MSWAL_0389.nii.gz"},
        {"image": "./imagesTr/MSWAL_0390_0000.nii.gz", "label": "./labelsTr/MSWAL_0390.nii.gz"},
        {"image": "./imagesTr/MSWAL_0391_0000.nii.gz", "label": "./labelsTr/MSWAL_0391.nii.gz"},
        {"image": "./imagesTr/MSWAL_0392_0000.nii.gz", "label": "./labelsTr/MSWAL_0392.nii.gz"},
        {"image": "./imagesTr/MSWAL_0393_0000.nii.gz", "label": "./labelsTr/MSWAL_0393.nii.gz"},
        {"image": "./imagesTr/MSWAL_0397_0000.nii.gz", "label": "./labelsTr/MSWAL_0397.nii.gz"},
        {"image": "./imagesTr/MSWAL_0398_0000.nii.gz", "label": "./labelsTr/MSWAL_0398.nii.gz"},
        {"image": "./imagesTr/MSWAL_0399_0000.nii.gz", "label": "./labelsTr/MSWAL_0399.nii.gz"},
        {"image": "./imagesTr/MSWAL_0400_0000.nii.gz", "label": "./labelsTr/MSWAL_0400.nii.gz"},
        {"image": "./imagesTr/MSWAL_0402_0000.nii.gz", "label": "./labelsTr/MSWAL_0402.nii.gz"},
        {"image": "./imagesTr/MSWAL_0403_0000.nii.gz", "label": "./labelsTr/MSWAL_0403.nii.gz"},
        {"image": "./imagesTr/MSWAL_0407_0000.nii.gz", "label": "./labelsTr/MSWAL_0407.nii.gz"},
        {"image": "./imagesTr/MSWAL_0409_0000.nii.gz", "label": "./labelsTr/MSWAL_0409.nii.gz"},
        {"image": "./imagesTr/MSWAL_0410_0000.nii.gz", "label": "./labelsTr/MSWAL_0410.nii.gz"},
        {"image": "./imagesTr/MSWAL_0411_0000.nii.gz", "label": "./labelsTr/MSWAL_0411.nii.gz"},
        {"image": "./imagesTr/MSWAL_0412_0000.nii.gz", "label": "./labelsTr/MSWAL_0412.nii.gz"},
        {"image": "./imagesTr/MSWAL_0414_0000.nii.gz", "label": "./labelsTr/MSWAL_0414.nii.gz"},
        {"image": "./imagesTr/MSWAL_0415_0000.nii.gz", "label": "./labelsTr/MSWAL_0415.nii.gz"},
        {"image": "./imagesTr/MSWAL_0416_0000.nii.gz", "label": "./labelsTr/MSWAL_0416.nii.gz"},
        {"image": "./imagesTr/MSWAL_0417_0000.nii.gz", "label": "./labelsTr/MSWAL_0417.nii.gz"},
        {"image": "./imagesTr/MSWAL_0418_0000.nii.gz", "label": "./labelsTr/MSWAL_0418.nii.gz"},
        {"image": "./imagesTr/MSWAL_0419_0000.nii.gz", "label": "./labelsTr/MSWAL_0419.nii.gz"},
        {"image": "./imagesTr/MSWAL_0420_0000.nii.gz", "label": "./labelsTr/MSWAL_0420.nii.gz"},
        {"image": "./imagesTr/MSWAL_0421_0000.nii.gz", "label": "./labelsTr/MSWAL_0421.nii.gz"},
        {"image": "./imagesTr/MSWAL_0422_0000.nii.gz", "label": "./labelsTr/MSWAL_0422.nii.gz"},
        {"image": "./imagesTr/MSWAL_0423_0000.nii.gz", "label": "./labelsTr/MSWAL_0423.nii.gz"},
        {"image": "./imagesTr/MSWAL_0425_0000.nii.gz", "label": "./labelsTr/MSWAL_0425.nii.gz"},
        {"image": "./imagesTr/MSWAL_0426_0000.nii.gz", "label": "./labelsTr/MSWAL_0426.nii.gz"},
        {"image": "./imagesTr/MSWAL_0427_0000.nii.gz", "label": "./labelsTr/MSWAL_0427.nii.gz"},
        {"image": "./imagesTr/MSWAL_0428_0000.nii.gz", "label": "./labelsTr/MSWAL_0428.nii.gz"},
        {"image": "./imagesTr/MSWAL_0429_0000.nii.gz", "label": "./labelsTr/MSWAL_0429.nii.gz"},
        {"image": "./imagesTr/MSWAL_0430_0000.nii.gz", "label": "./labelsTr/MSWAL_0430.nii.gz"},
        {"image": "./imagesTr/MSWAL_0431_0000.nii.gz", "label": "./labelsTr/MSWAL_0431.nii.gz"},
        {"image": "./imagesTr/MSWAL_0432_0000.nii.gz", "label": "./labelsTr/MSWAL_0432.nii.gz"},
        {"image": "./imagesTr/MSWAL_0434_0000.nii.gz", "label": "./labelsTr/MSWAL_0434.nii.gz"},
        {"image": "./imagesTr/MSWAL_0435_0000.nii.gz", "label": "./labelsTr/MSWAL_0435.nii.gz"},
        {"image": "./imagesTr/MSWAL_0436_0000.nii.gz", "label": "./labelsTr/MSWAL_0436.nii.gz"},
        {"image": "./imagesTr/MSWAL_0437_0000.nii.gz", "label": "./labelsTr/MSWAL_0437.nii.gz"},
        {"image": "./imagesTr/MSWAL_0438_0000.nii.gz", "label": "./labelsTr/MSWAL_0438.nii.gz"},
        {"image": "./imagesTr/MSWAL_0439_0000.nii.gz", "label": "./labelsTr/MSWAL_0439.nii.gz"},
        {"image": "./imagesTr/MSWAL_0440_0000.nii.gz", "label": "./labelsTr/MSWAL_0440.nii.gz"},
        {"image": "./imagesTr/MSWAL_0442_0000.nii.gz", "label": "./labelsTr/MSWAL_0442.nii.gz"},
        {"image": "./imagesTr/MSWAL_0446_0000.nii.gz", "label": "./labelsTr/MSWAL_0446.nii.gz"},
        {"image": "./imagesTr/MSWAL_0447_0000.nii.gz", "label": "./labelsTr/MSWAL_0447.nii.gz"},
        {"image": "./imagesTr/MSWAL_0452_0000.nii.gz", "label": "./labelsTr/MSWAL_0452.nii.gz"},
        {"image": "./imagesTr/MSWAL_0453_0000.nii.gz", "label": "./labelsTr/MSWAL_0453.nii.gz"},
        {"image": "./imagesTr/MSWAL_0455_0000.nii.gz", "label": "./labelsTr/MSWAL_0455.nii.gz"},
        {"image": "./imagesTr/MSWAL_0457_0000.nii.gz", "label": "./labelsTr/MSWAL_0457.nii.gz"},
        {"image": "./imagesTr/MSWAL_0460_0000.nii.gz", "label": "./labelsTr/MSWAL_0460.nii.gz"},
        {"image": "./imagesTr/MSWAL_0461_0000.nii.gz", "label": "./labelsTr/MSWAL_0461.nii.gz"},
        {"image": "./imagesTr/MSWAL_0463_0000.nii.gz", "label": "./labelsTr/MSWAL_0463.nii.gz"},
        {"image": "./imagesTr/MSWAL_0464_0000.nii.gz", "label": "./labelsTr/MSWAL_0464.nii.gz"},
        {"image": "./imagesTr/MSWAL_0465_0000.nii.gz", "label": "./labelsTr/MSWAL_0465.nii.gz"},
        {"image": "./imagesTr/MSWAL_0466_0000.nii.gz", "label": "./labelsTr/MSWAL_0466.nii.gz"},
        {"image": "./imagesTr/MSWAL_0468_0000.nii.gz", "label": "./labelsTr/MSWAL_0468.nii.gz"},
        {"image": "./imagesTr/MSWAL_0470_0000.nii.gz", "label": "./labelsTr/MSWAL_0470.nii.gz"},
        {"image": "./imagesTr/MSWAL_0471_0000.nii.gz", "label": "./labelsTr/MSWAL_0471.nii.gz"},
        {"image": "./imagesTr/MSWAL_0473_0000.nii.gz", "label": "./labelsTr/MSWAL_0473.nii.gz"},
        {"image": "./imagesTr/MSWAL_0474_0000.nii.gz", "label": "./labelsTr/MSWAL_0474.nii.gz"},
        {"image": "./imagesTr/MSWAL_0475_0000.nii.gz", "label": "./labelsTr/MSWAL_0475.nii.gz"},
        {"image": "./imagesTr/MSWAL_0476_0000.nii.gz", "label": "./labelsTr/MSWAL_0476.nii.gz"},
        {"image": "./imagesTr/MSWAL_0477_0000.nii.gz", "label": "./labelsTr/MSWAL_0477.nii.gz"},
        {"image": "./imagesTr/MSWAL_0479_0000.nii.gz", "label": "./labelsTr/MSWAL_0479.nii.gz"},
        {"image": "./imagesTr/MSWAL_0480_0000.nii.gz", "label": "./labelsTr/MSWAL_0480.nii.gz"},
        {"image": "./imagesTr/MSWAL_0482_0000.nii.gz", "label": "./labelsTr/MSWAL_0482.nii.gz"},
        {"image": "./imagesTr/MSWAL_0483_0000.nii.gz", "label": "./labelsTr/MSWAL_0483.nii.gz"},
        {"image": "./imagesTr/MSWAL_0484_0000.nii.gz", "label": "./labelsTr/MSWAL_0484.nii.gz"},
        {"image": "./imagesTr/MSWAL_0485_0000.nii.gz", "label": "./labelsTr/MSWAL_0485.nii.gz"},
        {"image": "./imagesTr/MSWAL_0486_0000.nii.gz", "label": "./labelsTr/MSWAL_0486.nii.gz"},
        {"image": "./imagesTr/MSWAL_0487_0000.nii.gz", "label": "./labelsTr/MSWAL_0487.nii.gz"},
        {"image": "./imagesTr/MSWAL_0488_0000.nii.gz", "label": "./labelsTr/MSWAL_0488.nii.gz"},
        {"image": "./imagesTr/MSWAL_0489_0000.nii.gz", "label": "./labelsTr/MSWAL_0489.nii.gz"},
        {"image": "./imagesTr/MSWAL_0490_0000.nii.gz", "label": "./labelsTr/MSWAL_0490.nii.gz"},
        {"image": "./imagesTr/MSWAL_0491_0000.nii.gz", "label": "./labelsTr/MSWAL_0491.nii.gz"},
        {"image": "./imagesTr/MSWAL_0492_0000.nii.gz", "label": "./labelsTr/MSWAL_0492.nii.gz"},
        {"image": "./imagesTr/MSWAL_0493_0000.nii.gz", "label": "./labelsTr/MSWAL_0493.nii.gz"},
        {"image": "./imagesTr/MSWAL_0495_0000.nii.gz", "label": "./labelsTr/MSWAL_0495.nii.gz"},
        {"image": "./imagesTr/MSWAL_0497_0000.nii.gz", "label": "./labelsTr/MSWAL_0497.nii.gz"},
        {"image": "./imagesTr/MSWAL_0498_0000.nii.gz", "label": "./labelsTr/MSWAL_0498.nii.gz"},
        {"image": "./imagesTr/MSWAL_0500_0000.nii.gz", "label": "./labelsTr/MSWAL_0500.nii.gz"},
        {"image": "./imagesTr/MSWAL_0501_0000.nii.gz", "label": "./labelsTr/MSWAL_0501.nii.gz"},
        {"image": "./imagesTr/MSWAL_0504_0000.nii.gz", "label": "./labelsTr/MSWAL_0504.nii.gz"},
        {"image": "./imagesTr/MSWAL_0505_0000.nii.gz", "label": "./labelsTr/MSWAL_0505.nii.gz"},
        {"image": "./imagesTr/MSWAL_0506_0000.nii.gz", "label": "./labelsTr/MSWAL_0506.nii.gz"},
        {"image": "./imagesTr/MSWAL_0507_0000.nii.gz", "label": "./labelsTr/MSWAL_0507.nii.gz"},
        {"image": "./imagesTr/MSWAL_0508_0000.nii.gz", "label": "./labelsTr/MSWAL_0508.nii.gz"},
        {"image": "./imagesTr/MSWAL_0509_0000.nii.gz", "label": "./labelsTr/MSWAL_0509.nii.gz"},
        {"image": "./imagesTr/MSWAL_0510_0000.nii.gz", "label": "./labelsTr/MSWAL_0510.nii.gz"},
        {"image": "./imagesTr/MSWAL_0512_0000.nii.gz", "label": "./labelsTr/MSWAL_0512.nii.gz"},
        {"image": "./imagesTr/MSWAL_0516_0000.nii.gz", "label": "./labelsTr/MSWAL_0516.nii.gz"},
        {"image": "./imagesTr/MSWAL_0518_0000.nii.gz", "label": "./labelsTr/MSWAL_0518.nii.gz"},
        {"image": "./imagesTr/MSWAL_0519_0000.nii.gz", "label": "./labelsTr/MSWAL_0519.nii.gz"},
        {"image": "./imagesTr/MSWAL_0521_0000.nii.gz", "label": "./labelsTr/MSWAL_0521.nii.gz"},
        {"image": "./imagesTr/MSWAL_0522_0000.nii.gz", "label": "./labelsTr/MSWAL_0522.nii.gz"},
        {"image": "./imagesTr/MSWAL_0523_0000.nii.gz", "label": "./labelsTr/MSWAL_0523.nii.gz"},
        {"image": "./imagesTr/MSWAL_0524_0000.nii.gz", "label": "./labelsTr/MSWAL_0524.nii.gz"},
        {"image": "./imagesTr/MSWAL_0526_0000.nii.gz", "label": "./labelsTr/MSWAL_0526.nii.gz"},
        {"image": "./imagesTr/MSWAL_0527_0000.nii.gz", "label": "./labelsTr/MSWAL_0527.nii.gz"},
        {"image": "./imagesTr/MSWAL_0530_0000.nii.gz", "label": "./labelsTr/MSWAL_0530.nii.gz"},
        {"image": "./imagesTr/MSWAL_0531_0000.nii.gz", "label": "./labelsTr/MSWAL_0531.nii.gz"},
        {"image": "./imagesTr/MSWAL_0534_0000.nii.gz", "label": "./labelsTr/MSWAL_0534.nii.gz"},
        {"image": "./imagesTr/MSWAL_0535_0000.nii.gz", "label": "./labelsTr/MSWAL_0535.nii.gz"},
        {"image": "./imagesTr/MSWAL_0536_0000.nii.gz", "label": "./labelsTr/MSWAL_0536.nii.gz"},
        {"image": "./imagesTr/MSWAL_0538_0000.nii.gz", "label": "./labelsTr/MSWAL_0538.nii.gz"},
        {"image": "./imagesTr/MSWAL_0539_0000.nii.gz", "label": "./labelsTr/MSWAL_0539.nii.gz"},
        {"image": "./imagesTr/MSWAL_0540_0000.nii.gz", "label": "./labelsTr/MSWAL_0540.nii.gz"},
        {"image": "./imagesTr/MSWAL_0542_0000.nii.gz", "label": "./labelsTr/MSWAL_0542.nii.gz"},
        {"image": "./imagesTr/MSWAL_0544_0000.nii.gz", "label": "./labelsTr/MSWAL_0544.nii.gz"},
        {"image": "./imagesTr/MSWAL_0545_0000.nii.gz", "label": "./labelsTr/MSWAL_0545.nii.gz"},
        {"image": "./imagesTr/MSWAL_0546_0000.nii.gz", "label": "./labelsTr/MSWAL_0546.nii.gz"},
        {"image": "./imagesTr/MSWAL_0547_0000.nii.gz", "label": "./labelsTr/MSWAL_0547.nii.gz"},
        {"image": "./imagesTr/MSWAL_0548_0000.nii.gz", "label": "./labelsTr/MSWAL_0548.nii.gz"},
        {"image": "./imagesTr/MSWAL_0549_0000.nii.gz", "label": "./labelsTr/MSWAL_0549.nii.gz"},
        {"image": "./imagesTr/MSWAL_0550_0000.nii.gz", "label": "./labelsTr/MSWAL_0550.nii.gz"},
        {"image": "./imagesTr/MSWAL_0551_0000.nii.gz", "label": "./labelsTr/MSWAL_0551.nii.gz"},
        {"image": "./imagesTr/MSWAL_0552_0000.nii.gz", "label": "./labelsTr/MSWAL_0552.nii.gz"},
        {"image": "./imagesTr/MSWAL_0553_0000.nii.gz", "label": "./labelsTr/MSWAL_0553.nii.gz"},
        {"image": "./imagesTr/MSWAL_0554_0000.nii.gz", "label": "./labelsTr/MSWAL_0554.nii.gz"},
        {"image": "./imagesTr/MSWAL_0555_0000.nii.gz", "label": "./labelsTr/MSWAL_0555.nii.gz"},
        {"image": "./imagesTr/MSWAL_0556_0000.nii.gz", "label": "./labelsTr/MSWAL_0556.nii.gz"},
        {"image": "./imagesTr/MSWAL_0557_0000.nii.gz", "label": "./labelsTr/MSWAL_0557.nii.gz"},
        {"image": "./imagesTr/MSWAL_0558_0000.nii.gz", "label": "./labelsTr/MSWAL_0558.nii.gz"},
        {"image": "./imagesTr/MSWAL_0559_0000.nii.gz", "label": "./labelsTr/MSWAL_0559.nii.gz"},
        {"image": "./imagesTr/MSWAL_0561_0000.nii.gz", "label": "./labelsTr/MSWAL_0561.nii.gz"},
        {"image": "./imagesTr/MSWAL_0562_0000.nii.gz", "label": "./labelsTr/MSWAL_0562.nii.gz"},
        {"image": "./imagesTr/MSWAL_0563_0000.nii.gz", "label": "./labelsTr/MSWAL_0563.nii.gz"},
        {"image": "./imagesTr/MSWAL_0564_0000.nii.gz", "label": "./labelsTr/MSWAL_0564.nii.gz"},
        {"image": "./imagesTr/MSWAL_0566_0000.nii.gz", "label": "./labelsTr/MSWAL_0566.nii.gz"},
        {"image": "./imagesTr/MSWAL_0567_0000.nii.gz", "label": "./labelsTr/MSWAL_0567.nii.gz"},
        {"image": "./imagesTr/MSWAL_0568_0000.nii.gz", "label": "./labelsTr/MSWAL_0568.nii.gz"},
        {"image": "./imagesTr/MSWAL_0571_0000.nii.gz", "label": "./labelsTr/MSWAL_0571.nii.gz"},
        {"image": "./imagesTr/MSWAL_0573_0000.nii.gz", "label": "./labelsTr/MSWAL_0573.nii.gz"},
        {"image": "./imagesTr/MSWAL_0574_0000.nii.gz", "label": "./labelsTr/MSWAL_0574.nii.gz"},
        {"image": "./imagesTr/MSWAL_0575_0000.nii.gz", "label": "./labelsTr/MSWAL_0575.nii.gz"},
        {"image": "./imagesTr/MSWAL_0577_0000.nii.gz", "label": "./labelsTr/MSWAL_0577.nii.gz"},
        {"image": "./imagesTr/MSWAL_0578_0000.nii.gz", "label": "./labelsTr/MSWAL_0578.nii.gz"},
        {"image": "./imagesTr/MSWAL_0579_0000.nii.gz", "label": "./labelsTr/MSWAL_0579.nii.gz"},
        {"image": "./imagesTr/MSWAL_0580_0000.nii.gz", "label": "./labelsTr/MSWAL_0580.nii.gz"},
        {"image": "./imagesTr/MSWAL_0581_0000.nii.gz", "label": "./labelsTr/MSWAL_0581.nii.gz"},
        {"image": "./imagesTr/MSWAL_0582_0000.nii.gz", "label": "./labelsTr/MSWAL_0582.nii.gz"},
        {"image": "./imagesTr/MSWAL_0583_0000.nii.gz", "label": "./labelsTr/MSWAL_0583.nii.gz"},
        {"image": "./imagesTr/MSWAL_0584_0000.nii.gz", "label": "./labelsTr/MSWAL_0584.nii.gz"},
        {"image": "./imagesTr/MSWAL_0586_0000.nii.gz", "label": "./labelsTr/MSWAL_0586.nii.gz"},
        {"image": "./imagesTr/MSWAL_0590_0000.nii.gz", "label": "./labelsTr/MSWAL_0590.nii.gz"},
        {"image": "./imagesTr/MSWAL_0591_0000.nii.gz", "label": "./labelsTr/MSWAL_0591.nii.gz"},
        {"image": "./imagesTr/MSWAL_0592_0000.nii.gz", "label": "./labelsTr/MSWAL_0592.nii.gz"},
        {"image": "./imagesTr/MSWAL_0593_0000.nii.gz", "label": "./labelsTr/MSWAL_0593.nii.gz"},
        {"image": "./imagesTr/MSWAL_0595_0000.nii.gz", "label": "./labelsTr/MSWAL_0595.nii.gz"},
        {"image": "./imagesTr/MSWAL_0596_0000.nii.gz", "label": "./labelsTr/MSWAL_0596.nii.gz"},
        {"image": "./imagesTr/MSWAL_0597_0000.nii.gz", "label": "./labelsTr/MSWAL_0597.nii.gz"},
        {"image": "./imagesTr/MSWAL_0598_0000.nii.gz", "label": "./labelsTr/MSWAL_0598.nii.gz"},
        {"image": "./imagesTr/MSWAL_0599_0000.nii.gz", "label": "./labelsTr/MSWAL_0599.nii.gz"},
        {"image": "./imagesTr/MSWAL_0600_0000.nii.gz", "label": "./labelsTr/MSWAL_0600.nii.gz"},
        {"image": "./imagesTr/MSWAL_0601_0000.nii.gz", "label": "./labelsTr/MSWAL_0601.nii.gz"},
        {"image": "./imagesTr/MSWAL_0602_0000.nii.gz", "label": "./labelsTr/MSWAL_0602.nii.gz"},
        {"image": "./imagesTr/MSWAL_0604_0000.nii.gz", "label": "./labelsTr/MSWAL_0604.nii.gz"},
        {"image": "./imagesTr/MSWAL_0605_0000.nii.gz", "label": "./labelsTr/MSWAL_0605.nii.gz"},
        {"image": "./imagesTr/MSWAL_0608_0000.nii.gz", "label": "./labelsTr/MSWAL_0608.nii.gz"},
        {"image": "./imagesTr/MSWAL_0612_0000.nii.gz", "label": "./labelsTr/MSWAL_0612.nii.gz"},
        {"image": "./imagesTr/MSWAL_0614_0000.nii.gz", "label": "./labelsTr/MSWAL_0614.nii.gz"},
        {"image": "./imagesTr/MSWAL_0615_0000.nii.gz", "label": "./labelsTr/MSWAL_0615.nii.gz"},
        {"image": "./imagesTr/MSWAL_0616_0000.nii.gz", "label": "./labelsTr/MSWAL_0616.nii.gz"},
        {"image": "./imagesTr/MSWAL_0617_0000.nii.gz", "label": "./labelsTr/MSWAL_0617.nii.gz"},
        {"image": "./imagesTr/MSWAL_0621_0000.nii.gz", "label": "./labelsTr/MSWAL_0621.nii.gz"},
        {"image": "./imagesTr/MSWAL_0623_0000.nii.gz", "label": "./labelsTr/MSWAL_0623.nii.gz"},
        {"image": "./imagesTr/MSWAL_0625_0000.nii.gz", "label": "./labelsTr/MSWAL_0625.nii.gz"},
        {"image": "./imagesTr/MSWAL_0626_0000.nii.gz", "label": "./labelsTr/MSWAL_0626.nii.gz"},
        {"image": "./imagesTr/MSWAL_0627_0000.nii.gz", "label": "./labelsTr/MSWAL_0627.nii.gz"},
        {"image": "./imagesTr/MSWAL_0628_0000.nii.gz", "label": "./labelsTr/MSWAL_0628.nii.gz"},
        {"image": "./imagesTr/MSWAL_0629_0000.nii.gz", "label": "./labelsTr/MSWAL_0629.nii.gz"},
        {"image": "./imagesTr/MSWAL_0630_0000.nii.gz", "label": "./labelsTr/MSWAL_0630.nii.gz"},
        {"image": "./imagesTr/MSWAL_0632_0000.nii.gz", "label": "./labelsTr/MSWAL_0632.nii.gz"},
        {"image": "./imagesTr/MSWAL_0635_0000.nii.gz", "label": "./labelsTr/MSWAL_0635.nii.gz"},
        {"image": "./imagesTr/MSWAL_0636_0000.nii.gz", "label": "./labelsTr/MSWAL_0636.nii.gz"},
        {"image": "./imagesTr/MSWAL_0638_0000.nii.gz", "label": "./labelsTr/MSWAL_0638.nii.gz"},
        {"image": "./imagesTr/MSWAL_0640_0000.nii.gz", "label": "./labelsTr/MSWAL_0640.nii.gz"},
        {"image": "./imagesTr/MSWAL_0641_0000.nii.gz", "label": "./labelsTr/MSWAL_0641.nii.gz"},
        {"image": "./imagesTr/MSWAL_0643_0000.nii.gz", "label": "./labelsTr/MSWAL_0643.nii.gz"},
        {"image": "./imagesTr/MSWAL_0644_0000.nii.gz", "label": "./labelsTr/MSWAL_0644.nii.gz"},
        {"image": "./imagesTr/MSWAL_0646_0000.nii.gz", "label": "./labelsTr/MSWAL_0646.nii.gz"},
        {"image": "./imagesTr/MSWAL_0648_0000.nii.gz", "label": "./labelsTr/MSWAL_0648.nii.gz"},
        {"image": "./imagesTr/MSWAL_0649_0000.nii.gz", "label": "./labelsTr/MSWAL_0649.nii.gz"},
        {"image": "./imagesTr/MSWAL_0650_0000.nii.gz", "label": "./labelsTr/MSWAL_0650.nii.gz"},
        {"image": "./imagesTr/MSWAL_0651_0000.nii.gz", "label": "./labelsTr/MSWAL_0651.nii.gz"},
        {"image": "./imagesTr/MSWAL_0653_0000.nii.gz", "label": "./labelsTr/MSWAL_0653.nii.gz"},
        {"image": "./imagesTr/MSWAL_0654_0000.nii.gz", "label": "./labelsTr/MSWAL_0654.nii.gz"},
        {"image": "./imagesTr/MSWAL_0655_0000.nii.gz", "label": "./labelsTr/MSWAL_0655.nii.gz"},
        {"image": "./imagesTr/MSWAL_0656_0000.nii.gz", "label": "./labelsTr/MSWAL_0656.nii.gz"},
        {"image": "./imagesTr/MSWAL_0658_0000.nii.gz", "label": "./labelsTr/MSWAL_0658.nii.gz"},
        {"image": "./imagesTr/MSWAL_0660_0000.nii.gz", "label": "./labelsTr/MSWAL_0660.nii.gz"},
        {"image": "./imagesTr/MSWAL_0661_0000.nii.gz", "label": "./labelsTr/MSWAL_0661.nii.gz"},
        {"image": "./imagesTr/MSWAL_0662_0000.nii.gz", "label": "./labelsTr/MSWAL_0662.nii.gz"},
        {"image": "./imagesTr/MSWAL_0663_0000.nii.gz", "label": "./labelsTr/MSWAL_0663.nii.gz"},
        {"image": "./imagesTr/MSWAL_0666_0000.nii.gz", "label": "./labelsTr/MSWAL_0666.nii.gz"},
        {"image": "./imagesTr/MSWAL_0667_0000.nii.gz", "label": "./labelsTr/MSWAL_0667.nii.gz"},
        {"image": "./imagesTr/MSWAL_0668_0000.nii.gz", "label": "./labelsTr/MSWAL_0668.nii.gz"},
        {"image": "./imagesTr/MSWAL_0669_0000.nii.gz", "label": "./labelsTr/MSWAL_0669.nii.gz"},
        {"image": "./imagesTr/MSWAL_0670_0000.nii.gz", "label": "./labelsTr/MSWAL_0670.nii.gz"},
        {"image": "./imagesTr/MSWAL_0671_0000.nii.gz", "label": "./labelsTr/MSWAL_0671.nii.gz"},
        {"image": "./imagesTr/MSWAL_0673_0000.nii.gz", "label": "./labelsTr/MSWAL_0673.nii.gz"},
        {"image": "./imagesTr/MSWAL_0674_0000.nii.gz", "label": "./labelsTr/MSWAL_0674.nii.gz"},
        {"image": "./imagesTr/MSWAL_0675_0000.nii.gz", "label": "./labelsTr/MSWAL_0675.nii.gz"},
        {"image": "./imagesTr/MSWAL_0676_0000.nii.gz", "label": "./labelsTr/MSWAL_0676.nii.gz"},
        {"image": "./imagesTr/MSWAL_0677_0000.nii.gz", "label": "./labelsTr/MSWAL_0677.nii.gz"},
        {"image": "./imagesTr/MSWAL_0679_0000.nii.gz", "label": "./labelsTr/MSWAL_0679.nii.gz"},
        {"image": "./imagesTr/MSWAL_0680_0000.nii.gz", "label": "./labelsTr/MSWAL_0680.nii.gz"},
        {"image": "./imagesTr/MSWAL_0681_0000.nii.gz", "label": "./labelsTr/MSWAL_0681.nii.gz"},
        {"image": "./imagesTr/MSWAL_0682_0000.nii.gz", "label": "./labelsTr/MSWAL_0682.nii.gz"},
        {"image": "./imagesTr/MSWAL_0685_0000.nii.gz", "label": "./labelsTr/MSWAL_0685.nii.gz"},
        {"image": "./imagesTr/MSWAL_0686_0000.nii.gz", "label": "./labelsTr/MSWAL_0686.nii.gz"},
        {"image": "./imagesTr/MSWAL_0687_0000.nii.gz", "label": "./labelsTr/MSWAL_0687.nii.gz"},
        {"image": "./imagesTr/MSWAL_0688_0000.nii.gz", "label": "./labelsTr/MSWAL_0688.nii.gz"},
        {"image": "./imagesTr/MSWAL_0690_0000.nii.gz", "label": "./labelsTr/MSWAL_0690.nii.gz"},
        {"image": "./imagesTr/MSWAL_0692_0000.nii.gz", "label": "./labelsTr/MSWAL_0692.nii.gz"},
        {"image": "./imagesTr/MSWAL_0693_0000.nii.gz", "label": "./labelsTr/MSWAL_0693.nii.gz"},
        {"image": "./imagesTr/MSWAL_0694_0000.nii.gz", "label": "./labelsTr/MSWAL_0694.nii.gz"}
    ],
    "test": [
        {"image": "./imagesTs/MSWAL_0004_0000.nii.gz", "label": "./labelsTs/MSWAL_0004.nii.gz"},
        {"image": "./imagesTs/MSWAL_0005_0000.nii.gz", "label": "./labelsTs/MSWAL_0005.nii.gz"},
        {"image": "./imagesTs/MSWAL_0006_0000.nii.gz", "label": "./labelsTs/MSWAL_0006.nii.gz"},
        {"image": "./imagesTs/MSWAL_0007_0000.nii.gz", "label": "./labelsTs/MSWAL_0007.nii.gz"},
        {"image": "./imagesTs/MSWAL_0010_0000.nii.gz", "label": "./labelsTs/MSWAL_0010.nii.gz"},
        {"image": "./imagesTs/MSWAL_0012_0000.nii.gz", "label": "./labelsTs/MSWAL_0012.nii.gz"},
        {"image": "./imagesTs/MSWAL_0016_0000.nii.gz", "label": "./labelsTs/MSWAL_0016.nii.gz"},
        {"image": "./imagesTs/MSWAL_0019_0000.nii.gz", "label": "./labelsTs/MSWAL_0019.nii.gz"},
        {"image": "./imagesTs/MSWAL_0023_0000.nii.gz", "label": "./labelsTs/MSWAL_0023.nii.gz"},
        {"image": "./imagesTs/MSWAL_0025_0000.nii.gz", "label": "./labelsTs/MSWAL_0025.nii.gz"},
        {"image": "./imagesTs/MSWAL_0030_0000.nii.gz", "label": "./labelsTs/MSWAL_0030.nii.gz"},
        {"image": "./imagesTs/MSWAL_0036_0000.nii.gz", "label": "./labelsTs/MSWAL_0036.nii.gz"},
        {"image": "./imagesTs/MSWAL_0043_0000.nii.gz", "label": "./labelsTs/MSWAL_0043.nii.gz"},
        {"image": "./imagesTs/MSWAL_0044_0000.nii.gz", "label": "./labelsTs/MSWAL_0044.nii.gz"},
        {"image": "./imagesTs/MSWAL_0047_0000.nii.gz", "label": "./labelsTs/MSWAL_0047.nii.gz"},
        {"image": "./imagesTs/MSWAL_0048_0000.nii.gz", "label": "./labelsTs/MSWAL_0048.nii.gz"},
        {"image": "./imagesTs/MSWAL_0053_0000.nii.gz", "label": "./labelsTs/MSWAL_0053.nii.gz"},
        {"image": "./imagesTs/MSWAL_0058_0000.nii.gz", "label": "./labelsTs/MSWAL_0058.nii.gz"},
        {"image": "./imagesTs/MSWAL_0062_0000.nii.gz", "label": "./labelsTs/MSWAL_0062.nii.gz"},
        {"image": "./imagesTs/MSWAL_0068_0000.nii.gz", "label": "./labelsTs/MSWAL_0068.nii.gz"},
        {"image": "./imagesTs/MSWAL_0070_0000.nii.gz", "label": "./labelsTs/MSWAL_0070.nii.gz"},
        {"image": "./imagesTs/MSWAL_0071_0000.nii.gz", "label": "./labelsTs/MSWAL_0071.nii.gz"},
        {"image": "./imagesTs/MSWAL_0073_0000.nii.gz", "label": "./labelsTs/MSWAL_0073.nii.gz"},
        {"image": "./imagesTs/MSWAL_0074_0000.nii.gz", "label": "./labelsTs/MSWAL_0074.nii.gz"},
        {"image": "./imagesTs/MSWAL_0076_0000.nii.gz", "label": "./labelsTs/MSWAL_0076.nii.gz"},
        {"image": "./imagesTs/MSWAL_0078_0000.nii.gz", "label": "./labelsTs/MSWAL_0078.nii.gz"},
        {"image": "./imagesTs/MSWAL_0079_0000.nii.gz", "label": "./labelsTs/MSWAL_0079.nii.gz"},
        {"image": "./imagesTs/MSWAL_0081_0000.nii.gz", "label": "./labelsTs/MSWAL_0081.nii.gz"},
        {"image": "./imagesTs/MSWAL_0087_0000.nii.gz", "label": "./labelsTs/MSWAL_0087.nii.gz"},
        {"image": "./imagesTs/MSWAL_0090_0000.nii.gz", "label": "./labelsTs/MSWAL_0090.nii.gz"},
        {"image": "./imagesTs/MSWAL_0091_0000.nii.gz", "label": "./labelsTs/MSWAL_0091.nii.gz"},
        {"image": "./imagesTs/MSWAL_0097_0000.nii.gz", "label": "./labelsTs/MSWAL_0097.nii.gz"},
        {"image": "./imagesTs/MSWAL_0100_0000.nii.gz", "label": "./labelsTs/MSWAL_0100.nii.gz"},
        {"image": "./imagesTs/MSWAL_0107_0000.nii.gz", "label": "./labelsTs/MSWAL_0107.nii.gz"},
        {"image": "./imagesTs/MSWAL_0115_0000.nii.gz", "label": "./labelsTs/MSWAL_0115.nii.gz"},
        {"image": "./imagesTs/MSWAL_0116_0000.nii.gz", "label": "./labelsTs/MSWAL_0116.nii.gz"},
        {"image": "./imagesTs/MSWAL_0118_0000.nii.gz", "label": "./labelsTs/MSWAL_0118.nii.gz"},
        {"image": "./imagesTs/MSWAL_0121_0000.nii.gz", "label": "./labelsTs/MSWAL_0121.nii.gz"},
        {"image": "./imagesTs/MSWAL_0123_0000.nii.gz", "label": "./labelsTs/MSWAL_0123.nii.gz"},
        {"image": "./imagesTs/MSWAL_0131_0000.nii.gz", "label": "./labelsTs/MSWAL_0131.nii.gz"},
        {"image": "./imagesTs/MSWAL_0135_0000.nii.gz", "label": "./labelsTs/MSWAL_0135.nii.gz"},
        {"image": "./imagesTs/MSWAL_0137_0000.nii.gz", "label": "./labelsTs/MSWAL_0137.nii.gz"},
        {"image": "./imagesTs/MSWAL_0144_0000.nii.gz", "label": "./labelsTs/MSWAL_0144.nii.gz"},
        {"image": "./imagesTs/MSWAL_0146_0000.nii.gz", "label": "./labelsTs/MSWAL_0146.nii.gz"},
        {"image": "./imagesTs/MSWAL_0153_0000.nii.gz", "label": "./labelsTs/MSWAL_0153.nii.gz"},
        {"image": "./imagesTs/MSWAL_0154_0000.nii.gz", "label": "./labelsTs/MSWAL_0154.nii.gz"},
        {"image": "./imagesTs/MSWAL_0155_0000.nii.gz", "label": "./labelsTs/MSWAL_0155.nii.gz"},
        {"image": "./imagesTs/MSWAL_0156_0000.nii.gz", "label": "./labelsTs/MSWAL_0156.nii.gz"},
        {"image": "./imagesTs/MSWAL_0158_0000.nii.gz", "label": "./labelsTs/MSWAL_0158.nii.gz"},
        {"image": "./imagesTs/MSWAL_0160_0000.nii.gz", "label": "./labelsTs/MSWAL_0160.nii.gz"},
        {"image": "./imagesTs/MSWAL_0161_0000.nii.gz", "label": "./labelsTs/MSWAL_0161.nii.gz"},
        {"image": "./imagesTs/MSWAL_0164_0000.nii.gz", "label": "./labelsTs/MSWAL_0164.nii.gz"},
        {"image": "./imagesTs/MSWAL_0181_0000.nii.gz", "label": "./labelsTs/MSWAL_0181.nii.gz"},
        {"image": "./imagesTs/MSWAL_0190_0000.nii.gz", "label": "./labelsTs/MSWAL_0190.nii.gz"},
        {"image": "./imagesTs/MSWAL_0191_0000.nii.gz", "label": "./labelsTs/MSWAL_0191.nii.gz"},
        {"image": "./imagesTs/MSWAL_0192_0000.nii.gz", "label": "./labelsTs/MSWAL_0192.nii.gz"},
        {"image": "./imagesTs/MSWAL_0196_0000.nii.gz", "label": "./labelsTs/MSWAL_0196.nii.gz"},
        {"image": "./imagesTs/MSWAL_0197_0000.nii.gz", "label": "./labelsTs/MSWAL_0197.nii.gz"},
        {"image": "./imagesTs/MSWAL_0198_0000.nii.gz", "label": "./labelsTs/MSWAL_0198.nii.gz"},
        {"image": "./imagesTs/MSWAL_0200_0000.nii.gz", "label": "./labelsTs/MSWAL_0200.nii.gz"},
        {"image": "./imagesTs/MSWAL_0205_0000.nii.gz", "label": "./labelsTs/MSWAL_0205.nii.gz"},
        {"image": "./imagesTs/MSWAL_0206_0000.nii.gz", "label": "./labelsTs/MSWAL_0206.nii.gz"},
        {"image": "./imagesTs/MSWAL_0210_0000.nii.gz", "label": "./labelsTs/MSWAL_0210.nii.gz"},
        {"image": "./imagesTs/MSWAL_0211_0000.nii.gz", "label": "./labelsTs/MSWAL_0211.nii.gz"},
        {"image": "./imagesTs/MSWAL_0212_0000.nii.gz", "label": "./labelsTs/MSWAL_0212.nii.gz"},
        {"image": "./imagesTs/MSWAL_0213_0000.nii.gz", "label": "./labelsTs/MSWAL_0213.nii.gz"},
        {"image": "./imagesTs/MSWAL_0215_0000.nii.gz", "label": "./labelsTs/MSWAL_0215.nii.gz"},
        {"image": "./imagesTs/MSWAL_0216_0000.nii.gz", "label": "./labelsTs/MSWAL_0216.nii.gz"},
        {"image": "./imagesTs/MSWAL_0231_0000.nii.gz", "label": "./labelsTs/MSWAL_0231.nii.gz"},
        {"image": "./imagesTs/MSWAL_0232_0000.nii.gz", "label": "./labelsTs/MSWAL_0232.nii.gz"},
        {"image": "./imagesTs/MSWAL_0235_0000.nii.gz", "label": "./labelsTs/MSWAL_0235.nii.gz"},
        {"image": "./imagesTs/MSWAL_0236_0000.nii.gz", "label": "./labelsTs/MSWAL_0236.nii.gz"},
        {"image": "./imagesTs/MSWAL_0237_0000.nii.gz", "label": "./labelsTs/MSWAL_0237.nii.gz"},
        {"image": "./imagesTs/MSWAL_0239_0000.nii.gz", "label": "./labelsTs/MSWAL_0239.nii.gz"},
        {"image": "./imagesTs/MSWAL_0240_0000.nii.gz", "label": "./labelsTs/MSWAL_0240.nii.gz"},
        {"image": "./imagesTs/MSWAL_0244_0000.nii.gz", "label": "./labelsTs/MSWAL_0244.nii.gz"},
        {"image": "./imagesTs/MSWAL_0249_0000.nii.gz", "label": "./labelsTs/MSWAL_0249.nii.gz"},
        {"image": "./imagesTs/MSWAL_0250_0000.nii.gz", "label": "./labelsTs/MSWAL_0250.nii.gz"},
        {"image": "./imagesTs/MSWAL_0266_0000.nii.gz", "label": "./labelsTs/MSWAL_0266.nii.gz"},
        {"image": "./imagesTs/MSWAL_0268_0000.nii.gz", "label": "./labelsTs/MSWAL_0268.nii.gz"},
        {"image": "./imagesTs/MSWAL_0269_0000.nii.gz", "label": "./labelsTs/MSWAL_0269.nii.gz"},
        {"image": "./imagesTs/MSWAL_0280_0000.nii.gz", "label": "./labelsTs/MSWAL_0280.nii.gz"},
        {"image": "./imagesTs/MSWAL_0286_0000.nii.gz", "label": "./labelsTs/MSWAL_0286.nii.gz"},
        {"image": "./imagesTs/MSWAL_0287_0000.nii.gz", "label": "./labelsTs/MSWAL_0287.nii.gz"},
        {"image": "./imagesTs/MSWAL_0291_0000.nii.gz", "label": "./labelsTs/MSWAL_0291.nii.gz"},
        {"image": "./imagesTs/MSWAL_0292_0000.nii.gz", "label": "./labelsTs/MSWAL_0292.nii.gz"},
        {"image": "./imagesTs/MSWAL_0294_0000.nii.gz", "label": "./labelsTs/MSWAL_0294.nii.gz"},
        {"image": "./imagesTs/MSWAL_0295_0000.nii.gz", "label": "./labelsTs/MSWAL_0295.nii.gz"},
        {"image": "./imagesTs/MSWAL_0298_0000.nii.gz", "label": "./labelsTs/MSWAL_0298.nii.gz"},
        {"image": "./imagesTs/MSWAL_0299_0000.nii.gz", "label": "./labelsTs/MSWAL_0299.nii.gz"},
        {"image": "./imagesTs/MSWAL_0300_0000.nii.gz", "label": "./labelsTs/MSWAL_0300.nii.gz"},
        {"image": "./imagesTs/MSWAL_0304_0000.nii.gz", "label": "./labelsTs/MSWAL_0304.nii.gz"},
        {"image": "./imagesTs/MSWAL_0305_0000.nii.gz", "label": "./labelsTs/MSWAL_0305.nii.gz"},
        {"image": "./imagesTs/MSWAL_0309_0000.nii.gz", "label": "./labelsTs/MSWAL_0309.nii.gz"},
        {"image": "./imagesTs/MSWAL_0310_0000.nii.gz", "label": "./labelsTs/MSWAL_0310.nii.gz"},
        {"image": "./imagesTs/MSWAL_0315_0000.nii.gz", "label": "./labelsTs/MSWAL_0315.nii.gz"},
        {"image": "./imagesTs/MSWAL_0319_0000.nii.gz", "label": "./labelsTs/MSWAL_0319.nii.gz"},
        {"image": "./imagesTs/MSWAL_0321_0000.nii.gz", "label": "./labelsTs/MSWAL_0321.nii.gz"},
        {"image": "./imagesTs/MSWAL_0322_0000.nii.gz", "label": "./labelsTs/MSWAL_0322.nii.gz"},
        {"image": "./imagesTs/MSWAL_0325_0000.nii.gz", "label": "./labelsTs/MSWAL_0325.nii.gz"},
        {"image": "./imagesTs/MSWAL_0329_0000.nii.gz", "label": "./labelsTs/MSWAL_0329.nii.gz"},
        {"image": "./imagesTs/MSWAL_0339_0000.nii.gz", "label": "./labelsTs/MSWAL_0339.nii.gz"},
        {"image": "./imagesTs/MSWAL_0340_0000.nii.gz", "label": "./labelsTs/MSWAL_0340.nii.gz"},
        {"image": "./imagesTs/MSWAL_0347_0000.nii.gz", "label": "./labelsTs/MSWAL_0347.nii.gz"},
        {"image": "./imagesTs/MSWAL_0349_0000.nii.gz", "label": "./labelsTs/MSWAL_0349.nii.gz"},
        {"image": "./imagesTs/MSWAL_0350_0000.nii.gz", "label": "./labelsTs/MSWAL_0350.nii.gz"},
        {"image": "./imagesTs/MSWAL_0351_0000.nii.gz", "label": "./labelsTs/MSWAL_0351.nii.gz"},
        {"image": "./imagesTs/MSWAL_0352_0000.nii.gz", "label": "./labelsTs/MSWAL_0352.nii.gz"},
        {"image": "./imagesTs/MSWAL_0358_0000.nii.gz", "label": "./labelsTs/MSWAL_0358.nii.gz"},
        {"image": "./imagesTs/MSWAL_0359_0000.nii.gz", "label": "./labelsTs/MSWAL_0359.nii.gz"},
        {"image": "./imagesTs/MSWAL_0364_0000.nii.gz", "label": "./labelsTs/MSWAL_0364.nii.gz"},
        {"image": "./imagesTs/MSWAL_0367_0000.nii.gz", "label": "./labelsTs/MSWAL_0367.nii.gz"},
        {"image": "./imagesTs/MSWAL_0368_0000.nii.gz", "label": "./labelsTs/MSWAL_0368.nii.gz"},
        {"image": "./imagesTs/MSWAL_0371_0000.nii.gz", "label": "./labelsTs/MSWAL_0371.nii.gz"},
{'image': './imagesTs/MSWAL_0372_0000.nii.gz', 'label': './labelsTs/MSWAL_0372.nii.gz'}, {'image': './imagesTs/MSWAL_0377_0000.nii.gz', 'label': './labelsTs/MSWAL_0377.nii.gz'}, {'image': './imagesTs/MSWAL_0383_0000.nii.gz', 'label': './labelsTs/MSWAL_0383.nii.gz'}, {'image': './imagesTs/MSWAL_0384_0000.nii.gz', 'label': './labelsTs/MSWAL_0384.nii.gz'}, {'image': './imagesTs/MSWAL_0385_0000.nii.gz', 'label': './labelsTs/MSWAL_0385.nii.gz'}, {'image': './imagesTs/MSWAL_0386_0000.nii.gz', 'label': './labelsTs/MSWAL_0386.nii.gz'}, {'image': './imagesTs/MSWAL_0394_0000.nii.gz', 'label': './labelsTs/MSWAL_0394.nii.gz'}, {'image': './imagesTs/MSWAL_0395_0000.nii.gz', 'label': './labelsTs/MSWAL_0395.nii.gz'}, {'image': './imagesTs/MSWAL_0396_0000.nii.gz', 'label': './labelsTs/MSWAL_0396.nii.gz'}, {'image': './imagesTs/MSWAL_0401_0000.nii.gz', 'label': './labelsTs/MSWAL_0401.nii.gz'}, {'image': './imagesTs/MSWAL_0404_0000.nii.gz', 'label': './labelsTs/MSWAL_0404.nii.gz'}, {'image': './imagesTs/MSWAL_0405_0000.nii.gz', 'label': './labelsTs/MSWAL_0405.nii.gz'}, {'image': './imagesTs/MSWAL_0406_0000.nii.gz', 'label': './labelsTs/MSWAL_0406.nii.gz'}, {'image': './imagesTs/MSWAL_0408_0000.nii.gz', 'label': './labelsTs/MSWAL_0408.nii.gz'}, {'image': './imagesTs/MSWAL_0413_0000.nii.gz', 'label': './labelsTs/MSWAL_0413.nii.gz'}, {'image': './imagesTs/MSWAL_0424_0000.nii.gz', 'label': './labelsTs/MSWAL_0424.nii.gz'}, {'image': './imagesTs/MSWAL_0433_0000.nii.gz', 'label': './labelsTs/MSWAL_0433.nii.gz'}, {'image': './imagesTs/MSWAL_0441_0000.nii.gz', 'label': './labelsTs/MSWAL_0441.nii.gz'}, {'image': './imagesTs/MSWAL_0443_0000.nii.gz', 'label': './labelsTs/MSWAL_0443.nii.gz'}, {'image': './imagesTs/MSWAL_0444_0000.nii.gz', 'label': './labelsTs/MSWAL_0444.nii.gz'}, {'image': './imagesTs/MSWAL_0445_0000.nii.gz', 'label': './labelsTs/MSWAL_0445.nii.gz'}, {'image': './imagesTs/MSWAL_0448_0000.nii.gz', 'label': './labelsTs/MSWAL_0448.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0449_0000.nii.gz', 'label': './labelsTs/MSWAL_0449.nii.gz'}, {'image': './imagesTs/MSWAL_0450_0000.nii.gz', 'label': './labelsTs/MSWAL_0450.nii.gz'}, {'image': './imagesTs/MSWAL_0451_0000.nii.gz', 'label': './labelsTs/MSWAL_0451.nii.gz'}, {'image': './imagesTs/MSWAL_0454_0000.nii.gz', 'label': './labelsTs/MSWAL_0454.nii.gz'}, {'image': './imagesTs/MSWAL_0456_0000.nii.gz', 'label': './labelsTs/MSWAL_0456.nii.gz'}, {'image': './imagesTs/MSWAL_0458_0000.nii.gz', 'label': './labelsTs/MSWAL_0458.nii.gz'}, {'image': './imagesTs/MSWAL_0459_0000.nii.gz', 'label': './labelsTs/MSWAL_0459.nii.gz'}, {'image': './imagesTs/MSWAL_0462_0000.nii.gz', 'label': './labelsTs/MSWAL_0462.nii.gz'}, {'image': './imagesTs/MSWAL_0467_0000.nii.gz', 'label': './labelsTs/MSWAL_0467.nii.gz'}, {'image': './imagesTs/MSWAL_0469_0000.nii.gz', 'label': './labelsTs/MSWAL_0469.nii.gz'}, {'image': './imagesTs/MSWAL_0472_0000.nii.gz', 'label': './labelsTs/MSWAL_0472.nii.gz'}, {'image': './imagesTs/MSWAL_0478_0000.nii.gz', 'label': './labelsTs/MSWAL_0478.nii.gz'}, {'image': './imagesTs/MSWAL_0481_0000.nii.gz', 'label': './labelsTs/MSWAL_0481.nii.gz'}, {'image': './imagesTs/MSWAL_0494_0000.nii.gz', 'label': './labelsTs/MSWAL_0494.nii.gz'}, {'image': './imagesTs/MSWAL_0496_0000.nii.gz', 'label': './labelsTs/MSWAL_0496.nii.gz'}, {'image': './imagesTs/MSWAL_0499_0000.nii.gz', 'label': './labelsTs/MSWAL_0499.nii.gz'}, {'image': './imagesTs/MSWAL_0502_0000.nii.gz', 'label': './labelsTs/MSWAL_0502.nii.gz'}, {'image': './imagesTs/MSWAL_0503_0000.nii.gz', 'label': './labelsTs/MSWAL_0503.nii.gz'}, {'image': './imagesTs/MSWAL_0511_0000.nii.gz', 'label': './labelsTs/MSWAL_0511.nii.gz'}, {'image': './imagesTs/MSWAL_0513_0000.nii.gz', 'label': './labelsTs/MSWAL_0513.nii.gz'}, {'image': './imagesTs/MSWAL_0514_0000.nii.gz', 'label': './labelsTs/MSWAL_0514.nii.gz'}, {'image': './imagesTs/MSWAL_0515_0000.nii.gz', 'label': './labelsTs/MSWAL_0515.nii.gz'}, {'image': './imagesTs/MSWAL_0517_0000.nii.gz', 
'label': './labelsTs/MSWAL_0517.nii.gz'}, {'image': './imagesTs/MSWAL_0520_0000.nii.gz', 'label': './labelsTs/MSWAL_0520.nii.gz'}, {'image': './imagesTs/MSWAL_0525_0000.nii.gz', 'label': './labelsTs/MSWAL_0525.nii.gz'}, {'image': './imagesTs/MSWAL_0528_0000.nii.gz', 'label': './labelsTs/MSWAL_0528.nii.gz'}, {'image': './imagesTs/MSWAL_0529_0000.nii.gz', 'label': './labelsTs/MSWAL_0529.nii.gz'}, {'image': './imagesTs/MSWAL_0532_0000.nii.gz', 'label': './labelsTs/MSWAL_0532.nii.gz'}, {'image': './imagesTs/MSWAL_0533_0000.nii.gz', 'label': './labelsTs/MSWAL_0533.nii.gz'}, {'image': './imagesTs/MSWAL_0537_0000.nii.gz', 'label': './labelsTs/MSWAL_0537.nii.gz'}, {'image': './imagesTs/MSWAL_0541_0000.nii.gz', 'label': './labelsTs/MSWAL_0541.nii.gz'}, {'image': './imagesTs/MSWAL_0543_0000.nii.gz', 'label': './labelsTs/MSWAL_0543.nii.gz'}, {'image': './imagesTs/MSWAL_0560_0000.nii.gz', 'label': './labelsTs/MSWAL_0560.nii.gz'}, {'image': './imagesTs/MSWAL_0565_0000.nii.gz', 'label': './labelsTs/MSWAL_0565.nii.gz'}, {'image': './imagesTs/MSWAL_0569_0000.nii.gz', 'label': './labelsTs/MSWAL_0569.nii.gz'}, {'image': './imagesTs/MSWAL_0570_0000.nii.gz', 'label': './labelsTs/MSWAL_0570.nii.gz'}, {'image': './imagesTs/MSWAL_0572_0000.nii.gz', 'label': './labelsTs/MSWAL_0572.nii.gz'}, {'image': './imagesTs/MSWAL_0576_0000.nii.gz', 'label': './labelsTs/MSWAL_0576.nii.gz'}, {'image': './imagesTs/MSWAL_0585_0000.nii.gz', 'label': './labelsTs/MSWAL_0585.nii.gz'}, {'image': './imagesTs/MSWAL_0587_0000.nii.gz', 'label': './labelsTs/MSWAL_0587.nii.gz'}, {'image': './imagesTs/MSWAL_0588_0000.nii.gz', 'label': './labelsTs/MSWAL_0588.nii.gz'}, {'image': './imagesTs/MSWAL_0589_0000.nii.gz', 'label': './labelsTs/MSWAL_0589.nii.gz'}, {'image': './imagesTs/MSWAL_0594_0000.nii.gz', 'label': './labelsTs/MSWAL_0594.nii.gz'}, {'image': './imagesTs/MSWAL_0603_0000.nii.gz', 'label': './labelsTs/MSWAL_0603.nii.gz'}, {'image': './imagesTs/MSWAL_0606_0000.nii.gz', 'label': './labelsTs/MSWAL_0606.nii.gz'}, 
{'image': './imagesTs/MSWAL_0607_0000.nii.gz', 'label': './labelsTs/MSWAL_0607.nii.gz'}, {'image': './imagesTs/MSWAL_0609_0000.nii.gz', 'label': './labelsTs/MSWAL_0609.nii.gz'}, {'image': './imagesTs/MSWAL_0610_0000.nii.gz', 'label': './labelsTs/MSWAL_0610.nii.gz'}, {'image': './imagesTs/MSWAL_0611_0000.nii.gz', 'label': './labelsTs/MSWAL_0611.nii.gz'}, {'image': './imagesTs/MSWAL_0613_0000.nii.gz', 'label': './labelsTs/MSWAL_0613.nii.gz'}, {'image': './imagesTs/MSWAL_0618_0000.nii.gz', 'label': './labelsTs/MSWAL_0618.nii.gz'}, {'image': './imagesTs/MSWAL_0619_0000.nii.gz', 'label': './labelsTs/MSWAL_0619.nii.gz'}, {'image': './imagesTs/MSWAL_0620_0000.nii.gz', 'label': './labelsTs/MSWAL_0620.nii.gz'}, {'image': './imagesTs/MSWAL_0622_0000.nii.gz', 'label': './labelsTs/MSWAL_0622.nii.gz'}, {'image': './imagesTs/MSWAL_0624_0000.nii.gz', 'label': './labelsTs/MSWAL_0624.nii.gz'}, {'image': './imagesTs/MSWAL_0631_0000.nii.gz', 'label': './labelsTs/MSWAL_0631.nii.gz'}, {'image': './imagesTs/MSWAL_0633_0000.nii.gz', 'label': './labelsTs/MSWAL_0633.nii.gz'}, {'image': './imagesTs/MSWAL_0634_0000.nii.gz', 'label': './labelsTs/MSWAL_0634.nii.gz'}, {'image': './imagesTs/MSWAL_0637_0000.nii.gz', 'label': './labelsTs/MSWAL_0637.nii.gz'}, {'image': './imagesTs/MSWAL_0639_0000.nii.gz', 'label': './labelsTs/MSWAL_0639.nii.gz'}, {'image': './imagesTs/MSWAL_0642_0000.nii.gz', 'label': './labelsTs/MSWAL_0642.nii.gz'}, {'image': './imagesTs/MSWAL_0645_0000.nii.gz', 'label': './labelsTs/MSWAL_0645.nii.gz'}, {'image': './imagesTs/MSWAL_0647_0000.nii.gz', 'label': './labelsTs/MSWAL_0647.nii.gz'}, {'image': './imagesTs/MSWAL_0652_0000.nii.gz', 'label': './labelsTs/MSWAL_0652.nii.gz'}, {'image': './imagesTs/MSWAL_0657_0000.nii.gz', 'label': './labelsTs/MSWAL_0657.nii.gz'}, {'image': './imagesTs/MSWAL_0659_0000.nii.gz', 'label': './labelsTs/MSWAL_0659.nii.gz'}, {'image': './imagesTs/MSWAL_0664_0000.nii.gz', 'label': './labelsTs/MSWAL_0664.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0665_0000.nii.gz', 'label': './labelsTs/MSWAL_0665.nii.gz'}, {'image': './imagesTs/MSWAL_0672_0000.nii.gz', 'label': './labelsTs/MSWAL_0672.nii.gz'}, {'image': './imagesTs/MSWAL_0678_0000.nii.gz', 'label': './labelsTs/MSWAL_0678.nii.gz'}, {'image': './imagesTs/MSWAL_0683_0000.nii.gz', 'label': './labelsTs/MSWAL_0683.nii.gz'}, {'image': './imagesTs/MSWAL_0684_0000.nii.gz', 'label': './labelsTs/MSWAL_0684.nii.gz'}, {'image': './imagesTs/MSWAL_0689_0000.nii.gz', 'label': './labelsTs/MSWAL_0689.nii.gz'}, {'image': './imagesTs/MSWAL_0691_0000.nii.gz', 'label': './labelsTs/MSWAL_0691.nii.gz'}]}", + "device": "cuda:0", + "disable_checkpointing": "False", + "enable_deep_supervision": "True", + "fold": "1", + "folder_with_segs_from_previous_stage": "None", + "gpu_name": "NVIDIA A100-SXM4-80GB", + "grad_scaler": "", + "hostname": "cn0095", + "inference_allowed_mirroring_axes": "(0, 1, 2)", + "initial_lr": "0.01", + "is_cascaded": "False", + "is_ddp": "False", + "label_manager": "", + "local_rank": "0", + "log_file": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/training_log_2026_4_8_15_54_45.txt", + "logger": "", + "loss": "DeepSupervisionWrapper(\n (loss): DC_and_CE_loss(\n (ce): RobustCrossEntropyLoss()\n (dc): OptimizedModule(\n (_orig_mod): MemoryEfficientSoftDiceLoss()\n )\n )\n)", + "lr_scheduler": "", + "my_init_kwargs": "{'plans': {'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'configurations': {'2d': {'data_identifier': 'nnUNetPlans_2d', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 35, 'patch_size': [512, 512], 'median_image_size_in_voxels': [512.0, 512.0], 'spacing': [0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 
'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 8, 'features_per_stage': [32, 64, 128, 256, 512, 512, 512, 512], 'conv_op': 'torch.nn.modules.conv.Conv2d', 'kernel_sizes': [[3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3]], 'strides': [[1, 1], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm2d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_lowres': {'data_identifier': 'nnUNetResEncUNetLPlans_3d_lowres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [190, 381, 381], 'spacing': [1.6798954741801528, 1.0079372845080916, 1.0079372845080916], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 
'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': False, 'next_stage': '3d_cascade_fullres'}, '3d_fullres': {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 
'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_cascade_fullres': {'inherits_from': '3d_fullres', 'previous_stage': '3d_lowres'}}, 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}}, 'configuration': '3d_fullres', 'fold': 1, 'dataset_json': {'name': 'MSWAL', 'description': ' 3D Multi-class Segmentation of Whole Abdominal Lesions Dataset', 'licence': 'CC BY-NC 4.0', 'relase': 'July 8, 2025', 'tensorImageSize': '3D', 'file_ending': '.nii.gz', 'channel_names': {'0': 'CT'}, 'labels': {'background': 0, 'gallstone': 1, 'kidney stone': 2, 'liver tumor': 3, 'kidney tumor': 4, 'pancreatic cancer': 5, 'liver cyst': 6, 'kidney cyst': 7}, 'numTraining': 484, 'numTest': 210, 'training': [{'image': './imagesTr/MSWAL_0001_0000.nii.gz', 'label': './labelsTr/MSWAL_0001.nii.gz'}, {'image': './imagesTr/MSWAL_0002_0000.nii.gz', 'label': './labelsTr/MSWAL_0002.nii.gz'}, {'image': './imagesTr/MSWAL_0003_0000.nii.gz', 'label': './labelsTr/MSWAL_0003.nii.gz'}, {'image': './imagesTr/MSWAL_0008_0000.nii.gz', 'label': './labelsTr/MSWAL_0008.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0009_0000.nii.gz', 'label': './labelsTr/MSWAL_0009.nii.gz'}, {'image': './imagesTr/MSWAL_0011_0000.nii.gz', 'label': './labelsTr/MSWAL_0011.nii.gz'}, {'image': './imagesTr/MSWAL_0013_0000.nii.gz', 'label': './labelsTr/MSWAL_0013.nii.gz'}, {'image': './imagesTr/MSWAL_0014_0000.nii.gz', 'label': './labelsTr/MSWAL_0014.nii.gz'}, {'image': './imagesTr/MSWAL_0015_0000.nii.gz', 'label': './labelsTr/MSWAL_0015.nii.gz'}, {'image': './imagesTr/MSWAL_0017_0000.nii.gz', 'label': './labelsTr/MSWAL_0017.nii.gz'}, {'image': './imagesTr/MSWAL_0018_0000.nii.gz', 'label': './labelsTr/MSWAL_0018.nii.gz'}, {'image': './imagesTr/MSWAL_0020_0000.nii.gz', 'label': './labelsTr/MSWAL_0020.nii.gz'}, {'image': './imagesTr/MSWAL_0021_0000.nii.gz', 'label': './labelsTr/MSWAL_0021.nii.gz'}, {'image': './imagesTr/MSWAL_0022_0000.nii.gz', 'label': './labelsTr/MSWAL_0022.nii.gz'}, {'image': './imagesTr/MSWAL_0024_0000.nii.gz', 'label': './labelsTr/MSWAL_0024.nii.gz'}, {'image': './imagesTr/MSWAL_0026_0000.nii.gz', 'label': './labelsTr/MSWAL_0026.nii.gz'}, {'image': './imagesTr/MSWAL_0027_0000.nii.gz', 'label': './labelsTr/MSWAL_0027.nii.gz'}, {'image': './imagesTr/MSWAL_0028_0000.nii.gz', 'label': './labelsTr/MSWAL_0028.nii.gz'}, {'image': './imagesTr/MSWAL_0029_0000.nii.gz', 'label': './labelsTr/MSWAL_0029.nii.gz'}, {'image': './imagesTr/MSWAL_0031_0000.nii.gz', 'label': './labelsTr/MSWAL_0031.nii.gz'}, {'image': './imagesTr/MSWAL_0032_0000.nii.gz', 'label': './labelsTr/MSWAL_0032.nii.gz'}, {'image': './imagesTr/MSWAL_0033_0000.nii.gz', 'label': './labelsTr/MSWAL_0033.nii.gz'}, {'image': './imagesTr/MSWAL_0034_0000.nii.gz', 'label': './labelsTr/MSWAL_0034.nii.gz'}, {'image': './imagesTr/MSWAL_0035_0000.nii.gz', 'label': './labelsTr/MSWAL_0035.nii.gz'}, {'image': './imagesTr/MSWAL_0037_0000.nii.gz', 'label': './labelsTr/MSWAL_0037.nii.gz'}, {'image': './imagesTr/MSWAL_0038_0000.nii.gz', 'label': './labelsTr/MSWAL_0038.nii.gz'}, {'image': './imagesTr/MSWAL_0039_0000.nii.gz', 
'label': './labelsTr/MSWAL_0039.nii.gz'}, {'image': './imagesTr/MSWAL_0040_0000.nii.gz', 'label': './labelsTr/MSWAL_0040.nii.gz'}, {'image': './imagesTr/MSWAL_0041_0000.nii.gz', 'label': './labelsTr/MSWAL_0041.nii.gz'}, {'image': './imagesTr/MSWAL_0042_0000.nii.gz', 'label': './labelsTr/MSWAL_0042.nii.gz'}, {'image': './imagesTr/MSWAL_0045_0000.nii.gz', 'label': './labelsTr/MSWAL_0045.nii.gz'}, {'image': './imagesTr/MSWAL_0046_0000.nii.gz', 'label': './labelsTr/MSWAL_0046.nii.gz'}, {'image': './imagesTr/MSWAL_0049_0000.nii.gz', 'label': './labelsTr/MSWAL_0049.nii.gz'}, {'image': './imagesTr/MSWAL_0050_0000.nii.gz', 'label': './labelsTr/MSWAL_0050.nii.gz'}, {'image': './imagesTr/MSWAL_0051_0000.nii.gz', 'label': './labelsTr/MSWAL_0051.nii.gz'}, {'image': './imagesTr/MSWAL_0052_0000.nii.gz', 'label': './labelsTr/MSWAL_0052.nii.gz'}, {'image': './imagesTr/MSWAL_0054_0000.nii.gz', 'label': './labelsTr/MSWAL_0054.nii.gz'}, {'image': './imagesTr/MSWAL_0055_0000.nii.gz', 'label': './labelsTr/MSWAL_0055.nii.gz'}, {'image': './imagesTr/MSWAL_0056_0000.nii.gz', 'label': './labelsTr/MSWAL_0056.nii.gz'}, {'image': './imagesTr/MSWAL_0057_0000.nii.gz', 'label': './labelsTr/MSWAL_0057.nii.gz'}, {'image': './imagesTr/MSWAL_0059_0000.nii.gz', 'label': './labelsTr/MSWAL_0059.nii.gz'}, {'image': './imagesTr/MSWAL_0060_0000.nii.gz', 'label': './labelsTr/MSWAL_0060.nii.gz'}, {'image': './imagesTr/MSWAL_0061_0000.nii.gz', 'label': './labelsTr/MSWAL_0061.nii.gz'}, {'image': './imagesTr/MSWAL_0063_0000.nii.gz', 'label': './labelsTr/MSWAL_0063.nii.gz'}, {'image': './imagesTr/MSWAL_0064_0000.nii.gz', 'label': './labelsTr/MSWAL_0064.nii.gz'}, {'image': './imagesTr/MSWAL_0065_0000.nii.gz', 'label': './labelsTr/MSWAL_0065.nii.gz'}, {'image': './imagesTr/MSWAL_0066_0000.nii.gz', 'label': './labelsTr/MSWAL_0066.nii.gz'}, {'image': './imagesTr/MSWAL_0067_0000.nii.gz', 'label': './labelsTr/MSWAL_0067.nii.gz'}, {'image': './imagesTr/MSWAL_0069_0000.nii.gz', 'label': './labelsTr/MSWAL_0069.nii.gz'}, 
{'image': './imagesTr/MSWAL_0072_0000.nii.gz', 'label': './labelsTr/MSWAL_0072.nii.gz'}, {'image': './imagesTr/MSWAL_0075_0000.nii.gz', 'label': './labelsTr/MSWAL_0075.nii.gz'}, {'image': './imagesTr/MSWAL_0077_0000.nii.gz', 'label': './labelsTr/MSWAL_0077.nii.gz'}, {'image': './imagesTr/MSWAL_0080_0000.nii.gz', 'label': './labelsTr/MSWAL_0080.nii.gz'}, {'image': './imagesTr/MSWAL_0082_0000.nii.gz', 'label': './labelsTr/MSWAL_0082.nii.gz'}, {'image': './imagesTr/MSWAL_0083_0000.nii.gz', 'label': './labelsTr/MSWAL_0083.nii.gz'}, {'image': './imagesTr/MSWAL_0084_0000.nii.gz', 'label': './labelsTr/MSWAL_0084.nii.gz'}, {'image': './imagesTr/MSWAL_0085_0000.nii.gz', 'label': './labelsTr/MSWAL_0085.nii.gz'}, {'image': './imagesTr/MSWAL_0086_0000.nii.gz', 'label': './labelsTr/MSWAL_0086.nii.gz'}, {'image': './imagesTr/MSWAL_0088_0000.nii.gz', 'label': './labelsTr/MSWAL_0088.nii.gz'}, {'image': './imagesTr/MSWAL_0089_0000.nii.gz', 'label': './labelsTr/MSWAL_0089.nii.gz'}, {'image': './imagesTr/MSWAL_0092_0000.nii.gz', 'label': './labelsTr/MSWAL_0092.nii.gz'}, {'image': './imagesTr/MSWAL_0093_0000.nii.gz', 'label': './labelsTr/MSWAL_0093.nii.gz'}, {'image': './imagesTr/MSWAL_0094_0000.nii.gz', 'label': './labelsTr/MSWAL_0094.nii.gz'}, {'image': './imagesTr/MSWAL_0095_0000.nii.gz', 'label': './labelsTr/MSWAL_0095.nii.gz'}, {'image': './imagesTr/MSWAL_0096_0000.nii.gz', 'label': './labelsTr/MSWAL_0096.nii.gz'}, {'image': './imagesTr/MSWAL_0098_0000.nii.gz', 'label': './labelsTr/MSWAL_0098.nii.gz'}, {'image': './imagesTr/MSWAL_0099_0000.nii.gz', 'label': './labelsTr/MSWAL_0099.nii.gz'}, {'image': './imagesTr/MSWAL_0101_0000.nii.gz', 'label': './labelsTr/MSWAL_0101.nii.gz'}, {'image': './imagesTr/MSWAL_0102_0000.nii.gz', 'label': './labelsTr/MSWAL_0102.nii.gz'}, {'image': './imagesTr/MSWAL_0103_0000.nii.gz', 'label': './labelsTr/MSWAL_0103.nii.gz'}, {'image': './imagesTr/MSWAL_0104_0000.nii.gz', 'label': './labelsTr/MSWAL_0104.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0105_0000.nii.gz', 'label': './labelsTr/MSWAL_0105.nii.gz'}, {'image': './imagesTr/MSWAL_0106_0000.nii.gz', 'label': './labelsTr/MSWAL_0106.nii.gz'}, {'image': './imagesTr/MSWAL_0108_0000.nii.gz', 'label': './labelsTr/MSWAL_0108.nii.gz'}, {'image': './imagesTr/MSWAL_0109_0000.nii.gz', 'label': './labelsTr/MSWAL_0109.nii.gz'}, {'image': './imagesTr/MSWAL_0110_0000.nii.gz', 'label': './labelsTr/MSWAL_0110.nii.gz'}, {'image': './imagesTr/MSWAL_0111_0000.nii.gz', 'label': './labelsTr/MSWAL_0111.nii.gz'}, {'image': './imagesTr/MSWAL_0112_0000.nii.gz', 'label': './labelsTr/MSWAL_0112.nii.gz'}, {'image': './imagesTr/MSWAL_0113_0000.nii.gz', 'label': './labelsTr/MSWAL_0113.nii.gz'}, {'image': './imagesTr/MSWAL_0114_0000.nii.gz', 'label': './labelsTr/MSWAL_0114.nii.gz'}, {'image': './imagesTr/MSWAL_0117_0000.nii.gz', 'label': './labelsTr/MSWAL_0117.nii.gz'}, {'image': './imagesTr/MSWAL_0119_0000.nii.gz', 'label': './labelsTr/MSWAL_0119.nii.gz'}, {'image': './imagesTr/MSWAL_0120_0000.nii.gz', 'label': './labelsTr/MSWAL_0120.nii.gz'}, {'image': './imagesTr/MSWAL_0122_0000.nii.gz', 'label': './labelsTr/MSWAL_0122.nii.gz'}, {'image': './imagesTr/MSWAL_0124_0000.nii.gz', 'label': './labelsTr/MSWAL_0124.nii.gz'}, {'image': './imagesTr/MSWAL_0125_0000.nii.gz', 'label': './labelsTr/MSWAL_0125.nii.gz'}, {'image': './imagesTr/MSWAL_0126_0000.nii.gz', 'label': './labelsTr/MSWAL_0126.nii.gz'}, {'image': './imagesTr/MSWAL_0127_0000.nii.gz', 'label': './labelsTr/MSWAL_0127.nii.gz'}, {'image': './imagesTr/MSWAL_0128_0000.nii.gz', 'label': './labelsTr/MSWAL_0128.nii.gz'}, {'image': './imagesTr/MSWAL_0129_0000.nii.gz', 'label': './labelsTr/MSWAL_0129.nii.gz'}, {'image': './imagesTr/MSWAL_0130_0000.nii.gz', 'label': './labelsTr/MSWAL_0130.nii.gz'}, {'image': './imagesTr/MSWAL_0132_0000.nii.gz', 'label': './labelsTr/MSWAL_0132.nii.gz'}, {'image': './imagesTr/MSWAL_0133_0000.nii.gz', 'label': './labelsTr/MSWAL_0133.nii.gz'}, {'image': './imagesTr/MSWAL_0134_0000.nii.gz', 
'label': './labelsTr/MSWAL_0134.nii.gz'}, {'image': './imagesTr/MSWAL_0136_0000.nii.gz', 'label': './labelsTr/MSWAL_0136.nii.gz'}, {'image': './imagesTr/MSWAL_0138_0000.nii.gz', 'label': './labelsTr/MSWAL_0138.nii.gz'}, {'image': './imagesTr/MSWAL_0139_0000.nii.gz', 'label': './labelsTr/MSWAL_0139.nii.gz'}, {'image': './imagesTr/MSWAL_0140_0000.nii.gz', 'label': './labelsTr/MSWAL_0140.nii.gz'}, {'image': './imagesTr/MSWAL_0141_0000.nii.gz', 'label': './labelsTr/MSWAL_0141.nii.gz'}, {'image': './imagesTr/MSWAL_0142_0000.nii.gz', 'label': './labelsTr/MSWAL_0142.nii.gz'}, {'image': './imagesTr/MSWAL_0143_0000.nii.gz', 'label': './labelsTr/MSWAL_0143.nii.gz'}, {'image': './imagesTr/MSWAL_0145_0000.nii.gz', 'label': './labelsTr/MSWAL_0145.nii.gz'}, {'image': './imagesTr/MSWAL_0147_0000.nii.gz', 'label': './labelsTr/MSWAL_0147.nii.gz'}, {'image': './imagesTr/MSWAL_0148_0000.nii.gz', 'label': './labelsTr/MSWAL_0148.nii.gz'}, {'image': './imagesTr/MSWAL_0149_0000.nii.gz', 'label': './labelsTr/MSWAL_0149.nii.gz'}, {'image': './imagesTr/MSWAL_0150_0000.nii.gz', 'label': './labelsTr/MSWAL_0150.nii.gz'}, {'image': './imagesTr/MSWAL_0151_0000.nii.gz', 'label': './labelsTr/MSWAL_0151.nii.gz'}, {'image': './imagesTr/MSWAL_0152_0000.nii.gz', 'label': './labelsTr/MSWAL_0152.nii.gz'}, {'image': './imagesTr/MSWAL_0157_0000.nii.gz', 'label': './labelsTr/MSWAL_0157.nii.gz'}, {'image': './imagesTr/MSWAL_0159_0000.nii.gz', 'label': './labelsTr/MSWAL_0159.nii.gz'}, {'image': './imagesTr/MSWAL_0162_0000.nii.gz', 'label': './labelsTr/MSWAL_0162.nii.gz'}, {'image': './imagesTr/MSWAL_0163_0000.nii.gz', 'label': './labelsTr/MSWAL_0163.nii.gz'}, {'image': './imagesTr/MSWAL_0165_0000.nii.gz', 'label': './labelsTr/MSWAL_0165.nii.gz'}, {'image': './imagesTr/MSWAL_0166_0000.nii.gz', 'label': './labelsTr/MSWAL_0166.nii.gz'}, {'image': './imagesTr/MSWAL_0167_0000.nii.gz', 'label': './labelsTr/MSWAL_0167.nii.gz'}, {'image': './imagesTr/MSWAL_0168_0000.nii.gz', 'label': './labelsTr/MSWAL_0168.nii.gz'}, 
{"image": "./imagesTr/MSWAL_0169_0000.nii.gz", "label": "./labelsTr/MSWAL_0169.nii.gz"}, {"image": "./imagesTr/MSWAL_0170_0000.nii.gz", "label": "./labelsTr/MSWAL_0170.nii.gz"}, {"image": "./imagesTr/MSWAL_0171_0000.nii.gz", "label": "./labelsTr/MSWAL_0171.nii.gz"}, {"image": "./imagesTr/MSWAL_0172_0000.nii.gz", "label": "./labelsTr/MSWAL_0172.nii.gz"}, {"image": "./imagesTr/MSWAL_0173_0000.nii.gz", "label": "./labelsTr/MSWAL_0173.nii.gz"}, {"image": "./imagesTr/MSWAL_0174_0000.nii.gz", "label": "./labelsTr/MSWAL_0174.nii.gz"}, {"image": "./imagesTr/MSWAL_0175_0000.nii.gz", "label": "./labelsTr/MSWAL_0175.nii.gz"}, {"image": "./imagesTr/MSWAL_0176_0000.nii.gz", "label": "./labelsTr/MSWAL_0176.nii.gz"}, {"image": "./imagesTr/MSWAL_0177_0000.nii.gz", "label": "./labelsTr/MSWAL_0177.nii.gz"}, {"image": "./imagesTr/MSWAL_0178_0000.nii.gz", "label": "./labelsTr/MSWAL_0178.nii.gz"}, {"image": "./imagesTr/MSWAL_0179_0000.nii.gz", "label": "./labelsTr/MSWAL_0179.nii.gz"}, {"image": "./imagesTr/MSWAL_0180_0000.nii.gz", "label": "./labelsTr/MSWAL_0180.nii.gz"}, {"image": "./imagesTr/MSWAL_0182_0000.nii.gz", "label": "./labelsTr/MSWAL_0182.nii.gz"}, {"image": "./imagesTr/MSWAL_0183_0000.nii.gz", "label": "./labelsTr/MSWAL_0183.nii.gz"}, {"image": "./imagesTr/MSWAL_0184_0000.nii.gz", "label": "./labelsTr/MSWAL_0184.nii.gz"}, {"image": "./imagesTr/MSWAL_0185_0000.nii.gz", "label": "./labelsTr/MSWAL_0185.nii.gz"}, {"image": "./imagesTr/MSWAL_0186_0000.nii.gz", "label": "./labelsTr/MSWAL_0186.nii.gz"}, {"image": "./imagesTr/MSWAL_0187_0000.nii.gz", "label": "./labelsTr/MSWAL_0187.nii.gz"}, {"image": "./imagesTr/MSWAL_0188_0000.nii.gz", "label": "./labelsTr/MSWAL_0188.nii.gz"}, {"image": "./imagesTr/MSWAL_0189_0000.nii.gz", "label": "./labelsTr/MSWAL_0189.nii.gz"}, {"image": "./imagesTr/MSWAL_0193_0000.nii.gz", "label": "./labelsTr/MSWAL_0193.nii.gz"}, {"image": "./imagesTr/MSWAL_0194_0000.nii.gz", "label": "./labelsTr/MSWAL_0194.nii.gz"}, {"image": 
"./imagesTr/MSWAL_0195_0000.nii.gz", "label": "./labelsTr/MSWAL_0195.nii.gz"}, {"image": "./imagesTr/MSWAL_0199_0000.nii.gz", "label": "./labelsTr/MSWAL_0199.nii.gz"}, {"image": "./imagesTr/MSWAL_0201_0000.nii.gz", "label": "./labelsTr/MSWAL_0201.nii.gz"}, {"image": "./imagesTr/MSWAL_0202_0000.nii.gz", "label": "./labelsTr/MSWAL_0202.nii.gz"}, {"image": "./imagesTr/MSWAL_0203_0000.nii.gz", "label": "./labelsTr/MSWAL_0203.nii.gz"}, {"image": "./imagesTr/MSWAL_0204_0000.nii.gz", "label": "./labelsTr/MSWAL_0204.nii.gz"}, {"image": "./imagesTr/MSWAL_0207_0000.nii.gz", "label": "./labelsTr/MSWAL_0207.nii.gz"}, {"image": "./imagesTr/MSWAL_0208_0000.nii.gz", "label": "./labelsTr/MSWAL_0208.nii.gz"}, {"image": "./imagesTr/MSWAL_0209_0000.nii.gz", "label": "./labelsTr/MSWAL_0209.nii.gz"}, {"image": "./imagesTr/MSWAL_0214_0000.nii.gz", "label": "./labelsTr/MSWAL_0214.nii.gz"}, {"image": "./imagesTr/MSWAL_0217_0000.nii.gz", "label": "./labelsTr/MSWAL_0217.nii.gz"}, {"image": "./imagesTr/MSWAL_0218_0000.nii.gz", "label": "./labelsTr/MSWAL_0218.nii.gz"}, {"image": "./imagesTr/MSWAL_0219_0000.nii.gz", "label": "./labelsTr/MSWAL_0219.nii.gz"}, {"image": "./imagesTr/MSWAL_0220_0000.nii.gz", "label": "./labelsTr/MSWAL_0220.nii.gz"}, {"image": "./imagesTr/MSWAL_0221_0000.nii.gz", "label": "./labelsTr/MSWAL_0221.nii.gz"}, {"image": "./imagesTr/MSWAL_0222_0000.nii.gz", "label": "./labelsTr/MSWAL_0222.nii.gz"}, {"image": "./imagesTr/MSWAL_0223_0000.nii.gz", "label": "./labelsTr/MSWAL_0223.nii.gz"}, {"image": "./imagesTr/MSWAL_0224_0000.nii.gz", "label": "./labelsTr/MSWAL_0224.nii.gz"}, {"image": "./imagesTr/MSWAL_0225_0000.nii.gz", "label": "./labelsTr/MSWAL_0225.nii.gz"}, {"image": "./imagesTr/MSWAL_0226_0000.nii.gz", "label": "./labelsTr/MSWAL_0226.nii.gz"}, {"image": "./imagesTr/MSWAL_0227_0000.nii.gz", "label": "./labelsTr/MSWAL_0227.nii.gz"}, {"image": "./imagesTr/MSWAL_0228_0000.nii.gz", "label": "./labelsTr/MSWAL_0228.nii.gz"}, {"image": "./imagesTr/MSWAL_0229_0000.nii.gz", 
"label": "./labelsTr/MSWAL_0229.nii.gz"}, {"image": "./imagesTr/MSWAL_0230_0000.nii.gz", "label": "./labelsTr/MSWAL_0230.nii.gz"}, {"image": "./imagesTr/MSWAL_0233_0000.nii.gz", "label": "./labelsTr/MSWAL_0233.nii.gz"}, {"image": "./imagesTr/MSWAL_0234_0000.nii.gz", "label": "./labelsTr/MSWAL_0234.nii.gz"}, {"image": "./imagesTr/MSWAL_0238_0000.nii.gz", "label": "./labelsTr/MSWAL_0238.nii.gz"}, {"image": "./imagesTr/MSWAL_0241_0000.nii.gz", "label": "./labelsTr/MSWAL_0241.nii.gz"}, {"image": "./imagesTr/MSWAL_0242_0000.nii.gz", "label": "./labelsTr/MSWAL_0242.nii.gz"}, {"image": "./imagesTr/MSWAL_0243_0000.nii.gz", "label": "./labelsTr/MSWAL_0243.nii.gz"}, {"image": "./imagesTr/MSWAL_0245_0000.nii.gz", "label": "./labelsTr/MSWAL_0245.nii.gz"}, {"image": "./imagesTr/MSWAL_0246_0000.nii.gz", "label": "./labelsTr/MSWAL_0246.nii.gz"}, {"image": "./imagesTr/MSWAL_0247_0000.nii.gz", "label": "./labelsTr/MSWAL_0247.nii.gz"}, {"image": "./imagesTr/MSWAL_0248_0000.nii.gz", "label": "./labelsTr/MSWAL_0248.nii.gz"}, {"image": "./imagesTr/MSWAL_0251_0000.nii.gz", "label": "./labelsTr/MSWAL_0251.nii.gz"}, {"image": "./imagesTr/MSWAL_0252_0000.nii.gz", "label": "./labelsTr/MSWAL_0252.nii.gz"}, {"image": "./imagesTr/MSWAL_0253_0000.nii.gz", "label": "./labelsTr/MSWAL_0253.nii.gz"}, {"image": "./imagesTr/MSWAL_0254_0000.nii.gz", "label": "./labelsTr/MSWAL_0254.nii.gz"}, {"image": "./imagesTr/MSWAL_0255_0000.nii.gz", "label": "./labelsTr/MSWAL_0255.nii.gz"}, {"image": "./imagesTr/MSWAL_0256_0000.nii.gz", "label": "./labelsTr/MSWAL_0256.nii.gz"}, {"image": "./imagesTr/MSWAL_0257_0000.nii.gz", "label": "./labelsTr/MSWAL_0257.nii.gz"}, {"image": "./imagesTr/MSWAL_0258_0000.nii.gz", "label": "./labelsTr/MSWAL_0258.nii.gz"}, {"image": "./imagesTr/MSWAL_0259_0000.nii.gz", "label": "./labelsTr/MSWAL_0259.nii.gz"}, {"image": "./imagesTr/MSWAL_0260_0000.nii.gz", "label": "./labelsTr/MSWAL_0260.nii.gz"}, {"image": "./imagesTr/MSWAL_0261_0000.nii.gz", "label": "./labelsTr/MSWAL_0261.nii.gz"}, 
{"image": "./imagesTr/MSWAL_0262_0000.nii.gz", "label": "./labelsTr/MSWAL_0262.nii.gz"}, {"image": "./imagesTr/MSWAL_0263_0000.nii.gz", "label": "./labelsTr/MSWAL_0263.nii.gz"}, {"image": "./imagesTr/MSWAL_0264_0000.nii.gz", "label": "./labelsTr/MSWAL_0264.nii.gz"}, {"image": "./imagesTr/MSWAL_0265_0000.nii.gz", "label": "./labelsTr/MSWAL_0265.nii.gz"}, {"image": "./imagesTr/MSWAL_0267_0000.nii.gz", "label": "./labelsTr/MSWAL_0267.nii.gz"}, {"image": "./imagesTr/MSWAL_0270_0000.nii.gz", "label": "./labelsTr/MSWAL_0270.nii.gz"}, {"image": "./imagesTr/MSWAL_0271_0000.nii.gz", "label": "./labelsTr/MSWAL_0271.nii.gz"}, {"image": "./imagesTr/MSWAL_0272_0000.nii.gz", "label": "./labelsTr/MSWAL_0272.nii.gz"}, {"image": "./imagesTr/MSWAL_0273_0000.nii.gz", "label": "./labelsTr/MSWAL_0273.nii.gz"}, {"image": "./imagesTr/MSWAL_0274_0000.nii.gz", "label": "./labelsTr/MSWAL_0274.nii.gz"}, {"image": "./imagesTr/MSWAL_0275_0000.nii.gz", "label": "./labelsTr/MSWAL_0275.nii.gz"}, {"image": "./imagesTr/MSWAL_0276_0000.nii.gz", "label": "./labelsTr/MSWAL_0276.nii.gz"}, {"image": "./imagesTr/MSWAL_0277_0000.nii.gz", "label": "./labelsTr/MSWAL_0277.nii.gz"}, {"image": "./imagesTr/MSWAL_0278_0000.nii.gz", "label": "./labelsTr/MSWAL_0278.nii.gz"}, {"image": "./imagesTr/MSWAL_0279_0000.nii.gz", "label": "./labelsTr/MSWAL_0279.nii.gz"}, {"image": "./imagesTr/MSWAL_0281_0000.nii.gz", "label": "./labelsTr/MSWAL_0281.nii.gz"}, {"image": "./imagesTr/MSWAL_0282_0000.nii.gz", "label": "./labelsTr/MSWAL_0282.nii.gz"}, {"image": "./imagesTr/MSWAL_0283_0000.nii.gz", "label": "./labelsTr/MSWAL_0283.nii.gz"}, {"image": "./imagesTr/MSWAL_0284_0000.nii.gz", "label": "./labelsTr/MSWAL_0284.nii.gz"}, {"image": "./imagesTr/MSWAL_0285_0000.nii.gz", "label": "./labelsTr/MSWAL_0285.nii.gz"}, {"image": "./imagesTr/MSWAL_0288_0000.nii.gz", "label": "./labelsTr/MSWAL_0288.nii.gz"}, {"image": "./imagesTr/MSWAL_0289_0000.nii.gz", "label": "./labelsTr/MSWAL_0289.nii.gz"}, {"image": 
"./imagesTr/MSWAL_0290_0000.nii.gz", "label": "./labelsTr/MSWAL_0290.nii.gz"}, {"image": "./imagesTr/MSWAL_0293_0000.nii.gz", "label": "./labelsTr/MSWAL_0293.nii.gz"}, {"image": "./imagesTr/MSWAL_0296_0000.nii.gz", "label": "./labelsTr/MSWAL_0296.nii.gz"}, {"image": "./imagesTr/MSWAL_0297_0000.nii.gz", "label": "./labelsTr/MSWAL_0297.nii.gz"}, {"image": "./imagesTr/MSWAL_0301_0000.nii.gz", "label": "./labelsTr/MSWAL_0301.nii.gz"}, {"image": "./imagesTr/MSWAL_0302_0000.nii.gz", "label": "./labelsTr/MSWAL_0302.nii.gz"}, {"image": "./imagesTr/MSWAL_0303_0000.nii.gz", "label": "./labelsTr/MSWAL_0303.nii.gz"}, {"image": "./imagesTr/MSWAL_0306_0000.nii.gz", "label": "./labelsTr/MSWAL_0306.nii.gz"}, {"image": "./imagesTr/MSWAL_0307_0000.nii.gz", "label": "./labelsTr/MSWAL_0307.nii.gz"}, {"image": "./imagesTr/MSWAL_0308_0000.nii.gz", "label": "./labelsTr/MSWAL_0308.nii.gz"}, {"image": "./imagesTr/MSWAL_0311_0000.nii.gz", "label": "./labelsTr/MSWAL_0311.nii.gz"}, {"image": "./imagesTr/MSWAL_0312_0000.nii.gz", "label": "./labelsTr/MSWAL_0312.nii.gz"}, {"image": "./imagesTr/MSWAL_0313_0000.nii.gz", "label": "./labelsTr/MSWAL_0313.nii.gz"}, {"image": "./imagesTr/MSWAL_0314_0000.nii.gz", "label": "./labelsTr/MSWAL_0314.nii.gz"}, {"image": "./imagesTr/MSWAL_0316_0000.nii.gz", "label": "./labelsTr/MSWAL_0316.nii.gz"}, {"image": "./imagesTr/MSWAL_0317_0000.nii.gz", "label": "./labelsTr/MSWAL_0317.nii.gz"}, {"image": "./imagesTr/MSWAL_0318_0000.nii.gz", "label": "./labelsTr/MSWAL_0318.nii.gz"}, {"image": "./imagesTr/MSWAL_0320_0000.nii.gz", "label": "./labelsTr/MSWAL_0320.nii.gz"}, {"image": "./imagesTr/MSWAL_0323_0000.nii.gz", "label": "./labelsTr/MSWAL_0323.nii.gz"}, {"image": "./imagesTr/MSWAL_0324_0000.nii.gz", "label": "./labelsTr/MSWAL_0324.nii.gz"}, {"image": "./imagesTr/MSWAL_0326_0000.nii.gz", "label": "./labelsTr/MSWAL_0326.nii.gz"}, {"image": "./imagesTr/MSWAL_0327_0000.nii.gz", "label": "./labelsTr/MSWAL_0327.nii.gz"}, {"image": "./imagesTr/MSWAL_0328_0000.nii.gz", 
"label": "./labelsTr/MSWAL_0328.nii.gz"}, {"image": "./imagesTr/MSWAL_0330_0000.nii.gz", "label": "./labelsTr/MSWAL_0330.nii.gz"}, {"image": "./imagesTr/MSWAL_0331_0000.nii.gz", "label": "./labelsTr/MSWAL_0331.nii.gz"}, {"image": "./imagesTr/MSWAL_0332_0000.nii.gz", "label": "./labelsTr/MSWAL_0332.nii.gz"}, {"image": "./imagesTr/MSWAL_0333_0000.nii.gz", "label": "./labelsTr/MSWAL_0333.nii.gz"}, {"image": "./imagesTr/MSWAL_0334_0000.nii.gz", "label": "./labelsTr/MSWAL_0334.nii.gz"}, {"image": "./imagesTr/MSWAL_0335_0000.nii.gz", "label": "./labelsTr/MSWAL_0335.nii.gz"}, {"image": "./imagesTr/MSWAL_0336_0000.nii.gz", "label": "./labelsTr/MSWAL_0336.nii.gz"}, {"image": "./imagesTr/MSWAL_0337_0000.nii.gz", "label": "./labelsTr/MSWAL_0337.nii.gz"}, {"image": "./imagesTr/MSWAL_0338_0000.nii.gz", "label": "./labelsTr/MSWAL_0338.nii.gz"}, {"image": "./imagesTr/MSWAL_0341_0000.nii.gz", "label": "./labelsTr/MSWAL_0341.nii.gz"}, {"image": "./imagesTr/MSWAL_0342_0000.nii.gz", "label": "./labelsTr/MSWAL_0342.nii.gz"}, {"image": "./imagesTr/MSWAL_0343_0000.nii.gz", "label": "./labelsTr/MSWAL_0343.nii.gz"}, {"image": "./imagesTr/MSWAL_0344_0000.nii.gz", "label": "./labelsTr/MSWAL_0344.nii.gz"}, {"image": "./imagesTr/MSWAL_0345_0000.nii.gz", "label": "./labelsTr/MSWAL_0345.nii.gz"}, {"image": "./imagesTr/MSWAL_0346_0000.nii.gz", "label": "./labelsTr/MSWAL_0346.nii.gz"}, {"image": "./imagesTr/MSWAL_0348_0000.nii.gz", "label": "./labelsTr/MSWAL_0348.nii.gz"}, {"image": "./imagesTr/MSWAL_0353_0000.nii.gz", "label": "./labelsTr/MSWAL_0353.nii.gz"}, {"image": "./imagesTr/MSWAL_0354_0000.nii.gz", "label": "./labelsTr/MSWAL_0354.nii.gz"}, {"image": "./imagesTr/MSWAL_0355_0000.nii.gz", "label": "./labelsTr/MSWAL_0355.nii.gz"}, {"image": "./imagesTr/MSWAL_0356_0000.nii.gz", "label": "./labelsTr/MSWAL_0356.nii.gz"}, {"image": "./imagesTr/MSWAL_0357_0000.nii.gz", "label": "./labelsTr/MSWAL_0357.nii.gz"}, {"image": "./imagesTr/MSWAL_0360_0000.nii.gz", "label": "./labelsTr/MSWAL_0360.nii.gz"}, 
{"image": "./imagesTr/MSWAL_0361_0000.nii.gz", "label": "./labelsTr/MSWAL_0361.nii.gz"}, {"image": "./imagesTr/MSWAL_0362_0000.nii.gz", "label": "./labelsTr/MSWAL_0362.nii.gz"}, {"image": "./imagesTr/MSWAL_0363_0000.nii.gz", "label": "./labelsTr/MSWAL_0363.nii.gz"}, {"image": "./imagesTr/MSWAL_0365_0000.nii.gz", "label": "./labelsTr/MSWAL_0365.nii.gz"}, {"image": "./imagesTr/MSWAL_0366_0000.nii.gz", "label": "./labelsTr/MSWAL_0366.nii.gz"}, {"image": "./imagesTr/MSWAL_0369_0000.nii.gz", "label": "./labelsTr/MSWAL_0369.nii.gz"}, {"image": "./imagesTr/MSWAL_0370_0000.nii.gz", "label": "./labelsTr/MSWAL_0370.nii.gz"}, {"image": "./imagesTr/MSWAL_0373_0000.nii.gz", "label": "./labelsTr/MSWAL_0373.nii.gz"}, {"image": "./imagesTr/MSWAL_0374_0000.nii.gz", "label": "./labelsTr/MSWAL_0374.nii.gz"}, {"image": "./imagesTr/MSWAL_0375_0000.nii.gz", "label": "./labelsTr/MSWAL_0375.nii.gz"}, {"image": "./imagesTr/MSWAL_0376_0000.nii.gz", "label": "./labelsTr/MSWAL_0376.nii.gz"}, {"image": "./imagesTr/MSWAL_0378_0000.nii.gz", "label": "./labelsTr/MSWAL_0378.nii.gz"}, {"image": "./imagesTr/MSWAL_0379_0000.nii.gz", "label": "./labelsTr/MSWAL_0379.nii.gz"}, {"image": "./imagesTr/MSWAL_0380_0000.nii.gz", "label": "./labelsTr/MSWAL_0380.nii.gz"}, {"image": "./imagesTr/MSWAL_0381_0000.nii.gz", "label": "./labelsTr/MSWAL_0381.nii.gz"}, {"image": "./imagesTr/MSWAL_0382_0000.nii.gz", "label": "./labelsTr/MSWAL_0382.nii.gz"}, {"image": "./imagesTr/MSWAL_0387_0000.nii.gz", "label": "./labelsTr/MSWAL_0387.nii.gz"}, {"image": "./imagesTr/MSWAL_0388_0000.nii.gz", "label": "./labelsTr/MSWAL_0388.nii.gz"}, {"image": "./imagesTr/MSWAL_0389_0000.nii.gz", "label": "./labelsTr/MSWAL_0389.nii.gz"}, {"image": "./imagesTr/MSWAL_0390_0000.nii.gz", "label": "./labelsTr/MSWAL_0390.nii.gz"}, {"image": "./imagesTr/MSWAL_0391_0000.nii.gz", "label": "./labelsTr/MSWAL_0391.nii.gz"}, {"image": "./imagesTr/MSWAL_0392_0000.nii.gz", "label": "./labelsTr/MSWAL_0392.nii.gz"}, {"image": 
"./imagesTr/MSWAL_0393_0000.nii.gz", "label": "./labelsTr/MSWAL_0393.nii.gz"}, {"image": "./imagesTr/MSWAL_0397_0000.nii.gz", "label": "./labelsTr/MSWAL_0397.nii.gz"}, {"image": "./imagesTr/MSWAL_0398_0000.nii.gz", "label": "./labelsTr/MSWAL_0398.nii.gz"}, {"image": "./imagesTr/MSWAL_0399_0000.nii.gz", "label": "./labelsTr/MSWAL_0399.nii.gz"}, {"image": "./imagesTr/MSWAL_0400_0000.nii.gz", "label": "./labelsTr/MSWAL_0400.nii.gz"}, {"image": "./imagesTr/MSWAL_0402_0000.nii.gz", "label": "./labelsTr/MSWAL_0402.nii.gz"}, {"image": "./imagesTr/MSWAL_0403_0000.nii.gz", "label": "./labelsTr/MSWAL_0403.nii.gz"}, {"image": "./imagesTr/MSWAL_0407_0000.nii.gz", "label": "./labelsTr/MSWAL_0407.nii.gz"}, {"image": "./imagesTr/MSWAL_0409_0000.nii.gz", "label": "./labelsTr/MSWAL_0409.nii.gz"}, {"image": "./imagesTr/MSWAL_0410_0000.nii.gz", "label": "./labelsTr/MSWAL_0410.nii.gz"}, {"image": "./imagesTr/MSWAL_0411_0000.nii.gz", "label": "./labelsTr/MSWAL_0411.nii.gz"}, {"image": "./imagesTr/MSWAL_0412_0000.nii.gz", "label": "./labelsTr/MSWAL_0412.nii.gz"}, {"image": "./imagesTr/MSWAL_0414_0000.nii.gz", "label": "./labelsTr/MSWAL_0414.nii.gz"}, {"image": "./imagesTr/MSWAL_0415_0000.nii.gz", "label": "./labelsTr/MSWAL_0415.nii.gz"}, {"image": "./imagesTr/MSWAL_0416_0000.nii.gz", "label": "./labelsTr/MSWAL_0416.nii.gz"}, {"image": "./imagesTr/MSWAL_0417_0000.nii.gz", "label": "./labelsTr/MSWAL_0417.nii.gz"}, {"image": "./imagesTr/MSWAL_0418_0000.nii.gz", "label": "./labelsTr/MSWAL_0418.nii.gz"}, {"image": "./imagesTr/MSWAL_0419_0000.nii.gz", "label": "./labelsTr/MSWAL_0419.nii.gz"}, {"image": "./imagesTr/MSWAL_0420_0000.nii.gz", "label": "./labelsTr/MSWAL_0420.nii.gz"}, {"image": "./imagesTr/MSWAL_0421_0000.nii.gz", "label": "./labelsTr/MSWAL_0421.nii.gz"}, {"image": "./imagesTr/MSWAL_0422_0000.nii.gz", "label": "./labelsTr/MSWAL_0422.nii.gz"}, {"image": "./imagesTr/MSWAL_0423_0000.nii.gz", "label": "./labelsTr/MSWAL_0423.nii.gz"}, {"image": "./imagesTr/MSWAL_0425_0000.nii.gz", 
"label": "./labelsTr/MSWAL_0425.nii.gz"}, {"image": "./imagesTr/MSWAL_0426_0000.nii.gz", "label": "./labelsTr/MSWAL_0426.nii.gz"}, {"image": "./imagesTr/MSWAL_0427_0000.nii.gz", "label": "./labelsTr/MSWAL_0427.nii.gz"}, {"image": "./imagesTr/MSWAL_0428_0000.nii.gz", "label": "./labelsTr/MSWAL_0428.nii.gz"}, {"image": "./imagesTr/MSWAL_0429_0000.nii.gz", "label": "./labelsTr/MSWAL_0429.nii.gz"}, {"image": "./imagesTr/MSWAL_0430_0000.nii.gz", "label": "./labelsTr/MSWAL_0430.nii.gz"}, {"image": "./imagesTr/MSWAL_0431_0000.nii.gz", "label": "./labelsTr/MSWAL_0431.nii.gz"}, {"image": "./imagesTr/MSWAL_0432_0000.nii.gz", "label": "./labelsTr/MSWAL_0432.nii.gz"}, {"image": "./imagesTr/MSWAL_0434_0000.nii.gz", "label": "./labelsTr/MSWAL_0434.nii.gz"}, {"image": "./imagesTr/MSWAL_0435_0000.nii.gz", "label": "./labelsTr/MSWAL_0435.nii.gz"}, {"image": "./imagesTr/MSWAL_0436_0000.nii.gz", "label": "./labelsTr/MSWAL_0436.nii.gz"}, {"image": "./imagesTr/MSWAL_0437_0000.nii.gz", "label": "./labelsTr/MSWAL_0437.nii.gz"}, {"image": "./imagesTr/MSWAL_0438_0000.nii.gz", "label": "./labelsTr/MSWAL_0438.nii.gz"}, {"image": "./imagesTr/MSWAL_0439_0000.nii.gz", "label": "./labelsTr/MSWAL_0439.nii.gz"}, {"image": "./imagesTr/MSWAL_0440_0000.nii.gz", "label": "./labelsTr/MSWAL_0440.nii.gz"}, {"image": "./imagesTr/MSWAL_0442_0000.nii.gz", "label": "./labelsTr/MSWAL_0442.nii.gz"}, {"image": "./imagesTr/MSWAL_0446_0000.nii.gz", "label": "./labelsTr/MSWAL_0446.nii.gz"}, {"image": "./imagesTr/MSWAL_0447_0000.nii.gz", "label": "./labelsTr/MSWAL_0447.nii.gz"}, {"image": "./imagesTr/MSWAL_0452_0000.nii.gz", "label": "./labelsTr/MSWAL_0452.nii.gz"}, {"image": "./imagesTr/MSWAL_0453_0000.nii.gz", "label": "./labelsTr/MSWAL_0453.nii.gz"}, {"image": "./imagesTr/MSWAL_0455_0000.nii.gz", "label": "./labelsTr/MSWAL_0455.nii.gz"}, {"image": "./imagesTr/MSWAL_0457_0000.nii.gz", "label": "./labelsTr/MSWAL_0457.nii.gz"}, {"image": "./imagesTr/MSWAL_0460_0000.nii.gz", "label": "./labelsTr/MSWAL_0460.nii.gz"}, 
{"image": "./imagesTr/MSWAL_0461_0000.nii.gz", "label": "./labelsTr/MSWAL_0461.nii.gz"}, {"image": "./imagesTr/MSWAL_0463_0000.nii.gz", "label": "./labelsTr/MSWAL_0463.nii.gz"}, {"image": "./imagesTr/MSWAL_0464_0000.nii.gz", "label": "./labelsTr/MSWAL_0464.nii.gz"}, {"image": "./imagesTr/MSWAL_0465_0000.nii.gz", "label": "./labelsTr/MSWAL_0465.nii.gz"}, {"image": "./imagesTr/MSWAL_0466_0000.nii.gz", "label": "./labelsTr/MSWAL_0466.nii.gz"}, {"image": "./imagesTr/MSWAL_0468_0000.nii.gz", "label": "./labelsTr/MSWAL_0468.nii.gz"}, {"image": "./imagesTr/MSWAL_0470_0000.nii.gz", "label": "./labelsTr/MSWAL_0470.nii.gz"}, {"image": "./imagesTr/MSWAL_0471_0000.nii.gz", "label": "./labelsTr/MSWAL_0471.nii.gz"}, {"image": "./imagesTr/MSWAL_0473_0000.nii.gz", "label": "./labelsTr/MSWAL_0473.nii.gz"}, {"image": "./imagesTr/MSWAL_0474_0000.nii.gz", "label": "./labelsTr/MSWAL_0474.nii.gz"}, {"image": "./imagesTr/MSWAL_0475_0000.nii.gz", "label": "./labelsTr/MSWAL_0475.nii.gz"}, {"image": "./imagesTr/MSWAL_0476_0000.nii.gz", "label": "./labelsTr/MSWAL_0476.nii.gz"}, {"image": "./imagesTr/MSWAL_0477_0000.nii.gz", "label": "./labelsTr/MSWAL_0477.nii.gz"}, {"image": "./imagesTr/MSWAL_0479_0000.nii.gz", "label": "./labelsTr/MSWAL_0479.nii.gz"}, {"image": "./imagesTr/MSWAL_0480_0000.nii.gz", "label": "./labelsTr/MSWAL_0480.nii.gz"}, {"image": "./imagesTr/MSWAL_0482_0000.nii.gz", "label": "./labelsTr/MSWAL_0482.nii.gz"}, {"image": "./imagesTr/MSWAL_0483_0000.nii.gz", "label": "./labelsTr/MSWAL_0483.nii.gz"}, {"image": "./imagesTr/MSWAL_0484_0000.nii.gz", "label": "./labelsTr/MSWAL_0484.nii.gz"}, {"image": "./imagesTr/MSWAL_0485_0000.nii.gz", "label": "./labelsTr/MSWAL_0485.nii.gz"}, {"image": "./imagesTr/MSWAL_0486_0000.nii.gz", "label": "./labelsTr/MSWAL_0486.nii.gz"}, {"image": "./imagesTr/MSWAL_0487_0000.nii.gz", "label": "./labelsTr/MSWAL_0487.nii.gz"}, {"image": "./imagesTr/MSWAL_0488_0000.nii.gz", "label": "./labelsTr/MSWAL_0488.nii.gz"}, {"image": 
"./imagesTr/MSWAL_0489_0000.nii.gz", "label": "./labelsTr/MSWAL_0489.nii.gz"}, {"image": "./imagesTr/MSWAL_0490_0000.nii.gz", "label": "./labelsTr/MSWAL_0490.nii.gz"}, {"image": "./imagesTr/MSWAL_0491_0000.nii.gz", "label": "./labelsTr/MSWAL_0491.nii.gz"}, {"image": "./imagesTr/MSWAL_0492_0000.nii.gz", "label": "./labelsTr/MSWAL_0492.nii.gz"}, {"image": "./imagesTr/MSWAL_0493_0000.nii.gz", "label": "./labelsTr/MSWAL_0493.nii.gz"}, {"image": "./imagesTr/MSWAL_0495_0000.nii.gz", "label": "./labelsTr/MSWAL_0495.nii.gz"}, {"image": "./imagesTr/MSWAL_0497_0000.nii.gz", "label": "./labelsTr/MSWAL_0497.nii.gz"}, {"image": "./imagesTr/MSWAL_0498_0000.nii.gz", "label": "./labelsTr/MSWAL_0498.nii.gz"}, {"image": "./imagesTr/MSWAL_0500_0000.nii.gz", "label": "./labelsTr/MSWAL_0500.nii.gz"}, {"image": "./imagesTr/MSWAL_0501_0000.nii.gz", "label": "./labelsTr/MSWAL_0501.nii.gz"}, {"image": "./imagesTr/MSWAL_0504_0000.nii.gz", "label": "./labelsTr/MSWAL_0504.nii.gz"}, {"image": "./imagesTr/MSWAL_0505_0000.nii.gz", "label": "./labelsTr/MSWAL_0505.nii.gz"}, {"image": "./imagesTr/MSWAL_0506_0000.nii.gz", "label": "./labelsTr/MSWAL_0506.nii.gz"}, {"image": "./imagesTr/MSWAL_0507_0000.nii.gz", "label": "./labelsTr/MSWAL_0507.nii.gz"}, {"image": "./imagesTr/MSWAL_0508_0000.nii.gz", "label": "./labelsTr/MSWAL_0508.nii.gz"}, {"image": "./imagesTr/MSWAL_0509_0000.nii.gz", "label": "./labelsTr/MSWAL_0509.nii.gz"}, {"image": "./imagesTr/MSWAL_0510_0000.nii.gz", "label": "./labelsTr/MSWAL_0510.nii.gz"}, {"image": "./imagesTr/MSWAL_0512_0000.nii.gz", "label": "./labelsTr/MSWAL_0512.nii.gz"}, {"image": "./imagesTr/MSWAL_0516_0000.nii.gz", "label": "./labelsTr/MSWAL_0516.nii.gz"}, {"image": "./imagesTr/MSWAL_0518_0000.nii.gz", "label": "./labelsTr/MSWAL_0518.nii.gz"}, {"image": "./imagesTr/MSWAL_0519_0000.nii.gz", "label": "./labelsTr/MSWAL_0519.nii.gz"}, {"image": "./imagesTr/MSWAL_0521_0000.nii.gz", "label": "./labelsTr/MSWAL_0521.nii.gz"}, {"image": "./imagesTr/MSWAL_0522_0000.nii.gz", 
"label": "./labelsTr/MSWAL_0522.nii.gz"}, {"image": "./imagesTr/MSWAL_0523_0000.nii.gz", "label": "./labelsTr/MSWAL_0523.nii.gz"}, {"image": "./imagesTr/MSWAL_0524_0000.nii.gz", "label": "./labelsTr/MSWAL_0524.nii.gz"}, {"image": "./imagesTr/MSWAL_0526_0000.nii.gz", "label": "./labelsTr/MSWAL_0526.nii.gz"}, {"image": "./imagesTr/MSWAL_0527_0000.nii.gz", "label": "./labelsTr/MSWAL_0527.nii.gz"}, {"image": "./imagesTr/MSWAL_0530_0000.nii.gz", "label": "./labelsTr/MSWAL_0530.nii.gz"}, {"image": "./imagesTr/MSWAL_0531_0000.nii.gz", "label": "./labelsTr/MSWAL_0531.nii.gz"}, {"image": "./imagesTr/MSWAL_0534_0000.nii.gz", "label": "./labelsTr/MSWAL_0534.nii.gz"}, {"image": "./imagesTr/MSWAL_0535_0000.nii.gz", "label": "./labelsTr/MSWAL_0535.nii.gz"}, {"image": "./imagesTr/MSWAL_0536_0000.nii.gz", "label": "./labelsTr/MSWAL_0536.nii.gz"}, {"image": "./imagesTr/MSWAL_0538_0000.nii.gz", "label": "./labelsTr/MSWAL_0538.nii.gz"}, {"image": "./imagesTr/MSWAL_0539_0000.nii.gz", "label": "./labelsTr/MSWAL_0539.nii.gz"}, {"image": "./imagesTr/MSWAL_0540_0000.nii.gz", "label": "./labelsTr/MSWAL_0540.nii.gz"}, {"image": "./imagesTr/MSWAL_0542_0000.nii.gz", "label": "./labelsTr/MSWAL_0542.nii.gz"}, {"image": "./imagesTr/MSWAL_0544_0000.nii.gz", "label": "./labelsTr/MSWAL_0544.nii.gz"}, {"image": "./imagesTr/MSWAL_0545_0000.nii.gz", "label": "./labelsTr/MSWAL_0545.nii.gz"}, {"image": "./imagesTr/MSWAL_0546_0000.nii.gz", "label": "./labelsTr/MSWAL_0546.nii.gz"}, {"image": "./imagesTr/MSWAL_0547_0000.nii.gz", "label": "./labelsTr/MSWAL_0547.nii.gz"}, {"image": "./imagesTr/MSWAL_0548_0000.nii.gz", "label": "./labelsTr/MSWAL_0548.nii.gz"}, {"image": "./imagesTr/MSWAL_0549_0000.nii.gz", "label": "./labelsTr/MSWAL_0549.nii.gz"}, {"image": "./imagesTr/MSWAL_0550_0000.nii.gz", "label": "./labelsTr/MSWAL_0550.nii.gz"}, {"image": "./imagesTr/MSWAL_0551_0000.nii.gz", "label": "./labelsTr/MSWAL_0551.nii.gz"}, {"image": "./imagesTr/MSWAL_0552_0000.nii.gz", "label": "./labelsTr/MSWAL_0552.nii.gz"}, 
{"image": "./imagesTr/MSWAL_0553_0000.nii.gz", "label": "./labelsTr/MSWAL_0553.nii.gz"}, {"image": "./imagesTr/MSWAL_0554_0000.nii.gz", "label": "./labelsTr/MSWAL_0554.nii.gz"}, {"image": "./imagesTr/MSWAL_0555_0000.nii.gz", "label": "./labelsTr/MSWAL_0555.nii.gz"}, {"image": "./imagesTr/MSWAL_0556_0000.nii.gz", "label": "./labelsTr/MSWAL_0556.nii.gz"}, {"image": "./imagesTr/MSWAL_0557_0000.nii.gz", "label": "./labelsTr/MSWAL_0557.nii.gz"}, {"image": "./imagesTr/MSWAL_0558_0000.nii.gz", "label": "./labelsTr/MSWAL_0558.nii.gz"}, {"image": "./imagesTr/MSWAL_0559_0000.nii.gz", "label": "./labelsTr/MSWAL_0559.nii.gz"}, {"image": "./imagesTr/MSWAL_0561_0000.nii.gz", "label": "./labelsTr/MSWAL_0561.nii.gz"}, {"image": "./imagesTr/MSWAL_0562_0000.nii.gz", "label": "./labelsTr/MSWAL_0562.nii.gz"}, {"image": "./imagesTr/MSWAL_0563_0000.nii.gz", "label": "./labelsTr/MSWAL_0563.nii.gz"}, {"image": "./imagesTr/MSWAL_0564_0000.nii.gz", "label": "./labelsTr/MSWAL_0564.nii.gz"}, {"image": "./imagesTr/MSWAL_0566_0000.nii.gz", "label": "./labelsTr/MSWAL_0566.nii.gz"}, {"image": "./imagesTr/MSWAL_0567_0000.nii.gz", "label": "./labelsTr/MSWAL_0567.nii.gz"}, {"image": "./imagesTr/MSWAL_0568_0000.nii.gz", "label": "./labelsTr/MSWAL_0568.nii.gz"}, {"image": "./imagesTr/MSWAL_0571_0000.nii.gz", "label": "./labelsTr/MSWAL_0571.nii.gz"}, {"image": "./imagesTr/MSWAL_0573_0000.nii.gz", "label": "./labelsTr/MSWAL_0573.nii.gz"}, {"image": "./imagesTr/MSWAL_0574_0000.nii.gz", "label": "./labelsTr/MSWAL_0574.nii.gz"}, {"image": "./imagesTr/MSWAL_0575_0000.nii.gz", "label": "./labelsTr/MSWAL_0575.nii.gz"}, {"image": "./imagesTr/MSWAL_0577_0000.nii.gz", "label": "./labelsTr/MSWAL_0577.nii.gz"}, {"image": "./imagesTr/MSWAL_0578_0000.nii.gz", "label": "./labelsTr/MSWAL_0578.nii.gz"}, {"image": "./imagesTr/MSWAL_0579_0000.nii.gz", "label": "./labelsTr/MSWAL_0579.nii.gz"}, {"image": "./imagesTr/MSWAL_0580_0000.nii.gz", "label": "./labelsTr/MSWAL_0580.nii.gz"}, {"image": 
"./imagesTr/MSWAL_0581_0000.nii.gz", "label": "./labelsTr/MSWAL_0581.nii.gz"}, {"image": "./imagesTr/MSWAL_0582_0000.nii.gz", "label": "./labelsTr/MSWAL_0582.nii.gz"}, {"image": "./imagesTr/MSWAL_0583_0000.nii.gz", "label": "./labelsTr/MSWAL_0583.nii.gz"}, {"image": "./imagesTr/MSWAL_0584_0000.nii.gz", "label": "./labelsTr/MSWAL_0584.nii.gz"}, {"image": "./imagesTr/MSWAL_0586_0000.nii.gz", "label": "./labelsTr/MSWAL_0586.nii.gz"}, {"image": "./imagesTr/MSWAL_0590_0000.nii.gz", "label": "./labelsTr/MSWAL_0590.nii.gz"}, {"image": "./imagesTr/MSWAL_0591_0000.nii.gz", "label": "./labelsTr/MSWAL_0591.nii.gz"}, {"image": "./imagesTr/MSWAL_0592_0000.nii.gz", "label": "./labelsTr/MSWAL_0592.nii.gz"}, {"image": "./imagesTr/MSWAL_0593_0000.nii.gz", "label": "./labelsTr/MSWAL_0593.nii.gz"}, {"image": "./imagesTr/MSWAL_0595_0000.nii.gz", "label": "./labelsTr/MSWAL_0595.nii.gz"}, {"image": "./imagesTr/MSWAL_0596_0000.nii.gz", "label": "./labelsTr/MSWAL_0596.nii.gz"}, {"image": "./imagesTr/MSWAL_0597_0000.nii.gz", "label": "./labelsTr/MSWAL_0597.nii.gz"}, {"image": "./imagesTr/MSWAL_0598_0000.nii.gz", "label": "./labelsTr/MSWAL_0598.nii.gz"}, {"image": "./imagesTr/MSWAL_0599_0000.nii.gz", "label": "./labelsTr/MSWAL_0599.nii.gz"}, {"image": "./imagesTr/MSWAL_0600_0000.nii.gz", "label": "./labelsTr/MSWAL_0600.nii.gz"}, {"image": "./imagesTr/MSWAL_0601_0000.nii.gz", "label": "./labelsTr/MSWAL_0601.nii.gz"}, {"image": "./imagesTr/MSWAL_0602_0000.nii.gz", "label": "./labelsTr/MSWAL_0602.nii.gz"}, {"image": "./imagesTr/MSWAL_0604_0000.nii.gz", "label": "./labelsTr/MSWAL_0604.nii.gz"}, {"image": "./imagesTr/MSWAL_0605_0000.nii.gz", "label": "./labelsTr/MSWAL_0605.nii.gz"}, {"image": "./imagesTr/MSWAL_0608_0000.nii.gz", "label": "./labelsTr/MSWAL_0608.nii.gz"}, {"image": "./imagesTr/MSWAL_0612_0000.nii.gz", "label": "./labelsTr/MSWAL_0612.nii.gz"}, {"image": "./imagesTr/MSWAL_0614_0000.nii.gz", "label": "./labelsTr/MSWAL_0614.nii.gz"}, {"image": "./imagesTr/MSWAL_0615_0000.nii.gz", 
"label": "./labelsTr/MSWAL_0615.nii.gz"}, {"image": "./imagesTr/MSWAL_0616_0000.nii.gz", "label": "./labelsTr/MSWAL_0616.nii.gz"}, {"image": "./imagesTr/MSWAL_0617_0000.nii.gz", "label": "./labelsTr/MSWAL_0617.nii.gz"}, {"image": "./imagesTr/MSWAL_0621_0000.nii.gz", "label": "./labelsTr/MSWAL_0621.nii.gz"}, {"image": "./imagesTr/MSWAL_0623_0000.nii.gz", "label": "./labelsTr/MSWAL_0623.nii.gz"}, {"image": "./imagesTr/MSWAL_0625_0000.nii.gz", "label": "./labelsTr/MSWAL_0625.nii.gz"}, {"image": "./imagesTr/MSWAL_0626_0000.nii.gz", "label": "./labelsTr/MSWAL_0626.nii.gz"}, {"image": "./imagesTr/MSWAL_0627_0000.nii.gz", "label": "./labelsTr/MSWAL_0627.nii.gz"}, {"image": "./imagesTr/MSWAL_0628_0000.nii.gz", "label": "./labelsTr/MSWAL_0628.nii.gz"}, {"image": "./imagesTr/MSWAL_0629_0000.nii.gz", "label": "./labelsTr/MSWAL_0629.nii.gz"}, {"image": "./imagesTr/MSWAL_0630_0000.nii.gz", "label": "./labelsTr/MSWAL_0630.nii.gz"}, {"image": "./imagesTr/MSWAL_0632_0000.nii.gz", "label": "./labelsTr/MSWAL_0632.nii.gz"}, {"image": "./imagesTr/MSWAL_0635_0000.nii.gz", "label": "./labelsTr/MSWAL_0635.nii.gz"}, {"image": "./imagesTr/MSWAL_0636_0000.nii.gz", "label": "./labelsTr/MSWAL_0636.nii.gz"}, {"image": "./imagesTr/MSWAL_0638_0000.nii.gz", "label": "./labelsTr/MSWAL_0638.nii.gz"}, {"image": "./imagesTr/MSWAL_0640_0000.nii.gz", "label": "./labelsTr/MSWAL_0640.nii.gz"}, {"image": "./imagesTr/MSWAL_0641_0000.nii.gz", "label": "./labelsTr/MSWAL_0641.nii.gz"}, {"image": "./imagesTr/MSWAL_0643_0000.nii.gz", "label": "./labelsTr/MSWAL_0643.nii.gz"}, {"image": "./imagesTr/MSWAL_0644_0000.nii.gz", "label": "./labelsTr/MSWAL_0644.nii.gz"}, {"image": "./imagesTr/MSWAL_0646_0000.nii.gz", "label": "./labelsTr/MSWAL_0646.nii.gz"}, {"image": "./imagesTr/MSWAL_0648_0000.nii.gz", "label": "./labelsTr/MSWAL_0648.nii.gz"}, {"image": "./imagesTr/MSWAL_0649_0000.nii.gz", "label": "./labelsTr/MSWAL_0649.nii.gz"}, {"image": "./imagesTr/MSWAL_0650_0000.nii.gz", "label": "./labelsTr/MSWAL_0650.nii.gz"}, 
{"image": "./imagesTr/MSWAL_0651_0000.nii.gz", "label": "./labelsTr/MSWAL_0651.nii.gz"}, {"image": "./imagesTr/MSWAL_0653_0000.nii.gz", "label": "./labelsTr/MSWAL_0653.nii.gz"}, {"image": "./imagesTr/MSWAL_0654_0000.nii.gz", "label": "./labelsTr/MSWAL_0654.nii.gz"}, {"image": "./imagesTr/MSWAL_0655_0000.nii.gz", "label": "./labelsTr/MSWAL_0655.nii.gz"}, {"image": "./imagesTr/MSWAL_0656_0000.nii.gz", "label": "./labelsTr/MSWAL_0656.nii.gz"}, {"image": "./imagesTr/MSWAL_0658_0000.nii.gz", "label": "./labelsTr/MSWAL_0658.nii.gz"}, {"image": "./imagesTr/MSWAL_0660_0000.nii.gz", "label": "./labelsTr/MSWAL_0660.nii.gz"}, {"image": "./imagesTr/MSWAL_0661_0000.nii.gz", "label": "./labelsTr/MSWAL_0661.nii.gz"}, {"image": "./imagesTr/MSWAL_0662_0000.nii.gz", "label": "./labelsTr/MSWAL_0662.nii.gz"}, {"image": "./imagesTr/MSWAL_0663_0000.nii.gz", "label": "./labelsTr/MSWAL_0663.nii.gz"}, {"image": "./imagesTr/MSWAL_0666_0000.nii.gz", "label": "./labelsTr/MSWAL_0666.nii.gz"}, {"image": "./imagesTr/MSWAL_0667_0000.nii.gz", "label": "./labelsTr/MSWAL_0667.nii.gz"}, {"image": "./imagesTr/MSWAL_0668_0000.nii.gz", "label": "./labelsTr/MSWAL_0668.nii.gz"}, {"image": "./imagesTr/MSWAL_0669_0000.nii.gz", "label": "./labelsTr/MSWAL_0669.nii.gz"}, {"image": "./imagesTr/MSWAL_0670_0000.nii.gz", "label": "./labelsTr/MSWAL_0670.nii.gz"}, {"image": "./imagesTr/MSWAL_0671_0000.nii.gz", "label": "./labelsTr/MSWAL_0671.nii.gz"}, {"image": "./imagesTr/MSWAL_0673_0000.nii.gz", "label": "./labelsTr/MSWAL_0673.nii.gz"}, {"image": "./imagesTr/MSWAL_0674_0000.nii.gz", "label": "./labelsTr/MSWAL_0674.nii.gz"}, {"image": "./imagesTr/MSWAL_0675_0000.nii.gz", "label": "./labelsTr/MSWAL_0675.nii.gz"}, {"image": "./imagesTr/MSWAL_0676_0000.nii.gz", "label": "./labelsTr/MSWAL_0676.nii.gz"}, {"image": "./imagesTr/MSWAL_0677_0000.nii.gz", "label": "./labelsTr/MSWAL_0677.nii.gz"}, {"image": "./imagesTr/MSWAL_0679_0000.nii.gz", "label": "./labelsTr/MSWAL_0679.nii.gz"}, {"image": 
"./imagesTr/MSWAL_0680_0000.nii.gz", "label": "./labelsTr/MSWAL_0680.nii.gz"}, {"image": "./imagesTr/MSWAL_0681_0000.nii.gz", "label": "./labelsTr/MSWAL_0681.nii.gz"}, {"image": "./imagesTr/MSWAL_0682_0000.nii.gz", "label": "./labelsTr/MSWAL_0682.nii.gz"}, {"image": "./imagesTr/MSWAL_0685_0000.nii.gz", "label": "./labelsTr/MSWAL_0685.nii.gz"}, {"image": "./imagesTr/MSWAL_0686_0000.nii.gz", "label": "./labelsTr/MSWAL_0686.nii.gz"}, {"image": "./imagesTr/MSWAL_0687_0000.nii.gz", "label": "./labelsTr/MSWAL_0687.nii.gz"}, {"image": "./imagesTr/MSWAL_0688_0000.nii.gz", "label": "./labelsTr/MSWAL_0688.nii.gz"}, {"image": "./imagesTr/MSWAL_0690_0000.nii.gz", "label": "./labelsTr/MSWAL_0690.nii.gz"}, {"image": "./imagesTr/MSWAL_0692_0000.nii.gz", "label": "./labelsTr/MSWAL_0692.nii.gz"}, {"image": "./imagesTr/MSWAL_0693_0000.nii.gz", "label": "./labelsTr/MSWAL_0693.nii.gz"}, {"image": "./imagesTr/MSWAL_0694_0000.nii.gz", "label": "./labelsTr/MSWAL_0694.nii.gz"}], "test": [{"image": "./imagesTs/MSWAL_0004_0000.nii.gz", "label": "./labelsTs/MSWAL_0004.nii.gz"}, {"image": "./imagesTs/MSWAL_0005_0000.nii.gz", "label": "./labelsTs/MSWAL_0005.nii.gz"}, {"image": "./imagesTs/MSWAL_0006_0000.nii.gz", "label": "./labelsTs/MSWAL_0006.nii.gz"}, {"image": "./imagesTs/MSWAL_0007_0000.nii.gz", "label": "./labelsTs/MSWAL_0007.nii.gz"}, {"image": "./imagesTs/MSWAL_0010_0000.nii.gz", "label": "./labelsTs/MSWAL_0010.nii.gz"}, {"image": "./imagesTs/MSWAL_0012_0000.nii.gz", "label": "./labelsTs/MSWAL_0012.nii.gz"}, {"image": "./imagesTs/MSWAL_0016_0000.nii.gz", "label": "./labelsTs/MSWAL_0016.nii.gz"}, {"image": "./imagesTs/MSWAL_0019_0000.nii.gz", "label": "./labelsTs/MSWAL_0019.nii.gz"}, {"image": "./imagesTs/MSWAL_0023_0000.nii.gz", "label": "./labelsTs/MSWAL_0023.nii.gz"}, {"image": "./imagesTs/MSWAL_0025_0000.nii.gz", "label": "./labelsTs/MSWAL_0025.nii.gz"}, {"image": "./imagesTs/MSWAL_0030_0000.nii.gz", "label": "./labelsTs/MSWAL_0030.nii.gz"}, {"image": 
"./imagesTs/MSWAL_0036_0000.nii.gz", "label": "./labelsTs/MSWAL_0036.nii.gz"}, {"image": "./imagesTs/MSWAL_0043_0000.nii.gz", "label": "./labelsTs/MSWAL_0043.nii.gz"}, {"image": "./imagesTs/MSWAL_0044_0000.nii.gz", "label": "./labelsTs/MSWAL_0044.nii.gz"}, {"image": "./imagesTs/MSWAL_0047_0000.nii.gz", "label": "./labelsTs/MSWAL_0047.nii.gz"}, {"image": "./imagesTs/MSWAL_0048_0000.nii.gz", "label": "./labelsTs/MSWAL_0048.nii.gz"}, {"image": "./imagesTs/MSWAL_0053_0000.nii.gz", "label": "./labelsTs/MSWAL_0053.nii.gz"}, {"image": "./imagesTs/MSWAL_0058_0000.nii.gz", "label": "./labelsTs/MSWAL_0058.nii.gz"}, {"image": "./imagesTs/MSWAL_0062_0000.nii.gz", "label": "./labelsTs/MSWAL_0062.nii.gz"}, {"image": "./imagesTs/MSWAL_0068_0000.nii.gz", "label": "./labelsTs/MSWAL_0068.nii.gz"}, {"image": "./imagesTs/MSWAL_0070_0000.nii.gz", "label": "./labelsTs/MSWAL_0070.nii.gz"}, {"image": "./imagesTs/MSWAL_0071_0000.nii.gz", "label": "./labelsTs/MSWAL_0071.nii.gz"}, {"image": "./imagesTs/MSWAL_0073_0000.nii.gz", "label": "./labelsTs/MSWAL_0073.nii.gz"}, {"image": "./imagesTs/MSWAL_0074_0000.nii.gz", "label": "./labelsTs/MSWAL_0074.nii.gz"}, {"image": "./imagesTs/MSWAL_0076_0000.nii.gz", "label": "./labelsTs/MSWAL_0076.nii.gz"}, {"image": "./imagesTs/MSWAL_0078_0000.nii.gz", "label": "./labelsTs/MSWAL_0078.nii.gz"}, {"image": "./imagesTs/MSWAL_0079_0000.nii.gz", "label": "./labelsTs/MSWAL_0079.nii.gz"}, {"image": "./imagesTs/MSWAL_0081_0000.nii.gz", "label": "./labelsTs/MSWAL_0081.nii.gz"}, {"image": "./imagesTs/MSWAL_0087_0000.nii.gz", "label": "./labelsTs/MSWAL_0087.nii.gz"}, {"image": "./imagesTs/MSWAL_0090_0000.nii.gz", "label": "./labelsTs/MSWAL_0090.nii.gz"}, {"image": "./imagesTs/MSWAL_0091_0000.nii.gz", "label": "./labelsTs/MSWAL_0091.nii.gz"}, {"image": "./imagesTs/MSWAL_0097_0000.nii.gz", "label": "./labelsTs/MSWAL_0097.nii.gz"}, {"image": "./imagesTs/MSWAL_0100_0000.nii.gz", "label": "./labelsTs/MSWAL_0100.nii.gz"}, {"image": "./imagesTs/MSWAL_0107_0000.nii.gz", 
"label": "./labelsTs/MSWAL_0107.nii.gz"}, {"image": "./imagesTs/MSWAL_0115_0000.nii.gz", "label": "./labelsTs/MSWAL_0115.nii.gz"}, {"image": "./imagesTs/MSWAL_0116_0000.nii.gz", "label": "./labelsTs/MSWAL_0116.nii.gz"}, {"image": "./imagesTs/MSWAL_0118_0000.nii.gz", "label": "./labelsTs/MSWAL_0118.nii.gz"}, {"image": "./imagesTs/MSWAL_0121_0000.nii.gz", "label": "./labelsTs/MSWAL_0121.nii.gz"}, {"image": "./imagesTs/MSWAL_0123_0000.nii.gz", "label": "./labelsTs/MSWAL_0123.nii.gz"}, {"image": "./imagesTs/MSWAL_0131_0000.nii.gz", "label": "./labelsTs/MSWAL_0131.nii.gz"}, {"image": "./imagesTs/MSWAL_0135_0000.nii.gz", "label": "./labelsTs/MSWAL_0135.nii.gz"}, {"image": "./imagesTs/MSWAL_0137_0000.nii.gz", "label": "./labelsTs/MSWAL_0137.nii.gz"}, {"image": "./imagesTs/MSWAL_0144_0000.nii.gz", "label": "./labelsTs/MSWAL_0144.nii.gz"}, {"image": "./imagesTs/MSWAL_0146_0000.nii.gz", "label": "./labelsTs/MSWAL_0146.nii.gz"}, {"image": "./imagesTs/MSWAL_0153_0000.nii.gz", "label": "./labelsTs/MSWAL_0153.nii.gz"}, {"image": "./imagesTs/MSWAL_0154_0000.nii.gz", "label": "./labelsTs/MSWAL_0154.nii.gz"}, {"image": "./imagesTs/MSWAL_0155_0000.nii.gz", "label": "./labelsTs/MSWAL_0155.nii.gz"}, {"image": "./imagesTs/MSWAL_0156_0000.nii.gz", "label": "./labelsTs/MSWAL_0156.nii.gz"}, {"image": "./imagesTs/MSWAL_0158_0000.nii.gz", "label": "./labelsTs/MSWAL_0158.nii.gz"}, {"image": "./imagesTs/MSWAL_0160_0000.nii.gz", "label": "./labelsTs/MSWAL_0160.nii.gz"}, {"image": "./imagesTs/MSWAL_0161_0000.nii.gz", "label": "./labelsTs/MSWAL_0161.nii.gz"}, {"image": "./imagesTs/MSWAL_0164_0000.nii.gz", "label": "./labelsTs/MSWAL_0164.nii.gz"}, {"image": "./imagesTs/MSWAL_0181_0000.nii.gz", "label": "./labelsTs/MSWAL_0181.nii.gz"}, {"image": "./imagesTs/MSWAL_0190_0000.nii.gz", "label": "./labelsTs/MSWAL_0190.nii.gz"}, {"image": "./imagesTs/MSWAL_0191_0000.nii.gz", "label": "./labelsTs/MSWAL_0191.nii.gz"}, {"image": "./imagesTs/MSWAL_0192_0000.nii.gz", "label": "./labelsTs/MSWAL_0192.nii.gz"}, 
{"image": "./imagesTs/MSWAL_0196_0000.nii.gz", "label": "./labelsTs/MSWAL_0196.nii.gz"}, {"image": "./imagesTs/MSWAL_0197_0000.nii.gz", "label": "./labelsTs/MSWAL_0197.nii.gz"}, {"image": "./imagesTs/MSWAL_0198_0000.nii.gz", "label": "./labelsTs/MSWAL_0198.nii.gz"}, {"image": "./imagesTs/MSWAL_0200_0000.nii.gz", "label": "./labelsTs/MSWAL_0200.nii.gz"}, {"image": "./imagesTs/MSWAL_0205_0000.nii.gz", "label": "./labelsTs/MSWAL_0205.nii.gz"}, {"image": "./imagesTs/MSWAL_0206_0000.nii.gz", "label": "./labelsTs/MSWAL_0206.nii.gz"}, {"image": "./imagesTs/MSWAL_0210_0000.nii.gz", "label": "./labelsTs/MSWAL_0210.nii.gz"}, {"image": "./imagesTs/MSWAL_0211_0000.nii.gz", "label": "./labelsTs/MSWAL_0211.nii.gz"}, {"image": "./imagesTs/MSWAL_0212_0000.nii.gz", "label": "./labelsTs/MSWAL_0212.nii.gz"}, {"image": "./imagesTs/MSWAL_0213_0000.nii.gz", "label": "./labelsTs/MSWAL_0213.nii.gz"}, {"image": "./imagesTs/MSWAL_0215_0000.nii.gz", "label": "./labelsTs/MSWAL_0215.nii.gz"}, {"image": "./imagesTs/MSWAL_0216_0000.nii.gz", "label": "./labelsTs/MSWAL_0216.nii.gz"}, {"image": "./imagesTs/MSWAL_0231_0000.nii.gz", "label": "./labelsTs/MSWAL_0231.nii.gz"}, {"image": "./imagesTs/MSWAL_0232_0000.nii.gz", "label": "./labelsTs/MSWAL_0232.nii.gz"}, {"image": "./imagesTs/MSWAL_0235_0000.nii.gz", "label": "./labelsTs/MSWAL_0235.nii.gz"}, {"image": "./imagesTs/MSWAL_0236_0000.nii.gz", "label": "./labelsTs/MSWAL_0236.nii.gz"}, {"image": "./imagesTs/MSWAL_0237_0000.nii.gz", "label": "./labelsTs/MSWAL_0237.nii.gz"}, {"image": "./imagesTs/MSWAL_0239_0000.nii.gz", "label": "./labelsTs/MSWAL_0239.nii.gz"}, {"image": "./imagesTs/MSWAL_0240_0000.nii.gz", "label": "./labelsTs/MSWAL_0240.nii.gz"}, {"image": "./imagesTs/MSWAL_0244_0000.nii.gz", "label": "./labelsTs/MSWAL_0244.nii.gz"}, {"image": "./imagesTs/MSWAL_0249_0000.nii.gz", "label": "./labelsTs/MSWAL_0249.nii.gz"}, {"image": "./imagesTs/MSWAL_0250_0000.nii.gz", "label": "./labelsTs/MSWAL_0250.nii.gz"}, {"image": 
"./imagesTs/MSWAL_0266_0000.nii.gz", "label": "./labelsTs/MSWAL_0266.nii.gz"}, {"image": "./imagesTs/MSWAL_0268_0000.nii.gz", "label": "./labelsTs/MSWAL_0268.nii.gz"}, {"image": "./imagesTs/MSWAL_0269_0000.nii.gz", "label": "./labelsTs/MSWAL_0269.nii.gz"}, {"image": "./imagesTs/MSWAL_0280_0000.nii.gz", "label": "./labelsTs/MSWAL_0280.nii.gz"}, {"image": "./imagesTs/MSWAL_0286_0000.nii.gz", "label": "./labelsTs/MSWAL_0286.nii.gz"}, {"image": "./imagesTs/MSWAL_0287_0000.nii.gz", "label": "./labelsTs/MSWAL_0287.nii.gz"}, {"image": "./imagesTs/MSWAL_0291_0000.nii.gz", "label": "./labelsTs/MSWAL_0291.nii.gz"}, {"image": "./imagesTs/MSWAL_0292_0000.nii.gz", "label": "./labelsTs/MSWAL_0292.nii.gz"}, {"image": "./imagesTs/MSWAL_0294_0000.nii.gz", "label": "./labelsTs/MSWAL_0294.nii.gz"}, {"image": "./imagesTs/MSWAL_0295_0000.nii.gz", "label": "./labelsTs/MSWAL_0295.nii.gz"}, {"image": "./imagesTs/MSWAL_0298_0000.nii.gz", "label": "./labelsTs/MSWAL_0298.nii.gz"}, {"image": "./imagesTs/MSWAL_0299_0000.nii.gz", "label": "./labelsTs/MSWAL_0299.nii.gz"}, {"image": "./imagesTs/MSWAL_0300_0000.nii.gz", "label": "./labelsTs/MSWAL_0300.nii.gz"}, {"image": "./imagesTs/MSWAL_0304_0000.nii.gz", "label": "./labelsTs/MSWAL_0304.nii.gz"}, {"image": "./imagesTs/MSWAL_0305_0000.nii.gz", "label": "./labelsTs/MSWAL_0305.nii.gz"}, {"image": "./imagesTs/MSWAL_0309_0000.nii.gz", "label": "./labelsTs/MSWAL_0309.nii.gz"}, {"image": "./imagesTs/MSWAL_0310_0000.nii.gz", "label": "./labelsTs/MSWAL_0310.nii.gz"}, {"image": "./imagesTs/MSWAL_0315_0000.nii.gz", "label": "./labelsTs/MSWAL_0315.nii.gz"}, {"image": "./imagesTs/MSWAL_0319_0000.nii.gz", "label": "./labelsTs/MSWAL_0319.nii.gz"}, {"image": "./imagesTs/MSWAL_0321_0000.nii.gz", "label": "./labelsTs/MSWAL_0321.nii.gz"}, {"image": "./imagesTs/MSWAL_0322_0000.nii.gz", "label": "./labelsTs/MSWAL_0322.nii.gz"}, {"image": "./imagesTs/MSWAL_0325_0000.nii.gz", "label": "./labelsTs/MSWAL_0325.nii.gz"}, {"image": "./imagesTs/MSWAL_0329_0000.nii.gz", 
'label': './labelsTs/MSWAL_0329.nii.gz'}, {'image': './imagesTs/MSWAL_0339_0000.nii.gz', 'label': './labelsTs/MSWAL_0339.nii.gz'}, {'image': './imagesTs/MSWAL_0340_0000.nii.gz', 'label': './labelsTs/MSWAL_0340.nii.gz'}, {'image': './imagesTs/MSWAL_0347_0000.nii.gz', 'label': './labelsTs/MSWAL_0347.nii.gz'}, {'image': './imagesTs/MSWAL_0349_0000.nii.gz', 'label': './labelsTs/MSWAL_0349.nii.gz'}, {'image': './imagesTs/MSWAL_0350_0000.nii.gz', 'label': './labelsTs/MSWAL_0350.nii.gz'}, {'image': './imagesTs/MSWAL_0351_0000.nii.gz', 'label': './labelsTs/MSWAL_0351.nii.gz'}, {'image': './imagesTs/MSWAL_0352_0000.nii.gz', 'label': './labelsTs/MSWAL_0352.nii.gz'}, {'image': './imagesTs/MSWAL_0358_0000.nii.gz', 'label': './labelsTs/MSWAL_0358.nii.gz'}, {'image': './imagesTs/MSWAL_0359_0000.nii.gz', 'label': './labelsTs/MSWAL_0359.nii.gz'}, {'image': './imagesTs/MSWAL_0364_0000.nii.gz', 'label': './labelsTs/MSWAL_0364.nii.gz'}, {'image': './imagesTs/MSWAL_0367_0000.nii.gz', 'label': './labelsTs/MSWAL_0367.nii.gz'}, {'image': './imagesTs/MSWAL_0368_0000.nii.gz', 'label': './labelsTs/MSWAL_0368.nii.gz'}, {'image': './imagesTs/MSWAL_0371_0000.nii.gz', 'label': './labelsTs/MSWAL_0371.nii.gz'}, {'image': './imagesTs/MSWAL_0372_0000.nii.gz', 'label': './labelsTs/MSWAL_0372.nii.gz'}, {'image': './imagesTs/MSWAL_0377_0000.nii.gz', 'label': './labelsTs/MSWAL_0377.nii.gz'}, {'image': './imagesTs/MSWAL_0383_0000.nii.gz', 'label': './labelsTs/MSWAL_0383.nii.gz'}, {'image': './imagesTs/MSWAL_0384_0000.nii.gz', 'label': './labelsTs/MSWAL_0384.nii.gz'}, {'image': './imagesTs/MSWAL_0385_0000.nii.gz', 'label': './labelsTs/MSWAL_0385.nii.gz'}, {'image': './imagesTs/MSWAL_0386_0000.nii.gz', 'label': './labelsTs/MSWAL_0386.nii.gz'}, {'image': './imagesTs/MSWAL_0394_0000.nii.gz', 'label': './labelsTs/MSWAL_0394.nii.gz'}, {'image': './imagesTs/MSWAL_0395_0000.nii.gz', 'label': './labelsTs/MSWAL_0395.nii.gz'}, {'image': './imagesTs/MSWAL_0396_0000.nii.gz', 'label': './labelsTs/MSWAL_0396.nii.gz'}, 
{'image': './imagesTs/MSWAL_0401_0000.nii.gz', 'label': './labelsTs/MSWAL_0401.nii.gz'}, {'image': './imagesTs/MSWAL_0404_0000.nii.gz', 'label': './labelsTs/MSWAL_0404.nii.gz'}, {'image': './imagesTs/MSWAL_0405_0000.nii.gz', 'label': './labelsTs/MSWAL_0405.nii.gz'}, {'image': './imagesTs/MSWAL_0406_0000.nii.gz', 'label': './labelsTs/MSWAL_0406.nii.gz'}, {'image': './imagesTs/MSWAL_0408_0000.nii.gz', 'label': './labelsTs/MSWAL_0408.nii.gz'}, {'image': './imagesTs/MSWAL_0413_0000.nii.gz', 'label': './labelsTs/MSWAL_0413.nii.gz'}, {'image': './imagesTs/MSWAL_0424_0000.nii.gz', 'label': './labelsTs/MSWAL_0424.nii.gz'}, {'image': './imagesTs/MSWAL_0433_0000.nii.gz', 'label': './labelsTs/MSWAL_0433.nii.gz'}, {'image': './imagesTs/MSWAL_0441_0000.nii.gz', 'label': './labelsTs/MSWAL_0441.nii.gz'}, {'image': './imagesTs/MSWAL_0443_0000.nii.gz', 'label': './labelsTs/MSWAL_0443.nii.gz'}, {'image': './imagesTs/MSWAL_0444_0000.nii.gz', 'label': './labelsTs/MSWAL_0444.nii.gz'}, {'image': './imagesTs/MSWAL_0445_0000.nii.gz', 'label': './labelsTs/MSWAL_0445.nii.gz'}, {'image': './imagesTs/MSWAL_0448_0000.nii.gz', 'label': './labelsTs/MSWAL_0448.nii.gz'}, {'image': './imagesTs/MSWAL_0449_0000.nii.gz', 'label': './labelsTs/MSWAL_0449.nii.gz'}, {'image': './imagesTs/MSWAL_0450_0000.nii.gz', 'label': './labelsTs/MSWAL_0450.nii.gz'}, {'image': './imagesTs/MSWAL_0451_0000.nii.gz', 'label': './labelsTs/MSWAL_0451.nii.gz'}, {'image': './imagesTs/MSWAL_0454_0000.nii.gz', 'label': './labelsTs/MSWAL_0454.nii.gz'}, {'image': './imagesTs/MSWAL_0456_0000.nii.gz', 'label': './labelsTs/MSWAL_0456.nii.gz'}, {'image': './imagesTs/MSWAL_0458_0000.nii.gz', 'label': './labelsTs/MSWAL_0458.nii.gz'}, {'image': './imagesTs/MSWAL_0459_0000.nii.gz', 'label': './labelsTs/MSWAL_0459.nii.gz'}, {'image': './imagesTs/MSWAL_0462_0000.nii.gz', 'label': './labelsTs/MSWAL_0462.nii.gz'}, {'image': './imagesTs/MSWAL_0467_0000.nii.gz', 'label': './labelsTs/MSWAL_0467.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0469_0000.nii.gz', 'label': './labelsTs/MSWAL_0469.nii.gz'}, {'image': './imagesTs/MSWAL_0472_0000.nii.gz', 'label': './labelsTs/MSWAL_0472.nii.gz'}, {'image': './imagesTs/MSWAL_0478_0000.nii.gz', 'label': './labelsTs/MSWAL_0478.nii.gz'}, {'image': './imagesTs/MSWAL_0481_0000.nii.gz', 'label': './labelsTs/MSWAL_0481.nii.gz'}, {'image': './imagesTs/MSWAL_0494_0000.nii.gz', 'label': './labelsTs/MSWAL_0494.nii.gz'}, {'image': './imagesTs/MSWAL_0496_0000.nii.gz', 'label': './labelsTs/MSWAL_0496.nii.gz'}, {'image': './imagesTs/MSWAL_0499_0000.nii.gz', 'label': './labelsTs/MSWAL_0499.nii.gz'}, {'image': './imagesTs/MSWAL_0502_0000.nii.gz', 'label': './labelsTs/MSWAL_0502.nii.gz'}, {'image': './imagesTs/MSWAL_0503_0000.nii.gz', 'label': './labelsTs/MSWAL_0503.nii.gz'}, {'image': './imagesTs/MSWAL_0511_0000.nii.gz', 'label': './labelsTs/MSWAL_0511.nii.gz'}, {'image': './imagesTs/MSWAL_0513_0000.nii.gz', 'label': './labelsTs/MSWAL_0513.nii.gz'}, {'image': './imagesTs/MSWAL_0514_0000.nii.gz', 'label': './labelsTs/MSWAL_0514.nii.gz'}, {'image': './imagesTs/MSWAL_0515_0000.nii.gz', 'label': './labelsTs/MSWAL_0515.nii.gz'}, {'image': './imagesTs/MSWAL_0517_0000.nii.gz', 'label': './labelsTs/MSWAL_0517.nii.gz'}, {'image': './imagesTs/MSWAL_0520_0000.nii.gz', 'label': './labelsTs/MSWAL_0520.nii.gz'}, {'image': './imagesTs/MSWAL_0525_0000.nii.gz', 'label': './labelsTs/MSWAL_0525.nii.gz'}, {'image': './imagesTs/MSWAL_0528_0000.nii.gz', 'label': './labelsTs/MSWAL_0528.nii.gz'}, {'image': './imagesTs/MSWAL_0529_0000.nii.gz', 'label': './labelsTs/MSWAL_0529.nii.gz'}, {'image': './imagesTs/MSWAL_0532_0000.nii.gz', 'label': './labelsTs/MSWAL_0532.nii.gz'}, {'image': './imagesTs/MSWAL_0533_0000.nii.gz', 'label': './labelsTs/MSWAL_0533.nii.gz'}, {'image': './imagesTs/MSWAL_0537_0000.nii.gz', 'label': './labelsTs/MSWAL_0537.nii.gz'}, {'image': './imagesTs/MSWAL_0541_0000.nii.gz', 'label': './labelsTs/MSWAL_0541.nii.gz'}, {'image': './imagesTs/MSWAL_0543_0000.nii.gz', 
'label': './labelsTs/MSWAL_0543.nii.gz'}, {'image': './imagesTs/MSWAL_0560_0000.nii.gz', 'label': './labelsTs/MSWAL_0560.nii.gz'}, {'image': './imagesTs/MSWAL_0565_0000.nii.gz', 'label': './labelsTs/MSWAL_0565.nii.gz'}, {'image': './imagesTs/MSWAL_0569_0000.nii.gz', 'label': './labelsTs/MSWAL_0569.nii.gz'}, {'image': './imagesTs/MSWAL_0570_0000.nii.gz', 'label': './labelsTs/MSWAL_0570.nii.gz'}, {'image': './imagesTs/MSWAL_0572_0000.nii.gz', 'label': './labelsTs/MSWAL_0572.nii.gz'}, {'image': './imagesTs/MSWAL_0576_0000.nii.gz', 'label': './labelsTs/MSWAL_0576.nii.gz'}, {'image': './imagesTs/MSWAL_0585_0000.nii.gz', 'label': './labelsTs/MSWAL_0585.nii.gz'}, {'image': './imagesTs/MSWAL_0587_0000.nii.gz', 'label': './labelsTs/MSWAL_0587.nii.gz'}, {'image': './imagesTs/MSWAL_0588_0000.nii.gz', 'label': './labelsTs/MSWAL_0588.nii.gz'}, {'image': './imagesTs/MSWAL_0589_0000.nii.gz', 'label': './labelsTs/MSWAL_0589.nii.gz'}, {'image': './imagesTs/MSWAL_0594_0000.nii.gz', 'label': './labelsTs/MSWAL_0594.nii.gz'}, {'image': './imagesTs/MSWAL_0603_0000.nii.gz', 'label': './labelsTs/MSWAL_0603.nii.gz'}, {'image': './imagesTs/MSWAL_0606_0000.nii.gz', 'label': './labelsTs/MSWAL_0606.nii.gz'}, {'image': './imagesTs/MSWAL_0607_0000.nii.gz', 'label': './labelsTs/MSWAL_0607.nii.gz'}, {'image': './imagesTs/MSWAL_0609_0000.nii.gz', 'label': './labelsTs/MSWAL_0609.nii.gz'}, {'image': './imagesTs/MSWAL_0610_0000.nii.gz', 'label': './labelsTs/MSWAL_0610.nii.gz'}, {'image': './imagesTs/MSWAL_0611_0000.nii.gz', 'label': './labelsTs/MSWAL_0611.nii.gz'}, {'image': './imagesTs/MSWAL_0613_0000.nii.gz', 'label': './labelsTs/MSWAL_0613.nii.gz'}, {'image': './imagesTs/MSWAL_0618_0000.nii.gz', 'label': './labelsTs/MSWAL_0618.nii.gz'}, {'image': './imagesTs/MSWAL_0619_0000.nii.gz', 'label': './labelsTs/MSWAL_0619.nii.gz'}, {'image': './imagesTs/MSWAL_0620_0000.nii.gz', 'label': './labelsTs/MSWAL_0620.nii.gz'}, {'image': './imagesTs/MSWAL_0622_0000.nii.gz', 'label': './labelsTs/MSWAL_0622.nii.gz'}, 
{'image': './imagesTs/MSWAL_0624_0000.nii.gz', 'label': './labelsTs/MSWAL_0624.nii.gz'}, {'image': './imagesTs/MSWAL_0631_0000.nii.gz', 'label': './labelsTs/MSWAL_0631.nii.gz'}, {'image': './imagesTs/MSWAL_0633_0000.nii.gz', 'label': './labelsTs/MSWAL_0633.nii.gz'}, {'image': './imagesTs/MSWAL_0634_0000.nii.gz', 'label': './labelsTs/MSWAL_0634.nii.gz'}, {'image': './imagesTs/MSWAL_0637_0000.nii.gz', 'label': './labelsTs/MSWAL_0637.nii.gz'}, {'image': './imagesTs/MSWAL_0639_0000.nii.gz', 'label': './labelsTs/MSWAL_0639.nii.gz'}, {'image': './imagesTs/MSWAL_0642_0000.nii.gz', 'label': './labelsTs/MSWAL_0642.nii.gz'}, {'image': './imagesTs/MSWAL_0645_0000.nii.gz', 'label': './labelsTs/MSWAL_0645.nii.gz'}, {'image': './imagesTs/MSWAL_0647_0000.nii.gz', 'label': './labelsTs/MSWAL_0647.nii.gz'}, {'image': './imagesTs/MSWAL_0652_0000.nii.gz', 'label': './labelsTs/MSWAL_0652.nii.gz'}, {'image': './imagesTs/MSWAL_0657_0000.nii.gz', 'label': './labelsTs/MSWAL_0657.nii.gz'}, {'image': './imagesTs/MSWAL_0659_0000.nii.gz', 'label': './labelsTs/MSWAL_0659.nii.gz'}, {'image': './imagesTs/MSWAL_0664_0000.nii.gz', 'label': './labelsTs/MSWAL_0664.nii.gz'}, {'image': './imagesTs/MSWAL_0665_0000.nii.gz', 'label': './labelsTs/MSWAL_0665.nii.gz'}, {'image': './imagesTs/MSWAL_0672_0000.nii.gz', 'label': './labelsTs/MSWAL_0672.nii.gz'}, {'image': './imagesTs/MSWAL_0678_0000.nii.gz', 'label': './labelsTs/MSWAL_0678.nii.gz'}, {'image': './imagesTs/MSWAL_0683_0000.nii.gz', 'label': './labelsTs/MSWAL_0683.nii.gz'}, {'image': './imagesTs/MSWAL_0684_0000.nii.gz', 'label': './labelsTs/MSWAL_0684.nii.gz'}, {'image': './imagesTs/MSWAL_0689_0000.nii.gz', 'label': './labelsTs/MSWAL_0689.nii.gz'}, {'image': './imagesTs/MSWAL_0691_0000.nii.gz', 'label': './labelsTs/MSWAL_0691.nii.gz'}]}, 'unpack_dataset': True, 'device': device(type='cuda')}", + "network": "OptimizedModule", + "num_epochs": "1000", + "num_input_channels": "1", + "num_iterations_per_epoch": "250", + "num_val_iterations_per_epoch": 
"50", + "optimizer": "SGD (\nParameter Group 0\n dampening: 0\n differentiable: False\n foreach: None\n fused: None\n initial_lr: 0.01\n lr: 0.01\n maximize: False\n momentum: 0.99\n nesterov: True\n weight_decay: 3e-05\n)", + "output_folder": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1", + "output_folder_base": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres", + "oversample_foreground_percent": "0.33", + "plans_manager": "{'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'configurations': {'2d': {'data_identifier': 'nnUNetPlans_2d', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 35, 'patch_size': [512, 512], 'median_image_size_in_voxels': [512.0, 512.0], 'spacing': [0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 8, 'features_per_stage': [32, 64, 128, 256, 512, 512, 512, 512], 'conv_op': 'torch.nn.modules.conv.Conv2d', 'kernel_sizes': [[3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3]], 'strides': [[1, 1], [2, 2], [2, 2], [2, 2], 
[2, 2], [2, 2], [2, 2], [2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm2d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_lowres': {'data_identifier': 'nnUNetResEncUNetLPlans_3d_lowres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [190, 381, 381], 'spacing': [1.6798954741801528, 1.0079372845080916, 1.0079372845080916], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': 
None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': False, 'next_stage': '3d_cascade_fullres'}, '3d_fullres': {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_cascade_fullres': {'inherits_from': '3d_fullres', 'previous_stage': '3d_lowres'}}, 'experiment_planner_used': 'nnUNetPlannerResEncL', 
'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}}", + "preprocessed_dataset_folder": "/data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/nnUNetPlans_3d_fullres", + "preprocessed_dataset_folder_base": "/data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL", + "save_every": "50", + "torch_version": "2.5.0+cu121", + "unpack_dataset": "True", + "was_initialized": "True", + "weight_decay": "3e-05" +} \ No newline at end of file diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/progress.png b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/progress.png new file mode 100644 index 0000000000000000000000000000000000000000..1604b88c28412da2e0cbfa200517a0d6ece63122 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/progress.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bd9643e44ff05019efcac5fae155c46746b0768acc4b192bd3dbaabe149c9b7f +size 1413722 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/training_log_2026_4_8_15_27_18.txt b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/training_log_2026_4_8_15_27_18.txt new file mode 100644 index 0000000000000000000000000000000000000000..6b3004a1da2b0d799681ab1b5bac9959ab2021d5 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/training_log_2026_4_8_15_27_18.txt @@ -0,0 +1,11 @@ + +####################################################################### +Please cite the following paper when using nnU-Net: +Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. 
Nature methods, 18(2), 203-211. +####################################################################### + +2026-04-08 15:27:18.494670: do_dummy_2d_data_aug: False +2026-04-08 15:27:18.498723: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-08 15:27:18.502337: The split file contains 5 splits. +2026-04-08 15:27:18.504030: Desired fold for training: 1 +2026-04-08 15:27:18.505772: This split has 387 training and 97 validation cases. diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/training_log_2026_4_8_15_54_45.txt b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/training_log_2026_4_8_15_54_45.txt new file mode 100644 index 0000000000000000000000000000000000000000..6bbc077197bc5117cf8f67358f0ddf829b608f9b --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_1/training_log_2026_4_8_15_54_45.txt @@ -0,0 +1,7350 @@ + +####################################################################### +Please cite the following paper when using nnU-Net: +Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. Nature methods, 18(2), 203-211. +####################################################################### + +2026-04-08 15:54:45.364251: do_dummy_2d_data_aug: False +2026-04-08 15:54:45.427429: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-08 15:54:45.430450: The split file contains 5 splits. +2026-04-08 15:54:45.432248: Desired fold for training: 1 +2026-04-08 15:54:45.434526: This split has 387 training and 97 validation cases. +2026-04-08 15:54:57.109545: Using torch.compile... 
+ +This is the configuration used by this training: +Configuration name: 3d_fullres + {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True} + +These are the global plan.json settings: + {'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 
'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}} + +2026-04-08 15:54:59.034324: unpacking dataset... +2026-04-08 15:55:06.160245: unpacking done... +2026-04-08 15:55:06.187735: Unable to plot network architecture: nnUNet_compile is enabled! +2026-04-08 15:55:06.245101: +2026-04-08 15:55:06.247339: Epoch 0 +2026-04-08 15:55:06.249814: Current learning rate: 0.01 +2026-04-08 15:59:15.274133: train_loss 0.2377 +2026-04-08 15:59:15.279532: val_loss 0.0676 +2026-04-08 15:59:15.281369: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 15:59:15.283026: Epoch time: 249.03 s +2026-04-08 15:59:15.285742: Yayy! New best EMA pseudo Dice: 0.0 +2026-04-08 15:59:17.844011: +2026-04-08 15:59:17.846594: Epoch 1 +2026-04-08 15:59:17.848089: Current learning rate: 0.00999 +2026-04-08 16:00:59.733832: train_loss 0.0697 +2026-04-08 16:00:59.742975: val_loss 0.0616 +2026-04-08 16:00:59.748172: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:00:59.751379: Epoch time: 101.89 s +2026-04-08 16:01:00.777678: +2026-04-08 16:01:00.780290: Epoch 2 +2026-04-08 16:01:00.782078: Current learning rate: 0.00998 +2026-04-08 16:02:42.522196: train_loss 0.0744 +2026-04-08 16:02:42.531578: val_loss 0.0566 +2026-04-08 16:02:42.533826: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:02:42.536625: Epoch time: 101.75 s +2026-04-08 16:02:43.712380: +2026-04-08 16:02:43.715407: Epoch 3 +2026-04-08 16:02:43.717712: Current learning rate: 0.00997 +2026-04-08 16:04:24.813604: train_loss 0.0614 +2026-04-08 16:04:24.821277: val_loss 0.0487 +2026-04-08 16:04:24.828415: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:04:24.831076: Epoch time: 101.1 s 
+2026-04-08 16:04:25.907869: +2026-04-08 16:04:25.910261: Epoch 4 +2026-04-08 16:04:25.913017: Current learning rate: 0.00996 +2026-04-08 16:06:07.269699: train_loss 0.0562 +2026-04-08 16:06:07.277541: val_loss 0.0521 +2026-04-08 16:06:07.279736: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:06:07.281919: Epoch time: 101.36 s +2026-04-08 16:06:08.389665: +2026-04-08 16:06:08.391858: Epoch 5 +2026-04-08 16:06:08.394158: Current learning rate: 0.00995 +2026-04-08 16:07:49.808325: train_loss 0.0589 +2026-04-08 16:07:49.813884: val_loss 0.0679 +2026-04-08 16:07:49.816699: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:07:49.820548: Epoch time: 101.42 s +2026-04-08 16:07:50.857038: +2026-04-08 16:07:50.859456: Epoch 6 +2026-04-08 16:07:50.861683: Current learning rate: 0.00995 +2026-04-08 16:09:32.653395: train_loss 0.0606 +2026-04-08 16:09:32.658727: val_loss 0.0411 +2026-04-08 16:09:32.660868: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:09:32.662986: Epoch time: 101.8 s +2026-04-08 16:09:33.676248: +2026-04-08 16:09:33.679494: Epoch 7 +2026-04-08 16:09:33.681797: Current learning rate: 0.00994 +2026-04-08 16:11:15.959608: train_loss 0.0548 +2026-04-08 16:11:15.966733: val_loss 0.0628 +2026-04-08 16:11:15.969820: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:11:15.972248: Epoch time: 102.29 s +2026-04-08 16:11:17.028280: +2026-04-08 16:11:17.032412: Epoch 8 +2026-04-08 16:11:17.038565: Current learning rate: 0.00993 +2026-04-08 16:12:59.583630: train_loss 0.0575 +2026-04-08 16:12:59.589643: val_loss 0.0884 +2026-04-08 16:12:59.593631: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:12:59.596146: Epoch time: 102.56 s +2026-04-08 16:13:00.675796: +2026-04-08 16:13:00.678154: Epoch 9 +2026-04-08 16:13:00.680303: Current learning rate: 0.00992 +2026-04-08 16:14:42.053989: train_loss 0.0621 +2026-04-08 16:14:42.059869: val_loss 0.0352 +2026-04-08 16:14:42.062167: Pseudo dice [0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0] +2026-04-08 16:14:42.064443: Epoch time: 101.38 s +2026-04-08 16:14:43.068408: +2026-04-08 16:14:43.070286: Epoch 10 +2026-04-08 16:14:43.072520: Current learning rate: 0.00991 +2026-04-08 16:16:25.352476: train_loss 0.0626 +2026-04-08 16:16:25.369752: val_loss 0.0498 +2026-04-08 16:16:25.372205: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:16:25.374064: Epoch time: 102.29 s +2026-04-08 16:16:26.472246: +2026-04-08 16:16:26.474715: Epoch 11 +2026-04-08 16:16:26.476581: Current learning rate: 0.0099 +2026-04-08 16:18:07.882422: train_loss 0.0562 +2026-04-08 16:18:07.893247: val_loss 0.0536 +2026-04-08 16:18:07.901763: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:18:07.905979: Epoch time: 101.41 s +2026-04-08 16:18:09.005484: +2026-04-08 16:18:09.013393: Epoch 12 +2026-04-08 16:18:09.040328: Current learning rate: 0.00989 +2026-04-08 16:19:50.909883: train_loss 0.0469 +2026-04-08 16:19:50.919312: val_loss 0.0433 +2026-04-08 16:19:50.921906: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:19:50.926120: Epoch time: 101.91 s +2026-04-08 16:19:52.017737: +2026-04-08 16:19:52.019523: Epoch 13 +2026-04-08 16:19:52.021302: Current learning rate: 0.00988 +2026-04-08 16:21:35.431593: train_loss 0.0412 +2026-04-08 16:21:35.441456: val_loss 0.0657 +2026-04-08 16:21:35.444175: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:21:35.450078: Epoch time: 103.42 s +2026-04-08 16:21:36.593908: +2026-04-08 16:21:36.602147: Epoch 14 +2026-04-08 16:21:36.604617: Current learning rate: 0.00987 +2026-04-08 16:23:19.170893: train_loss 0.0471 +2026-04-08 16:23:19.177220: val_loss 0.0478 +2026-04-08 16:23:19.179446: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:23:19.182712: Epoch time: 102.58 s +2026-04-08 16:23:20.320150: +2026-04-08 16:23:20.322785: Epoch 15 +2026-04-08 16:23:20.326610: Current learning rate: 0.00986 +2026-04-08 16:25:03.247674: train_loss 0.0496 +2026-04-08 
16:25:03.256703: val_loss 0.0326 +2026-04-08 16:25:03.259085: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:25:03.261875: Epoch time: 102.93 s +2026-04-08 16:25:04.334700: +2026-04-08 16:25:04.337265: Epoch 16 +2026-04-08 16:25:04.339602: Current learning rate: 0.00986 +2026-04-08 16:26:48.435134: train_loss 0.0491 +2026-04-08 16:26:48.443611: val_loss 0.0269 +2026-04-08 16:26:48.445977: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:26:48.448358: Epoch time: 104.1 s +2026-04-08 16:26:49.563666: +2026-04-08 16:26:49.566553: Epoch 17 +2026-04-08 16:26:49.570868: Current learning rate: 0.00985 +2026-04-08 16:28:34.290699: train_loss 0.0543 +2026-04-08 16:28:34.298911: val_loss 0.0755 +2026-04-08 16:28:34.301085: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:28:34.304758: Epoch time: 104.73 s +2026-04-08 16:28:36.488364: +2026-04-08 16:28:36.500565: Epoch 18 +2026-04-08 16:28:36.502411: Current learning rate: 0.00984 +2026-04-08 16:30:19.823607: train_loss 0.0486 +2026-04-08 16:30:19.830656: val_loss 0.0689 +2026-04-08 16:30:19.833552: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:30:19.836108: Epoch time: 103.34 s +2026-04-08 16:30:20.862147: +2026-04-08 16:30:20.863898: Epoch 19 +2026-04-08 16:30:20.865700: Current learning rate: 0.00983 +2026-04-08 16:32:03.716519: train_loss 0.0528 +2026-04-08 16:32:03.723459: val_loss 0.0427 +2026-04-08 16:32:03.726734: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:32:03.729017: Epoch time: 102.86 s +2026-04-08 16:32:04.817851: +2026-04-08 16:32:04.820142: Epoch 20 +2026-04-08 16:32:04.822597: Current learning rate: 0.00982 +2026-04-08 16:33:48.309368: train_loss 0.0482 +2026-04-08 16:33:48.317391: val_loss 0.0544 +2026-04-08 16:33:48.319521: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:33:48.321910: Epoch time: 103.49 s +2026-04-08 16:33:49.402215: +2026-04-08 16:33:49.404255: Epoch 21 +2026-04-08 16:33:49.407143: Current 
learning rate: 0.00981 +2026-04-08 16:35:33.149687: train_loss 0.0401 +2026-04-08 16:35:33.164291: val_loss 0.0427 +2026-04-08 16:35:33.167499: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:35:33.170257: Epoch time: 103.75 s +2026-04-08 16:35:34.197585: +2026-04-08 16:35:34.200551: Epoch 22 +2026-04-08 16:35:34.204973: Current learning rate: 0.0098 +2026-04-08 16:37:16.575562: train_loss 0.0461 +2026-04-08 16:37:16.581429: val_loss 0.0297 +2026-04-08 16:37:16.584033: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:37:16.588955: Epoch time: 102.38 s +2026-04-08 16:37:17.612762: +2026-04-08 16:37:17.615672: Epoch 23 +2026-04-08 16:37:17.617716: Current learning rate: 0.00979 +2026-04-08 16:39:00.896875: train_loss 0.0521 +2026-04-08 16:39:00.906981: val_loss 0.0291 +2026-04-08 16:39:00.910316: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:39:00.912889: Epoch time: 103.29 s +2026-04-08 16:39:01.917847: +2026-04-08 16:39:01.920314: Epoch 24 +2026-04-08 16:39:01.923122: Current learning rate: 0.00978 +2026-04-08 16:40:45.680952: train_loss 0.0427 +2026-04-08 16:40:45.689708: val_loss 0.0356 +2026-04-08 16:40:45.691663: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:40:45.694623: Epoch time: 103.77 s +2026-04-08 16:40:46.724466: +2026-04-08 16:40:46.727839: Epoch 25 +2026-04-08 16:40:46.734485: Current learning rate: 0.00977 +2026-04-08 16:42:29.864073: train_loss 0.046 +2026-04-08 16:42:29.872686: val_loss 0.04 +2026-04-08 16:42:29.875287: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:42:29.878602: Epoch time: 103.14 s +2026-04-08 16:42:30.932722: +2026-04-08 16:42:30.936323: Epoch 26 +2026-04-08 16:42:30.940127: Current learning rate: 0.00977 +2026-04-08 16:44:14.319276: train_loss 0.0379 +2026-04-08 16:44:14.324289: val_loss 0.0795 +2026-04-08 16:44:14.326507: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:44:14.329430: Epoch time: 103.39 s +2026-04-08 16:44:15.336635: 
+2026-04-08 16:44:15.339849: Epoch 27 +2026-04-08 16:44:15.341895: Current learning rate: 0.00976 +2026-04-08 16:45:58.906766: train_loss 0.0311 +2026-04-08 16:45:58.915374: val_loss 0.0802 +2026-04-08 16:45:58.918284: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:45:58.922704: Epoch time: 103.57 s +2026-04-08 16:45:59.944940: +2026-04-08 16:45:59.947549: Epoch 28 +2026-04-08 16:45:59.950130: Current learning rate: 0.00975 +2026-04-08 16:47:43.573847: train_loss 0.0343 +2026-04-08 16:47:43.583550: val_loss 0.0271 +2026-04-08 16:47:43.586311: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:47:43.588888: Epoch time: 103.63 s +2026-04-08 16:47:44.621562: +2026-04-08 16:47:44.623990: Epoch 29 +2026-04-08 16:47:44.626583: Current learning rate: 0.00974 +2026-04-08 16:49:26.972894: train_loss 0.0433 +2026-04-08 16:49:26.980220: val_loss 0.047 +2026-04-08 16:49:26.983214: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:49:26.985628: Epoch time: 102.35 s +2026-04-08 16:49:28.023444: +2026-04-08 16:49:28.026202: Epoch 30 +2026-04-08 16:49:28.033875: Current learning rate: 0.00973 +2026-04-08 16:51:11.273086: train_loss 0.0461 +2026-04-08 16:51:11.286990: val_loss 0.0394 +2026-04-08 16:51:11.296495: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:51:11.299664: Epoch time: 103.25 s +2026-04-08 16:51:12.350939: +2026-04-08 16:51:12.352938: Epoch 31 +2026-04-08 16:51:12.356330: Current learning rate: 0.00972 +2026-04-08 16:52:55.073970: train_loss 0.039 +2026-04-08 16:52:55.079797: val_loss 0.076 +2026-04-08 16:52:55.082534: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:52:55.085390: Epoch time: 102.73 s +2026-04-08 16:52:56.164354: +2026-04-08 16:52:56.167541: Epoch 32 +2026-04-08 16:52:56.169914: Current learning rate: 0.00971 +2026-04-08 16:54:40.881802: train_loss 0.0446 +2026-04-08 16:54:40.888842: val_loss 0.0385 +2026-04-08 16:54:40.891829: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] 
+2026-04-08 16:54:40.894965: Epoch time: 104.72 s +2026-04-08 16:54:41.947649: +2026-04-08 16:54:41.950901: Epoch 33 +2026-04-08 16:54:41.953525: Current learning rate: 0.0097 +2026-04-08 16:56:24.882230: train_loss 0.0372 +2026-04-08 16:56:24.890480: val_loss 0.0338 +2026-04-08 16:56:24.893816: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:56:24.898534: Epoch time: 102.94 s +2026-04-08 16:56:25.919127: +2026-04-08 16:56:25.922099: Epoch 34 +2026-04-08 16:56:25.924750: Current learning rate: 0.00969 +2026-04-08 16:58:08.951772: train_loss 0.0359 +2026-04-08 16:58:08.957915: val_loss 0.0414 +2026-04-08 16:58:08.960860: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:58:08.963270: Epoch time: 103.04 s +2026-04-08 16:58:10.014139: +2026-04-08 16:58:10.016068: Epoch 35 +2026-04-08 16:58:10.017848: Current learning rate: 0.00968 +2026-04-08 16:59:53.395461: train_loss 0.0283 +2026-04-08 16:59:53.402139: val_loss 0.0397 +2026-04-08 16:59:53.404022: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:59:53.406733: Epoch time: 103.38 s +2026-04-08 16:59:54.444796: +2026-04-08 16:59:54.447714: Epoch 36 +2026-04-08 16:59:54.450392: Current learning rate: 0.00968 +2026-04-08 17:01:37.106465: train_loss 0.0387 +2026-04-08 17:01:37.113348: val_loss 0.0792 +2026-04-08 17:01:37.115770: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:01:37.118211: Epoch time: 102.66 s +2026-04-08 17:01:38.241242: +2026-04-08 17:01:38.243889: Epoch 37 +2026-04-08 17:01:38.246930: Current learning rate: 0.00967 +2026-04-08 17:03:21.404658: train_loss 0.039 +2026-04-08 17:03:21.411961: val_loss 0.0441 +2026-04-08 17:03:21.415289: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:03:21.417500: Epoch time: 103.17 s +2026-04-08 17:03:22.517412: +2026-04-08 17:03:22.520025: Epoch 38 +2026-04-08 17:03:22.522773: Current learning rate: 0.00966 +2026-04-08 17:05:06.221125: train_loss 0.037 +2026-04-08 17:05:06.227552: val_loss 0.0526 
+2026-04-08 17:05:06.229886: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:05:06.233603: Epoch time: 103.71 s +2026-04-08 17:05:08.300982: +2026-04-08 17:05:08.303291: Epoch 39 +2026-04-08 17:05:08.305146: Current learning rate: 0.00965 +2026-04-08 17:06:51.479753: train_loss 0.0326 +2026-04-08 17:06:51.486840: val_loss 0.0206 +2026-04-08 17:06:51.489103: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:06:51.491266: Epoch time: 103.18 s +2026-04-08 17:06:52.546419: +2026-04-08 17:06:52.548447: Epoch 40 +2026-04-08 17:06:52.551307: Current learning rate: 0.00964 +2026-04-08 17:08:36.809382: train_loss 0.0328 +2026-04-08 17:08:36.814611: val_loss 0.049 +2026-04-08 17:08:36.816777: Pseudo dice [0.0, 0.0, 0.0408, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:08:36.819219: Epoch time: 104.27 s +2026-04-08 17:08:36.821544: Yayy! New best EMA pseudo Dice: 0.0006 +2026-04-08 17:08:39.640513: +2026-04-08 17:08:39.643334: Epoch 41 +2026-04-08 17:08:39.645250: Current learning rate: 0.00963 +2026-04-08 17:10:22.421594: train_loss 0.0308 +2026-04-08 17:10:22.428684: val_loss 0.0377 +2026-04-08 17:10:22.430877: Pseudo dice [0.0, 0.0, 0.0683, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:10:22.433043: Epoch time: 102.78 s +2026-04-08 17:10:22.435372: Yayy! New best EMA pseudo Dice: 0.0015 +2026-04-08 17:10:24.802760: +2026-04-08 17:10:24.805918: Epoch 42 +2026-04-08 17:10:24.807982: Current learning rate: 0.00962 +2026-04-08 17:12:09.454884: train_loss 0.0264 +2026-04-08 17:12:09.462562: val_loss 0.0229 +2026-04-08 17:12:09.465170: Pseudo dice [0.0, 0.0, 0.0534, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:12:09.467796: Epoch time: 104.66 s +2026-04-08 17:12:09.470193: Yayy! 
New best EMA pseudo Dice: 0.0021 +2026-04-08 17:12:12.183559: +2026-04-08 17:12:12.189113: Epoch 43 +2026-04-08 17:12:12.191217: Current learning rate: 0.00961 +2026-04-08 17:13:55.675480: train_loss 0.0288 +2026-04-08 17:13:55.684524: val_loss 0.0233 +2026-04-08 17:13:55.686791: Pseudo dice [0.0, 0.0, 0.0359, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:13:55.689291: Epoch time: 103.5 s +2026-04-08 17:13:55.691138: Yayy! New best EMA pseudo Dice: 0.0024 +2026-04-08 17:13:58.377058: +2026-04-08 17:13:58.379928: Epoch 44 +2026-04-08 17:13:58.384841: Current learning rate: 0.0096 +2026-04-08 17:15:41.602765: train_loss 0.0327 +2026-04-08 17:15:41.612293: val_loss 0.0227 +2026-04-08 17:15:41.616532: Pseudo dice [0.0, 0.0, 0.2215, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:15:41.619281: Epoch time: 103.23 s +2026-04-08 17:15:41.623059: Yayy! New best EMA pseudo Dice: 0.0053 +2026-04-08 17:15:44.436322: +2026-04-08 17:15:44.439801: Epoch 45 +2026-04-08 17:15:44.444242: Current learning rate: 0.00959 +2026-04-08 17:17:27.415492: train_loss 0.0321 +2026-04-08 17:17:27.425318: val_loss 0.0318 +2026-04-08 17:17:27.427777: Pseudo dice [0.0, 0.0, 0.138, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:17:27.430224: Epoch time: 102.98 s +2026-04-08 17:17:27.434137: Yayy! New best EMA pseudo Dice: 0.0068 +2026-04-08 17:17:30.168942: +2026-04-08 17:17:30.172043: Epoch 46 +2026-04-08 17:17:30.173876: Current learning rate: 0.00959 +2026-04-08 17:19:13.575386: train_loss 0.0229 +2026-04-08 17:19:13.582141: val_loss 0.0148 +2026-04-08 17:19:13.584395: Pseudo dice [0.0, 0.0, 0.1033, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:19:13.587544: Epoch time: 103.41 s +2026-04-08 17:19:13.591767: Yayy! 
New best EMA pseudo Dice: 0.0076 +2026-04-08 17:19:15.994615: +2026-04-08 17:19:15.997275: Epoch 47 +2026-04-08 17:19:15.999581: Current learning rate: 0.00958 +2026-04-08 17:20:59.371521: train_loss 0.0208 +2026-04-08 17:20:59.379179: val_loss 0.027 +2026-04-08 17:20:59.381639: Pseudo dice [0.0, 0.0, 0.1008, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:20:59.385390: Epoch time: 103.38 s +2026-04-08 17:20:59.387801: Yayy! New best EMA pseudo Dice: 0.0083 +2026-04-08 17:21:02.098117: +2026-04-08 17:21:02.100420: Epoch 48 +2026-04-08 17:21:02.102125: Current learning rate: 0.00957 +2026-04-08 17:22:45.370613: train_loss 0.0228 +2026-04-08 17:22:45.377484: val_loss 0.0172 +2026-04-08 17:22:45.379638: Pseudo dice [0.0, 0.0, 0.1954, 0.0, 0.0, 0.0, 0.013] +2026-04-08 17:22:45.381778: Epoch time: 103.28 s +2026-04-08 17:22:45.383860: Yayy! New best EMA pseudo Dice: 0.0104 +2026-04-08 17:22:48.085745: +2026-04-08 17:22:48.089570: Epoch 49 +2026-04-08 17:22:48.091625: Current learning rate: 0.00956 +2026-04-08 17:24:31.568867: train_loss 0.0086 +2026-04-08 17:24:31.576149: val_loss 0.0179 +2026-04-08 17:24:31.578604: Pseudo dice [0.0, 0.0, 0.1355, 0.0, 0.0, 0.0, 0.2615] +2026-04-08 17:24:31.581059: Epoch time: 103.49 s +2026-04-08 17:24:33.343006: Yayy! New best EMA pseudo Dice: 0.015 +2026-04-08 17:24:36.053757: +2026-04-08 17:24:36.056089: Epoch 50 +2026-04-08 17:24:36.058040: Current learning rate: 0.00955 +2026-04-08 17:26:18.727198: train_loss 0.0227 +2026-04-08 17:26:18.736040: val_loss 0.0275 +2026-04-08 17:26:18.738909: Pseudo dice [0.0, 0.0, 0.1609, 0.0, 0.0, 0.0, 0.4795] +2026-04-08 17:26:18.741011: Epoch time: 102.68 s +2026-04-08 17:26:18.743762: Yayy! 
New best EMA pseudo Dice: 0.0227 +2026-04-08 17:26:21.496430: +2026-04-08 17:26:21.498698: Epoch 51 +2026-04-08 17:26:21.500510: Current learning rate: 0.00954 +2026-04-08 17:28:04.073471: train_loss 0.0144 +2026-04-08 17:28:04.079692: val_loss 0.0203 +2026-04-08 17:28:04.083645: Pseudo dice [0.0, 0.0, 0.1433, 0.0, 0.0, 0.0, 0.1221] +2026-04-08 17:28:04.087144: Epoch time: 102.58 s +2026-04-08 17:28:04.089781: Yayy! New best EMA pseudo Dice: 0.0242 +2026-04-08 17:28:06.834588: +2026-04-08 17:28:06.836744: Epoch 52 +2026-04-08 17:28:06.838739: Current learning rate: 0.00953 +2026-04-08 17:29:50.524384: train_loss 0.0127 +2026-04-08 17:29:50.535290: val_loss 0.0228 +2026-04-08 17:29:50.547318: Pseudo dice [0.0, 0.0, 0.2833, 0.0, 0.0, 0.0, 0.3473] +2026-04-08 17:29:50.550535: Epoch time: 103.69 s +2026-04-08 17:29:50.556462: Yayy! New best EMA pseudo Dice: 0.0308 +2026-04-08 17:29:53.302779: +2026-04-08 17:29:53.306613: Epoch 53 +2026-04-08 17:29:53.309035: Current learning rate: 0.00952 +2026-04-08 17:31:36.491329: train_loss 0.0139 +2026-04-08 17:31:36.497643: val_loss 0.0444 +2026-04-08 17:31:36.500742: Pseudo dice [0.0, 0.0, 0.1093, 0.0, 0.0, 0.0, 0.1141] +2026-04-08 17:31:36.502539: Epoch time: 103.19 s +2026-04-08 17:31:36.506023: Yayy! New best EMA pseudo Dice: 0.0309 +2026-04-08 17:31:39.140625: +2026-04-08 17:31:39.142748: Epoch 54 +2026-04-08 17:31:39.145084: Current learning rate: 0.00951 +2026-04-08 17:33:21.456674: train_loss 0.0151 +2026-04-08 17:33:21.465458: val_loss 0.0496 +2026-04-08 17:33:21.468243: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3681] +2026-04-08 17:33:21.470181: Epoch time: 102.32 s +2026-04-08 17:33:21.473007: Yayy! 
New best EMA pseudo Dice: 0.0331 +2026-04-08 17:33:24.250601: +2026-04-08 17:33:24.253059: Epoch 55 +2026-04-08 17:33:24.255475: Current learning rate: 0.0095 +2026-04-08 17:35:07.510266: train_loss 0.0085 +2026-04-08 17:35:07.515816: val_loss 0.0455 +2026-04-08 17:35:07.518382: Pseudo dice [0.0, 0.0, 0.2292, 0.0, 0.0, 0.0, 0.2941] +2026-04-08 17:35:07.520414: Epoch time: 103.26 s +2026-04-08 17:35:07.523147: Yayy! New best EMA pseudo Dice: 0.0372 +2026-04-08 17:35:10.300486: +2026-04-08 17:35:10.302739: Epoch 56 +2026-04-08 17:35:10.304878: Current learning rate: 0.00949 +2026-04-08 17:36:51.483317: train_loss -0.0006 +2026-04-08 17:36:51.488823: val_loss -0.0111 +2026-04-08 17:36:51.494446: Pseudo dice [0.0, 0.0, 0.4844, 0.0, 0.0, 0.0, 0.4827] +2026-04-08 17:36:51.496786: Epoch time: 101.19 s +2026-04-08 17:36:51.498954: Yayy! New best EMA pseudo Dice: 0.0473 +2026-04-08 17:36:54.177561: +2026-04-08 17:36:54.180330: Epoch 57 +2026-04-08 17:36:54.182022: Current learning rate: 0.00949 +2026-04-08 17:38:37.271602: train_loss 0.0022 +2026-04-08 17:38:37.279495: val_loss 0.0474 +2026-04-08 17:38:37.282426: Pseudo dice [0.0, 0.0, 0.2832, 0.0, 0.0, 0.0, 0.1188] +2026-04-08 17:38:37.285477: Epoch time: 103.1 s +2026-04-08 17:38:37.289582: Yayy! New best EMA pseudo Dice: 0.0483 +2026-04-08 17:38:40.025567: +2026-04-08 17:38:40.028228: Epoch 58 +2026-04-08 17:38:40.030373: Current learning rate: 0.00948 +2026-04-08 17:40:22.969726: train_loss 0.005 +2026-04-08 17:40:22.977474: val_loss 0.0329 +2026-04-08 17:40:22.980683: Pseudo dice [0.0, 0.0, 0.2877, 0.0, 0.0, 0.0008, 0.1023] +2026-04-08 17:40:22.982937: Epoch time: 102.95 s +2026-04-08 17:40:22.985123: Yayy! 
New best EMA pseudo Dice: 0.0491 +2026-04-08 17:40:25.741054: +2026-04-08 17:40:25.743229: Epoch 59 +2026-04-08 17:40:25.745067: Current learning rate: 0.00947 +2026-04-08 17:42:08.901463: train_loss 1e-04 +2026-04-08 17:42:08.907094: val_loss -0.0023 +2026-04-08 17:42:08.909284: Pseudo dice [0.0, 0.0, 0.3146, 0.0, 0.0, 0.0074, 0.2165] +2026-04-08 17:42:08.910981: Epoch time: 103.16 s +2026-04-08 17:42:08.913488: Yayy! New best EMA pseudo Dice: 0.0519 +2026-04-08 17:42:11.583736: +2026-04-08 17:42:11.586462: Epoch 60 +2026-04-08 17:42:11.588307: Current learning rate: 0.00946 +2026-04-08 17:43:53.769026: train_loss -0.0089 +2026-04-08 17:43:53.775288: val_loss 0.0196 +2026-04-08 17:43:53.777968: Pseudo dice [0.0, 0.0, 0.2748, 0.0, 0.0, 0.0043, 0.4821] +2026-04-08 17:43:53.780106: Epoch time: 102.19 s +2026-04-08 17:43:53.782400: Yayy! New best EMA pseudo Dice: 0.0576 +2026-04-08 17:43:56.510896: +2026-04-08 17:43:56.513408: Epoch 61 +2026-04-08 17:43:56.517282: Current learning rate: 0.00945 +2026-04-08 17:45:38.397713: train_loss -0.0078 +2026-04-08 17:45:38.405005: val_loss -0.0054 +2026-04-08 17:45:38.408270: Pseudo dice [0.0, 0.0, 0.262, 0.0, 0.0, 0.1748, 0.4182] +2026-04-08 17:45:38.410611: Epoch time: 101.89 s +2026-04-08 17:45:38.413302: Yayy! New best EMA pseudo Dice: 0.064 +2026-04-08 17:45:41.140711: +2026-04-08 17:45:41.142959: Epoch 62 +2026-04-08 17:45:41.144781: Current learning rate: 0.00944 +2026-04-08 17:47:22.697480: train_loss -0.0045 +2026-04-08 17:47:22.703014: val_loss 0.0398 +2026-04-08 17:47:22.705406: Pseudo dice [0.0, 0.0, 0.0012, 0.0, 0.0, 0.004, 0.4775] +2026-04-08 17:47:22.707557: Epoch time: 101.56 s +2026-04-08 17:47:22.710828: Yayy! 
New best EMA pseudo Dice: 0.0645 +2026-04-08 17:47:25.438646: +2026-04-08 17:47:25.441639: Epoch 63 +2026-04-08 17:47:25.443504: Current learning rate: 0.00943 +2026-04-08 17:49:08.454287: train_loss -0.0008 +2026-04-08 17:49:08.461127: val_loss 0.0371 +2026-04-08 17:49:08.463561: Pseudo dice [0.0, 0.0, 0.3539, 0.0, 0.0, 0.0709, 0.2084] +2026-04-08 17:49:08.466966: Epoch time: 103.02 s +2026-04-08 17:49:08.470114: Yayy! New best EMA pseudo Dice: 0.0671 +2026-04-08 17:49:11.269241: +2026-04-08 17:49:11.272066: Epoch 64 +2026-04-08 17:49:11.273885: Current learning rate: 0.00942 +2026-04-08 17:50:53.733146: train_loss -0.0065 +2026-04-08 17:50:53.740083: val_loss 0.0617 +2026-04-08 17:50:53.742379: Pseudo dice [0.0, 0.0, 0.0062, 0.0, 0.0, 0.0089, 0.2367] +2026-04-08 17:50:53.744715: Epoch time: 102.47 s +2026-04-08 17:50:54.818938: +2026-04-08 17:50:54.821699: Epoch 65 +2026-04-08 17:50:54.825314: Current learning rate: 0.00941 +2026-04-08 17:52:38.417769: train_loss -0.0089 +2026-04-08 17:52:38.427432: val_loss -0.0154 +2026-04-08 17:52:38.431488: Pseudo dice [0.0, 0.0, 0.1816, 0.0, 0.0, 0.3937, 0.363] +2026-04-08 17:52:38.436554: Epoch time: 103.6 s +2026-04-08 17:52:38.440111: Yayy! New best EMA pseudo Dice: 0.071 +2026-04-08 17:52:41.308781: +2026-04-08 17:52:41.311424: Epoch 66 +2026-04-08 17:52:41.313641: Current learning rate: 0.0094 +2026-04-08 17:54:24.886682: train_loss -0.002 +2026-04-08 17:54:24.899088: val_loss 0.0202 +2026-04-08 17:54:24.916429: Pseudo dice [0.0, 0.0, 0.3818, 0.0, 0.0, 0.039, 0.2756] +2026-04-08 17:54:24.919278: Epoch time: 103.58 s +2026-04-08 17:54:24.921768: Yayy! 
New best EMA pseudo Dice: 0.0738 +2026-04-08 17:54:27.903992: +2026-04-08 17:54:27.915837: Epoch 67 +2026-04-08 17:54:27.923712: Current learning rate: 0.00939 +2026-04-08 17:56:15.802054: train_loss -0.011 +2026-04-08 17:56:15.811177: val_loss -0.0036 +2026-04-08 17:56:15.814064: Pseudo dice [0.0, 0.0, 0.2591, 0.0, 0.0, 0.0612, 0.5695] +2026-04-08 17:56:15.817621: Epoch time: 107.9 s +2026-04-08 17:56:15.821885: Yayy! New best EMA pseudo Dice: 0.0792 +2026-04-08 17:56:18.658868: +2026-04-08 17:56:18.661210: Epoch 68 +2026-04-08 17:56:18.663496: Current learning rate: 0.00939 +2026-04-08 17:58:03.610740: train_loss -0.0117 +2026-04-08 17:58:03.620431: val_loss 0.0262 +2026-04-08 17:58:03.623800: Pseudo dice [0.0, 0.0, 0.3361, 0.0, 0.0, 0.0334, 0.4953] +2026-04-08 17:58:03.626671: Epoch time: 104.96 s +2026-04-08 17:58:03.630263: Yayy! New best EMA pseudo Dice: 0.0836 +2026-04-08 17:58:06.526029: +2026-04-08 17:58:06.528836: Epoch 69 +2026-04-08 17:58:06.530518: Current learning rate: 0.00938 +2026-04-08 17:59:49.513595: train_loss -0.0128 +2026-04-08 17:59:49.520199: val_loss -0.0246 +2026-04-08 17:59:49.521975: Pseudo dice [0.0, 0.0, 0.2532, 0.0, 0.0, 0.2153, 0.3432] +2026-04-08 17:59:49.524719: Epoch time: 102.99 s +2026-04-08 17:59:49.527278: Yayy! New best EMA pseudo Dice: 0.0868 +2026-04-08 17:59:52.485447: +2026-04-08 17:59:52.487349: Epoch 70 +2026-04-08 17:59:52.489664: Current learning rate: 0.00937 +2026-04-08 18:01:36.167188: train_loss -0.005 +2026-04-08 18:01:36.174544: val_loss 0.0077 +2026-04-08 18:01:36.176729: Pseudo dice [0.0, 0.0, 0.2018, 0.0, 0.0, 0.0322, 0.3798] +2026-04-08 18:01:36.179265: Epoch time: 103.68 s +2026-04-08 18:01:36.181298: Yayy! 
New best EMA pseudo Dice: 0.0869 +2026-04-08 18:01:38.978550: +2026-04-08 18:01:38.989413: Epoch 71 +2026-04-08 18:01:38.992585: Current learning rate: 0.00936 +2026-04-08 18:03:23.411850: train_loss -0.0211 +2026-04-08 18:03:23.418774: val_loss -0.0247 +2026-04-08 18:03:23.420795: Pseudo dice [0.0, 0.0, 0.2177, 0.0, 0.0, 0.2288, 0.5199] +2026-04-08 18:03:23.422647: Epoch time: 104.44 s +2026-04-08 18:03:23.424431: Yayy! New best EMA pseudo Dice: 0.092 +2026-04-08 18:03:26.254836: +2026-04-08 18:03:26.257517: Epoch 72 +2026-04-08 18:03:26.259573: Current learning rate: 0.00935 +2026-04-08 18:05:10.195415: train_loss -0.0058 +2026-04-08 18:05:10.204705: val_loss 0.0332 +2026-04-08 18:05:10.207593: Pseudo dice [0.0, 0.0, 0.3441, 0.0, 0.0, 0.0353, 0.6129] +2026-04-08 18:05:10.209872: Epoch time: 103.94 s +2026-04-08 18:05:10.212936: Yayy! New best EMA pseudo Dice: 0.097 +2026-04-08 18:05:13.470899: +2026-04-08 18:05:13.473322: Epoch 73 +2026-04-08 18:05:13.475044: Current learning rate: 0.00934 +2026-04-08 18:06:57.269567: train_loss -0.0277 +2026-04-08 18:06:57.276728: val_loss 0.014 +2026-04-08 18:06:57.279545: Pseudo dice [0.0, 0.0, 0.2923, 0.0, 0.0, 0.041, 0.5352] +2026-04-08 18:06:57.282471: Epoch time: 103.8 s +2026-04-08 18:06:57.285145: Yayy! New best EMA pseudo Dice: 0.0997 +2026-04-08 18:07:00.282227: +2026-04-08 18:07:00.288934: Epoch 74 +2026-04-08 18:07:00.291766: Current learning rate: 0.00933 +2026-04-08 18:08:44.984735: train_loss -0.0251 +2026-04-08 18:08:44.993361: val_loss 0.0529 +2026-04-08 18:08:44.997070: Pseudo dice [0.0, 0.0, 0.3343, 0.0, 0.0, 0.0247, 0.633] +2026-04-08 18:08:44.999793: Epoch time: 104.71 s +2026-04-08 18:08:45.002729: Yayy! 
New best EMA pseudo Dice: 0.1039 +2026-04-08 18:08:47.911562: +2026-04-08 18:08:47.913506: Epoch 75 +2026-04-08 18:08:47.915124: Current learning rate: 0.00932 +2026-04-08 18:10:32.350964: train_loss -0.0344 +2026-04-08 18:10:32.358794: val_loss -0.0291 +2026-04-08 18:10:32.362411: Pseudo dice [0.0, 0.0, 0.3478, 0.0, 0.0, 0.4262, 0.3844] +2026-04-08 18:10:32.364507: Epoch time: 104.44 s +2026-04-08 18:10:32.367045: Yayy! New best EMA pseudo Dice: 0.1101 +2026-04-08 18:10:35.247110: +2026-04-08 18:10:35.250640: Epoch 76 +2026-04-08 18:10:35.254577: Current learning rate: 0.00931 +2026-04-08 18:12:18.620991: train_loss -0.0214 +2026-04-08 18:12:18.628203: val_loss -0.0134 +2026-04-08 18:12:18.630402: Pseudo dice [0.0, 0.0, 0.5136, 0.0, 0.0, 0.1557, 0.6215] +2026-04-08 18:12:18.633259: Epoch time: 103.38 s +2026-04-08 18:12:18.636307: Yayy! New best EMA pseudo Dice: 0.1175 +2026-04-08 18:12:21.565682: +2026-04-08 18:12:21.568981: Epoch 77 +2026-04-08 18:12:21.571358: Current learning rate: 0.0093 +2026-04-08 18:14:05.272952: train_loss -0.0194 +2026-04-08 18:14:05.279330: val_loss -0.0171 +2026-04-08 18:14:05.281986: Pseudo dice [0.0, 0.0, 0.0813, 0.0, 0.0, 0.2337, 0.6252] +2026-04-08 18:14:05.286113: Epoch time: 103.71 s +2026-04-08 18:14:05.288397: Yayy! 
New best EMA pseudo Dice: 0.1192 +2026-04-08 18:14:08.248913: +2026-04-08 18:14:08.251446: Epoch 78 +2026-04-08 18:14:08.253641: Current learning rate: 0.0093 +2026-04-08 18:15:51.964406: train_loss -0.0193 +2026-04-08 18:15:51.972626: val_loss 0.0089 +2026-04-08 18:15:51.975695: Pseudo dice [0.0, 0.0, 0.2116, 0.0, 0.0, 0.0285, 0.4536] +2026-04-08 18:15:51.978418: Epoch time: 103.72 s +2026-04-08 18:15:53.087198: +2026-04-08 18:15:53.089775: Epoch 79 +2026-04-08 18:15:53.091950: Current learning rate: 0.00929 +2026-04-08 18:17:35.896774: train_loss -0.032 +2026-04-08 18:17:35.902390: val_loss -0.0398 +2026-04-08 18:17:35.904484: Pseudo dice [0.0, 0.0, 0.4895, 0.0, 0.0, 0.3944, 0.297] +2026-04-08 18:17:35.906793: Epoch time: 102.81 s +2026-04-08 18:17:35.914893: Yayy! New best EMA pseudo Dice: 0.1223 +2026-04-08 18:17:38.784569: +2026-04-08 18:17:38.786879: Epoch 80 +2026-04-08 18:17:38.788837: Current learning rate: 0.00928 +2026-04-08 18:19:21.983129: train_loss -0.0435 +2026-04-08 18:19:21.989934: val_loss 0.0385 +2026-04-08 18:19:21.992601: Pseudo dice [0.0, 0.0, 0.3589, 0.0, 0.0, 0.0308, 0.6257] +2026-04-08 18:19:21.996110: Epoch time: 103.2 s +2026-04-08 18:19:22.001312: Yayy! New best EMA pseudo Dice: 0.1246 +2026-04-08 18:19:24.994724: +2026-04-08 18:19:24.998275: Epoch 81 +2026-04-08 18:19:25.000242: Current learning rate: 0.00927 +2026-04-08 18:21:07.734794: train_loss -0.0396 +2026-04-08 18:21:07.740160: val_loss -0.0236 +2026-04-08 18:21:07.742356: Pseudo dice [0.0, 0.0, 0.419, 0.0, 0.0, 0.08, 0.4686] +2026-04-08 18:21:07.744606: Epoch time: 102.74 s +2026-04-08 18:21:07.746562: Yayy! 
New best EMA pseudo Dice: 0.126 +2026-04-08 18:21:10.563199: +2026-04-08 18:21:10.565491: Epoch 82 +2026-04-08 18:21:10.567589: Current learning rate: 0.00926 +2026-04-08 18:22:53.404326: train_loss -0.0371 +2026-04-08 18:22:53.411083: val_loss 0.0137 +2026-04-08 18:22:53.415003: Pseudo dice [0.0, 0.0, 0.2023, 0.0, 0.0, 0.0148, 0.4905] +2026-04-08 18:22:53.418175: Epoch time: 102.84 s +2026-04-08 18:22:54.456028: +2026-04-08 18:22:54.461070: Epoch 83 +2026-04-08 18:22:54.463676: Current learning rate: 0.00925 +2026-04-08 18:24:37.454721: train_loss -0.0184 +2026-04-08 18:24:37.460400: val_loss -0.0588 +2026-04-08 18:24:37.462648: Pseudo dice [0.0, 0.0, 0.5267, 0.0, 0.0, 0.5784, 0.6892] +2026-04-08 18:24:37.464894: Epoch time: 103.0 s +2026-04-08 18:24:37.466700: Yayy! New best EMA pseudo Dice: 0.1368 +2026-04-08 18:24:40.405379: +2026-04-08 18:24:40.407933: Epoch 84 +2026-04-08 18:24:40.411748: Current learning rate: 0.00924 +2026-04-08 18:26:23.718501: train_loss -0.0315 +2026-04-08 18:26:23.724139: val_loss -0.0056 +2026-04-08 18:26:23.726589: Pseudo dice [0.0, 0.0, 0.3735, 0.0, 0.0, 0.0495, 0.548] +2026-04-08 18:26:23.728569: Epoch time: 103.32 s +2026-04-08 18:26:23.730433: Yayy! New best EMA pseudo Dice: 0.137 +2026-04-08 18:26:26.507201: +2026-04-08 18:26:26.509100: Epoch 85 +2026-04-08 18:26:26.510839: Current learning rate: 0.00923 +2026-04-08 18:28:10.257008: train_loss -0.0342 +2026-04-08 18:28:10.263541: val_loss -0.0133 +2026-04-08 18:28:10.266362: Pseudo dice [0.0, 0.0, 0.5964, 0.0, 0.0, 0.0563, 0.618] +2026-04-08 18:28:10.269120: Epoch time: 103.75 s +2026-04-08 18:28:10.272358: Yayy! 
New best EMA pseudo Dice: 0.1414 +2026-04-08 18:28:13.167270: +2026-04-08 18:28:13.169927: Epoch 86 +2026-04-08 18:28:13.171835: Current learning rate: 0.00922 +2026-04-08 18:29:58.353553: train_loss -0.0437 +2026-04-08 18:29:58.359969: val_loss 0.0067 +2026-04-08 18:29:58.364990: Pseudo dice [0.0, 0.0, 0.3192, 0.0, 0.0, 0.0477, 0.5401] +2026-04-08 18:29:58.368256: Epoch time: 105.19 s +2026-04-08 18:29:59.426472: +2026-04-08 18:29:59.435760: Epoch 87 +2026-04-08 18:29:59.437546: Current learning rate: 0.00921 +2026-04-08 18:31:42.681446: train_loss -0.0346 +2026-04-08 18:31:42.690331: val_loss -0.0493 +2026-04-08 18:31:42.693361: Pseudo dice [0.0, 0.0, 0.6575, 0.0, 0.0, 0.1189, 0.5831] +2026-04-08 18:31:42.695981: Epoch time: 103.26 s +2026-04-08 18:31:42.698422: Yayy! New best EMA pseudo Dice: 0.1456 +2026-04-08 18:31:46.694337: +2026-04-08 18:31:46.702190: Epoch 88 +2026-04-08 18:31:46.704390: Current learning rate: 0.0092 +2026-04-08 18:33:29.400263: train_loss -0.026 +2026-04-08 18:33:29.406371: val_loss -0.0274 +2026-04-08 18:33:29.408845: Pseudo dice [0.0, 0.0, 0.4482, 0.0, 0.0, 0.0628, 0.6106] +2026-04-08 18:33:29.425425: Epoch time: 102.71 s +2026-04-08 18:33:29.427858: Yayy! New best EMA pseudo Dice: 0.1471 +2026-04-08 18:33:32.437974: +2026-04-08 18:33:32.440702: Epoch 89 +2026-04-08 18:33:32.444152: Current learning rate: 0.0092 +2026-04-08 18:35:17.233460: train_loss -0.0327 +2026-04-08 18:35:17.244846: val_loss -0.0117 +2026-04-08 18:35:17.261321: Pseudo dice [0.0, 0.0, 0.5513, 0.0, 0.0, 0.0446, 0.597] +2026-04-08 18:35:17.266543: Epoch time: 104.8 s +2026-04-08 18:35:17.270845: Yayy! 
New best EMA pseudo Dice: 0.1494 +2026-04-08 18:35:20.374884: +2026-04-08 18:35:20.397535: Epoch 90 +2026-04-08 18:35:20.403615: Current learning rate: 0.00919 +2026-04-08 18:37:03.461195: train_loss -0.0329 +2026-04-08 18:37:03.491619: val_loss -0.0424 +2026-04-08 18:37:03.495327: Pseudo dice [0.0, 0.0, 0.5075, 0.0, 0.0, 0.1809, 0.2096] +2026-04-08 18:37:03.498321: Epoch time: 103.09 s +2026-04-08 18:37:04.543059: +2026-04-08 18:37:04.550188: Epoch 91 +2026-04-08 18:37:04.552375: Current learning rate: 0.00918 +2026-04-08 18:38:47.386947: train_loss -0.0511 +2026-04-08 18:38:47.392190: val_loss -0.0646 +2026-04-08 18:38:47.394524: Pseudo dice [0.0, 0.0, 0.5586, 0.0, 0.0, 0.3057, 0.3764] +2026-04-08 18:38:47.396492: Epoch time: 102.85 s +2026-04-08 18:38:47.399654: Yayy! New best EMA pseudo Dice: 0.1503 +2026-04-08 18:38:50.311236: +2026-04-08 18:38:50.314543: Epoch 92 +2026-04-08 18:38:50.316756: Current learning rate: 0.00917 +2026-04-08 18:40:33.794095: train_loss -0.0452 +2026-04-08 18:40:33.800077: val_loss -0.0367 +2026-04-08 18:40:33.802667: Pseudo dice [0.0, 0.0, 0.3944, 0.0, 0.0, 0.3867, 0.2231] +2026-04-08 18:40:33.804881: Epoch time: 103.49 s +2026-04-08 18:40:34.827161: +2026-04-08 18:40:34.830310: Epoch 93 +2026-04-08 18:40:34.833267: Current learning rate: 0.00916 +2026-04-08 18:42:17.105741: train_loss -0.0337 +2026-04-08 18:42:17.113183: val_loss -0.0406 +2026-04-08 18:42:17.115683: Pseudo dice [0.0, 0.0, 0.5578, 0.0, 0.0, 0.0674, 0.5333] +2026-04-08 18:42:17.118161: Epoch time: 102.28 s +2026-04-08 18:42:17.120530: Yayy! New best EMA pseudo Dice: 0.1512 +2026-04-08 18:42:19.928715: +2026-04-08 18:42:19.931558: Epoch 94 +2026-04-08 18:42:19.933339: Current learning rate: 0.00915 +2026-04-08 18:44:02.753856: train_loss -0.0498 +2026-04-08 18:44:02.762509: val_loss -0.0488 +2026-04-08 18:44:02.765969: Pseudo dice [0.0, 0.0, 0.1848, 0.0, 0.0, 0.3519, 0.7038] +2026-04-08 18:44:02.768916: Epoch time: 102.83 s +2026-04-08 18:44:02.771915: Yayy! 
New best EMA pseudo Dice: 0.1538 +2026-04-08 18:44:05.633852: +2026-04-08 18:44:05.636535: Epoch 95 +2026-04-08 18:44:05.639252: Current learning rate: 0.00914 +2026-04-08 18:45:48.245681: train_loss -0.0446 +2026-04-08 18:45:48.253354: val_loss -0.0432 +2026-04-08 18:45:48.255991: Pseudo dice [0.0, 0.0, 0.3502, 0.0, 0.0, 0.1632, 0.7179] +2026-04-08 18:45:48.259342: Epoch time: 102.61 s +2026-04-08 18:45:48.262050: Yayy! New best EMA pseudo Dice: 0.156 +2026-04-08 18:45:51.137283: +2026-04-08 18:45:51.139531: Epoch 96 +2026-04-08 18:45:51.141287: Current learning rate: 0.00913 +2026-04-08 18:47:33.684206: train_loss -0.0545 +2026-04-08 18:47:33.690446: val_loss -0.0541 +2026-04-08 18:47:33.692920: Pseudo dice [0.0, 0.0, 0.5567, 0.0, 0.0, 0.1673, 0.506] +2026-04-08 18:47:33.696076: Epoch time: 102.55 s +2026-04-08 18:47:33.698549: Yayy! New best EMA pseudo Dice: 0.158 +2026-04-08 18:47:36.143056: +2026-04-08 18:47:36.145666: Epoch 97 +2026-04-08 18:47:36.147409: Current learning rate: 0.00912 +2026-04-08 18:49:18.895357: train_loss -0.042 +2026-04-08 18:49:18.899913: val_loss -0.0461 +2026-04-08 18:49:18.901486: Pseudo dice [0.0, 0.0, 0.4597, 0.0, 0.0, 0.2824, 0.6883] +2026-04-08 18:49:18.902821: Epoch time: 102.76 s +2026-04-08 18:49:18.904260: Yayy! New best EMA pseudo Dice: 0.1626 +2026-04-08 18:49:21.655052: +2026-04-08 18:49:21.659591: Epoch 98 +2026-04-08 18:49:21.662678: Current learning rate: 0.00911 +2026-04-08 18:51:05.197581: train_loss -0.0535 +2026-04-08 18:51:05.205403: val_loss -0.0483 +2026-04-08 18:51:05.207691: Pseudo dice [0.0, 0.0, 0.4677, 0.0, 0.0, 0.0878, 0.6976] +2026-04-08 18:51:05.210702: Epoch time: 103.55 s +2026-04-08 18:51:05.212731: Yayy! 
New best EMA pseudo Dice: 0.1643 +2026-04-08 18:51:07.986757: +2026-04-08 18:51:07.989301: Epoch 99 +2026-04-08 18:51:07.991016: Current learning rate: 0.0091 +2026-04-08 18:52:51.341675: train_loss -0.0446 +2026-04-08 18:52:51.349804: val_loss -0.0407 +2026-04-08 18:52:51.352496: Pseudo dice [0.0, 0.0, 0.3011, 0.0, 0.0, 0.2508, 0.4657] +2026-04-08 18:52:51.356477: Epoch time: 103.36 s +2026-04-08 18:52:53.942725: +2026-04-08 18:52:53.945654: Epoch 100 +2026-04-08 18:52:53.947827: Current learning rate: 0.0091 +2026-04-08 18:54:36.227700: train_loss -0.0385 +2026-04-08 18:54:36.234806: val_loss -0.0218 +2026-04-08 18:54:36.237847: Pseudo dice [0.0001, 0.0, 0.5872, 0.0, 0.0, 0.0842, 0.7191] +2026-04-08 18:54:36.239840: Epoch time: 102.29 s +2026-04-08 18:54:36.242225: Yayy! New best EMA pseudo Dice: 0.166 +2026-04-08 18:54:39.392097: +2026-04-08 18:54:39.397264: Epoch 101 +2026-04-08 18:54:39.399704: Current learning rate: 0.00909 +2026-04-08 18:56:22.013995: train_loss -0.0555 +2026-04-08 18:56:22.019650: val_loss -0.0194 +2026-04-08 18:56:22.021749: Pseudo dice [0.0308, 0.0, 0.2802, 0.0, 0.0, 0.0365, 0.7672] +2026-04-08 18:56:22.023716: Epoch time: 102.63 s +2026-04-08 18:56:23.070243: +2026-04-08 18:56:23.073525: Epoch 102 +2026-04-08 18:56:23.075850: Current learning rate: 0.00908 +2026-04-08 18:58:06.994434: train_loss -0.0511 +2026-04-08 18:58:07.001816: val_loss -0.0758 +2026-04-08 18:58:07.004042: Pseudo dice [0.1026, 0.0, 0.5086, 0.0, 0.0, 0.5351, 0.7464] +2026-04-08 18:58:07.006981: Epoch time: 103.93 s +2026-04-08 18:58:07.009072: Yayy! 
New best EMA pseudo Dice: 0.1758 +2026-04-08 18:58:09.765101: +2026-04-08 18:58:09.768783: Epoch 103 +2026-04-08 18:58:09.770475: Current learning rate: 0.00907 +2026-04-08 18:59:52.753493: train_loss -0.0684 +2026-04-08 18:59:52.759011: val_loss -0.0171 +2026-04-08 18:59:52.761133: Pseudo dice [0.0966, 0.0, 0.4932, 0.0, 0.0, 0.0504, 0.4498] +2026-04-08 18:59:52.762715: Epoch time: 102.99 s +2026-04-08 18:59:53.789602: +2026-04-08 18:59:53.791596: Epoch 104 +2026-04-08 18:59:53.793332: Current learning rate: 0.00906 +2026-04-08 19:01:36.021343: train_loss -0.0584 +2026-04-08 19:01:36.028111: val_loss -0.0191 +2026-04-08 19:01:36.031837: Pseudo dice [0.2444, 0.0, 0.5442, 0.0, 0.0, 0.1125, 0.4603] +2026-04-08 19:01:36.034518: Epoch time: 102.23 s +2026-04-08 19:01:36.036550: Yayy! New best EMA pseudo Dice: 0.1759 +2026-04-08 19:01:38.768121: +2026-04-08 19:01:38.770822: Epoch 105 +2026-04-08 19:01:38.772635: Current learning rate: 0.00905 +2026-04-08 19:03:21.783386: train_loss -0.0514 +2026-04-08 19:03:21.788268: val_loss -0.0062 +2026-04-08 19:03:21.790202: Pseudo dice [0.2637, 0.0, 0.46, 0.0, 0.0, 0.0123, 0.7804] +2026-04-08 19:03:21.792109: Epoch time: 103.02 s +2026-04-08 19:03:21.794172: Yayy! 
New best EMA pseudo Dice: 0.18 +2026-04-08 19:03:24.638496: +2026-04-08 19:03:24.641140: Epoch 106 +2026-04-08 19:03:24.644055: Current learning rate: 0.00904 +2026-04-08 19:05:06.823015: train_loss -0.0501 +2026-04-08 19:05:06.828286: val_loss -0.0229 +2026-04-08 19:05:06.830897: Pseudo dice [0.3651, 0.0, 0.1691, 0.0, 0.0, 0.053, 0.421] +2026-04-08 19:05:06.833567: Epoch time: 102.19 s +2026-04-08 19:05:07.869297: +2026-04-08 19:05:07.871455: Epoch 107 +2026-04-08 19:05:07.874167: Current learning rate: 0.00903 +2026-04-08 19:06:50.424798: train_loss -0.0425 +2026-04-08 19:06:50.431213: val_loss -0.0535 +2026-04-08 19:06:50.433932: Pseudo dice [0.3839, 0.0, 0.5842, 0.0, 0.0, 0.3402, 0.6888] +2026-04-08 19:06:50.436003: Epoch time: 102.56 s +2026-04-08 19:06:50.437758: Yayy! New best EMA pseudo Dice: 0.1873 +2026-04-08 19:06:52.982420: +2026-04-08 19:06:52.984885: Epoch 108 +2026-04-08 19:06:52.986538: Current learning rate: 0.00902 +2026-04-08 19:08:34.665880: train_loss -0.0586 +2026-04-08 19:08:34.673239: val_loss -0.055 +2026-04-08 19:08:34.676426: Pseudo dice [0.4764, 0.0, 0.5361, 0.0, 0.0, 0.5157, 0.7482] +2026-04-08 19:08:34.678841: Epoch time: 101.69 s +2026-04-08 19:08:34.680552: Yayy! 
New best EMA pseudo Dice: 0.2011 +2026-04-08 19:08:37.374275: +2026-04-08 19:08:37.376601: Epoch 109 +2026-04-08 19:08:37.378091: Current learning rate: 0.00901 +2026-04-08 19:10:18.763064: train_loss -0.0621 +2026-04-08 19:10:18.768824: val_loss -0.0463 +2026-04-08 19:10:18.771760: Pseudo dice [0.1841, 0.0, 0.2194, 0.0, 0.0, 0.0571, 0.3183] +2026-04-08 19:10:18.773590: Epoch time: 101.39 s +2026-04-08 19:10:19.825722: +2026-04-08 19:10:19.827600: Epoch 110 +2026-04-08 19:10:19.829373: Current learning rate: 0.009 +2026-04-08 19:12:02.094361: train_loss -0.0657 +2026-04-08 19:12:02.099757: val_loss -0.069 +2026-04-08 19:12:02.101613: Pseudo dice [0.1052, 0.0, 0.5781, 0.0, 0.0, 0.198, 0.7989] +2026-04-08 19:12:02.104848: Epoch time: 102.27 s +2026-04-08 19:12:03.159127: +2026-04-08 19:12:03.161081: Epoch 111 +2026-04-08 19:12:03.162975: Current learning rate: 0.009 +2026-04-08 19:13:44.955935: train_loss -0.0607 +2026-04-08 19:13:44.967001: val_loss -0.0865 +2026-04-08 19:13:44.968929: Pseudo dice [0.5615, 0.0, 0.3479, 0.0, 0.0, 0.5056, 0.5564] +2026-04-08 19:13:44.970647: Epoch time: 101.8 s +2026-04-08 19:13:44.972825: Yayy! New best EMA pseudo Dice: 0.2053 +2026-04-08 19:13:47.754585: +2026-04-08 19:13:47.756679: Epoch 112 +2026-04-08 19:13:47.758298: Current learning rate: 0.00899 +2026-04-08 19:15:29.285732: train_loss -0.0685 +2026-04-08 19:15:29.291447: val_loss -0.0697 +2026-04-08 19:15:29.293227: Pseudo dice [0.0854, 0.0, 0.567, 0.0, 0.0, 0.4155, 0.5597] +2026-04-08 19:15:29.295812: Epoch time: 101.53 s +2026-04-08 19:15:29.297551: Yayy! 
New best EMA pseudo Dice: 0.2081 +2026-04-08 19:15:31.994230: +2026-04-08 19:15:31.997101: Epoch 113 +2026-04-08 19:15:31.998698: Current learning rate: 0.00898 +2026-04-08 19:17:14.313000: train_loss -0.0721 +2026-04-08 19:17:14.318252: val_loss -0.0446 +2026-04-08 19:17:14.320340: Pseudo dice [0.0857, 0.0, 0.3896, 0.0, 0.0, 0.0332, 0.6428] +2026-04-08 19:17:14.322316: Epoch time: 102.32 s +2026-04-08 19:17:15.384866: +2026-04-08 19:17:15.387267: Epoch 114 +2026-04-08 19:17:15.388967: Current learning rate: 0.00897 +2026-04-08 19:18:58.626692: train_loss -0.061 +2026-04-08 19:18:58.634802: val_loss -0.0454 +2026-04-08 19:18:58.637854: Pseudo dice [0.3302, 0.0, 0.5802, 0.0, 0.0, 0.0478, 0.5884] +2026-04-08 19:18:58.640330: Epoch time: 103.25 s +2026-04-08 19:18:59.712500: +2026-04-08 19:18:59.714964: Epoch 115 +2026-04-08 19:18:59.716792: Current learning rate: 0.00896 +2026-04-08 19:20:41.978532: train_loss -0.0691 +2026-04-08 19:20:41.984458: val_loss -0.0801 +2026-04-08 19:20:41.989003: Pseudo dice [0.4021, 0.0, 0.4276, 0.0003, 0.0, 0.5598, 0.7316] +2026-04-08 19:20:41.991221: Epoch time: 102.27 s +2026-04-08 19:20:41.993392: Yayy! 
New best EMA pseudo Dice: 0.2152 +2026-04-08 19:20:44.488092: +2026-04-08 19:20:44.491279: Epoch 116 +2026-04-08 19:20:44.493067: Current learning rate: 0.00895 +2026-04-08 19:22:25.776469: train_loss -0.073 +2026-04-08 19:22:25.785525: val_loss 0.0096 +2026-04-08 19:22:25.788656: Pseudo dice [0.2577, 0.0, 0.4091, 0.0, 0.0063, 0.0433, 0.5776] +2026-04-08 19:22:25.792035: Epoch time: 101.29 s +2026-04-08 19:22:26.889965: +2026-04-08 19:22:26.892390: Epoch 117 +2026-04-08 19:22:26.894284: Current learning rate: 0.00894 +2026-04-08 19:24:08.814399: train_loss -0.0761 +2026-04-08 19:24:08.819703: val_loss -0.0065 +2026-04-08 19:24:08.822198: Pseudo dice [0.1128, 0.0, 0.5528, 0.0074, 0.0038, 0.0385, 0.5586] +2026-04-08 19:24:08.824102: Epoch time: 101.93 s +2026-04-08 19:24:09.898668: +2026-04-08 19:24:09.901813: Epoch 118 +2026-04-08 19:24:09.904248: Current learning rate: 0.00893 +2026-04-08 19:25:51.986228: train_loss -0.0646 +2026-04-08 19:25:51.992405: val_loss -0.0751 +2026-04-08 19:25:51.994806: Pseudo dice [0.1666, 0.0, 0.6065, 0.1598, 0.0514, 0.6741, 0.6309] +2026-04-08 19:25:51.996680: Epoch time: 102.09 s +2026-04-08 19:25:51.998737: Yayy! New best EMA pseudo Dice: 0.2209 +2026-04-08 19:25:54.758716: +2026-04-08 19:25:54.761556: Epoch 119 +2026-04-08 19:25:54.763351: Current learning rate: 0.00892 +2026-04-08 19:27:37.854702: train_loss -0.0721 +2026-04-08 19:27:37.860904: val_loss -0.0382 +2026-04-08 19:27:37.864122: Pseudo dice [0.3349, 0.0, 0.4887, 0.0089, 0.1187, 0.0321, 0.7089] +2026-04-08 19:27:37.866769: Epoch time: 103.1 s +2026-04-08 19:27:37.868729: Yayy! 
New best EMA pseudo Dice: 0.223 +2026-04-08 19:27:40.280328: +2026-04-08 19:27:40.284408: Epoch 120 +2026-04-08 19:27:40.286839: Current learning rate: 0.00891 +2026-04-08 19:29:23.648062: train_loss -0.0767 +2026-04-08 19:29:23.655592: val_loss -0.0686 +2026-04-08 19:29:23.658436: Pseudo dice [0.2942, 0.0, 0.3614, 0.0001, 0.155, 0.229, 0.3695] +2026-04-08 19:29:23.660812: Epoch time: 103.37 s +2026-04-08 19:29:24.733588: +2026-04-08 19:29:24.735960: Epoch 121 +2026-04-08 19:29:24.738983: Current learning rate: 0.0089 +2026-04-08 19:31:07.559728: train_loss -0.0631 +2026-04-08 19:31:07.565633: val_loss -0.0494 +2026-04-08 19:31:07.568118: Pseudo dice [0.4747, 0.0, 0.524, 0.0, 0.0396, 0.0701, 0.4772] +2026-04-08 19:31:07.569983: Epoch time: 102.83 s +2026-04-08 19:31:08.641584: +2026-04-08 19:31:08.643885: Epoch 122 +2026-04-08 19:31:08.645649: Current learning rate: 0.00889 +2026-04-08 19:32:50.563560: train_loss -0.0779 +2026-04-08 19:32:50.589593: val_loss -0.0783 +2026-04-08 19:32:50.594008: Pseudo dice [0.6176, 0.1339, 0.5572, 0.0001, 0.1948, 0.2629, 0.3752] +2026-04-08 19:32:50.597529: Epoch time: 101.93 s +2026-04-08 19:32:50.600517: Yayy! New best EMA pseudo Dice: 0.2299 +2026-04-08 19:32:54.170132: +2026-04-08 19:32:54.173446: Epoch 123 +2026-04-08 19:32:54.175385: Current learning rate: 0.00889 +2026-04-08 19:34:36.769466: train_loss -0.0765 +2026-04-08 19:34:36.777388: val_loss -0.0889 +2026-04-08 19:34:36.779572: Pseudo dice [0.3581, 0.1421, 0.4765, 0.0293, 0.2026, 0.4641, 0.2986] +2026-04-08 19:34:36.781995: Epoch time: 102.6 s +2026-04-08 19:34:36.784144: Yayy! 
New best EMA pseudo Dice: 0.235 +2026-04-08 19:34:39.644922: +2026-04-08 19:34:39.647919: Epoch 124 +2026-04-08 19:34:39.649864: Current learning rate: 0.00888 +2026-04-08 19:36:21.355497: train_loss -0.0864 +2026-04-08 19:36:21.360841: val_loss 0.0093 +2026-04-08 19:36:21.362678: Pseudo dice [0.4424, 0.2334, 0.2821, 0.0001, 0.2925, 0.0618, 0.6633] +2026-04-08 19:36:21.364293: Epoch time: 101.71 s +2026-04-08 19:36:21.366154: Yayy! New best EMA pseudo Dice: 0.2398 +2026-04-08 19:36:24.529445: +2026-04-08 19:36:24.531948: Epoch 125 +2026-04-08 19:36:24.533833: Current learning rate: 0.00887 +2026-04-08 19:38:07.714824: train_loss -0.0754 +2026-04-08 19:38:07.720267: val_loss -0.0698 +2026-04-08 19:38:07.722362: Pseudo dice [0.255, 0.17, 0.3197, 0.031, 0.148, 0.1135, 0.5669] +2026-04-08 19:38:07.724243: Epoch time: 103.19 s +2026-04-08 19:38:08.795917: +2026-04-08 19:38:08.798566: Epoch 126 +2026-04-08 19:38:08.800897: Current learning rate: 0.00886 +2026-04-08 19:39:51.018186: train_loss -0.0713 +2026-04-08 19:39:51.024708: val_loss -0.04 +2026-04-08 19:39:51.026750: Pseudo dice [0.0093, 0.4873, 0.5235, 0.0535, 0.3359, 0.0894, 0.5509] +2026-04-08 19:39:51.028458: Epoch time: 102.23 s +2026-04-08 19:39:51.030435: Yayy! New best EMA pseudo Dice: 0.2441 +2026-04-08 19:39:53.493045: +2026-04-08 19:39:53.495789: Epoch 127 +2026-04-08 19:39:53.497453: Current learning rate: 0.00885 +2026-04-08 19:41:34.816323: train_loss -0.0648 +2026-04-08 19:41:34.822439: val_loss -0.0429 +2026-04-08 19:41:34.824829: Pseudo dice [0.3466, 0.2402, 0.4258, 0.0051, 0.1687, 0.016, 0.7169] +2026-04-08 19:41:34.827773: Epoch time: 101.33 s +2026-04-08 19:41:34.830194: Yayy! 
New best EMA pseudo Dice: 0.2471 +2026-04-08 19:41:37.698921: +2026-04-08 19:41:37.701364: Epoch 128 +2026-04-08 19:41:37.703459: Current learning rate: 0.00884 +2026-04-08 19:43:20.853551: train_loss -0.0867 +2026-04-08 19:43:20.860212: val_loss -0.0461 +2026-04-08 19:43:20.863780: Pseudo dice [0.4019, 0.5791, 0.2983, 0.1629, 0.2002, 0.0654, 0.5601] +2026-04-08 19:43:20.866134: Epoch time: 103.16 s +2026-04-08 19:43:20.868030: Yayy! New best EMA pseudo Dice: 0.2548 +2026-04-08 19:43:23.725767: +2026-04-08 19:43:23.727766: Epoch 129 +2026-04-08 19:43:23.729723: Current learning rate: 0.00883 +2026-04-08 19:45:06.441270: train_loss -0.093 +2026-04-08 19:45:06.447718: val_loss -0.1125 +2026-04-08 19:45:06.449860: Pseudo dice [0.2322, 0.3555, 0.5915, 0.0009, 0.3055, 0.6881, 0.7494] +2026-04-08 19:45:06.452251: Epoch time: 102.72 s +2026-04-08 19:45:06.454324: Yayy! New best EMA pseudo Dice: 0.2711 +2026-04-08 19:45:09.331462: +2026-04-08 19:45:09.334053: Epoch 130 +2026-04-08 19:45:09.336521: Current learning rate: 0.00882 +2026-04-08 19:46:51.623975: train_loss -0.0964 +2026-04-08 19:46:51.630769: val_loss -0.0581 +2026-04-08 19:46:51.633279: Pseudo dice [0.445, 0.1346, 0.3001, 0.1365, 0.3994, 0.0262, 0.6882] +2026-04-08 19:46:51.635468: Epoch time: 102.3 s +2026-04-08 19:46:51.637884: Yayy! New best EMA pseudo Dice: 0.2744 +2026-04-08 19:46:54.214615: +2026-04-08 19:46:54.217163: Epoch 131 +2026-04-08 19:46:54.218846: Current learning rate: 0.00881 +2026-04-08 19:48:36.812234: train_loss -0.102 +2026-04-08 19:48:36.819167: val_loss -0.0155 +2026-04-08 19:48:36.822330: Pseudo dice [0.6967, 0.1894, 0.3539, 0.105, 0.271, 0.0457, 0.6772] +2026-04-08 19:48:36.826483: Epoch time: 102.6 s +2026-04-08 19:48:36.828877: Yayy! 
New best EMA pseudo Dice: 0.2804 +2026-04-08 19:48:39.611098: +2026-04-08 19:48:39.614724: Epoch 132 +2026-04-08 19:48:39.616541: Current learning rate: 0.0088 +2026-04-08 19:50:22.413379: train_loss -0.1019 +2026-04-08 19:50:22.422723: val_loss -0.0727 +2026-04-08 19:50:22.431917: Pseudo dice [0.4939, 0.4244, 0.512, 0.04, 0.2851, 0.3174, 0.6121] +2026-04-08 19:50:22.440108: Epoch time: 102.81 s +2026-04-08 19:50:22.443220: Yayy! New best EMA pseudo Dice: 0.2907 +2026-04-08 19:50:24.944638: +2026-04-08 19:50:24.949439: Epoch 133 +2026-04-08 19:50:24.953225: Current learning rate: 0.00879 +2026-04-08 19:52:07.266910: train_loss -0.0942 +2026-04-08 19:52:07.272281: val_loss -0.1043 +2026-04-08 19:52:07.275546: Pseudo dice [0.3037, 0.1697, 0.5293, 0.0925, 0.3475, 0.5651, 0.7365] +2026-04-08 19:52:07.277623: Epoch time: 102.33 s +2026-04-08 19:52:07.280049: Yayy! New best EMA pseudo Dice: 0.3008 +2026-04-08 19:52:10.334943: +2026-04-08 19:52:10.338322: Epoch 134 +2026-04-08 19:52:10.340466: Current learning rate: 0.00879 +2026-04-08 19:53:55.119970: train_loss -0.0988 +2026-04-08 19:53:55.137532: val_loss -0.0736 +2026-04-08 19:53:55.140502: Pseudo dice [0.0866, 0.3625, 0.344, 0.0918, 0.2591, 0.2492, 0.6654] +2026-04-08 19:53:55.142681: Epoch time: 104.79 s +2026-04-08 19:53:56.226939: +2026-04-08 19:53:56.229421: Epoch 135 +2026-04-08 19:53:56.231523: Current learning rate: 0.00878 +2026-04-08 19:55:39.048718: train_loss -0.0854 +2026-04-08 19:55:39.055258: val_loss -0.1045 +2026-04-08 19:55:39.057729: Pseudo dice [0.532, 0.5298, 0.6397, 0.0018, 0.1712, 0.1746, 0.6325] +2026-04-08 19:55:39.061965: Epoch time: 102.82 s +2026-04-08 19:55:39.065026: Yayy! 
New best EMA pseudo Dice: 0.3084 +2026-04-08 19:55:41.641379: +2026-04-08 19:55:41.644234: Epoch 136 +2026-04-08 19:55:41.646102: Current learning rate: 0.00877 +2026-04-08 19:57:23.947474: train_loss -0.1022 +2026-04-08 19:57:23.954762: val_loss -0.0769 +2026-04-08 19:57:23.957842: Pseudo dice [0.7034, 0.1248, 0.4967, 0.3226, 0.4322, 0.0786, 0.6627] +2026-04-08 19:57:23.961590: Epoch time: 102.31 s +2026-04-08 19:57:23.964305: Yayy! New best EMA pseudo Dice: 0.3179 +2026-04-08 19:57:26.847357: +2026-04-08 19:57:26.850035: Epoch 137 +2026-04-08 19:57:26.852022: Current learning rate: 0.00876 +2026-04-08 19:59:09.814338: train_loss -0.098 +2026-04-08 19:59:09.829760: val_loss -0.0699 +2026-04-08 19:59:09.835808: Pseudo dice [0.3567, 0.5169, 0.3951, 0.0123, 0.2219, 0.0966, 0.7265] +2026-04-08 19:59:09.839460: Epoch time: 102.97 s +2026-04-08 19:59:09.841548: Yayy! New best EMA pseudo Dice: 0.3193 +2026-04-08 19:59:12.368965: +2026-04-08 19:59:12.372273: Epoch 138 +2026-04-08 19:59:12.374569: Current learning rate: 0.00875 +2026-04-08 20:00:55.278445: train_loss -0.09 +2026-04-08 20:00:55.284985: val_loss -0.0987 +2026-04-08 20:00:55.287467: Pseudo dice [0.6575, 0.3444, 0.5402, 0.0563, 0.1057, 0.3268, 0.6512] +2026-04-08 20:00:55.289634: Epoch time: 102.91 s +2026-04-08 20:00:55.291837: Yayy! 
New best EMA pseudo Dice: 0.3257 +2026-04-08 20:00:59.037777: +2026-04-08 20:00:59.040530: Epoch 139 +2026-04-08 20:00:59.042920: Current learning rate: 0.00874 +2026-04-08 20:02:41.328181: train_loss -0.1131 +2026-04-08 20:02:41.335690: val_loss -0.0921 +2026-04-08 20:02:41.337642: Pseudo dice [0.2227, 0.429, 0.4091, 0.0147, 0.3294, 0.1004, 0.6763] +2026-04-08 20:02:41.339439: Epoch time: 102.29 s +2026-04-08 20:02:42.430888: +2026-04-08 20:02:42.432761: Epoch 140 +2026-04-08 20:02:42.434704: Current learning rate: 0.00873 +2026-04-08 20:04:25.263227: train_loss -0.0931 +2026-04-08 20:04:25.269937: val_loss -0.0058 +2026-04-08 20:04:25.273024: Pseudo dice [0.3743, 0.3349, 0.3436, 0.1194, 0.0799, 0.0158, 0.6595] +2026-04-08 20:04:25.276024: Epoch time: 102.84 s +2026-04-08 20:04:26.356491: +2026-04-08 20:04:26.359522: Epoch 141 +2026-04-08 20:04:26.363204: Current learning rate: 0.00872 +2026-04-08 20:06:09.168747: train_loss -0.0959 +2026-04-08 20:06:09.176256: val_loss -0.0945 +2026-04-08 20:06:09.178165: Pseudo dice [0.1888, 0.2334, 0.6414, 0.2786, 0.2879, 0.4124, 0.7322] +2026-04-08 20:06:09.180028: Epoch time: 102.82 s +2026-04-08 20:06:09.182511: Yayy! New best EMA pseudo Dice: 0.3271 +2026-04-08 20:06:12.036159: +2026-04-08 20:06:12.038157: Epoch 142 +2026-04-08 20:06:12.039912: Current learning rate: 0.00871 +2026-04-08 20:07:54.709736: train_loss -0.1029 +2026-04-08 20:07:54.714039: val_loss -0.1018 +2026-04-08 20:07:54.716096: Pseudo dice [0.3218, 0.7494, 0.6883, 0.1192, 0.4441, 0.093, 0.7366] +2026-04-08 20:07:54.718409: Epoch time: 102.68 s +2026-04-08 20:07:54.720971: Yayy! 
New best EMA pseudo Dice: 0.3394 +2026-04-08 20:07:57.528909: +2026-04-08 20:07:57.530970: Epoch 143 +2026-04-08 20:07:57.533377: Current learning rate: 0.0087 +2026-04-08 20:09:40.397853: train_loss -0.1101 +2026-04-08 20:09:40.403686: val_loss -0.1258 +2026-04-08 20:09:40.405558: Pseudo dice [0.3171, 0.3876, 0.579, 0.3407, 0.3067, 0.5213, 0.6351] +2026-04-08 20:09:40.407190: Epoch time: 102.87 s +2026-04-08 20:09:40.410275: Yayy! New best EMA pseudo Dice: 0.3496 +2026-04-08 20:09:43.324774: +2026-04-08 20:09:43.327695: Epoch 144 +2026-04-08 20:09:43.329740: Current learning rate: 0.00869 +2026-04-08 20:11:26.036366: train_loss -0.1084 +2026-04-08 20:11:26.043703: val_loss -0.0494 +2026-04-08 20:11:26.045569: Pseudo dice [0.7715, 0.5101, 0.355, 0.0499, 0.4707, 0.0473, 0.6835] +2026-04-08 20:11:26.047870: Epoch time: 102.71 s +2026-04-08 20:11:26.050581: Yayy! New best EMA pseudo Dice: 0.3559 +2026-04-08 20:11:28.990047: +2026-04-08 20:11:28.992074: Epoch 145 +2026-04-08 20:11:28.994169: Current learning rate: 0.00868 +2026-04-08 20:13:12.942140: train_loss -0.0863 +2026-04-08 20:13:12.950790: val_loss -0.0709 +2026-04-08 20:13:12.956396: Pseudo dice [0.3486, 0.4631, 0.5495, 0.4727, 0.3664, 0.1189, 0.7488] +2026-04-08 20:13:12.959390: Epoch time: 103.96 s +2026-04-08 20:13:12.961887: Yayy! New best EMA pseudo Dice: 0.3641 +2026-04-08 20:13:15.935320: +2026-04-08 20:13:15.937706: Epoch 146 +2026-04-08 20:13:15.940323: Current learning rate: 0.00868 +2026-04-08 20:14:58.612046: train_loss -0.0977 +2026-04-08 20:14:58.619261: val_loss -0.0854 +2026-04-08 20:14:58.621218: Pseudo dice [0.2015, 0.3505, 0.341, 0.0182, 0.5041, 0.6982, 0.8] +2026-04-08 20:14:58.623477: Epoch time: 102.68 s +2026-04-08 20:14:58.625792: Yayy! 
New best EMA pseudo Dice: 0.3693 +2026-04-08 20:15:01.453499: +2026-04-08 20:15:01.455707: Epoch 147 +2026-04-08 20:15:01.457503: Current learning rate: 0.00867 +2026-04-08 20:16:43.157543: train_loss -0.1046 +2026-04-08 20:16:43.164822: val_loss -0.1273 +2026-04-08 20:16:43.166923: Pseudo dice [0.4896, 0.1892, 0.6068, 0.608, 0.4582, 0.6765, 0.7108] +2026-04-08 20:16:43.168831: Epoch time: 101.71 s +2026-04-08 20:16:43.170852: Yayy! New best EMA pseudo Dice: 0.3858 +2026-04-08 20:16:45.991129: +2026-04-08 20:16:45.993516: Epoch 148 +2026-04-08 20:16:45.995283: Current learning rate: 0.00866 +2026-04-08 20:18:28.573872: train_loss -0.1161 +2026-04-08 20:18:28.581093: val_loss -0.0616 +2026-04-08 20:18:28.583198: Pseudo dice [0.5404, 0.8347, 0.6041, 0.1528, 0.2222, 0.0338, 0.6772] +2026-04-08 20:18:28.585987: Epoch time: 102.59 s +2026-04-08 20:18:28.588859: Yayy! New best EMA pseudo Dice: 0.391 +2026-04-08 20:18:31.478038: +2026-04-08 20:18:31.480144: Epoch 149 +2026-04-08 20:18:31.482300: Current learning rate: 0.00865 +2026-04-08 20:20:14.308999: train_loss -0.1228 +2026-04-08 20:20:14.315537: val_loss -0.0658 +2026-04-08 20:20:14.317336: Pseudo dice [0.4423, 0.8337, 0.5057, 0.5445, 0.4598, 0.0723, 0.7285] +2026-04-08 20:20:14.319586: Epoch time: 102.83 s +2026-04-08 20:20:15.652996: Yayy! New best EMA pseudo Dice: 0.4032 +2026-04-08 20:20:18.476029: +2026-04-08 20:20:18.477873: Epoch 150 +2026-04-08 20:20:18.479599: Current learning rate: 0.00864 +2026-04-08 20:22:00.188248: train_loss -0.1112 +2026-04-08 20:22:00.194931: val_loss -0.0928 +2026-04-08 20:22:00.197119: Pseudo dice [0.5023, 0.5491, 0.5905, 0.2754, 0.2705, 0.6941, 0.3427] +2026-04-08 20:22:00.199459: Epoch time: 101.72 s +2026-04-08 20:22:00.202031: Yayy! 
New best EMA pseudo Dice: 0.4089 +2026-04-08 20:22:03.012749: +2026-04-08 20:22:03.015288: Epoch 151 +2026-04-08 20:22:03.017191: Current learning rate: 0.00863 +2026-04-08 20:23:46.484412: train_loss -0.1143 +2026-04-08 20:23:46.491372: val_loss -0.0996 +2026-04-08 20:23:46.494304: Pseudo dice [0.2005, 0.7408, 0.4841, 0.6425, 0.4287, 0.156, 0.7454] +2026-04-08 20:23:46.496572: Epoch time: 103.47 s +2026-04-08 20:23:46.498728: Yayy! New best EMA pseudo Dice: 0.4166 +2026-04-08 20:23:49.364578: +2026-04-08 20:23:49.368198: Epoch 152 +2026-04-08 20:23:49.370511: Current learning rate: 0.00862 +2026-04-08 20:25:31.900796: train_loss -0.1347 +2026-04-08 20:25:31.908591: val_loss -0.0599 +2026-04-08 20:25:31.910756: Pseudo dice [0.4684, 0.6087, 0.5407, 0.57, 0.2208, 0.054, 0.7487] +2026-04-08 20:25:31.912852: Epoch time: 102.54 s +2026-04-08 20:25:31.915244: Yayy! New best EMA pseudo Dice: 0.4208 +2026-04-08 20:25:34.786442: +2026-04-08 20:25:34.788884: Epoch 153 +2026-04-08 20:25:34.794672: Current learning rate: 0.00861 +2026-04-08 20:27:17.473257: train_loss -0.1171 +2026-04-08 20:27:17.479074: val_loss -0.1148 +2026-04-08 20:27:17.481862: Pseudo dice [0.6563, 0.7358, 0.6131, 0.2844, 0.4148, 0.0731, 0.7985] +2026-04-08 20:27:17.484345: Epoch time: 102.69 s +2026-04-08 20:27:17.486817: Yayy! New best EMA pseudo Dice: 0.4298 +2026-04-08 20:27:20.298122: +2026-04-08 20:27:20.300693: Epoch 154 +2026-04-08 20:27:20.302422: Current learning rate: 0.0086 +2026-04-08 20:29:02.315520: train_loss -0.1286 +2026-04-08 20:29:02.320073: val_loss -0.1053 +2026-04-08 20:29:02.322489: Pseudo dice [0.4888, 0.6875, 0.7954, 0.0986, 0.2843, 0.139, 0.8196] +2026-04-08 20:29:02.324972: Epoch time: 102.02 s +2026-04-08 20:29:02.327066: Yayy! 
New best EMA pseudo Dice: 0.4341 +2026-04-08 20:29:05.957976: +2026-04-08 20:29:05.959793: Epoch 155 +2026-04-08 20:29:05.961618: Current learning rate: 0.00859 +2026-04-08 20:30:47.720538: train_loss -0.1172 +2026-04-08 20:30:47.727183: val_loss -0.1008 +2026-04-08 20:30:47.730561: Pseudo dice [0.5685, 0.6677, 0.5785, 0.3324, 0.426, 0.1383, 0.3649] +2026-04-08 20:30:47.732970: Epoch time: 101.77 s +2026-04-08 20:30:47.735525: Yayy! New best EMA pseudo Dice: 0.4347 +2026-04-08 20:30:50.632431: +2026-04-08 20:30:50.634746: Epoch 156 +2026-04-08 20:30:50.636499: Current learning rate: 0.00858 +2026-04-08 20:32:33.200236: train_loss -0.1151 +2026-04-08 20:32:33.207618: val_loss -0.1193 +2026-04-08 20:32:33.210478: Pseudo dice [0.2783, 0.7249, 0.6986, 0.4193, 0.5543, 0.5926, 0.6584] +2026-04-08 20:32:33.213199: Epoch time: 102.57 s +2026-04-08 20:32:33.215775: Yayy! New best EMA pseudo Dice: 0.4473 +2026-04-08 20:32:36.694770: +2026-04-08 20:32:36.697385: Epoch 157 +2026-04-08 20:32:36.699976: Current learning rate: 0.00858 +2026-04-08 20:34:19.761731: train_loss -0.1159 +2026-04-08 20:34:19.769883: val_loss -0.1051 +2026-04-08 20:34:19.773987: Pseudo dice [0.4512, 0.6142, 0.6852, 0.0627, 0.4327, 0.1549, 0.5906] +2026-04-08 20:34:19.777899: Epoch time: 103.07 s +2026-04-08 20:34:20.926316: +2026-04-08 20:34:20.928579: Epoch 158 +2026-04-08 20:34:20.930500: Current learning rate: 0.00857 +2026-04-08 20:36:04.307483: train_loss -0.1133 +2026-04-08 20:36:04.315155: val_loss -0.0746 +2026-04-08 20:36:04.318952: Pseudo dice [0.6736, 0.3271, 0.5365, 0.0747, 0.218, 0.0446, 0.4716] +2026-04-08 20:36:04.321978: Epoch time: 103.38 s +2026-04-08 20:36:05.496774: +2026-04-08 20:36:05.499696: Epoch 159 +2026-04-08 20:36:05.503000: Current learning rate: 0.00856 +2026-04-08 20:37:48.537761: train_loss -0.1099 +2026-04-08 20:37:48.543240: val_loss -0.1047 +2026-04-08 20:37:48.546190: Pseudo dice [0.1857, 0.4289, 0.5324, 0.5365, 0.2481, 0.1414, 0.3957] +2026-04-08 20:37:48.548187: 
Epoch time: 103.04 s +2026-04-08 20:37:49.671746: +2026-04-08 20:37:49.673372: Epoch 160 +2026-04-08 20:37:49.674939: Current learning rate: 0.00855 +2026-04-08 20:39:32.784067: train_loss -0.1133 +2026-04-08 20:39:32.789101: val_loss -0.0845 +2026-04-08 20:39:32.791233: Pseudo dice [0.3937, 0.3097, 0.6495, 0.1731, 0.36, 0.0793, 0.4308] +2026-04-08 20:39:32.793335: Epoch time: 103.12 s +2026-04-08 20:39:33.921845: +2026-04-08 20:39:33.923594: Epoch 161 +2026-04-08 20:39:33.926270: Current learning rate: 0.00854 +2026-04-08 20:41:15.941244: train_loss -0.11 +2026-04-08 20:41:15.949427: val_loss -0.1284 +2026-04-08 20:41:15.952422: Pseudo dice [0.0744, 0.6898, 0.6591, 0.302, 0.5411, 0.7105, 0.6148] +2026-04-08 20:41:15.956103: Epoch time: 102.02 s +2026-04-08 20:41:17.089148: +2026-04-08 20:41:17.092107: Epoch 162 +2026-04-08 20:41:17.094004: Current learning rate: 0.00853 +2026-04-08 20:42:59.549115: train_loss -0.1192 +2026-04-08 20:42:59.555570: val_loss -0.0939 +2026-04-08 20:42:59.558297: Pseudo dice [0.6071, 0.3344, 0.4428, 0.2806, 0.2781, 0.1324, 0.5663] +2026-04-08 20:42:59.561597: Epoch time: 102.46 s +2026-04-08 20:43:00.689332: +2026-04-08 20:43:00.693214: Epoch 163 +2026-04-08 20:43:00.694894: Current learning rate: 0.00852 +2026-04-08 20:44:43.324089: train_loss -0.1147 +2026-04-08 20:44:43.340694: val_loss -0.0872 +2026-04-08 20:44:43.343036: Pseudo dice [0.629, 0.3469, 0.5319, 0.0001, 0.3224, 0.0938, 0.8167] +2026-04-08 20:44:43.354304: Epoch time: 102.64 s +2026-04-08 20:44:44.500586: +2026-04-08 20:44:44.502769: Epoch 164 +2026-04-08 20:44:44.504750: Current learning rate: 0.00851 +2026-04-08 20:46:27.610373: train_loss -0.1095 +2026-04-08 20:46:27.617268: val_loss -0.1011 +2026-04-08 20:46:27.619603: Pseudo dice [0.5357, 0.5761, 0.482, 0.0133, 0.2315, 0.0926, 0.6439] +2026-04-08 20:46:27.622644: Epoch time: 103.11 s +2026-04-08 20:46:28.700881: +2026-04-08 20:46:28.703163: Epoch 165 +2026-04-08 20:46:28.705570: Current learning rate: 0.0085 
+2026-04-08 20:48:11.296740: train_loss -0.1296 +2026-04-08 20:48:11.304997: val_loss -0.0434 +2026-04-08 20:48:11.307931: Pseudo dice [0.2086, 0.0791, 0.4903, 0.0, 0.2387, 0.046, 0.3531] +2026-04-08 20:48:11.314310: Epoch time: 102.6 s +2026-04-08 20:48:12.384004: +2026-04-08 20:48:12.386130: Epoch 166 +2026-04-08 20:48:12.388483: Current learning rate: 0.00849 +2026-04-08 20:49:59.293393: train_loss -0.1321 +2026-04-08 20:49:59.302512: val_loss -0.077 +2026-04-08 20:49:59.308310: Pseudo dice [0.5613, 0.3404, 0.6063, 0.4404, 0.3264, 0.0503, 0.5462] +2026-04-08 20:49:59.312201: Epoch time: 106.91 s +2026-04-08 20:50:00.401347: +2026-04-08 20:50:00.405308: Epoch 167 +2026-04-08 20:50:00.408159: Current learning rate: 0.00848 +2026-04-08 20:51:44.719809: train_loss -0.1216 +2026-04-08 20:51:44.729431: val_loss -0.1249 +2026-04-08 20:51:44.732875: Pseudo dice [0.4912, 0.7477, 0.395, 0.1829, 0.3913, 0.4842, 0.664] +2026-04-08 20:51:44.736594: Epoch time: 104.32 s +2026-04-08 20:51:45.855611: +2026-04-08 20:51:45.861401: Epoch 168 +2026-04-08 20:51:45.866816: Current learning rate: 0.00847 +2026-04-08 20:53:29.735618: train_loss -0.1142 +2026-04-08 20:53:29.742442: val_loss -0.0994 +2026-04-08 20:53:29.745771: Pseudo dice [0.4208, 0.551, 0.3118, 0.6541, 0.3906, 0.5775, 0.1212] +2026-04-08 20:53:29.749085: Epoch time: 103.88 s +2026-04-08 20:53:30.850865: +2026-04-08 20:53:30.853348: Epoch 169 +2026-04-08 20:53:30.855203: Current learning rate: 0.00847 +2026-04-08 20:55:15.740951: train_loss -0.1203 +2026-04-08 20:55:15.750196: val_loss -0.048 +2026-04-08 20:55:15.752750: Pseudo dice [0.3959, 0.5334, 0.4671, 0.2098, 0.3535, 0.1009, 0.6248] +2026-04-08 20:55:15.755330: Epoch time: 104.89 s +2026-04-08 20:55:16.869961: +2026-04-08 20:55:16.872634: Epoch 170 +2026-04-08 20:55:16.875075: Current learning rate: 0.00846 +2026-04-08 20:57:02.860344: train_loss -0.108 +2026-04-08 20:57:02.867557: val_loss -0.0926 +2026-04-08 20:57:02.870539: Pseudo dice [0.4739, 0.5963, 0.6558, 
0.4864, 0.0558, 0.5909, 0.3045] +2026-04-08 20:57:02.878015: Epoch time: 105.99 s +2026-04-08 20:57:03.993152: +2026-04-08 20:57:03.995459: Epoch 171 +2026-04-08 20:57:03.997623: Current learning rate: 0.00845 +2026-04-08 20:58:47.943625: train_loss -0.1125 +2026-04-08 20:58:47.952317: val_loss -0.1096 +2026-04-08 20:58:47.955372: Pseudo dice [0.6797, 0.846, 0.4838, 0.5303, 0.4444, 0.2786, 0.3429] +2026-04-08 20:58:47.960593: Epoch time: 103.95 s +2026-04-08 20:58:49.053488: +2026-04-08 20:58:49.055863: Epoch 172 +2026-04-08 20:58:49.058331: Current learning rate: 0.00844 +2026-04-08 21:00:31.939831: train_loss -0.1202 +2026-04-08 21:00:31.945841: val_loss -0.0748 +2026-04-08 21:00:31.948645: Pseudo dice [0.4924, 0.8079, 0.3988, 0.2413, 0.2798, 0.1315, 0.7141] +2026-04-08 21:00:31.951356: Epoch time: 102.89 s +2026-04-08 21:00:33.042257: +2026-04-08 21:00:33.044682: Epoch 173 +2026-04-08 21:00:33.047083: Current learning rate: 0.00843 +2026-04-08 21:02:16.096486: train_loss -0.1316 +2026-04-08 21:02:16.102067: val_loss 0.008 +2026-04-08 21:02:16.104648: Pseudo dice [0.288, 0.743, 0.6357, 0.4266, 0.4687, 0.0308, 0.5211] +2026-04-08 21:02:16.106550: Epoch time: 103.06 s +2026-04-08 21:02:18.344009: +2026-04-08 21:02:18.346075: Epoch 174 +2026-04-08 21:02:18.347883: Current learning rate: 0.00842 +2026-04-08 21:04:00.592802: train_loss -0.1307 +2026-04-08 21:04:00.600180: val_loss -0.1054 +2026-04-08 21:04:00.602707: Pseudo dice [0.124, 0.2819, 0.6095, 0.4097, 0.4378, 0.1933, 0.7104] +2026-04-08 21:04:00.605679: Epoch time: 102.25 s +2026-04-08 21:04:01.722611: +2026-04-08 21:04:01.724724: Epoch 175 +2026-04-08 21:04:01.727056: Current learning rate: 0.00841 +2026-04-08 21:05:45.117390: train_loss -0.1324 +2026-04-08 21:05:45.127678: val_loss -0.0977 +2026-04-08 21:05:45.129939: Pseudo dice [0.5031, 0.5791, 0.3936, 0.1219, 0.4533, 0.1764, 0.5899] +2026-04-08 21:05:45.134304: Epoch time: 103.4 s +2026-04-08 21:05:46.445569: +2026-04-08 21:05:46.447954: Epoch 176 
+2026-04-08 21:05:46.450565: Current learning rate: 0.0084 +2026-04-08 21:07:29.643755: train_loss -0.1218 +2026-04-08 21:07:29.650651: val_loss -0.0566 +2026-04-08 21:07:29.653356: Pseudo dice [0.5748, 0.444, 0.3316, 0.1256, 0.3968, 0.0384, 0.5751] +2026-04-08 21:07:29.656937: Epoch time: 103.2 s +2026-04-08 21:07:30.782283: +2026-04-08 21:07:30.785680: Epoch 177 +2026-04-08 21:07:30.787807: Current learning rate: 0.00839 +2026-04-08 21:09:13.364878: train_loss -0.1265 +2026-04-08 21:09:13.371068: val_loss -0.1022 +2026-04-08 21:09:13.373426: Pseudo dice [0.4388, 0.8282, 0.6237, 0.3187, 0.2657, 0.1425, 0.3766] +2026-04-08 21:09:13.375880: Epoch time: 102.59 s +2026-04-08 21:09:14.507653: +2026-04-08 21:09:14.509812: Epoch 178 +2026-04-08 21:09:14.512379: Current learning rate: 0.00838 +2026-04-08 21:10:58.170700: train_loss -0.1205 +2026-04-08 21:10:58.176763: val_loss -0.1086 +2026-04-08 21:10:58.178892: Pseudo dice [0.5555, 0.6493, 0.7412, 0.3092, 0.4633, 0.4064, 0.4281] +2026-04-08 21:10:58.181753: Epoch time: 103.67 s +2026-04-08 21:10:59.303028: +2026-04-08 21:10:59.304921: Epoch 179 +2026-04-08 21:10:59.306912: Current learning rate: 0.00837 +2026-04-08 21:12:42.147093: train_loss -0.1229 +2026-04-08 21:12:42.152318: val_loss -0.0932 +2026-04-08 21:12:42.154842: Pseudo dice [0.43, 0.4665, 0.3151, 0.5361, 0.4078, 0.09, 0.661] +2026-04-08 21:12:42.157173: Epoch time: 102.85 s +2026-04-08 21:12:43.282393: +2026-04-08 21:12:43.284151: Epoch 180 +2026-04-08 21:12:43.286381: Current learning rate: 0.00836 +2026-04-08 21:14:25.994528: train_loss -0.1188 +2026-04-08 21:14:26.001810: val_loss -0.1216 +2026-04-08 21:14:26.003990: Pseudo dice [0.3531, 0.6733, 0.7977, 0.4527, 0.3713, 0.2101, 0.3215] +2026-04-08 21:14:26.006288: Epoch time: 102.72 s +2026-04-08 21:14:27.141130: +2026-04-08 21:14:27.143128: Epoch 181 +2026-04-08 21:14:27.145102: Current learning rate: 0.00836 +2026-04-08 21:16:10.303447: train_loss -0.1277 +2026-04-08 21:16:10.312630: val_loss -0.1058 
+2026-04-08 21:16:10.315471: Pseudo dice [0.4756, 0.3115, 0.5935, 0.5611, 0.4034, 0.212, 0.4266] +2026-04-08 21:16:10.317593: Epoch time: 103.17 s +2026-04-08 21:16:11.417921: +2026-04-08 21:16:11.419751: Epoch 182 +2026-04-08 21:16:11.421667: Current learning rate: 0.00835 +2026-04-08 21:17:54.471272: train_loss -0.1391 +2026-04-08 21:17:54.479081: val_loss -0.0972 +2026-04-08 21:17:54.481800: Pseudo dice [0.1264, 0.5642, 0.4423, 0.1372, 0.2316, 0.0994, 0.5769] +2026-04-08 21:17:54.484955: Epoch time: 103.06 s +2026-04-08 21:17:55.586903: +2026-04-08 21:17:55.589475: Epoch 183 +2026-04-08 21:17:55.591753: Current learning rate: 0.00834 +2026-04-08 21:19:39.860811: train_loss -0.1368 +2026-04-08 21:19:39.872638: val_loss -0.1454 +2026-04-08 21:19:39.876311: Pseudo dice [0.2014, 0.8115, 0.6448, 0.5209, 0.3122, 0.7524, 0.6624] +2026-04-08 21:19:39.880482: Epoch time: 104.28 s +2026-04-08 21:19:41.022368: +2026-04-08 21:19:41.025371: Epoch 184 +2026-04-08 21:19:41.028607: Current learning rate: 0.00833 +2026-04-08 21:21:24.010984: train_loss -0.1485 +2026-04-08 21:21:24.017962: val_loss -0.0495 +2026-04-08 21:21:24.020873: Pseudo dice [0.7422, 0.7868, 0.4074, 0.109, 0.4293, 0.1163, 0.6532] +2026-04-08 21:21:24.023221: Epoch time: 102.99 s +2026-04-08 21:21:25.157895: +2026-04-08 21:21:25.160103: Epoch 185 +2026-04-08 21:21:25.162358: Current learning rate: 0.00832 +2026-04-08 21:23:07.937995: train_loss -0.1332 +2026-04-08 21:23:07.944079: val_loss -0.0775 +2026-04-08 21:23:07.947117: Pseudo dice [0.1005, 0.4648, 0.4797, 0.1305, 0.308, 0.0878, 0.524] +2026-04-08 21:23:07.950281: Epoch time: 102.78 s +2026-04-08 21:23:09.036948: +2026-04-08 21:23:09.039294: Epoch 186 +2026-04-08 21:23:09.041473: Current learning rate: 0.00831 +2026-04-08 21:24:51.575573: train_loss -0.1286 +2026-04-08 21:24:51.581699: val_loss -0.0873 +2026-04-08 21:24:51.584239: Pseudo dice [0.2538, 0.3644, 0.496, 0.1406, 0.2639, 0.2899, 0.3672] +2026-04-08 21:24:51.591027: Epoch time: 102.54 s 
+2026-04-08 21:24:52.704142: +2026-04-08 21:24:52.706920: Epoch 187 +2026-04-08 21:24:52.709462: Current learning rate: 0.0083 +2026-04-08 21:26:34.751529: train_loss -0.1337 +2026-04-08 21:26:34.758563: val_loss -0.1243 +2026-04-08 21:26:34.761349: Pseudo dice [0.2691, 0.6987, 0.3947, 0.5351, 0.545, 0.5233, 0.7872] +2026-04-08 21:26:34.764369: Epoch time: 102.05 s +2026-04-08 21:26:35.872406: +2026-04-08 21:26:35.874647: Epoch 188 +2026-04-08 21:26:35.876765: Current learning rate: 0.00829 +2026-04-08 21:28:19.001086: train_loss -0.1368 +2026-04-08 21:28:19.009555: val_loss -0.1064 +2026-04-08 21:28:19.013623: Pseudo dice [0.1713, 0.1716, 0.5655, 0.0036, 0.5105, 0.1781, 0.7021] +2026-04-08 21:28:19.017772: Epoch time: 103.13 s +2026-04-08 21:28:20.139251: +2026-04-08 21:28:20.142790: Epoch 189 +2026-04-08 21:28:20.147410: Current learning rate: 0.00828 +2026-04-08 21:30:04.021523: train_loss -0.1311 +2026-04-08 21:30:04.027737: val_loss -0.0577 +2026-04-08 21:30:04.029515: Pseudo dice [0.3395, 0.5321, 0.5277, 0.6482, 0.2639, 0.0578, 0.2687] +2026-04-08 21:30:04.031079: Epoch time: 103.89 s +2026-04-08 21:30:05.138275: +2026-04-08 21:30:05.140433: Epoch 190 +2026-04-08 21:30:05.142932: Current learning rate: 0.00827 +2026-04-08 21:31:47.820026: train_loss -0.1329 +2026-04-08 21:31:47.826769: val_loss -0.1045 +2026-04-08 21:31:47.829057: Pseudo dice [0.3791, 0.8157, 0.255, 0.4002, 0.3752, 0.1442, 0.8408] +2026-04-08 21:31:47.831339: Epoch time: 102.68 s +2026-04-08 21:31:48.914843: +2026-04-08 21:31:48.916954: Epoch 191 +2026-04-08 21:31:48.919811: Current learning rate: 0.00826 +2026-04-08 21:33:31.746572: train_loss -0.1343 +2026-04-08 21:33:31.754068: val_loss -0.131 +2026-04-08 21:33:31.756594: Pseudo dice [0.604, 0.4582, 0.5879, 0.2825, 0.3754, 0.7445, 0.4162] +2026-04-08 21:33:31.759298: Epoch time: 102.83 s +2026-04-08 21:33:32.873331: +2026-04-08 21:33:32.875451: Epoch 192 +2026-04-08 21:33:32.877422: Current learning rate: 0.00825 +2026-04-08 
21:35:15.323285: train_loss -0.1352 +2026-04-08 21:35:15.328765: val_loss -0.1208 +2026-04-08 21:35:15.331172: Pseudo dice [0.4749, 0.8273, 0.6941, 0.2969, 0.4948, 0.3147, 0.4456] +2026-04-08 21:35:15.333387: Epoch time: 102.45 s +2026-04-08 21:35:16.438417: +2026-04-08 21:35:16.441415: Epoch 193 +2026-04-08 21:35:16.445883: Current learning rate: 0.00824 +2026-04-08 21:37:01.487761: train_loss -0.1303 +2026-04-08 21:37:01.496048: val_loss -0.1081 +2026-04-08 21:37:01.499742: Pseudo dice [0.3104, 0.3038, 0.7467, 0.4591, 0.3457, 0.6146, 0.2028] +2026-04-08 21:37:01.502758: Epoch time: 105.05 s +2026-04-08 21:37:02.629047: +2026-04-08 21:37:02.631148: Epoch 194 +2026-04-08 21:37:02.633507: Current learning rate: 0.00824 +2026-04-08 21:38:45.596588: train_loss -0.1327 +2026-04-08 21:38:45.601467: val_loss -0.1015 +2026-04-08 21:38:45.603173: Pseudo dice [0.5316, 0.3574, 0.418, 0.4792, 0.346, 0.2045, 0.3837] +2026-04-08 21:38:45.605170: Epoch time: 102.97 s +2026-04-08 21:38:46.732061: +2026-04-08 21:38:46.735302: Epoch 195 +2026-04-08 21:38:46.737906: Current learning rate: 0.00823 +2026-04-08 21:40:28.673095: train_loss -0.1272 +2026-04-08 21:40:28.679285: val_loss -0.1264 +2026-04-08 21:40:28.681909: Pseudo dice [0.4779, 0.6675, 0.609, 0.1596, 0.4819, 0.2467, 0.8336] +2026-04-08 21:40:28.684021: Epoch time: 101.94 s +2026-04-08 21:40:29.799923: +2026-04-08 21:40:29.803327: Epoch 196 +2026-04-08 21:40:29.805919: Current learning rate: 0.00822 +2026-04-08 21:42:13.293297: train_loss -0.146 +2026-04-08 21:42:13.299916: val_loss -0.1284 +2026-04-08 21:42:13.302439: Pseudo dice [0.4334, 0.2516, 0.5252, 0.1429, 0.5093, 0.4571, 0.5124] +2026-04-08 21:42:13.304390: Epoch time: 103.5 s +2026-04-08 21:42:14.442932: +2026-04-08 21:42:14.445350: Epoch 197 +2026-04-08 21:42:14.447196: Current learning rate: 0.00821 +2026-04-08 21:43:58.773625: train_loss -0.1298 +2026-04-08 21:43:58.784926: val_loss -0.0841 +2026-04-08 21:43:58.789036: Pseudo dice [0.158, 0.4769, 0.6352, 0.3535, 
0.4479, 0.0953, 0.7169] +2026-04-08 21:43:58.794873: Epoch time: 104.33 s +2026-04-08 21:43:59.893558: +2026-04-08 21:43:59.896004: Epoch 198 +2026-04-08 21:43:59.899183: Current learning rate: 0.0082 +2026-04-08 21:45:42.553018: train_loss -0.1437 +2026-04-08 21:45:42.558800: val_loss -0.1113 +2026-04-08 21:45:42.561460: Pseudo dice [0.3985, 0.5832, 0.4836, 0.0, 0.4615, 0.1288, 0.5316] +2026-04-08 21:45:42.564149: Epoch time: 102.66 s +2026-04-08 21:45:43.678606: +2026-04-08 21:45:43.681082: Epoch 199 +2026-04-08 21:45:43.684067: Current learning rate: 0.00819 +2026-04-08 21:47:26.786347: train_loss -0.1458 +2026-04-08 21:47:26.793925: val_loss -0.1072 +2026-04-08 21:47:26.796522: Pseudo dice [0.6258, 0.7483, 0.5939, 0.5062, 0.4791, 0.0571, 0.5808] +2026-04-08 21:47:26.799350: Epoch time: 103.11 s +2026-04-08 21:47:29.672745: +2026-04-08 21:47:29.675475: Epoch 200 +2026-04-08 21:47:29.677467: Current learning rate: 0.00818 +2026-04-08 21:49:12.349684: train_loss -0.1364 +2026-04-08 21:49:12.356339: val_loss -0.0477 +2026-04-08 21:49:12.358543: Pseudo dice [0.4071, 0.4895, 0.6956, 0.1397, 0.145, 0.067, 0.4914] +2026-04-08 21:49:12.360939: Epoch time: 102.68 s +2026-04-08 21:49:13.511518: +2026-04-08 21:49:13.513845: Epoch 201 +2026-04-08 21:49:13.516021: Current learning rate: 0.00817 +2026-04-08 21:50:57.997070: train_loss -0.152 +2026-04-08 21:50:58.003916: val_loss -0.051 +2026-04-08 21:50:58.007192: Pseudo dice [0.4338, 0.3347, 0.5656, 0.7048, 0.4556, 0.0744, 0.7405] +2026-04-08 21:50:58.010336: Epoch time: 104.49 s +2026-04-08 21:50:59.115148: +2026-04-08 21:50:59.117751: Epoch 202 +2026-04-08 21:50:59.120232: Current learning rate: 0.00816 +2026-04-08 21:52:43.296888: train_loss -0.1475 +2026-04-08 21:52:43.303794: val_loss -0.1014 +2026-04-08 21:52:43.306154: Pseudo dice [0.2565, 0.8011, 0.6792, 0.5515, 0.435, 0.0656, 0.5009] +2026-04-08 21:52:43.309237: Epoch time: 104.18 s +2026-04-08 21:52:44.430701: +2026-04-08 21:52:44.432813: Epoch 203 +2026-04-08 
21:52:44.435316: Current learning rate: 0.00815 +2026-04-08 21:54:27.393361: train_loss -0.1405 +2026-04-08 21:54:27.407177: val_loss -0.1416 +2026-04-08 21:54:27.410931: Pseudo dice [0.4933, 0.7672, 0.6323, 0.5366, 0.3554, 0.7893, 0.7566] +2026-04-08 21:54:27.413293: Epoch time: 102.97 s +2026-04-08 21:54:27.415625: Yayy! New best EMA pseudo Dice: 0.4508 +2026-04-08 21:54:30.306293: +2026-04-08 21:54:30.309515: Epoch 204 +2026-04-08 21:54:30.311327: Current learning rate: 0.00814 +2026-04-08 21:56:13.534444: train_loss -0.1338 +2026-04-08 21:56:13.540285: val_loss -0.1182 +2026-04-08 21:56:13.542841: Pseudo dice [0.5011, 0.2884, 0.4133, 0.4325, 0.3605, 0.489, 0.471] +2026-04-08 21:56:13.545442: Epoch time: 103.23 s +2026-04-08 21:56:14.674310: +2026-04-08 21:56:14.677362: Epoch 205 +2026-04-08 21:56:14.679889: Current learning rate: 0.00813 +2026-04-08 21:57:57.717789: train_loss -0.1297 +2026-04-08 21:57:57.726364: val_loss -0.1143 +2026-04-08 21:57:57.729011: Pseudo dice [0.1659, 0.5369, 0.5877, 0.0012, 0.6606, 0.2175, 0.629] +2026-04-08 21:57:57.732238: Epoch time: 103.05 s +2026-04-08 21:57:58.802258: +2026-04-08 21:57:58.805203: Epoch 206 +2026-04-08 21:57:58.807783: Current learning rate: 0.00813 +2026-04-08 21:59:41.892534: train_loss -0.1255 +2026-04-08 21:59:41.899936: val_loss -0.1194 +2026-04-08 21:59:41.902549: Pseudo dice [0.6593, 0.3161, 0.6562, 0.3737, 0.3707, 0.8412, 0.4218] +2026-04-08 21:59:41.906702: Epoch time: 103.09 s +2026-04-08 21:59:41.908988: Yayy! New best EMA pseudo Dice: 0.4508 +2026-04-08 21:59:44.710106: +2026-04-08 21:59:44.724125: Epoch 207 +2026-04-08 21:59:44.731295: Current learning rate: 0.00812 +2026-04-08 22:01:29.795888: train_loss -0.1488 +2026-04-08 22:01:29.805362: val_loss -0.1037 +2026-04-08 22:01:29.808335: Pseudo dice [0.6768, 0.3113, 0.7036, 0.4158, 0.6035, 0.0569, 0.6311] +2026-04-08 22:01:29.810927: Epoch time: 105.09 s +2026-04-08 22:01:29.813297: Yayy! 
New best EMA pseudo Dice: 0.4543 +2026-04-08 22:01:32.744722: +2026-04-08 22:01:32.747321: Epoch 208 +2026-04-08 22:01:32.748892: Current learning rate: 0.00811 +2026-04-08 22:03:17.013878: train_loss -0.142 +2026-04-08 22:03:17.022126: val_loss -0.0957 +2026-04-08 22:03:17.024404: Pseudo dice [0.2685, 0.4415, 0.5984, 0.07, 0.3403, 0.0712, 0.6203] +2026-04-08 22:03:17.026910: Epoch time: 104.27 s +2026-04-08 22:03:18.102476: +2026-04-08 22:03:18.105234: Epoch 209 +2026-04-08 22:03:18.107466: Current learning rate: 0.0081 +2026-04-08 22:05:01.372170: train_loss -0.1307 +2026-04-08 22:05:01.379790: val_loss -0.1365 +2026-04-08 22:05:01.382623: Pseudo dice [0.7925, 0.4799, 0.5197, 0.4714, 0.5587, 0.5161, 0.6395] +2026-04-08 22:05:01.385473: Epoch time: 103.27 s +2026-04-08 22:05:01.387760: Yayy! New best EMA pseudo Dice: 0.4558 +2026-04-08 22:05:04.324057: +2026-04-08 22:05:04.329710: Epoch 210 +2026-04-08 22:05:04.334181: Current learning rate: 0.00809 +2026-04-08 22:06:47.356739: train_loss -0.1477 +2026-04-08 22:06:47.364211: val_loss -0.1188 +2026-04-08 22:06:47.367476: Pseudo dice [0.7165, 0.4656, 0.6847, 0.0008, 0.4605, 0.6873, 0.4764] +2026-04-08 22:06:47.370457: Epoch time: 103.04 s +2026-04-08 22:06:47.373181: Yayy! New best EMA pseudo Dice: 0.4601 +2026-04-08 22:06:51.184963: +2026-04-08 22:06:51.189058: Epoch 211 +2026-04-08 22:06:51.192290: Current learning rate: 0.00808 +2026-04-08 22:08:35.781440: train_loss -0.1455 +2026-04-08 22:08:35.788299: val_loss -0.0662 +2026-04-08 22:08:35.790246: Pseudo dice [0.2872, 0.7628, 0.6148, 0.5795, 0.5397, 0.0551, 0.5798] +2026-04-08 22:08:35.792819: Epoch time: 104.6 s +2026-04-08 22:08:35.795580: Yayy! 
New best EMA pseudo Dice: 0.4629 +2026-04-08 22:08:38.913937: +2026-04-08 22:08:38.916307: Epoch 212 +2026-04-08 22:08:38.918208: Current learning rate: 0.00807 +2026-04-08 22:10:24.308632: train_loss -0.142 +2026-04-08 22:10:24.314768: val_loss -0.1083 +2026-04-08 22:10:24.317320: Pseudo dice [0.5138, 0.2353, 0.4543, 0.551, 0.5383, 0.137, 0.7245] +2026-04-08 22:10:24.321082: Epoch time: 105.4 s +2026-04-08 22:10:25.385138: +2026-04-08 22:10:25.388494: Epoch 213 +2026-04-08 22:10:25.392436: Current learning rate: 0.00806 +2026-04-08 22:12:10.971445: train_loss -0.1376 +2026-04-08 22:12:10.978292: val_loss -0.0854 +2026-04-08 22:12:10.981380: Pseudo dice [0.2152, 0.7598, 0.5634, 0.0294, 0.4733, 0.131, 0.674] +2026-04-08 22:12:10.983450: Epoch time: 105.59 s +2026-04-08 22:12:12.026335: +2026-04-08 22:12:12.028872: Epoch 214 +2026-04-08 22:12:12.031779: Current learning rate: 0.00805 +2026-04-08 22:13:56.574408: train_loss -0.1497 +2026-04-08 22:13:56.582903: val_loss -0.1172 +2026-04-08 22:13:56.585240: Pseudo dice [0.722, 0.5934, 0.5744, 0.5214, 0.5285, 0.1154, 0.7465] +2026-04-08 22:13:56.587834: Epoch time: 104.55 s +2026-04-08 22:13:56.589704: Yayy! New best EMA pseudo Dice: 0.4649 +2026-04-08 22:13:59.451092: +2026-04-08 22:13:59.454543: Epoch 215 +2026-04-08 22:13:59.456891: Current learning rate: 0.00804 +2026-04-08 22:15:43.495461: train_loss -0.1491 +2026-04-08 22:15:43.504946: val_loss -0.132 +2026-04-08 22:15:43.507684: Pseudo dice [0.455, 0.5059, 0.5375, 0.5379, 0.4204, 0.1845, 0.8322] +2026-04-08 22:15:43.518671: Epoch time: 104.05 s +2026-04-08 22:15:43.542832: Yayy! 
New best EMA pseudo Dice: 0.468 +2026-04-08 22:15:46.455580: +2026-04-08 22:15:46.458356: Epoch 216 +2026-04-08 22:15:46.460503: Current learning rate: 0.00803 +2026-04-08 22:17:34.014418: train_loss -0.1417 +2026-04-08 22:17:34.022416: val_loss -0.0696 +2026-04-08 22:17:34.025225: Pseudo dice [0.3578, 0.3874, 0.4009, 0.6156, 0.5534, 0.0622, 0.4224] +2026-04-08 22:17:34.028200: Epoch time: 107.56 s +2026-04-08 22:17:35.095554: +2026-04-08 22:17:35.098766: Epoch 217 +2026-04-08 22:17:35.103760: Current learning rate: 0.00802 +2026-04-08 22:19:28.881024: train_loss -0.1566 +2026-04-08 22:19:28.886575: val_loss -0.105 +2026-04-08 22:19:28.889143: Pseudo dice [0.5058, 0.7079, 0.5192, 0.0111, 0.3329, 0.064, 0.628] +2026-04-08 22:19:28.893248: Epoch time: 113.79 s +2026-04-08 22:19:29.945645: +2026-04-08 22:19:29.948500: Epoch 218 +2026-04-08 22:19:29.950943: Current learning rate: 0.00801 +2026-04-08 22:21:13.458396: train_loss -0.1478 +2026-04-08 22:21:13.467181: val_loss -0.1044 +2026-04-08 22:21:13.472431: Pseudo dice [0.4621, 0.5682, 0.6069, 0.6289, 0.3695, 0.0638, 0.554] +2026-04-08 22:21:13.475355: Epoch time: 103.52 s +2026-04-08 22:21:14.530947: +2026-04-08 22:21:14.533118: Epoch 219 +2026-04-08 22:21:14.535460: Current learning rate: 0.00801 +2026-04-08 22:23:03.592336: train_loss -0.1386 +2026-04-08 22:23:03.600188: val_loss -0.0772 +2026-04-08 22:23:03.603033: Pseudo dice [0.4269, 0.6037, 0.4618, 0.0003, 0.5485, 0.051, 0.6247] +2026-04-08 22:23:03.605813: Epoch time: 109.06 s +2026-04-08 22:23:04.649894: +2026-04-08 22:23:04.654510: Epoch 220 +2026-04-08 22:23:04.657315: Current learning rate: 0.008 +2026-04-08 22:24:48.288301: train_loss -0.142 +2026-04-08 22:24:48.295888: val_loss -0.06 +2026-04-08 22:24:48.298715: Pseudo dice [0.7177, 0.2668, 0.6045, 0.4197, 0.3411, 0.0552, 0.729] +2026-04-08 22:24:48.301499: Epoch time: 103.64 s +2026-04-08 22:24:49.334133: +2026-04-08 22:24:49.336071: Epoch 221 +2026-04-08 22:24:49.339298: Current learning rate: 0.00799 
+2026-04-08 22:26:31.495964: train_loss -0.1276 +2026-04-08 22:26:31.502987: val_loss -0.0852 +2026-04-08 22:26:31.505410: Pseudo dice [0.5019, 0.4517, 0.3544, 0.6936, 0.6172, 0.0393, 0.6061] +2026-04-08 22:26:31.507225: Epoch time: 102.17 s +2026-04-08 22:26:32.557797: +2026-04-08 22:26:32.560087: Epoch 222 +2026-04-08 22:26:32.562467: Current learning rate: 0.00798 +2026-04-08 22:28:15.559521: train_loss -0.1495 +2026-04-08 22:28:15.565367: val_loss -0.1466 +2026-04-08 22:28:15.568071: Pseudo dice [0.4084, 0.4516, 0.6088, 0.5214, 0.4251, 0.8185, 0.6718] +2026-04-08 22:28:15.570640: Epoch time: 103.0 s +2026-04-08 22:28:16.660383: +2026-04-08 22:28:16.663097: Epoch 223 +2026-04-08 22:28:16.665581: Current learning rate: 0.00797 +2026-04-08 22:29:59.493286: train_loss -0.1633 +2026-04-08 22:29:59.510400: val_loss -0.1451 +2026-04-08 22:29:59.516850: Pseudo dice [0.4608, 0.7273, 0.5703, 0.5439, 0.5258, 0.7251, 0.8644] +2026-04-08 22:29:59.520063: Epoch time: 102.84 s +2026-04-08 22:29:59.522728: Yayy! 
New best EMA pseudo Dice: 0.4782 +2026-04-08 22:30:02.422785: +2026-04-08 22:30:02.426865: Epoch 224 +2026-04-08 22:30:02.429532: Current learning rate: 0.00796 +2026-04-08 22:31:44.398020: train_loss -0.1508 +2026-04-08 22:31:44.407783: val_loss -0.1387 +2026-04-08 22:31:44.410777: Pseudo dice [0.7176, 0.2089, 0.5127, 0.0302, 0.4598, 0.2542, 0.753] +2026-04-08 22:31:44.413380: Epoch time: 101.98 s +2026-04-08 22:31:45.473273: +2026-04-08 22:31:45.475658: Epoch 225 +2026-04-08 22:31:45.478633: Current learning rate: 0.00795 +2026-04-08 22:33:27.756875: train_loss -0.1509 +2026-04-08 22:33:27.762472: val_loss -0.0224 +2026-04-08 22:33:27.764320: Pseudo dice [0.1603, 0.4299, 0.5537, 0.5817, 0.2414, 0.0418, 0.7099] +2026-04-08 22:33:27.766295: Epoch time: 102.29 s +2026-04-08 22:33:28.812734: +2026-04-08 22:33:28.816648: Epoch 226 +2026-04-08 22:33:28.818357: Current learning rate: 0.00794 +2026-04-08 22:35:11.420646: train_loss -0.1517 +2026-04-08 22:35:11.427511: val_loss -0.0665 +2026-04-08 22:35:11.429513: Pseudo dice [0.4746, 0.2605, 0.4902, 0.5513, 0.3228, 0.0564, 0.478] +2026-04-08 22:35:11.433383: Epoch time: 102.61 s +2026-04-08 22:35:12.473196: +2026-04-08 22:35:12.475856: Epoch 227 +2026-04-08 22:35:12.479135: Current learning rate: 0.00793 +2026-04-08 22:36:55.069433: train_loss -0.1438 +2026-04-08 22:36:55.076181: val_loss -0.1246 +2026-04-08 22:36:55.078382: Pseudo dice [0.6527, 0.413, 0.6344, 0.1246, 0.3262, 0.4131, 0.7225] +2026-04-08 22:36:55.081201: Epoch time: 102.6 s +2026-04-08 22:36:56.136063: +2026-04-08 22:36:56.138427: Epoch 228 +2026-04-08 22:36:56.140550: Current learning rate: 0.00792 +2026-04-08 22:38:38.932314: train_loss -0.1531 +2026-04-08 22:38:38.939458: val_loss -0.0666 +2026-04-08 22:38:38.941275: Pseudo dice [0.6302, 0.4489, 0.5323, 0.6866, 0.2937, 0.0853, 0.3606] +2026-04-08 22:38:38.943436: Epoch time: 102.8 s +2026-04-08 22:38:40.007051: +2026-04-08 22:38:40.009555: Epoch 229 +2026-04-08 22:38:40.011889: Current learning rate: 
0.00791 +2026-04-08 22:40:23.846255: train_loss -0.148 +2026-04-08 22:40:23.852672: val_loss -0.0907 +2026-04-08 22:40:23.855074: Pseudo dice [0.4866, 0.4296, 0.5248, 0.3875, 0.5102, 0.136, 0.6751] +2026-04-08 22:40:23.856935: Epoch time: 103.84 s +2026-04-08 22:40:25.885153: +2026-04-08 22:40:25.887586: Epoch 230 +2026-04-08 22:40:25.889479: Current learning rate: 0.0079 +2026-04-08 22:42:09.088689: train_loss -0.1499 +2026-04-08 22:42:09.096242: val_loss -0.1331 +2026-04-08 22:42:09.101859: Pseudo dice [0.7559, 0.5153, 0.6091, 0.4612, 0.4539, 0.5541, 0.8083] +2026-04-08 22:42:09.106213: Epoch time: 103.21 s +2026-04-08 22:42:10.147349: +2026-04-08 22:42:10.151306: Epoch 231 +2026-04-08 22:42:10.154417: Current learning rate: 0.00789 +2026-04-08 22:43:52.563697: train_loss -0.1517 +2026-04-08 22:43:52.572227: val_loss -0.116 +2026-04-08 22:43:52.591785: Pseudo dice [0.5556, 0.3286, 0.4433, 0.4983, 0.4118, 0.1824, 0.7371] +2026-04-08 22:43:52.595712: Epoch time: 102.42 s +2026-04-08 22:43:53.647322: +2026-04-08 22:43:53.650372: Epoch 232 +2026-04-08 22:43:53.653543: Current learning rate: 0.00789 +2026-04-08 22:45:35.955767: train_loss -0.1355 +2026-04-08 22:45:35.961477: val_loss -0.1493 +2026-04-08 22:45:35.964772: Pseudo dice [0.4452, 0.5769, 0.6441, 0.5328, 0.4838, 0.2775, 0.8014] +2026-04-08 22:45:35.966908: Epoch time: 102.31 s +2026-04-08 22:45:37.009535: +2026-04-08 22:45:37.011984: Epoch 233 +2026-04-08 22:45:37.014040: Current learning rate: 0.00788 +2026-04-08 22:47:21.019796: train_loss -0.1402 +2026-04-08 22:47:21.026892: val_loss -0.0874 +2026-04-08 22:47:21.029598: Pseudo dice [0.6774, 0.3402, 0.3143, 0.6129, 0.527, 0.0974, 0.7094] +2026-04-08 22:47:21.032929: Epoch time: 104.01 s +2026-04-08 22:47:22.071999: +2026-04-08 22:47:22.075567: Epoch 234 +2026-04-08 22:47:22.079958: Current learning rate: 0.00787 +2026-04-08 22:49:04.554086: train_loss -0.1474 +2026-04-08 22:49:04.561256: val_loss -0.0636 +2026-04-08 22:49:04.564285: Pseudo dice [0.4227, 
0.795, 0.5255, 0.6091, 0.3079, 0.0643, 0.7063] +2026-04-08 22:49:04.566599: Epoch time: 102.49 s +2026-04-08 22:49:05.590235: +2026-04-08 22:49:05.592617: Epoch 235 +2026-04-08 22:49:05.596153: Current learning rate: 0.00786 +2026-04-08 22:50:49.145169: train_loss -0.1418 +2026-04-08 22:50:49.152423: val_loss -0.0286 +2026-04-08 22:50:49.155359: Pseudo dice [0.2659, 0.7064, 0.2793, 0.4861, 0.395, 0.0608, 0.5544] +2026-04-08 22:50:49.158209: Epoch time: 103.56 s +2026-04-08 22:50:50.193163: +2026-04-08 22:50:50.196640: Epoch 236 +2026-04-08 22:50:50.199404: Current learning rate: 0.00785 +2026-04-08 22:52:33.060509: train_loss -0.1414 +2026-04-08 22:52:33.068011: val_loss -0.1322 +2026-04-08 22:52:33.071233: Pseudo dice [0.5403, 0.5411, 0.7057, 0.6311, 0.3949, 0.3078, 0.6381] +2026-04-08 22:52:33.073579: Epoch time: 102.87 s +2026-04-08 22:52:34.099481: +2026-04-08 22:52:34.102596: Epoch 237 +2026-04-08 22:52:34.105176: Current learning rate: 0.00784 +2026-04-08 22:54:17.244650: train_loss -0.146 +2026-04-08 22:54:17.252696: val_loss -0.1494 +2026-04-08 22:54:17.256009: Pseudo dice [0.5353, 0.3756, 0.6931, 0.236, 0.5767, 0.2114, 0.8007] +2026-04-08 22:54:17.258979: Epoch time: 103.15 s +2026-04-08 22:54:18.398318: +2026-04-08 22:54:18.401550: Epoch 238 +2026-04-08 22:54:18.403885: Current learning rate: 0.00783 +2026-04-08 22:56:00.943778: train_loss -0.1533 +2026-04-08 22:56:00.951508: val_loss -0.0861 +2026-04-08 22:56:00.955163: Pseudo dice [0.2551, 0.5195, 0.6201, 0.1167, 0.3514, 0.1052, 0.5833] +2026-04-08 22:56:00.957345: Epoch time: 102.55 s +2026-04-08 22:56:01.996778: +2026-04-08 22:56:02.001966: Epoch 239 +2026-04-08 22:56:02.004755: Current learning rate: 0.00782 +2026-04-08 22:57:44.523887: train_loss -0.1604 +2026-04-08 22:57:44.530968: val_loss -0.1471 +2026-04-08 22:57:44.533901: Pseudo dice [0.48, 0.4619, 0.6431, 0.0326, 0.5685, 0.7005, 0.7965] +2026-04-08 22:57:44.536719: Epoch time: 102.53 s +2026-04-08 22:57:45.584216: +2026-04-08 22:57:45.586744: 
Epoch 240 +2026-04-08 22:57:45.589568: Current learning rate: 0.00781 +2026-04-08 22:59:28.420160: train_loss -0.1552 +2026-04-08 22:59:28.425625: val_loss -0.1267 +2026-04-08 22:59:28.428000: Pseudo dice [0.3013, 0.3919, 0.5615, 0.7671, 0.308, 0.4118, 0.8294] +2026-04-08 22:59:28.430268: Epoch time: 102.84 s +2026-04-08 22:59:29.467223: +2026-04-08 22:59:29.471234: Epoch 241 +2026-04-08 22:59:29.473613: Current learning rate: 0.0078 +2026-04-08 23:01:11.518236: train_loss -0.1574 +2026-04-08 23:01:11.527140: val_loss -0.1064 +2026-04-08 23:01:11.534210: Pseudo dice [0.5564, 0.4941, 0.707, 0.6126, 0.2799, 0.1632, 0.6869] +2026-04-08 23:01:11.547961: Epoch time: 102.05 s +2026-04-08 23:01:12.610670: +2026-04-08 23:01:12.612974: Epoch 242 +2026-04-08 23:01:12.615375: Current learning rate: 0.00779 +2026-04-08 23:02:55.270152: train_loss -0.1465 +2026-04-08 23:02:55.278809: val_loss -0.0898 +2026-04-08 23:02:55.281086: Pseudo dice [0.7514, 0.8466, 0.4821, 0.2838, 0.3544, 0.0429, 0.6467] +2026-04-08 23:02:55.284323: Epoch time: 102.66 s +2026-04-08 23:02:56.343804: +2026-04-08 23:02:56.346163: Epoch 243 +2026-04-08 23:02:56.348855: Current learning rate: 0.00778 +2026-04-08 23:04:39.273152: train_loss -0.1511 +2026-04-08 23:04:39.280931: val_loss -0.115 +2026-04-08 23:04:39.285268: Pseudo dice [0.3973, 0.6662, 0.6762, 0.5651, 0.4752, 0.0891, 0.5475] +2026-04-08 23:04:39.289761: Epoch time: 102.93 s +2026-04-08 23:04:39.293236: Yayy! New best EMA pseudo Dice: 0.4788 +2026-04-08 23:04:41.887177: +2026-04-08 23:04:41.890089: Epoch 244 +2026-04-08 23:04:41.892672: Current learning rate: 0.00777 +2026-04-08 23:06:24.947083: train_loss -0.1509 +2026-04-08 23:06:24.953831: val_loss -0.1386 +2026-04-08 23:06:24.956448: Pseudo dice [0.3772, 0.3652, 0.6927, 0.01, 0.5261, 0.8633, 0.6277] +2026-04-08 23:06:24.959902: Epoch time: 103.06 s +2026-04-08 23:06:24.962280: Yayy! 
New best EMA pseudo Dice: 0.4804 +2026-04-08 23:06:27.655283: +2026-04-08 23:06:27.658065: Epoch 245 +2026-04-08 23:06:27.660043: Current learning rate: 0.00777 +2026-04-08 23:08:10.410513: train_loss -0.1536 +2026-04-08 23:08:10.416474: val_loss -0.1351 +2026-04-08 23:08:10.420287: Pseudo dice [0.3905, 0.7685, 0.6678, 0.421, 0.3302, 0.5325, 0.8321] +2026-04-08 23:08:10.422741: Epoch time: 102.76 s +2026-04-08 23:08:10.424743: Yayy! New best EMA pseudo Dice: 0.4887 +2026-04-08 23:08:12.842902: +2026-04-08 23:08:12.845693: Epoch 246 +2026-04-08 23:08:12.847441: Current learning rate: 0.00776 +2026-04-08 23:09:54.657458: train_loss -0.1505 +2026-04-08 23:09:54.669302: val_loss -0.0151 +2026-04-08 23:09:54.673376: Pseudo dice [0.2297, 0.7164, 0.4919, 0.4514, 0.3628, 0.0403, 0.6221] +2026-04-08 23:09:54.675791: Epoch time: 101.82 s +2026-04-08 23:09:55.729402: +2026-04-08 23:09:55.732688: Epoch 247 +2026-04-08 23:09:55.735042: Current learning rate: 0.00775 +2026-04-08 23:11:38.327748: train_loss -0.1432 +2026-04-08 23:11:38.338544: val_loss -0.072 +2026-04-08 23:11:38.341637: Pseudo dice [0.2651, 0.3448, 0.4384, 0.1672, 0.427, 0.0603, 0.8028] +2026-04-08 23:11:38.344015: Epoch time: 102.6 s +2026-04-08 23:11:39.412559: +2026-04-08 23:11:39.415861: Epoch 248 +2026-04-08 23:11:39.417870: Current learning rate: 0.00774 +2026-04-08 23:13:21.559924: train_loss -0.1533 +2026-04-08 23:13:21.565686: val_loss -0.0456 +2026-04-08 23:13:21.567977: Pseudo dice [0.5809, 0.6027, 0.2605, 0.2321, 0.491, 0.0474, 0.7227] +2026-04-08 23:13:21.569924: Epoch time: 102.15 s +2026-04-08 23:13:22.819364: +2026-04-08 23:13:22.821432: Epoch 249 +2026-04-08 23:13:22.823406: Current learning rate: 0.00773 +2026-04-08 23:15:06.347943: train_loss -0.1286 +2026-04-08 23:15:06.354195: val_loss -0.1145 +2026-04-08 23:15:06.356753: Pseudo dice [0.2888, 0.5961, 0.5357, 0.0521, 0.2634, 0.7248, 0.3114] +2026-04-08 23:15:06.359985: Epoch time: 103.53 s +2026-04-08 23:15:09.171486: +2026-04-08 
23:15:09.175005: Epoch 250 +2026-04-08 23:15:09.177336: Current learning rate: 0.00772 +2026-04-08 23:16:52.142869: train_loss -0.1678 +2026-04-08 23:16:52.148984: val_loss -0.0403 +2026-04-08 23:16:52.151034: Pseudo dice [0.6657, 0.7041, 0.504, 0.4465, 0.5018, 0.0833, 0.4472] +2026-04-08 23:16:52.154665: Epoch time: 102.97 s +2026-04-08 23:16:53.195844: +2026-04-08 23:16:53.199217: Epoch 251 +2026-04-08 23:16:53.201306: Current learning rate: 0.00771 +2026-04-08 23:18:35.326339: train_loss -0.1519 +2026-04-08 23:18:35.333619: val_loss -0.1386 +2026-04-08 23:18:35.336649: Pseudo dice [0.5928, 0.3683, 0.3705, 0.5055, 0.3574, 0.7, 0.6245] +2026-04-08 23:18:35.339720: Epoch time: 102.13 s +2026-04-08 23:18:36.388112: +2026-04-08 23:18:36.390402: Epoch 252 +2026-04-08 23:18:36.392834: Current learning rate: 0.0077 +2026-04-08 23:20:18.005237: train_loss -0.1406 +2026-04-08 23:20:18.012248: val_loss -0.1075 +2026-04-08 23:20:18.015866: Pseudo dice [0.5391, 0.6899, 0.6758, 0.4606, 0.3066, 0.0817, 0.7637] +2026-04-08 23:20:18.018689: Epoch time: 101.62 s +2026-04-08 23:20:19.091026: +2026-04-08 23:20:19.093518: Epoch 253 +2026-04-08 23:20:19.095874: Current learning rate: 0.00769 +2026-04-08 23:22:01.352530: train_loss -0.157 +2026-04-08 23:22:01.359531: val_loss -0.1005 +2026-04-08 23:22:01.361980: Pseudo dice [0.5194, 0.8397, 0.5054, 0.2837, 0.3706, 0.1625, 0.6384] +2026-04-08 23:22:01.364856: Epoch time: 102.26 s +2026-04-08 23:22:02.428443: +2026-04-08 23:22:02.430466: Epoch 254 +2026-04-08 23:22:02.432819: Current learning rate: 0.00768 +2026-04-08 23:23:44.720134: train_loss -0.1434 +2026-04-08 23:23:44.727058: val_loss -0.1173 +2026-04-08 23:23:44.729689: Pseudo dice [0.523, 0.4175, 0.4288, 0.3119, 0.3621, 0.4667, 0.7789] +2026-04-08 23:23:44.732065: Epoch time: 102.3 s +2026-04-08 23:23:45.812287: +2026-04-08 23:23:45.825039: Epoch 255 +2026-04-08 23:23:45.828296: Current learning rate: 0.00767 +2026-04-08 23:25:28.610815: train_loss -0.1483 +2026-04-08 
23:25:28.619412: val_loss -0.1327 +2026-04-08 23:25:28.624009: Pseudo dice [0.4347, 0.5352, 0.4367, 0.5819, 0.6252, 0.3332, 0.7884] +2026-04-08 23:25:28.629718: Epoch time: 102.8 s +2026-04-08 23:25:29.716158: +2026-04-08 23:25:29.719316: Epoch 256 +2026-04-08 23:25:29.724302: Current learning rate: 0.00766 +2026-04-08 23:27:11.860190: train_loss -0.1493 +2026-04-08 23:27:11.868667: val_loss -0.1109 +2026-04-08 23:27:11.871931: Pseudo dice [0.3071, 0.7957, 0.619, 0.3287, 0.4456, 0.2053, 0.2574] +2026-04-08 23:27:11.874375: Epoch time: 102.15 s +2026-04-08 23:27:12.929294: +2026-04-08 23:27:12.931627: Epoch 257 +2026-04-08 23:27:12.934851: Current learning rate: 0.00765 +2026-04-08 23:28:55.222698: train_loss -0.158 +2026-04-08 23:28:55.229458: val_loss -0.1518 +2026-04-08 23:28:55.232032: Pseudo dice [0.4966, 0.6532, 0.6173, 0.6702, 0.5139, 0.5674, 0.773] +2026-04-08 23:28:55.235626: Epoch time: 102.3 s +2026-04-08 23:28:56.317261: +2026-04-08 23:28:56.321280: Epoch 258 +2026-04-08 23:28:56.326921: Current learning rate: 0.00764 +2026-04-08 23:30:38.032944: train_loss -0.167 +2026-04-08 23:30:38.038501: val_loss -0.1506 +2026-04-08 23:30:38.040981: Pseudo dice [0.3787, 0.4604, 0.6098, 0.7248, 0.3602, 0.7024, 0.508] +2026-04-08 23:30:38.043615: Epoch time: 101.72 s +2026-04-08 23:30:38.045635: Yayy! New best EMA pseudo Dice: 0.4892 +2026-04-08 23:30:40.732583: +2026-04-08 23:30:40.735217: Epoch 259 +2026-04-08 23:30:40.737091: Current learning rate: 0.00764 +2026-04-08 23:32:22.145024: train_loss -0.1625 +2026-04-08 23:32:22.153026: val_loss -0.14 +2026-04-08 23:32:22.155304: Pseudo dice [0.4633, 0.4412, 0.7614, 0.134, 0.5401, 0.6824, 0.7101] +2026-04-08 23:32:22.157829: Epoch time: 101.42 s +2026-04-08 23:32:22.160175: Yayy! 
New best EMA pseudo Dice: 0.4936 +2026-04-08 23:32:25.106116: +2026-04-08 23:32:25.109267: Epoch 260 +2026-04-08 23:32:25.111272: Current learning rate: 0.00763 +2026-04-08 23:34:07.314498: train_loss -0.1475 +2026-04-08 23:34:07.320146: val_loss -0.1364 +2026-04-08 23:34:07.324528: Pseudo dice [0.629, 0.302, 0.6794, 0.4599, 0.5777, 0.2327, 0.7469] +2026-04-08 23:34:07.328122: Epoch time: 102.21 s +2026-04-08 23:34:07.331862: Yayy! New best EMA pseudo Dice: 0.4961 +2026-04-08 23:34:09.940934: +2026-04-08 23:34:09.944280: Epoch 261 +2026-04-08 23:34:09.946506: Current learning rate: 0.00762 +2026-04-08 23:35:52.776058: train_loss -0.1601 +2026-04-08 23:35:52.786918: val_loss -0.1175 +2026-04-08 23:35:52.791129: Pseudo dice [0.6204, 0.3756, 0.6511, 0.5229, 0.483, 0.1108, 0.7771] +2026-04-08 23:35:52.794970: Epoch time: 102.84 s +2026-04-08 23:35:52.802541: Yayy! New best EMA pseudo Dice: 0.497 +2026-04-08 23:35:55.644805: +2026-04-08 23:35:55.648166: Epoch 262 +2026-04-08 23:35:55.650128: Current learning rate: 0.00761 +2026-04-08 23:37:38.063670: train_loss -0.1435 +2026-04-08 23:37:38.069891: val_loss -0.1039 +2026-04-08 23:37:38.073354: Pseudo dice [0.753, 0.7662, 0.5852, 0.4868, 0.3498, 0.2102, 0.6094] +2026-04-08 23:37:38.075804: Epoch time: 102.42 s +2026-04-08 23:37:38.078278: Yayy! New best EMA pseudo Dice: 0.5011 +2026-04-08 23:37:40.805990: +2026-04-08 23:37:40.808704: Epoch 263 +2026-04-08 23:37:40.810396: Current learning rate: 0.0076 +2026-04-08 23:39:23.010338: train_loss -0.1475 +2026-04-08 23:39:23.018410: val_loss -0.1426 +2026-04-08 23:39:23.020395: Pseudo dice [0.8851, 0.6306, 0.6273, 0.3946, 0.3777, 0.2133, 0.7831] +2026-04-08 23:39:23.023383: Epoch time: 102.21 s +2026-04-08 23:39:23.025815: Yayy! 
New best EMA pseudo Dice: 0.5068 +2026-04-08 23:39:25.646902: +2026-04-08 23:39:25.650222: Epoch 264 +2026-04-08 23:39:25.653283: Current learning rate: 0.00759 +2026-04-08 23:41:08.687863: train_loss -0.1631 +2026-04-08 23:41:08.695541: val_loss -0.0778 +2026-04-08 23:41:08.698309: Pseudo dice [0.4672, 0.2573, 0.7569, 0.5948, 0.4459, 0.0614, 0.7612] +2026-04-08 23:41:08.700941: Epoch time: 103.04 s +2026-04-08 23:41:09.731584: +2026-04-08 23:41:09.733535: Epoch 265 +2026-04-08 23:41:09.744342: Current learning rate: 0.00758 +2026-04-08 23:42:51.296972: train_loss -0.1531 +2026-04-08 23:42:51.304054: val_loss -0.115 +2026-04-08 23:42:51.306454: Pseudo dice [0.348, 0.7076, 0.6071, 0.6174, 0.3727, 0.1089, 0.4471] +2026-04-08 23:42:51.309576: Epoch time: 101.57 s +2026-04-08 23:42:52.351308: +2026-04-08 23:42:52.353723: Epoch 266 +2026-04-08 23:42:52.355960: Current learning rate: 0.00757 +2026-04-08 23:44:34.276962: train_loss -0.1525 +2026-04-08 23:44:34.282915: val_loss -0.13 +2026-04-08 23:44:34.285057: Pseudo dice [0.7036, 0.6782, 0.7636, 0.2578, 0.5721, 0.1529, 0.581] +2026-04-08 23:44:34.287029: Epoch time: 101.93 s +2026-04-08 23:44:36.236171: +2026-04-08 23:44:36.238016: Epoch 267 +2026-04-08 23:44:36.240853: Current learning rate: 0.00756 +2026-04-08 23:46:18.817300: train_loss -0.1513 +2026-04-08 23:46:18.823689: val_loss -0.0977 +2026-04-08 23:46:18.827574: Pseudo dice [0.5175, 0.824, 0.6415, 0.7095, 0.583, 0.0694, 0.776] +2026-04-08 23:46:18.832365: Epoch time: 102.58 s +2026-04-08 23:46:18.835578: Yayy! New best EMA pseudo Dice: 0.5111 +2026-04-08 23:46:21.656114: +2026-04-08 23:46:21.658830: Epoch 268 +2026-04-08 23:46:21.660654: Current learning rate: 0.00755 +2026-04-08 23:48:04.157989: train_loss -0.1766 +2026-04-08 23:48:04.165445: val_loss -0.1493 +2026-04-08 23:48:04.168567: Pseudo dice [0.3499, 0.7699, 0.5151, 0.5027, 0.4877, 0.7667, 0.7951] +2026-04-08 23:48:04.172511: Epoch time: 102.5 s +2026-04-08 23:48:04.175065: Yayy! 
New best EMA pseudo Dice: 0.5198 +2026-04-08 23:48:06.721753: +2026-04-08 23:48:06.724257: Epoch 269 +2026-04-08 23:48:06.726133: Current learning rate: 0.00754 +2026-04-08 23:49:48.632732: train_loss -0.168 +2026-04-08 23:49:48.638826: val_loss -0.1139 +2026-04-08 23:49:48.640625: Pseudo dice [0.7952, 0.4662, 0.6451, 0.1616, 0.326, 0.2333, 0.7485] +2026-04-08 23:49:48.643111: Epoch time: 101.91 s +2026-04-08 23:49:49.668797: +2026-04-08 23:49:49.670805: Epoch 270 +2026-04-08 23:49:49.673501: Current learning rate: 0.00753 +2026-04-08 23:51:32.097109: train_loss -0.1629 +2026-04-08 23:51:32.109499: val_loss -0.1611 +2026-04-08 23:51:32.118625: Pseudo dice [0.4241, 0.1795, 0.6988, 0.4538, 0.5691, 0.657, 0.6917] +2026-04-08 23:51:32.123307: Epoch time: 102.43 s +2026-04-08 23:51:33.173609: +2026-04-08 23:51:33.176411: Epoch 271 +2026-04-08 23:51:33.178716: Current learning rate: 0.00752 +2026-04-08 23:53:15.821170: train_loss -0.1586 +2026-04-08 23:53:15.833893: val_loss -0.1623 +2026-04-08 23:53:15.836733: Pseudo dice [0.3393, 0.5533, 0.7074, 0.061, 0.3872, 0.7905, 0.6065] +2026-04-08 23:53:15.839486: Epoch time: 102.65 s +2026-04-08 23:53:16.867540: +2026-04-08 23:53:16.870679: Epoch 272 +2026-04-08 23:53:16.874367: Current learning rate: 0.00751 +2026-04-08 23:54:58.889553: train_loss -0.1426 +2026-04-08 23:54:58.894615: val_loss -0.1536 +2026-04-08 23:54:58.896652: Pseudo dice [0.4408, 0.1857, 0.6514, 0.4594, 0.301, 0.5824, 0.8122] +2026-04-08 23:54:58.898705: Epoch time: 102.03 s +2026-04-08 23:54:59.929490: +2026-04-08 23:54:59.931811: Epoch 273 +2026-04-08 23:54:59.934944: Current learning rate: 0.00751 +2026-04-08 23:56:42.413358: train_loss -0.151 +2026-04-08 23:56:42.421705: val_loss -0.0897 +2026-04-08 23:56:42.424557: Pseudo dice [0.3677, 0.6212, 0.5891, 0.3794, 0.5827, 0.0263, 0.8532] +2026-04-08 23:56:42.429255: Epoch time: 102.49 s +2026-04-08 23:56:43.457740: +2026-04-08 23:56:43.461049: Epoch 274 +2026-04-08 23:56:43.464123: Current learning rate: 
0.0075 +2026-04-08 23:58:26.278934: train_loss -0.1615 +2026-04-08 23:58:26.285877: val_loss -0.1584 +2026-04-08 23:58:26.287989: Pseudo dice [0.2976, 0.7996, 0.6217, 0.3508, 0.5762, 0.7993, 0.8579] +2026-04-08 23:58:26.290505: Epoch time: 102.82 s +2026-04-08 23:58:26.292988: Yayy! New best EMA pseudo Dice: 0.5202 +2026-04-08 23:58:28.932506: +2026-04-08 23:58:28.935108: Epoch 275 +2026-04-08 23:58:28.936959: Current learning rate: 0.00749 +2026-04-09 00:00:10.852431: train_loss -0.1551 +2026-04-09 00:00:10.858044: val_loss -0.1243 +2026-04-09 00:00:10.859967: Pseudo dice [0.3322, 0.4934, 0.5444, 0.7756, 0.572, 0.2632, 0.8984] +2026-04-09 00:00:10.862009: Epoch time: 101.92 s +2026-04-09 00:00:10.863956: Yayy! New best EMA pseudo Dice: 0.5236 +2026-04-09 00:00:13.566795: +2026-04-09 00:00:13.569311: Epoch 276 +2026-04-09 00:00:13.571025: Current learning rate: 0.00748 +2026-04-09 00:01:58.050989: train_loss -0.1536 +2026-04-09 00:01:58.060767: val_loss -0.1302 +2026-04-09 00:01:58.067206: Pseudo dice [0.2539, 0.841, 0.5532, 0.544, 0.6349, 0.7317, 0.4791] +2026-04-09 00:01:58.070979: Epoch time: 104.49 s +2026-04-09 00:01:58.074565: Yayy! New best EMA pseudo Dice: 0.5289 +2026-04-09 00:02:01.134737: +2026-04-09 00:02:01.140038: Epoch 277 +2026-04-09 00:02:01.146399: Current learning rate: 0.00747 +2026-04-09 00:03:45.169196: train_loss -0.158 +2026-04-09 00:03:45.174536: val_loss -0.079 +2026-04-09 00:03:45.176763: Pseudo dice [0.571, 0.7564, 0.4505, 0.761, 0.2951, 0.0472, 0.7198] +2026-04-09 00:03:45.181388: Epoch time: 104.04 s +2026-04-09 00:03:46.216322: +2026-04-09 00:03:46.219389: Epoch 278 +2026-04-09 00:03:46.222154: Current learning rate: 0.00746 +2026-04-09 00:05:29.404312: train_loss -0.1593 +2026-04-09 00:05:29.411755: val_loss -0.1634 +2026-04-09 00:05:29.414380: Pseudo dice [0.5964, 0.682, 0.7439, 0.707, 0.4825, 0.7649, 0.8036] +2026-04-09 00:05:29.430722: Epoch time: 103.19 s +2026-04-09 00:05:29.436168: Yayy! 
New best EMA pseudo Dice: 0.543 +2026-04-09 00:05:32.309752: +2026-04-09 00:05:32.313088: Epoch 279 +2026-04-09 00:05:32.315890: Current learning rate: 0.00745 +2026-04-09 00:07:15.005196: train_loss -0.1557 +2026-04-09 00:07:15.015600: val_loss -0.1093 +2026-04-09 00:07:15.018708: Pseudo dice [0.6755, 0.6375, 0.7301, 0.6158, 0.3668, 0.0806, 0.8197] +2026-04-09 00:07:15.020938: Epoch time: 102.7 s +2026-04-09 00:07:15.026489: Yayy! New best EMA pseudo Dice: 0.5448 +2026-04-09 00:07:18.017380: +2026-04-09 00:07:18.022028: Epoch 280 +2026-04-09 00:07:18.024518: Current learning rate: 0.00744 +2026-04-09 00:09:01.081977: train_loss -0.1576 +2026-04-09 00:09:01.087829: val_loss -0.1033 +2026-04-09 00:09:01.090049: Pseudo dice [0.5672, 0.1846, 0.4963, 0.3084, 0.5813, 0.1709, 0.8132] +2026-04-09 00:09:01.091995: Epoch time: 103.07 s +2026-04-09 00:09:02.139254: +2026-04-09 00:09:02.141419: Epoch 281 +2026-04-09 00:09:02.143674: Current learning rate: 0.00743 +2026-04-09 00:10:44.125681: train_loss -0.1756 +2026-04-09 00:10:44.132495: val_loss -0.1665 +2026-04-09 00:10:44.134938: Pseudo dice [0.5405, 0.3482, 0.6667, 0.7369, 0.5511, 0.6919, 0.8622] +2026-04-09 00:10:44.148896: Epoch time: 101.99 s +2026-04-09 00:10:45.188825: +2026-04-09 00:10:45.191208: Epoch 282 +2026-04-09 00:10:45.193560: Current learning rate: 0.00742 +2026-04-09 00:12:28.428923: train_loss -0.1497 +2026-04-09 00:12:28.438444: val_loss -0.136 +2026-04-09 00:12:28.442073: Pseudo dice [0.7264, 0.7052, 0.5609, 0.359, 0.4222, 0.4484, 0.7436] +2026-04-09 00:12:28.444555: Epoch time: 103.24 s +2026-04-09 00:12:28.447772: Yayy! 
New best EMA pseudo Dice: 0.5465 +2026-04-09 00:12:31.367455: +2026-04-09 00:12:31.371099: Epoch 283 +2026-04-09 00:12:31.373153: Current learning rate: 0.00741 +2026-04-09 00:14:13.962152: train_loss -0.1739 +2026-04-09 00:14:13.968731: val_loss -0.117 +2026-04-09 00:14:13.970959: Pseudo dice [0.7765, 0.6149, 0.4136, 0.0727, 0.3403, 0.2428, 0.7924] +2026-04-09 00:14:13.973064: Epoch time: 102.6 s +2026-04-09 00:14:15.009986: +2026-04-09 00:14:15.012873: Epoch 284 +2026-04-09 00:14:15.015815: Current learning rate: 0.0074 +2026-04-09 00:15:56.350261: train_loss -0.1555 +2026-04-09 00:15:56.357232: val_loss -0.144 +2026-04-09 00:15:56.360577: Pseudo dice [0.5283, 0.8177, 0.6361, 0.16, 0.4932, 0.6788, 0.7349] +2026-04-09 00:15:56.363936: Epoch time: 101.34 s +2026-04-09 00:15:58.297307: +2026-04-09 00:15:58.299587: Epoch 285 +2026-04-09 00:15:58.301494: Current learning rate: 0.00739 +2026-04-09 00:17:40.368980: train_loss -0.1727 +2026-04-09 00:17:40.377020: val_loss -0.1314 +2026-04-09 00:17:40.379287: Pseudo dice [0.4165, 0.5079, 0.5782, 0.3914, 0.439, 0.2472, 0.8149] +2026-04-09 00:17:40.381532: Epoch time: 102.08 s +2026-04-09 00:17:41.446281: +2026-04-09 00:17:41.451565: Epoch 286 +2026-04-09 00:17:41.454580: Current learning rate: 0.00738 +2026-04-09 00:19:23.715957: train_loss -0.1621 +2026-04-09 00:19:23.721651: val_loss -0.157 +2026-04-09 00:19:23.723849: Pseudo dice [0.6504, 0.3589, 0.5529, 0.4428, 0.5255, 0.6658, 0.8366] +2026-04-09 00:19:23.726534: Epoch time: 102.27 s +2026-04-09 00:19:24.790981: +2026-04-09 00:19:24.792994: Epoch 287 +2026-04-09 00:19:24.795323: Current learning rate: 0.00738 +2026-04-09 00:21:06.134930: train_loss -0.1656 +2026-04-09 00:21:06.142757: val_loss -0.1293 +2026-04-09 00:21:06.146014: Pseudo dice [0.7567, 0.6211, 0.6347, 0.1753, 0.4888, 0.0863, 0.6866] +2026-04-09 00:21:06.151011: Epoch time: 101.35 s +2026-04-09 00:21:07.215335: +2026-04-09 00:21:07.217664: Epoch 288 +2026-04-09 00:21:07.219954: Current learning rate: 
0.00737 +2026-04-09 00:22:50.212180: train_loss -0.1607 +2026-04-09 00:22:50.220379: val_loss -0.1112 +2026-04-09 00:22:50.223184: Pseudo dice [0.8239, 0.7902, 0.5913, 0.5778, 0.5576, 0.1423, 0.6797] +2026-04-09 00:22:50.227298: Epoch time: 103.0 s +2026-04-09 00:22:51.293139: +2026-04-09 00:22:51.295466: Epoch 289 +2026-04-09 00:22:51.298518: Current learning rate: 0.00736 +2026-04-09 00:24:33.081732: train_loss -0.1569 +2026-04-09 00:24:33.088133: val_loss -0.1159 +2026-04-09 00:24:33.090374: Pseudo dice [0.7403, 0.66, 0.5183, 0.638, 0.4095, 0.6712, 0.6454] +2026-04-09 00:24:33.093549: Epoch time: 101.79 s +2026-04-09 00:24:33.097894: Yayy! New best EMA pseudo Dice: 0.5487 +2026-04-09 00:24:35.535286: +2026-04-09 00:24:35.537950: Epoch 290 +2026-04-09 00:24:35.539656: Current learning rate: 0.00735 +2026-04-09 00:26:17.200598: train_loss -0.1629 +2026-04-09 00:26:17.207450: val_loss -0.1429 +2026-04-09 00:26:17.210025: Pseudo dice [0.4699, 0.361, 0.6279, 0.5913, 0.4473, 0.4436, 0.6879] +2026-04-09 00:26:17.212192: Epoch time: 101.67 s +2026-04-09 00:26:18.280707: +2026-04-09 00:26:18.302357: Epoch 291 +2026-04-09 00:26:18.305541: Current learning rate: 0.00734 +2026-04-09 00:27:59.627437: train_loss -0.1526 +2026-04-09 00:27:59.633502: val_loss -0.1239 +2026-04-09 00:27:59.636445: Pseudo dice [0.4387, 0.8127, 0.7019, 0.0534, 0.2658, 0.0826, 0.6403] +2026-04-09 00:27:59.638863: Epoch time: 101.35 s +2026-04-09 00:28:00.735136: +2026-04-09 00:28:00.737749: Epoch 292 +2026-04-09 00:28:00.740185: Current learning rate: 0.00733 +2026-04-09 00:29:42.230441: train_loss -0.167 +2026-04-09 00:29:42.237895: val_loss -0.1078 +2026-04-09 00:29:42.240630: Pseudo dice [0.2274, 0.4869, 0.4574, 0.2247, 0.4174, 0.2463, 0.6237] +2026-04-09 00:29:42.243065: Epoch time: 101.5 s +2026-04-09 00:29:43.290431: +2026-04-09 00:29:43.293008: Epoch 293 +2026-04-09 00:29:43.295083: Current learning rate: 0.00732 +2026-04-09 00:31:25.156210: train_loss -0.1563 +2026-04-09 00:31:25.163826: 
val_loss -0.1499 +2026-04-09 00:31:25.166580: Pseudo dice [0.5927, 0.2794, 0.7331, 0.2165, 0.4852, 0.5364, 0.6989] +2026-04-09 00:31:25.169176: Epoch time: 101.87 s +2026-04-09 00:31:26.228594: +2026-04-09 00:31:26.230431: Epoch 294 +2026-04-09 00:31:26.232973: Current learning rate: 0.00731 +2026-04-09 00:33:08.748186: train_loss -0.1707 +2026-04-09 00:33:08.757272: val_loss -0.1026 +2026-04-09 00:33:08.760359: Pseudo dice [0.5142, 0.5674, 0.6374, 0.7487, 0.2887, 0.1891, 0.1666] +2026-04-09 00:33:08.768679: Epoch time: 102.52 s +2026-04-09 00:33:09.836296: +2026-04-09 00:33:09.838762: Epoch 295 +2026-04-09 00:33:09.841058: Current learning rate: 0.0073 +2026-04-09 00:34:51.810372: train_loss -0.1578 +2026-04-09 00:34:51.818776: val_loss -0.1456 +2026-04-09 00:34:51.823847: Pseudo dice [0.4743, 0.706, 0.551, 0.6365, 0.3393, 0.4409, 0.8453] +2026-04-09 00:34:51.830496: Epoch time: 101.98 s +2026-04-09 00:34:52.897187: +2026-04-09 00:34:52.899469: Epoch 296 +2026-04-09 00:34:52.901569: Current learning rate: 0.00729 +2026-04-09 00:36:35.765815: train_loss -0.1674 +2026-04-09 00:36:35.774302: val_loss -0.1187 +2026-04-09 00:36:35.777271: Pseudo dice [0.6359, 0.4024, 0.5315, 0.0311, 0.5838, 0.4919, 0.7676] +2026-04-09 00:36:35.782423: Epoch time: 102.87 s +2026-04-09 00:36:36.833016: +2026-04-09 00:36:36.835124: Epoch 297 +2026-04-09 00:36:36.837056: Current learning rate: 0.00728 +2026-04-09 00:38:18.895892: train_loss -0.1395 +2026-04-09 00:38:18.903767: val_loss -0.0927 +2026-04-09 00:38:18.906590: Pseudo dice [0.4911, 0.4561, 0.2668, 0.507, 0.3483, 0.0661, 0.779] +2026-04-09 00:38:18.909122: Epoch time: 102.07 s +2026-04-09 00:38:19.945593: +2026-04-09 00:38:19.951169: Epoch 298 +2026-04-09 00:38:19.953846: Current learning rate: 0.00727 +2026-04-09 00:40:02.578478: train_loss -0.1575 +2026-04-09 00:40:02.584380: val_loss -0.1138 +2026-04-09 00:40:02.586822: Pseudo dice [0.6849, 0.7104, 0.7367, 0.6613, 0.5136, 0.092, 0.8904] +2026-04-09 00:40:02.589657: Epoch time: 
102.64 s +2026-04-09 00:40:03.639993: +2026-04-09 00:40:03.643629: Epoch 299 +2026-04-09 00:40:03.646397: Current learning rate: 0.00726 +2026-04-09 00:41:45.442103: train_loss -0.1618 +2026-04-09 00:41:45.447381: val_loss -0.1373 +2026-04-09 00:41:45.449219: Pseudo dice [0.5264, 0.3633, 0.7143, 0.5227, 0.3837, 0.5465, 0.4111] +2026-04-09 00:41:45.451551: Epoch time: 101.81 s +2026-04-09 00:41:48.018382: +2026-04-09 00:41:48.021067: Epoch 300 +2026-04-09 00:41:48.022880: Current learning rate: 0.00725 +2026-04-09 00:43:28.968699: train_loss -0.1688 +2026-04-09 00:43:28.977052: val_loss -0.1027 +2026-04-09 00:43:28.980499: Pseudo dice [0.5223, 0.758, 0.6564, 0.197, 0.5858, 0.1887, 0.751] +2026-04-09 00:43:28.982468: Epoch time: 100.95 s +2026-04-09 00:43:30.030304: +2026-04-09 00:43:30.034792: Epoch 301 +2026-04-09 00:43:30.036836: Current learning rate: 0.00724 +2026-04-09 00:45:12.266552: train_loss -0.1635 +2026-04-09 00:45:12.273178: val_loss -0.0828 +2026-04-09 00:45:12.275547: Pseudo dice [0.4468, 0.8622, 0.6152, 0.5243, 0.1989, 0.0696, 0.7724] +2026-04-09 00:45:12.277666: Epoch time: 102.24 s +2026-04-09 00:45:13.340213: +2026-04-09 00:45:13.342720: Epoch 302 +2026-04-09 00:45:13.345052: Current learning rate: 0.00724 +2026-04-09 00:46:55.139410: train_loss -0.1649 +2026-04-09 00:46:55.145004: val_loss -0.0998 +2026-04-09 00:46:55.148063: Pseudo dice [0.7998, 0.5989, 0.5234, 0.5032, 0.5724, 0.0239, 0.8429] +2026-04-09 00:46:55.149930: Epoch time: 101.8 s +2026-04-09 00:46:56.207568: +2026-04-09 00:46:56.211790: Epoch 303 +2026-04-09 00:46:56.214458: Current learning rate: 0.00723 +2026-04-09 00:48:38.205445: train_loss -0.1701 +2026-04-09 00:48:38.211713: val_loss -0.0718 +2026-04-09 00:48:38.214440: Pseudo dice [0.5107, 0.5446, 0.5041, 0.6947, 0.6072, 0.0651, 0.7889] +2026-04-09 00:48:38.216777: Epoch time: 102.0 s +2026-04-09 00:48:39.263371: +2026-04-09 00:48:39.266521: Epoch 304 +2026-04-09 00:48:39.268926: Current learning rate: 0.00722 +2026-04-09 
00:50:21.979810: train_loss -0.1697 +2026-04-09 00:50:21.986001: val_loss -0.1305 +2026-04-09 00:50:21.989060: Pseudo dice [0.6876, 0.505, 0.6575, 0.6415, 0.588, 0.1335, 0.6264] +2026-04-09 00:50:21.992042: Epoch time: 102.72 s +2026-04-09 00:50:23.068820: +2026-04-09 00:50:23.070757: Epoch 305 +2026-04-09 00:50:23.072947: Current learning rate: 0.00721 +2026-04-09 00:52:04.980886: train_loss -0.1602 +2026-04-09 00:52:04.987290: val_loss -0.1587 +2026-04-09 00:52:04.989599: Pseudo dice [0.7967, 0.488, 0.682, 0.6324, 0.6348, 0.719, 0.6228] +2026-04-09 00:52:04.991999: Epoch time: 101.92 s +2026-04-09 00:52:06.077852: +2026-04-09 00:52:06.079636: Epoch 306 +2026-04-09 00:52:06.081162: Current learning rate: 0.0072 +2026-04-09 00:53:48.268706: train_loss -0.1729 +2026-04-09 00:53:48.275155: val_loss -0.1558 +2026-04-09 00:53:48.278092: Pseudo dice [0.2889, 0.7255, 0.724, 0.0628, 0.6536, 0.7563, 0.8318] +2026-04-09 00:53:48.280537: Epoch time: 102.19 s +2026-04-09 00:53:49.350809: +2026-04-09 00:53:49.353328: Epoch 307 +2026-04-09 00:53:49.355126: Current learning rate: 0.00719 +2026-04-09 00:55:31.462837: train_loss -0.1624 +2026-04-09 00:55:31.468655: val_loss -0.1257 +2026-04-09 00:55:31.472100: Pseudo dice [0.6764, 0.6705, 0.5734, 0.5665, 0.2415, 0.1682, 0.7781] +2026-04-09 00:55:31.474473: Epoch time: 102.12 s +2026-04-09 00:55:32.554670: +2026-04-09 00:55:32.557716: Epoch 308 +2026-04-09 00:55:32.560863: Current learning rate: 0.00718 +2026-04-09 00:57:14.309185: train_loss -0.1655 +2026-04-09 00:57:14.325335: val_loss -0.1232 +2026-04-09 00:57:14.334952: Pseudo dice [0.6377, 0.3976, 0.4656, 0.6076, 0.6231, 0.0705, 0.7458] +2026-04-09 00:57:14.336909: Epoch time: 101.76 s +2026-04-09 00:57:15.394382: +2026-04-09 00:57:15.396543: Epoch 309 +2026-04-09 00:57:15.399111: Current learning rate: 0.00717 +2026-04-09 00:58:56.808176: train_loss -0.1699 +2026-04-09 00:58:56.813396: val_loss -0.1507 +2026-04-09 00:58:56.816298: Pseudo dice [0.7505, 0.8104, 0.6282, 0.722, 
0.3611, 0.2929, 0.8131] +2026-04-09 00:58:56.818658: Epoch time: 101.42 s +2026-04-09 00:58:57.891716: +2026-04-09 00:58:57.893618: Epoch 310 +2026-04-09 00:58:57.895731: Current learning rate: 0.00716 +2026-04-09 01:00:39.716955: train_loss -0.1558 +2026-04-09 01:00:39.723006: val_loss -0.1333 +2026-04-09 01:00:39.725875: Pseudo dice [0.6153, 0.7025, 0.4929, 0.7426, 0.6235, 0.7774, 0.7318] +2026-04-09 01:00:39.728128: Epoch time: 101.83 s +2026-04-09 01:00:39.730924: Yayy! New best EMA pseudo Dice: 0.5559 +2026-04-09 01:00:42.485026: +2026-04-09 01:00:42.487517: Epoch 311 +2026-04-09 01:00:42.489290: Current learning rate: 0.00715 +2026-04-09 01:02:23.836785: train_loss -0.147 +2026-04-09 01:02:23.844063: val_loss -0.1529 +2026-04-09 01:02:23.846383: Pseudo dice [0.5049, 0.6858, 0.6184, 0.619, 0.3481, 0.7312, 0.876] +2026-04-09 01:02:23.850144: Epoch time: 101.35 s +2026-04-09 01:02:23.852636: Yayy! New best EMA pseudo Dice: 0.5629 +2026-04-09 01:02:26.341421: +2026-04-09 01:02:26.344560: Epoch 312 +2026-04-09 01:02:26.346403: Current learning rate: 0.00714 +2026-04-09 01:04:08.347903: train_loss -0.1664 +2026-04-09 01:04:08.354942: val_loss -0.1329 +2026-04-09 01:04:08.357050: Pseudo dice [0.6374, 0.769, 0.6131, 0.621, 0.4331, 0.2311, 0.7437] +2026-04-09 01:04:08.359269: Epoch time: 102.01 s +2026-04-09 01:04:08.361922: Yayy! 
New best EMA pseudo Dice: 0.5645 +2026-04-09 01:04:11.178419: +2026-04-09 01:04:11.181324: Epoch 313 +2026-04-09 01:04:11.183284: Current learning rate: 0.00713 +2026-04-09 01:05:53.069017: train_loss -0.1699 +2026-04-09 01:05:53.076022: val_loss -0.1087 +2026-04-09 01:05:53.078264: Pseudo dice [0.5611, 0.5705, 0.7289, 0.0, 0.3298, 0.0838, 0.4634] +2026-04-09 01:05:53.080658: Epoch time: 101.89 s +2026-04-09 01:05:54.142368: +2026-04-09 01:05:54.144852: Epoch 314 +2026-04-09 01:05:54.146914: Current learning rate: 0.00712 +2026-04-09 01:07:35.580276: train_loss -0.1644 +2026-04-09 01:07:35.586805: val_loss -0.1069 +2026-04-09 01:07:35.589473: Pseudo dice [0.4385, 0.6881, 0.7135, 0.337, 0.2186, 0.0391, 0.4159] +2026-04-09 01:07:35.591548: Epoch time: 101.44 s +2026-04-09 01:07:36.666072: +2026-04-09 01:07:36.668193: Epoch 315 +2026-04-09 01:07:36.670140: Current learning rate: 0.00711 +2026-04-09 01:09:18.633380: train_loss -0.1748 +2026-04-09 01:09:18.639127: val_loss -0.0476 +2026-04-09 01:09:18.641580: Pseudo dice [0.3558, 0.4824, 0.4873, 0.622, 0.5366, 0.0755, 0.3714] +2026-04-09 01:09:18.644349: Epoch time: 101.97 s +2026-04-09 01:09:19.725517: +2026-04-09 01:09:19.729779: Epoch 316 +2026-04-09 01:09:19.732194: Current learning rate: 0.0071 +2026-04-09 01:11:01.438955: train_loss -0.178 +2026-04-09 01:11:01.444510: val_loss -0.1487 +2026-04-09 01:11:01.446247: Pseudo dice [0.4984, 0.3784, 0.6561, 0.4627, 0.5452, 0.2543, 0.8692] +2026-04-09 01:11:01.456917: Epoch time: 101.72 s +2026-04-09 01:11:02.516118: +2026-04-09 01:11:02.521642: Epoch 317 +2026-04-09 01:11:02.523423: Current learning rate: 0.0071 +2026-04-09 01:12:43.400388: train_loss -0.17 +2026-04-09 01:12:43.405877: val_loss -0.1237 +2026-04-09 01:12:43.407657: Pseudo dice [0.5326, 0.5925, 0.5829, 0.4283, 0.5579, 0.2947, 0.3504] +2026-04-09 01:12:43.410288: Epoch time: 100.89 s +2026-04-09 01:12:44.490709: +2026-04-09 01:12:44.492464: Epoch 318 +2026-04-09 01:12:44.494331: Current learning rate: 
0.00709 +2026-04-09 01:14:26.154650: train_loss -0.1471 +2026-04-09 01:14:26.160491: val_loss -0.0887 +2026-04-09 01:14:26.162788: Pseudo dice [0.4826, 0.729, 0.5215, 0.0873, 0.4122, 0.0701, 0.6495] +2026-04-09 01:14:26.164730: Epoch time: 101.67 s +2026-04-09 01:14:27.226954: +2026-04-09 01:14:27.229965: Epoch 319 +2026-04-09 01:14:27.232334: Current learning rate: 0.00708 +2026-04-09 01:16:09.318403: train_loss -0.1582 +2026-04-09 01:16:09.325286: val_loss -0.1254 +2026-04-09 01:16:09.327404: Pseudo dice [0.6214, 0.5961, 0.8049, 0.6944, 0.41, 0.0377, 0.6034] +2026-04-09 01:16:09.329702: Epoch time: 102.09 s +2026-04-09 01:16:10.383668: +2026-04-09 01:16:10.385900: Epoch 320 +2026-04-09 01:16:10.388439: Current learning rate: 0.00707 +2026-04-09 01:17:52.398228: train_loss -0.1646 +2026-04-09 01:17:52.404063: val_loss -0.101 +2026-04-09 01:17:52.406440: Pseudo dice [0.5827, 0.5007, 0.5971, 0.6358, 0.3805, 0.1942, 0.4995] +2026-04-09 01:17:52.408514: Epoch time: 102.02 s +2026-04-09 01:17:53.475509: +2026-04-09 01:17:53.477716: Epoch 321 +2026-04-09 01:17:53.480341: Current learning rate: 0.00706 +2026-04-09 01:19:35.384385: train_loss -0.1744 +2026-04-09 01:19:35.390139: val_loss -0.1033 +2026-04-09 01:19:35.393041: Pseudo dice [0.7365, 0.3686, 0.481, 0.5724, 0.3645, 0.1051, 0.7535] +2026-04-09 01:19:35.395803: Epoch time: 101.91 s +2026-04-09 01:19:36.442730: +2026-04-09 01:19:36.444934: Epoch 322 +2026-04-09 01:19:36.446962: Current learning rate: 0.00705 +2026-04-09 01:21:17.644039: train_loss -0.1805 +2026-04-09 01:21:17.649711: val_loss -0.1653 +2026-04-09 01:21:17.651858: Pseudo dice [0.6504, 0.6373, 0.5868, 0.6437, 0.3962, 0.887, 0.8701] +2026-04-09 01:21:17.654278: Epoch time: 101.2 s +2026-04-09 01:21:18.721295: +2026-04-09 01:21:18.723690: Epoch 323 +2026-04-09 01:21:18.725309: Current learning rate: 0.00704 +2026-04-09 01:23:00.537522: train_loss -0.1592 +2026-04-09 01:23:00.544077: val_loss -0.11 +2026-04-09 01:23:00.546653: Pseudo dice [0.3842, 
0.5684, 0.3851, 0.4745, 0.258, 0.2269, 0.8124] +2026-04-09 01:23:00.549275: Epoch time: 101.82 s +2026-04-09 01:23:02.558955: +2026-04-09 01:23:02.561228: Epoch 324 +2026-04-09 01:23:02.563538: Current learning rate: 0.00703 +2026-04-09 01:24:43.625880: train_loss -0.1539 +2026-04-09 01:24:43.631419: val_loss -0.1156 +2026-04-09 01:24:43.633408: Pseudo dice [0.5378, 0.5028, 0.5858, 0.2508, 0.6456, 0.0799, 0.6093] +2026-04-09 01:24:43.636230: Epoch time: 101.07 s +2026-04-09 01:24:44.708579: +2026-04-09 01:24:44.711473: Epoch 325 +2026-04-09 01:24:44.714062: Current learning rate: 0.00702 +2026-04-09 01:26:26.623177: train_loss -0.1708 +2026-04-09 01:26:26.629037: val_loss -0.0569 +2026-04-09 01:26:26.631898: Pseudo dice [0.1863, 0.6935, 0.3889, 0.6742, 0.4648, 0.0688, 0.6788] +2026-04-09 01:26:26.634300: Epoch time: 101.92 s +2026-04-09 01:26:27.696565: +2026-04-09 01:26:27.698265: Epoch 326 +2026-04-09 01:26:27.700087: Current learning rate: 0.00701 +2026-04-09 01:28:09.649458: train_loss -0.1562 +2026-04-09 01:28:09.656366: val_loss -0.1148 +2026-04-09 01:28:09.658680: Pseudo dice [0.4608, 0.2165, 0.5676, 0.786, 0.3824, 0.1, 0.8204] +2026-04-09 01:28:09.661341: Epoch time: 101.96 s +2026-04-09 01:28:10.741545: +2026-04-09 01:28:10.744078: Epoch 327 +2026-04-09 01:28:10.747524: Current learning rate: 0.007 +2026-04-09 01:29:52.705337: train_loss -0.1704 +2026-04-09 01:29:52.711125: val_loss -0.1675 +2026-04-09 01:29:52.713208: Pseudo dice [0.5108, 0.11, 0.7239, 0.6924, 0.4895, 0.3237, 0.7271] +2026-04-09 01:29:52.715420: Epoch time: 101.97 s +2026-04-09 01:29:53.792123: +2026-04-09 01:29:53.794428: Epoch 328 +2026-04-09 01:29:53.796448: Current learning rate: 0.00699 +2026-04-09 01:31:35.736004: train_loss -0.1689 +2026-04-09 01:31:35.743881: val_loss -0.0812 +2026-04-09 01:31:35.746509: Pseudo dice [0.3434, 0.3612, 0.5477, 0.6474, 0.4548, 0.1409, 0.4507] +2026-04-09 01:31:35.749574: Epoch time: 101.95 s +2026-04-09 01:31:36.816985: +2026-04-09 01:31:36.819177: 
Epoch 329 +2026-04-09 01:31:36.821430: Current learning rate: 0.00698 +2026-04-09 01:33:19.179946: train_loss -0.1746 +2026-04-09 01:33:19.185977: val_loss -0.119 +2026-04-09 01:33:19.188387: Pseudo dice [0.4622, 0.3538, 0.6495, 0.1443, 0.427, 0.1392, 0.759] +2026-04-09 01:33:19.191231: Epoch time: 102.37 s +2026-04-09 01:33:20.260811: +2026-04-09 01:33:20.263042: Epoch 330 +2026-04-09 01:33:20.266729: Current learning rate: 0.00697 +2026-04-09 01:35:02.512730: train_loss -0.1724 +2026-04-09 01:35:02.520516: val_loss -0.1649 +2026-04-09 01:35:02.523474: Pseudo dice [0.6267, 0.7708, 0.5665, 0.66, 0.4225, 0.7012, 0.7449] +2026-04-09 01:35:02.526430: Epoch time: 102.25 s +2026-04-09 01:35:03.596510: +2026-04-09 01:35:03.599406: Epoch 331 +2026-04-09 01:35:03.601610: Current learning rate: 0.00696 +2026-04-09 01:36:45.059087: train_loss -0.1702 +2026-04-09 01:36:45.064658: val_loss -0.1203 +2026-04-09 01:36:45.066732: Pseudo dice [0.4864, 0.6072, 0.4988, 0.6731, 0.6196, 0.0526, 0.7842] +2026-04-09 01:36:45.068825: Epoch time: 101.47 s +2026-04-09 01:36:46.158089: +2026-04-09 01:36:46.160987: Epoch 332 +2026-04-09 01:36:46.163169: Current learning rate: 0.00696 +2026-04-09 01:38:27.801848: train_loss -0.1682 +2026-04-09 01:38:27.808768: val_loss -0.1395 +2026-04-09 01:38:27.812011: Pseudo dice [0.314, 0.705, 0.6088, 0.6861, 0.5808, 0.8381, 0.6709] +2026-04-09 01:38:27.815327: Epoch time: 101.65 s +2026-04-09 01:38:28.883904: +2026-04-09 01:38:28.886300: Epoch 333 +2026-04-09 01:38:28.888522: Current learning rate: 0.00695 +2026-04-09 01:40:10.385962: train_loss -0.1632 +2026-04-09 01:40:10.391639: val_loss -0.1273 +2026-04-09 01:40:10.393842: Pseudo dice [0.6518, 0.5983, 0.5791, 0.2497, 0.4419, 0.6417, 0.6495] +2026-04-09 01:40:10.395933: Epoch time: 101.51 s +2026-04-09 01:40:11.542390: +2026-04-09 01:40:11.545756: Epoch 334 +2026-04-09 01:40:11.547785: Current learning rate: 0.00694 +2026-04-09 01:41:52.948292: train_loss -0.1584 +2026-04-09 01:41:52.954338: val_loss 
-0.1405 +2026-04-09 01:41:52.956776: Pseudo dice [0.5333, 0.7976, 0.6205, 0.6997, 0.2458, 0.6464, 0.6696] +2026-04-09 01:41:52.959226: Epoch time: 101.41 s +2026-04-09 01:41:54.054927: +2026-04-09 01:41:54.057013: Epoch 335 +2026-04-09 01:41:54.058926: Current learning rate: 0.00693 +2026-04-09 01:43:35.457519: train_loss -0.1702 +2026-04-09 01:43:35.464012: val_loss -0.1173 +2026-04-09 01:43:35.466300: Pseudo dice [0.6047, 0.348, 0.6145, 0.6787, 0.3918, 0.2608, 0.406] +2026-04-09 01:43:35.468847: Epoch time: 101.41 s +2026-04-09 01:43:36.547429: +2026-04-09 01:43:36.549738: Epoch 336 +2026-04-09 01:43:36.551455: Current learning rate: 0.00692 +2026-04-09 01:45:17.979842: train_loss -0.1467 +2026-04-09 01:45:17.985781: val_loss -0.0926 +2026-04-09 01:45:17.988350: Pseudo dice [0.4431, 0.5008, 0.3832, 0.8498, 0.3323, 0.0856, 0.5671] +2026-04-09 01:45:17.990118: Epoch time: 101.44 s +2026-04-09 01:45:19.106004: +2026-04-09 01:45:19.107941: Epoch 337 +2026-04-09 01:45:19.109991: Current learning rate: 0.00691 +2026-04-09 01:47:01.876817: train_loss -0.1601 +2026-04-09 01:47:01.882873: val_loss -0.0632 +2026-04-09 01:47:01.885102: Pseudo dice [0.5604, 0.7598, 0.5141, 0.4632, 0.2749, 0.0491, 0.7986] +2026-04-09 01:47:01.887598: Epoch time: 102.77 s +2026-04-09 01:47:02.978584: +2026-04-09 01:47:02.980800: Epoch 338 +2026-04-09 01:47:02.983007: Current learning rate: 0.0069 +2026-04-09 01:48:44.678881: train_loss -0.1682 +2026-04-09 01:48:44.685520: val_loss -0.1391 +2026-04-09 01:48:44.687978: Pseudo dice [0.5012, 0.0827, 0.5953, 0.1041, 0.5077, 0.5557, 0.7334] +2026-04-09 01:48:44.690097: Epoch time: 101.7 s +2026-04-09 01:48:45.786665: +2026-04-09 01:48:45.789110: Epoch 339 +2026-04-09 01:48:45.792386: Current learning rate: 0.00689 +2026-04-09 01:50:28.773715: train_loss -0.1549 +2026-04-09 01:50:28.778977: val_loss -0.0972 +2026-04-09 01:50:28.780694: Pseudo dice [0.2554, 0.6182, 0.7192, 0.2692, 0.2418, 0.119, 0.2845] +2026-04-09 01:50:28.782813: Epoch time: 102.99 
s +2026-04-09 01:50:29.861435: +2026-04-09 01:50:29.863658: Epoch 340 +2026-04-09 01:50:29.865726: Current learning rate: 0.00688 +2026-04-09 01:52:11.414675: train_loss -0.1572 +2026-04-09 01:52:11.421049: val_loss -0.1615 +2026-04-09 01:52:11.423873: Pseudo dice [0.5983, 0.8097, 0.7165, 0.7142, 0.5552, 0.7946, 0.7077] +2026-04-09 01:52:11.426709: Epoch time: 101.56 s +2026-04-09 01:52:12.514556: +2026-04-09 01:52:12.516604: Epoch 341 +2026-04-09 01:52:12.518488: Current learning rate: 0.00687 +2026-04-09 01:53:55.260564: train_loss -0.1592 +2026-04-09 01:53:55.268539: val_loss -0.1389 +2026-04-09 01:53:55.271137: Pseudo dice [0.5931, 0.752, 0.6808, 0.6477, 0.5544, 0.735, 0.7648] +2026-04-09 01:53:55.273949: Epoch time: 102.75 s +2026-04-09 01:53:56.345081: +2026-04-09 01:53:56.347577: Epoch 342 +2026-04-09 01:53:56.349509: Current learning rate: 0.00686 +2026-04-09 01:55:38.612988: train_loss -0.174 +2026-04-09 01:55:38.620202: val_loss -0.1054 +2026-04-09 01:55:38.622687: Pseudo dice [0.6762, 0.4328, 0.5178, 0.6426, 0.5332, 0.1166, 0.8645] +2026-04-09 01:55:38.626548: Epoch time: 102.27 s +2026-04-09 01:55:39.721177: +2026-04-09 01:55:39.724947: Epoch 343 +2026-04-09 01:55:39.727521: Current learning rate: 0.00685 +2026-04-09 01:57:21.107636: train_loss -0.1536 +2026-04-09 01:57:21.112952: val_loss -0.1442 +2026-04-09 01:57:21.115266: Pseudo dice [0.5837, 0.6787, 0.5903, 0.7801, 0.4795, 0.0806, 0.5933] +2026-04-09 01:57:21.117647: Epoch time: 101.39 s +2026-04-09 01:57:23.186648: +2026-04-09 01:57:23.188547: Epoch 344 +2026-04-09 01:57:23.190279: Current learning rate: 0.00684 +2026-04-09 01:59:04.001700: train_loss -0.17 +2026-04-09 01:59:04.007881: val_loss -0.1428 +2026-04-09 01:59:04.011111: Pseudo dice [0.6134, 0.5533, 0.7298, 0.6359, 0.557, 0.7224, 0.6291] +2026-04-09 01:59:04.013303: Epoch time: 100.82 s +2026-04-09 01:59:05.118411: +2026-04-09 01:59:05.120584: Epoch 345 +2026-04-09 01:59:05.122591: Current learning rate: 0.00683 +2026-04-09 
02:00:46.485387: train_loss -0.1721 +2026-04-09 02:00:46.491649: val_loss -0.0802 +2026-04-09 02:00:46.495225: Pseudo dice [0.664, 0.5534, 0.339, 0.334, 0.1467, 0.1297, 0.4272] +2026-04-09 02:00:46.497196: Epoch time: 101.37 s +2026-04-09 02:00:47.586537: +2026-04-09 02:00:47.588508: Epoch 346 +2026-04-09 02:00:47.590633: Current learning rate: 0.00682 +2026-04-09 02:02:30.036455: train_loss -0.1692 +2026-04-09 02:02:30.045520: val_loss -0.089 +2026-04-09 02:02:30.048185: Pseudo dice [0.3429, 0.3494, 0.5481, 0.5442, 0.3831, 0.0834, 0.7142] +2026-04-09 02:02:30.050487: Epoch time: 102.45 s +2026-04-09 02:02:31.143708: +2026-04-09 02:02:31.147548: Epoch 347 +2026-04-09 02:02:31.149965: Current learning rate: 0.00681 +2026-04-09 02:04:13.169805: train_loss -0.1871 +2026-04-09 02:04:13.176578: val_loss -0.1252 +2026-04-09 02:04:13.178622: Pseudo dice [0.8022, 0.7435, 0.6528, 0.5971, 0.5652, 0.0581, 0.6731] +2026-04-09 02:04:13.180650: Epoch time: 102.03 s +2026-04-09 02:04:14.268148: +2026-04-09 02:04:14.271296: Epoch 348 +2026-04-09 02:04:14.273613: Current learning rate: 0.0068 +2026-04-09 02:05:56.943690: train_loss -0.1782 +2026-04-09 02:05:56.951273: val_loss -0.048 +2026-04-09 02:05:56.954961: Pseudo dice [0.4434, 0.2976, 0.4787, 0.618, 0.4866, 0.0148, 0.8167] +2026-04-09 02:05:56.957029: Epoch time: 102.68 s +2026-04-09 02:05:58.044473: +2026-04-09 02:05:58.046717: Epoch 349 +2026-04-09 02:05:58.048872: Current learning rate: 0.0068 +2026-04-09 02:07:39.626509: train_loss -0.18 +2026-04-09 02:07:39.632918: val_loss -0.1417 +2026-04-09 02:07:39.635478: Pseudo dice [0.6872, 0.4375, 0.6815, 0.5512, 0.4263, 0.5252, 0.8419] +2026-04-09 02:07:39.637570: Epoch time: 101.59 s +2026-04-09 02:07:42.418220: +2026-04-09 02:07:42.421029: Epoch 350 +2026-04-09 02:07:42.423751: Current learning rate: 0.00679 +2026-04-09 02:09:24.260584: train_loss -0.1721 +2026-04-09 02:09:24.268050: val_loss -0.1304 +2026-04-09 02:09:24.270300: Pseudo dice [0.3797, 0.3723, 0.6535, 0.694, 
0.2854, 0.5547, 0.7236] +2026-04-09 02:09:24.272952: Epoch time: 101.85 s +2026-04-09 02:09:25.364618: +2026-04-09 02:09:25.366602: Epoch 351 +2026-04-09 02:09:25.368673: Current learning rate: 0.00678 +2026-04-09 02:11:07.782914: train_loss -0.1631 +2026-04-09 02:11:07.790479: val_loss -0.1172 +2026-04-09 02:11:07.792714: Pseudo dice [0.7007, 0.7426, 0.5283, 0.485, 0.5611, 0.1758, 0.7819] +2026-04-09 02:11:07.794938: Epoch time: 102.42 s +2026-04-09 02:11:08.894745: +2026-04-09 02:11:08.897929: Epoch 352 +2026-04-09 02:11:08.900178: Current learning rate: 0.00677 +2026-04-09 02:12:50.966623: train_loss -0.1657 +2026-04-09 02:12:50.971988: val_loss -0.0736 +2026-04-09 02:12:50.974459: Pseudo dice [0.3275, 0.6855, 0.627, 0.3651, 0.6298, 0.0647, 0.861] +2026-04-09 02:12:50.977194: Epoch time: 102.07 s +2026-04-09 02:12:52.077006: +2026-04-09 02:12:52.080263: Epoch 353 +2026-04-09 02:12:52.081992: Current learning rate: 0.00676 +2026-04-09 02:14:34.311448: train_loss -0.1639 +2026-04-09 02:14:34.318413: val_loss -0.112 +2026-04-09 02:14:34.322971: Pseudo dice [0.032, 0.0702, 0.6523, 0.4837, 0.4576, 0.21, 0.5125] +2026-04-09 02:14:34.325145: Epoch time: 102.24 s +2026-04-09 02:14:35.425062: +2026-04-09 02:14:35.428828: Epoch 354 +2026-04-09 02:14:35.432273: Current learning rate: 0.00675 +2026-04-09 02:16:17.301212: train_loss -0.1784 +2026-04-09 02:16:17.307456: val_loss -0.1096 +2026-04-09 02:16:17.309722: Pseudo dice [0.3728, 0.5083, 0.7313, 0.6792, 0.6197, 0.0847, 0.8578] +2026-04-09 02:16:17.311961: Epoch time: 101.88 s +2026-04-09 02:16:18.420936: +2026-04-09 02:16:18.424065: Epoch 355 +2026-04-09 02:16:18.426285: Current learning rate: 0.00674 +2026-04-09 02:18:00.326265: train_loss -0.1699 +2026-04-09 02:18:00.334816: val_loss -0.166 +2026-04-09 02:18:00.337229: Pseudo dice [0.3115, 0.6083, 0.7242, 0.703, 0.5265, 0.7464, 0.895] +2026-04-09 02:18:00.340394: Epoch time: 101.91 s +2026-04-09 02:18:01.452787: +2026-04-09 02:18:01.454955: Epoch 356 +2026-04-09 
02:18:01.456825: Current learning rate: 0.00673 +2026-04-09 02:19:43.341549: train_loss -0.1732 +2026-04-09 02:19:43.346713: val_loss -0.1471 +2026-04-09 02:19:43.348380: Pseudo dice [0.5016, 0.0748, 0.6193, 0.6765, 0.5413, 0.1241, 0.7334] +2026-04-09 02:19:43.350087: Epoch time: 101.89 s +2026-04-09 02:19:44.450964: +2026-04-09 02:19:44.453299: Epoch 357 +2026-04-09 02:19:44.455400: Current learning rate: 0.00672 +2026-04-09 02:21:26.853900: train_loss -0.1683 +2026-04-09 02:21:26.862688: val_loss -0.113 +2026-04-09 02:21:26.865056: Pseudo dice [0.415, 0.433, 0.3577, 0.5094, 0.3573, 0.0326, 0.5169] +2026-04-09 02:21:26.867470: Epoch time: 102.41 s +2026-04-09 02:21:27.953516: +2026-04-09 02:21:27.955559: Epoch 358 +2026-04-09 02:21:27.957167: Current learning rate: 0.00671 +2026-04-09 02:23:10.260252: train_loss -0.1654 +2026-04-09 02:23:10.266706: val_loss -0.1159 +2026-04-09 02:23:10.269025: Pseudo dice [0.2638, 0.6078, 0.5683, 0.4008, 0.5932, 0.432, 0.6454] +2026-04-09 02:23:10.271560: Epoch time: 102.31 s +2026-04-09 02:23:11.381013: +2026-04-09 02:23:11.384273: Epoch 359 +2026-04-09 02:23:11.386839: Current learning rate: 0.0067 +2026-04-09 02:24:53.452642: train_loss -0.1555 +2026-04-09 02:24:53.463212: val_loss -0.1029 +2026-04-09 02:24:53.465839: Pseudo dice [0.4525, 0.5516, 0.579, 0.6371, 0.4774, 0.035, 0.46] +2026-04-09 02:24:53.468826: Epoch time: 102.07 s +2026-04-09 02:24:54.571760: +2026-04-09 02:24:54.573815: Epoch 360 +2026-04-09 02:24:54.576622: Current learning rate: 0.00669 +2026-04-09 02:26:36.370071: train_loss -0.1778 +2026-04-09 02:26:36.377339: val_loss -0.1224 +2026-04-09 02:26:36.379701: Pseudo dice [0.2406, 0.4076, 0.6128, 0.5451, 0.5199, 0.1026, 0.7899] +2026-04-09 02:26:36.382234: Epoch time: 101.8 s +2026-04-09 02:26:37.509067: +2026-04-09 02:26:37.511418: Epoch 361 +2026-04-09 02:26:37.513625: Current learning rate: 0.00668 +2026-04-09 02:28:19.051193: train_loss -0.1678 +2026-04-09 02:28:19.056436: val_loss -0.111 +2026-04-09 
02:28:19.058595: Pseudo dice [0.4881, 0.5163, 0.7366, 0.3264, 0.2385, 0.3693, 0.3485] +2026-04-09 02:28:19.060218: Epoch time: 101.55 s +2026-04-09 02:28:20.181315: +2026-04-09 02:28:20.186878: Epoch 362 +2026-04-09 02:28:20.189755: Current learning rate: 0.00667 +2026-04-09 02:30:01.370982: train_loss -0.1557 +2026-04-09 02:30:01.376304: val_loss -0.127 +2026-04-09 02:30:01.378350: Pseudo dice [0.6041, 0.5853, 0.703, 0.1363, 0.3925, 0.3905, 0.6967] +2026-04-09 02:30:01.380183: Epoch time: 101.19 s +2026-04-09 02:30:02.498513: +2026-04-09 02:30:02.500926: Epoch 363 +2026-04-09 02:30:02.503040: Current learning rate: 0.00666 +2026-04-09 02:31:44.008354: train_loss -0.1553 +2026-04-09 02:31:44.015036: val_loss -0.0598 +2026-04-09 02:31:44.019445: Pseudo dice [0.4182, 0.2041, 0.4982, 0.145, 0.211, 0.0597, 0.42] +2026-04-09 02:31:44.021892: Epoch time: 101.51 s +2026-04-09 02:31:46.103050: +2026-04-09 02:31:46.105023: Epoch 364 +2026-04-09 02:31:46.106695: Current learning rate: 0.00665 +2026-04-09 02:33:27.403630: train_loss -0.1553 +2026-04-09 02:33:27.409883: val_loss -0.1303 +2026-04-09 02:33:27.412367: Pseudo dice [0.5298, 0.8368, 0.4993, 0.0941, 0.5986, 0.1168, 0.8317] +2026-04-09 02:33:27.414548: Epoch time: 101.3 s +2026-04-09 02:33:28.510960: +2026-04-09 02:33:28.513512: Epoch 365 +2026-04-09 02:33:28.516954: Current learning rate: 0.00665 +2026-04-09 02:35:09.953484: train_loss -0.1632 +2026-04-09 02:35:09.959137: val_loss -0.1335 +2026-04-09 02:35:09.962760: Pseudo dice [0.6791, 0.2851, 0.6802, 0.4396, 0.4096, 0.2347, 0.5058] +2026-04-09 02:35:09.966415: Epoch time: 101.45 s +2026-04-09 02:35:11.098476: +2026-04-09 02:35:11.101043: Epoch 366 +2026-04-09 02:35:11.103079: Current learning rate: 0.00664 +2026-04-09 02:36:53.018893: train_loss -0.1733 +2026-04-09 02:36:53.026617: val_loss -0.1215 +2026-04-09 02:36:53.035889: Pseudo dice [0.6351, 0.2614, 0.5001, 0.8438, 0.6347, 0.3509, 0.7874] +2026-04-09 02:36:53.040077: Epoch time: 101.92 s +2026-04-09 
02:36:54.189330: +2026-04-09 02:36:54.191287: Epoch 367 +2026-04-09 02:36:54.193007: Current learning rate: 0.00663 +2026-04-09 02:38:35.290269: train_loss -0.1717 +2026-04-09 02:38:35.295052: val_loss -0.1243 +2026-04-09 02:38:35.297122: Pseudo dice [0.3369, 0.4195, 0.7403, 0.4519, 0.4316, 0.2244, 0.7766] +2026-04-09 02:38:35.298955: Epoch time: 101.1 s +2026-04-09 02:38:36.427497: +2026-04-09 02:38:36.429175: Epoch 368 +2026-04-09 02:38:36.431019: Current learning rate: 0.00662 +2026-04-09 02:40:18.063119: train_loss -0.1799 +2026-04-09 02:40:18.068693: val_loss -0.1298 +2026-04-09 02:40:18.084466: Pseudo dice [0.4877, 0.7212, 0.5936, 0.1246, 0.3654, 0.1502, 0.6868] +2026-04-09 02:40:18.087054: Epoch time: 101.64 s +2026-04-09 02:40:19.192272: +2026-04-09 02:40:19.194703: Epoch 369 +2026-04-09 02:40:19.197501: Current learning rate: 0.00661 +2026-04-09 02:42:00.542440: train_loss -0.1794 +2026-04-09 02:42:00.547592: val_loss -0.086 +2026-04-09 02:42:00.550122: Pseudo dice [0.6361, 0.5157, 0.328, 0.382, 0.5695, 0.0427, 0.7914] +2026-04-09 02:42:00.552211: Epoch time: 101.35 s +2026-04-09 02:42:01.677593: +2026-04-09 02:42:01.679603: Epoch 370 +2026-04-09 02:42:01.681293: Current learning rate: 0.0066 +2026-04-09 02:43:43.016370: train_loss -0.1809 +2026-04-09 02:43:43.022078: val_loss -0.128 +2026-04-09 02:43:43.024203: Pseudo dice [0.7879, 0.7455, 0.623, 0.0, 0.3471, 0.1624, 0.6052] +2026-04-09 02:43:43.026307: Epoch time: 101.34 s +2026-04-09 02:43:44.154124: +2026-04-09 02:43:44.156626: Epoch 371 +2026-04-09 02:43:44.159261: Current learning rate: 0.00659 +2026-04-09 02:45:26.428328: train_loss -0.1729 +2026-04-09 02:45:26.435214: val_loss -0.141 +2026-04-09 02:45:26.437579: Pseudo dice [0.7774, 0.7844, 0.5577, 0.5132, 0.4024, 0.3862, 0.4559] +2026-04-09 02:45:26.440772: Epoch time: 102.28 s +2026-04-09 02:45:27.573317: +2026-04-09 02:45:27.576291: Epoch 372 +2026-04-09 02:45:27.579034: Current learning rate: 0.00658 +2026-04-09 02:47:09.246041: train_loss 
-0.1677 +2026-04-09 02:47:09.252466: val_loss -0.1292 +2026-04-09 02:47:09.260134: Pseudo dice [0.7594, 0.5634, 0.6412, 0.2298, 0.2901, 0.6741, 0.4816] +2026-04-09 02:47:09.262144: Epoch time: 101.68 s +2026-04-09 02:47:10.391616: +2026-04-09 02:47:10.393821: Epoch 373 +2026-04-09 02:47:10.395529: Current learning rate: 0.00657 +2026-04-09 02:48:52.594187: train_loss -0.1751 +2026-04-09 02:48:52.598929: val_loss -0.0753 +2026-04-09 02:48:52.601072: Pseudo dice [0.5302, 0.8046, 0.2895, 0.585, 0.3885, 0.0239, 0.3772] +2026-04-09 02:48:52.603466: Epoch time: 102.21 s +2026-04-09 02:48:53.703825: +2026-04-09 02:48:53.706513: Epoch 374 +2026-04-09 02:48:53.708464: Current learning rate: 0.00656 +2026-04-09 02:50:36.380509: train_loss -0.1757 +2026-04-09 02:50:36.388671: val_loss -0.1368 +2026-04-09 02:50:36.392129: Pseudo dice [0.3929, 0.7035, 0.667, 0.7718, 0.5905, 0.0914, 0.8943] +2026-04-09 02:50:36.394151: Epoch time: 102.68 s +2026-04-09 02:50:37.486432: +2026-04-09 02:50:37.491008: Epoch 375 +2026-04-09 02:50:37.493932: Current learning rate: 0.00655 +2026-04-09 02:52:19.184671: train_loss -0.1509 +2026-04-09 02:52:19.192038: val_loss -0.1518 +2026-04-09 02:52:19.195099: Pseudo dice [0.5714, 0.8059, 0.6349, 0.1339, 0.5105, 0.5983, 0.7404] +2026-04-09 02:52:19.197415: Epoch time: 101.7 s +2026-04-09 02:52:20.317767: +2026-04-09 02:52:20.320313: Epoch 376 +2026-04-09 02:52:20.322672: Current learning rate: 0.00654 +2026-04-09 02:54:02.976225: train_loss -0.1596 +2026-04-09 02:54:02.983867: val_loss -0.0826 +2026-04-09 02:54:02.986524: Pseudo dice [0.394, 0.6254, 0.7054, 0.6464, 0.217, 0.1444, 0.5975] +2026-04-09 02:54:02.990256: Epoch time: 102.66 s +2026-04-09 02:54:04.089126: +2026-04-09 02:54:04.093184: Epoch 377 +2026-04-09 02:54:04.095623: Current learning rate: 0.00653 +2026-04-09 02:55:45.420522: train_loss -0.1646 +2026-04-09 02:55:45.426189: val_loss -0.1035 +2026-04-09 02:55:45.428496: Pseudo dice [0.4414, 0.5437, 0.6795, 0.572, 0.5501, 0.2115, 0.6061] 
+2026-04-09 02:55:45.430617: Epoch time: 101.33 s +2026-04-09 02:55:46.542885: +2026-04-09 02:55:46.544862: Epoch 378 +2026-04-09 02:55:46.547037: Current learning rate: 0.00652 +2026-04-09 02:57:28.752563: train_loss -0.1765 +2026-04-09 02:57:28.758427: val_loss -0.1494 +2026-04-09 02:57:28.760685: Pseudo dice [0.7193, 0.6373, 0.6613, 0.7825, 0.5603, 0.7115, 0.7869] +2026-04-09 02:57:28.763734: Epoch time: 102.21 s +2026-04-09 02:57:29.862676: +2026-04-09 02:57:29.865247: Epoch 379 +2026-04-09 02:57:29.867163: Current learning rate: 0.00651 +2026-04-09 02:59:11.499932: train_loss -0.1565 +2026-04-09 02:59:11.507404: val_loss -0.1473 +2026-04-09 02:59:11.509599: Pseudo dice [0.7633, 0.5468, 0.7191, 0.5465, 0.4464, 0.6226, 0.6661] +2026-04-09 02:59:11.511807: Epoch time: 101.64 s +2026-04-09 02:59:12.631260: +2026-04-09 02:59:12.633212: Epoch 380 +2026-04-09 02:59:12.635356: Current learning rate: 0.0065 +2026-04-09 03:00:54.722042: train_loss -0.1702 +2026-04-09 03:00:54.736798: val_loss -0.1469 +2026-04-09 03:00:54.739089: Pseudo dice [0.532, 0.5927, 0.6788, 0.6477, 0.5428, 0.1245, 0.8047] +2026-04-09 03:00:54.742107: Epoch time: 102.09 s +2026-04-09 03:00:55.848989: +2026-04-09 03:00:55.850828: Epoch 381 +2026-04-09 03:00:55.853690: Current learning rate: 0.00649 +2026-04-09 03:02:38.207123: train_loss -0.1711 +2026-04-09 03:02:38.212814: val_loss -0.1206 +2026-04-09 03:02:38.215530: Pseudo dice [0.3683, 0.6951, 0.6375, 0.2552, 0.5873, 0.1648, 0.7581] +2026-04-09 03:02:38.218873: Epoch time: 102.36 s +2026-04-09 03:02:39.325337: +2026-04-09 03:02:39.328561: Epoch 382 +2026-04-09 03:02:39.330585: Current learning rate: 0.00648 +2026-04-09 03:04:20.588836: train_loss -0.1748 +2026-04-09 03:04:20.595452: val_loss -0.1107 +2026-04-09 03:04:20.597241: Pseudo dice [0.8092, 0.8861, 0.3173, 0.3206, 0.3999, 0.0858, 0.6252] +2026-04-09 03:04:20.599801: Epoch time: 101.27 s +2026-04-09 03:04:21.728950: +2026-04-09 03:04:21.731247: Epoch 383 +2026-04-09 03:04:21.733282: 
Current learning rate: 0.00648 +2026-04-09 03:06:03.792316: train_loss -0.1603 +2026-04-09 03:06:03.799913: val_loss -0.1582 +2026-04-09 03:06:03.802151: Pseudo dice [0.5422, 0.4857, 0.7259, 0.218, 0.4614, 0.8331, 0.7686] +2026-04-09 03:06:03.804230: Epoch time: 102.07 s +2026-04-09 03:06:05.875818: +2026-04-09 03:06:05.877583: Epoch 384 +2026-04-09 03:06:05.879714: Current learning rate: 0.00647 +2026-04-09 03:07:48.002944: train_loss -0.185 +2026-04-09 03:07:48.011431: val_loss -0.1652 +2026-04-09 03:07:48.016503: Pseudo dice [0.2084, 0.5316, 0.7744, 0.7278, 0.4425, 0.6744, 0.8918] +2026-04-09 03:07:48.019639: Epoch time: 102.13 s +2026-04-09 03:07:49.136514: +2026-04-09 03:07:49.139071: Epoch 385 +2026-04-09 03:07:49.141166: Current learning rate: 0.00646 +2026-04-09 03:09:29.984950: train_loss -0.188 +2026-04-09 03:09:29.991209: val_loss -0.1548 +2026-04-09 03:09:29.993227: Pseudo dice [0.6746, 0.5199, 0.7106, 0.414, 0.4559, 0.6922, 0.6359] +2026-04-09 03:09:29.995609: Epoch time: 100.85 s +2026-04-09 03:09:31.107796: +2026-04-09 03:09:31.109587: Epoch 386 +2026-04-09 03:09:31.112032: Current learning rate: 0.00645 +2026-04-09 03:11:12.605767: train_loss -0.173 +2026-04-09 03:11:12.613206: val_loss -0.0844 +2026-04-09 03:11:12.615499: Pseudo dice [0.7188, 0.8261, 0.5581, 0.6868, 0.3797, 0.0459, 0.596] +2026-04-09 03:11:12.618919: Epoch time: 101.5 s +2026-04-09 03:11:13.746815: +2026-04-09 03:11:13.748837: Epoch 387 +2026-04-09 03:11:13.750797: Current learning rate: 0.00644 +2026-04-09 03:12:55.373953: train_loss -0.1717 +2026-04-09 03:12:55.381278: val_loss -0.1795 +2026-04-09 03:12:55.383697: Pseudo dice [0.514, 0.4824, 0.6855, 0.6914, 0.4202, 0.5448, 0.8539] +2026-04-09 03:12:55.386009: Epoch time: 101.63 s +2026-04-09 03:12:56.509083: +2026-04-09 03:12:56.511337: Epoch 388 +2026-04-09 03:12:56.512966: Current learning rate: 0.00643 +2026-04-09 03:14:38.192261: train_loss -0.1869 +2026-04-09 03:14:38.200180: val_loss -0.1028 +2026-04-09 03:14:38.204769: 
Pseudo dice [0.7798, 0.3908, 0.6234, 0.0073, 0.5433, 0.1096, 0.8139] +2026-04-09 03:14:38.207150: Epoch time: 101.69 s +2026-04-09 03:14:39.314823: +2026-04-09 03:14:39.317147: Epoch 389 +2026-04-09 03:14:39.319172: Current learning rate: 0.00642 +2026-04-09 03:16:21.597345: train_loss -0.1826 +2026-04-09 03:16:21.605775: val_loss -0.143 +2026-04-09 03:16:21.608257: Pseudo dice [0.6761, 0.6787, 0.5519, 0.742, 0.5809, 0.1294, 0.6105] +2026-04-09 03:16:21.610569: Epoch time: 102.29 s +2026-04-09 03:16:22.734101: +2026-04-09 03:16:22.736543: Epoch 390 +2026-04-09 03:16:22.739567: Current learning rate: 0.00641 +2026-04-09 03:18:04.889875: train_loss -0.1778 +2026-04-09 03:18:04.897883: val_loss -0.1589 +2026-04-09 03:18:04.900616: Pseudo dice [0.5393, 0.5784, 0.6847, 0.584, 0.4276, 0.1897, 0.8593] +2026-04-09 03:18:04.903129: Epoch time: 102.16 s +2026-04-09 03:18:06.037132: +2026-04-09 03:18:06.038906: Epoch 391 +2026-04-09 03:18:06.040496: Current learning rate: 0.0064 +2026-04-09 03:19:47.431202: train_loss -0.1789 +2026-04-09 03:19:47.437529: val_loss -0.1658 +2026-04-09 03:19:47.441253: Pseudo dice [0.6044, 0.5508, 0.6403, 0.4594, 0.6681, 0.4074, 0.8445] +2026-04-09 03:19:47.443882: Epoch time: 101.4 s +2026-04-09 03:19:48.569606: +2026-04-09 03:19:48.572161: Epoch 392 +2026-04-09 03:19:48.574810: Current learning rate: 0.00639 +2026-04-09 03:21:31.396467: train_loss -0.1874 +2026-04-09 03:21:31.403116: val_loss -0.1019 +2026-04-09 03:21:31.405676: Pseudo dice [0.7639, 0.5796, 0.7063, 0.5498, 0.5211, 0.0873, 0.7496] +2026-04-09 03:21:31.408033: Epoch time: 102.83 s +2026-04-09 03:21:32.521777: +2026-04-09 03:21:32.524308: Epoch 393 +2026-04-09 03:21:32.533290: Current learning rate: 0.00638 +2026-04-09 03:23:14.006592: train_loss -0.1784 +2026-04-09 03:23:14.013196: val_loss -0.1285 +2026-04-09 03:23:14.015289: Pseudo dice [0.2479, 0.4171, 0.5675, 0.0606, 0.5375, 0.1563, 0.7442] +2026-04-09 03:23:14.017536: Epoch time: 101.49 s +2026-04-09 03:23:15.119601: 
+2026-04-09 03:23:15.122132: Epoch 394 +2026-04-09 03:23:15.123801: Current learning rate: 0.00637 +2026-04-09 03:24:57.381586: train_loss -0.2127 +2026-04-09 03:24:57.389580: val_loss -0.149 +2026-04-09 03:24:57.391957: Pseudo dice [0.4255, 0.0, 0.4361, 0.7314, 0.6085, 0.2835, 0.4381] +2026-04-09 03:24:57.396315: Epoch time: 102.27 s +2026-04-09 03:24:58.541043: +2026-04-09 03:24:58.543231: Epoch 395 +2026-04-09 03:24:58.546311: Current learning rate: 0.00636 +2026-04-09 03:26:40.850962: train_loss -0.3027 +2026-04-09 03:26:40.861593: val_loss -0.2456 +2026-04-09 03:26:40.867151: Pseudo dice [0.0, 0.0, 0.4817, 0.4644, 0.5838, 0.0335, 0.8432] +2026-04-09 03:26:40.872158: Epoch time: 102.31 s +2026-04-09 03:26:41.989949: +2026-04-09 03:26:41.992587: Epoch 396 +2026-04-09 03:26:41.994698: Current learning rate: 0.00635 +2026-04-09 03:28:24.250622: train_loss -0.3561 +2026-04-09 03:28:24.257869: val_loss -0.2974 +2026-04-09 03:28:24.260859: Pseudo dice [0.0, 0.0, 0.4646, 0.5116, 0.3625, 0.0872, 0.555] +2026-04-09 03:28:24.262901: Epoch time: 102.26 s +2026-04-09 03:28:25.381911: +2026-04-09 03:28:25.385413: Epoch 397 +2026-04-09 03:28:25.389694: Current learning rate: 0.00634 +2026-04-09 03:30:07.384395: train_loss -0.3556 +2026-04-09 03:30:07.390376: val_loss -0.326 +2026-04-09 03:30:07.392774: Pseudo dice [0.0, 0.0, 0.588, 0.1758, 0.4343, 0.1686, 0.4893] +2026-04-09 03:30:07.395225: Epoch time: 102.01 s +2026-04-09 03:30:08.506034: +2026-04-09 03:30:08.508498: Epoch 398 +2026-04-09 03:30:08.510852: Current learning rate: 0.00633 +2026-04-09 03:31:50.587851: train_loss -0.3053 +2026-04-09 03:31:50.594218: val_loss -0.3182 +2026-04-09 03:31:50.598699: Pseudo dice [0.0, 0.0, 0.3415, 0.2446, 0.4215, 0.0, 0.0] +2026-04-09 03:31:50.602171: Epoch time: 102.09 s +2026-04-09 03:31:51.724352: +2026-04-09 03:31:51.726515: Epoch 399 +2026-04-09 03:31:51.728382: Current learning rate: 0.00632 +2026-04-09 03:33:33.762554: train_loss -0.3189 +2026-04-09 03:33:33.769619: val_loss 
-0.2849 +2026-04-09 03:33:33.773525: Pseudo dice [0.0, 0.0, 0.6651, 0.0788, 0.2003, 0.0, 0.0] +2026-04-09 03:33:33.775694: Epoch time: 102.04 s +2026-04-09 03:33:36.595480: +2026-04-09 03:33:36.597761: Epoch 400 +2026-04-09 03:33:36.599215: Current learning rate: 0.00631 +2026-04-09 03:35:18.735785: train_loss -0.3584 +2026-04-09 03:35:18.742517: val_loss -0.3691 +2026-04-09 03:35:18.748743: Pseudo dice [0.257, 0.0, 0.7501, 0.0011, 0.6206, 0.0, 0.3255] +2026-04-09 03:35:18.752029: Epoch time: 102.14 s +2026-04-09 03:35:19.860838: +2026-04-09 03:35:19.863985: Epoch 401 +2026-04-09 03:35:19.866603: Current learning rate: 0.0063 +2026-04-09 03:37:01.178931: train_loss -0.3712 +2026-04-09 03:37:01.184805: val_loss -0.2744 +2026-04-09 03:37:01.186790: Pseudo dice [0.0002, 0.0, 0.4804, 0.3348, 0.3928, 0.1235, 0.6828] +2026-04-09 03:37:01.189642: Epoch time: 101.32 s +2026-04-09 03:37:02.316252: +2026-04-09 03:37:02.318119: Epoch 402 +2026-04-09 03:37:02.319924: Current learning rate: 0.0063 +2026-04-09 03:38:44.349970: train_loss -0.3533 +2026-04-09 03:38:44.355835: val_loss -0.2078 +2026-04-09 03:38:44.358217: Pseudo dice [0.0393, 0.0, 0.4147, 0.0674, 0.3106, 0.0084, 0.0286] +2026-04-09 03:38:44.363351: Epoch time: 102.04 s +2026-04-09 03:38:46.453560: +2026-04-09 03:38:46.456838: Epoch 403 +2026-04-09 03:38:46.458688: Current learning rate: 0.00629 +2026-04-09 03:40:28.570380: train_loss -0.3078 +2026-04-09 03:40:28.576931: val_loss -0.2951 +2026-04-09 03:40:28.578842: Pseudo dice [0.1457, 0.0, 0.5449, 0.3399, 0.5637, 0.0, 0.0001] +2026-04-09 03:40:28.581387: Epoch time: 102.12 s +2026-04-09 03:40:29.696749: +2026-04-09 03:40:29.699150: Epoch 404 +2026-04-09 03:40:29.701235: Current learning rate: 0.00628 +2026-04-09 03:42:12.269025: train_loss -0.3301 +2026-04-09 03:42:12.277163: val_loss -0.1137 +2026-04-09 03:42:12.279655: Pseudo dice [0.0, 0.0, 0.4629, 0.4159, 0.4329, 0.0, 0.0] +2026-04-09 03:42:12.283404: Epoch time: 102.58 s +2026-04-09 03:42:13.405347: 
+2026-04-09 03:42:13.410230: Epoch 405 +2026-04-09 03:42:13.412950: Current learning rate: 0.00627 +2026-04-09 03:43:55.730360: train_loss -0.3569 +2026-04-09 03:43:55.739988: val_loss -0.3011 +2026-04-09 03:43:55.754636: Pseudo dice [0.0, 0.0, 0.3844, 0.1023, 0.488, 0.0715, 0.6139] +2026-04-09 03:43:55.757129: Epoch time: 102.33 s +2026-04-09 03:43:56.895201: +2026-04-09 03:43:56.897093: Epoch 406 +2026-04-09 03:43:56.899078: Current learning rate: 0.00626 +2026-04-09 03:45:38.837450: train_loss -0.3558 +2026-04-09 03:45:38.844279: val_loss -0.2888 +2026-04-09 03:45:38.847019: Pseudo dice [0.0, 0.0, 0.0159, 0.2602, 0.3991, 0.0, 0.3732] +2026-04-09 03:45:38.849852: Epoch time: 101.95 s +2026-04-09 03:45:39.957543: +2026-04-09 03:45:39.964571: Epoch 407 +2026-04-09 03:45:39.968037: Current learning rate: 0.00625 +2026-04-09 03:47:21.574011: train_loss -0.3366 +2026-04-09 03:47:21.580070: val_loss -0.3429 +2026-04-09 03:47:21.582336: Pseudo dice [0.0, 0.0, 0.5105, 0.0717, 0.6103, 0.0, 0.2097] +2026-04-09 03:47:21.584627: Epoch time: 101.62 s +2026-04-09 03:47:22.712845: +2026-04-09 03:47:22.715037: Epoch 408 +2026-04-09 03:47:22.716967: Current learning rate: 0.00624 +2026-04-09 03:49:04.348401: train_loss -0.3144 +2026-04-09 03:49:04.353405: val_loss -0.2855 +2026-04-09 03:49:04.355258: Pseudo dice [0.0, 0.0, 0.5284, 0.1198, 0.0538, 0.0, 0.0] +2026-04-09 03:49:04.357362: Epoch time: 101.64 s +2026-04-09 03:49:05.480284: +2026-04-09 03:49:05.482082: Epoch 409 +2026-04-09 03:49:05.484054: Current learning rate: 0.00623 +2026-04-09 03:50:46.786302: train_loss -0.3506 +2026-04-09 03:50:46.791557: val_loss -0.2549 +2026-04-09 03:50:46.793809: Pseudo dice [0.0, 0.0, 0.476, 0.0956, 0.4902, 0.0524, 0.5626] +2026-04-09 03:50:46.795962: Epoch time: 101.31 s +2026-04-09 03:50:47.904551: +2026-04-09 03:50:47.906282: Epoch 410 +2026-04-09 03:50:47.908231: Current learning rate: 0.00622 +2026-04-09 03:52:29.634445: train_loss -0.3554 +2026-04-09 03:52:29.641800: val_loss -0.2596 
+2026-04-09 03:52:29.645005: Pseudo dice [0.0, 0.0, 0.4001, 0.5534, 0.5006, 0.0, 0.0] +2026-04-09 03:52:29.647362: Epoch time: 101.73 s +2026-04-09 03:52:30.698937: +2026-04-09 03:52:30.700895: Epoch 411 +2026-04-09 03:52:30.703903: Current learning rate: 0.00621 +2026-04-09 03:54:12.796653: train_loss -0.3507 +2026-04-09 03:54:12.810303: val_loss -0.2977 +2026-04-09 03:54:12.813203: Pseudo dice [0.0, 0.0, 0.3785, 0.5928, 0.3244, 0.008, 0.4683] +2026-04-09 03:54:12.815055: Epoch time: 102.1 s +2026-04-09 03:54:13.871485: +2026-04-09 03:54:13.873368: Epoch 412 +2026-04-09 03:54:13.876348: Current learning rate: 0.0062 +2026-04-09 03:55:55.865022: train_loss -0.3388 +2026-04-09 03:55:55.871782: val_loss -0.3192 +2026-04-09 03:55:55.874547: Pseudo dice [0.0, 0.0, 0.6213, 0.3472, 0.4995, 0.0, 0.0] +2026-04-09 03:55:55.877061: Epoch time: 102.0 s +2026-04-09 03:55:56.924882: +2026-04-09 03:55:56.927090: Epoch 413 +2026-04-09 03:55:56.929617: Current learning rate: 0.00619 +2026-04-09 03:57:39.466906: train_loss -0.3438 +2026-04-09 03:57:39.474727: val_loss -0.3621 +2026-04-09 03:57:39.477269: Pseudo dice [0.0, 0.0, 0.588, 0.3555, 0.5148, 0.285, 0.6649] +2026-04-09 03:57:39.479934: Epoch time: 102.55 s +2026-04-09 03:57:40.533300: +2026-04-09 03:57:40.535335: Epoch 414 +2026-04-09 03:57:40.537455: Current learning rate: 0.00618 +2026-04-09 03:59:21.538600: train_loss -0.3418 +2026-04-09 03:59:21.544392: val_loss -0.2966 +2026-04-09 03:59:21.546444: Pseudo dice [0.0, 0.0, 0.6038, 0.6772, 0.4258, 0.1118, 0.0173] +2026-04-09 03:59:21.548986: Epoch time: 101.01 s +2026-04-09 03:59:22.614989: +2026-04-09 03:59:22.617076: Epoch 415 +2026-04-09 03:59:22.618939: Current learning rate: 0.00617 +2026-04-09 04:01:04.959797: train_loss -0.3526 +2026-04-09 04:01:04.964394: val_loss -0.3128 +2026-04-09 04:01:04.972789: Pseudo dice [0.0024, 0.0, 0.6199, 0.4805, 0.3442, 0.1117, 0.6402] +2026-04-09 04:01:04.974541: Epoch time: 102.35 s +2026-04-09 04:01:06.028113: +2026-04-09 
04:01:06.029915: Epoch 416 +2026-04-09 04:01:06.031326: Current learning rate: 0.00616 +2026-04-09 04:02:48.512474: train_loss -0.3566 +2026-04-09 04:02:48.518806: val_loss -0.3454 +2026-04-09 04:02:48.520755: Pseudo dice [0.0, 0.0, 0.2552, 0.0105, 0.3699, 0.6527, 0.178] +2026-04-09 04:02:48.524047: Epoch time: 102.49 s +2026-04-09 04:02:49.595684: +2026-04-09 04:02:49.602955: Epoch 417 +2026-04-09 04:02:49.606028: Current learning rate: 0.00615 +2026-04-09 04:04:31.108267: train_loss -0.3246 +2026-04-09 04:04:31.114372: val_loss -0.2298 +2026-04-09 04:04:31.117081: Pseudo dice [0.0, 0.0, 0.1741, 0.0, 0.439, 0.0125, 0.0] +2026-04-09 04:04:31.118927: Epoch time: 101.52 s +2026-04-09 04:04:32.189274: +2026-04-09 04:04:32.191201: Epoch 418 +2026-04-09 04:04:32.192748: Current learning rate: 0.00614 +2026-04-09 04:06:13.853650: train_loss -0.2944 +2026-04-09 04:06:13.862539: val_loss -0.3079 +2026-04-09 04:06:13.872560: Pseudo dice [0.0, 0.0, 0.4582, 0.2154, 0.6012, 0.0, 0.0] +2026-04-09 04:06:13.875024: Epoch time: 101.67 s +2026-04-09 04:06:14.956124: +2026-04-09 04:06:14.958080: Epoch 419 +2026-04-09 04:06:14.959765: Current learning rate: 0.00613 +2026-04-09 04:07:56.907306: train_loss -0.3267 +2026-04-09 04:07:56.914299: val_loss -0.2564 +2026-04-09 04:07:56.917659: Pseudo dice [0.0, 0.0, 0.4327, 0.4983, 0.5333, 0.0, 0.2877] +2026-04-09 04:07:56.919810: Epoch time: 101.96 s +2026-04-09 04:07:57.984690: +2026-04-09 04:07:57.987442: Epoch 420 +2026-04-09 04:07:57.989634: Current learning rate: 0.00612 +2026-04-09 04:09:39.898413: train_loss -0.3476 +2026-04-09 04:09:39.905864: val_loss -0.3511 +2026-04-09 04:09:39.908316: Pseudo dice [0.0, 0.0, 0.6701, 0.6196, 0.4744, 0.273, 0.6195] +2026-04-09 04:09:39.910728: Epoch time: 101.92 s +2026-04-09 04:09:40.984922: +2026-04-09 04:09:40.987172: Epoch 421 +2026-04-09 04:09:40.989675: Current learning rate: 0.00612 +2026-04-09 04:11:22.492974: train_loss -0.3747 +2026-04-09 04:11:22.500262: val_loss -0.3599 +2026-04-09 
04:11:22.503017: Pseudo dice [0.0, 0.0, 0.6847, 0.1959, 0.5025, 0.05, 0.6365] +2026-04-09 04:11:22.505435: Epoch time: 101.51 s +2026-04-09 04:11:23.572779: +2026-04-09 04:11:23.575155: Epoch 422 +2026-04-09 04:11:23.577107: Current learning rate: 0.00611 +2026-04-09 04:13:06.454883: train_loss -0.3763 +2026-04-09 04:13:06.463845: val_loss -0.2748 +2026-04-09 04:13:06.466260: Pseudo dice [0.0, 0.0, 0.5304, 0.2032, 0.3515, 0.0745, 0.7012] +2026-04-09 04:13:06.468385: Epoch time: 102.89 s +2026-04-09 04:13:08.495520: +2026-04-09 04:13:08.497710: Epoch 423 +2026-04-09 04:13:08.499504: Current learning rate: 0.0061 +2026-04-09 04:14:51.568978: train_loss -0.3449 +2026-04-09 04:14:51.577431: val_loss -0.3566 +2026-04-09 04:14:51.579772: Pseudo dice [0.0, 0.0, 0.6316, 0.3818, 0.5557, 0.1413, 0.6439] +2026-04-09 04:14:51.582117: Epoch time: 103.08 s +2026-04-09 04:14:52.637679: +2026-04-09 04:14:52.640173: Epoch 424 +2026-04-09 04:14:52.643259: Current learning rate: 0.00609 +2026-04-09 04:16:34.571791: train_loss -0.3827 +2026-04-09 04:16:34.579829: val_loss -0.3298 +2026-04-09 04:16:34.582692: Pseudo dice [0.0, 0.0, 0.4133, 0.01, 0.5609, 0.0614, 0.6863] +2026-04-09 04:16:34.584871: Epoch time: 101.94 s +2026-04-09 04:16:35.640432: +2026-04-09 04:16:35.642627: Epoch 425 +2026-04-09 04:16:35.644160: Current learning rate: 0.00608 +2026-04-09 04:18:17.581075: train_loss -0.3609 +2026-04-09 04:18:17.589685: val_loss -0.3586 +2026-04-09 04:18:17.593138: Pseudo dice [0.0, 0.0, 0.5271, 0.0217, 0.4198, 0.6303, 0.581] +2026-04-09 04:18:17.595183: Epoch time: 101.94 s +2026-04-09 04:18:18.715823: +2026-04-09 04:18:18.718023: Epoch 426 +2026-04-09 04:18:18.720643: Current learning rate: 0.00607 +2026-04-09 04:20:00.200248: train_loss -0.3732 +2026-04-09 04:20:00.207318: val_loss -0.303 +2026-04-09 04:20:00.209372: Pseudo dice [0.0, 0.0, 0.5497, 0.716, 0.5267, 0.0215, 0.3368] +2026-04-09 04:20:00.211745: Epoch time: 101.49 s +2026-04-09 04:20:01.290750: +2026-04-09 04:20:01.293052: 
Epoch 427 +2026-04-09 04:20:01.295192: Current learning rate: 0.00606 +2026-04-09 04:21:42.526041: train_loss -0.3537 +2026-04-09 04:21:42.533743: val_loss -0.2815 +2026-04-09 04:21:42.536085: Pseudo dice [0.0, 0.0, 0.6233, 0.008, 0.3384, 0.2059, 0.5535] +2026-04-09 04:21:42.538207: Epoch time: 101.24 s +2026-04-09 04:21:43.611029: +2026-04-09 04:21:43.613076: Epoch 428 +2026-04-09 04:21:43.615020: Current learning rate: 0.00605 +2026-04-09 04:23:25.168101: train_loss -0.3863 +2026-04-09 04:23:25.174567: val_loss -0.2982 +2026-04-09 04:23:25.176037: Pseudo dice [0.0, 0.0, 0.4475, 0.015, 0.6183, 0.0453, 0.5373] +2026-04-09 04:23:25.177717: Epoch time: 101.56 s +2026-04-09 04:23:26.238859: +2026-04-09 04:23:26.240694: Epoch 429 +2026-04-09 04:23:26.242491: Current learning rate: 0.00604 +2026-04-09 04:25:08.971125: train_loss -0.3854 +2026-04-09 04:25:08.986483: val_loss -0.4178 +2026-04-09 04:25:08.996750: Pseudo dice [0.0, 0.0, 0.7394, 0.4322, 0.4475, 0.5152, 0.7084] +2026-04-09 04:25:09.006426: Epoch time: 102.74 s +2026-04-09 04:25:10.073277: +2026-04-09 04:25:10.075162: Epoch 430 +2026-04-09 04:25:10.076710: Current learning rate: 0.00603 +2026-04-09 04:26:52.337829: train_loss -0.3703 +2026-04-09 04:26:52.348822: val_loss -0.2306 +2026-04-09 04:26:52.350737: Pseudo dice [0.0, 0.0, 0.4778, 0.3413, 0.2793, 0.0821, 0.6618] +2026-04-09 04:26:52.353072: Epoch time: 102.27 s +2026-04-09 04:26:53.415838: +2026-04-09 04:26:53.418106: Epoch 431 +2026-04-09 04:26:53.420338: Current learning rate: 0.00602 +2026-04-09 04:28:35.030468: train_loss -0.3655 +2026-04-09 04:28:35.040400: val_loss -0.2602 +2026-04-09 04:28:35.047434: Pseudo dice [0.0, 0.0, 0.6134, 0.1938, 0.2912, 0.0049, 0.0] +2026-04-09 04:28:35.052090: Epoch time: 101.62 s +2026-04-09 04:28:36.100367: +2026-04-09 04:28:36.103167: Epoch 432 +2026-04-09 04:28:36.106249: Current learning rate: 0.00601 +2026-04-09 04:30:17.735881: train_loss -0.3405 +2026-04-09 04:30:17.743834: val_loss -0.3458 +2026-04-09 
04:30:17.745991: Pseudo dice [0.0, 0.0, 0.5668, 0.0621, 0.413, 0.3914, 0.0017] +2026-04-09 04:30:17.747957: Epoch time: 101.64 s +2026-04-09 04:30:18.809533: +2026-04-09 04:30:18.812183: Epoch 433 +2026-04-09 04:30:18.814308: Current learning rate: 0.006 +2026-04-09 04:32:00.933989: train_loss -0.3688 +2026-04-09 04:32:00.944528: val_loss -0.2919 +2026-04-09 04:32:00.949421: Pseudo dice [0.1786, 0.0, 0.3221, 0.5333, 0.3348, 0.0, 0.0] +2026-04-09 04:32:00.952182: Epoch time: 102.13 s +2026-04-09 04:32:02.010004: +2026-04-09 04:32:02.012355: Epoch 434 +2026-04-09 04:32:02.014674: Current learning rate: 0.00599 +2026-04-09 04:33:44.114439: train_loss -0.3064 +2026-04-09 04:33:44.123532: val_loss -0.2231 +2026-04-09 04:33:44.125439: Pseudo dice [0.0, 0.0, 0.2552, 0.4022, 0.5773, 0.0, 0.0032] +2026-04-09 04:33:44.127080: Epoch time: 102.11 s +2026-04-09 04:33:45.183251: +2026-04-09 04:33:45.185313: Epoch 435 +2026-04-09 04:33:45.187126: Current learning rate: 0.00598 +2026-04-09 04:35:27.918101: train_loss -0.3524 +2026-04-09 04:35:27.928702: val_loss -0.3619 +2026-04-09 04:35:27.930667: Pseudo dice [0.2169, 0.0, 0.4952, 0.5584, 0.5604, 0.0, 0.6858] +2026-04-09 04:35:27.932980: Epoch time: 102.74 s +2026-04-09 04:35:28.989180: +2026-04-09 04:35:28.991996: Epoch 436 +2026-04-09 04:35:28.994856: Current learning rate: 0.00597 +2026-04-09 04:37:11.010684: train_loss -0.3819 +2026-04-09 04:37:11.017776: val_loss -0.3848 +2026-04-09 04:37:11.019697: Pseudo dice [0.0, 0.0, 0.648, 0.4236, 0.4841, 0.1932, 0.6661] +2026-04-09 04:37:11.022224: Epoch time: 102.03 s +2026-04-09 04:37:12.092196: +2026-04-09 04:37:12.094101: Epoch 437 +2026-04-09 04:37:12.096157: Current learning rate: 0.00596 +2026-04-09 04:38:54.701235: train_loss -0.3621 +2026-04-09 04:38:54.708388: val_loss -0.3086 +2026-04-09 04:38:54.710922: Pseudo dice [0.6368, 0.0, 0.5998, 0.0908, 0.2955, 0.0171, 0.1963] +2026-04-09 04:38:54.713895: Epoch time: 102.61 s +2026-04-09 04:38:55.791080: +2026-04-09 
04:38:55.793766: Epoch 438 +2026-04-09 04:38:55.795965: Current learning rate: 0.00595 +2026-04-09 04:40:37.406143: train_loss -0.3674 +2026-04-09 04:40:37.414201: val_loss -0.1806 +2026-04-09 04:40:37.416963: Pseudo dice [0.213, 0.0, 0.2567, 0.0447, 0.6311, 0.0051, 0.5844] +2026-04-09 04:40:37.421330: Epoch time: 101.62 s +2026-04-09 04:40:38.461251: +2026-04-09 04:40:38.463140: Epoch 439 +2026-04-09 04:40:38.465003: Current learning rate: 0.00594 +2026-04-09 04:42:20.440073: train_loss -0.384 +2026-04-09 04:42:20.449481: val_loss -0.2859 +2026-04-09 04:42:20.451852: Pseudo dice [0.422, 0.0, 0.5725, 0.1531, 0.6325, 0.0219, 0.6071] +2026-04-09 04:42:20.453572: Epoch time: 101.98 s +2026-04-09 04:42:21.514758: +2026-04-09 04:42:21.520533: Epoch 440 +2026-04-09 04:42:21.522758: Current learning rate: 0.00593 +2026-04-09 04:44:02.960746: train_loss -0.396 +2026-04-09 04:44:02.974771: val_loss -0.3849 +2026-04-09 04:44:02.978320: Pseudo dice [0.5456, 0.0, 0.4518, 0.2211, 0.6353, 0.1916, 0.5471] +2026-04-09 04:44:02.982289: Epoch time: 101.45 s +2026-04-09 04:44:04.051081: +2026-04-09 04:44:04.053105: Epoch 441 +2026-04-09 04:44:04.055124: Current learning rate: 0.00592 +2026-04-09 04:45:46.129493: train_loss -0.3421 +2026-04-09 04:45:46.137344: val_loss -0.3463 +2026-04-09 04:45:46.139504: Pseudo dice [0.2967, 0.0, 0.3979, 0.3727, 0.2447, 0.2543, 0.6395] +2026-04-09 04:45:46.142207: Epoch time: 102.08 s +2026-04-09 04:45:47.177328: +2026-04-09 04:45:47.179784: Epoch 442 +2026-04-09 04:45:47.181825: Current learning rate: 0.00592 +2026-04-09 04:47:28.299656: train_loss -0.3901 +2026-04-09 04:47:28.305290: val_loss -0.2833 +2026-04-09 04:47:28.306881: Pseudo dice [0.6444, 0.0, 0.4601, 0.6021, 0.3786, 0.0215, 0.5255] +2026-04-09 04:47:28.309434: Epoch time: 101.13 s +2026-04-09 04:47:30.352393: +2026-04-09 04:47:30.354118: Epoch 443 +2026-04-09 04:47:30.355850: Current learning rate: 0.00591 +2026-04-09 04:49:11.514557: train_loss -0.3784 +2026-04-09 04:49:11.522242: 
val_loss -0.3909 +2026-04-09 04:49:11.525174: Pseudo dice [0.013, 0.0, 0.5997, 0.4231, 0.2188, 0.5128, 0.5483] +2026-04-09 04:49:11.527417: Epoch time: 101.17 s +2026-04-09 04:49:12.577583: +2026-04-09 04:49:12.579906: Epoch 444 +2026-04-09 04:49:12.581829: Current learning rate: 0.0059 +2026-04-09 04:50:53.854276: train_loss -0.3549 +2026-04-09 04:50:53.860101: val_loss -0.3707 +2026-04-09 04:50:53.862285: Pseudo dice [0.5897, 0.0, 0.6366, 0.0986, 0.4488, 0.2778, 0.5049] +2026-04-09 04:50:53.864563: Epoch time: 101.28 s +2026-04-09 04:50:54.913288: +2026-04-09 04:50:54.915848: Epoch 445 +2026-04-09 04:50:54.918333: Current learning rate: 0.00589 +2026-04-09 04:52:36.217621: train_loss -0.3775 +2026-04-09 04:52:36.223149: val_loss -0.3777 +2026-04-09 04:52:36.225493: Pseudo dice [0.0, 0.0, 0.7211, 0.6656, 0.641, 0.3017, 0.4863] +2026-04-09 04:52:36.227347: Epoch time: 101.31 s +2026-04-09 04:52:37.279577: +2026-04-09 04:52:37.281996: Epoch 446 +2026-04-09 04:52:37.284165: Current learning rate: 0.00588 +2026-04-09 04:54:19.158078: train_loss -0.3583 +2026-04-09 04:54:19.164298: val_loss -0.3139 +2026-04-09 04:54:19.166741: Pseudo dice [0.0, 0.0, 0.4741, 0.704, 0.5847, 0.0786, 0.6169] +2026-04-09 04:54:19.168570: Epoch time: 101.88 s +2026-04-09 04:54:20.211376: +2026-04-09 04:54:20.213234: Epoch 447 +2026-04-09 04:54:20.214777: Current learning rate: 0.00587 +2026-04-09 04:56:01.436488: train_loss -0.3262 +2026-04-09 04:56:01.443399: val_loss -0.2572 +2026-04-09 04:56:01.445746: Pseudo dice [0.0, 0.0, 0.5829, 0.5096, 0.4527, 0.0623, 0.0013] +2026-04-09 04:56:01.447443: Epoch time: 101.23 s +2026-04-09 04:56:02.501005: +2026-04-09 04:56:02.503785: Epoch 448 +2026-04-09 04:56:02.506022: Current learning rate: 0.00586 +2026-04-09 04:57:44.030239: train_loss -0.3686 +2026-04-09 04:57:44.038079: val_loss -0.223 +2026-04-09 04:57:44.040288: Pseudo dice [0.0, 0.0, 0.1508, 0.5157, 0.485, 0.0175, 0.6094] +2026-04-09 04:57:44.042787: Epoch time: 101.53 s +2026-04-09 
04:57:45.116335: +2026-04-09 04:57:45.118500: Epoch 449 +2026-04-09 04:57:45.120565: Current learning rate: 0.00585 +2026-04-09 04:59:27.101241: train_loss -0.3532 +2026-04-09 04:59:27.107235: val_loss -0.2585 +2026-04-09 04:59:27.110146: Pseudo dice [0.0, 0.0, 0.5807, 0.3602, 0.4198, 0.0005, 0.4502] +2026-04-09 04:59:27.112294: Epoch time: 101.99 s +2026-04-09 04:59:29.813022: +2026-04-09 04:59:29.815959: Epoch 450 +2026-04-09 04:59:29.817461: Current learning rate: 0.00584 +2026-04-09 05:01:11.792265: train_loss -0.3837 +2026-04-09 05:01:11.802533: val_loss -0.3594 +2026-04-09 05:01:11.804647: Pseudo dice [0.0, 0.0, 0.5351, 0.5058, 0.5141, 0.0802, 0.5808] +2026-04-09 05:01:11.806639: Epoch time: 101.98 s +2026-04-09 05:01:12.845205: +2026-04-09 05:01:12.846972: Epoch 451 +2026-04-09 05:01:12.848656: Current learning rate: 0.00583 +2026-04-09 05:02:55.229208: train_loss -0.3936 +2026-04-09 05:02:55.234993: val_loss -0.2602 +2026-04-09 05:02:55.237124: Pseudo dice [0.0, 0.0, 0.5959, 0.0085, 0.4376, 0.0093, 0.4644] +2026-04-09 05:02:55.239478: Epoch time: 102.39 s +2026-04-09 05:02:56.299747: +2026-04-09 05:02:56.302177: Epoch 452 +2026-04-09 05:02:56.305941: Current learning rate: 0.00582 +2026-04-09 05:04:38.177608: train_loss -0.3944 +2026-04-09 05:04:38.183511: val_loss -0.2277 +2026-04-09 05:04:38.185415: Pseudo dice [0.0, 0.0, 0.542, 0.5452, 0.3329, 0.0039, 0.6322] +2026-04-09 05:04:38.187594: Epoch time: 101.88 s +2026-04-09 05:04:39.233869: +2026-04-09 05:04:39.235810: Epoch 453 +2026-04-09 05:04:39.237495: Current learning rate: 0.00581 +2026-04-09 05:06:21.033589: train_loss -0.3416 +2026-04-09 05:06:21.038728: val_loss -0.3535 +2026-04-09 05:06:21.040489: Pseudo dice [0.0, 0.0, 0.4916, 0.4847, 0.3167, 0.0, 0.7232] +2026-04-09 05:06:21.042424: Epoch time: 101.8 s +2026-04-09 05:06:22.097283: +2026-04-09 05:06:22.099697: Epoch 454 +2026-04-09 05:06:22.101501: Current learning rate: 0.0058 +2026-04-09 05:08:04.387787: train_loss -0.3495 +2026-04-09 
05:08:04.393542: val_loss -0.3521 +2026-04-09 05:08:04.395675: Pseudo dice [0.0, 0.0, 0.5625, 0.2473, 0.2662, 0.0001, 0.4999] +2026-04-09 05:08:04.397755: Epoch time: 102.29 s +2026-04-09 05:08:05.450459: +2026-04-09 05:08:05.452450: Epoch 455 +2026-04-09 05:08:05.455157: Current learning rate: 0.00579 +2026-04-09 05:09:46.642540: train_loss -0.3731 +2026-04-09 05:09:46.648975: val_loss -0.2549 +2026-04-09 05:09:46.651285: Pseudo dice [0.0, 0.0, 0.5525, 0.2687, 0.4612, 0.0, 0.6925] +2026-04-09 05:09:46.653646: Epoch time: 101.2 s +2026-04-09 05:09:47.717254: +2026-04-09 05:09:47.719218: Epoch 456 +2026-04-09 05:09:47.722522: Current learning rate: 0.00578 +2026-04-09 05:11:28.519892: train_loss -0.3522 +2026-04-09 05:11:28.525760: val_loss -0.2806 +2026-04-09 05:11:28.528940: Pseudo dice [0.0, 0.0, 0.6769, 0.0482, 0.5406, 0.0, 0.7086] +2026-04-09 05:11:28.530859: Epoch time: 100.81 s +2026-04-09 05:11:29.552042: +2026-04-09 05:11:29.553579: Epoch 457 +2026-04-09 05:11:29.555279: Current learning rate: 0.00577 +2026-04-09 05:13:11.241950: train_loss -0.3559 +2026-04-09 05:13:11.248085: val_loss -0.343 +2026-04-09 05:13:11.250085: Pseudo dice [0.0, 0.0, 0.6684, 0.3674, 0.5948, 0.0026, 0.7125] +2026-04-09 05:13:11.252980: Epoch time: 101.69 s +2026-04-09 05:13:12.283529: +2026-04-09 05:13:12.285586: Epoch 458 +2026-04-09 05:13:12.287264: Current learning rate: 0.00576 +2026-04-09 05:14:54.363222: train_loss -0.3622 +2026-04-09 05:14:54.369477: val_loss -0.3491 +2026-04-09 05:14:54.371457: Pseudo dice [0.0, 0.0, 0.2979, 0.637, 0.4677, 0.001, 0.7567] +2026-04-09 05:14:54.373620: Epoch time: 102.08 s +2026-04-09 05:14:55.419913: +2026-04-09 05:14:55.422361: Epoch 459 +2026-04-09 05:14:55.424466: Current learning rate: 0.00575 +2026-04-09 05:16:37.478950: train_loss -0.4017 +2026-04-09 05:16:37.485299: val_loss -0.3132 +2026-04-09 05:16:37.487577: Pseudo dice [0.5942, 0.0, 0.5631, 0.2949, 0.2979, 0.1468, 0.5609] +2026-04-09 05:16:37.489600: Epoch time: 102.06 s 
+2026-04-09 05:16:38.544063: +2026-04-09 05:16:38.545894: Epoch 460 +2026-04-09 05:16:38.548244: Current learning rate: 0.00574 +2026-04-09 05:18:20.205445: train_loss -0.3698 +2026-04-09 05:18:20.213500: val_loss -0.3231 +2026-04-09 05:18:20.216356: Pseudo dice [0.2572, 0.0, 0.3826, 0.1286, 0.4984, 0.0, 0.5285] +2026-04-09 05:18:20.218853: Epoch time: 101.66 s +2026-04-09 05:18:21.270169: +2026-04-09 05:18:21.272881: Epoch 461 +2026-04-09 05:18:21.274555: Current learning rate: 0.00573 +2026-04-09 05:20:03.407730: train_loss -0.3604 +2026-04-09 05:20:03.414500: val_loss -0.3283 +2026-04-09 05:20:03.416786: Pseudo dice [0.2367, 0.0, 0.4175, 0.4063, 0.4044, 0.2009, 0.6381] +2026-04-09 05:20:03.419210: Epoch time: 102.14 s +2026-04-09 05:20:04.468407: +2026-04-09 05:20:04.470097: Epoch 462 +2026-04-09 05:20:04.472030: Current learning rate: 0.00572 +2026-04-09 05:21:46.371884: train_loss -0.3357 +2026-04-09 05:21:46.378295: val_loss -0.1983 +2026-04-09 05:21:46.380816: Pseudo dice [0.0, 0.0, 0.573, 0.266, 0.3824, 0.0119, 0.6511] +2026-04-09 05:21:46.383029: Epoch time: 101.91 s +2026-04-09 05:21:47.439396: +2026-04-09 05:21:47.441758: Epoch 463 +2026-04-09 05:21:47.444006: Current learning rate: 0.00571 +2026-04-09 05:23:30.142360: train_loss -0.3393 +2026-04-09 05:23:30.148406: val_loss -0.2618 +2026-04-09 05:23:30.152238: Pseudo dice [0.0, 0.0, 0.4566, 0.0001, 0.2967, 0.0234, 0.0193] +2026-04-09 05:23:30.155166: Epoch time: 102.71 s +2026-04-09 05:23:31.209984: +2026-04-09 05:23:31.212255: Epoch 464 +2026-04-09 05:23:31.217897: Current learning rate: 0.0057 +2026-04-09 05:25:13.129248: train_loss -0.334 +2026-04-09 05:25:13.134924: val_loss -0.2381 +2026-04-09 05:25:13.137012: Pseudo dice [0.0, 0.0, 0.5469, 0.0016, 0.4972, 0.0087, 0.0361] +2026-04-09 05:25:13.139436: Epoch time: 101.92 s +2026-04-09 05:25:14.194679: +2026-04-09 05:25:14.196808: Epoch 465 +2026-04-09 05:25:14.198703: Current learning rate: 0.0057 +2026-04-09 05:26:55.005835: train_loss -0.3269 
+2026-04-09 05:26:55.011608: val_loss -0.3102 +2026-04-09 05:26:55.013816: Pseudo dice [0.0, 0.0, 0.2422, 0.2221, 0.4441, 0.0037, 0.1733] +2026-04-09 05:26:55.015985: Epoch time: 100.81 s +2026-04-09 05:26:56.057837: +2026-04-09 05:26:56.059873: Epoch 466 +2026-04-09 05:26:56.061973: Current learning rate: 0.00569 +2026-04-09 05:28:38.090335: train_loss -0.3613 +2026-04-09 05:28:38.097274: val_loss -0.2266 +2026-04-09 05:28:38.099579: Pseudo dice [0.0, 0.0, 0.3066, 0.3787, 0.3364, 0.0338, 0.2202] +2026-04-09 05:28:38.101828: Epoch time: 102.04 s +2026-04-09 05:28:39.155383: +2026-04-09 05:28:39.157068: Epoch 467 +2026-04-09 05:28:39.158807: Current learning rate: 0.00568 +2026-04-09 05:30:21.053870: train_loss -0.3205 +2026-04-09 05:30:21.058430: val_loss -0.3269 +2026-04-09 05:30:21.059998: Pseudo dice [0.0, 0.0, 0.5144, 0.4186, 0.4939, 0.0, 0.5232] +2026-04-09 05:30:21.061616: Epoch time: 101.9 s +2026-04-09 05:30:22.099374: +2026-04-09 05:30:22.101283: Epoch 468 +2026-04-09 05:30:22.102977: Current learning rate: 0.00567 +2026-04-09 05:32:03.266006: train_loss -0.3646 +2026-04-09 05:32:03.274514: val_loss -0.3453 +2026-04-09 05:32:03.276322: Pseudo dice [0.0, 0.0, 0.4406, 0.1823, 0.419, 0.0, 0.4456] +2026-04-09 05:32:03.278188: Epoch time: 101.17 s +2026-04-09 05:32:04.325880: +2026-04-09 05:32:04.327802: Epoch 469 +2026-04-09 05:32:04.329546: Current learning rate: 0.00566 +2026-04-09 05:33:46.357603: train_loss -0.327 +2026-04-09 05:33:46.363372: val_loss -0.3627 +2026-04-09 05:33:46.366039: Pseudo dice [0.0, 0.0, 0.6613, 0.5041, 0.544, 0.1067, 0.7067] +2026-04-09 05:33:46.367961: Epoch time: 102.03 s +2026-04-09 05:33:47.408569: +2026-04-09 05:33:47.411032: Epoch 470 +2026-04-09 05:33:47.413201: Current learning rate: 0.00565 +2026-04-09 05:35:28.817357: train_loss -0.3698 +2026-04-09 05:35:28.822193: val_loss -0.2537 +2026-04-09 05:35:28.824716: Pseudo dice [0.0, 0.0, 0.3984, 0.0935, 0.4644, 0.0, 0.7974] +2026-04-09 05:35:28.826698: Epoch time: 101.41 s 
+2026-04-09 05:35:29.883840: +2026-04-09 05:35:29.885969: Epoch 471 +2026-04-09 05:35:29.888420: Current learning rate: 0.00564 +2026-04-09 05:37:11.485071: train_loss -0.341 +2026-04-09 05:37:11.491162: val_loss -0.2221 +2026-04-09 05:37:11.493059: Pseudo dice [0.0, 0.0, 0.307, 0.0052, 0.3515, 0.0, 0.0164] +2026-04-09 05:37:11.495380: Epoch time: 101.6 s +2026-04-09 05:37:12.541389: +2026-04-09 05:37:12.543251: Epoch 472 +2026-04-09 05:37:12.545583: Current learning rate: 0.00563 +2026-04-09 05:38:54.348263: train_loss -0.3558 +2026-04-09 05:38:54.353753: val_loss -0.2713 +2026-04-09 05:38:54.355836: Pseudo dice [0.0, 0.0, 0.5439, 0.4423, 0.2836, 0.0217, 0.1223] +2026-04-09 05:38:54.358710: Epoch time: 101.81 s +2026-04-09 05:38:55.412373: +2026-04-09 05:38:55.415210: Epoch 473 +2026-04-09 05:38:55.416838: Current learning rate: 0.00562 +2026-04-09 05:40:37.079645: train_loss -0.3672 +2026-04-09 05:40:37.086107: val_loss -0.1737 +2026-04-09 05:40:37.088306: Pseudo dice [0.0, 0.0, 0.6088, 0.3869, 0.5118, 0.0179, 0.6695] +2026-04-09 05:40:37.091892: Epoch time: 101.67 s +2026-04-09 05:40:38.137567: +2026-04-09 05:40:38.140033: Epoch 474 +2026-04-09 05:40:38.142401: Current learning rate: 0.00561 +2026-04-09 05:42:19.639559: train_loss -0.3779 +2026-04-09 05:42:19.645920: val_loss -0.3304 +2026-04-09 05:42:19.648072: Pseudo dice [0.0, 0.0, 0.7017, 0.0028, 0.3147, 0.1006, 0.4918] +2026-04-09 05:42:19.651345: Epoch time: 101.51 s +2026-04-09 05:42:20.701286: +2026-04-09 05:42:20.703267: Epoch 475 +2026-04-09 05:42:20.704943: Current learning rate: 0.0056 +2026-04-09 05:44:02.172222: train_loss -0.3773 +2026-04-09 05:44:02.178657: val_loss -0.3331 +2026-04-09 05:44:02.180538: Pseudo dice [0.0, 0.0, 0.5911, 0.2474, 0.6358, 0.3235, 0.0482] +2026-04-09 05:44:02.182533: Epoch time: 101.47 s +2026-04-09 05:44:03.239399: +2026-04-09 05:44:03.244149: Epoch 476 +2026-04-09 05:44:03.245852: Current learning rate: 0.00559 +2026-04-09 05:45:45.330837: train_loss -0.3804 
+2026-04-09 05:45:45.338226: val_loss -0.2131 +2026-04-09 05:45:45.340206: Pseudo dice [0.0, 0.0, 0.4571, 0.5186, 0.4685, 0.0545, 0.5177] +2026-04-09 05:45:45.342200: Epoch time: 102.09 s +2026-04-09 05:45:46.390714: +2026-04-09 05:45:46.393031: Epoch 477 +2026-04-09 05:45:46.397731: Current learning rate: 0.00558 +2026-04-09 05:47:27.622535: train_loss -0.3693 +2026-04-09 05:47:27.630127: val_loss -0.3102 +2026-04-09 05:47:27.633047: Pseudo dice [0.0, 0.0, 0.4097, 0.5863, 0.4273, 0.0221, 0.699] +2026-04-09 05:47:27.635986: Epoch time: 101.23 s +2026-04-09 05:47:28.702602: +2026-04-09 05:47:28.704820: Epoch 478 +2026-04-09 05:47:28.707294: Current learning rate: 0.00557 +2026-04-09 05:49:10.005057: train_loss -0.3607 +2026-04-09 05:49:10.012130: val_loss -0.3683 +2026-04-09 05:49:10.014128: Pseudo dice [0.0, 0.0, 0.5109, 0.5513, 0.6391, 0.0556, 0.5203] +2026-04-09 05:49:10.016299: Epoch time: 101.31 s +2026-04-09 05:49:11.114350: +2026-04-09 05:49:11.116279: Epoch 479 +2026-04-09 05:49:11.118022: Current learning rate: 0.00556 +2026-04-09 05:50:51.944390: train_loss -0.3859 +2026-04-09 05:50:51.953415: val_loss -0.3575 +2026-04-09 05:50:51.955533: Pseudo dice [0.0276, 0.0, 0.6255, 0.4534, 0.2648, 0.1772, 0.5223] +2026-04-09 05:50:51.958580: Epoch time: 100.83 s +2026-04-09 05:50:53.044471: +2026-04-09 05:50:53.047168: Epoch 480 +2026-04-09 05:50:53.050036: Current learning rate: 0.00555 +2026-04-09 05:52:34.726735: train_loss -0.3616 +2026-04-09 05:52:34.736632: val_loss -0.2773 +2026-04-09 05:52:34.739055: Pseudo dice [0.0, 0.0, 0.4579, 0.4703, 0.4054, 0.0075, 0.0817] +2026-04-09 05:52:34.741035: Epoch time: 101.69 s +2026-04-09 05:52:35.807993: +2026-04-09 05:52:35.810325: Epoch 481 +2026-04-09 05:52:35.812665: Current learning rate: 0.00554 +2026-04-09 05:54:17.767558: train_loss -0.3759 +2026-04-09 05:54:17.773533: val_loss -0.3087 +2026-04-09 05:54:17.775589: Pseudo dice [0.0, 0.0, 0.5258, 0.0383, 0.3592, 0.0908, 0.6177] +2026-04-09 05:54:17.777884: Epoch 
time: 101.96 s +2026-04-09 05:54:18.850147: +2026-04-09 05:54:18.853027: Epoch 482 +2026-04-09 05:54:18.854951: Current learning rate: 0.00553 +2026-04-09 05:56:00.102723: train_loss -0.3414 +2026-04-09 05:56:00.111019: val_loss -0.1924 +2026-04-09 05:56:00.113050: Pseudo dice [0.0, 0.0, 0.5194, 0.0841, 0.5621, 0.0, 0.3882] +2026-04-09 05:56:00.115303: Epoch time: 101.26 s +2026-04-09 05:56:01.178449: +2026-04-09 05:56:01.180561: Epoch 483 +2026-04-09 05:56:01.182301: Current learning rate: 0.00552 +2026-04-09 05:57:43.280013: train_loss -0.323 +2026-04-09 05:57:43.291070: val_loss -0.3555 +2026-04-09 05:57:43.294566: Pseudo dice [0.0, 0.0, 0.5821, 0.4308, 0.5661, 0.0, 0.3547] +2026-04-09 05:57:43.302754: Epoch time: 102.1 s +2026-04-09 05:57:44.364024: +2026-04-09 05:57:44.366711: Epoch 484 +2026-04-09 05:57:44.368944: Current learning rate: 0.00551 +2026-04-09 05:59:26.464266: train_loss -0.345 +2026-04-09 05:59:26.475371: val_loss -0.2387 +2026-04-09 05:59:26.477376: Pseudo dice [0.0, 0.0, 0.6818, 0.0656, 0.4213, 0.0004, 0.662] +2026-04-09 05:59:26.479402: Epoch time: 102.1 s +2026-04-09 05:59:27.553323: +2026-04-09 05:59:27.556545: Epoch 485 +2026-04-09 05:59:27.558251: Current learning rate: 0.0055 +2026-04-09 06:01:09.801538: train_loss -0.3717 +2026-04-09 06:01:09.808294: val_loss -0.2755 +2026-04-09 06:01:09.810082: Pseudo dice [0.0, 0.0, 0.6838, 0.0855, 0.4214, 0.0667, 0.07] +2026-04-09 06:01:09.811831: Epoch time: 102.25 s +2026-04-09 06:01:10.879668: +2026-04-09 06:01:10.882735: Epoch 486 +2026-04-09 06:01:10.884528: Current learning rate: 0.00549 +2026-04-09 06:02:52.402108: train_loss -0.366 +2026-04-09 06:02:52.407515: val_loss -0.3846 +2026-04-09 06:02:52.409554: Pseudo dice [0.0, 0.0, 0.5899, 0.6529, 0.5718, 0.7037, 0.8455] +2026-04-09 06:02:52.411497: Epoch time: 101.53 s +2026-04-09 06:02:53.499126: +2026-04-09 06:02:53.500942: Epoch 487 +2026-04-09 06:02:53.502565: Current learning rate: 0.00548 +2026-04-09 06:04:35.493138: train_loss -0.3622 
+2026-04-09 06:04:35.498417: val_loss -0.3385 +2026-04-09 06:04:35.501051: Pseudo dice [0.0, 0.0, 0.5332, 0.3672, 0.5695, 0.1633, 0.5503] +2026-04-09 06:04:35.503887: Epoch time: 102.0 s +2026-04-09 06:04:36.569229: +2026-04-09 06:04:36.573399: Epoch 488 +2026-04-09 06:04:36.575133: Current learning rate: 0.00547 +2026-04-09 06:06:18.097934: train_loss -0.3651 +2026-04-09 06:06:18.104334: val_loss -0.3641 +2026-04-09 06:06:18.106208: Pseudo dice [0.0, 0.0, 0.4138, 0.4077, 0.5921, 0.1041, 0.7006] +2026-04-09 06:06:18.108592: Epoch time: 101.53 s +2026-04-09 06:06:19.183182: +2026-04-09 06:06:19.188241: Epoch 489 +2026-04-09 06:06:19.190428: Current learning rate: 0.00546 +2026-04-09 06:08:01.162292: train_loss -0.3789 +2026-04-09 06:08:01.170192: val_loss -0.1718 +2026-04-09 06:08:01.172400: Pseudo dice [0.0, 0.0, 0.6196, 0.4498, 0.354, 0.0, 0.6729] +2026-04-09 06:08:01.174553: Epoch time: 101.98 s +2026-04-09 06:08:02.241651: +2026-04-09 06:08:02.244112: Epoch 490 +2026-04-09 06:08:02.246409: Current learning rate: 0.00546 +2026-04-09 06:09:43.513093: train_loss -0.3888 +2026-04-09 06:09:43.520479: val_loss -0.2665 +2026-04-09 06:09:43.523237: Pseudo dice [0.0, 0.0, 0.3014, 0.0189, 0.2126, 0.0, 0.2649] +2026-04-09 06:09:43.525578: Epoch time: 101.27 s +2026-04-09 06:09:44.579757: +2026-04-09 06:09:44.582361: Epoch 491 +2026-04-09 06:09:44.584926: Current learning rate: 0.00545 +2026-04-09 06:11:25.728909: train_loss -0.3781 +2026-04-09 06:11:25.735229: val_loss -0.3282 +2026-04-09 06:11:25.737199: Pseudo dice [0.0, 0.0, 0.6372, 0.5384, 0.5213, 0.0678, 0.5122] +2026-04-09 06:11:25.739129: Epoch time: 101.15 s +2026-04-09 06:11:26.794389: +2026-04-09 06:11:26.796971: Epoch 492 +2026-04-09 06:11:26.799196: Current learning rate: 0.00544 +2026-04-09 06:13:08.901736: train_loss -0.3389 +2026-04-09 06:13:08.907827: val_loss -0.2976 +2026-04-09 06:13:08.910941: Pseudo dice [0.022, 0.0, 0.5951, 0.6119, 0.5826, 0.0, 0.708] +2026-04-09 06:13:08.919406: Epoch time: 102.11 s 
+2026-04-09 06:13:09.999092: +2026-04-09 06:13:10.001803: Epoch 493 +2026-04-09 06:13:10.003988: Current learning rate: 0.00543 +2026-04-09 06:14:51.692816: train_loss -0.3453 +2026-04-09 06:14:51.698538: val_loss -0.2745 +2026-04-09 06:14:51.700606: Pseudo dice [0.0, 0.0, 0.6885, 0.1186, 0.4394, 0.0001, 0.6675] +2026-04-09 06:14:51.702541: Epoch time: 101.7 s +2026-04-09 06:14:52.793886: +2026-04-09 06:14:52.795740: Epoch 494 +2026-04-09 06:14:52.797446: Current learning rate: 0.00542 +2026-04-09 06:16:33.561991: train_loss -0.4014 +2026-04-09 06:16:33.571753: val_loss -0.2752 +2026-04-09 06:16:33.574330: Pseudo dice [0.0, 0.0, 0.6008, 0.3179, 0.5019, 0.1419, 0.4912] +2026-04-09 06:16:33.577527: Epoch time: 100.77 s +2026-04-09 06:16:34.628970: +2026-04-09 06:16:34.630970: Epoch 495 +2026-04-09 06:16:34.633855: Current learning rate: 0.00541 +2026-04-09 06:18:17.712263: train_loss -0.4207 +2026-04-09 06:18:17.718370: val_loss -0.3466 +2026-04-09 06:18:17.720279: Pseudo dice [0.047, 0.0, 0.5452, 0.5931, 0.5411, 0.025, 0.6889] +2026-04-09 06:18:17.722598: Epoch time: 103.09 s +2026-04-09 06:18:18.807053: +2026-04-09 06:18:18.809895: Epoch 496 +2026-04-09 06:18:18.812390: Current learning rate: 0.0054 +2026-04-09 06:20:00.869027: train_loss -0.3942 +2026-04-09 06:20:00.875415: val_loss -0.2464 +2026-04-09 06:20:00.878086: Pseudo dice [0.0, 0.0, 0.5315, 0.4011, 0.4581, 0.0731, 0.6958] +2026-04-09 06:20:00.880343: Epoch time: 102.07 s +2026-04-09 06:20:01.950345: +2026-04-09 06:20:01.952410: Epoch 497 +2026-04-09 06:20:01.954043: Current learning rate: 0.00539 +2026-04-09 06:21:43.756560: train_loss -0.4041 +2026-04-09 06:21:43.763747: val_loss -0.2882 +2026-04-09 06:21:43.765773: Pseudo dice [0.0964, 0.0, 0.5844, 0.2289, 0.3701, 0.0223, 0.5175] +2026-04-09 06:21:43.768451: Epoch time: 101.81 s +2026-04-09 06:21:44.856557: +2026-04-09 06:21:44.859756: Epoch 498 +2026-04-09 06:21:44.861816: Current learning rate: 0.00538 +2026-04-09 06:23:27.263058: train_loss -0.3642 
+2026-04-09 06:23:27.268799: val_loss -0.3507 +2026-04-09 06:23:27.270789: Pseudo dice [0.6596, 0.0, 0.7003, 0.3243, 0.652, 0.126, 0.0067] +2026-04-09 06:23:27.272596: Epoch time: 102.41 s +2026-04-09 06:23:28.343374: +2026-04-09 06:23:28.345642: Epoch 499 +2026-04-09 06:23:28.348309: Current learning rate: 0.00537 +2026-04-09 06:25:09.817307: train_loss -0.3344 +2026-04-09 06:25:09.823733: val_loss -0.3111 +2026-04-09 06:25:09.826599: Pseudo dice [0.6878, 0.0, 0.6028, 0.028, 0.4738, 0.0, 0.0] +2026-04-09 06:25:09.828863: Epoch time: 101.48 s +2026-04-09 06:25:12.512976: +2026-04-09 06:25:12.514678: Epoch 500 +2026-04-09 06:25:12.516305: Current learning rate: 0.00536 +2026-04-09 06:26:53.925592: train_loss -0.3412 +2026-04-09 06:26:53.930603: val_loss -0.3421 +2026-04-09 06:26:53.932226: Pseudo dice [0.0, 0.0, 0.6976, 0.6853, 0.5634, 0.0, 0.0] +2026-04-09 06:26:53.934739: Epoch time: 101.42 s +2026-04-09 06:26:55.013904: +2026-04-09 06:26:55.015772: Epoch 501 +2026-04-09 06:26:55.017641: Current learning rate: 0.00535 +2026-04-09 06:28:37.020896: train_loss -0.371 +2026-04-09 06:28:37.027946: val_loss -0.3064 +2026-04-09 06:28:37.030331: Pseudo dice [0.0166, 0.0, 0.1739, 0.3419, 0.4082, 0.3702, 0.336] +2026-04-09 06:28:37.032031: Epoch time: 102.01 s +2026-04-09 06:28:38.111706: +2026-04-09 06:28:38.114747: Epoch 502 +2026-04-09 06:28:38.119954: Current learning rate: 0.00534 +2026-04-09 06:30:19.317506: train_loss -0.3682 +2026-04-09 06:30:19.324796: val_loss -0.2648 +2026-04-09 06:30:19.327269: Pseudo dice [0.4423, 0.0, 0.62, 0.0253, 0.3442, 0.0043, 0.6213] +2026-04-09 06:30:19.329848: Epoch time: 101.21 s +2026-04-09 06:30:20.402123: +2026-04-09 06:30:20.407150: Epoch 503 +2026-04-09 06:30:20.412081: Current learning rate: 0.00533 +2026-04-09 06:32:02.238785: train_loss -0.3374 +2026-04-09 06:32:02.247228: val_loss -0.3592 +2026-04-09 06:32:02.249779: Pseudo dice [0.0, 0.0, 0.543, 0.5798, 0.4962, 0.0, 0.6124] +2026-04-09 06:32:02.251816: Epoch time: 101.84 s 
+2026-04-09 06:32:03.322346: +2026-04-09 06:32:03.324106: Epoch 504 +2026-04-09 06:32:03.326074: Current learning rate: 0.00532 +2026-04-09 06:33:46.328166: train_loss -0.3766 +2026-04-09 06:33:46.333330: val_loss -0.3927 +2026-04-09 06:33:46.335391: Pseudo dice [0.5329, 0.0, 0.674, 0.5801, 0.4931, 0.0, 0.5385] +2026-04-09 06:33:46.337491: Epoch time: 103.01 s +2026-04-09 06:33:47.423945: +2026-04-09 06:33:47.426044: Epoch 505 +2026-04-09 06:33:47.427715: Current learning rate: 0.00531 +2026-04-09 06:35:28.383592: train_loss -0.3526 +2026-04-09 06:35:28.392737: val_loss -0.28 +2026-04-09 06:35:28.396686: Pseudo dice [0.3722, 0.0, 0.3723, 0.472, 0.4481, 0.0, 0.519] +2026-04-09 06:35:28.400170: Epoch time: 100.96 s +2026-04-09 06:35:29.454743: +2026-04-09 06:35:29.456655: Epoch 506 +2026-04-09 06:35:29.459126: Current learning rate: 0.0053 +2026-04-09 06:37:11.103011: train_loss -0.3811 +2026-04-09 06:37:11.108819: val_loss -0.2621 +2026-04-09 06:37:11.110523: Pseudo dice [0.0, 0.0, 0.4512, 0.2588, 0.3491, 0.0, 0.7213] +2026-04-09 06:37:11.112709: Epoch time: 101.65 s +2026-04-09 06:37:12.215933: +2026-04-09 06:37:12.217946: Epoch 507 +2026-04-09 06:37:12.219851: Current learning rate: 0.00529 +2026-04-09 06:38:53.214337: train_loss -0.3804 +2026-04-09 06:38:53.219003: val_loss -0.2815 +2026-04-09 06:38:53.221136: Pseudo dice [0.0, 0.0, 0.3954, 0.5476, 0.4364, 0.0, 0.4126] +2026-04-09 06:38:53.223295: Epoch time: 101.0 s +2026-04-09 06:38:54.300658: +2026-04-09 06:38:54.303547: Epoch 508 +2026-04-09 06:38:54.305889: Current learning rate: 0.00528 +2026-04-09 06:40:35.992961: train_loss -0.3814 +2026-04-09 06:40:35.997879: val_loss -0.3713 +2026-04-09 06:40:36.000164: Pseudo dice [0.42, 0.0, 0.5494, 0.0126, 0.5881, 0.004, 0.71] +2026-04-09 06:40:36.002134: Epoch time: 101.7 s +2026-04-09 06:40:37.098157: +2026-04-09 06:40:37.100040: Epoch 509 +2026-04-09 06:40:37.101408: Current learning rate: 0.00527 +2026-04-09 06:42:18.567419: train_loss -0.3872 +2026-04-09 
06:42:18.575208: val_loss -0.3345 +2026-04-09 06:42:18.578079: Pseudo dice [0.0, 0.0, 0.6925, 0.4848, 0.2588, 0.0523, 0.6425] +2026-04-09 06:42:18.581055: Epoch time: 101.47 s +2026-04-09 06:42:19.688390: +2026-04-09 06:42:19.690251: Epoch 510 +2026-04-09 06:42:19.691937: Current learning rate: 0.00526 +2026-04-09 06:44:00.824113: train_loss -0.3907 +2026-04-09 06:44:00.830415: val_loss -0.2298 +2026-04-09 06:44:00.832801: Pseudo dice [0.0, 0.0, 0.5294, 0.3312, 0.5508, 0.0802, 0.813] +2026-04-09 06:44:00.834823: Epoch time: 101.14 s +2026-04-09 06:44:01.920884: +2026-04-09 06:44:01.923888: Epoch 511 +2026-04-09 06:44:01.929060: Current learning rate: 0.00525 +2026-04-09 06:45:42.946517: train_loss -0.383 +2026-04-09 06:45:42.952927: val_loss -0.3648 +2026-04-09 06:45:42.955179: Pseudo dice [0.0, 0.0, 0.6165, 0.1122, 0.4264, 0.0576, 0.3298] +2026-04-09 06:45:42.957603: Epoch time: 101.03 s +2026-04-09 06:45:44.020029: +2026-04-09 06:45:44.021859: Epoch 512 +2026-04-09 06:45:44.023580: Current learning rate: 0.00524 +2026-04-09 06:47:25.437378: train_loss -0.3896 +2026-04-09 06:47:25.444654: val_loss -0.3477 +2026-04-09 06:47:25.447625: Pseudo dice [0.0457, 0.0, 0.6543, 0.5355, 0.5631, 0.4222, 0.6781] +2026-04-09 06:47:25.449940: Epoch time: 101.42 s +2026-04-09 06:47:26.524478: +2026-04-09 06:47:26.527249: Epoch 513 +2026-04-09 06:47:26.529234: Current learning rate: 0.00523 +2026-04-09 06:49:07.548552: train_loss -0.4075 +2026-04-09 06:49:07.555813: val_loss -0.3992 +2026-04-09 06:49:07.557789: Pseudo dice [0.3355, 0.0, 0.5438, 0.7202, 0.563, 0.5888, 0.8661] +2026-04-09 06:49:07.559693: Epoch time: 101.03 s +2026-04-09 06:49:08.648873: +2026-04-09 06:49:08.651245: Epoch 514 +2026-04-09 06:49:08.652901: Current learning rate: 0.00522 +2026-04-09 06:50:50.598474: train_loss -0.4048 +2026-04-09 06:50:50.605462: val_loss -0.3537 +2026-04-09 06:50:50.607641: Pseudo dice [0.325, 0.0, 0.5772, 0.5625, 0.3658, 0.0914, 0.535] +2026-04-09 06:50:50.609191: Epoch time: 101.95 s 
+2026-04-09 06:50:51.683679: +2026-04-09 06:50:51.686610: Epoch 515 +2026-04-09 06:50:51.688654: Current learning rate: 0.00521 +2026-04-09 06:52:32.784276: train_loss -0.3825 +2026-04-09 06:52:32.788817: val_loss -0.3509 +2026-04-09 06:52:32.790617: Pseudo dice [0.1762, 0.0, 0.6202, 0.1469, 0.3679, 0.0, 0.4897] +2026-04-09 06:52:32.792166: Epoch time: 101.1 s +2026-04-09 06:52:33.870545: +2026-04-09 06:52:33.872457: Epoch 516 +2026-04-09 06:52:33.874562: Current learning rate: 0.0052 +2026-04-09 06:54:15.035425: train_loss -0.3803 +2026-04-09 06:54:15.042082: val_loss -0.2628 +2026-04-09 06:54:15.043917: Pseudo dice [0.6613, 0.0, 0.7806, 0.5892, 0.4403, 0.0, 0.6789] +2026-04-09 06:54:15.046531: Epoch time: 101.17 s +2026-04-09 06:54:16.126306: +2026-04-09 06:54:16.128915: Epoch 517 +2026-04-09 06:54:16.131903: Current learning rate: 0.00519 +2026-04-09 06:55:58.419495: train_loss -0.3832 +2026-04-09 06:55:58.426894: val_loss -0.3906 +2026-04-09 06:55:58.430919: Pseudo dice [0.7432, 0.0, 0.5996, 0.1214, 0.4224, 0.4653, 0.0243] +2026-04-09 06:55:58.433315: Epoch time: 102.3 s +2026-04-09 06:55:59.504037: +2026-04-09 06:55:59.506328: Epoch 518 +2026-04-09 06:55:59.507700: Current learning rate: 0.00518 +2026-04-09 06:57:40.935742: train_loss -0.3853 +2026-04-09 06:57:40.940785: val_loss -0.3792 +2026-04-09 06:57:40.942754: Pseudo dice [0.0685, 0.0, 0.4879, 0.2528, 0.6295, 0.684, 0.4625] +2026-04-09 06:57:40.944508: Epoch time: 101.43 s +2026-04-09 06:57:42.021604: +2026-04-09 06:57:42.028182: Epoch 519 +2026-04-09 06:57:42.031363: Current learning rate: 0.00518 +2026-04-09 06:59:23.543515: train_loss -0.3739 +2026-04-09 06:59:23.547910: val_loss -0.2068 +2026-04-09 06:59:23.549398: Pseudo dice [0.2833, 0.0, 0.418, 0.0334, 0.55, 0.0712, 0.5498] +2026-04-09 06:59:23.550893: Epoch time: 101.53 s +2026-04-09 06:59:24.610287: +2026-04-09 06:59:24.612468: Epoch 520 +2026-04-09 06:59:24.614137: Current learning rate: 0.00517 +2026-04-09 07:01:06.128807: train_loss -0.3645 
+2026-04-09 07:01:06.134434: val_loss -0.3876 +2026-04-09 07:01:06.137541: Pseudo dice [0.5375, 0.0, 0.513, 0.2852, 0.5889, 0.3388, 0.5414] +2026-04-09 07:01:06.139358: Epoch time: 101.52 s +2026-04-09 07:01:07.247594: +2026-04-09 07:01:07.250356: Epoch 521 +2026-04-09 07:01:07.253205: Current learning rate: 0.00516 +2026-04-09 07:02:49.267437: train_loss -0.391 +2026-04-09 07:02:49.274695: val_loss -0.2506 +2026-04-09 07:02:49.276926: Pseudo dice [0.0133, 0.0, 0.6453, 0.2459, 0.4236, 0.0454, 0.2273] +2026-04-09 07:02:49.279250: Epoch time: 102.02 s +2026-04-09 07:02:50.347704: +2026-04-09 07:02:50.350142: Epoch 522 +2026-04-09 07:02:50.352099: Current learning rate: 0.00515 +2026-04-09 07:04:33.269632: train_loss -0.3946 +2026-04-09 07:04:33.277405: val_loss -0.2432 +2026-04-09 07:04:33.281027: Pseudo dice [0.5318, 0.0, 0.5246, 0.5899, 0.5127, 0.0204, 0.7792] +2026-04-09 07:04:33.284342: Epoch time: 102.93 s +2026-04-09 07:04:34.380998: +2026-04-09 07:04:34.383559: Epoch 523 +2026-04-09 07:04:34.386058: Current learning rate: 0.00514 +2026-04-09 07:06:16.614631: train_loss -0.4037 +2026-04-09 07:06:16.619624: val_loss -0.1415 +2026-04-09 07:06:16.622190: Pseudo dice [0.5077, 0.0, 0.2238, 0.0368, 0.4331, 0.0656, 0.415] +2026-04-09 07:06:16.623989: Epoch time: 102.24 s +2026-04-09 07:06:17.716386: +2026-04-09 07:06:17.718758: Epoch 524 +2026-04-09 07:06:17.721071: Current learning rate: 0.00513 +2026-04-09 07:07:59.214738: train_loss -0.3858 +2026-04-09 07:07:59.220745: val_loss -0.3534 +2026-04-09 07:07:59.222616: Pseudo dice [0.7013, 0.0, 0.5804, 0.0191, 0.6791, 0.0467, 0.8252] +2026-04-09 07:07:59.224734: Epoch time: 101.5 s +2026-04-09 07:08:01.228876: +2026-04-09 07:08:01.230745: Epoch 525 +2026-04-09 07:08:01.232729: Current learning rate: 0.00512 +2026-04-09 07:09:42.622126: train_loss -0.371 +2026-04-09 07:09:42.627913: val_loss -0.2801 +2026-04-09 07:09:42.632073: Pseudo dice [0.0, 0.0, 0.3669, 0.24, 0.339, 0.0, 0.0] +2026-04-09 07:09:42.634006: Epoch time: 
101.4 s +2026-04-09 07:09:43.728547: +2026-04-09 07:09:43.730920: Epoch 526 +2026-04-09 07:09:43.732624: Current learning rate: 0.00511 +2026-04-09 07:11:25.770856: train_loss -0.3188 +2026-04-09 07:11:25.776697: val_loss -0.374 +2026-04-09 07:11:25.778776: Pseudo dice [0.0, 0.0, 0.5154, 0.0, 0.5533, 0.0259, 0.5893] +2026-04-09 07:11:25.780825: Epoch time: 102.05 s +2026-04-09 07:11:26.866894: +2026-04-09 07:11:26.870128: Epoch 527 +2026-04-09 07:11:26.872674: Current learning rate: 0.0051 +2026-04-09 07:13:08.430841: train_loss -0.3866 +2026-04-09 07:13:08.438861: val_loss -0.3371 +2026-04-09 07:13:08.441279: Pseudo dice [0.0, 0.0, 0.3874, 0.6051, 0.4548, 0.0973, 0.557] +2026-04-09 07:13:08.443842: Epoch time: 101.57 s +2026-04-09 07:13:09.517888: +2026-04-09 07:13:09.519880: Epoch 528 +2026-04-09 07:13:09.523866: Current learning rate: 0.00509 +2026-04-09 07:14:51.279342: train_loss -0.3706 +2026-04-09 07:14:51.285383: val_loss -0.3681 +2026-04-09 07:14:51.287441: Pseudo dice [0.0, 0.0, 0.4747, 0.7056, 0.5765, 0.0, 0.8072] +2026-04-09 07:14:51.289110: Epoch time: 101.76 s +2026-04-09 07:14:52.355776: +2026-04-09 07:14:52.357768: Epoch 529 +2026-04-09 07:14:52.359296: Current learning rate: 0.00508 +2026-04-09 07:16:34.380395: train_loss -0.3576 +2026-04-09 07:16:34.386127: val_loss -0.3741 +2026-04-09 07:16:34.388113: Pseudo dice [0.0, 0.0, 0.6277, 0.2323, 0.4987, 0.0, 0.0229] +2026-04-09 07:16:34.390389: Epoch time: 102.03 s +2026-04-09 07:16:35.485664: +2026-04-09 07:16:35.487735: Epoch 530 +2026-04-09 07:16:35.491119: Current learning rate: 0.00507 +2026-04-09 07:18:17.398242: train_loss -0.3754 +2026-04-09 07:18:17.404587: val_loss -0.3464 +2026-04-09 07:18:17.406835: Pseudo dice [0.0168, 0.0, 0.762, 0.255, 0.4954, 0.0687, 0.4757] +2026-04-09 07:18:17.409276: Epoch time: 101.92 s +2026-04-09 07:18:18.522261: +2026-04-09 07:18:18.524259: Epoch 531 +2026-04-09 07:18:18.525857: Current learning rate: 0.00506 +2026-04-09 07:19:59.952693: train_loss -0.4186 
+2026-04-09 07:19:59.958471: val_loss -0.2594 +2026-04-09 07:19:59.960442: Pseudo dice [0.6744, 0.0, 0.4781, 0.5037, 0.3417, 0.0422, 0.5381] +2026-04-09 07:19:59.962192: Epoch time: 101.43 s +2026-04-09 07:20:01.100901: +2026-04-09 07:20:01.103738: Epoch 532 +2026-04-09 07:20:01.105388: Current learning rate: 0.00505 +2026-04-09 07:21:42.306007: train_loss -0.4182 +2026-04-09 07:21:42.311766: val_loss -0.3155 +2026-04-09 07:21:42.313767: Pseudo dice [0.1838, 0.0, 0.3738, 0.3565, 0.5635, 0.0645, 0.7067] +2026-04-09 07:21:42.315328: Epoch time: 101.21 s +2026-04-09 07:21:43.411521: +2026-04-09 07:21:43.413594: Epoch 533 +2026-04-09 07:21:43.415143: Current learning rate: 0.00504 +2026-04-09 07:23:25.186194: train_loss -0.4299 +2026-04-09 07:23:25.191177: val_loss -0.3901 +2026-04-09 07:23:25.193260: Pseudo dice [0.07, 0.0, 0.5269, 0.6372, 0.5382, 0.7824, 0.836] +2026-04-09 07:23:25.195856: Epoch time: 101.78 s +2026-04-09 07:23:26.272221: +2026-04-09 07:23:26.274051: Epoch 534 +2026-04-09 07:23:26.275715: Current learning rate: 0.00503 +2026-04-09 07:25:08.039139: train_loss -0.3818 +2026-04-09 07:25:08.044106: val_loss -0.3544 +2026-04-09 07:25:08.046015: Pseudo dice [0.4166, 0.0, 0.4923, 0.5814, 0.6153, 0.0337, 0.8553] +2026-04-09 07:25:08.047939: Epoch time: 101.77 s +2026-04-09 07:25:09.130399: +2026-04-09 07:25:09.132405: Epoch 535 +2026-04-09 07:25:09.134347: Current learning rate: 0.00502 +2026-04-09 07:26:51.140938: train_loss -0.3578 +2026-04-09 07:26:51.147207: val_loss -0.33 +2026-04-09 07:26:51.149876: Pseudo dice [0.0, 0.0, 0.7521, 0.5968, 0.5258, 0.2409, 0.0001] +2026-04-09 07:26:51.153015: Epoch time: 102.01 s +2026-04-09 07:26:52.227678: +2026-04-09 07:26:52.229727: Epoch 536 +2026-04-09 07:26:52.232193: Current learning rate: 0.00501 +2026-04-09 07:28:34.737515: train_loss -0.3884 +2026-04-09 07:28:34.743487: val_loss -0.3548 +2026-04-09 07:28:34.745920: Pseudo dice [0.0148, 0.0, 0.456, 0.6322, 0.5359, 0.1287, 0.5747] +2026-04-09 07:28:34.748599: 
Epoch time: 102.51 s +2026-04-09 07:28:35.840947: +2026-04-09 07:28:35.842766: Epoch 537 +2026-04-09 07:28:35.844588: Current learning rate: 0.005 +2026-04-09 07:30:17.103829: train_loss -0.3748 +2026-04-09 07:30:17.109245: val_loss -0.3723 +2026-04-09 07:30:17.111627: Pseudo dice [0.0, 0.0, 0.78, 0.3834, 0.3623, 0.5114, 0.4725] +2026-04-09 07:30:17.113656: Epoch time: 101.27 s +2026-04-09 07:30:18.197274: +2026-04-09 07:30:18.199212: Epoch 538 +2026-04-09 07:30:18.201869: Current learning rate: 0.00499 +2026-04-09 07:32:00.463828: train_loss -0.3674 +2026-04-09 07:32:00.488423: val_loss -0.3254 +2026-04-09 07:32:00.492922: Pseudo dice [0.0, 0.0, 0.5196, 0.2066, 0.3024, 0.0302, 0.7004] +2026-04-09 07:32:00.495582: Epoch time: 102.27 s +2026-04-09 07:32:01.592736: +2026-04-09 07:32:01.594936: Epoch 539 +2026-04-09 07:32:01.597286: Current learning rate: 0.00498 +2026-04-09 07:33:43.258306: train_loss -0.3921 +2026-04-09 07:33:43.277109: val_loss -0.3062 +2026-04-09 07:33:43.279719: Pseudo dice [0.048, 0.0, 0.5517, 0.608, 0.5271, 0.1017, 0.4814] +2026-04-09 07:33:43.281808: Epoch time: 101.67 s +2026-04-09 07:33:44.363755: +2026-04-09 07:33:44.365934: Epoch 540 +2026-04-09 07:33:44.368342: Current learning rate: 0.00497 +2026-04-09 07:35:26.294890: train_loss -0.3551 +2026-04-09 07:35:26.300535: val_loss -0.2739 +2026-04-09 07:35:26.302270: Pseudo dice [0.0, 0.0, 0.6887, 0.3516, 0.4017, 0.0, 0.0] +2026-04-09 07:35:26.305287: Epoch time: 101.93 s +2026-04-09 07:35:27.400876: +2026-04-09 07:35:27.403364: Epoch 541 +2026-04-09 07:35:27.405269: Current learning rate: 0.00496 +2026-04-09 07:37:08.747277: train_loss -0.3563 +2026-04-09 07:37:08.751849: val_loss -0.3537 +2026-04-09 07:37:08.753331: Pseudo dice [0.2403, 0.0, 0.4469, 0.4205, 0.542, 0.0, 0.6094] +2026-04-09 07:37:08.755316: Epoch time: 101.35 s +2026-04-09 07:37:09.852045: +2026-04-09 07:37:09.853645: Epoch 542 +2026-04-09 07:37:09.856098: Current learning rate: 0.00495 +2026-04-09 07:38:51.907337: train_loss 
-0.402 +2026-04-09 07:38:51.914390: val_loss -0.3274 +2026-04-09 07:38:51.916599: Pseudo dice [0.2211, 0.0, 0.324, 0.6478, 0.5503, 0.0, 0.7226] +2026-04-09 07:38:51.918641: Epoch time: 102.06 s +2026-04-09 07:38:53.010572: +2026-04-09 07:38:53.013876: Epoch 543 +2026-04-09 07:38:53.015963: Current learning rate: 0.00494 +2026-04-09 07:40:34.454792: train_loss -0.3592 +2026-04-09 07:40:34.460259: val_loss -0.4072 +2026-04-09 07:40:34.462611: Pseudo dice [0.535, 0.0, 0.4926, 0.7361, 0.3977, 0.0, 0.5094] +2026-04-09 07:40:34.464407: Epoch time: 101.45 s +2026-04-09 07:40:35.544015: +2026-04-09 07:40:35.546677: Epoch 544 +2026-04-09 07:40:35.548749: Current learning rate: 0.00493 +2026-04-09 07:42:17.035123: train_loss -0.3935 +2026-04-09 07:42:17.040029: val_loss -0.3443 +2026-04-09 07:42:17.042318: Pseudo dice [0.4005, 0.0, 0.5542, 0.3053, 0.4927, 0.0, 0.8034] +2026-04-09 07:42:17.044305: Epoch time: 101.49 s +2026-04-09 07:42:18.140460: +2026-04-09 07:42:18.142837: Epoch 545 +2026-04-09 07:42:18.144981: Current learning rate: 0.00492 +2026-04-09 07:44:00.950916: train_loss -0.3881 +2026-04-09 07:44:00.957076: val_loss -0.1142 +2026-04-09 07:44:00.959547: Pseudo dice [0.3158, 0.0, 0.5523, 0.53, 0.1968, 0.0, 0.2772] +2026-04-09 07:44:00.961428: Epoch time: 102.81 s +2026-04-09 07:44:02.064913: +2026-04-09 07:44:02.067140: Epoch 546 +2026-04-09 07:44:02.069191: Current learning rate: 0.00491 +2026-04-09 07:45:43.099051: train_loss -0.4011 +2026-04-09 07:45:43.104747: val_loss -0.2814 +2026-04-09 07:45:43.107024: Pseudo dice [0.2686, 0.0, 0.5171, 0.8164, 0.3143, 0.0, 0.3806] +2026-04-09 07:45:43.109489: Epoch time: 101.04 s +2026-04-09 07:45:44.216356: +2026-04-09 07:45:44.218450: Epoch 547 +2026-04-09 07:45:44.220443: Current learning rate: 0.0049 +2026-04-09 07:47:26.315527: train_loss -0.3807 +2026-04-09 07:47:26.320419: val_loss -0.2717 +2026-04-09 07:47:26.322110: Pseudo dice [0.3645, 0.0, 0.648, 0.038, 0.398, 0.0, 0.5103] +2026-04-09 07:47:26.323912: Epoch time: 
102.1 s +2026-04-09 07:47:27.394525: +2026-04-09 07:47:27.397073: Epoch 548 +2026-04-09 07:47:27.398946: Current learning rate: 0.00489 +2026-04-09 07:49:10.218032: train_loss -0.3778 +2026-04-09 07:49:10.226788: val_loss -0.3854 +2026-04-09 07:49:10.230385: Pseudo dice [0.5171, 0.0, 0.5623, 0.2684, 0.5625, 0.0, 0.7167] +2026-04-09 07:49:10.234024: Epoch time: 102.83 s +2026-04-09 07:49:11.330056: +2026-04-09 07:49:11.332478: Epoch 549 +2026-04-09 07:49:11.335070: Current learning rate: 0.00488 +2026-04-09 07:50:53.238026: train_loss -0.3684 +2026-04-09 07:50:53.244719: val_loss -0.3602 +2026-04-09 07:50:53.246845: Pseudo dice [0.2204, 0.0, 0.6811, 0.3698, 0.3506, 0.0, 0.4175] +2026-04-09 07:50:53.249716: Epoch time: 101.91 s +2026-04-09 07:50:55.744823: +2026-04-09 07:50:55.747193: Epoch 550 +2026-04-09 07:50:55.748855: Current learning rate: 0.00487 +2026-04-09 07:52:37.641045: train_loss -0.3702 +2026-04-09 07:52:37.646179: val_loss -0.3198 +2026-04-09 07:52:37.648406: Pseudo dice [0.2124, 0.0, 0.5801, 0.3877, 0.449, 0.0, 0.4605] +2026-04-09 07:52:37.650422: Epoch time: 101.9 s +2026-04-09 07:52:38.726165: +2026-04-09 07:52:38.728591: Epoch 551 +2026-04-09 07:52:38.730830: Current learning rate: 0.00486 +2026-04-09 07:54:20.100788: train_loss -0.4121 +2026-04-09 07:54:20.109849: val_loss -0.376 +2026-04-09 07:54:20.112512: Pseudo dice [0.2695, 0.0, 0.6687, 0.6156, 0.6551, 0.0167, 0.4638] +2026-04-09 07:54:20.114378: Epoch time: 101.38 s +2026-04-09 07:54:21.214195: +2026-04-09 07:54:21.217668: Epoch 552 +2026-04-09 07:54:21.219819: Current learning rate: 0.00485 +2026-04-09 07:56:04.009211: train_loss -0.3639 +2026-04-09 07:56:04.016297: val_loss -0.3389 +2026-04-09 07:56:04.019799: Pseudo dice [0.7016, 0.0, 0.567, 0.0792, 0.3186, 0.0709, 0.4129] +2026-04-09 07:56:04.022585: Epoch time: 102.8 s +2026-04-09 07:56:05.150630: +2026-04-09 07:56:05.152954: Epoch 553 +2026-04-09 07:56:05.155042: Current learning rate: 0.00484 +2026-04-09 07:57:46.850157: train_loss 
-0.398 +2026-04-09 07:57:46.855894: val_loss -0.3197 +2026-04-09 07:57:46.858079: Pseudo dice [0.1458, 0.0, 0.5987, 0.2538, 0.4589, 0.0331, 0.8545] +2026-04-09 07:57:46.860893: Epoch time: 101.7 s +2026-04-09 07:57:47.945416: +2026-04-09 07:57:47.948450: Epoch 554 +2026-04-09 07:57:47.950412: Current learning rate: 0.00484 +2026-04-09 07:59:29.187792: train_loss -0.3946 +2026-04-09 07:59:29.193958: val_loss -0.3391 +2026-04-09 07:59:29.195719: Pseudo dice [0.5752, 0.0, 0.4721, 0.2074, 0.2845, 0.0299, 0.4381] +2026-04-09 07:59:29.197629: Epoch time: 101.25 s +2026-04-09 07:59:30.277270: +2026-04-09 07:59:30.278945: Epoch 555 +2026-04-09 07:59:30.280719: Current learning rate: 0.00483 +2026-04-09 08:01:11.651134: train_loss -0.3531 +2026-04-09 08:01:11.658161: val_loss -0.3405 +2026-04-09 08:01:11.660941: Pseudo dice [0.514, 0.0, 0.6446, 0.0352, 0.6045, 0.0, 0.6912] +2026-04-09 08:01:11.662815: Epoch time: 101.38 s +2026-04-09 08:01:12.737256: +2026-04-09 08:01:12.739353: Epoch 556 +2026-04-09 08:01:12.741635: Current learning rate: 0.00482 +2026-04-09 08:02:54.129145: train_loss -0.3893 +2026-04-09 08:02:54.135801: val_loss -0.3895 +2026-04-09 08:02:54.137880: Pseudo dice [0.1969, 0.0, 0.6896, 0.0672, 0.5834, 0.4765, 0.509] +2026-04-09 08:02:54.139902: Epoch time: 101.4 s +2026-04-09 08:02:55.209162: +2026-04-09 08:02:55.210775: Epoch 557 +2026-04-09 08:02:55.212797: Current learning rate: 0.00481 +2026-04-09 08:04:36.391109: train_loss -0.4005 +2026-04-09 08:04:36.402796: val_loss -0.3613 +2026-04-09 08:04:36.405769: Pseudo dice [0.2973, 0.0, 0.6383, 0.2827, 0.4856, 0.206, 0.8094] +2026-04-09 08:04:36.408041: Epoch time: 101.18 s +2026-04-09 08:04:37.501770: +2026-04-09 08:04:37.503639: Epoch 558 +2026-04-09 08:04:37.505602: Current learning rate: 0.0048 +2026-04-09 08:06:19.038063: train_loss -0.4116 +2026-04-09 08:06:19.044429: val_loss -0.2579 +2026-04-09 08:06:19.046768: Pseudo dice [0.1407, 0.0, 0.6234, 0.3494, 0.3141, 0.0576, 0.7646] +2026-04-09 
08:06:19.048939: Epoch time: 101.54 s +2026-04-09 08:06:20.116652: +2026-04-09 08:06:20.119342: Epoch 559 +2026-04-09 08:06:20.121338: Current learning rate: 0.00479 +2026-04-09 08:08:01.394356: train_loss -0.4039 +2026-04-09 08:08:01.404505: val_loss -0.2512 +2026-04-09 08:08:01.406512: Pseudo dice [0.3325, 0.0, 0.6759, 0.5538, 0.5455, 0.0133, 0.7333] +2026-04-09 08:08:01.411018: Epoch time: 101.28 s +2026-04-09 08:08:02.496213: +2026-04-09 08:08:02.499719: Epoch 560 +2026-04-09 08:08:02.501505: Current learning rate: 0.00478 +2026-04-09 08:09:44.148407: train_loss -0.4101 +2026-04-09 08:09:44.155802: val_loss -0.3603 +2026-04-09 08:09:44.159160: Pseudo dice [0.0, 0.0, 0.4012, 0.6771, 0.528, 0.2413, 0.2993] +2026-04-09 08:09:44.162239: Epoch time: 101.66 s +2026-04-09 08:09:45.240009: +2026-04-09 08:09:45.242214: Epoch 561 +2026-04-09 08:09:45.244051: Current learning rate: 0.00477 +2026-04-09 08:11:26.971581: train_loss -0.407 +2026-04-09 08:11:26.977965: val_loss -0.3558 +2026-04-09 08:11:26.983057: Pseudo dice [0.2329, 0.0, 0.6687, 0.564, 0.4031, 0.1522, 0.6215] +2026-04-09 08:11:26.985183: Epoch time: 101.73 s +2026-04-09 08:11:28.053110: +2026-04-09 08:11:28.055353: Epoch 562 +2026-04-09 08:11:28.057816: Current learning rate: 0.00476 +2026-04-09 08:13:09.652136: train_loss -0.3981 +2026-04-09 08:13:09.657428: val_loss -0.3449 +2026-04-09 08:13:09.662858: Pseudo dice [0.4963, 0.0, 0.607, 0.0676, 0.4575, 0.0295, 0.3093] +2026-04-09 08:13:09.665899: Epoch time: 101.6 s +2026-04-09 08:13:10.739138: +2026-04-09 08:13:10.742207: Epoch 563 +2026-04-09 08:13:10.744281: Current learning rate: 0.00475 +2026-04-09 08:14:52.240679: train_loss -0.3973 +2026-04-09 08:14:52.246255: val_loss -0.3206 +2026-04-09 08:14:52.248181: Pseudo dice [0.405, 0.0, 0.5398, 0.2597, 0.6036, 0.0419, 0.5917] +2026-04-09 08:14:52.249806: Epoch time: 101.5 s +2026-04-09 08:14:53.330157: +2026-04-09 08:14:53.331816: Epoch 564 +2026-04-09 08:14:53.333294: Current learning rate: 0.00474 
+2026-04-09 08:16:34.867742: train_loss -0.4118 +2026-04-09 08:16:34.874986: val_loss -0.217 +2026-04-09 08:16:34.878271: Pseudo dice [0.4604, 0.0, 0.5256, 0.2132, 0.3982, 0.0178, 0.7606] +2026-04-09 08:16:34.881442: Epoch time: 101.54 s +2026-04-09 08:16:35.966681: +2026-04-09 08:16:35.968437: Epoch 565 +2026-04-09 08:16:35.970485: Current learning rate: 0.00473 +2026-04-09 08:18:18.306687: train_loss -0.4085 +2026-04-09 08:18:18.312028: val_loss -0.3109 +2026-04-09 08:18:18.313706: Pseudo dice [0.4889, 0.0, 0.5721, 0.6269, 0.3864, 0.1532, 0.4556] +2026-04-09 08:18:18.315275: Epoch time: 102.34 s +2026-04-09 08:18:19.384351: +2026-04-09 08:18:19.386739: Epoch 566 +2026-04-09 08:18:19.388439: Current learning rate: 0.00472 +2026-04-09 08:20:01.079231: train_loss -0.4157 +2026-04-09 08:20:01.085483: val_loss -0.4012 +2026-04-09 08:20:01.087433: Pseudo dice [0.2003, 0.0, 0.2414, 0.1006, 0.5931, 0.7863, 0.7951] +2026-04-09 08:20:01.089145: Epoch time: 101.7 s +2026-04-09 08:20:02.175932: +2026-04-09 08:20:02.178699: Epoch 567 +2026-04-09 08:20:02.180844: Current learning rate: 0.00471 +2026-04-09 08:21:43.797831: train_loss -0.3988 +2026-04-09 08:21:43.802891: val_loss -0.3865 +2026-04-09 08:21:43.805220: Pseudo dice [0.0423, 0.0, 0.5468, 0.5203, 0.6201, 0.1487, 0.8181] +2026-04-09 08:21:43.807051: Epoch time: 101.62 s +2026-04-09 08:21:44.881717: +2026-04-09 08:21:44.883529: Epoch 568 +2026-04-09 08:21:44.885423: Current learning rate: 0.0047 +2026-04-09 08:23:26.564677: train_loss -0.3955 +2026-04-09 08:23:26.570803: val_loss -0.2738 +2026-04-09 08:23:26.573231: Pseudo dice [0.7388, 0.0, 0.6333, 0.4202, 0.5271, 0.0879, 0.2917] +2026-04-09 08:23:26.576555: Epoch time: 101.69 s +2026-04-09 08:23:27.649623: +2026-04-09 08:23:27.651412: Epoch 569 +2026-04-09 08:23:27.653184: Current learning rate: 0.00469 +2026-04-09 08:25:09.153179: train_loss -0.41 +2026-04-09 08:25:09.158975: val_loss -0.3601 +2026-04-09 08:25:09.161767: Pseudo dice [0.3472, 0.0, 0.6828, 0.1535, 
0.3952, 0.09, 0.5804] +2026-04-09 08:25:09.165831: Epoch time: 101.51 s +2026-04-09 08:25:10.242523: +2026-04-09 08:25:10.244750: Epoch 570 +2026-04-09 08:25:10.247072: Current learning rate: 0.00468 +2026-04-09 08:26:51.984501: train_loss -0.3824 +2026-04-09 08:26:51.991334: val_loss -0.1032 +2026-04-09 08:26:51.993342: Pseudo dice [0.2711, 0.0, 0.3151, 0.2506, 0.2935, 0.0039, 0.0] +2026-04-09 08:26:51.995506: Epoch time: 101.75 s +2026-04-09 08:26:53.075759: +2026-04-09 08:26:53.077824: Epoch 571 +2026-04-09 08:26:53.079801: Current learning rate: 0.00467 +2026-04-09 08:28:34.814888: train_loss -0.3502 +2026-04-09 08:28:34.819570: val_loss -0.3398 +2026-04-09 08:28:34.821678: Pseudo dice [0.3027, 0.0, 0.4011, 0.1548, 0.5848, 0.4559, 0.0] +2026-04-09 08:28:34.824145: Epoch time: 101.74 s +2026-04-09 08:28:35.904642: +2026-04-09 08:28:35.906543: Epoch 572 +2026-04-09 08:28:35.908103: Current learning rate: 0.00466 +2026-04-09 08:30:17.627636: train_loss -0.3412 +2026-04-09 08:30:17.634825: val_loss -0.2661 +2026-04-09 08:30:17.639340: Pseudo dice [0.3332, 0.0, 0.5781, 0.0073, 0.4964, 0.0, 0.0] +2026-04-09 08:30:17.641369: Epoch time: 101.73 s +2026-04-09 08:30:18.758225: +2026-04-09 08:30:18.761210: Epoch 573 +2026-04-09 08:30:18.764440: Current learning rate: 0.00465 +2026-04-09 08:32:00.278204: train_loss -0.3146 +2026-04-09 08:32:00.285316: val_loss -0.3045 +2026-04-09 08:32:00.287473: Pseudo dice [0.2671, 0.0, 0.6285, 0.6091, 0.5246, 0.0155, 0.0] +2026-04-09 08:32:00.289436: Epoch time: 101.52 s +2026-04-09 08:32:01.396212: +2026-04-09 08:32:01.397837: Epoch 574 +2026-04-09 08:32:01.399637: Current learning rate: 0.00464 +2026-04-09 08:33:43.509525: train_loss -0.3774 +2026-04-09 08:33:43.516461: val_loss -0.2049 +2026-04-09 08:33:43.520798: Pseudo dice [0.1962, 0.0, 0.4106, 0.3695, 0.4585, 0.0193, 0.0] +2026-04-09 08:33:43.523388: Epoch time: 102.12 s +2026-04-09 08:33:44.637012: +2026-04-09 08:33:44.639391: Epoch 575 +2026-04-09 08:33:44.641149: Current 
learning rate: 0.00463 +2026-04-09 08:35:26.989285: train_loss -0.3587 +2026-04-09 08:35:26.995853: val_loss -0.3956 +2026-04-09 08:35:26.998555: Pseudo dice [0.5189, 0.0, 0.6774, 0.5449, 0.405, 0.5226, 0.0033] +2026-04-09 08:35:27.000957: Epoch time: 102.36 s +2026-04-09 08:35:28.122361: +2026-04-09 08:35:28.124702: Epoch 576 +2026-04-09 08:35:28.127020: Current learning rate: 0.00462 +2026-04-09 08:37:09.708537: train_loss -0.3704 +2026-04-09 08:37:09.713529: val_loss -0.1535 +2026-04-09 08:37:09.715485: Pseudo dice [0.1634, 0.0, 0.4844, 0.4395, 0.2368, 0.0462, 0.0] +2026-04-09 08:37:09.719062: Epoch time: 101.59 s +2026-04-09 08:37:10.812287: +2026-04-09 08:37:10.814548: Epoch 577 +2026-04-09 08:37:10.816299: Current learning rate: 0.00461 +2026-04-09 08:38:52.490422: train_loss -0.3721 +2026-04-09 08:38:52.497154: val_loss -0.3203 +2026-04-09 08:38:52.499546: Pseudo dice [0.0, 0.0, 0.5221, 0.3026, 0.4709, 0.0989, 0.014] +2026-04-09 08:38:52.502408: Epoch time: 101.68 s +2026-04-09 08:38:53.613578: +2026-04-09 08:38:53.615646: Epoch 578 +2026-04-09 08:38:53.617596: Current learning rate: 0.0046 +2026-04-09 08:40:35.404831: train_loss -0.3817 +2026-04-09 08:40:35.410988: val_loss -0.3329 +2026-04-09 08:40:35.412907: Pseudo dice [0.3633, 0.0, 0.5608, 0.6386, 0.4349, 0.113, 0.6941] +2026-04-09 08:40:35.414983: Epoch time: 101.79 s +2026-04-09 08:40:36.507825: +2026-04-09 08:40:36.510525: Epoch 579 +2026-04-09 08:40:36.513051: Current learning rate: 0.00459 +2026-04-09 08:42:18.857326: train_loss -0.3502 +2026-04-09 08:42:18.868083: val_loss -0.3252 +2026-04-09 08:42:18.870791: Pseudo dice [0.6724, 0.0, 0.7138, 0.0136, 0.3902, 0.152, 0.543] +2026-04-09 08:42:18.872887: Epoch time: 102.35 s +2026-04-09 08:42:19.977364: +2026-04-09 08:42:19.980018: Epoch 580 +2026-04-09 08:42:19.982265: Current learning rate: 0.00458 +2026-04-09 08:44:01.059724: train_loss -0.3877 +2026-04-09 08:44:01.065690: val_loss -0.3702 +2026-04-09 08:44:01.069027: Pseudo dice [0.0, 0.0, 0.7562, 
0.5497, 0.5086, 0.4901, 0.4524] +2026-04-09 08:44:01.070930: Epoch time: 101.09 s +2026-04-09 08:44:02.180573: +2026-04-09 08:44:02.182735: Epoch 581 +2026-04-09 08:44:02.184497: Current learning rate: 0.00457 +2026-04-09 08:45:44.257247: train_loss -0.3751 +2026-04-09 08:45:44.263097: val_loss -0.3454 +2026-04-09 08:45:44.265200: Pseudo dice [0.1707, 0.0, 0.5619, 0.1531, 0.4033, 0.2294, 0.3201] +2026-04-09 08:45:44.267432: Epoch time: 102.08 s +2026-04-09 08:45:45.379166: +2026-04-09 08:45:45.380853: Epoch 582 +2026-04-09 08:45:45.382869: Current learning rate: 0.00456 +2026-04-09 08:47:26.983103: train_loss -0.4093 +2026-04-09 08:47:26.990707: val_loss -0.3268 +2026-04-09 08:47:26.992727: Pseudo dice [0.3229, 0.0, 0.6849, 0.6882, 0.3987, 0.1088, 0.69] +2026-04-09 08:47:26.995063: Epoch time: 101.61 s +2026-04-09 08:47:28.125423: +2026-04-09 08:47:28.129260: Epoch 583 +2026-04-09 08:47:28.131574: Current learning rate: 0.00455 +2026-04-09 08:49:09.630635: train_loss -0.422 +2026-04-09 08:49:09.648848: val_loss -0.3369 +2026-04-09 08:49:09.661316: Pseudo dice [0.7289, 0.0, 0.6437, 0.693, 0.5803, 0.0006, 0.7182] +2026-04-09 08:49:09.667613: Epoch time: 101.51 s +2026-04-09 08:49:10.777934: +2026-04-09 08:49:10.779855: Epoch 584 +2026-04-09 08:49:10.781573: Current learning rate: 0.00454 +2026-04-09 08:50:51.950967: train_loss -0.3913 +2026-04-09 08:50:51.962188: val_loss -0.3668 +2026-04-09 08:50:51.965081: Pseudo dice [0.5823, 0.0, 0.6299, 0.4787, 0.5817, 0.0064, 0.7998] +2026-04-09 08:50:51.967606: Epoch time: 101.18 s +2026-04-09 08:50:53.062862: +2026-04-09 08:50:53.065321: Epoch 585 +2026-04-09 08:50:53.067088: Current learning rate: 0.00453 +2026-04-09 08:52:35.573639: train_loss -0.4144 +2026-04-09 08:52:35.580670: val_loss -0.3996 +2026-04-09 08:52:35.583656: Pseudo dice [0.0, 0.0, 0.6519, 0.6913, 0.5273, 0.4807, 0.7011] +2026-04-09 08:52:35.586317: Epoch time: 102.51 s +2026-04-09 08:52:36.680148: +2026-04-09 08:52:36.683045: Epoch 586 +2026-04-09 
08:52:36.685483: Current learning rate: 0.00452 +2026-04-09 08:54:18.312253: train_loss -0.3925 +2026-04-09 08:54:18.320064: val_loss -0.295 +2026-04-09 08:54:18.322508: Pseudo dice [0.0, 0.0, 0.3568, 0.0002, 0.5121, 0.1943, 0.6722] +2026-04-09 08:54:18.324750: Epoch time: 101.64 s +2026-04-09 08:54:19.427292: +2026-04-09 08:54:19.431421: Epoch 587 +2026-04-09 08:54:19.433203: Current learning rate: 0.00451 +2026-04-09 08:56:00.264368: train_loss -0.3945 +2026-04-09 08:56:00.269212: val_loss -0.3512 +2026-04-09 08:56:00.271462: Pseudo dice [0.1846, 0.0, 0.7438, 0.595, 0.4006, 0.1715, 0.4904] +2026-04-09 08:56:00.273463: Epoch time: 100.84 s +2026-04-09 08:56:01.348477: +2026-04-09 08:56:01.350939: Epoch 588 +2026-04-09 08:56:01.352549: Current learning rate: 0.0045 +2026-04-09 08:57:43.733781: train_loss -0.4212 +2026-04-09 08:57:43.741741: val_loss -0.3334 +2026-04-09 08:57:43.744603: Pseudo dice [0.2332, 0.0, 0.4079, 0.3373, 0.2572, 0.2762, 0.5664] +2026-04-09 08:57:43.747036: Epoch time: 102.39 s +2026-04-09 08:57:44.884859: +2026-04-09 08:57:44.889308: Epoch 589 +2026-04-09 08:57:44.891860: Current learning rate: 0.00449 +2026-04-09 08:59:27.230386: train_loss -0.3823 +2026-04-09 08:59:27.237202: val_loss -0.2683 +2026-04-09 08:59:27.239969: Pseudo dice [0.0533, 0.0, 0.6875, 0.6663, 0.52, 0.0185, 0.704] +2026-04-09 08:59:27.243014: Epoch time: 102.35 s +2026-04-09 08:59:28.329687: +2026-04-09 08:59:28.333994: Epoch 590 +2026-04-09 08:59:28.338403: Current learning rate: 0.00448 +2026-04-09 09:01:10.714765: train_loss -0.3682 +2026-04-09 09:01:10.722173: val_loss -0.1824 +2026-04-09 09:01:10.724435: Pseudo dice [0.0, 0.0, 0.6659, 0.0113, 0.4231, 0.0521, 0.7324] +2026-04-09 09:01:10.726499: Epoch time: 102.39 s +2026-04-09 09:01:11.831810: +2026-04-09 09:01:11.834276: Epoch 591 +2026-04-09 09:01:11.838137: Current learning rate: 0.00447 +2026-04-09 09:02:54.063013: train_loss -0.4109 +2026-04-09 09:02:54.068642: val_loss -0.3522 +2026-04-09 09:02:54.070712: 
Pseudo dice [0.018, 0.0, 0.6935, 0.4894, 0.3047, 0.4823, 0.7209] +2026-04-09 09:02:54.074846: Epoch time: 102.23 s +2026-04-09 09:02:55.247238: +2026-04-09 09:02:55.249452: Epoch 592 +2026-04-09 09:02:55.251106: Current learning rate: 0.00446 +2026-04-09 09:04:36.891898: train_loss -0.4191 +2026-04-09 09:04:36.898077: val_loss -0.3364 +2026-04-09 09:04:36.900422: Pseudo dice [0.0211, 0.0, 0.4646, 0.3102, 0.2639, 0.1495, 0.8272] +2026-04-09 09:04:36.902794: Epoch time: 101.65 s +2026-04-09 09:04:38.011358: +2026-04-09 09:04:38.013665: Epoch 593 +2026-04-09 09:04:38.015651: Current learning rate: 0.00445 +2026-04-09 09:06:20.552552: train_loss -0.388 +2026-04-09 09:06:20.559684: val_loss -0.3892 +2026-04-09 09:06:20.562821: Pseudo dice [0.1439, 0.0, 0.6347, 0.6399, 0.5258, 0.5177, 0.8008] +2026-04-09 09:06:20.565820: Epoch time: 102.54 s +2026-04-09 09:06:21.666468: +2026-04-09 09:06:21.668917: Epoch 594 +2026-04-09 09:06:21.671386: Current learning rate: 0.00444 +2026-04-09 09:08:03.638060: train_loss -0.4241 +2026-04-09 09:08:03.667220: val_loss -0.3729 +2026-04-09 09:08:03.670065: Pseudo dice [0.075, 0.0, 0.6189, 0.4927, 0.3551, 0.1837, 0.4334] +2026-04-09 09:08:03.672027: Epoch time: 101.97 s +2026-04-09 09:08:04.779209: +2026-04-09 09:08:04.781323: Epoch 595 +2026-04-09 09:08:04.783239: Current learning rate: 0.00443 +2026-04-09 09:09:46.460702: train_loss -0.3963 +2026-04-09 09:09:46.469208: val_loss -0.3351 +2026-04-09 09:09:46.472658: Pseudo dice [0.0, 0.0, 0.4824, 0.5378, 0.4718, 0.023, 0.7984] +2026-04-09 09:09:46.474966: Epoch time: 101.68 s +2026-04-09 09:09:47.628838: +2026-04-09 09:09:47.631058: Epoch 596 +2026-04-09 09:09:47.634355: Current learning rate: 0.00442 +2026-04-09 09:11:30.436818: train_loss -0.4029 +2026-04-09 09:11:30.444465: val_loss -0.2787 +2026-04-09 09:11:30.451173: Pseudo dice [0.0, 0.0, 0.5757, 0.1496, 0.3428, 0.0163, 0.7686] +2026-04-09 09:11:30.458757: Epoch time: 102.81 s +2026-04-09 09:11:31.541543: +2026-04-09 09:11:31.544977: 
Epoch 597 +2026-04-09 09:11:31.546764: Current learning rate: 0.00441 +2026-04-09 09:13:14.043748: train_loss -0.3899 +2026-04-09 09:13:14.051206: val_loss -0.3595 +2026-04-09 09:13:14.053600: Pseudo dice [0.3069, 0.0, 0.6558, 0.5685, 0.6099, 0.0773, 0.7973] +2026-04-09 09:13:14.056133: Epoch time: 102.51 s +2026-04-09 09:13:15.159819: +2026-04-09 09:13:15.162272: Epoch 598 +2026-04-09 09:13:15.164277: Current learning rate: 0.0044 +2026-04-09 09:14:56.747367: train_loss -0.4083 +2026-04-09 09:14:56.754364: val_loss -0.3522 +2026-04-09 09:14:56.757695: Pseudo dice [0.5583, 0.0, 0.4948, 0.461, 0.6023, 0.0954, 0.749] +2026-04-09 09:14:56.760278: Epoch time: 101.59 s +2026-04-09 09:14:57.855861: +2026-04-09 09:14:57.859495: Epoch 599 +2026-04-09 09:14:57.864142: Current learning rate: 0.00439 +2026-04-09 09:16:39.461611: train_loss -0.3725 +2026-04-09 09:16:39.469814: val_loss -0.395 +2026-04-09 09:16:39.472770: Pseudo dice [0.5633, 0.0, 0.6844, 0.685, 0.4881, 0.0713, 0.6488] +2026-04-09 09:16:39.475376: Epoch time: 101.61 s +2026-04-09 09:16:42.227125: +2026-04-09 09:16:42.229597: Epoch 600 +2026-04-09 09:16:42.232015: Current learning rate: 0.00438 +2026-04-09 09:18:24.174481: train_loss -0.4132 +2026-04-09 09:18:24.180865: val_loss -0.3908 +2026-04-09 09:18:24.182746: Pseudo dice [0.3478, 0.0, 0.3794, 0.5091, 0.5413, 0.2206, 0.6166] +2026-04-09 09:18:24.185674: Epoch time: 101.95 s +2026-04-09 09:18:25.275729: +2026-04-09 09:18:25.278010: Epoch 601 +2026-04-09 09:18:25.279919: Current learning rate: 0.00437 +2026-04-09 09:20:07.520159: train_loss -0.3918 +2026-04-09 09:20:07.526905: val_loss -0.3104 +2026-04-09 09:20:07.529670: Pseudo dice [0.6441, 0.0, 0.3842, 0.6555, 0.3453, 0.146, 0.4396] +2026-04-09 09:20:07.533360: Epoch time: 102.25 s +2026-04-09 09:20:08.614587: +2026-04-09 09:20:08.616891: Epoch 602 +2026-04-09 09:20:08.619211: Current learning rate: 0.00436 +2026-04-09 09:21:51.136842: train_loss -0.3735 +2026-04-09 09:21:51.142836: val_loss -0.1912 
+2026-04-09 09:21:51.145163: Pseudo dice [0.4934, 0.0, 0.5677, 0.3369, 0.2965, 0.0054, 0.4811] +2026-04-09 09:21:51.147306: Epoch time: 102.53 s +2026-04-09 09:21:52.302956: +2026-04-09 09:21:52.305358: Epoch 603 +2026-04-09 09:21:52.307879: Current learning rate: 0.00435 +2026-04-09 09:23:34.910213: train_loss -0.4173 +2026-04-09 09:23:34.916723: val_loss -0.2867 +2026-04-09 09:23:34.918866: Pseudo dice [0.0309, 0.0, 0.3937, 0.3907, 0.3753, 0.0218, 0.6612] +2026-04-09 09:23:34.920696: Epoch time: 102.61 s +2026-04-09 09:23:36.027575: +2026-04-09 09:23:36.029585: Epoch 604 +2026-04-09 09:23:36.032122: Current learning rate: 0.00434 +2026-04-09 09:25:19.179320: train_loss -0.3897 +2026-04-09 09:25:19.188668: val_loss -0.2894 +2026-04-09 09:25:19.191400: Pseudo dice [0.5373, 0.0, 0.5106, 0.4169, 0.3746, 0.0441, 0.5177] +2026-04-09 09:25:19.194603: Epoch time: 103.15 s +2026-04-09 09:25:20.302116: +2026-04-09 09:25:20.305279: Epoch 605 +2026-04-09 09:25:20.307535: Current learning rate: 0.00433 +2026-04-09 09:27:03.585056: train_loss -0.4148 +2026-04-09 09:27:03.591562: val_loss -0.4007 +2026-04-09 09:27:03.593873: Pseudo dice [0.7272, 0.0, 0.7435, 0.22, 0.4039, 0.2987, 0.4552] +2026-04-09 09:27:03.596709: Epoch time: 103.29 s +2026-04-09 09:27:04.734243: +2026-04-09 09:27:04.737459: Epoch 606 +2026-04-09 09:27:04.739131: Current learning rate: 0.00432 +2026-04-09 09:28:46.674995: train_loss -0.4287 +2026-04-09 09:28:46.684107: val_loss -0.2936 +2026-04-09 09:28:46.686578: Pseudo dice [0.5994, 0.0, 0.5539, 0.3098, 0.3873, 0.0132, 0.2708] +2026-04-09 09:28:46.688732: Epoch time: 101.94 s +2026-04-09 09:28:47.779896: +2026-04-09 09:28:47.782655: Epoch 607 +2026-04-09 09:28:47.784607: Current learning rate: 0.00431 +2026-04-09 09:30:30.389647: train_loss -0.3623 +2026-04-09 09:30:30.396137: val_loss -0.3956 +2026-04-09 09:30:30.398086: Pseudo dice [0.6557, 0.0, 0.7181, 0.7943, 0.4463, 0.348, 0.4887] +2026-04-09 09:30:30.399994: Epoch time: 102.61 s +2026-04-09 
09:30:31.490318: +2026-04-09 09:30:31.492621: Epoch 608 +2026-04-09 09:30:31.494312: Current learning rate: 0.0043 +2026-04-09 09:32:13.328608: train_loss -0.3959 +2026-04-09 09:32:13.334006: val_loss -0.2595 +2026-04-09 09:32:13.337128: Pseudo dice [0.0001, 0.0, 0.5199, 0.4206, 0.457, 0.0584, 0.1551] +2026-04-09 09:32:13.338928: Epoch time: 101.84 s +2026-04-09 09:32:14.486784: +2026-04-09 09:32:14.488543: Epoch 609 +2026-04-09 09:32:14.490751: Current learning rate: 0.00429 +2026-04-09 09:33:55.772980: train_loss -0.3531 +2026-04-09 09:33:55.779155: val_loss -0.2962 +2026-04-09 09:33:55.781085: Pseudo dice [0.0558, 0.0, 0.4521, 0.0495, 0.3447, 0.0669, 0.6451] +2026-04-09 09:33:55.783267: Epoch time: 101.29 s +2026-04-09 09:33:56.918022: +2026-04-09 09:33:56.920403: Epoch 610 +2026-04-09 09:33:56.922247: Current learning rate: 0.00429 +2026-04-09 09:35:39.012555: train_loss -0.4062 +2026-04-09 09:35:39.017687: val_loss -0.3554 +2026-04-09 09:35:39.020420: Pseudo dice [0.241, 0.0, 0.5736, 0.0051, 0.2904, 0.4818, 0.2837] +2026-04-09 09:35:39.022333: Epoch time: 102.1 s +2026-04-09 09:35:40.144019: +2026-04-09 09:35:40.147096: Epoch 611 +2026-04-09 09:35:40.149269: Current learning rate: 0.00428 +2026-04-09 09:37:22.984488: train_loss -0.3793 +2026-04-09 09:37:22.990777: val_loss -0.3538 +2026-04-09 09:37:22.993876: Pseudo dice [0.0446, 0.0, 0.1954, 0.633, 0.3864, 0.4824, 0.2806] +2026-04-09 09:37:22.995947: Epoch time: 102.84 s +2026-04-09 09:37:24.109747: +2026-04-09 09:37:24.112183: Epoch 612 +2026-04-09 09:37:24.114344: Current learning rate: 0.00427 +2026-04-09 09:39:07.568606: train_loss -0.4014 +2026-04-09 09:39:07.574030: val_loss -0.3524 +2026-04-09 09:39:07.576112: Pseudo dice [0.2019, 0.0, 0.6009, 0.1383, 0.4, 0.0451, 0.6951] +2026-04-09 09:39:07.577983: Epoch time: 103.46 s +2026-04-09 09:39:08.666347: +2026-04-09 09:39:08.668216: Epoch 613 +2026-04-09 09:39:08.670335: Current learning rate: 0.00426 +2026-04-09 09:40:50.406723: train_loss -0.402 
+2026-04-09 09:40:50.412029: val_loss -0.3709 +2026-04-09 09:40:50.414308: Pseudo dice [0.3982, 0.0, 0.5038, 0.439, 0.5726, 0.0818, 0.3807] +2026-04-09 09:40:50.416291: Epoch time: 101.74 s +2026-04-09 09:40:51.515507: +2026-04-09 09:40:51.517852: Epoch 614 +2026-04-09 09:40:51.519759: Current learning rate: 0.00425 +2026-04-09 09:42:32.825930: train_loss -0.3858 +2026-04-09 09:42:32.831795: val_loss -0.2148 +2026-04-09 09:42:32.833464: Pseudo dice [0.283, 0.0, 0.4732, 0.3637, 0.1734, 0.0591, 0.5543] +2026-04-09 09:42:32.835229: Epoch time: 101.31 s +2026-04-09 09:42:33.941621: +2026-04-09 09:42:33.943394: Epoch 615 +2026-04-09 09:42:33.944850: Current learning rate: 0.00424 +2026-04-09 09:44:15.156907: train_loss -0.3823 +2026-04-09 09:44:15.164382: val_loss -0.4319 +2026-04-09 09:44:15.167442: Pseudo dice [0.3026, 0.0, 0.605, 0.646, 0.5813, 0.6327, 0.8011] +2026-04-09 09:44:15.169910: Epoch time: 101.22 s +2026-04-09 09:44:16.290648: +2026-04-09 09:44:16.294835: Epoch 616 +2026-04-09 09:44:16.300545: Current learning rate: 0.00423 +2026-04-09 09:45:58.194196: train_loss -0.3609 +2026-04-09 09:45:58.200744: val_loss -0.3609 +2026-04-09 09:45:58.203510: Pseudo dice [0.4436, 0.0, 0.6371, 0.2154, 0.5422, 0.2672, 0.7163] +2026-04-09 09:45:58.205474: Epoch time: 101.91 s +2026-04-09 09:45:59.306307: +2026-04-09 09:45:59.308358: Epoch 617 +2026-04-09 09:45:59.310694: Current learning rate: 0.00422 +2026-04-09 09:47:41.119330: train_loss -0.3978 +2026-04-09 09:47:41.126086: val_loss -0.2092 +2026-04-09 09:47:41.129100: Pseudo dice [0.1357, 0.0, 0.4517, 0.4787, 0.2346, 0.0438, 0.7262] +2026-04-09 09:47:41.131208: Epoch time: 101.82 s +2026-04-09 09:47:42.239704: +2026-04-09 09:47:42.241973: Epoch 618 +2026-04-09 09:47:42.243773: Current learning rate: 0.00421 +2026-04-09 09:49:23.481928: train_loss -0.4189 +2026-04-09 09:49:23.487748: val_loss -0.3942 +2026-04-09 09:49:23.490270: Pseudo dice [0.5047, 0.0, 0.673, 0.3607, 0.512, 0.6638, 0.6463] +2026-04-09 09:49:23.492890: 
Epoch time: 101.25 s +2026-04-09 09:49:24.612122: +2026-04-09 09:49:24.614043: Epoch 619 +2026-04-09 09:49:24.615816: Current learning rate: 0.0042 +2026-04-09 09:51:06.311466: train_loss -0.3873 +2026-04-09 09:51:06.316408: val_loss -0.2266 +2026-04-09 09:51:06.318504: Pseudo dice [0.7247, 0.0, 0.7098, 0.1679, 0.5503, 0.0252, 0.3768] +2026-04-09 09:51:06.320787: Epoch time: 101.7 s +2026-04-09 09:51:07.413804: +2026-04-09 09:51:07.417113: Epoch 620 +2026-04-09 09:51:07.419750: Current learning rate: 0.00419 +2026-04-09 09:52:49.628912: train_loss -0.4017 +2026-04-09 09:52:49.634833: val_loss -0.4084 +2026-04-09 09:52:49.637455: Pseudo dice [0.541, 0.0, 0.6069, 0.3261, 0.6236, 0.1432, 0.7728] +2026-04-09 09:52:49.639553: Epoch time: 102.22 s +2026-04-09 09:52:50.762972: +2026-04-09 09:52:50.767034: Epoch 621 +2026-04-09 09:52:50.771092: Current learning rate: 0.00418 +2026-04-09 09:54:32.308540: train_loss -0.4169 +2026-04-09 09:54:32.315935: val_loss -0.3212 +2026-04-09 09:54:32.318563: Pseudo dice [0.5558, 0.0, 0.6967, 0.3282, 0.5522, 0.0505, 0.4215] +2026-04-09 09:54:32.320864: Epoch time: 101.55 s +2026-04-09 09:54:33.421257: +2026-04-09 09:54:33.425788: Epoch 622 +2026-04-09 09:54:33.428131: Current learning rate: 0.00417 +2026-04-09 09:56:15.236084: train_loss -0.3949 +2026-04-09 09:56:15.241891: val_loss -0.3637 +2026-04-09 09:56:15.244392: Pseudo dice [0.5244, 0.0, 0.6843, 0.1049, 0.0354, 0.6508, 0.6171] +2026-04-09 09:56:15.247033: Epoch time: 101.82 s +2026-04-09 09:56:16.338337: +2026-04-09 09:56:16.340477: Epoch 623 +2026-04-09 09:56:16.342865: Current learning rate: 0.00416 +2026-04-09 09:57:58.400583: train_loss -0.4184 +2026-04-09 09:57:58.407040: val_loss -0.3609 +2026-04-09 09:57:58.409083: Pseudo dice [0.5194, 0.0, 0.5552, 0.3904, 0.3179, 0.3085, 0.4493] +2026-04-09 09:57:58.411629: Epoch time: 102.07 s +2026-04-09 09:57:59.576375: +2026-04-09 09:57:59.578349: Epoch 624 +2026-04-09 09:57:59.581245: Current learning rate: 0.00415 +2026-04-09 
09:59:41.931586: train_loss -0.399 +2026-04-09 09:59:41.940175: val_loss -0.4106 +2026-04-09 09:59:41.943759: Pseudo dice [0.083, 0.0, 0.6899, 0.7429, 0.4676, 0.6532, 0.7637] +2026-04-09 09:59:41.946178: Epoch time: 102.36 s +2026-04-09 09:59:43.065197: +2026-04-09 09:59:43.069965: Epoch 625 +2026-04-09 09:59:43.071723: Current learning rate: 0.00414 +2026-04-09 10:01:26.227936: train_loss -0.4105 +2026-04-09 10:01:26.233511: val_loss -0.3458 +2026-04-09 10:01:26.235330: Pseudo dice [0.2452, 0.0, 0.6418, 0.6738, 0.6188, 0.1618, 0.6676] +2026-04-09 10:01:26.237479: Epoch time: 103.17 s +2026-04-09 10:01:27.352279: +2026-04-09 10:01:27.354093: Epoch 626 +2026-04-09 10:01:27.356218: Current learning rate: 0.00413 +2026-04-09 10:03:09.314842: train_loss -0.3988 +2026-04-09 10:03:09.320525: val_loss -0.3306 +2026-04-09 10:03:09.322457: Pseudo dice [0.3433, 0.0, 0.6856, 0.5098, 0.5267, 0.119, 0.8728] +2026-04-09 10:03:09.324439: Epoch time: 101.97 s +2026-04-09 10:03:10.420495: +2026-04-09 10:03:10.422801: Epoch 627 +2026-04-09 10:03:10.424818: Current learning rate: 0.00412 +2026-04-09 10:04:52.032418: train_loss -0.4369 +2026-04-09 10:04:52.037463: val_loss -0.3489 +2026-04-09 10:04:52.039261: Pseudo dice [0.4057, 0.0, 0.6441, 0.5954, 0.5847, 0.1117, 0.5216] +2026-04-09 10:04:52.043072: Epoch time: 101.62 s +2026-04-09 10:04:53.158301: +2026-04-09 10:04:53.160195: Epoch 628 +2026-04-09 10:04:53.161833: Current learning rate: 0.00411 +2026-04-09 10:06:35.294221: train_loss -0.3961 +2026-04-09 10:06:35.299628: val_loss -0.1935 +2026-04-09 10:06:35.303722: Pseudo dice [0.2909, 0.0, 0.2586, 0.4205, 0.6053, 0.0, 0.6185] +2026-04-09 10:06:35.305776: Epoch time: 102.14 s +2026-04-09 10:06:36.416432: +2026-04-09 10:06:36.418233: Epoch 629 +2026-04-09 10:06:36.420103: Current learning rate: 0.0041 +2026-04-09 10:08:18.510452: train_loss -0.4046 +2026-04-09 10:08:18.517688: val_loss -0.2645 +2026-04-09 10:08:18.521318: Pseudo dice [0.4973, 0.0, 0.5622, 0.6124, 0.5858, 0.0175, 
0.7778] +2026-04-09 10:08:18.524665: Epoch time: 102.1 s +2026-04-09 10:08:19.648781: +2026-04-09 10:08:19.650647: Epoch 630 +2026-04-09 10:08:19.652395: Current learning rate: 0.00409 +2026-04-09 10:10:01.895947: train_loss -0.409 +2026-04-09 10:10:01.916457: val_loss -0.42 +2026-04-09 10:10:01.921965: Pseudo dice [0.6332, 0.0, 0.7002, 0.2606, 0.5363, 0.4335, 0.6231] +2026-04-09 10:10:01.925071: Epoch time: 102.25 s +2026-04-09 10:10:03.032756: +2026-04-09 10:10:03.036182: Epoch 631 +2026-04-09 10:10:03.040165: Current learning rate: 0.00408 +2026-04-09 10:11:46.820324: train_loss -0.4049 +2026-04-09 10:11:46.826616: val_loss -0.318 +2026-04-09 10:11:46.828897: Pseudo dice [0.1622, 0.0, 0.6073, 0.7107, 0.5692, 0.0755, 0.8256] +2026-04-09 10:11:46.831321: Epoch time: 103.79 s +2026-04-09 10:11:47.959113: +2026-04-09 10:11:47.963310: Epoch 632 +2026-04-09 10:11:47.968363: Current learning rate: 0.00407 +2026-04-09 10:13:31.696486: train_loss -0.4141 +2026-04-09 10:13:31.703949: val_loss -0.2843 +2026-04-09 10:13:31.707214: Pseudo dice [0.0696, 0.0, 0.6602, 0.4776, 0.5656, 0.1122, 0.5714] +2026-04-09 10:13:31.709846: Epoch time: 103.74 s +2026-04-09 10:13:32.828426: +2026-04-09 10:13:32.834385: Epoch 633 +2026-04-09 10:13:32.840272: Current learning rate: 0.00406 +2026-04-09 10:15:16.468054: train_loss -0.4179 +2026-04-09 10:15:16.476186: val_loss -0.2713 +2026-04-09 10:15:16.482030: Pseudo dice [0.3001, 0.0, 0.5734, 0.0022, 0.6342, 0.0353, 0.7714] +2026-04-09 10:15:16.486315: Epoch time: 103.64 s +2026-04-09 10:15:17.598695: +2026-04-09 10:15:17.601728: Epoch 634 +2026-04-09 10:15:17.605017: Current learning rate: 0.00405 +2026-04-09 10:16:59.263315: train_loss -0.4129 +2026-04-09 10:16:59.268683: val_loss -0.3966 +2026-04-09 10:16:59.271155: Pseudo dice [0.0735, 0.0, 0.5746, 0.0927, 0.3145, 0.7343, 0.7586] +2026-04-09 10:16:59.273573: Epoch time: 101.67 s +2026-04-09 10:17:00.413103: +2026-04-09 10:17:00.415633: Epoch 635 +2026-04-09 10:17:00.417849: Current 
learning rate: 0.00404 +2026-04-09 10:18:42.354560: train_loss -0.4216 +2026-04-09 10:18:42.360348: val_loss -0.3324 +2026-04-09 10:18:42.362446: Pseudo dice [0.7334, 0.0, 0.7022, 0.1692, 0.3681, 0.141, 0.6207] +2026-04-09 10:18:42.365504: Epoch time: 101.94 s +2026-04-09 10:18:43.649910: +2026-04-09 10:18:43.653250: Epoch 636 +2026-04-09 10:18:43.656019: Current learning rate: 0.00403 +2026-04-09 10:20:26.036388: train_loss -0.4314 +2026-04-09 10:20:26.042192: val_loss -0.3314 +2026-04-09 10:20:26.044618: Pseudo dice [0.2463, 0.0, 0.3466, 0.53, 0.5535, 0.0155, 0.5212] +2026-04-09 10:20:26.046901: Epoch time: 102.39 s +2026-04-09 10:20:27.154361: +2026-04-09 10:20:27.156685: Epoch 637 +2026-04-09 10:20:27.158811: Current learning rate: 0.00402 +2026-04-09 10:22:09.385930: train_loss -0.4282 +2026-04-09 10:22:09.393877: val_loss -0.26 +2026-04-09 10:22:09.396212: Pseudo dice [0.5502, 0.0, 0.3001, 0.6655, 0.3833, 0.034, 0.8399] +2026-04-09 10:22:09.398969: Epoch time: 102.23 s +2026-04-09 10:22:10.561909: +2026-04-09 10:22:10.564648: Epoch 638 +2026-04-09 10:22:10.567268: Current learning rate: 0.00401 +2026-04-09 10:23:53.943085: train_loss -0.4335 +2026-04-09 10:23:53.950569: val_loss -0.3021 +2026-04-09 10:23:53.952330: Pseudo dice [0.7484, 0.0, 0.3879, 0.7215, 0.3564, 0.0381, 0.6049] +2026-04-09 10:23:53.954508: Epoch time: 103.38 s +2026-04-09 10:23:55.079603: +2026-04-09 10:23:55.081869: Epoch 639 +2026-04-09 10:23:55.084007: Current learning rate: 0.004 +2026-04-09 10:25:36.554268: train_loss -0.4387 +2026-04-09 10:25:36.560391: val_loss -0.4218 +2026-04-09 10:25:36.562683: Pseudo dice [0.3163, 0.0, 0.5033, 0.6701, 0.578, 0.7995, 0.5183] +2026-04-09 10:25:36.564803: Epoch time: 101.48 s +2026-04-09 10:25:37.688607: +2026-04-09 10:25:37.690965: Epoch 640 +2026-04-09 10:25:37.692562: Current learning rate: 0.00399 +2026-04-09 10:27:20.349692: train_loss -0.4149 +2026-04-09 10:27:20.363768: val_loss -0.3009 +2026-04-09 10:27:20.367274: Pseudo dice [0.444, 0.0, 
0.4812, 0.0, 0.4048, 0.1587, 0.558] +2026-04-09 10:27:20.369311: Epoch time: 102.66 s +2026-04-09 10:27:21.483116: +2026-04-09 10:27:21.484919: Epoch 641 +2026-04-09 10:27:21.486397: Current learning rate: 0.00398 +2026-04-09 10:29:03.709322: train_loss -0.3916 +2026-04-09 10:29:03.715752: val_loss -0.3806 +2026-04-09 10:29:03.717664: Pseudo dice [0.1734, 0.0, 0.5077, 0.1994, 0.3289, 0.0, 0.6469] +2026-04-09 10:29:03.720636: Epoch time: 102.23 s +2026-04-09 10:29:04.853140: +2026-04-09 10:29:04.856420: Epoch 642 +2026-04-09 10:29:04.859552: Current learning rate: 0.00397 +2026-04-09 10:30:46.319763: train_loss -0.3898 +2026-04-09 10:30:46.328444: val_loss -0.3908 +2026-04-09 10:30:46.331833: Pseudo dice [0.0727, 0.0, 0.7808, 0.7271, 0.3537, 0.0725, 0.8214] +2026-04-09 10:30:46.333940: Epoch time: 101.47 s +2026-04-09 10:30:47.540763: +2026-04-09 10:30:47.543057: Epoch 643 +2026-04-09 10:30:47.545017: Current learning rate: 0.00396 +2026-04-09 10:32:28.613771: train_loss -0.3759 +2026-04-09 10:32:28.620187: val_loss -0.3497 +2026-04-09 10:32:28.622039: Pseudo dice [0.0, 0.0, 0.7041, 0.0253, 0.3603, 0.0606, 0.3802] +2026-04-09 10:32:28.623698: Epoch time: 101.08 s +2026-04-09 10:32:29.767056: +2026-04-09 10:32:29.768903: Epoch 644 +2026-04-09 10:32:29.770436: Current learning rate: 0.00395 +2026-04-09 10:34:10.898881: train_loss -0.3946 +2026-04-09 10:34:10.903914: val_loss -0.2273 +2026-04-09 10:34:10.905840: Pseudo dice [0.0, 0.0, 0.5172, 0.2625, 0.2034, 0.0253, 0.1006] +2026-04-09 10:34:10.907478: Epoch time: 101.14 s +2026-04-09 10:34:12.099010: +2026-04-09 10:34:12.101477: Epoch 645 +2026-04-09 10:34:12.103605: Current learning rate: 0.00394 +2026-04-09 10:35:55.351183: train_loss -0.3811 +2026-04-09 10:35:55.357366: val_loss -0.3262 +2026-04-09 10:35:55.360822: Pseudo dice [0.0, 0.0, 0.5798, 0.2158, 0.237, 0.0, 0.0] +2026-04-09 10:35:55.363096: Epoch time: 103.26 s +2026-04-09 10:35:56.445858: +2026-04-09 10:35:56.448457: Epoch 646 +2026-04-09 10:35:56.450584: 
Current learning rate: 0.00393 +2026-04-09 10:37:38.339088: train_loss -0.3464 +2026-04-09 10:37:38.344157: val_loss -0.2494 +2026-04-09 10:37:38.347386: Pseudo dice [0.0, 0.0, 0.4938, 0.3271, 0.4571, 0.0468, 0.0] +2026-04-09 10:37:38.349267: Epoch time: 101.9 s +2026-04-09 10:37:39.452958: +2026-04-09 10:37:39.455295: Epoch 647 +2026-04-09 10:37:39.457677: Current learning rate: 0.00392 +2026-04-09 10:39:21.650103: train_loss -0.366 +2026-04-09 10:39:21.657315: val_loss -0.2327 +2026-04-09 10:39:21.659547: Pseudo dice [0.0, 0.0, 0.5742, 0.2569, 0.3195, 0.0177, 0.0006] +2026-04-09 10:39:21.661714: Epoch time: 102.2 s +2026-04-09 10:39:22.817032: +2026-04-09 10:39:22.819456: Epoch 648 +2026-04-09 10:39:22.821961: Current learning rate: 0.00391 +2026-04-09 10:41:05.110089: train_loss -0.3753 +2026-04-09 10:41:05.116496: val_loss -0.3032 +2026-04-09 10:41:05.118859: Pseudo dice [0.0, 0.0, 0.5271, 0.421, 0.4934, 0.0337, 0.1743] +2026-04-09 10:41:05.121675: Epoch time: 102.3 s +2026-04-09 10:41:06.260646: +2026-04-09 10:41:06.263266: Epoch 649 +2026-04-09 10:41:06.265796: Current learning rate: 0.0039 +2026-04-09 10:42:47.755118: train_loss -0.3953 +2026-04-09 10:42:47.761159: val_loss -0.4036 +2026-04-09 10:42:47.763639: Pseudo dice [0.0099, 0.0, 0.7167, 0.5801, 0.6038, 0.7172, 0.6821] +2026-04-09 10:42:47.765313: Epoch time: 101.5 s +2026-04-09 10:42:50.421088: +2026-04-09 10:42:50.423682: Epoch 650 +2026-04-09 10:42:50.425627: Current learning rate: 0.00389 +2026-04-09 10:44:33.274026: train_loss -0.4088 +2026-04-09 10:44:33.280401: val_loss -0.3164 +2026-04-09 10:44:33.282547: Pseudo dice [0.7773, 0.0, 0.4822, 0.094, 0.4147, 0.2206, 0.4985] +2026-04-09 10:44:33.285874: Epoch time: 102.86 s +2026-04-09 10:44:34.385576: +2026-04-09 10:44:34.387664: Epoch 651 +2026-04-09 10:44:34.389516: Current learning rate: 0.00388 +2026-04-09 10:46:15.696989: train_loss -0.4075 +2026-04-09 10:46:15.703956: val_loss -0.3421 +2026-04-09 10:46:15.707257: Pseudo dice [0.21, 0.0, 0.785, 
0.5218, 0.3791, 0.1417, 0.387] +2026-04-09 10:46:15.709409: Epoch time: 101.31 s +2026-04-09 10:46:16.908182: +2026-04-09 10:46:16.909967: Epoch 652 +2026-04-09 10:46:16.911738: Current learning rate: 0.00387 +2026-04-09 10:47:58.896083: train_loss -0.3959 +2026-04-09 10:47:58.902769: val_loss -0.286 +2026-04-09 10:47:58.905905: Pseudo dice [0.3286, 0.0, 0.6168, 0.0383, 0.4835, 0.0358, 0.333] +2026-04-09 10:47:58.907835: Epoch time: 101.99 s +2026-04-09 10:48:00.086294: +2026-04-09 10:48:00.088145: Epoch 653 +2026-04-09 10:48:00.090287: Current learning rate: 0.00386 +2026-04-09 10:49:41.307117: train_loss -0.4075 +2026-04-09 10:49:41.311955: val_loss -0.2876 +2026-04-09 10:49:41.313860: Pseudo dice [0.5701, 0.0, 0.7086, 0.3541, 0.2771, 0.0403, 0.6707] +2026-04-09 10:49:41.315429: Epoch time: 101.22 s +2026-04-09 10:49:42.449451: +2026-04-09 10:49:42.453205: Epoch 654 +2026-04-09 10:49:42.455138: Current learning rate: 0.00385 +2026-04-09 10:51:24.579648: train_loss -0.4271 +2026-04-09 10:51:24.585240: val_loss -0.3938 +2026-04-09 10:51:24.587679: Pseudo dice [0.6614, 0.0, 0.6965, 0.1658, 0.4313, 0.4052, 0.6228] +2026-04-09 10:51:24.589862: Epoch time: 102.13 s +2026-04-09 10:51:25.745970: +2026-04-09 10:51:25.748356: Epoch 655 +2026-04-09 10:51:25.750920: Current learning rate: 0.00384 +2026-04-09 10:53:07.179683: train_loss -0.4309 +2026-04-09 10:53:07.186322: val_loss -0.3936 +2026-04-09 10:53:07.188681: Pseudo dice [0.2478, 0.0, 0.6862, 0.1891, 0.584, 0.2094, 0.4073] +2026-04-09 10:53:07.190625: Epoch time: 101.44 s +2026-04-09 10:53:08.340952: +2026-04-09 10:53:08.342962: Epoch 656 +2026-04-09 10:53:08.345561: Current learning rate: 0.00383 +2026-04-09 10:54:50.345502: train_loss -0.4366 +2026-04-09 10:54:50.352160: val_loss -0.3918 +2026-04-09 10:54:50.355430: Pseudo dice [0.087, 0.0, 0.6576, 0.048, 0.5982, 0.4736, 0.7221] +2026-04-09 10:54:50.357932: Epoch time: 102.01 s +2026-04-09 10:54:51.480081: +2026-04-09 10:54:51.482125: Epoch 657 +2026-04-09 
10:54:51.483885: Current learning rate: 0.00382 +2026-04-09 10:56:32.919453: train_loss -0.4094 +2026-04-09 10:56:32.927197: val_loss -0.3548 +2026-04-09 10:56:32.929075: Pseudo dice [0.2162, 0.0, 0.7041, 0.2859, 0.6119, 0.1129, 0.3236] +2026-04-09 10:56:32.930730: Epoch time: 101.44 s +2026-04-09 10:56:34.043100: +2026-04-09 10:56:34.045273: Epoch 658 +2026-04-09 10:56:34.047073: Current learning rate: 0.00381 +2026-04-09 10:58:15.241205: train_loss -0.4112 +2026-04-09 10:58:15.246465: val_loss -0.4004 +2026-04-09 10:58:15.248885: Pseudo dice [0.3529, 0.0, 0.6845, 0.0488, 0.4626, 0.6536, 0.0596] +2026-04-09 10:58:15.250776: Epoch time: 101.2 s +2026-04-09 10:58:16.414494: +2026-04-09 10:58:16.417178: Epoch 659 +2026-04-09 10:58:16.419178: Current learning rate: 0.0038 +2026-04-09 10:59:57.238389: train_loss -0.3993 +2026-04-09 10:59:57.243829: val_loss -0.3465 +2026-04-09 10:59:57.245736: Pseudo dice [0.0276, 0.0, 0.2726, 0.506, 0.3878, 0.1211, 0.0653] +2026-04-09 10:59:57.247672: Epoch time: 100.83 s +2026-04-09 10:59:58.431700: +2026-04-09 10:59:58.433853: Epoch 660 +2026-04-09 10:59:58.435850: Current learning rate: 0.00379 +2026-04-09 11:01:40.644313: train_loss -0.363 +2026-04-09 11:01:40.650731: val_loss -0.3482 +2026-04-09 11:01:40.652644: Pseudo dice [0.7485, 0.0, 0.5484, 0.5798, 0.4857, 0.0235, 0.5878] +2026-04-09 11:01:40.654881: Epoch time: 102.22 s +2026-04-09 11:01:41.802943: +2026-04-09 11:01:41.806154: Epoch 661 +2026-04-09 11:01:41.808074: Current learning rate: 0.00378 +2026-04-09 11:03:23.412007: train_loss -0.3975 +2026-04-09 11:03:23.417388: val_loss -0.4162 +2026-04-09 11:03:23.419627: Pseudo dice [0.3328, 0.0, 0.5682, 0.4652, 0.452, 0.6965, 0.7473] +2026-04-09 11:03:23.421646: Epoch time: 101.61 s +2026-04-09 11:03:24.592467: +2026-04-09 11:03:24.594491: Epoch 662 +2026-04-09 11:03:24.596424: Current learning rate: 0.00377 +2026-04-09 11:05:06.092364: train_loss -0.4245 +2026-04-09 11:05:06.100481: val_loss -0.3369 +2026-04-09 
11:05:06.102346: Pseudo dice [0.0556, 0.0, 0.7249, 0.3821, 0.6242, 0.1416, 0.376] +2026-04-09 11:05:06.105306: Epoch time: 101.5 s +2026-04-09 11:05:07.319364: +2026-04-09 11:05:07.321712: Epoch 663 +2026-04-09 11:05:07.323914: Current learning rate: 0.00376 +2026-04-09 11:06:49.803746: train_loss -0.4114 +2026-04-09 11:06:49.811157: val_loss -0.3685 +2026-04-09 11:06:49.813727: Pseudo dice [0.3303, 0.0, 0.7091, 0.7486, 0.4263, 0.2088, 0.5693] +2026-04-09 11:06:49.817115: Epoch time: 102.49 s +2026-04-09 11:06:50.952422: +2026-04-09 11:06:50.954468: Epoch 664 +2026-04-09 11:06:50.956568: Current learning rate: 0.00375 +2026-04-09 11:08:32.253566: train_loss -0.4118 +2026-04-09 11:08:32.259817: val_loss -0.3451 +2026-04-09 11:08:32.261869: Pseudo dice [0.5253, 0.0, 0.7407, 0.7001, 0.3945, 0.0201, 0.6849] +2026-04-09 11:08:32.264067: Epoch time: 101.3 s +2026-04-09 11:08:34.297451: +2026-04-09 11:08:34.299728: Epoch 665 +2026-04-09 11:08:34.301769: Current learning rate: 0.00374 +2026-04-09 11:10:15.870209: train_loss -0.423 +2026-04-09 11:10:15.876086: val_loss -0.3571 +2026-04-09 11:10:15.881080: Pseudo dice [0.5889, 0.0, 0.7361, 0.2046, 0.5938, 0.0685, 0.6753] +2026-04-09 11:10:15.885343: Epoch time: 101.58 s +2026-04-09 11:10:17.032413: +2026-04-09 11:10:17.034773: Epoch 666 +2026-04-09 11:10:17.036702: Current learning rate: 0.00373 +2026-04-09 11:11:59.151117: train_loss -0.3582 +2026-04-09 11:11:59.157479: val_loss -0.3552 +2026-04-09 11:11:59.159755: Pseudo dice [0.2645, 0.0, 0.6877, 0.0879, 0.4386, 0.1822, 0.2437] +2026-04-09 11:11:59.163053: Epoch time: 102.12 s +2026-04-09 11:12:00.309189: +2026-04-09 11:12:00.311963: Epoch 667 +2026-04-09 11:12:00.314384: Current learning rate: 0.00372 +2026-04-09 11:13:41.716721: train_loss -0.3855 +2026-04-09 11:13:41.723934: val_loss -0.2024 +2026-04-09 11:13:41.726456: Pseudo dice [0.4222, 0.0, 0.615, 0.3252, 0.2203, 0.0602, 0.4914] +2026-04-09 11:13:41.729220: Epoch time: 101.41 s +2026-04-09 11:13:42.910025: 
+2026-04-09 11:13:42.912818: Epoch 668 +2026-04-09 11:13:42.914698: Current learning rate: 0.00371 +2026-04-09 11:15:24.298123: train_loss -0.403 +2026-04-09 11:15:24.303054: val_loss -0.2801 +2026-04-09 11:15:24.304623: Pseudo dice [0.7656, 0.0, 0.4254, 0.4389, 0.4897, 0.0099, 0.2624] +2026-04-09 11:15:24.306327: Epoch time: 101.39 s +2026-04-09 11:15:25.441978: +2026-04-09 11:15:25.444000: Epoch 669 +2026-04-09 11:15:25.445814: Current learning rate: 0.0037 +2026-04-09 11:17:06.879219: train_loss -0.4113 +2026-04-09 11:17:06.886535: val_loss -0.2988 +2026-04-09 11:17:06.889333: Pseudo dice [0.4011, 0.0, 0.3822, 0.7795, 0.4224, 0.0276, 0.3369] +2026-04-09 11:17:06.892246: Epoch time: 101.44 s +2026-04-09 11:17:08.057528: +2026-04-09 11:17:08.059485: Epoch 670 +2026-04-09 11:17:08.061010: Current learning rate: 0.00369 +2026-04-09 11:18:50.500619: train_loss -0.4114 +2026-04-09 11:18:50.507883: val_loss -0.3609 +2026-04-09 11:18:50.510535: Pseudo dice [0.7038, 0.0, 0.5728, 0.4149, 0.3645, 0.192, 0.7316] +2026-04-09 11:18:50.513037: Epoch time: 102.45 s +2026-04-09 11:18:51.652675: +2026-04-09 11:18:51.655394: Epoch 671 +2026-04-09 11:18:51.657297: Current learning rate: 0.00368 +2026-04-09 11:20:34.140158: train_loss -0.4353 +2026-04-09 11:20:34.146230: val_loss -0.2751 +2026-04-09 11:20:34.148032: Pseudo dice [0.4514, 0.0, 0.7958, 0.0328, 0.3872, 0.0519, 0.7103] +2026-04-09 11:20:34.150542: Epoch time: 102.49 s +2026-04-09 11:20:35.319829: +2026-04-09 11:20:35.321991: Epoch 672 +2026-04-09 11:20:35.323835: Current learning rate: 0.00367 +2026-04-09 11:22:17.757092: train_loss -0.4162 +2026-04-09 11:22:17.761837: val_loss -0.3117 +2026-04-09 11:22:17.764683: Pseudo dice [0.1877, 0.0, 0.6386, 0.1423, 0.5141, 0.0637, 0.7064] +2026-04-09 11:22:17.767371: Epoch time: 102.44 s +2026-04-09 11:22:18.910779: +2026-04-09 11:22:18.915290: Epoch 673 +2026-04-09 11:22:18.920478: Current learning rate: 0.00366 +2026-04-09 11:24:02.912476: train_loss -0.424 +2026-04-09 
11:24:02.921914: val_loss -0.3771 +2026-04-09 11:24:02.924587: Pseudo dice [0.4503, 0.0, 0.634, 0.5497, 0.439, 0.0677, 0.8207] +2026-04-09 11:24:02.927028: Epoch time: 104.0 s +2026-04-09 11:24:04.043129: +2026-04-09 11:24:04.045130: Epoch 674 +2026-04-09 11:24:04.047279: Current learning rate: 0.00365 +2026-04-09 11:25:46.884736: train_loss -0.438 +2026-04-09 11:25:46.892589: val_loss -0.3468 +2026-04-09 11:25:46.895357: Pseudo dice [0.4012, 0.0, 0.6241, 0.1847, 0.362, 0.085, 0.6259] +2026-04-09 11:25:46.897934: Epoch time: 102.84 s +2026-04-09 11:25:48.028914: +2026-04-09 11:25:48.031549: Epoch 675 +2026-04-09 11:25:48.033967: Current learning rate: 0.00364 +2026-04-09 11:27:30.526199: train_loss -0.4224 +2026-04-09 11:27:30.537778: val_loss -0.3865 +2026-04-09 11:27:30.541694: Pseudo dice [0.7641, 0.0, 0.5692, 0.0145, 0.5727, 0.3024, 0.6513] +2026-04-09 11:27:30.545685: Epoch time: 102.5 s +2026-04-09 11:27:31.678341: +2026-04-09 11:27:31.680410: Epoch 676 +2026-04-09 11:27:31.682292: Current learning rate: 0.00363 +2026-04-09 11:29:14.106602: train_loss -0.4071 +2026-04-09 11:29:14.112258: val_loss -0.2729 +2026-04-09 11:29:14.114973: Pseudo dice [0.5003, 0.0, 0.4696, 0.1552, 0.6328, 0.028, 0.7441] +2026-04-09 11:29:14.116953: Epoch time: 102.43 s +2026-04-09 11:29:15.256714: +2026-04-09 11:29:15.258748: Epoch 677 +2026-04-09 11:29:15.260754: Current learning rate: 0.00362 +2026-04-09 11:30:56.547459: train_loss -0.4192 +2026-04-09 11:30:56.553986: val_loss -0.3478 +2026-04-09 11:30:56.555862: Pseudo dice [0.2024, 0.0, 0.7043, 0.4296, 0.2518, 0.0426, 0.139] +2026-04-09 11:30:56.557819: Epoch time: 101.29 s +2026-04-09 11:30:57.709799: +2026-04-09 11:30:57.711800: Epoch 678 +2026-04-09 11:30:57.713867: Current learning rate: 0.00361 +2026-04-09 11:32:40.990777: train_loss -0.4008 +2026-04-09 11:32:40.997085: val_loss -0.2688 +2026-04-09 11:32:40.999340: Pseudo dice [0.203, 0.0, 0.6397, 0.7077, 0.5737, 0.0057, 0.7649] +2026-04-09 11:32:41.001750: Epoch time: 
103.28 s +2026-04-09 11:32:42.130540: +2026-04-09 11:32:42.133811: Epoch 679 +2026-04-09 11:32:42.136838: Current learning rate: 0.0036 +2026-04-09 11:34:24.729780: train_loss -0.4178 +2026-04-09 11:34:24.736451: val_loss -0.2936 +2026-04-09 11:34:24.738323: Pseudo dice [0.1596, 0.0, 0.4077, 0.741, 0.543, 0.0921, 0.859] +2026-04-09 11:34:24.742115: Epoch time: 102.6 s +2026-04-09 11:34:25.901837: +2026-04-09 11:34:25.904726: Epoch 680 +2026-04-09 11:34:25.907140: Current learning rate: 0.00359 +2026-04-09 11:36:07.258744: train_loss -0.4481 +2026-04-09 11:36:07.264794: val_loss -0.4037 +2026-04-09 11:36:07.267353: Pseudo dice [0.0228, 0.0, 0.5411, 0.5296, 0.3916, 0.537, 0.6136] +2026-04-09 11:36:07.269390: Epoch time: 101.36 s +2026-04-09 11:36:08.393384: +2026-04-09 11:36:08.395375: Epoch 681 +2026-04-09 11:36:08.397022: Current learning rate: 0.00358 +2026-04-09 11:37:50.481910: train_loss -0.4565 +2026-04-09 11:37:50.488665: val_loss -0.2773 +2026-04-09 11:37:50.490736: Pseudo dice [0.4251, 0.0, 0.5601, 0.271, 0.4743, 0.0293, 0.8021] +2026-04-09 11:37:50.493098: Epoch time: 102.09 s +2026-04-09 11:37:51.666680: +2026-04-09 11:37:51.669170: Epoch 682 +2026-04-09 11:37:51.671165: Current learning rate: 0.00357 +2026-04-09 11:39:33.469328: train_loss -0.4304 +2026-04-09 11:39:33.474994: val_loss -0.3988 +2026-04-09 11:39:33.477028: Pseudo dice [0.7284, 0.0, 0.5691, 0.6285, 0.4487, 0.1438, 0.5606] +2026-04-09 11:39:33.478844: Epoch time: 101.81 s +2026-04-09 11:39:34.650975: +2026-04-09 11:39:34.652946: Epoch 683 +2026-04-09 11:39:34.654755: Current learning rate: 0.00356 +2026-04-09 11:41:16.350744: train_loss -0.436 +2026-04-09 11:41:16.356132: val_loss -0.3987 +2026-04-09 11:41:16.358383: Pseudo dice [0.3061, 0.0, 0.4827, 0.7954, 0.6064, 0.7249, 0.8242] +2026-04-09 11:41:16.361012: Epoch time: 101.7 s +2026-04-09 11:41:17.506104: +2026-04-09 11:41:17.508718: Epoch 684 +2026-04-09 11:41:17.512115: Current learning rate: 0.00355 +2026-04-09 11:42:58.962028: 
train_loss -0.4305 +2026-04-09 11:42:58.968298: val_loss -0.3335 +2026-04-09 11:42:58.969968: Pseudo dice [0.1957, 0.0, 0.7039, 0.6398, 0.3683, 0.151, 0.5416] +2026-04-09 11:42:58.972315: Epoch time: 101.46 s +2026-04-09 11:43:01.041152: +2026-04-09 11:43:01.043636: Epoch 685 +2026-04-09 11:43:01.045915: Current learning rate: 0.00354 +2026-04-09 11:44:43.255533: train_loss -0.4182 +2026-04-09 11:44:43.267055: val_loss -0.2005 +2026-04-09 11:44:43.269956: Pseudo dice [0.117, 0.0, 0.6372, 0.6098, 0.2591, 0.0196, 0.4295] +2026-04-09 11:44:43.273096: Epoch time: 102.22 s +2026-04-09 11:44:44.396967: +2026-04-09 11:44:44.401120: Epoch 686 +2026-04-09 11:44:44.404105: Current learning rate: 0.00353 +2026-04-09 11:46:26.644886: train_loss -0.4072 +2026-04-09 11:46:26.650154: val_loss -0.4279 +2026-04-09 11:46:26.651965: Pseudo dice [0.2119, 0.0, 0.6436, 0.7615, 0.4012, 0.3384, 0.7011] +2026-04-09 11:46:26.653595: Epoch time: 102.25 s +2026-04-09 11:46:27.779537: +2026-04-09 11:46:27.781762: Epoch 687 +2026-04-09 11:46:27.784262: Current learning rate: 0.00352 +2026-04-09 11:48:09.568595: train_loss -0.4121 +2026-04-09 11:48:09.574182: val_loss -0.3665 +2026-04-09 11:48:09.577151: Pseudo dice [0.4724, 0.0, 0.4656, 0.0042, 0.5295, 0.0842, 0.6553] +2026-04-09 11:48:09.579314: Epoch time: 101.79 s +2026-04-09 11:48:10.700116: +2026-04-09 11:48:10.702839: Epoch 688 +2026-04-09 11:48:10.704782: Current learning rate: 0.00351 +2026-04-09 11:49:52.793654: train_loss -0.4266 +2026-04-09 11:49:52.799528: val_loss -0.1998 +2026-04-09 11:49:52.802338: Pseudo dice [0.0, 0.0, 0.4451, 0.3275, 0.3295, 0.1049, 0.5504] +2026-04-09 11:49:52.804915: Epoch time: 102.1 s +2026-04-09 11:49:53.921818: +2026-04-09 11:49:53.925174: Epoch 689 +2026-04-09 11:49:53.927727: Current learning rate: 0.0035 +2026-04-09 11:51:34.866236: train_loss -0.424 +2026-04-09 11:51:34.870504: val_loss -0.3529 +2026-04-09 11:51:34.872481: Pseudo dice [0.2919, 0.0, 0.5036, 0.0669, 0.5115, 0.0935, 0.5387] +2026-04-09 
11:51:34.874318: Epoch time: 100.95 s +2026-04-09 11:51:35.997536: +2026-04-09 11:51:35.999617: Epoch 690 +2026-04-09 11:51:36.001172: Current learning rate: 0.00349 +2026-04-09 11:53:18.027539: train_loss -0.4312 +2026-04-09 11:53:18.033480: val_loss -0.1137 +2026-04-09 11:53:18.035485: Pseudo dice [0.8017, 0.0, 0.3715, 0.5326, 0.3216, 0.0122, 0.1286] +2026-04-09 11:53:18.037234: Epoch time: 102.03 s +2026-04-09 11:53:19.149924: +2026-04-09 11:53:19.154479: Epoch 691 +2026-04-09 11:53:19.160406: Current learning rate: 0.00348 +2026-04-09 11:55:01.845402: train_loss -0.416 +2026-04-09 11:55:01.853032: val_loss -0.4153 +2026-04-09 11:55:01.855641: Pseudo dice [0.5846, 0.0, 0.7862, 0.3037, 0.2574, 0.5668, 0.8222] +2026-04-09 11:55:01.858748: Epoch time: 102.7 s +2026-04-09 11:55:03.046550: +2026-04-09 11:55:03.048986: Epoch 692 +2026-04-09 11:55:03.051163: Current learning rate: 0.00346 +2026-04-09 11:56:46.325728: train_loss -0.4357 +2026-04-09 11:56:46.332980: val_loss -0.3139 +2026-04-09 11:56:46.336205: Pseudo dice [0.3993, 0.0, 0.4753, 0.5806, 0.529, 0.0253, 0.7996] +2026-04-09 11:56:46.339101: Epoch time: 103.28 s +2026-04-09 11:56:47.459627: +2026-04-09 11:56:47.462279: Epoch 693 +2026-04-09 11:56:47.464503: Current learning rate: 0.00345 +2026-04-09 11:58:29.413472: train_loss -0.3933 +2026-04-09 11:58:29.419525: val_loss -0.3918 +2026-04-09 11:58:29.423194: Pseudo dice [0.1001, 0.0, 0.6514, 0.5285, 0.5795, 0.4207, 0.0871] +2026-04-09 11:58:29.425411: Epoch time: 101.96 s +2026-04-09 11:58:30.563499: +2026-04-09 11:58:30.568488: Epoch 694 +2026-04-09 11:58:30.570837: Current learning rate: 0.00344 +2026-04-09 12:00:12.594060: train_loss -0.383 +2026-04-09 12:00:12.602508: val_loss -0.3573 +2026-04-09 12:00:12.604973: Pseudo dice [0.4713, 0.0, 0.3433, 0.7827, 0.3723, 0.0156, 0.7134] +2026-04-09 12:00:12.607315: Epoch time: 102.03 s +2026-04-09 12:00:13.820800: +2026-04-09 12:00:13.823935: Epoch 695 +2026-04-09 12:00:13.826959: Current learning rate: 0.00343 
+2026-04-09 12:01:56.899801: train_loss -0.4395 +2026-04-09 12:01:56.914652: val_loss -0.3845 +2026-04-09 12:01:56.922032: Pseudo dice [0.6398, 0.0, 0.695, 0.0388, 0.6364, 0.033, 0.648] +2026-04-09 12:01:56.924094: Epoch time: 103.08 s +2026-04-09 12:01:58.064020: +2026-04-09 12:01:58.072948: Epoch 696 +2026-04-09 12:01:58.075170: Current learning rate: 0.00342 +2026-04-09 12:03:39.972435: train_loss -0.4357 +2026-04-09 12:03:39.978554: val_loss -0.3718 +2026-04-09 12:03:39.980463: Pseudo dice [0.2123, 0.0, 0.4558, 0.3823, 0.5326, 0.3489, 0.6315] +2026-04-09 12:03:39.982526: Epoch time: 101.91 s +2026-04-09 12:03:41.139132: +2026-04-09 12:03:41.141027: Epoch 697 +2026-04-09 12:03:41.143109: Current learning rate: 0.00341 +2026-04-09 12:05:22.424770: train_loss -0.4069 +2026-04-09 12:05:22.430555: val_loss -0.2579 +2026-04-09 12:05:22.432578: Pseudo dice [0.6332, 0.0, 0.6383, 0.0592, 0.4314, 0.0302, 0.5183] +2026-04-09 12:05:22.434141: Epoch time: 101.29 s +2026-04-09 12:05:23.619024: +2026-04-09 12:05:23.620591: Epoch 698 +2026-04-09 12:05:23.622271: Current learning rate: 0.0034 +2026-04-09 12:07:04.882278: train_loss -0.4273 +2026-04-09 12:07:04.890059: val_loss -0.3227 +2026-04-09 12:07:04.903003: Pseudo dice [0.4811, 0.0, 0.6027, 0.5637, 0.3952, 0.1938, 0.4061] +2026-04-09 12:07:04.904976: Epoch time: 101.27 s +2026-04-09 12:07:06.089523: +2026-04-09 12:07:06.091767: Epoch 699 +2026-04-09 12:07:06.094652: Current learning rate: 0.00339 +2026-04-09 12:08:48.071087: train_loss -0.4311 +2026-04-09 12:08:48.077407: val_loss -0.38 +2026-04-09 12:08:48.079693: Pseudo dice [0.6248, 0.0, 0.61, 0.4329, 0.5713, 0.4389, 0.6321] +2026-04-09 12:08:48.082011: Epoch time: 101.98 s +2026-04-09 12:08:50.888201: +2026-04-09 12:08:50.890476: Epoch 700 +2026-04-09 12:08:50.892104: Current learning rate: 0.00338 +2026-04-09 12:10:32.580942: train_loss -0.4229 +2026-04-09 12:10:32.586279: val_loss -0.358 +2026-04-09 12:10:32.588168: Pseudo dice [0.3174, 0.0, 0.639, 0.7871, 0.4887, 
0.1473, 0.7788] +2026-04-09 12:10:32.591110: Epoch time: 101.7 s +2026-04-09 12:10:33.734149: +2026-04-09 12:10:33.737025: Epoch 701 +2026-04-09 12:10:33.740055: Current learning rate: 0.00337 +2026-04-09 12:12:15.331264: train_loss -0.4311 +2026-04-09 12:12:15.338111: val_loss -0.3409 +2026-04-09 12:12:15.342196: Pseudo dice [0.4192, 0.0, 0.4957, 0.3161, 0.3447, 0.0647, 0.8067] +2026-04-09 12:12:15.345138: Epoch time: 101.6 s +2026-04-09 12:12:16.480270: +2026-04-09 12:12:16.482225: Epoch 702 +2026-04-09 12:12:16.483955: Current learning rate: 0.00336 +2026-04-09 12:13:58.332711: train_loss -0.4229 +2026-04-09 12:13:58.339538: val_loss -0.3762 +2026-04-09 12:13:58.342554: Pseudo dice [0.3475, 0.0, 0.5479, 0.544, 0.3564, 0.1676, 0.7165] +2026-04-09 12:13:58.344675: Epoch time: 101.86 s +2026-04-09 12:13:59.464731: +2026-04-09 12:13:59.466469: Epoch 703 +2026-04-09 12:13:59.468215: Current learning rate: 0.00335 +2026-04-09 12:15:40.934096: train_loss -0.4203 +2026-04-09 12:15:40.940286: val_loss -0.3199 +2026-04-09 12:15:40.943246: Pseudo dice [0.0721, 0.0, 0.6842, 0.2931, 0.6109, 0.2931, 0.704] +2026-04-09 12:15:40.945188: Epoch time: 101.47 s +2026-04-09 12:15:42.098640: +2026-04-09 12:15:42.100655: Epoch 704 +2026-04-09 12:15:42.102952: Current learning rate: 0.00334 +2026-04-09 12:17:24.737628: train_loss -0.4266 +2026-04-09 12:17:24.743306: val_loss -0.2834 +2026-04-09 12:17:24.745924: Pseudo dice [0.1162, 0.0, 0.7206, 0.49, 0.4778, 0.0773, 0.7849] +2026-04-09 12:17:24.748308: Epoch time: 102.64 s +2026-04-09 12:17:25.887369: +2026-04-09 12:17:25.889884: Epoch 705 +2026-04-09 12:17:25.891972: Current learning rate: 0.00333 +2026-04-09 12:19:07.205977: train_loss -0.4242 +2026-04-09 12:19:07.211893: val_loss -0.2812 +2026-04-09 12:19:07.215182: Pseudo dice [0.3488, 0.0, 0.5538, 0.4461, 0.2491, 0.0056, 0.344] +2026-04-09 12:19:07.217160: Epoch time: 101.32 s +2026-04-09 12:19:08.364158: +2026-04-09 12:19:08.366837: Epoch 706 +2026-04-09 12:19:08.368566: Current 
learning rate: 0.00332 +2026-04-09 12:20:49.852801: train_loss -0.4072 +2026-04-09 12:20:49.857852: val_loss -0.2302 +2026-04-09 12:20:49.861299: Pseudo dice [0.1772, 0.0, 0.4925, 0.4437, 0.5734, 0.0345, 0.4665] +2026-04-09 12:20:49.863784: Epoch time: 101.49 s +2026-04-09 12:20:50.979036: +2026-04-09 12:20:50.980705: Epoch 707 +2026-04-09 12:20:50.982283: Current learning rate: 0.00331 +2026-04-09 12:22:33.170109: train_loss -0.3852 +2026-04-09 12:22:33.181056: val_loss -0.3421 +2026-04-09 12:22:33.183686: Pseudo dice [0.0711, 0.0, 0.6398, 0.6644, 0.5883, 0.1808, 0.6982] +2026-04-09 12:22:33.187088: Epoch time: 102.19 s +2026-04-09 12:22:34.430986: +2026-04-09 12:22:34.432926: Epoch 708 +2026-04-09 12:22:34.434870: Current learning rate: 0.0033 +2026-04-09 12:24:16.666246: train_loss -0.4018 +2026-04-09 12:24:16.671492: val_loss -0.3616 +2026-04-09 12:24:16.673357: Pseudo dice [0.5262, 0.0, 0.5456, 0.1528, 0.3094, 0.2262, 0.5405] +2026-04-09 12:24:16.675007: Epoch time: 102.24 s +2026-04-09 12:24:17.851507: +2026-04-09 12:24:17.853397: Epoch 709 +2026-04-09 12:24:17.855704: Current learning rate: 0.00329 +2026-04-09 12:25:59.011946: train_loss -0.4208 +2026-04-09 12:25:59.018160: val_loss -0.418 +2026-04-09 12:25:59.020368: Pseudo dice [0.5056, 0.0, 0.4923, 0.3199, 0.5455, 0.6561, 0.6124] +2026-04-09 12:25:59.023047: Epoch time: 101.16 s +2026-04-09 12:26:00.143588: +2026-04-09 12:26:00.146627: Epoch 710 +2026-04-09 12:26:00.148633: Current learning rate: 0.00328 +2026-04-09 12:27:41.767073: train_loss -0.3949 +2026-04-09 12:27:41.772425: val_loss -0.3875 +2026-04-09 12:27:41.775041: Pseudo dice [0.1173, 0.0, 0.625, 0.2491, 0.495, 0.2851, 0.6085] +2026-04-09 12:27:41.778094: Epoch time: 101.63 s +2026-04-09 12:27:42.957045: +2026-04-09 12:27:42.960026: Epoch 711 +2026-04-09 12:27:42.962148: Current learning rate: 0.00327 +2026-04-09 12:29:24.418003: train_loss -0.4222 +2026-04-09 12:29:24.423429: val_loss -0.4041 +2026-04-09 12:29:24.424952: Pseudo dice [0.3323, 
0.0, 0.6571, 0.0115, 0.5239, 0.6317, 0.8919] +2026-04-09 12:29:24.427389: Epoch time: 101.46 s +2026-04-09 12:29:25.997805: +2026-04-09 12:29:25.999639: Epoch 712 +2026-04-09 12:29:26.002087: Current learning rate: 0.00326 +2026-04-09 12:31:09.469521: train_loss -0.4234 +2026-04-09 12:31:09.475301: val_loss -0.29 +2026-04-09 12:31:09.478142: Pseudo dice [0.6057, 0.0, 0.5335, 0.363, 0.3594, 0.0126, 0.7767] +2026-04-09 12:31:09.480005: Epoch time: 103.47 s +2026-04-09 12:31:10.656018: +2026-04-09 12:31:10.657966: Epoch 713 +2026-04-09 12:31:10.660093: Current learning rate: 0.00325 +2026-04-09 12:32:52.996511: train_loss -0.3935 +2026-04-09 12:32:53.002054: val_loss -0.3962 +2026-04-09 12:32:53.004549: Pseudo dice [0.3469, 0.0, 0.654, 0.6764, 0.4182, 0.7158, 0.2499] +2026-04-09 12:32:53.006447: Epoch time: 102.34 s +2026-04-09 12:32:54.200245: +2026-04-09 12:32:54.202229: Epoch 714 +2026-04-09 12:32:54.205084: Current learning rate: 0.00324 +2026-04-09 12:34:35.907931: train_loss -0.4315 +2026-04-09 12:34:35.915377: val_loss -0.3537 +2026-04-09 12:34:35.918149: Pseudo dice [0.4268, 0.0, 0.6141, 0.304, 0.5829, 0.0472, 0.4892] +2026-04-09 12:34:35.920170: Epoch time: 101.71 s +2026-04-09 12:34:37.094541: +2026-04-09 12:34:37.100770: Epoch 715 +2026-04-09 12:34:37.102603: Current learning rate: 0.00323 +2026-04-09 12:36:19.148710: train_loss -0.4031 +2026-04-09 12:36:19.153231: val_loss -0.3791 +2026-04-09 12:36:19.155441: Pseudo dice [0.6232, 0.0, 0.5518, 0.5674, 0.6301, 0.0395, 0.7987] +2026-04-09 12:36:19.158065: Epoch time: 102.06 s +2026-04-09 12:36:20.331937: +2026-04-09 12:36:20.333801: Epoch 716 +2026-04-09 12:36:20.335606: Current learning rate: 0.00322 +2026-04-09 12:38:01.698613: train_loss -0.4294 +2026-04-09 12:38:01.706862: val_loss -0.1851 +2026-04-09 12:38:01.710305: Pseudo dice [0.4475, 0.0, 0.5569, 0.2176, 0.5001, 0.0201, 0.6679] +2026-04-09 12:38:01.712884: Epoch time: 101.37 s +2026-04-09 12:38:02.870758: +2026-04-09 12:38:02.873802: Epoch 717 
+2026-04-09 12:38:02.876134: Current learning rate: 0.00321 +2026-04-09 12:39:46.097905: train_loss -0.4329 +2026-04-09 12:39:46.111839: val_loss -0.3913 +2026-04-09 12:39:46.115326: Pseudo dice [0.8517, 0.0, 0.6391, 0.5183, 0.1204, 0.5582, 0.2157] +2026-04-09 12:39:46.119241: Epoch time: 103.23 s +2026-04-09 12:39:47.298537: +2026-04-09 12:39:47.301902: Epoch 718 +2026-04-09 12:39:47.306877: Current learning rate: 0.0032 +2026-04-09 12:41:29.545750: train_loss -0.4397 +2026-04-09 12:41:29.551224: val_loss -0.3239 +2026-04-09 12:41:29.555766: Pseudo dice [0.7025, 0.0, 0.6345, 0.5524, 0.2886, 0.0574, 0.5245] +2026-04-09 12:41:29.559615: Epoch time: 102.25 s +2026-04-09 12:41:30.730993: +2026-04-09 12:41:30.733665: Epoch 719 +2026-04-09 12:41:30.736011: Current learning rate: 0.00319 +2026-04-09 12:43:13.349440: train_loss -0.4432 +2026-04-09 12:43:13.354072: val_loss -0.3619 +2026-04-09 12:43:13.356003: Pseudo dice [0.726, 0.0, 0.5745, 0.682, 0.3297, 0.0199, 0.5464] +2026-04-09 12:43:13.358064: Epoch time: 102.62 s +2026-04-09 12:43:14.530302: +2026-04-09 12:43:14.532813: Epoch 720 +2026-04-09 12:43:14.535350: Current learning rate: 0.00318 +2026-04-09 12:44:57.186377: train_loss -0.4273 +2026-04-09 12:44:57.199723: val_loss -0.3813 +2026-04-09 12:44:57.204482: Pseudo dice [0.5221, 0.0, 0.6094, 0.5406, 0.3224, 0.0331, 0.865] +2026-04-09 12:44:57.209848: Epoch time: 102.66 s +2026-04-09 12:44:58.408315: +2026-04-09 12:44:58.416536: Epoch 721 +2026-04-09 12:44:58.422004: Current learning rate: 0.00317 +2026-04-09 12:46:39.904141: train_loss -0.4435 +2026-04-09 12:46:39.914253: val_loss -0.2796 +2026-04-09 12:46:39.916239: Pseudo dice [0.0007, 0.0, 0.628, 0.5402, 0.389, 0.0578, 0.4639] +2026-04-09 12:46:39.919290: Epoch time: 101.5 s +2026-04-09 12:46:41.296802: +2026-04-09 12:46:41.299874: Epoch 722 +2026-04-09 12:46:41.301804: Current learning rate: 0.00316 +2026-04-09 12:48:22.931769: train_loss -0.434 +2026-04-09 12:48:22.939819: val_loss -0.4292 +2026-04-09 
12:48:22.941934: Pseudo dice [0.3998, 0.0, 0.795, 0.1661, 0.4255, 0.5004, 0.6704] +2026-04-09 12:48:22.944190: Epoch time: 101.64 s +2026-04-09 12:48:24.129468: +2026-04-09 12:48:24.133294: Epoch 723 +2026-04-09 12:48:24.135435: Current learning rate: 0.00315 +2026-04-09 12:50:06.656977: train_loss -0.434 +2026-04-09 12:50:06.662739: val_loss -0.3883 +2026-04-09 12:50:06.664782: Pseudo dice [0.3691, 0.0, 0.5052, 0.2474, 0.4722, 0.4751, 0.6681] +2026-04-09 12:50:06.667219: Epoch time: 102.53 s +2026-04-09 12:50:08.842262: +2026-04-09 12:50:08.844058: Epoch 724 +2026-04-09 12:50:08.845794: Current learning rate: 0.00314 +2026-04-09 12:51:50.965907: train_loss -0.4206 +2026-04-09 12:51:50.974321: val_loss -0.4009 +2026-04-09 12:51:50.978435: Pseudo dice [0.573, 0.0, 0.6444, 0.5444, 0.5228, 0.2128, 0.5225] +2026-04-09 12:51:50.981227: Epoch time: 102.13 s +2026-04-09 12:51:52.167920: +2026-04-09 12:51:52.170099: Epoch 725 +2026-04-09 12:51:52.171887: Current learning rate: 0.00313 +2026-04-09 12:53:34.867081: train_loss -0.4281 +2026-04-09 12:53:34.874736: val_loss -0.4265 +2026-04-09 12:53:34.876744: Pseudo dice [0.6528, 0.0, 0.7899, 0.4725, 0.627, 0.096, 0.5746] +2026-04-09 12:53:34.878962: Epoch time: 102.7 s +2026-04-09 12:53:36.072652: +2026-04-09 12:53:36.074779: Epoch 726 +2026-04-09 12:53:36.077765: Current learning rate: 0.00312 +2026-04-09 12:55:18.790226: train_loss -0.4365 +2026-04-09 12:55:18.795570: val_loss -0.3837 +2026-04-09 12:55:18.798023: Pseudo dice [0.0005, 0.0, 0.5546, 0.6571, 0.3554, 0.6437, 0.6566] +2026-04-09 12:55:18.800077: Epoch time: 102.72 s +2026-04-09 12:55:19.973854: +2026-04-09 12:55:19.976366: Epoch 727 +2026-04-09 12:55:19.979835: Current learning rate: 0.00311 +2026-04-09 12:57:03.112424: train_loss -0.4313 +2026-04-09 12:57:03.117815: val_loss -0.429 +2026-04-09 12:57:03.120425: Pseudo dice [0.0375, 0.0, 0.6164, 0.009, 0.4835, 0.6936, 0.6587] +2026-04-09 12:57:03.122969: Epoch time: 103.14 s +2026-04-09 12:57:04.334307: 
+2026-04-09 12:57:04.337099: Epoch 728 +2026-04-09 12:57:04.339845: Current learning rate: 0.0031 +2026-04-09 12:58:45.899748: train_loss -0.4509 +2026-04-09 12:58:45.904833: val_loss -0.3543 +2026-04-09 12:58:45.907027: Pseudo dice [0.6738, 0.0, 0.8317, 0.3536, 0.4228, 0.0576, 0.6753] +2026-04-09 12:58:45.909470: Epoch time: 101.57 s +2026-04-09 12:58:47.040602: +2026-04-09 12:58:47.042697: Epoch 729 +2026-04-09 12:58:47.044832: Current learning rate: 0.00309 +2026-04-09 13:00:28.859566: train_loss -0.4472 +2026-04-09 13:00:28.866070: val_loss -0.3539 +2026-04-09 13:00:28.868709: Pseudo dice [0.0, 0.0, 0.717, 0.3904, 0.4603, 0.0898, 0.5681] +2026-04-09 13:00:28.871515: Epoch time: 101.82 s +2026-04-09 13:00:30.014911: +2026-04-09 13:00:30.018389: Epoch 730 +2026-04-09 13:00:30.021585: Current learning rate: 0.00308 +2026-04-09 13:02:12.245427: train_loss -0.4161 +2026-04-09 13:02:12.253473: val_loss -0.3272 +2026-04-09 13:02:12.256419: Pseudo dice [0.0749, 0.0, 0.4796, 0.0116, 0.3587, 0.0468, 0.7595] +2026-04-09 13:02:12.259423: Epoch time: 102.23 s +2026-04-09 13:02:13.387469: +2026-04-09 13:02:13.389652: Epoch 731 +2026-04-09 13:02:13.392254: Current learning rate: 0.00307 +2026-04-09 13:03:54.936270: train_loss -0.419 +2026-04-09 13:03:54.941738: val_loss -0.2993 +2026-04-09 13:03:54.943390: Pseudo dice [0.381, 0.0, 0.7037, 0.624, 0.5795, 0.0625, 0.7888] +2026-04-09 13:03:54.945997: Epoch time: 101.55 s +2026-04-09 13:03:56.124550: +2026-04-09 13:03:56.127345: Epoch 732 +2026-04-09 13:03:56.129472: Current learning rate: 0.00306 +2026-04-09 13:05:39.096499: train_loss -0.4391 +2026-04-09 13:05:39.102513: val_loss -0.3926 +2026-04-09 13:05:39.105321: Pseudo dice [0.8744, 0.0, 0.6802, 0.6121, 0.2667, 0.3771, 0.6129] +2026-04-09 13:05:39.108615: Epoch time: 102.98 s +2026-04-09 13:05:40.252739: +2026-04-09 13:05:40.255535: Epoch 733 +2026-04-09 13:05:40.257797: Current learning rate: 0.00305 +2026-04-09 13:07:22.098515: train_loss -0.4398 +2026-04-09 
13:07:22.104986: val_loss -0.3665 +2026-04-09 13:07:22.107146: Pseudo dice [0.0493, 0.0, 0.781, 0.4893, 0.5837, 0.7695, 0.5634] +2026-04-09 13:07:22.110364: Epoch time: 101.85 s +2026-04-09 13:07:23.277093: +2026-04-09 13:07:23.279298: Epoch 734 +2026-04-09 13:07:23.280982: Current learning rate: 0.00304 +2026-04-09 13:09:05.873466: train_loss -0.4187 +2026-04-09 13:09:05.879907: val_loss -0.401 +2026-04-09 13:09:05.882259: Pseudo dice [0.5407, 0.0, 0.7048, 0.395, 0.4921, 0.5544, 0.4522] +2026-04-09 13:09:05.884019: Epoch time: 102.6 s +2026-04-09 13:09:07.035132: +2026-04-09 13:09:07.039237: Epoch 735 +2026-04-09 13:09:07.041238: Current learning rate: 0.00303 +2026-04-09 13:10:48.156935: train_loss -0.4374 +2026-04-09 13:10:48.163293: val_loss -0.4385 +2026-04-09 13:10:48.166274: Pseudo dice [0.6855, 0.0, 0.7242, 0.0061, 0.4112, 0.6916, 0.638] +2026-04-09 13:10:48.168340: Epoch time: 101.13 s +2026-04-09 13:10:49.304144: +2026-04-09 13:10:49.307987: Epoch 736 +2026-04-09 13:10:49.309851: Current learning rate: 0.00302 +2026-04-09 13:12:31.664437: train_loss -0.4327 +2026-04-09 13:12:31.684256: val_loss -0.3828 +2026-04-09 13:12:31.687412: Pseudo dice [0.3292, 0.0, 0.7587, 0.5581, 0.5359, 0.1358, 0.785] +2026-04-09 13:12:31.690022: Epoch time: 102.36 s +2026-04-09 13:12:32.831903: +2026-04-09 13:12:32.833979: Epoch 737 +2026-04-09 13:12:32.836359: Current learning rate: 0.00301 +2026-04-09 13:14:14.430727: train_loss -0.4654 +2026-04-09 13:14:14.439177: val_loss -0.3662 +2026-04-09 13:14:14.441494: Pseudo dice [0.0504, 0.0, 0.6099, 0.8961, 0.4677, 0.1749, 0.9185] +2026-04-09 13:14:14.443816: Epoch time: 101.6 s +2026-04-09 13:14:15.598927: +2026-04-09 13:14:15.601326: Epoch 738 +2026-04-09 13:14:15.603751: Current learning rate: 0.003 +2026-04-09 13:15:57.270383: train_loss -0.4434 +2026-04-09 13:15:57.276154: val_loss -0.2063 +2026-04-09 13:15:57.278241: Pseudo dice [0.0053, 0.0, 0.734, 0.4129, 0.4595, 0.0877, 0.5387] +2026-04-09 13:15:57.280607: Epoch time: 
101.67 s +2026-04-09 13:15:58.417967: +2026-04-09 13:15:58.419805: Epoch 739 +2026-04-09 13:15:58.421861: Current learning rate: 0.00299 +2026-04-09 13:17:40.447922: train_loss -0.4231 +2026-04-09 13:17:40.456757: val_loss -0.4363 +2026-04-09 13:17:40.459620: Pseudo dice [0.37, 0.0, 0.6368, 0.515, 0.6093, 0.7887, 0.7072] +2026-04-09 13:17:40.464480: Epoch time: 102.03 s +2026-04-09 13:17:41.616608: +2026-04-09 13:17:41.618999: Epoch 740 +2026-04-09 13:17:41.620828: Current learning rate: 0.00297 +2026-04-09 13:19:23.219365: train_loss -0.4526 +2026-04-09 13:19:23.225375: val_loss -0.4374 +2026-04-09 13:19:23.227809: Pseudo dice [0.3909, 0.0, 0.6769, 0.3347, 0.497, 0.6378, 0.9069] +2026-04-09 13:19:23.230483: Epoch time: 101.61 s +2026-04-09 13:19:24.374259: +2026-04-09 13:19:24.376269: Epoch 741 +2026-04-09 13:19:24.378346: Current learning rate: 0.00296 +2026-04-09 13:21:07.363371: train_loss -0.4544 +2026-04-09 13:21:07.374568: val_loss -0.413 +2026-04-09 13:21:07.376986: Pseudo dice [0.2927, 0.0, 0.6775, 0.7347, 0.3524, 0.6342, 0.4219] +2026-04-09 13:21:07.381768: Epoch time: 102.99 s +2026-04-09 13:21:08.517513: +2026-04-09 13:21:08.519359: Epoch 742 +2026-04-09 13:21:08.521288: Current learning rate: 0.00295 +2026-04-09 13:22:49.227294: train_loss -0.4506 +2026-04-09 13:22:49.232311: val_loss -0.3485 +2026-04-09 13:22:49.234721: Pseudo dice [0.2334, 0.0, 0.7208, 0.3426, 0.444, 0.0765, 0.2998] +2026-04-09 13:22:49.236542: Epoch time: 100.71 s +2026-04-09 13:22:50.370456: +2026-04-09 13:22:50.372222: Epoch 743 +2026-04-09 13:22:50.373953: Current learning rate: 0.00294 +2026-04-09 13:24:34.632995: train_loss -0.4492 +2026-04-09 13:24:34.639263: val_loss -0.3353 +2026-04-09 13:24:34.642111: Pseudo dice [0.5762, 0.0, 0.6627, 0.289, 0.3788, 0.0541, 0.516] +2026-04-09 13:24:34.645327: Epoch time: 104.27 s +2026-04-09 13:24:37.055802: +2026-04-09 13:24:37.058195: Epoch 744 +2026-04-09 13:24:37.060201: Current learning rate: 0.00293 +2026-04-09 13:26:24.406929: 
train_loss -0.4553 +2026-04-09 13:26:24.416592: val_loss -0.325 +2026-04-09 13:26:24.420635: Pseudo dice [0.2631, 0.0, 0.5607, 0.875, 0.6605, 0.1605, 0.1693] +2026-04-09 13:26:24.424882: Epoch time: 107.35 s +2026-04-09 13:26:25.797753: +2026-04-09 13:26:25.800276: Epoch 745 +2026-04-09 13:26:25.802405: Current learning rate: 0.00292 +2026-04-09 13:28:11.133845: train_loss -0.4409 +2026-04-09 13:28:11.139047: val_loss -0.3878 +2026-04-09 13:28:11.141458: Pseudo dice [0.5275, 0.0, 0.5532, 0.3911, 0.6589, 0.0192, 0.8707] +2026-04-09 13:28:11.143706: Epoch time: 105.34 s +2026-04-09 13:28:12.280981: +2026-04-09 13:28:12.286424: Epoch 746 +2026-04-09 13:28:12.288019: Current learning rate: 0.00291 +2026-04-09 13:30:01.171742: train_loss -0.4294 +2026-04-09 13:30:01.177802: val_loss -0.3512 +2026-04-09 13:30:01.180741: Pseudo dice [0.2528, 0.0, 0.6882, 0.0835, 0.5306, 0.0919, 0.738] +2026-04-09 13:30:01.183633: Epoch time: 108.89 s +2026-04-09 13:30:02.320517: +2026-04-09 13:30:02.323086: Epoch 747 +2026-04-09 13:30:02.325684: Current learning rate: 0.0029 +2026-04-09 13:31:54.317427: train_loss -0.4458 +2026-04-09 13:31:54.322591: val_loss -0.2346 +2026-04-09 13:31:54.324750: Pseudo dice [0.6611, 0.0, 0.3829, 0.0084, 0.3103, 0.0104, 0.4159] +2026-04-09 13:31:54.326585: Epoch time: 112.0 s +2026-04-09 13:31:55.484624: +2026-04-09 13:31:55.488485: Epoch 748 +2026-04-09 13:31:55.490289: Current learning rate: 0.00289 +2026-04-09 13:33:44.182203: train_loss -0.4195 +2026-04-09 13:33:44.188892: val_loss -0.3819 +2026-04-09 13:33:44.190969: Pseudo dice [0.8251, 0.0, 0.5778, 0.4041, 0.4609, 0.0279, 0.818] +2026-04-09 13:33:44.193177: Epoch time: 108.7 s +2026-04-09 13:33:45.329704: +2026-04-09 13:33:45.333615: Epoch 749 +2026-04-09 13:33:45.335808: Current learning rate: 0.00288 +2026-04-09 13:35:29.357265: train_loss -0.4412 +2026-04-09 13:35:29.365144: val_loss -0.3911 +2026-04-09 13:35:29.368108: Pseudo dice [0.4428, 0.0, 0.4784, 0.3833, 0.2562, 0.3376, 0.6935] +2026-04-09 
13:35:29.370536: Epoch time: 104.03 s +2026-04-09 13:35:32.317216: +2026-04-09 13:35:32.319690: Epoch 750 +2026-04-09 13:35:32.321504: Current learning rate: 0.00287 +2026-04-09 13:37:13.789520: train_loss -0.4192 +2026-04-09 13:37:13.797746: val_loss -0.0913 +2026-04-09 13:37:13.800760: Pseudo dice [0.5308, 0.0, 0.1861, 0.0228, 0.5629, 0.0041, 0.4315] +2026-04-09 13:37:13.804291: Epoch time: 101.48 s +2026-04-09 13:37:14.954806: +2026-04-09 13:37:14.965576: Epoch 751 +2026-04-09 13:37:14.968218: Current learning rate: 0.00286 +2026-04-09 13:40:36.549844: train_loss -0.4333 +2026-04-09 13:40:36.562627: val_loss -0.2837 +2026-04-09 13:40:36.564454: Pseudo dice [0.4901, 0.0, 0.706, 0.0045, 0.4429, 0.0329, 0.5083] +2026-04-09 13:40:36.574467: Epoch time: 201.6 s +2026-04-09 13:40:37.727914: +2026-04-09 13:40:37.729993: Epoch 752 +2026-04-09 13:40:37.732280: Current learning rate: 0.00285 +2026-04-09 13:42:46.045593: train_loss -0.4574 +2026-04-09 13:42:46.055831: val_loss -0.414 +2026-04-09 13:42:46.058441: Pseudo dice [0.4347, 0.0, 0.6011, 0.8259, 0.3956, 0.6177, 0.736] +2026-04-09 13:42:46.061265: Epoch time: 128.32 s +2026-04-09 13:42:47.275562: +2026-04-09 13:42:47.277792: Epoch 753 +2026-04-09 13:42:47.279377: Current learning rate: 0.00284 +2026-04-09 13:44:37.848238: train_loss -0.4295 +2026-04-09 13:44:37.855037: val_loss -0.4188 +2026-04-09 13:44:37.857427: Pseudo dice [0.228, 0.0, 0.61, 0.24, 0.4216, 0.6053, 0.636] +2026-04-09 13:44:37.860300: Epoch time: 110.58 s +2026-04-09 13:44:39.021533: +2026-04-09 13:44:39.023635: Epoch 754 +2026-04-09 13:44:39.025357: Current learning rate: 0.00283 +2026-04-09 13:47:00.848372: train_loss -0.4441 +2026-04-09 13:47:00.855830: val_loss -0.419 +2026-04-09 13:47:00.858623: Pseudo dice [0.2339, 0.0, 0.6589, 0.8209, 0.3055, 0.5615, 0.836] +2026-04-09 13:47:00.861918: Epoch time: 141.83 s +2026-04-09 13:47:02.021607: +2026-04-09 13:47:02.024210: Epoch 755 +2026-04-09 13:47:02.026786: Current learning rate: 0.00282 
+2026-04-09 13:48:52.539065: train_loss -0.4669 +2026-04-09 13:48:52.544562: val_loss -0.4052 +2026-04-09 13:48:52.546639: Pseudo dice [0.5384, 0.0, 0.7512, 0.3265, 0.6209, 0.1034, 0.7584] +2026-04-09 13:48:52.549230: Epoch time: 110.52 s +2026-04-09 13:48:53.691969: +2026-04-09 13:48:53.694036: Epoch 756 +2026-04-09 13:48:53.695943: Current learning rate: 0.00281 +2026-04-09 13:51:33.569997: train_loss -0.4507 +2026-04-09 13:51:33.577934: val_loss -0.3909 +2026-04-09 13:51:33.581929: Pseudo dice [0.3658, 0.0, 0.6878, 0.4724, 0.2752, 0.6551, 0.1229] +2026-04-09 13:51:33.584767: Epoch time: 159.88 s +2026-04-09 13:51:34.797309: +2026-04-09 13:51:34.799418: Epoch 757 +2026-04-09 13:51:34.801193: Current learning rate: 0.0028 +2026-04-09 13:53:29.357326: train_loss -0.4407 +2026-04-09 13:53:29.362853: val_loss -0.2211 +2026-04-09 13:53:29.365330: Pseudo dice [0.6963, 0.0, 0.7824, 0.0529, 0.2708, 0.0189, 0.5437] +2026-04-09 13:53:29.367479: Epoch time: 114.56 s +2026-04-09 13:53:30.533638: +2026-04-09 13:53:30.535522: Epoch 758 +2026-04-09 13:53:30.537524: Current learning rate: 0.00279 +2026-04-09 13:55:26.104853: train_loss -0.4608 +2026-04-09 13:55:26.112524: val_loss -0.3081 +2026-04-09 13:55:26.114333: Pseudo dice [0.3702, 0.0, 0.7607, 0.6159, 0.6003, 0.0516, 0.6937] +2026-04-09 13:55:26.118108: Epoch time: 115.57 s +2026-04-09 13:55:27.270860: +2026-04-09 13:55:27.275391: Epoch 759 +2026-04-09 13:55:27.278160: Current learning rate: 0.00278 +2026-04-09 13:57:08.148313: train_loss -0.4686 +2026-04-09 13:57:08.154720: val_loss -0.3917 +2026-04-09 13:57:08.157546: Pseudo dice [0.3802, 0.0, 0.6253, 0.0335, 0.5563, 0.093, 0.816] +2026-04-09 13:57:08.160257: Epoch time: 100.88 s +2026-04-09 13:57:09.305581: +2026-04-09 13:57:09.308508: Epoch 760 +2026-04-09 13:57:09.310481: Current learning rate: 0.00277 +2026-04-09 13:58:50.884893: train_loss -0.4168 +2026-04-09 13:58:50.890705: val_loss -0.3564 +2026-04-09 13:58:50.893891: Pseudo dice [0.7475, 0.0, 0.6789, 0.465, 
0.4255, 0.0419, 0.8594] +2026-04-09 13:58:50.896109: Epoch time: 101.58 s +2026-04-09 13:58:52.054118: +2026-04-09 13:58:52.055969: Epoch 761 +2026-04-09 13:58:52.057544: Current learning rate: 0.00276 +2026-04-09 14:00:32.791644: train_loss -0.4399 +2026-04-09 14:00:32.797419: val_loss -0.3396 +2026-04-09 14:00:32.799963: Pseudo dice [0.6206, 0.0, 0.6635, 0.2866, 0.4943, 0.1074, 0.7101] +2026-04-09 14:00:32.802263: Epoch time: 100.74 s +2026-04-09 14:00:33.956924: +2026-04-09 14:00:33.958972: Epoch 762 +2026-04-09 14:00:33.960613: Current learning rate: 0.00275 +2026-04-09 14:02:15.383448: train_loss -0.4617 +2026-04-09 14:02:15.390098: val_loss -0.4174 +2026-04-09 14:02:15.392276: Pseudo dice [0.3242, 0.0, 0.502, 0.0055, 0.5577, 0.7596, 0.8047] +2026-04-09 14:02:15.394965: Epoch time: 101.43 s +2026-04-09 14:02:17.571850: +2026-04-09 14:02:17.574228: Epoch 763 +2026-04-09 14:02:17.576283: Current learning rate: 0.00274 +2026-04-09 14:03:59.050487: train_loss -0.441 +2026-04-09 14:03:59.058798: val_loss -0.3396 +2026-04-09 14:03:59.062040: Pseudo dice [0.5892, 0.0, 0.5293, 0.6397, 0.3663, 0.0449, 0.748] +2026-04-09 14:03:59.065279: Epoch time: 101.48 s +2026-04-09 14:04:00.240839: +2026-04-09 14:04:00.242834: Epoch 764 +2026-04-09 14:04:00.244594: Current learning rate: 0.00273 +2026-04-09 14:05:41.814558: train_loss -0.4666 +2026-04-09 14:05:41.825252: val_loss -0.3424 +2026-04-09 14:05:41.829717: Pseudo dice [0.3679, 0.0, 0.5257, 0.0017, 0.4507, 0.1925, 0.8035] +2026-04-09 14:05:41.832499: Epoch time: 101.58 s +2026-04-09 14:05:43.009600: +2026-04-09 14:05:43.011279: Epoch 765 +2026-04-09 14:05:43.013846: Current learning rate: 0.00272 +2026-04-09 14:07:24.152974: train_loss -0.4531 +2026-04-09 14:07:24.158981: val_loss -0.3873 +2026-04-09 14:07:24.160864: Pseudo dice [0.7886, 0.0, 0.605, 0.2682, 0.5175, 0.1318, 0.8026] +2026-04-09 14:07:24.163177: Epoch time: 101.15 s +2026-04-09 14:07:25.347937: +2026-04-09 14:07:25.349694: Epoch 766 +2026-04-09 
14:07:25.351114: Current learning rate: 0.00271 +2026-04-09 14:09:06.041269: train_loss -0.4602 +2026-04-09 14:09:06.048987: val_loss -0.2208 +2026-04-09 14:09:06.051541: Pseudo dice [0.3316, 0.0, 0.7276, 0.6161, 0.5085, 0.0257, 0.8791] +2026-04-09 14:09:06.053397: Epoch time: 100.7 s +2026-04-09 14:09:07.208468: +2026-04-09 14:09:07.210409: Epoch 767 +2026-04-09 14:09:07.212447: Current learning rate: 0.0027 +2026-04-09 14:10:47.823616: train_loss -0.4679 +2026-04-09 14:10:47.832532: val_loss -0.3557 +2026-04-09 14:10:47.834526: Pseudo dice [0.6737, 0.0, 0.5576, 0.7054, 0.5661, 0.0504, 0.3215] +2026-04-09 14:10:47.837817: Epoch time: 100.62 s +2026-04-09 14:10:49.023714: +2026-04-09 14:10:49.026523: Epoch 768 +2026-04-09 14:10:49.028587: Current learning rate: 0.00268 +2026-04-09 14:12:30.744620: train_loss -0.4561 +2026-04-09 14:12:30.752922: val_loss -0.3133 +2026-04-09 14:12:30.755275: Pseudo dice [0.6835, 0.0, 0.5854, 0.6675, 0.3057, 0.0155, 0.5012] +2026-04-09 14:12:30.757681: Epoch time: 101.72 s +2026-04-09 14:12:31.926726: +2026-04-09 14:12:31.929122: Epoch 769 +2026-04-09 14:12:31.932449: Current learning rate: 0.00267 +2026-04-09 14:14:12.363306: train_loss -0.4585 +2026-04-09 14:14:12.373575: val_loss -0.3862 +2026-04-09 14:14:12.375738: Pseudo dice [0.5002, 0.0, 0.6846, 0.8591, 0.5401, 0.1154, 0.5431] +2026-04-09 14:14:12.378033: Epoch time: 100.44 s +2026-04-09 14:14:13.558521: +2026-04-09 14:14:13.560165: Epoch 770 +2026-04-09 14:14:13.561651: Current learning rate: 0.00266 +2026-04-09 14:15:54.745875: train_loss -0.4218 +2026-04-09 14:15:54.756089: val_loss -0.1567 +2026-04-09 14:15:54.759224: Pseudo dice [0.1356, 0.0, 0.4887, 0.416, 0.4873, 0.0014, 0.8168] +2026-04-09 14:15:54.761513: Epoch time: 101.19 s +2026-04-09 14:15:55.937807: +2026-04-09 14:15:55.940655: Epoch 771 +2026-04-09 14:15:55.942445: Current learning rate: 0.00265 +2026-04-09 14:17:36.634253: train_loss -0.4253 +2026-04-09 14:17:36.647615: val_loss -0.4276 +2026-04-09 
14:17:36.649624: Pseudo dice [0.6245, 0.0, 0.7587, 0.3609, 0.5613, 0.7268, 0.6364] +2026-04-09 14:17:36.652301: Epoch time: 100.7 s +2026-04-09 14:17:37.826226: +2026-04-09 14:17:37.827696: Epoch 772 +2026-04-09 14:17:37.828993: Current learning rate: 0.00264 +2026-04-09 14:19:19.102790: train_loss -0.4396 +2026-04-09 14:19:19.112313: val_loss -0.2793 +2026-04-09 14:19:19.114053: Pseudo dice [0.327, 0.0, 0.5603, 0.0241, 0.3034, 0.0651, 0.678] +2026-04-09 14:19:19.117269: Epoch time: 101.28 s +2026-04-09 14:19:20.304475: +2026-04-09 14:19:20.306442: Epoch 773 +2026-04-09 14:19:20.308800: Current learning rate: 0.00263 +2026-04-09 14:21:01.221862: train_loss -0.4229 +2026-04-09 14:21:01.233080: val_loss -0.3783 +2026-04-09 14:21:01.235425: Pseudo dice [0.5129, 0.0, 0.6467, 0.6321, 0.581, 0.071, 0.8165] +2026-04-09 14:21:01.237820: Epoch time: 100.92 s +2026-04-09 14:21:02.420838: +2026-04-09 14:21:02.422657: Epoch 774 +2026-04-09 14:21:02.424441: Current learning rate: 0.00262 +2026-04-09 14:22:43.288269: train_loss -0.4608 +2026-04-09 14:22:43.295667: val_loss -0.3989 +2026-04-09 14:22:43.297559: Pseudo dice [0.409, 0.0, 0.6345, 0.1656, 0.5739, 0.2604, 0.8335] +2026-04-09 14:22:43.299953: Epoch time: 100.87 s +2026-04-09 14:22:44.496470: +2026-04-09 14:22:44.498122: Epoch 775 +2026-04-09 14:22:44.499639: Current learning rate: 0.00261 +2026-04-09 14:24:25.393377: train_loss -0.4662 +2026-04-09 14:24:25.402146: val_loss -0.3898 +2026-04-09 14:24:25.404948: Pseudo dice [0.7988, 0.0, 0.7359, 0.6252, 0.4184, 0.0836, 0.7375] +2026-04-09 14:24:25.408474: Epoch time: 100.9 s +2026-04-09 14:24:26.591616: +2026-04-09 14:24:26.593656: Epoch 776 +2026-04-09 14:24:26.595547: Current learning rate: 0.0026 +2026-04-09 14:26:07.252946: train_loss -0.4508 +2026-04-09 14:26:07.260412: val_loss -0.3772 +2026-04-09 14:26:07.262417: Pseudo dice [0.1174, 0.0, 0.7655, 0.0108, 0.4184, 0.1257, 0.7872] +2026-04-09 14:26:07.265384: Epoch time: 100.66 s +2026-04-09 14:26:08.445740: 
+2026-04-09 14:26:08.448039: Epoch 777 +2026-04-09 14:26:08.449515: Current learning rate: 0.00259 +2026-04-09 14:27:49.809198: train_loss -0.4519 +2026-04-09 14:27:49.815691: val_loss -0.2851 +2026-04-09 14:27:49.818172: Pseudo dice [0.4519, 0.0, 0.5714, 0.6436, 0.3748, 0.1774, 0.7037] +2026-04-09 14:27:49.819941: Epoch time: 101.37 s +2026-04-09 14:27:50.990301: +2026-04-09 14:27:50.992143: Epoch 778 +2026-04-09 14:27:50.993764: Current learning rate: 0.00258 +2026-04-09 14:29:31.576495: train_loss -0.4463 +2026-04-09 14:29:31.581945: val_loss -0.3243 +2026-04-09 14:29:31.583683: Pseudo dice [0.3569, 0.0, 0.5426, 0.556, 0.4009, 0.0303, 0.6677] +2026-04-09 14:29:31.586201: Epoch time: 100.59 s +2026-04-09 14:29:32.753706: +2026-04-09 14:29:32.755443: Epoch 779 +2026-04-09 14:29:32.756874: Current learning rate: 0.00257 +2026-04-09 14:31:13.484845: train_loss -0.4538 +2026-04-09 14:31:13.492865: val_loss -0.3383 +2026-04-09 14:31:13.495109: Pseudo dice [0.5178, 0.0, 0.642, 0.5568, 0.6145, 0.1467, 0.7701] +2026-04-09 14:31:13.498527: Epoch time: 100.73 s +2026-04-09 14:31:14.673019: +2026-04-09 14:31:14.675429: Epoch 780 +2026-04-09 14:31:14.677732: Current learning rate: 0.00256 +2026-04-09 14:32:55.226087: train_loss -0.4596 +2026-04-09 14:32:55.232150: val_loss -0.3271 +2026-04-09 14:32:55.233874: Pseudo dice [0.7618, 0.0, 0.5122, 0.5977, 0.4675, 0.0079, 0.8259] +2026-04-09 14:32:55.235871: Epoch time: 100.56 s +2026-04-09 14:32:56.407680: +2026-04-09 14:32:56.409443: Epoch 781 +2026-04-09 14:32:56.411219: Current learning rate: 0.00255 +2026-04-09 14:34:37.136747: train_loss -0.4665 +2026-04-09 14:34:37.146159: val_loss -0.4187 +2026-04-09 14:34:37.148922: Pseudo dice [0.5914, 0.0, 0.661, 0.2937, 0.5355, 0.5932, 0.7791] +2026-04-09 14:34:37.151593: Epoch time: 100.73 s +2026-04-09 14:34:38.302454: +2026-04-09 14:34:38.305165: Epoch 782 +2026-04-09 14:34:38.309736: Current learning rate: 0.00254 +2026-04-09 14:36:20.954182: train_loss -0.4631 +2026-04-09 
14:36:20.961421: val_loss -0.4547 +2026-04-09 14:36:20.963596: Pseudo dice [0.4399, 0.0, 0.599, 0.7367, 0.6578, 0.7161, 0.7736] +2026-04-09 14:36:20.966620: Epoch time: 102.65 s +2026-04-09 14:36:22.127551: +2026-04-09 14:36:22.129811: Epoch 783 +2026-04-09 14:36:22.132548: Current learning rate: 0.00253 +2026-04-09 14:38:03.961184: train_loss -0.4646 +2026-04-09 14:38:03.968742: val_loss -0.4497 +2026-04-09 14:38:03.971376: Pseudo dice [0.722, 0.0, 0.6108, 0.8453, 0.548, 0.662, 0.8996] +2026-04-09 14:38:03.973656: Epoch time: 101.84 s +2026-04-09 14:38:05.145443: +2026-04-09 14:38:05.147356: Epoch 784 +2026-04-09 14:38:05.149855: Current learning rate: 0.00252 +2026-04-09 14:39:46.720201: train_loss -0.4595 +2026-04-09 14:39:46.727596: val_loss -0.4135 +2026-04-09 14:39:46.729822: Pseudo dice [0.2958, 0.0, 0.5782, 0.7421, 0.3374, 0.0624, 0.8706] +2026-04-09 14:39:46.731811: Epoch time: 101.58 s +2026-04-09 14:39:47.904081: +2026-04-09 14:39:47.906180: Epoch 785 +2026-04-09 14:39:47.908109: Current learning rate: 0.00251 +2026-04-09 14:41:28.526575: train_loss -0.4614 +2026-04-09 14:41:28.535549: val_loss -0.4583 +2026-04-09 14:41:28.538171: Pseudo dice [0.3799, 0.0, 0.6909, 0.8667, 0.546, 0.7692, 0.7158] +2026-04-09 14:41:28.539905: Epoch time: 100.63 s +2026-04-09 14:41:29.714746: +2026-04-09 14:41:29.717740: Epoch 786 +2026-04-09 14:41:29.720260: Current learning rate: 0.0025 +2026-04-09 14:43:11.280855: train_loss -0.4762 +2026-04-09 14:43:11.289511: val_loss -0.2717 +2026-04-09 14:43:11.291752: Pseudo dice [0.7637, 0.0, 0.5465, 0.4811, 0.3816, 0.0789, 0.7891] +2026-04-09 14:43:11.297740: Epoch time: 101.57 s +2026-04-09 14:43:12.457666: +2026-04-09 14:43:12.460552: Epoch 787 +2026-04-09 14:43:12.462639: Current learning rate: 0.00249 +2026-04-09 14:44:53.358224: train_loss -0.4614 +2026-04-09 14:44:53.367307: val_loss -0.4435 +2026-04-09 14:44:53.369623: Pseudo dice [0.8415, 0.0, 0.7395, 0.3317, 0.5537, 0.861, 0.869] +2026-04-09 14:44:53.371844: Epoch time: 
100.9 s +2026-04-09 14:44:54.554665: +2026-04-09 14:44:54.557326: Epoch 788 +2026-04-09 14:44:54.559410: Current learning rate: 0.00248 +2026-04-09 14:46:36.252595: train_loss -0.4383 +2026-04-09 14:46:36.262648: val_loss -0.2632 +2026-04-09 14:46:36.265421: Pseudo dice [0.3658, 0.0, 0.5739, 0.3668, 0.3151, 0.0555, 0.5814] +2026-04-09 14:46:36.267732: Epoch time: 101.7 s +2026-04-09 14:46:37.433211: +2026-04-09 14:46:37.443309: Epoch 789 +2026-04-09 14:46:37.445370: Current learning rate: 0.00247 +2026-04-09 14:48:18.103136: train_loss -0.451 +2026-04-09 14:48:18.111065: val_loss -0.3268 +2026-04-09 14:48:18.113874: Pseudo dice [0.2022, 0.0, 0.3527, 0.09, 0.5173, 0.0515, 0.7343] +2026-04-09 14:48:18.117053: Epoch time: 100.67 s +2026-04-09 14:48:19.300805: +2026-04-09 14:48:19.302423: Epoch 790 +2026-04-09 14:48:19.304127: Current learning rate: 0.00245 +2026-04-09 14:50:00.334807: train_loss -0.4621 +2026-04-09 14:50:00.340757: val_loss -0.2543 +2026-04-09 14:50:00.342394: Pseudo dice [0.6427, 0.0, 0.6413, 0.1796, 0.5558, 0.0316, 0.8111] +2026-04-09 14:50:00.344411: Epoch time: 101.04 s +2026-04-09 14:50:01.520017: +2026-04-09 14:50:01.521830: Epoch 791 +2026-04-09 14:50:01.523393: Current learning rate: 0.00244 +2026-04-09 14:51:43.206191: train_loss -0.4336 +2026-04-09 14:51:43.213862: val_loss -0.4324 +2026-04-09 14:51:43.216347: Pseudo dice [0.4529, 0.0, 0.4543, 0.6348, 0.5279, 0.5785, 0.8315] +2026-04-09 14:51:43.218503: Epoch time: 101.69 s +2026-04-09 14:51:44.397979: +2026-04-09 14:51:44.399601: Epoch 792 +2026-04-09 14:51:44.401569: Current learning rate: 0.00243 +2026-04-09 14:53:25.615429: train_loss -0.4587 +2026-04-09 14:53:25.621855: val_loss -0.3622 +2026-04-09 14:53:25.623794: Pseudo dice [0.5896, 0.0, 0.7106, 0.6839, 0.5654, 0.1639, 0.8378] +2026-04-09 14:53:25.626079: Epoch time: 101.22 s +2026-04-09 14:53:26.787574: +2026-04-09 14:53:26.789231: Epoch 793 +2026-04-09 14:53:26.790839: Current learning rate: 0.00242 +2026-04-09 14:55:07.938734: 
train_loss -0.4609 +2026-04-09 14:55:07.945220: val_loss -0.3658 +2026-04-09 14:55:07.947735: Pseudo dice [0.541, 0.0, 0.5859, 0.0213, 0.4846, 0.0794, 0.8745] +2026-04-09 14:55:07.950276: Epoch time: 101.15 s +2026-04-09 14:55:09.134877: +2026-04-09 14:55:09.136803: Epoch 794 +2026-04-09 14:55:09.139872: Current learning rate: 0.00241 +2026-04-09 14:56:50.378854: train_loss -0.4779 +2026-04-09 14:56:50.389786: val_loss -0.3101 +2026-04-09 14:56:50.392137: Pseudo dice [0.6359, 0.0, 0.3969, 0.6721, 0.6681, 0.0542, 0.8217] +2026-04-09 14:56:50.395530: Epoch time: 101.25 s +2026-04-09 14:56:51.583692: +2026-04-09 14:56:51.585865: Epoch 795 +2026-04-09 14:56:51.587495: Current learning rate: 0.0024 +2026-04-09 14:58:33.287100: train_loss -0.4462 +2026-04-09 14:58:33.293679: val_loss -0.4301 +2026-04-09 14:58:33.295985: Pseudo dice [0.5514, 0.0, 0.7152, 0.1763, 0.45, 0.8199, 0.7003] +2026-04-09 14:58:33.298482: Epoch time: 101.71 s +2026-04-09 14:58:34.459890: +2026-04-09 14:58:34.462577: Epoch 796 +2026-04-09 14:58:34.465681: Current learning rate: 0.00239 +2026-04-09 15:00:15.898610: train_loss -0.4711 +2026-04-09 15:00:15.905119: val_loss -0.2862 +2026-04-09 15:00:15.907212: Pseudo dice [0.5344, 0.0, 0.6912, 0.6494, 0.6372, 0.0386, 0.8604] +2026-04-09 15:00:15.909845: Epoch time: 101.44 s +2026-04-09 15:00:17.084109: +2026-04-09 15:00:17.085932: Epoch 797 +2026-04-09 15:00:17.087936: Current learning rate: 0.00238 +2026-04-09 15:01:58.805518: train_loss -0.4514 +2026-04-09 15:01:58.813129: val_loss -0.318 +2026-04-09 15:01:58.815636: Pseudo dice [0.8015, 0.0, 0.5355, 0.2503, 0.4254, 0.0343, 0.6083] +2026-04-09 15:01:58.817974: Epoch time: 101.72 s +2026-04-09 15:01:59.991073: +2026-04-09 15:01:59.993019: Epoch 798 +2026-04-09 15:01:59.995083: Current learning rate: 0.00237 +2026-04-09 15:03:40.550330: train_loss -0.4578 +2026-04-09 15:03:40.565521: val_loss -0.4443 +2026-04-09 15:03:40.567699: Pseudo dice [0.7623, 0.0, 0.7, 0.8488, 0.3745, 0.8008, 0.5847] +2026-04-09 
15:03:40.570986: Epoch time: 100.56 s +2026-04-09 15:03:41.754890: +2026-04-09 15:03:41.757257: Epoch 799 +2026-04-09 15:03:41.758788: Current learning rate: 0.00236 +2026-04-09 15:05:23.618483: train_loss -0.4605 +2026-04-09 15:05:23.625944: val_loss -0.4355 +2026-04-09 15:05:23.628310: Pseudo dice [0.7781, 0.0, 0.7083, 0.7107, 0.6701, 0.1704, 0.9169] +2026-04-09 15:05:23.630766: Epoch time: 101.87 s +2026-04-09 15:05:26.486563: +2026-04-09 15:05:26.489538: Epoch 800 +2026-04-09 15:05:26.491513: Current learning rate: 0.00235 +2026-04-09 15:07:07.844382: train_loss -0.459 +2026-04-09 15:07:07.866116: val_loss -0.4078 +2026-04-09 15:07:07.868543: Pseudo dice [0.2862, 0.0, 0.7353, 0.7254, 0.2908, 0.6375, 0.732] +2026-04-09 15:07:07.870802: Epoch time: 101.36 s +2026-04-09 15:07:09.044553: +2026-04-09 15:07:09.047227: Epoch 801 +2026-04-09 15:07:09.049635: Current learning rate: 0.00234 +2026-04-09 15:08:50.589479: train_loss -0.4679 +2026-04-09 15:08:50.595724: val_loss -0.3097 +2026-04-09 15:08:50.597852: Pseudo dice [0.4828, 0.0, 0.4533, 0.1037, 0.5237, 0.062, 0.662] +2026-04-09 15:08:50.600541: Epoch time: 101.55 s +2026-04-09 15:08:51.763372: +2026-04-09 15:08:51.765814: Epoch 802 +2026-04-09 15:08:51.767499: Current learning rate: 0.00233 +2026-04-09 15:10:33.185111: train_loss -0.4625 +2026-04-09 15:10:33.191792: val_loss -0.2668 +2026-04-09 15:10:33.194779: Pseudo dice [0.4318, 0.0, 0.3298, 0.8394, 0.3104, 0.0287, 0.5136] +2026-04-09 15:10:33.198710: Epoch time: 101.42 s +2026-04-09 15:10:34.370722: +2026-04-09 15:10:34.372978: Epoch 803 +2026-04-09 15:10:34.375557: Current learning rate: 0.00232 +2026-04-09 15:12:16.025970: train_loss -0.474 +2026-04-09 15:12:16.034635: val_loss -0.4118 +2026-04-09 15:12:16.037298: Pseudo dice [0.144, 0.0, 0.6849, 0.4158, 0.314, 0.4637, 0.3015] +2026-04-09 15:12:16.040568: Epoch time: 101.66 s +2026-04-09 15:12:17.223426: +2026-04-09 15:12:17.226571: Epoch 804 +2026-04-09 15:12:17.228970: Current learning rate: 0.00231 
+2026-04-09 15:13:59.619397: train_loss -0.4429 +2026-04-09 15:13:59.629978: val_loss -0.3829 +2026-04-09 15:13:59.632870: Pseudo dice [0.7363, 0.0, 0.7589, 0.3803, 0.4116, 0.0412, 0.4766] +2026-04-09 15:13:59.636880: Epoch time: 102.4 s +2026-04-09 15:14:00.881650: +2026-04-09 15:14:00.884242: Epoch 805 +2026-04-09 15:14:00.889512: Current learning rate: 0.0023 +2026-04-09 15:15:42.806377: train_loss -0.4806 +2026-04-09 15:15:42.815305: val_loss -0.3454 +2026-04-09 15:15:42.817688: Pseudo dice [0.2967, 0.0, 0.688, 0.6742, 0.5102, 0.0488, 0.7508] +2026-04-09 15:15:42.820652: Epoch time: 101.93 s +2026-04-09 15:15:44.079981: +2026-04-09 15:15:44.083049: Epoch 806 +2026-04-09 15:15:44.085686: Current learning rate: 0.00229 +2026-04-09 15:17:25.171087: train_loss -0.4862 +2026-04-09 15:17:25.182014: val_loss -0.3568 +2026-04-09 15:17:25.185174: Pseudo dice [0.4514, 0.0, 0.6401, 0.6131, 0.5641, 0.3081, 0.7592] +2026-04-09 15:17:25.187428: Epoch time: 101.09 s +2026-04-09 15:17:26.371544: +2026-04-09 15:17:26.374521: Epoch 807 +2026-04-09 15:17:26.380466: Current learning rate: 0.00228 +2026-04-09 15:19:07.413656: train_loss -0.4775 +2026-04-09 15:19:07.419711: val_loss -0.3841 +2026-04-09 15:19:07.421667: Pseudo dice [0.5729, 0.0, 0.7031, 0.7023, 0.5889, 0.0396, 0.6712] +2026-04-09 15:19:07.424133: Epoch time: 101.05 s +2026-04-09 15:19:08.595074: +2026-04-09 15:19:08.596761: Epoch 808 +2026-04-09 15:19:08.598243: Current learning rate: 0.00226 +2026-04-09 15:20:49.263622: train_loss -0.4798 +2026-04-09 15:20:49.284842: val_loss -0.4666 +2026-04-09 15:20:49.287107: Pseudo dice [0.7013, 0.0, 0.6378, 0.7615, 0.5786, 0.6362, 0.8901] +2026-04-09 15:20:49.289473: Epoch time: 100.67 s +2026-04-09 15:20:50.458405: +2026-04-09 15:20:50.460838: Epoch 809 +2026-04-09 15:20:50.462987: Current learning rate: 0.00225 +2026-04-09 15:22:31.465358: train_loss -0.4726 +2026-04-09 15:22:31.472815: val_loss -0.4421 +2026-04-09 15:22:31.475698: Pseudo dice [0.159, 0.0, 0.7803, 0.8362, 
0.5767, 0.839, 0.5326] +2026-04-09 15:22:31.478830: Epoch time: 101.01 s +2026-04-09 15:22:32.649635: +2026-04-09 15:22:32.651628: Epoch 810 +2026-04-09 15:22:32.653375: Current learning rate: 0.00224 +2026-04-09 15:24:14.010211: train_loss -0.4714 +2026-04-09 15:24:14.016430: val_loss -0.4017 +2026-04-09 15:24:14.024189: Pseudo dice [0.6271, 0.0, 0.6124, 0.7726, 0.503, 0.7686, 0.2461] +2026-04-09 15:24:14.026916: Epoch time: 101.36 s +2026-04-09 15:24:15.257239: +2026-04-09 15:24:15.260405: Epoch 811 +2026-04-09 15:24:15.262530: Current learning rate: 0.00223 +2026-04-09 15:25:56.422592: train_loss -0.4699 +2026-04-09 15:25:56.430289: val_loss -0.4202 +2026-04-09 15:25:56.433113: Pseudo dice [0.4888, 0.0, 0.5636, 0.7689, 0.4969, 0.41, 0.8758] +2026-04-09 15:25:56.435994: Epoch time: 101.17 s +2026-04-09 15:25:57.639941: +2026-04-09 15:25:57.642521: Epoch 812 +2026-04-09 15:25:57.645428: Current learning rate: 0.00222 +2026-04-09 15:27:38.418238: train_loss -0.4807 +2026-04-09 15:27:38.425432: val_loss -0.407 +2026-04-09 15:27:38.427416: Pseudo dice [0.6145, 0.0, 0.6852, 0.0245, 0.3254, 0.8077, 0.7991] +2026-04-09 15:27:38.429698: Epoch time: 100.78 s +2026-04-09 15:27:39.615208: +2026-04-09 15:27:39.617294: Epoch 813 +2026-04-09 15:27:39.619568: Current learning rate: 0.00221 +2026-04-09 15:29:31.843734: train_loss -0.4767 +2026-04-09 15:29:31.871039: val_loss -0.4246 +2026-04-09 15:29:31.873202: Pseudo dice [0.3165, 0.0, 0.7396, 0.7679, 0.3857, 0.8348, 0.8376] +2026-04-09 15:29:31.875833: Epoch time: 112.23 s +2026-04-09 15:29:33.058446: +2026-04-09 15:29:33.060554: Epoch 814 +2026-04-09 15:29:33.063172: Current learning rate: 0.0022 +2026-04-09 15:31:15.207822: train_loss -0.4537 +2026-04-09 15:31:15.215043: val_loss -0.3683 +2026-04-09 15:31:15.218539: Pseudo dice [0.7739, 0.0, 0.6464, 0.2235, 0.43, 0.2434, 0.5034] +2026-04-09 15:31:15.220363: Epoch time: 102.15 s +2026-04-09 15:31:16.392732: +2026-04-09 15:31:16.396079: Epoch 815 +2026-04-09 15:31:16.399321: 
Current learning rate: 0.00219 +2026-04-09 15:32:59.591169: train_loss -0.4835 +2026-04-09 15:32:59.599565: val_loss -0.4419 +2026-04-09 15:32:59.602244: Pseudo dice [0.2903, 0.0, 0.7149, 0.0122, 0.5862, 0.719, 0.8487] +2026-04-09 15:32:59.605787: Epoch time: 103.2 s +2026-04-09 15:33:00.800425: +2026-04-09 15:33:00.804159: Epoch 816 +2026-04-09 15:33:00.806692: Current learning rate: 0.00218 +2026-04-09 15:34:41.959892: train_loss -0.4728 +2026-04-09 15:34:41.968655: val_loss -0.3516 +2026-04-09 15:34:41.971378: Pseudo dice [0.8135, 0.0, 0.7319, 0.0849, 0.6231, 0.1475, 0.8596] +2026-04-09 15:34:41.974605: Epoch time: 101.16 s +2026-04-09 15:34:43.158079: +2026-04-09 15:34:43.160896: Epoch 817 +2026-04-09 15:34:43.162663: Current learning rate: 0.00217 +2026-04-09 15:36:24.823881: train_loss -0.4873 +2026-04-09 15:36:24.829737: val_loss -0.3903 +2026-04-09 15:36:24.831732: Pseudo dice [0.2735, 0.0, 0.67, 0.8421, 0.6273, 0.0445, 0.885] +2026-04-09 15:36:24.834196: Epoch time: 101.67 s +2026-04-09 15:36:26.010310: +2026-04-09 15:36:26.012786: Epoch 818 +2026-04-09 15:36:26.016147: Current learning rate: 0.00216 +2026-04-09 15:38:07.468959: train_loss -0.4895 +2026-04-09 15:38:07.476232: val_loss -0.3601 +2026-04-09 15:38:07.478710: Pseudo dice [0.3672, 0.0, 0.5449, 0.0401, 0.5102, 0.1134, 0.6311] +2026-04-09 15:38:07.482350: Epoch time: 101.46 s +2026-04-09 15:38:08.673291: +2026-04-09 15:38:08.674991: Epoch 819 +2026-04-09 15:38:08.676908: Current learning rate: 0.00215 +2026-04-09 15:39:50.107682: train_loss -0.4817 +2026-04-09 15:39:50.116264: val_loss -0.4155 +2026-04-09 15:39:50.118645: Pseudo dice [0.2925, 0.0, 0.6478, 0.1528, 0.421, 0.8191, 0.4574] +2026-04-09 15:39:50.121095: Epoch time: 101.44 s +2026-04-09 15:39:51.216154: +2026-04-09 15:39:51.218027: Epoch 820 +2026-04-09 15:39:51.219961: Current learning rate: 0.00214 +2026-04-09 15:41:33.312550: train_loss -0.4958 +2026-04-09 15:41:33.319454: val_loss -0.3778 +2026-04-09 15:41:33.322319: Pseudo dice 
[0.4343, 0.0, 0.6961, 0.8338, 0.5174, 0.1485, 0.7814] +2026-04-09 15:41:33.324567: Epoch time: 102.1 s +2026-04-09 15:41:34.437207: +2026-04-09 15:41:34.439811: Epoch 821 +2026-04-09 15:41:34.441664: Current learning rate: 0.00213 +2026-04-09 15:43:15.049223: train_loss -0.4834 +2026-04-09 15:43:15.054603: val_loss -0.4672 +2026-04-09 15:43:15.057303: Pseudo dice [0.7782, 0.0, 0.583, 0.2907, 0.4793, 0.7363, 0.5553] +2026-04-09 15:43:15.059691: Epoch time: 100.62 s +2026-04-09 15:43:16.149409: +2026-04-09 15:43:16.150995: Epoch 822 +2026-04-09 15:43:16.152747: Current learning rate: 0.00212 +2026-04-09 15:44:58.128469: train_loss -0.4855 +2026-04-09 15:44:58.136578: val_loss -0.3352 +2026-04-09 15:44:58.138975: Pseudo dice [0.6523, 0.0, 0.6341, 0.6386, 0.6795, 0.1359, 0.8483] +2026-04-09 15:44:58.144252: Epoch time: 101.98 s +2026-04-09 15:44:59.256936: +2026-04-09 15:44:59.259606: Epoch 823 +2026-04-09 15:44:59.262146: Current learning rate: 0.0021 +2026-04-09 15:46:41.190252: train_loss -0.4789 +2026-04-09 15:46:41.196852: val_loss -0.412 +2026-04-09 15:46:41.199109: Pseudo dice [0.4627, 0.0, 0.7108, 0.8648, 0.4505, 0.1722, 0.8445] +2026-04-09 15:46:41.201454: Epoch time: 101.94 s +2026-04-09 15:46:42.310703: +2026-04-09 15:46:42.313840: Epoch 824 +2026-04-09 15:46:42.317494: Current learning rate: 0.00209 +2026-04-09 15:48:24.277060: train_loss -0.4716 +2026-04-09 15:48:24.283151: val_loss -0.3443 +2026-04-09 15:48:24.286030: Pseudo dice [0.5384, 0.0, 0.5432, 0.6708, 0.4724, 0.1834, 0.6504] +2026-04-09 15:48:24.288429: Epoch time: 101.97 s +2026-04-09 15:48:25.396784: +2026-04-09 15:48:25.398353: Epoch 825 +2026-04-09 15:48:25.399835: Current learning rate: 0.00208 +2026-04-09 15:50:06.894373: train_loss -0.4725 +2026-04-09 15:50:06.902587: val_loss -0.4093 +2026-04-09 15:50:06.905992: Pseudo dice [0.3673, 0.0, 0.746, 0.7026, 0.5927, 0.1662, 0.6796] +2026-04-09 15:50:06.909247: Epoch time: 101.5 s +2026-04-09 15:50:08.019979: +2026-04-09 15:50:08.022232: Epoch 
826 +2026-04-09 15:50:08.024146: Current learning rate: 0.00207 +2026-04-09 15:51:49.896639: train_loss -0.4777 +2026-04-09 15:51:49.905147: val_loss -0.3916 +2026-04-09 15:51:49.907340: Pseudo dice [0.5402, 0.0, 0.7142, 0.2576, 0.2798, 0.3575, 0.6894] +2026-04-09 15:51:49.909873: Epoch time: 101.88 s +2026-04-09 15:51:51.031883: +2026-04-09 15:51:51.033843: Epoch 827 +2026-04-09 15:51:51.035435: Current learning rate: 0.00206 +2026-04-09 15:53:31.573227: train_loss -0.4667 +2026-04-09 15:53:31.578732: val_loss -0.4284 +2026-04-09 15:53:31.581748: Pseudo dice [0.7389, 0.0, 0.5376, 0.7132, 0.6147, 0.5263, 0.5793] +2026-04-09 15:53:31.585664: Epoch time: 100.54 s +2026-04-09 15:53:32.718325: +2026-04-09 15:53:32.720690: Epoch 828 +2026-04-09 15:53:32.722645: Current learning rate: 0.00205 +2026-04-09 15:55:14.082791: train_loss -0.4724 +2026-04-09 15:55:14.089748: val_loss -0.4094 +2026-04-09 15:55:14.092063: Pseudo dice [0.5454, 0.0, 0.7123, 0.4409, 0.3922, 0.1615, 0.4516] +2026-04-09 15:55:14.094882: Epoch time: 101.37 s +2026-04-09 15:55:15.204437: +2026-04-09 15:55:15.206552: Epoch 829 +2026-04-09 15:55:15.209015: Current learning rate: 0.00204 +2026-04-09 15:56:56.770053: train_loss -0.4739 +2026-04-09 15:56:56.776482: val_loss -0.3602 +2026-04-09 15:56:56.779037: Pseudo dice [0.1524, 0.0, 0.4714, 0.7367, 0.4981, 0.1663, 0.7144] +2026-04-09 15:56:56.782562: Epoch time: 101.57 s +2026-04-09 15:56:57.912406: +2026-04-09 15:56:57.914932: Epoch 830 +2026-04-09 15:56:57.916845: Current learning rate: 0.00203 +2026-04-09 15:58:39.566966: train_loss -0.4842 +2026-04-09 15:58:39.575549: val_loss -0.231 +2026-04-09 15:58:39.578435: Pseudo dice [0.3708, 0.0, 0.1472, 0.1753, 0.7014, 0.0562, 0.7406] +2026-04-09 15:58:39.581351: Epoch time: 101.66 s +2026-04-09 15:58:40.709787: +2026-04-09 15:58:40.712498: Epoch 831 +2026-04-09 15:58:40.715890: Current learning rate: 0.00202 +2026-04-09 16:00:21.844657: train_loss -0.4835 +2026-04-09 16:00:21.851001: val_loss -0.4136 
+2026-04-09 16:00:21.853139: Pseudo dice [0.3453, 0.0, 0.6746, 0.6822, 0.6192, 0.2779, 0.7874] +2026-04-09 16:00:21.855920: Epoch time: 101.14 s +2026-04-09 16:00:22.972615: +2026-04-09 16:00:22.974671: Epoch 832 +2026-04-09 16:00:22.976424: Current learning rate: 0.00201 +2026-04-09 16:02:04.360543: train_loss -0.4803 +2026-04-09 16:02:04.367830: val_loss -0.386 +2026-04-09 16:02:04.369948: Pseudo dice [0.8034, 0.0, 0.667, 0.244, 0.5977, 0.0707, 0.6239] +2026-04-09 16:02:04.372792: Epoch time: 101.39 s +2026-04-09 16:02:05.490095: +2026-04-09 16:02:05.491822: Epoch 833 +2026-04-09 16:02:05.494212: Current learning rate: 0.002 +2026-04-09 16:03:47.074632: train_loss -0.4807 +2026-04-09 16:03:47.081729: val_loss -0.4081 +2026-04-09 16:03:47.084171: Pseudo dice [0.2467, 0.0, 0.7464, 0.6983, 0.5292, 0.467, 0.4402] +2026-04-09 16:03:47.086464: Epoch time: 101.59 s +2026-04-09 16:03:48.217632: +2026-04-09 16:03:48.223063: Epoch 834 +2026-04-09 16:03:48.225491: Current learning rate: 0.00199 +2026-04-09 16:05:29.099454: train_loss -0.4864 +2026-04-09 16:05:29.105531: val_loss -0.2912 +2026-04-09 16:05:29.109262: Pseudo dice [0.8131, 0.0, 0.4724, 0.1208, 0.6026, 0.1545, 0.652] +2026-04-09 16:05:29.112042: Epoch time: 100.89 s +2026-04-09 16:05:30.228077: +2026-04-09 16:05:30.230573: Epoch 835 +2026-04-09 16:05:30.232727: Current learning rate: 0.00198 +2026-04-09 16:07:11.592603: train_loss -0.4714 +2026-04-09 16:07:11.601741: val_loss -0.3232 +2026-04-09 16:07:11.603992: Pseudo dice [0.5905, 0.0, 0.6336, 0.7241, 0.5822, 0.088, 0.7267] +2026-04-09 16:07:11.606689: Epoch time: 101.37 s +2026-04-09 16:07:12.905779: +2026-04-09 16:07:12.907625: Epoch 836 +2026-04-09 16:07:12.909171: Current learning rate: 0.00196 +2026-04-09 16:08:53.752332: train_loss -0.4788 +2026-04-09 16:08:53.760955: val_loss -0.3 +2026-04-09 16:08:53.762862: Pseudo dice [0.8185, 0.0, 0.548, 0.7096, 0.3924, 0.1487, 0.5905] +2026-04-09 16:08:53.764944: Epoch time: 100.85 s +2026-04-09 16:08:54.880495: 
+2026-04-09 16:08:54.882171: Epoch 837 +2026-04-09 16:08:54.884062: Current learning rate: 0.00195 +2026-04-09 16:10:36.820300: train_loss -0.4742 +2026-04-09 16:10:36.831492: val_loss -0.3843 +2026-04-09 16:10:36.834198: Pseudo dice [0.2774, 0.0, 0.8003, 0.6514, 0.2811, 0.115, 0.7922] +2026-04-09 16:10:36.836739: Epoch time: 101.94 s +2026-04-09 16:10:37.952758: +2026-04-09 16:10:37.955977: Epoch 838 +2026-04-09 16:10:37.958048: Current learning rate: 0.00194 +2026-04-09 16:12:19.010651: train_loss -0.4643 +2026-04-09 16:12:19.025005: val_loss -0.4329 +2026-04-09 16:12:19.029379: Pseudo dice [0.5672, 0.0, 0.6345, 0.5543, 0.5746, 0.7591, 0.6754] +2026-04-09 16:12:19.032012: Epoch time: 101.06 s +2026-04-09 16:12:20.139810: +2026-04-09 16:12:20.141775: Epoch 839 +2026-04-09 16:12:20.143452: Current learning rate: 0.00193 +2026-04-09 16:14:00.806983: train_loss -0.4785 +2026-04-09 16:14:00.812675: val_loss -0.448 +2026-04-09 16:14:00.814580: Pseudo dice [0.7853, 0.0, 0.7734, 0.8435, 0.5587, 0.6962, 0.665] +2026-04-09 16:14:00.816941: Epoch time: 100.67 s +2026-04-09 16:14:01.927429: +2026-04-09 16:14:01.929521: Epoch 840 +2026-04-09 16:14:01.931452: Current learning rate: 0.00192 +2026-04-09 16:15:43.395151: train_loss -0.4899 +2026-04-09 16:15:43.402837: val_loss -0.3434 +2026-04-09 16:15:43.408019: Pseudo dice [0.2265, 0.0, 0.7763, 0.0164, 0.6278, 0.1792, 0.6383] +2026-04-09 16:15:43.419258: Epoch time: 101.47 s +2026-04-09 16:15:45.527372: +2026-04-09 16:15:45.529799: Epoch 841 +2026-04-09 16:15:45.531847: Current learning rate: 0.00191 +2026-04-09 16:17:27.889331: train_loss -0.4561 +2026-04-09 16:17:27.896978: val_loss -0.4136 +2026-04-09 16:17:27.899579: Pseudo dice [0.3677, 0.0, 0.4759, 0.4767, 0.4629, 0.8143, 0.5588] +2026-04-09 16:17:27.904048: Epoch time: 102.37 s +2026-04-09 16:17:29.025921: +2026-04-09 16:17:29.028692: Epoch 842 +2026-04-09 16:17:29.031592: Current learning rate: 0.0019 +2026-04-09 16:19:10.876194: train_loss -0.4804 +2026-04-09 
16:19:10.884423: val_loss -0.4502 +2026-04-09 16:19:10.886770: Pseudo dice [0.3861, 0.0, 0.6075, 0.5556, 0.5819, 0.7742, 0.7726] +2026-04-09 16:19:10.889789: Epoch time: 101.85 s +2026-04-09 16:19:11.999479: +2026-04-09 16:19:12.002304: Epoch 843 +2026-04-09 16:19:12.004689: Current learning rate: 0.00189 +2026-04-09 16:20:52.393417: train_loss -0.4865 +2026-04-09 16:20:52.409596: val_loss -0.4459 +2026-04-09 16:20:52.416218: Pseudo dice [0.7533, 0.0, 0.7336, 0.5926, 0.394, 0.7203, 0.8684] +2026-04-09 16:20:52.421437: Epoch time: 100.4 s +2026-04-09 16:20:53.541536: +2026-04-09 16:20:53.544034: Epoch 844 +2026-04-09 16:20:53.545758: Current learning rate: 0.00188 +2026-04-09 16:22:34.211720: train_loss -0.477 +2026-04-09 16:22:34.217792: val_loss -0.3864 +2026-04-09 16:22:34.219758: Pseudo dice [0.5539, 0.0, 0.7453, 0.7983, 0.4165, 0.1779, 0.6013] +2026-04-09 16:22:34.221985: Epoch time: 100.67 s +2026-04-09 16:22:35.324338: +2026-04-09 16:22:35.327293: Epoch 845 +2026-04-09 16:22:35.329963: Current learning rate: 0.00187 +2026-04-09 16:24:16.991111: train_loss -0.4676 +2026-04-09 16:24:16.999830: val_loss -0.4001 +2026-04-09 16:24:17.002572: Pseudo dice [0.4468, 0.0, 0.543, 0.5434, 0.241, 0.7662, 0.5446] +2026-04-09 16:24:17.004916: Epoch time: 101.67 s +2026-04-09 16:24:18.125556: +2026-04-09 16:24:18.127795: Epoch 846 +2026-04-09 16:24:18.130958: Current learning rate: 0.00186 +2026-04-09 16:25:58.684460: train_loss -0.4875 +2026-04-09 16:25:58.692384: val_loss -0.2679 +2026-04-09 16:25:58.694744: Pseudo dice [0.5068, 0.0, 0.7812, 0.1882, 0.672, 0.069, 0.8213] +2026-04-09 16:25:58.697122: Epoch time: 100.56 s +2026-04-09 16:25:59.814518: +2026-04-09 16:25:59.816464: Epoch 847 +2026-04-09 16:25:59.818143: Current learning rate: 0.00185 +2026-04-09 16:27:40.959306: train_loss -0.4736 +2026-04-09 16:27:40.970634: val_loss -0.4023 +2026-04-09 16:27:40.977059: Pseudo dice [0.5064, 0.0, 0.4385, 0.5255, 0.656, 0.4746, 0.8654] +2026-04-09 16:27:40.980847: Epoch time: 
101.15 s +2026-04-09 16:27:42.108541: +2026-04-09 16:27:42.110477: Epoch 848 +2026-04-09 16:27:42.112586: Current learning rate: 0.00184 +2026-04-09 16:29:23.534923: train_loss -0.4832 +2026-04-09 16:29:23.543399: val_loss -0.4416 +2026-04-09 16:29:23.546638: Pseudo dice [0.5516, 0.0, 0.7268, 0.6145, 0.6526, 0.6297, 0.8059] +2026-04-09 16:29:23.548896: Epoch time: 101.43 s +2026-04-09 16:29:24.677331: +2026-04-09 16:29:24.679695: Epoch 849 +2026-04-09 16:29:24.681829: Current learning rate: 0.00182 +2026-04-09 16:31:08.221422: train_loss -0.4673 +2026-04-09 16:31:08.233279: val_loss -0.3929 +2026-04-09 16:31:08.236139: Pseudo dice [0.7216, 0.0, 0.634, 0.7284, 0.1706, 0.4541, 0.4007] +2026-04-09 16:31:08.239391: Epoch time: 103.55 s +2026-04-09 16:31:11.207368: +2026-04-09 16:31:11.211891: Epoch 850 +2026-04-09 16:31:11.214533: Current learning rate: 0.00181 +2026-04-09 16:32:52.476047: train_loss -0.4592 +2026-04-09 16:32:52.482580: val_loss -0.2923 +2026-04-09 16:32:52.485419: Pseudo dice [0.5937, 0.0, 0.5648, 0.6568, 0.4883, 0.033, 0.6754] +2026-04-09 16:32:52.487640: Epoch time: 101.27 s +2026-04-09 16:32:53.595428: +2026-04-09 16:32:53.598031: Epoch 851 +2026-04-09 16:32:53.599962: Current learning rate: 0.0018 +2026-04-09 16:34:34.981896: train_loss -0.466 +2026-04-09 16:34:34.989651: val_loss -0.4056 +2026-04-09 16:34:34.991642: Pseudo dice [0.7784, 0.0, 0.712, 0.3988, 0.2981, 0.6629, 0.2544] +2026-04-09 16:34:34.994288: Epoch time: 101.39 s +2026-04-09 16:34:36.100657: +2026-04-09 16:34:36.102542: Epoch 852 +2026-04-09 16:34:36.104732: Current learning rate: 0.00179 +2026-04-09 16:36:17.631030: train_loss -0.4777 +2026-04-09 16:36:17.638426: val_loss -0.4026 +2026-04-09 16:36:17.640488: Pseudo dice [0.7484, 0.0, 0.6277, 0.7445, 0.3764, 0.6573, 0.5499] +2026-04-09 16:36:17.646416: Epoch time: 101.53 s +2026-04-09 16:36:18.748372: +2026-04-09 16:36:18.750553: Epoch 853 +2026-04-09 16:36:18.752111: Current learning rate: 0.00178 +2026-04-09 16:38:00.322451: 
train_loss -0.4684 +2026-04-09 16:38:00.330783: val_loss -0.3918 +2026-04-09 16:38:00.333490: Pseudo dice [0.875, 0.0, 0.6421, 0.4175, 0.5914, 0.1219, 0.515] +2026-04-09 16:38:00.337133: Epoch time: 101.58 s +2026-04-09 16:38:01.457492: +2026-04-09 16:38:01.459466: Epoch 854 +2026-04-09 16:38:01.462219: Current learning rate: 0.00177 +2026-04-09 16:39:42.977003: train_loss -0.4772 +2026-04-09 16:39:42.981934: val_loss -0.4224 +2026-04-09 16:39:42.984894: Pseudo dice [0.5275, 0.0, 0.6225, 0.3125, 0.4168, 0.3472, 0.655] +2026-04-09 16:39:42.987781: Epoch time: 101.52 s +2026-04-09 16:39:44.095900: +2026-04-09 16:39:44.098295: Epoch 855 +2026-04-09 16:39:44.100332: Current learning rate: 0.00176 +2026-04-09 16:41:25.439888: train_loss -0.4831 +2026-04-09 16:41:25.447569: val_loss -0.3793 +2026-04-09 16:41:25.450269: Pseudo dice [0.7543, 0.0, 0.6434, 0.8421, 0.3489, 0.1439, 0.7911] +2026-04-09 16:41:25.452759: Epoch time: 101.35 s +2026-04-09 16:41:26.555872: +2026-04-09 16:41:26.557624: Epoch 856 +2026-04-09 16:41:26.559546: Current learning rate: 0.00175 +2026-04-09 16:43:08.464647: train_loss -0.4706 +2026-04-09 16:43:08.473898: val_loss -0.3618 +2026-04-09 16:43:08.475822: Pseudo dice [0.7232, 0.0, 0.5163, 0.5888, 0.6046, 0.1072, 0.7491] +2026-04-09 16:43:08.478201: Epoch time: 101.91 s +2026-04-09 16:43:09.590671: +2026-04-09 16:43:09.592872: Epoch 857 +2026-04-09 16:43:09.594765: Current learning rate: 0.00174 +2026-04-09 16:44:51.088450: train_loss -0.4825 +2026-04-09 16:44:51.095430: val_loss -0.3981 +2026-04-09 16:44:51.097749: Pseudo dice [0.4322, 0.0, 0.7624, 0.6815, 0.5795, 0.1226, 0.8762] +2026-04-09 16:44:51.100782: Epoch time: 101.5 s +2026-04-09 16:44:52.204879: +2026-04-09 16:44:52.206720: Epoch 858 +2026-04-09 16:44:52.208618: Current learning rate: 0.00173 +2026-04-09 16:46:33.857240: train_loss -0.4832 +2026-04-09 16:46:33.864671: val_loss -0.3311 +2026-04-09 16:46:33.866510: Pseudo dice [0.7839, 0.0, 0.5417, 0.2215, 0.4992, 0.396, 0.7783] 
+2026-04-09 16:46:33.868741: Epoch time: 101.66 s +2026-04-09 16:46:34.980495: +2026-04-09 16:46:34.982138: Epoch 859 +2026-04-09 16:46:34.984280: Current learning rate: 0.00172 +2026-04-09 16:48:16.539992: train_loss -0.4726 +2026-04-09 16:48:16.547009: val_loss -0.3376 +2026-04-09 16:48:16.550742: Pseudo dice [0.6328, 0.0, 0.7007, 0.3305, 0.2282, 0.1236, 0.4522] +2026-04-09 16:48:16.553952: Epoch time: 101.56 s +2026-04-09 16:48:17.669954: +2026-04-09 16:48:17.672333: Epoch 860 +2026-04-09 16:48:17.674220: Current learning rate: 0.0017 +2026-04-09 16:49:59.253969: train_loss -0.4793 +2026-04-09 16:49:59.262615: val_loss -0.3994 +2026-04-09 16:49:59.266532: Pseudo dice [0.419, 0.0, 0.7201, 0.7283, 0.341, 0.4311, 0.5729] +2026-04-09 16:49:59.271243: Epoch time: 101.59 s +2026-04-09 16:50:01.338428: +2026-04-09 16:50:01.341668: Epoch 861 +2026-04-09 16:50:01.344482: Current learning rate: 0.00169 +2026-04-09 16:51:42.306201: train_loss -0.4852 +2026-04-09 16:51:42.324859: val_loss -0.4047 +2026-04-09 16:51:42.326716: Pseudo dice [0.6474, 0.0, 0.6422, 0.4582, 0.5996, 0.3149, 0.5967] +2026-04-09 16:51:42.329080: Epoch time: 100.97 s +2026-04-09 16:51:43.435606: +2026-04-09 16:51:43.437924: Epoch 862 +2026-04-09 16:51:43.439592: Current learning rate: 0.00168 +2026-04-09 16:53:24.994910: train_loss -0.4717 +2026-04-09 16:53:25.001689: val_loss -0.4171 +2026-04-09 16:53:25.005508: Pseudo dice [0.2463, 0.0, 0.7209, 0.6938, 0.5222, 0.8042, 0.7716] +2026-04-09 16:53:25.009650: Epoch time: 101.56 s +2026-04-09 16:53:26.106835: +2026-04-09 16:53:26.110461: Epoch 863 +2026-04-09 16:53:26.112840: Current learning rate: 0.00167 +2026-04-09 16:55:07.852254: train_loss -0.4825 +2026-04-09 16:55:07.858992: val_loss -0.4189 +2026-04-09 16:55:07.860892: Pseudo dice [0.5552, 0.0, 0.6062, 0.7983, 0.4186, 0.4397, 0.8144] +2026-04-09 16:55:07.864102: Epoch time: 101.75 s +2026-04-09 16:55:08.970662: +2026-04-09 16:55:08.973326: Epoch 864 +2026-04-09 16:55:08.975153: Current learning 
rate: 0.00166 +2026-04-09 16:56:50.409279: train_loss -0.4705 +2026-04-09 16:56:50.415553: val_loss -0.3451 +2026-04-09 16:56:50.417457: Pseudo dice [0.2174, 0.0, 0.7143, 0.7222, 0.5912, 0.075, 0.769] +2026-04-09 16:56:50.419684: Epoch time: 101.44 s +2026-04-09 16:56:51.520861: +2026-04-09 16:56:51.522703: Epoch 865 +2026-04-09 16:56:51.524437: Current learning rate: 0.00165 +2026-04-09 16:58:32.995043: train_loss -0.4846 +2026-04-09 16:58:33.002108: val_loss -0.2929 +2026-04-09 16:58:33.004018: Pseudo dice [0.2775, 0.0, 0.5245, 0.1627, 0.4534, 0.1384, 0.8186] +2026-04-09 16:58:33.007173: Epoch time: 101.48 s +2026-04-09 16:58:34.105880: +2026-04-09 16:58:34.107845: Epoch 866 +2026-04-09 16:58:34.109936: Current learning rate: 0.00164 +2026-04-09 17:00:15.268104: train_loss -0.4873 +2026-04-09 17:00:15.274780: val_loss -0.4439 +2026-04-09 17:00:15.277268: Pseudo dice [0.3055, 0.0, 0.7118, 0.0133, 0.5636, 0.3314, 0.7111] +2026-04-09 17:00:15.279712: Epoch time: 101.17 s +2026-04-09 17:00:16.380644: +2026-04-09 17:00:16.382320: Epoch 867 +2026-04-09 17:00:16.384375: Current learning rate: 0.00163 +2026-04-09 17:01:58.511899: train_loss -0.4875 +2026-04-09 17:01:58.518761: val_loss -0.4008 +2026-04-09 17:01:58.520825: Pseudo dice [0.2686, 0.0, 0.6185, 0.5506, 0.2708, 0.5082, 0.4587] +2026-04-09 17:01:58.523072: Epoch time: 102.13 s +2026-04-09 17:01:59.617506: +2026-04-09 17:01:59.619315: Epoch 868 +2026-04-09 17:01:59.620939: Current learning rate: 0.00162 +2026-04-09 17:03:40.831897: train_loss -0.4864 +2026-04-09 17:03:40.839231: val_loss -0.3122 +2026-04-09 17:03:40.842126: Pseudo dice [0.7564, 0.0, 0.711, 0.7085, 0.4915, 0.0847, 0.7631] +2026-04-09 17:03:40.844167: Epoch time: 101.22 s +2026-04-09 17:03:41.959545: +2026-04-09 17:03:41.961258: Epoch 869 +2026-04-09 17:03:41.963068: Current learning rate: 0.00161 +2026-04-09 17:05:23.967773: train_loss -0.491 +2026-04-09 17:05:23.972659: val_loss -0.4378 +2026-04-09 17:05:23.975209: Pseudo dice [0.5929, 0.0, 0.66, 
0.4402, 0.6212, 0.7252, 0.7583] +2026-04-09 17:05:23.977415: Epoch time: 102.01 s +2026-04-09 17:05:25.071022: +2026-04-09 17:05:25.075020: Epoch 870 +2026-04-09 17:05:25.078000: Current learning rate: 0.00159 +2026-04-09 17:07:06.561266: train_loss -0.4891 +2026-04-09 17:07:06.568451: val_loss -0.3656 +2026-04-09 17:07:06.572022: Pseudo dice [0.7036, 0.0, 0.7603, 0.8037, 0.5595, 0.0476, 0.785] +2026-04-09 17:07:06.575413: Epoch time: 101.49 s +2026-04-09 17:07:07.703224: +2026-04-09 17:07:07.706840: Epoch 871 +2026-04-09 17:07:07.708815: Current learning rate: 0.00158 +2026-04-09 17:08:49.815228: train_loss -0.4838 +2026-04-09 17:08:49.821650: val_loss -0.317 +2026-04-09 17:08:49.823737: Pseudo dice [0.4443, 0.0, 0.6459, 0.7868, 0.3424, 0.1106, 0.3928] +2026-04-09 17:08:49.827293: Epoch time: 102.12 s +2026-04-09 17:08:50.933904: +2026-04-09 17:08:50.936937: Epoch 872 +2026-04-09 17:08:50.940569: Current learning rate: 0.00157 +2026-04-09 17:10:32.144422: train_loss -0.4906 +2026-04-09 17:10:32.153392: val_loss -0.3041 +2026-04-09 17:10:32.157241: Pseudo dice [0.8788, 0.0, 0.6013, 0.0856, 0.4698, 0.1503, 0.625] +2026-04-09 17:10:32.160440: Epoch time: 101.21 s +2026-04-09 17:10:33.266026: +2026-04-09 17:10:33.268344: Epoch 873 +2026-04-09 17:10:33.270494: Current learning rate: 0.00156 +2026-04-09 17:12:14.767606: train_loss -0.4884 +2026-04-09 17:12:14.774559: val_loss -0.3933 +2026-04-09 17:12:14.778380: Pseudo dice [0.4475, 0.0, 0.645, 0.4313, 0.6011, 0.1508, 0.8973] +2026-04-09 17:12:14.783221: Epoch time: 101.5 s +2026-04-09 17:12:15.899281: +2026-04-09 17:12:15.901215: Epoch 874 +2026-04-09 17:12:15.904326: Current learning rate: 0.00155 +2026-04-09 17:13:57.056866: train_loss -0.505 +2026-04-09 17:13:57.065342: val_loss -0.3768 +2026-04-09 17:13:57.068390: Pseudo dice [0.8466, 0.0, 0.6556, 0.8941, 0.2875, 0.1973, 0.8545] +2026-04-09 17:13:57.070327: Epoch time: 101.16 s +2026-04-09 17:13:58.177900: +2026-04-09 17:13:58.180488: Epoch 875 +2026-04-09 
17:13:58.182976: Current learning rate: 0.00154 +2026-04-09 17:15:38.592495: train_loss -0.4994 +2026-04-09 17:15:38.598831: val_loss -0.1634 +2026-04-09 17:15:38.601144: Pseudo dice [0.6822, 0.0, 0.4647, 0.5414, 0.5904, 0.0581, 0.8812] +2026-04-09 17:15:38.604290: Epoch time: 100.42 s +2026-04-09 17:15:39.698366: +2026-04-09 17:15:39.700710: Epoch 876 +2026-04-09 17:15:39.702244: Current learning rate: 0.00153 +2026-04-09 17:17:21.405736: train_loss -0.4797 +2026-04-09 17:17:21.414023: val_loss -0.3923 +2026-04-09 17:17:21.415940: Pseudo dice [0.4744, 0.0, 0.7598, 0.5769, 0.3681, 0.1429, 0.5311] +2026-04-09 17:17:21.418744: Epoch time: 101.71 s +2026-04-09 17:17:22.523250: +2026-04-09 17:17:22.525780: Epoch 877 +2026-04-09 17:17:22.527817: Current learning rate: 0.00152 +2026-04-09 17:19:04.001740: train_loss -0.4999 +2026-04-09 17:19:04.014476: val_loss -0.343 +2026-04-09 17:19:04.017475: Pseudo dice [0.8428, 0.0, 0.633, 0.4821, 0.5892, 0.1415, 0.8329] +2026-04-09 17:19:04.020715: Epoch time: 101.48 s +2026-04-09 17:19:05.125382: +2026-04-09 17:19:05.127785: Epoch 878 +2026-04-09 17:19:05.129424: Current learning rate: 0.00151 +2026-04-09 17:20:47.367351: train_loss -0.4926 +2026-04-09 17:20:47.373287: val_loss -0.4482 +2026-04-09 17:20:47.375554: Pseudo dice [0.7851, 0.0, 0.7016, 0.6294, 0.5653, 0.7099, 0.8468] +2026-04-09 17:20:47.377598: Epoch time: 102.25 s +2026-04-09 17:20:48.476653: +2026-04-09 17:20:48.478699: Epoch 879 +2026-04-09 17:20:48.480473: Current learning rate: 0.00149 +2026-04-09 17:22:30.032171: train_loss -0.4991 +2026-04-09 17:22:30.043026: val_loss -0.344 +2026-04-09 17:22:30.045491: Pseudo dice [0.2317, 0.0, 0.4808, 0.5537, 0.4486, 0.0602, 0.7841] +2026-04-09 17:22:30.047853: Epoch time: 101.56 s +2026-04-09 17:22:31.158299: +2026-04-09 17:22:31.160368: Epoch 880 +2026-04-09 17:22:31.162346: Current learning rate: 0.00148 +2026-04-09 17:24:13.883005: train_loss -0.4998 +2026-04-09 17:24:13.889189: val_loss -0.3049 +2026-04-09 
17:24:13.901766: Pseudo dice [0.6351, 0.0, 0.6567, 0.3048, 0.5344, 0.0884, 0.8772] +2026-04-09 17:24:13.904854: Epoch time: 102.73 s +2026-04-09 17:24:15.016826: +2026-04-09 17:24:15.019308: Epoch 881 +2026-04-09 17:24:15.021195: Current learning rate: 0.00147 +2026-04-09 17:25:55.987123: train_loss -0.4815 +2026-04-09 17:25:55.993279: val_loss -0.3661 +2026-04-09 17:25:55.996126: Pseudo dice [0.6305, 0.0, 0.5369, 0.4419, 0.3496, 0.0399, 0.5934] +2026-04-09 17:25:55.998694: Epoch time: 100.97 s +2026-04-09 17:25:58.107217: +2026-04-09 17:25:58.109272: Epoch 882 +2026-04-09 17:25:58.110594: Current learning rate: 0.00146 +2026-04-09 17:27:40.304301: train_loss -0.4811 +2026-04-09 17:27:40.312557: val_loss -0.4554 +2026-04-09 17:27:40.314968: Pseudo dice [0.6373, 0.0, 0.6996, 0.0205, 0.6151, 0.7844, 0.801] +2026-04-09 17:27:40.317153: Epoch time: 102.2 s +2026-04-09 17:27:41.431479: +2026-04-09 17:27:41.435571: Epoch 883 +2026-04-09 17:27:41.438156: Current learning rate: 0.00145 +2026-04-09 17:29:22.662576: train_loss -0.4888 +2026-04-09 17:29:22.670618: val_loss -0.4382 +2026-04-09 17:29:22.672908: Pseudo dice [0.7034, 0.0, 0.7214, 0.2264, 0.5148, 0.2314, 0.7678] +2026-04-09 17:29:22.675240: Epoch time: 101.23 s +2026-04-09 17:29:23.785317: +2026-04-09 17:29:23.787933: Epoch 884 +2026-04-09 17:29:23.789821: Current learning rate: 0.00144 +2026-04-09 17:31:06.046581: train_loss -0.4912 +2026-04-09 17:31:06.053839: val_loss -0.3436 +2026-04-09 17:31:06.056340: Pseudo dice [0.3944, 0.0, 0.6826, 0.4558, 0.3986, 0.1986, 0.8502] +2026-04-09 17:31:06.078914: Epoch time: 102.26 s +2026-04-09 17:31:07.177870: +2026-04-09 17:31:07.180174: Epoch 885 +2026-04-09 17:31:07.182255: Current learning rate: 0.00143 +2026-04-09 17:32:49.802137: train_loss -0.4925 +2026-04-09 17:32:49.813212: val_loss -0.2302 +2026-04-09 17:32:49.821241: Pseudo dice [0.7553, 0.0, 0.5691, 0.1226, 0.3297, 0.0528, 0.5063] +2026-04-09 17:32:49.835146: Epoch time: 102.63 s +2026-04-09 17:32:50.960513: 
+2026-04-09 17:32:50.964087: Epoch 886 +2026-04-09 17:32:50.966976: Current learning rate: 0.00142 +2026-04-09 17:34:32.740307: train_loss -0.5007 +2026-04-09 17:34:32.750190: val_loss -0.2626 +2026-04-09 17:34:32.753007: Pseudo dice [0.8092, 0.0, 0.709, 0.8376, 0.6775, 0.224, 0.9327] +2026-04-09 17:34:32.758103: Epoch time: 101.78 s +2026-04-09 17:34:33.867365: +2026-04-09 17:34:33.869754: Epoch 887 +2026-04-09 17:34:33.872567: Current learning rate: 0.00141 +2026-04-09 17:36:15.141973: train_loss -0.5011 +2026-04-09 17:36:15.151835: val_loss -0.4346 +2026-04-09 17:36:15.154218: Pseudo dice [0.5071, 0.0, 0.3945, 0.6185, 0.2357, 0.4892, 0.8122] +2026-04-09 17:36:15.157403: Epoch time: 101.28 s +2026-04-09 17:36:16.277517: +2026-04-09 17:36:16.279692: Epoch 888 +2026-04-09 17:36:16.283325: Current learning rate: 0.00139 +2026-04-09 17:37:56.966138: train_loss -0.5127 +2026-04-09 17:37:56.973466: val_loss -0.4441 +2026-04-09 17:37:56.975631: Pseudo dice [0.2948, 0.0, 0.5616, 0.8636, 0.6443, 0.7295, 0.8138] +2026-04-09 17:37:56.978836: Epoch time: 100.69 s +2026-04-09 17:37:58.088156: +2026-04-09 17:37:58.090492: Epoch 889 +2026-04-09 17:37:58.092463: Current learning rate: 0.00138 +2026-04-09 17:39:39.191464: train_loss -0.5059 +2026-04-09 17:39:39.198146: val_loss -0.4052 +2026-04-09 17:39:39.200547: Pseudo dice [0.4533, 0.0, 0.5232, 0.8882, 0.1885, 0.2391, 0.3288] +2026-04-09 17:39:39.203068: Epoch time: 101.11 s +2026-04-09 17:39:40.297816: +2026-04-09 17:39:40.299823: Epoch 890 +2026-04-09 17:39:40.301293: Current learning rate: 0.00137 +2026-04-09 17:41:21.135155: train_loss -0.4936 +2026-04-09 17:41:21.141585: val_loss -0.3808 +2026-04-09 17:41:21.143292: Pseudo dice [0.6567, 0.0, 0.6658, 0.0696, 0.6356, 0.103, 0.86] +2026-04-09 17:41:21.145728: Epoch time: 100.84 s +2026-04-09 17:41:22.252129: +2026-04-09 17:41:22.253923: Epoch 891 +2026-04-09 17:41:22.255585: Current learning rate: 0.00136 +2026-04-09 17:43:03.243936: train_loss -0.4909 +2026-04-09 
17:43:03.250068: val_loss -0.1732 +2026-04-09 17:43:03.251861: Pseudo dice [0.61, 0.0, 0.4854, 0.4339, 0.3281, 0.1432, 0.6388] +2026-04-09 17:43:03.253944: Epoch time: 100.99 s +2026-04-09 17:43:04.362338: +2026-04-09 17:43:04.364092: Epoch 892 +2026-04-09 17:43:04.365521: Current learning rate: 0.00135 +2026-04-09 17:44:45.237153: train_loss -0.4981 +2026-04-09 17:44:45.243203: val_loss -0.2645 +2026-04-09 17:44:45.245339: Pseudo dice [0.3775, 0.0, 0.667, 0.0543, 0.6055, 0.1797, 0.843] +2026-04-09 17:44:45.248543: Epoch time: 100.88 s +2026-04-09 17:44:46.370853: +2026-04-09 17:44:46.373904: Epoch 893 +2026-04-09 17:44:46.376321: Current learning rate: 0.00134 +2026-04-09 17:46:28.246797: train_loss -0.5018 +2026-04-09 17:46:28.256473: val_loss -0.303 +2026-04-09 17:46:28.261878: Pseudo dice [0.833, 0.0, 0.5513, 0.0175, 0.6867, 0.0266, 0.7323] +2026-04-09 17:46:28.265099: Epoch time: 101.88 s +2026-04-09 17:46:29.384914: +2026-04-09 17:46:29.388995: Epoch 894 +2026-04-09 17:46:29.393696: Current learning rate: 0.00133 +2026-04-09 17:48:10.998344: train_loss -0.5058 +2026-04-09 17:48:11.006260: val_loss -0.3939 +2026-04-09 17:48:11.009834: Pseudo dice [0.7674, 0.0, 0.4587, 0.3475, 0.5561, 0.2935, 0.7541] +2026-04-09 17:48:11.012499: Epoch time: 101.62 s +2026-04-09 17:48:12.119609: +2026-04-09 17:48:12.128148: Epoch 895 +2026-04-09 17:48:12.130506: Current learning rate: 0.00132 +2026-04-09 17:49:53.956709: train_loss -0.4905 +2026-04-09 17:49:53.964754: val_loss -0.2126 +2026-04-09 17:49:53.967476: Pseudo dice [0.7786, 0.0, 0.6818, 0.741, 0.2494, 0.1084, 0.5485] +2026-04-09 17:49:53.970364: Epoch time: 101.84 s +2026-04-09 17:49:55.094319: +2026-04-09 17:49:55.096094: Epoch 896 +2026-04-09 17:49:55.097786: Current learning rate: 0.0013 +2026-04-09 17:51:37.240949: train_loss -0.499 +2026-04-09 17:51:37.248536: val_loss -0.3896 +2026-04-09 17:51:37.251379: Pseudo dice [0.5474, 0.0, 0.722, 0.6688, 0.2753, 0.1615, 0.6629] +2026-04-09 17:51:37.254539: Epoch time: 
102.15 s +2026-04-09 17:51:38.348011: +2026-04-09 17:51:38.350664: Epoch 897 +2026-04-09 17:51:38.353398: Current learning rate: 0.00129 +2026-04-09 17:53:19.081683: train_loss -0.48 +2026-04-09 17:53:19.089741: val_loss -0.3143 +2026-04-09 17:53:19.094265: Pseudo dice [0.8258, 0.0, 0.5313, 0.547, 0.4453, 0.3288, 0.7343] +2026-04-09 17:53:19.097233: Epoch time: 100.74 s +2026-04-09 17:53:20.201325: +2026-04-09 17:53:20.203882: Epoch 898 +2026-04-09 17:53:20.206353: Current learning rate: 0.00128 +2026-04-09 17:55:02.320035: train_loss -0.4917 +2026-04-09 17:55:02.328563: val_loss -0.3147 +2026-04-09 17:55:02.331289: Pseudo dice [0.4203, 0.0, 0.6883, 0.7453, 0.5403, 0.0644, 0.8109] +2026-04-09 17:55:02.334100: Epoch time: 102.12 s +2026-04-09 17:55:03.455638: +2026-04-09 17:55:03.458067: Epoch 899 +2026-04-09 17:55:03.460413: Current learning rate: 0.00127 +2026-04-09 17:56:44.503592: train_loss -0.4955 +2026-04-09 17:56:44.510308: val_loss -0.4758 +2026-04-09 17:56:44.512877: Pseudo dice [0.763, 0.0, 0.5407, 0.4849, 0.5542, 0.6819, 0.5964] +2026-04-09 17:56:44.515201: Epoch time: 101.05 s +2026-04-09 17:56:47.334609: +2026-04-09 17:56:47.337383: Epoch 900 +2026-04-09 17:56:47.339019: Current learning rate: 0.00126 +2026-04-09 17:58:29.327643: train_loss -0.5071 +2026-04-09 17:58:29.334691: val_loss -0.3719 +2026-04-09 17:58:29.336911: Pseudo dice [0.5468, 0.0, 0.645, 0.9249, 0.332, 0.1984, 0.2515] +2026-04-09 17:58:29.339734: Epoch time: 102.0 s +2026-04-09 17:58:30.450805: +2026-04-09 17:58:30.452976: Epoch 901 +2026-04-09 17:58:30.454583: Current learning rate: 0.00125 +2026-04-09 18:00:11.989019: train_loss -0.5046 +2026-04-09 18:00:11.996957: val_loss -0.1743 +2026-04-09 18:00:11.999563: Pseudo dice [0.8357, 0.0, 0.5865, 0.1757, 0.3858, 0.037, 0.7963] +2026-04-09 18:00:12.003215: Epoch time: 101.54 s +2026-04-09 18:00:13.123532: +2026-04-09 18:00:13.125722: Epoch 902 +2026-04-09 18:00:13.127727: Current learning rate: 0.00124 +2026-04-09 18:01:55.589833: 
train_loss -0.4921 +2026-04-09 18:01:55.601172: val_loss -0.4735 +2026-04-09 18:01:55.605613: Pseudo dice [0.8224, 0.0, 0.718, 0.8125, 0.4997, 0.6813, 0.8796] +2026-04-09 18:01:55.610702: Epoch time: 102.47 s +2026-04-09 18:01:56.723774: +2026-04-09 18:01:56.726829: Epoch 903 +2026-04-09 18:01:56.729490: Current learning rate: 0.00122 +2026-04-09 18:03:37.411634: train_loss -0.4937 +2026-04-09 18:03:37.418158: val_loss -0.4431 +2026-04-09 18:03:37.421862: Pseudo dice [0.2775, 0.0, 0.7064, 0.3956, 0.5526, 0.6521, 0.8562] +2026-04-09 18:03:37.424556: Epoch time: 100.69 s +2026-04-09 18:03:38.558606: +2026-04-09 18:03:38.560706: Epoch 904 +2026-04-09 18:03:38.562346: Current learning rate: 0.00121 +2026-04-09 18:05:20.483273: train_loss -0.5116 +2026-04-09 18:05:20.493792: val_loss -0.4431 +2026-04-09 18:05:20.497684: Pseudo dice [0.5729, 0.0, 0.7392, 0.8033, 0.6347, 0.7256, 0.8592] +2026-04-09 18:05:20.500891: Epoch time: 101.93 s +2026-04-09 18:05:21.608706: +2026-04-09 18:05:21.612167: Epoch 905 +2026-04-09 18:05:21.614363: Current learning rate: 0.0012 +2026-04-09 18:07:02.951537: train_loss -0.4887 +2026-04-09 18:07:02.958101: val_loss -0.2536 +2026-04-09 18:07:02.960447: Pseudo dice [0.7548, 0.0, 0.6361, 0.615, 0.4782, 0.0254, 0.854] +2026-04-09 18:07:02.963172: Epoch time: 101.35 s +2026-04-09 18:07:04.074011: +2026-04-09 18:07:04.076663: Epoch 906 +2026-04-09 18:07:04.079087: Current learning rate: 0.00119 +2026-04-09 18:08:46.237314: train_loss -0.4868 +2026-04-09 18:08:46.245448: val_loss -0.3468 +2026-04-09 18:08:46.247953: Pseudo dice [0.3379, 0.0, 0.6186, 0.6642, 0.5441, 0.1302, 0.8677] +2026-04-09 18:08:46.250848: Epoch time: 102.17 s +2026-04-09 18:08:47.362224: +2026-04-09 18:08:47.364476: Epoch 907 +2026-04-09 18:08:47.367000: Current learning rate: 0.00118 +2026-04-09 18:10:34.666524: train_loss -0.4992 +2026-04-09 18:10:34.680086: val_loss -0.4337 +2026-04-09 18:10:34.685648: Pseudo dice [0.2616, 0.0, 0.5612, 0.2649, 0.6394, 0.7541, 0.8534] 
+2026-04-09 18:10:34.693419: Epoch time: 107.31 s +2026-04-09 18:10:35.826380: +2026-04-09 18:10:35.831395: Epoch 908 +2026-04-09 18:10:35.835360: Current learning rate: 0.00117 +2026-04-09 18:12:21.553365: train_loss -0.5002 +2026-04-09 18:12:21.569160: val_loss -0.3555 +2026-04-09 18:12:21.574861: Pseudo dice [0.6554, 0.0, 0.7344, 0.2385, 0.4983, 0.1695, 0.8399] +2026-04-09 18:12:21.582834: Epoch time: 105.73 s +2026-04-09 18:12:22.727771: +2026-04-09 18:12:22.733698: Epoch 909 +2026-04-09 18:12:22.739289: Current learning rate: 0.00116 +2026-04-09 18:14:09.570992: train_loss -0.4997 +2026-04-09 18:14:09.593312: val_loss -0.2484 +2026-04-09 18:14:09.605341: Pseudo dice [0.5042, 0.0, 0.5214, 0.5449, 0.6762, 0.0409, 0.8628] +2026-04-09 18:14:09.619604: Epoch time: 106.85 s +2026-04-09 18:14:10.765071: +2026-04-09 18:14:10.771846: Epoch 910 +2026-04-09 18:14:10.784072: Current learning rate: 0.00115 +2026-04-09 18:15:57.857586: train_loss -0.5016 +2026-04-09 18:15:57.868924: val_loss -0.4503 +2026-04-09 18:15:57.878520: Pseudo dice [0.4122, 0.0, 0.689, 0.5054, 0.6987, 0.8252, 0.863] +2026-04-09 18:15:57.887052: Epoch time: 107.1 s +2026-04-09 18:15:59.010735: +2026-04-09 18:15:59.016427: Epoch 911 +2026-04-09 18:15:59.020818: Current learning rate: 0.00113 +2026-04-09 18:17:47.938558: train_loss -0.4926 +2026-04-09 18:17:47.954052: val_loss -0.4365 +2026-04-09 18:17:47.959774: Pseudo dice [0.5719, 0.0, 0.781, 0.4504, 0.4697, 0.279, 0.6466] +2026-04-09 18:17:47.966479: Epoch time: 108.93 s +2026-04-09 18:17:49.087682: +2026-04-09 18:17:49.092830: Epoch 912 +2026-04-09 18:17:49.097347: Current learning rate: 0.00112 +2026-04-09 18:19:33.748449: train_loss -0.5184 +2026-04-09 18:19:33.770235: val_loss -0.204 +2026-04-09 18:19:33.775378: Pseudo dice [0.6713, 0.0, 0.6324, 0.5931, 0.4461, 0.0872, 0.7805] +2026-04-09 18:19:33.782522: Epoch time: 104.66 s +2026-04-09 18:19:34.916529: +2026-04-09 18:19:34.921309: Epoch 913 +2026-04-09 18:19:34.926064: Current learning rate: 
0.00111 +2026-04-09 18:21:25.208238: train_loss -0.502 +2026-04-09 18:21:25.223966: val_loss -0.4332 +2026-04-09 18:21:25.231476: Pseudo dice [0.5823, 0.0, 0.7226, 0.5801, 0.4352, 0.7907, 0.5727] +2026-04-09 18:21:25.241535: Epoch time: 110.29 s +2026-04-09 18:21:26.366871: +2026-04-09 18:21:26.371086: Epoch 914 +2026-04-09 18:21:26.375245: Current learning rate: 0.0011 +2026-04-09 18:23:12.564575: train_loss -0.4942 +2026-04-09 18:23:12.580657: val_loss -0.3238 +2026-04-09 18:23:12.585296: Pseudo dice [0.3989, 0.0, 0.6678, 0.2121, 0.5392, 0.1704, 0.8573] +2026-04-09 18:23:12.591403: Epoch time: 106.2 s +2026-04-09 18:23:13.694165: +2026-04-09 18:23:13.698342: Epoch 915 +2026-04-09 18:23:13.702582: Current learning rate: 0.00109 +2026-04-09 18:25:01.446446: train_loss -0.4995 +2026-04-09 18:25:01.460344: val_loss -0.3247 +2026-04-09 18:25:01.469186: Pseudo dice [0.783, 0.0, 0.8281, 0.1408, 0.588, 0.0749, 0.7812] +2026-04-09 18:25:01.476786: Epoch time: 107.76 s +2026-04-09 18:25:02.604465: +2026-04-09 18:25:02.609918: Epoch 916 +2026-04-09 18:25:02.614117: Current learning rate: 0.00108 +2026-04-09 18:26:53.345696: train_loss -0.5074 +2026-04-09 18:26:53.361923: val_loss -0.4025 +2026-04-09 18:26:53.368154: Pseudo dice [0.5453, 0.0, 0.8092, 0.3919, 0.5901, 0.0939, 0.8251] +2026-04-09 18:26:53.376169: Epoch time: 110.74 s +2026-04-09 18:26:54.484734: +2026-04-09 18:26:54.491253: Epoch 917 +2026-04-09 18:26:54.497240: Current learning rate: 0.00106 +2026-04-09 18:28:44.727978: train_loss -0.5006 +2026-04-09 18:28:44.743835: val_loss -0.2937 +2026-04-09 18:28:44.748044: Pseudo dice [0.6742, 0.0, 0.6549, 0.7077, 0.5356, 0.0726, 0.8859] +2026-04-09 18:28:44.753535: Epoch time: 110.25 s +2026-04-09 18:28:45.896240: +2026-04-09 18:28:45.900707: Epoch 918 +2026-04-09 18:28:45.911736: Current learning rate: 0.00105 +2026-04-09 18:30:37.815596: train_loss -0.5067 +2026-04-09 18:30:37.835520: val_loss -0.4365 +2026-04-09 18:30:37.841639: Pseudo dice [0.2835, 0.0, 0.6062, 
0.1424, 0.527, 0.8426, 0.7194] +2026-04-09 18:30:37.848433: Epoch time: 111.92 s +2026-04-09 18:30:38.975925: +2026-04-09 18:30:38.980020: Epoch 919 +2026-04-09 18:30:38.986415: Current learning rate: 0.00104 +2026-04-09 18:32:27.462254: train_loss -0.5053 +2026-04-09 18:32:27.480553: val_loss -0.4447 +2026-04-09 18:32:27.488111: Pseudo dice [0.5742, 0.0, 0.6689, 0.0129, 0.6137, 0.7397, 0.7524] +2026-04-09 18:32:27.497946: Epoch time: 108.49 s +2026-04-09 18:32:28.642214: +2026-04-09 18:32:28.654171: Epoch 920 +2026-04-09 18:32:28.658867: Current learning rate: 0.00103 +2026-04-09 18:34:13.032402: train_loss -0.5025 +2026-04-09 18:34:13.046082: val_loss -0.3365 +2026-04-09 18:34:13.050584: Pseudo dice [0.5941, 0.0, 0.7723, 0.2177, 0.6781, 0.1789, 0.7137] +2026-04-09 18:34:13.056331: Epoch time: 104.39 s +2026-04-09 18:34:14.176396: +2026-04-09 18:34:14.183434: Epoch 921 +2026-04-09 18:34:14.189872: Current learning rate: 0.00102 +2026-04-09 18:35:59.172409: train_loss -0.5144 +2026-04-09 18:35:59.191754: val_loss -0.4766 +2026-04-09 18:35:59.196572: Pseudo dice [0.3738, 0.0, 0.6925, 0.8781, 0.6539, 0.7824, 0.8487] +2026-04-09 18:35:59.203886: Epoch time: 105.0 s +2026-04-09 18:36:00.328691: +2026-04-09 18:36:00.333413: Epoch 922 +2026-04-09 18:36:00.337338: Current learning rate: 0.00101 +2026-04-09 18:37:46.259101: train_loss -0.5057 +2026-04-09 18:37:46.271575: val_loss -0.3685 +2026-04-09 18:37:46.277573: Pseudo dice [0.3893, 0.0, 0.8441, 0.0729, 0.5939, 0.1994, 0.8393] +2026-04-09 18:37:46.282930: Epoch time: 105.93 s +2026-04-09 18:37:47.417286: +2026-04-09 18:37:47.421528: Epoch 923 +2026-04-09 18:37:47.425023: Current learning rate: 0.001 +2026-04-09 18:39:32.075489: train_loss -0.514 +2026-04-09 18:39:32.092328: val_loss -0.3739 +2026-04-09 18:39:32.095897: Pseudo dice [0.7735, 0.0, 0.6061, 0.4222, 0.6661, 0.2487, 0.5936] +2026-04-09 18:39:32.098898: Epoch time: 104.66 s +2026-04-09 18:39:33.226180: +2026-04-09 18:39:33.229335: Epoch 924 +2026-04-09 
18:39:33.233536: Current learning rate: 0.00098 +2026-04-09 18:41:18.269075: train_loss -0.5007 +2026-04-09 18:41:18.285843: val_loss -0.4596 +2026-04-09 18:41:18.290066: Pseudo dice [0.6161, 0.0, 0.6031, 0.7163, 0.5212, 0.7799, 0.8459] +2026-04-09 18:41:18.294914: Epoch time: 105.05 s +2026-04-09 18:41:19.427207: +2026-04-09 18:41:19.431798: Epoch 925 +2026-04-09 18:41:19.438093: Current learning rate: 0.00097 +2026-04-09 18:43:12.350839: train_loss -0.5054 +2026-04-09 18:43:12.386757: val_loss -0.4463 +2026-04-09 18:43:12.396658: Pseudo dice [0.2679, 0.0, 0.776, 0.0672, 0.603, 0.7623, 0.7784] +2026-04-09 18:43:12.407372: Epoch time: 112.93 s +2026-04-09 18:43:13.527307: +2026-04-09 18:43:13.539195: Epoch 926 +2026-04-09 18:43:13.548167: Current learning rate: 0.00096 +2026-04-09 18:45:04.646875: train_loss -0.5184 +2026-04-09 18:45:04.680238: val_loss -0.3625 +2026-04-09 18:45:04.697127: Pseudo dice [0.6048, 0.0, 0.5614, 0.756, 0.4563, 0.2003, 0.793] +2026-04-09 18:45:04.712720: Epoch time: 111.12 s +2026-04-09 18:45:05.869371: +2026-04-09 18:45:05.881633: Epoch 927 +2026-04-09 18:45:05.894005: Current learning rate: 0.00095 +2026-04-09 18:47:07.628417: train_loss -0.4939 +2026-04-09 18:47:07.652541: val_loss -0.2996 +2026-04-09 18:47:07.662264: Pseudo dice [0.6997, 0.0, 0.7355, 0.0003, 0.713, 0.1454, 0.8785] +2026-04-09 18:47:07.673050: Epoch time: 121.76 s +2026-04-09 18:47:08.820853: +2026-04-09 18:47:08.830809: Epoch 928 +2026-04-09 18:47:08.838042: Current learning rate: 0.00094 +2026-04-09 18:49:08.639550: train_loss -0.4908 +2026-04-09 18:49:08.657960: val_loss -0.4599 +2026-04-09 18:49:08.666142: Pseudo dice [0.3988, 0.0, 0.8419, 0.2548, 0.261, 0.8499, 0.6104] +2026-04-09 18:49:08.673957: Epoch time: 119.82 s +2026-04-09 18:49:09.804585: +2026-04-09 18:49:09.813806: Epoch 929 +2026-04-09 18:49:09.821629: Current learning rate: 0.00092 +2026-04-09 18:51:06.890838: train_loss -0.5151 +2026-04-09 18:51:06.910479: val_loss -0.4494 +2026-04-09 18:51:06.917641: 
Pseudo dice [0.413, 0.0, 0.6904, 0.5695, 0.6937, 0.6205, 0.8319] +2026-04-09 18:51:06.925819: Epoch time: 117.09 s +2026-04-09 18:51:08.066791: +2026-04-09 18:51:08.077207: Epoch 930 +2026-04-09 18:51:08.084772: Current learning rate: 0.00091 +2026-04-09 18:53:02.875418: train_loss -0.491 +2026-04-09 18:53:02.896516: val_loss -0.4245 +2026-04-09 18:53:02.903131: Pseudo dice [0.8108, 0.0, 0.5004, 0.397, 0.418, 0.3783, 0.4913] +2026-04-09 18:53:02.911226: Epoch time: 114.81 s +2026-04-09 18:53:04.055879: +2026-04-09 18:53:04.061416: Epoch 931 +2026-04-09 18:53:04.068150: Current learning rate: 0.0009 +2026-04-09 18:54:48.634769: train_loss -0.4957 +2026-04-09 18:54:48.650338: val_loss -0.4373 +2026-04-09 18:54:48.655994: Pseudo dice [0.4097, 0.0, 0.6967, 0.415, 0.4513, 0.1522, 0.8382] +2026-04-09 18:54:48.665027: Epoch time: 104.58 s +2026-04-09 18:54:49.780569: +2026-04-09 18:54:49.789526: Epoch 932 +2026-04-09 18:54:49.794325: Current learning rate: 0.00089 +2026-04-09 18:56:37.516285: train_loss -0.5036 +2026-04-09 18:56:37.534454: val_loss -0.4136 +2026-04-09 18:56:37.540760: Pseudo dice [0.2425, 0.0, 0.7353, 0.3498, 0.5892, 0.6562, 0.7118] +2026-04-09 18:56:37.546705: Epoch time: 107.74 s +2026-04-09 18:56:38.697903: +2026-04-09 18:56:38.702960: Epoch 933 +2026-04-09 18:56:38.711023: Current learning rate: 0.00088 +2026-04-09 18:58:24.783341: train_loss -0.5048 +2026-04-09 18:58:24.798436: val_loss -0.4438 +2026-04-09 18:58:24.803951: Pseudo dice [0.4077, 0.0, 0.6668, 0.4448, 0.6149, 0.1635, 0.8237] +2026-04-09 18:58:24.814003: Epoch time: 106.09 s +2026-04-09 18:58:25.946722: +2026-04-09 18:58:25.954967: Epoch 934 +2026-04-09 18:58:25.961446: Current learning rate: 0.00087 +2026-04-09 19:00:12.086085: train_loss -0.5107 +2026-04-09 19:00:12.098358: val_loss -0.3368 +2026-04-09 19:00:12.102467: Pseudo dice [0.3006, 0.0, 0.7627, 0.4672, 0.6576, 0.1282, 0.8572] +2026-04-09 19:00:12.109840: Epoch time: 106.14 s +2026-04-09 19:00:13.231840: +2026-04-09 
19:00:13.237617: Epoch 935 +2026-04-09 19:00:13.242674: Current learning rate: 0.00085 +2026-04-09 19:02:03.142947: train_loss -0.5213 +2026-04-09 19:02:03.160429: val_loss -0.2976 +2026-04-09 19:02:03.167853: Pseudo dice [0.8263, 0.0, 0.7966, 0.7796, 0.5196, 0.033, 0.4195] +2026-04-09 19:02:03.172566: Epoch time: 109.91 s +2026-04-09 19:02:04.294990: +2026-04-09 19:02:04.301335: Epoch 936 +2026-04-09 19:02:04.307567: Current learning rate: 0.00084 +2026-04-09 19:03:49.810467: train_loss -0.5043 +2026-04-09 19:03:49.819078: val_loss -0.401 +2026-04-09 19:03:49.823364: Pseudo dice [0.4347, 0.0, 0.581, 0.6848, 0.292, 0.1939, 0.3721] +2026-04-09 19:03:49.829343: Epoch time: 105.52 s +2026-04-09 19:03:50.946115: +2026-04-09 19:03:50.951722: Epoch 937 +2026-04-09 19:03:50.954399: Current learning rate: 0.00083 +2026-04-09 19:05:37.946993: train_loss -0.5123 +2026-04-09 19:05:37.961210: val_loss -0.3447 +2026-04-09 19:05:37.966329: Pseudo dice [0.148, 0.0, 0.7496, 0.6105, 0.5097, 0.1536, 0.7714] +2026-04-09 19:05:37.974320: Epoch time: 107.0 s +2026-04-09 19:05:39.102084: +2026-04-09 19:05:39.106509: Epoch 938 +2026-04-09 19:05:39.110160: Current learning rate: 0.00082 +2026-04-09 19:07:23.373527: train_loss -0.5102 +2026-04-09 19:07:23.386344: val_loss -0.3816 +2026-04-09 19:07:23.389831: Pseudo dice [0.6807, 0.0, 0.6, 0.72, 0.6981, 0.0743, 0.7649] +2026-04-09 19:07:23.393224: Epoch time: 104.27 s +2026-04-09 19:07:24.540649: +2026-04-09 19:07:24.544338: Epoch 939 +2026-04-09 19:07:24.551882: Current learning rate: 0.00081 +2026-04-09 19:09:14.632749: train_loss -0.5067 +2026-04-09 19:09:14.648400: val_loss -0.3786 +2026-04-09 19:09:14.655190: Pseudo dice [0.6279, 0.0, 0.6944, 0.7424, 0.5619, 0.0765, 0.8115] +2026-04-09 19:09:14.659890: Epoch time: 110.1 s +2026-04-09 19:09:15.782270: +2026-04-09 19:09:15.785807: Epoch 940 +2026-04-09 19:09:15.789393: Current learning rate: 0.00079 +2026-04-09 19:11:17.296802: train_loss -0.4974 +2026-04-09 19:11:17.307840: val_loss 
-0.4516 +2026-04-09 19:11:17.312262: Pseudo dice [0.3991, 0.0, 0.5778, 0.8945, 0.5064, 0.8274, 0.4904] +2026-04-09 19:11:17.321549: Epoch time: 121.52 s +2026-04-09 19:11:18.655209: +2026-04-09 19:11:18.659777: Epoch 941 +2026-04-09 19:11:18.663725: Current learning rate: 0.00078 +2026-04-09 19:13:07.426556: train_loss -0.5041 +2026-04-09 19:13:07.445803: val_loss -0.4281 +2026-04-09 19:13:07.451805: Pseudo dice [0.7509, 0.0, 0.6937, 0.4509, 0.422, 0.7553, 0.6467] +2026-04-09 19:13:07.457801: Epoch time: 108.77 s +2026-04-09 19:13:09.565988: +2026-04-09 19:13:09.571093: Epoch 942 +2026-04-09 19:13:09.576245: Current learning rate: 0.00077 +2026-04-09 19:14:58.177962: train_loss -0.5113 +2026-04-09 19:14:58.195518: val_loss -0.4439 +2026-04-09 19:14:58.202135: Pseudo dice [0.7505, 0.0, 0.6873, 0.3428, 0.4644, 0.8483, 0.3161] +2026-04-09 19:14:58.210352: Epoch time: 108.62 s +2026-04-09 19:14:59.344226: +2026-04-09 19:14:59.350635: Epoch 943 +2026-04-09 19:14:59.359638: Current learning rate: 0.00076 +2026-04-09 19:16:45.761340: train_loss -0.5137 +2026-04-09 19:16:45.772461: val_loss -0.4554 +2026-04-09 19:16:45.776274: Pseudo dice [0.5445, 0.0, 0.706, 0.1252, 0.5538, 0.7741, 0.7193] +2026-04-09 19:16:45.779081: Epoch time: 106.42 s +2026-04-09 19:16:46.906600: +2026-04-09 19:16:46.911130: Epoch 944 +2026-04-09 19:16:46.916919: Current learning rate: 0.00075 +2026-04-09 19:18:33.735201: train_loss -0.5096 +2026-04-09 19:18:33.747792: val_loss -0.4502 +2026-04-09 19:18:33.751944: Pseudo dice [0.3119, 0.0, 0.7458, 0.107, 0.6103, 0.8064, 0.88] +2026-04-09 19:18:33.771291: Epoch time: 106.83 s +2026-04-09 19:18:34.908354: +2026-04-09 19:18:34.911590: Epoch 945 +2026-04-09 19:18:34.915647: Current learning rate: 0.00074 +2026-04-09 19:20:29.068737: train_loss -0.5041 +2026-04-09 19:20:29.083311: val_loss -0.4473 +2026-04-09 19:20:29.102895: Pseudo dice [0.7337, 0.0, 0.6307, 0.8353, 0.4022, 0.81, 0.664] +2026-04-09 19:20:29.106649: Epoch time: 114.16 s +2026-04-09 
19:20:30.227638: +2026-04-09 19:20:30.236189: Epoch 946 +2026-04-09 19:20:30.241844: Current learning rate: 0.00072 +2026-04-09 19:22:18.660608: train_loss -0.5171 +2026-04-09 19:22:18.679639: val_loss -0.349 +2026-04-09 19:22:18.686068: Pseudo dice [0.809, 0.0, 0.7176, 0.9293, 0.5533, 0.1554, 0.8775] +2026-04-09 19:22:18.692094: Epoch time: 108.44 s +2026-04-09 19:22:19.832826: +2026-04-09 19:22:19.837862: Epoch 947 +2026-04-09 19:22:19.843487: Current learning rate: 0.00071 +2026-04-09 19:24:07.624979: train_loss -0.5041 +2026-04-09 19:24:07.639499: val_loss -0.3756 +2026-04-09 19:24:07.643335: Pseudo dice [0.8607, 0.0, 0.6104, 0.7613, 0.4945, 0.0644, 0.7456] +2026-04-09 19:24:07.651423: Epoch time: 107.8 s +2026-04-09 19:24:08.771203: +2026-04-09 19:24:08.776378: Epoch 948 +2026-04-09 19:24:08.781013: Current learning rate: 0.0007 +2026-04-09 19:25:55.064069: train_loss -0.5142 +2026-04-09 19:25:55.078975: val_loss -0.4294 +2026-04-09 19:25:55.087148: Pseudo dice [0.8079, 0.0, 0.7731, 0.4552, 0.2264, 0.8485, 0.5454] +2026-04-09 19:25:55.095398: Epoch time: 106.3 s +2026-04-09 19:25:56.219713: +2026-04-09 19:25:56.226397: Epoch 949 +2026-04-09 19:25:56.234296: Current learning rate: 0.00069 +2026-04-09 19:27:50.793609: train_loss -0.5146 +2026-04-09 19:27:50.816830: val_loss -0.4769 +2026-04-09 19:27:50.828333: Pseudo dice [0.7984, 0.0, 0.7605, 0.1317, 0.5222, 0.7913, 0.6705] +2026-04-09 19:27:50.836359: Epoch time: 114.58 s +2026-04-09 19:27:54.361525: +2026-04-09 19:27:54.369160: Epoch 950 +2026-04-09 19:27:54.376428: Current learning rate: 0.00067 +2026-04-09 19:29:51.458079: train_loss -0.5077 +2026-04-09 19:29:51.476618: val_loss -0.4784 +2026-04-09 19:29:51.484243: Pseudo dice [0.3184, 0.0, 0.7601, 0.4331, 0.7229, 0.7902, 0.8828] +2026-04-09 19:29:51.493522: Epoch time: 117.1 s +2026-04-09 19:29:52.635556: +2026-04-09 19:29:52.641806: Epoch 951 +2026-04-09 19:29:52.646932: Current learning rate: 0.00066 +2026-04-09 19:31:54.569997: train_loss -0.5017 
+2026-04-09 19:31:54.585232: val_loss -0.4786 +2026-04-09 19:31:54.591572: Pseudo dice [0.8381, 0.0, 0.7027, 0.5953, 0.6365, 0.8418, 0.7913] +2026-04-09 19:31:54.598325: Epoch time: 121.94 s +2026-04-09 19:31:55.733391: +2026-04-09 19:31:55.741107: Epoch 952 +2026-04-09 19:31:55.747485: Current learning rate: 0.00065 +2026-04-09 19:33:59.721745: train_loss -0.5182 +2026-04-09 19:33:59.740168: val_loss -0.4237 +2026-04-09 19:33:59.747447: Pseudo dice [0.4057, 0.0, 0.7747, 0.0262, 0.666, 0.4175, 0.8891] +2026-04-09 19:33:59.755496: Epoch time: 123.99 s +2026-04-09 19:34:00.890717: +2026-04-09 19:34:00.895386: Epoch 953 +2026-04-09 19:34:00.901184: Current learning rate: 0.00064 +2026-04-09 19:36:07.275493: train_loss -0.5058 +2026-04-09 19:36:07.304168: val_loss -0.3865 +2026-04-09 19:36:07.314608: Pseudo dice [0.6556, 0.0, 0.4924, 0.9015, 0.5563, 0.1088, 0.9339] +2026-04-09 19:36:07.325579: Epoch time: 126.39 s +2026-04-09 19:36:08.458284: +2026-04-09 19:36:08.465081: Epoch 954 +2026-04-09 19:36:08.470155: Current learning rate: 0.00063 +2026-04-09 19:38:07.751206: train_loss -0.5218 +2026-04-09 19:38:07.770575: val_loss -0.45 +2026-04-09 19:38:07.776613: Pseudo dice [0.3822, 0.0, 0.708, 0.4166, 0.6717, 0.6588, 0.8598] +2026-04-09 19:38:07.782014: Epoch time: 119.3 s +2026-04-09 19:38:08.925726: +2026-04-09 19:38:08.932745: Epoch 955 +2026-04-09 19:38:08.937597: Current learning rate: 0.00061 +2026-04-09 19:40:08.245945: train_loss -0.5096 +2026-04-09 19:40:08.267158: val_loss -0.3312 +2026-04-09 19:40:08.272350: Pseudo dice [0.4412, 0.0, 0.585, 0.3684, 0.3824, 0.154, 0.5923] +2026-04-09 19:40:08.280040: Epoch time: 119.32 s +2026-04-09 19:40:09.430496: +2026-04-09 19:40:09.434850: Epoch 956 +2026-04-09 19:40:09.439860: Current learning rate: 0.0006 +2026-04-09 19:42:08.521884: train_loss -0.5194 +2026-04-09 19:42:08.538928: val_loss -0.4058 +2026-04-09 19:42:08.542976: Pseudo dice [0.5296, 0.0, 0.8131, 0.8291, 0.7563, 0.1119, 0.8161] +2026-04-09 19:42:08.548083: 
Epoch time: 119.09 s +2026-04-09 19:42:09.698531: +2026-04-09 19:42:09.704478: Epoch 957 +2026-04-09 19:42:09.710101: Current learning rate: 0.00059 +2026-04-09 19:43:56.547297: train_loss -0.5081 +2026-04-09 19:43:56.558627: val_loss -0.4577 +2026-04-09 19:43:56.563935: Pseudo dice [0.5617, 0.0, 0.7543, 0.397, 0.5958, 0.8169, 0.9061] +2026-04-09 19:43:56.567359: Epoch time: 106.85 s +2026-04-09 19:43:57.701102: +2026-04-09 19:43:57.705794: Epoch 958 +2026-04-09 19:43:57.710168: Current learning rate: 0.00058 +2026-04-09 19:45:59.661407: train_loss -0.5145 +2026-04-09 19:45:59.681047: val_loss -0.2358 +2026-04-09 19:45:59.686009: Pseudo dice [0.4943, 0.0, 0.5834, 0.0537, 0.6843, 0.1299, 0.4317] +2026-04-09 19:45:59.691941: Epoch time: 121.96 s +2026-04-09 19:46:00.909730: +2026-04-09 19:46:00.919025: Epoch 959 +2026-04-09 19:46:00.925211: Current learning rate: 0.00056 +2026-04-09 19:49:50.685998: train_loss -0.4998 +2026-04-09 19:49:50.696800: val_loss -0.4123 +2026-04-09 19:49:50.701071: Pseudo dice [0.4865, 0.0, 0.6779, 0.2947, 0.6465, 0.6666, 0.7324] +2026-04-09 19:49:50.705668: Epoch time: 229.78 s +2026-04-09 19:49:51.822495: +2026-04-09 19:49:51.826084: Epoch 960 +2026-04-09 19:49:51.830576: Current learning rate: 0.00055 +2026-04-09 19:51:50.497977: train_loss -0.5174 +2026-04-09 19:51:50.508859: val_loss -0.3958 +2026-04-09 19:51:50.515340: Pseudo dice [0.448, 0.0, 0.7784, 0.006, 0.606, 0.3701, 0.8651] +2026-04-09 19:51:50.522330: Epoch time: 118.68 s +2026-04-09 19:51:51.681098: +2026-04-09 19:51:51.684174: Epoch 961 +2026-04-09 19:51:51.687016: Current learning rate: 0.00054 +2026-04-09 19:53:41.820889: train_loss -0.5168 +2026-04-09 19:53:41.834916: val_loss -0.4659 +2026-04-09 19:53:41.840006: Pseudo dice [0.3699, 0.0, 0.7196, 0.1226, 0.5915, 0.8695, 0.7739] +2026-04-09 19:53:41.845228: Epoch time: 110.14 s +2026-04-09 19:53:42.994804: +2026-04-09 19:53:43.002575: Epoch 962 +2026-04-09 19:53:43.006571: Current learning rate: 0.00053 +2026-04-09 
19:55:29.734101: train_loss -0.5268 +2026-04-09 19:55:29.746681: val_loss -0.3691 +2026-04-09 19:55:29.753268: Pseudo dice [0.6324, 0.0, 0.6299, 0.3833, 0.6041, 0.227, 0.8738] +2026-04-09 19:55:29.759125: Epoch time: 106.74 s +2026-04-09 19:55:30.917016: +2026-04-09 19:55:30.924471: Epoch 963 +2026-04-09 19:55:30.929324: Current learning rate: 0.00051 +2026-04-09 19:57:19.229202: train_loss -0.5098 +2026-04-09 19:57:19.244460: val_loss -0.2292 +2026-04-09 19:57:19.249066: Pseudo dice [0.4752, 0.0, 0.6611, 0.0099, 0.5925, 0.2746, 0.5927] +2026-04-09 19:57:19.254222: Epoch time: 108.32 s +2026-04-09 19:57:20.418774: +2026-04-09 19:57:20.428122: Epoch 964 +2026-04-09 19:57:20.434839: Current learning rate: 0.0005 +2026-04-09 19:59:05.212614: train_loss -0.5178 +2026-04-09 19:59:05.229087: val_loss -0.3312 +2026-04-09 19:59:05.232670: Pseudo dice [0.3614, 0.0, 0.7919, 0.1051, 0.7448, 0.1528, 0.7295] +2026-04-09 19:59:05.236663: Epoch time: 104.8 s +2026-04-09 19:59:06.382212: +2026-04-09 19:59:06.387913: Epoch 965 +2026-04-09 19:59:06.391893: Current learning rate: 0.00049 +2026-04-09 20:00:52.244112: train_loss -0.5261 +2026-04-09 20:00:52.261398: val_loss -0.483 +2026-04-09 20:00:52.266026: Pseudo dice [0.5902, 0.0, 0.7185, 0.6689, 0.6172, 0.8298, 0.8741] +2026-04-09 20:00:52.276189: Epoch time: 105.87 s +2026-04-09 20:00:53.424345: +2026-04-09 20:00:53.429247: Epoch 966 +2026-04-09 20:00:53.433044: Current learning rate: 0.00048 +2026-04-09 20:02:39.072270: train_loss -0.5039 +2026-04-09 20:02:39.084011: val_loss -0.3902 +2026-04-09 20:02:39.086976: Pseudo dice [0.8081, 0.0, 0.7029, 0.2353, 0.4658, 0.131, 0.5398] +2026-04-09 20:02:39.090310: Epoch time: 105.65 s +2026-04-09 20:02:40.240189: +2026-04-09 20:02:40.243525: Epoch 967 +2026-04-09 20:02:40.245762: Current learning rate: 0.00046 +2026-04-09 20:04:32.027470: train_loss -0.5115 +2026-04-09 20:04:32.044917: val_loss -0.4739 +2026-04-09 20:04:32.052068: Pseudo dice [0.5718, 0.0, 0.6447, 0.6737, 0.6002, 0.7901, 
0.8822] +2026-04-09 20:04:32.058429: Epoch time: 111.79 s +2026-04-09 20:04:33.214784: +2026-04-09 20:04:33.219108: Epoch 968 +2026-04-09 20:04:33.226003: Current learning rate: 0.00045 +2026-04-09 20:06:23.436210: train_loss -0.5188 +2026-04-09 20:06:23.455948: val_loss -0.4758 +2026-04-09 20:06:23.463496: Pseudo dice [0.8396, 0.0, 0.7395, 0.7853, 0.6817, 0.7615, 0.8385] +2026-04-09 20:06:23.469892: Epoch time: 110.22 s +2026-04-09 20:06:24.658834: +2026-04-09 20:06:24.662594: Epoch 969 +2026-04-09 20:06:24.670146: Current learning rate: 0.00044 +2026-04-09 20:08:09.608144: train_loss -0.5122 +2026-04-09 20:08:09.621131: val_loss -0.084 +2026-04-09 20:08:09.625909: Pseudo dice [0.6789, 0.0, 0.6512, 0.157, 0.5854, 0.0245, 0.8942] +2026-04-09 20:08:09.630886: Epoch time: 104.95 s +2026-04-09 20:08:10.775239: +2026-04-09 20:08:10.778407: Epoch 970 +2026-04-09 20:08:10.782649: Current learning rate: 0.00043 +2026-04-09 20:09:58.545812: train_loss -0.5177 +2026-04-09 20:09:58.564824: val_loss -0.4591 +2026-04-09 20:09:58.569896: Pseudo dice [0.8361, 0.0, 0.7009, 0.3646, 0.3056, 0.8881, 0.6228] +2026-04-09 20:09:58.577887: Epoch time: 107.77 s +2026-04-09 20:09:59.717250: +2026-04-09 20:09:59.725239: Epoch 971 +2026-04-09 20:09:59.732485: Current learning rate: 0.00041 +2026-04-09 20:11:46.992338: train_loss -0.5171 +2026-04-09 20:11:47.007492: val_loss -0.4414 +2026-04-09 20:11:47.013172: Pseudo dice [0.6739, 0.0, 0.6932, 0.6349, 0.6453, 0.2473, 0.8734] +2026-04-09 20:11:47.019742: Epoch time: 107.28 s +2026-04-09 20:11:48.169183: +2026-04-09 20:11:48.174386: Epoch 972 +2026-04-09 20:11:48.178886: Current learning rate: 0.0004 +2026-04-09 20:13:44.394028: train_loss -0.5095 +2026-04-09 20:13:44.410824: val_loss -0.4348 +2026-04-09 20:13:44.415963: Pseudo dice [0.6331, 0.0, 0.7254, 0.0025, 0.6851, 0.1808, 0.8867] +2026-04-09 20:13:44.422712: Epoch time: 116.23 s +2026-04-09 20:13:45.618779: +2026-04-09 20:13:45.623259: Epoch 973 +2026-04-09 20:13:45.628376: Current 
learning rate: 0.00039 +2026-04-09 20:15:30.769719: train_loss -0.5155 +2026-04-09 20:15:30.784781: val_loss -0.4368 +2026-04-09 20:15:30.790482: Pseudo dice [0.6211, 0.0, 0.7475, 0.5791, 0.5127, 0.279, 0.9131] +2026-04-09 20:15:30.796601: Epoch time: 105.15 s +2026-04-09 20:15:31.957071: +2026-04-09 20:15:31.961732: Epoch 974 +2026-04-09 20:15:31.966506: Current learning rate: 0.00037 +2026-04-09 20:17:23.960530: train_loss -0.5148 +2026-04-09 20:17:23.976279: val_loss -0.4588 +2026-04-09 20:17:23.982320: Pseudo dice [0.728, 0.0, 0.7346, 0.877, 0.505, 0.815, 0.8658] +2026-04-09 20:17:23.987909: Epoch time: 112.01 s +2026-04-09 20:17:25.127990: +2026-04-09 20:17:25.132329: Epoch 975 +2026-04-09 20:17:25.137382: Current learning rate: 0.00036 +2026-04-09 20:19:21.157259: train_loss -0.5215 +2026-04-09 20:19:21.183440: val_loss -0.4337 +2026-04-09 20:19:21.190716: Pseudo dice [0.5108, 0.0, 0.6837, 0.7228, 0.5716, 0.3712, 0.8624] +2026-04-09 20:19:21.198705: Epoch time: 116.03 s +2026-04-09 20:19:22.438136: +2026-04-09 20:19:22.444549: Epoch 976 +2026-04-09 20:19:22.451116: Current learning rate: 0.00035 +2026-04-09 20:21:13.865267: train_loss -0.5115 +2026-04-09 20:21:13.878465: val_loss -0.4 +2026-04-09 20:21:13.886268: Pseudo dice [0.4546, 0.0, 0.6464, 0.4907, 0.617, 0.3821, 0.624] +2026-04-09 20:21:13.892960: Epoch time: 111.43 s +2026-04-09 20:21:15.040199: +2026-04-09 20:21:15.043934: Epoch 977 +2026-04-09 20:21:15.049686: Current learning rate: 0.00034 +2026-04-09 20:23:02.084925: train_loss -0.5215 +2026-04-09 20:23:02.098424: val_loss -0.4836 +2026-04-09 20:23:02.104719: Pseudo dice [0.5099, 0.0, 0.7243, 0.7745, 0.6607, 0.7956, 0.8181] +2026-04-09 20:23:02.110573: Epoch time: 107.05 s +2026-04-09 20:23:03.252784: +2026-04-09 20:23:03.256384: Epoch 978 +2026-04-09 20:23:03.261084: Current learning rate: 0.00032 +2026-04-09 20:24:49.378294: train_loss -0.5201 +2026-04-09 20:24:49.392095: val_loss -0.1631 +2026-04-09 20:24:49.397438: Pseudo dice [0.2733, 0.0, 
0.6727, 0.6829, 0.4914, 0.1136, 0.7316] +2026-04-09 20:24:49.403816: Epoch time: 106.13 s +2026-04-09 20:24:50.561071: +2026-04-09 20:24:50.566877: Epoch 979 +2026-04-09 20:24:50.571354: Current learning rate: 0.00031 +2026-04-09 20:26:34.708859: train_loss -0.5122 +2026-04-09 20:26:34.718301: val_loss -0.4019 +2026-04-09 20:26:34.721755: Pseudo dice [0.7702, 0.0, 0.7329, 0.6054, 0.6076, 0.2723, 0.6676] +2026-04-09 20:26:34.726069: Epoch time: 104.15 s +2026-04-09 20:26:35.861605: +2026-04-09 20:26:35.864433: Epoch 980 +2026-04-09 20:26:35.866917: Current learning rate: 0.0003 +2026-04-09 20:28:20.720618: train_loss -0.5076 +2026-04-09 20:28:20.732763: val_loss -0.3717 +2026-04-09 20:28:20.738001: Pseudo dice [0.8319, 0.0, 0.5065, 0.646, 0.4062, 0.2179, 0.7299] +2026-04-09 20:28:20.741412: Epoch time: 104.86 s +2026-04-09 20:28:22.801904: +2026-04-09 20:28:22.804728: Epoch 981 +2026-04-09 20:28:22.806867: Current learning rate: 0.00028 +2026-04-09 20:30:06.831150: train_loss -0.5125 +2026-04-09 20:30:06.843263: val_loss -0.4703 +2026-04-09 20:30:06.847091: Pseudo dice [0.5324, 0.0, 0.8061, 0.8009, 0.6525, 0.144, 0.8291] +2026-04-09 20:30:06.852358: Epoch time: 104.03 s +2026-04-09 20:30:07.993993: +2026-04-09 20:30:07.999180: Epoch 982 +2026-04-09 20:30:08.005292: Current learning rate: 0.00027 +2026-04-09 20:31:51.563131: train_loss -0.5114 +2026-04-09 20:31:51.576021: val_loss -0.4964 +2026-04-09 20:31:51.586117: Pseudo dice [0.3838, 0.0, 0.8097, 0.6814, 0.4943, 0.8277, 0.8891] +2026-04-09 20:31:51.590127: Epoch time: 103.57 s +2026-04-09 20:31:52.764349: +2026-04-09 20:31:52.769672: Epoch 983 +2026-04-09 20:31:52.777248: Current learning rate: 0.00026 +2026-04-09 20:33:35.396728: train_loss -0.5305 +2026-04-09 20:33:35.404413: val_loss -0.3145 +2026-04-09 20:33:35.409645: Pseudo dice [0.6566, 0.0, 0.7451, 0.0499, 0.5369, 0.1234, 0.6322] +2026-04-09 20:33:35.413141: Epoch time: 102.64 s +2026-04-09 20:33:36.583019: +2026-04-09 20:33:36.587336: Epoch 984 
+2026-04-09 20:33:36.591686: Current learning rate: 0.00024 +2026-04-09 20:35:20.673395: train_loss -0.5169 +2026-04-09 20:35:20.691752: val_loss -0.3223 +2026-04-09 20:35:20.696703: Pseudo dice [0.7932, 0.0, 0.7522, 0.8189, 0.637, 0.0402, 0.7995] +2026-04-09 20:35:20.702110: Epoch time: 104.09 s +2026-04-09 20:35:21.863803: +2026-04-09 20:35:21.868445: Epoch 985 +2026-04-09 20:35:21.874834: Current learning rate: 0.00023 +2026-04-09 20:37:05.070098: train_loss -0.5266 +2026-04-09 20:37:05.087414: val_loss -0.4704 +2026-04-09 20:37:05.091505: Pseudo dice [0.8413, 0.0, 0.8013, 0.8823, 0.4531, 0.7017, 0.5272] +2026-04-09 20:37:05.099471: Epoch time: 103.21 s +2026-04-09 20:37:06.276411: +2026-04-09 20:37:06.279763: Epoch 986 +2026-04-09 20:37:06.290100: Current learning rate: 0.00021 +2026-04-09 20:38:51.299580: train_loss -0.5263 +2026-04-09 20:38:51.318465: val_loss -0.385 +2026-04-09 20:38:51.324843: Pseudo dice [0.3847, 0.0, 0.816, 0.0028, 0.7303, 0.1446, 0.701] +2026-04-09 20:38:51.329710: Epoch time: 105.03 s +2026-04-09 20:38:52.482357: +2026-04-09 20:38:52.486710: Epoch 987 +2026-04-09 20:38:52.490734: Current learning rate: 0.0002 +2026-04-09 20:40:34.433676: train_loss -0.5266 +2026-04-09 20:40:34.442695: val_loss -0.4491 +2026-04-09 20:40:34.445242: Pseudo dice [0.3452, 0.0, 0.3839, 0.2284, 0.7136, 0.6572, 0.821] +2026-04-09 20:40:34.450836: Epoch time: 101.95 s +2026-04-09 20:40:35.601341: +2026-04-09 20:40:35.604980: Epoch 988 +2026-04-09 20:40:35.608635: Current learning rate: 0.00019 +2026-04-09 20:42:19.483458: train_loss -0.5224 +2026-04-09 20:42:19.495692: val_loss -0.4341 +2026-04-09 20:42:19.500942: Pseudo dice [0.357, 0.0, 0.7001, 0.8369, 0.5316, 0.2097, 0.8313] +2026-04-09 20:42:19.505143: Epoch time: 103.89 s +2026-04-09 20:42:20.664925: +2026-04-09 20:42:20.668350: Epoch 989 +2026-04-09 20:42:20.672228: Current learning rate: 0.00017 +2026-04-09 20:44:03.716915: train_loss -0.5185 +2026-04-09 20:44:03.728752: val_loss -0.434 +2026-04-09 
20:44:03.733520: Pseudo dice [0.5071, 0.0, 0.784, 0.6525, 0.4397, 0.8238, 0.2748] +2026-04-09 20:44:03.740729: Epoch time: 103.06 s +2026-04-09 20:44:04.900469: +2026-04-09 20:44:04.907426: Epoch 990 +2026-04-09 20:44:04.912222: Current learning rate: 0.00016 +2026-04-09 20:45:49.028212: train_loss -0.5249 +2026-04-09 20:45:49.036372: val_loss -0.3968 +2026-04-09 20:45:49.039112: Pseudo dice [0.6912, 0.0, 0.7335, 0.9189, 0.4545, 0.0733, 0.5347] +2026-04-09 20:45:49.042663: Epoch time: 104.13 s +2026-04-09 20:45:50.202470: +2026-04-09 20:45:50.205113: Epoch 991 +2026-04-09 20:45:50.207884: Current learning rate: 0.00014 +2026-04-09 20:47:33.080980: train_loss -0.5258 +2026-04-09 20:47:33.091632: val_loss -0.3292 +2026-04-09 20:47:33.096706: Pseudo dice [0.4426, 0.0, 0.541, 0.5315, 0.6607, 0.0968, 0.8841] +2026-04-09 20:47:33.099945: Epoch time: 102.88 s +2026-04-09 20:47:34.240602: +2026-04-09 20:47:34.243179: Epoch 992 +2026-04-09 20:47:34.246348: Current learning rate: 0.00013 +2026-04-09 20:49:18.071922: train_loss -0.525 +2026-04-09 20:49:18.090017: val_loss -0.4537 +2026-04-09 20:49:18.094506: Pseudo dice [0.4245, 0.0, 0.6708, 0.6143, 0.7123, 0.8646, 0.7719] +2026-04-09 20:49:18.104303: Epoch time: 103.83 s +2026-04-09 20:49:19.259440: +2026-04-09 20:49:19.263575: Epoch 993 +2026-04-09 20:49:19.269460: Current learning rate: 0.00011 +2026-04-09 20:51:04.216505: train_loss -0.5202 +2026-04-09 20:51:04.228179: val_loss -0.4178 +2026-04-09 20:51:04.230860: Pseudo dice [0.825, 0.0, 0.7303, 0.7976, 0.6757, 0.0844, 0.8779] +2026-04-09 20:51:04.233690: Epoch time: 104.96 s +2026-04-09 20:51:05.384670: +2026-04-09 20:51:05.387715: Epoch 994 +2026-04-09 20:51:05.391804: Current learning rate: 0.0001 +2026-04-09 20:52:48.704807: train_loss -0.5223 +2026-04-09 20:52:48.717274: val_loss -0.387 +2026-04-09 20:52:48.725760: Pseudo dice [0.5233, 0.0, 0.6288, 0.4533, 0.6642, 0.2528, 0.846] +2026-04-09 20:52:48.729065: Epoch time: 103.32 s +2026-04-09 20:52:49.873843: 
+2026-04-09 20:52:49.878520: Epoch 995 +2026-04-09 20:52:49.883151: Current learning rate: 8e-05 +2026-04-09 20:54:33.056501: train_loss -0.5287 +2026-04-09 20:54:33.069335: val_loss -0.427 +2026-04-09 20:54:33.072919: Pseudo dice [0.8505, 0.0, 0.7873, 0.0931, 0.5561, 0.3859, 0.4568] +2026-04-09 20:54:33.078007: Epoch time: 103.19 s +2026-04-09 20:54:34.237443: +2026-04-09 20:54:34.242219: Epoch 996 +2026-04-09 20:54:34.247260: Current learning rate: 7e-05 +2026-04-09 20:56:16.163341: train_loss -0.517 +2026-04-09 20:56:16.172569: val_loss -0.3941 +2026-04-09 20:56:16.176115: Pseudo dice [0.5545, 0.0, 0.7686, 0.1332, 0.6467, 0.1026, 0.903] +2026-04-09 20:56:16.180071: Epoch time: 101.93 s +2026-04-09 20:56:17.316728: +2026-04-09 20:56:17.322093: Epoch 997 +2026-04-09 20:56:17.324791: Current learning rate: 5e-05 +2026-04-09 20:58:00.029103: train_loss -0.5332 +2026-04-09 20:58:00.040169: val_loss -0.3396 +2026-04-09 20:58:00.044185: Pseudo dice [0.493, 0.0, 0.5976, 0.2197, 0.6786, 0.1848, 0.8404] +2026-04-09 20:58:00.047915: Epoch time: 102.72 s +2026-04-09 20:58:01.174552: +2026-04-09 20:58:01.179338: Epoch 998 +2026-04-09 20:58:01.183268: Current learning rate: 4e-05 +2026-04-09 20:59:45.501517: train_loss -0.5194 +2026-04-09 20:59:45.518018: val_loss -0.475 +2026-04-09 20:59:45.524848: Pseudo dice [0.8423, 0.0, 0.7874, 0.2276, 0.589, 0.8123, 0.8044] +2026-04-09 20:59:45.532267: Epoch time: 104.33 s +2026-04-09 20:59:46.679354: +2026-04-09 20:59:46.694521: Epoch 999 +2026-04-09 20:59:46.707355: Current learning rate: 2e-05 +2026-04-09 21:01:31.545774: train_loss -0.5209 +2026-04-09 21:01:31.559014: val_loss -0.4757 +2026-04-09 21:01:31.563818: Pseudo dice [0.5608, 0.0, 0.5843, 0.6417, 0.6934, 0.7612, 0.9107] +2026-04-09 21:01:31.567642: Epoch time: 104.87 s +2026-04-09 21:01:34.528559: Training done. 
+2026-04-09 21:01:35.028714: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-09 21:01:35.039140: The split file contains 5 splits. +2026-04-09 21:01:35.046960: Desired fold for training: 1 +2026-04-09 21:01:35.054904: This split has 387 training and 97 validation cases. +2026-04-09 21:01:35.058640: predicting MSWAL_0008 +2026-04-09 21:01:35.090324: MSWAL_0008, shape torch.Size([1, 201, 537, 537]), rank 0 +2026-04-09 21:02:37.329319: predicting MSWAL_0009 +2026-04-09 21:02:37.365702: MSWAL_0009, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:02:50.081861: predicting MSWAL_0027 +2026-04-09 21:02:50.103766: MSWAL_0027, shape torch.Size([1, 157, 507, 507]), rank 0 +2026-04-09 21:02:59.279233: predicting MSWAL_0029 +2026-04-09 21:02:59.296274: MSWAL_0029, shape torch.Size([1, 185, 527, 527]), rank 0 +2026-04-09 21:03:21.644283: predicting MSWAL_0032 +2026-04-09 21:03:21.664436: MSWAL_0032, shape torch.Size([1, 221, 507, 507]), rank 0 +2026-04-09 21:03:34.731264: predicting MSWAL_0034 +2026-04-09 21:03:34.759716: MSWAL_0034, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:03:47.815326: predicting MSWAL_0045 +2026-04-09 21:03:47.830553: MSWAL_0045, shape torch.Size([1, 209, 531, 531]), rank 0 +2026-04-09 21:04:10.163265: predicting MSWAL_0052 +2026-04-09 21:04:10.189893: MSWAL_0052, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:04:22.892622: predicting MSWAL_0056 +2026-04-09 21:04:22.929690: MSWAL_0056, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:04:36.075960: predicting MSWAL_0067 +2026-04-09 21:04:36.097155: MSWAL_0067, shape torch.Size([1, 177, 557, 557]), rank 0 +2026-04-09 21:04:58.527168: predicting MSWAL_0075 +2026-04-09 21:04:58.547724: MSWAL_0075, shape torch.Size([1, 193, 605, 605]), rank 0 +2026-04-09 21:05:23.246871: predicting MSWAL_0077 +2026-04-09 21:05:23.268564: MSWAL_0077, shape torch.Size([1, 165, 524, 524]), rank 0 
+2026-04-09 21:05:38.627491: predicting MSWAL_0083 +2026-04-09 21:05:38.658008: MSWAL_0083, shape torch.Size([1, 177, 527, 527]), rank 0 +2026-04-09 21:06:01.734780: predicting MSWAL_0086 +2026-04-09 21:06:01.754845: MSWAL_0086, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:06:15.153879: predicting MSWAL_0092 +2026-04-09 21:06:15.178279: MSWAL_0092, shape torch.Size([1, 197, 507, 507]), rank 0 +2026-04-09 21:06:27.842131: predicting MSWAL_0101 +2026-04-09 21:06:27.858342: MSWAL_0101, shape torch.Size([1, 162, 444, 444]), rank 0 +2026-04-09 21:06:36.215785: predicting MSWAL_0105 +2026-04-09 21:06:36.237047: MSWAL_0105, shape torch.Size([1, 181, 572, 572]), rank 0 +2026-04-09 21:06:58.698982: predicting MSWAL_0108 +2026-04-09 21:06:58.724466: MSWAL_0108, shape torch.Size([1, 180, 507, 507]), rank 0 +2026-04-09 21:07:11.443389: predicting MSWAL_0110 +2026-04-09 21:07:11.481038: MSWAL_0110, shape torch.Size([1, 217, 507, 507]), rank 0 +2026-04-09 21:07:25.399047: predicting MSWAL_0128 +2026-04-09 21:07:25.436224: MSWAL_0128, shape torch.Size([1, 237, 575, 575]), rank 0 +2026-04-09 21:07:55.249952: predicting MSWAL_0151 +2026-04-09 21:07:55.273363: MSWAL_0151, shape torch.Size([1, 458, 535, 535]), rank 0 +2026-04-09 21:08:55.511532: predicting MSWAL_0165 +2026-04-09 21:08:55.570735: MSWAL_0165, shape torch.Size([1, 318, 480, 480]), rank 0 +2026-04-09 21:09:17.361169: predicting MSWAL_0166 +2026-04-09 21:09:17.404679: MSWAL_0166, shape torch.Size([1, 185, 553, 553]), rank 0 +2026-04-09 21:09:39.819097: predicting MSWAL_0167 +2026-04-09 21:09:39.843220: MSWAL_0167, shape torch.Size([1, 388, 539, 539]), rank 0 +2026-04-09 21:10:27.742508: predicting MSWAL_0182 +2026-04-09 21:10:27.783239: MSWAL_0182, shape torch.Size([1, 170, 465, 465]), rank 0 +2026-04-09 21:10:40.433166: predicting MSWAL_0184 +2026-04-09 21:10:40.456686: MSWAL_0184, shape torch.Size([1, 177, 581, 581]), rank 0 +2026-04-09 21:11:03.206075: predicting MSWAL_0186 +2026-04-09 21:11:03.225844: 
MSWAL_0186, shape torch.Size([1, 168, 507, 507]), rank 0 +2026-04-09 21:11:12.056588: predicting MSWAL_0219 +2026-04-09 21:11:12.078190: MSWAL_0219, shape torch.Size([1, 165, 543, 543]), rank 0 +2026-04-09 21:11:27.074220: predicting MSWAL_0228 +2026-04-09 21:11:27.104074: MSWAL_0228, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:11:40.032053: predicting MSWAL_0229 +2026-04-09 21:11:40.051831: MSWAL_0229, shape torch.Size([1, 165, 561, 561]), rank 0 +2026-04-09 21:11:55.514194: predicting MSWAL_0230 +2026-04-09 21:11:55.543766: MSWAL_0230, shape torch.Size([1, 391, 528, 528]), rank 0 +2026-04-09 21:12:40.058708: predicting MSWAL_0238 +2026-04-09 21:12:40.096663: MSWAL_0238, shape torch.Size([1, 440, 585, 585]), rank 0 +2026-04-09 21:13:32.855691: predicting MSWAL_0246 +2026-04-09 21:13:32.892005: MSWAL_0246, shape torch.Size([1, 300, 519, 519]), rank 0 +2026-04-09 21:14:09.723974: predicting MSWAL_0263 +2026-04-09 21:14:09.764832: MSWAL_0263, shape torch.Size([1, 252, 480, 480]), rank 0 +2026-04-09 21:14:26.807888: predicting MSWAL_0270 +2026-04-09 21:14:26.837643: MSWAL_0270, shape torch.Size([1, 319, 535, 535]), rank 0 +2026-04-09 21:15:04.178049: predicting MSWAL_0272 +2026-04-09 21:15:04.218912: MSWAL_0272, shape torch.Size([1, 397, 641, 641]), rank 0 +2026-04-09 21:16:25.028981: predicting MSWAL_0278 +2026-04-09 21:16:25.069153: MSWAL_0278, shape torch.Size([1, 304, 496, 496]), rank 0 +2026-04-09 21:16:46.470602: predicting MSWAL_0288 +2026-04-09 21:16:46.513715: MSWAL_0288, shape torch.Size([1, 217, 507, 507]), rank 0 +2026-04-09 21:16:59.658154: predicting MSWAL_0289 +2026-04-09 21:16:59.697952: MSWAL_0289, shape torch.Size([1, 177, 445, 445]), rank 0 +2026-04-09 21:17:12.222336: predicting MSWAL_0311 +2026-04-09 21:17:12.251903: MSWAL_0311, shape torch.Size([1, 197, 480, 480]), rank 0 +2026-04-09 21:17:25.009930: predicting MSWAL_0312 +2026-04-09 21:17:25.028963: MSWAL_0312, shape torch.Size([1, 165, 507, 507]), rank 0 +2026-04-09 
21:17:34.163864: predicting MSWAL_0317 +2026-04-09 21:17:34.190330: MSWAL_0317, shape torch.Size([1, 213, 507, 507]), rank 0 +2026-04-09 21:17:47.618447: predicting MSWAL_0318 +2026-04-09 21:17:47.646834: MSWAL_0318, shape torch.Size([1, 181, 480, 480]), rank 0 +2026-04-09 21:18:00.194848: predicting MSWAL_0332 +2026-04-09 21:18:00.220652: MSWAL_0332, shape torch.Size([1, 197, 480, 480]), rank 0 +2026-04-09 21:18:12.940906: predicting MSWAL_0336 +2026-04-09 21:18:12.962870: MSWAL_0336, shape torch.Size([1, 301, 507, 507]), rank 0 +2026-04-09 21:18:34.301488: predicting MSWAL_0344 +2026-04-09 21:18:34.337686: MSWAL_0344, shape torch.Size([1, 309, 556, 556]), rank 0 +2026-04-09 21:19:13.566520: predicting MSWAL_0345 +2026-04-09 21:19:13.606756: MSWAL_0345, shape torch.Size([1, 200, 593, 593]), rank 0 +2026-04-09 21:19:36.359151: predicting MSWAL_0353 +2026-04-09 21:19:36.387050: MSWAL_0353, shape torch.Size([1, 544, 629, 629]), rank 0 +2026-04-09 21:20:43.820089: predicting MSWAL_0355 +2026-04-09 21:20:43.884490: MSWAL_0355, shape torch.Size([1, 453, 553, 553]), rank 0 +2026-04-09 21:21:43.234257: predicting MSWAL_0361 +2026-04-09 21:21:43.280018: MSWAL_0361, shape torch.Size([1, 317, 551, 551]), rank 0 +2026-04-09 21:22:21.230327: predicting MSWAL_0365 +2026-04-09 21:22:21.260761: MSWAL_0365, shape torch.Size([1, 333, 560, 560]), rank 0 +2026-04-09 21:23:01.842886: predicting MSWAL_0366 +2026-04-09 21:23:01.891649: MSWAL_0366, shape torch.Size([1, 293, 497, 497]), rank 0 +2026-04-09 21:23:23.055159: predicting MSWAL_0369 +2026-04-09 21:23:23.091938: MSWAL_0369, shape torch.Size([1, 379, 629, 629]), rank 0 +2026-04-09 21:24:08.187688: predicting MSWAL_0382 +2026-04-09 21:24:08.235975: MSWAL_0382, shape torch.Size([1, 305, 507, 507]), rank 0 +2026-04-09 21:24:29.649832: predicting MSWAL_0393 +2026-04-09 21:24:29.693239: MSWAL_0393, shape torch.Size([1, 307, 485, 485]), rank 0 +2026-04-09 21:24:52.142184: predicting MSWAL_0399 +2026-04-09 21:24:52.170509: MSWAL_0399, 
shape torch.Size([1, 253, 507, 507]), rank 0 +2026-04-09 21:25:10.397578: predicting MSWAL_0411 +2026-04-09 21:25:10.442799: MSWAL_0411, shape torch.Size([1, 217, 507, 507]), rank 0 +2026-04-09 21:25:23.131263: predicting MSWAL_0429 +2026-04-09 21:25:23.175185: MSWAL_0429, shape torch.Size([1, 337, 547, 547]), rank 0 +2026-04-09 21:26:10.364473: predicting MSWAL_0431 +2026-04-09 21:26:10.402420: MSWAL_0431, shape torch.Size([1, 349, 507, 507]), rank 0 +2026-04-09 21:26:36.065797: predicting MSWAL_0447 +2026-04-09 21:26:36.100008: MSWAL_0447, shape torch.Size([1, 237, 507, 507]), rank 0 +2026-04-09 21:26:53.397239: predicting MSWAL_0452 +2026-04-09 21:26:53.422984: MSWAL_0452, shape torch.Size([1, 157, 507, 507]), rank 0 +2026-04-09 21:27:02.114689: predicting MSWAL_0455 +2026-04-09 21:27:02.134721: MSWAL_0455, shape torch.Size([1, 205, 508, 508]), rank 0 +2026-04-09 21:27:15.787953: predicting MSWAL_0461 +2026-04-09 21:27:15.807940: MSWAL_0461, shape torch.Size([1, 297, 608, 608]), rank 0 +2026-04-09 21:27:55.539526: predicting MSWAL_0479 +2026-04-09 21:27:55.584225: MSWAL_0479, shape torch.Size([1, 157, 507, 507]), rank 0 +2026-04-09 21:28:05.025254: predicting MSWAL_0489 +2026-04-09 21:28:05.045806: MSWAL_0489, shape torch.Size([1, 217, 507, 507]), rank 0 +2026-04-09 21:28:18.151742: predicting MSWAL_0501 +2026-04-09 21:28:18.191440: MSWAL_0501, shape torch.Size([1, 209, 507, 507]), rank 0 +2026-04-09 21:28:31.073641: predicting MSWAL_0507 +2026-04-09 21:28:31.103002: MSWAL_0507, shape torch.Size([1, 197, 525, 525]), rank 0 +2026-04-09 21:28:54.267640: predicting MSWAL_0509 +2026-04-09 21:28:54.288390: MSWAL_0509, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:29:07.056773: predicting MSWAL_0512 +2026-04-09 21:29:07.074818: MSWAL_0512, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:29:19.901205: predicting MSWAL_0518 +2026-04-09 21:29:19.935489: MSWAL_0518, shape torch.Size([1, 261, 507, 507]), rank 0 +2026-04-09 21:29:37.110930: 
predicting MSWAL_0519 +2026-04-09 21:29:37.140830: MSWAL_0519, shape torch.Size([1, 177, 520, 520]), rank 0 +2026-04-09 21:29:59.508269: predicting MSWAL_0524 +2026-04-09 21:29:59.547935: MSWAL_0524, shape torch.Size([1, 149, 543, 543]), rank 0 +2026-04-09 21:30:14.518883: predicting MSWAL_0534 +2026-04-09 21:30:14.546067: MSWAL_0534, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:30:27.393111: predicting MSWAL_0555 +2026-04-09 21:30:27.419971: MSWAL_0555, shape torch.Size([1, 299, 467, 467]), rank 0 +2026-04-09 21:30:48.403830: predicting MSWAL_0566 +2026-04-09 21:30:48.440823: MSWAL_0566, shape torch.Size([1, 433, 643, 643]), rank 0 +2026-04-09 21:32:09.330582: predicting MSWAL_0567 +2026-04-09 21:32:09.387724: MSWAL_0567, shape torch.Size([1, 237, 495, 495]), rank 0 +2026-04-09 21:32:26.406103: predicting MSWAL_0575 +2026-04-09 21:32:26.435990: MSWAL_0575, shape torch.Size([1, 325, 583, 583]), rank 0 +2026-04-09 21:33:03.993881: predicting MSWAL_0578 +2026-04-09 21:33:04.037754: MSWAL_0578, shape torch.Size([1, 178, 480, 480]), rank 0 +2026-04-09 21:33:17.087350: predicting MSWAL_0580 +2026-04-09 21:33:17.117146: MSWAL_0580, shape torch.Size([1, 296, 591, 591]), rank 0 +2026-04-09 21:33:54.548499: predicting MSWAL_0583 +2026-04-09 21:33:54.583666: MSWAL_0583, shape torch.Size([1, 308, 480, 480]), rank 0 +2026-04-09 21:34:15.745228: predicting MSWAL_0584 +2026-04-09 21:34:15.775851: MSWAL_0584, shape torch.Size([1, 154, 512, 512]), rank 0 +2026-04-09 21:34:24.509792: predicting MSWAL_0586 +2026-04-09 21:34:24.526324: MSWAL_0586, shape torch.Size([1, 189, 568, 568]), rank 0 +2026-04-09 21:34:47.038570: predicting MSWAL_0614 +2026-04-09 21:34:47.077751: MSWAL_0614, shape torch.Size([1, 343, 575, 575]), rank 0 +2026-04-09 21:35:31.594421: predicting MSWAL_0617 +2026-04-09 21:35:31.645633: MSWAL_0617, shape torch.Size([1, 178, 497, 497]), rank 0 +2026-04-09 21:35:44.457519: predicting MSWAL_0629 +2026-04-09 21:35:44.490939: MSWAL_0629, shape 
torch.Size([1, 316, 541, 541]), rank 0 +2026-04-09 21:36:21.764759: predicting MSWAL_0636 +2026-04-09 21:36:21.794073: MSWAL_0636, shape torch.Size([1, 308, 539, 539]), rank 0 +2026-04-09 21:36:59.193765: predicting MSWAL_0638 +2026-04-09 21:36:59.238524: MSWAL_0638, shape torch.Size([1, 490, 480, 480]), rank 0 +2026-04-09 21:37:32.817755: predicting MSWAL_0650 +2026-04-09 21:37:32.860059: MSWAL_0650, shape torch.Size([1, 283, 507, 507]), rank 0 +2026-04-09 21:37:54.153446: predicting MSWAL_0654 +2026-04-09 21:37:54.179083: MSWAL_0654, shape torch.Size([1, 321, 507, 507]), rank 0 +2026-04-09 21:38:16.338948: predicting MSWAL_0655 +2026-04-09 21:38:16.367463: MSWAL_0655, shape torch.Size([1, 308, 556, 556]), rank 0 +2026-04-09 21:38:53.987266: predicting MSWAL_0663 +2026-04-09 21:38:54.015551: MSWAL_0663, shape torch.Size([1, 290, 511, 511]), rank 0 +2026-04-09 21:39:15.581468: predicting MSWAL_0667 +2026-04-09 21:39:15.622205: MSWAL_0667, shape torch.Size([1, 277, 508, 508]), rank 0 +2026-04-09 21:39:33.097524: predicting MSWAL_0670 +2026-04-09 21:39:33.127378: MSWAL_0670, shape torch.Size([1, 352, 549, 549]), rank 0 +2026-04-09 21:40:17.789099: predicting MSWAL_0673 +2026-04-09 21:40:17.819629: MSWAL_0673, shape torch.Size([1, 324, 528, 528]), rank 0 +2026-04-09 21:40:55.927866: predicting MSWAL_0674 +2026-04-09 21:40:55.961195: MSWAL_0674, shape torch.Size([1, 381, 599, 599]), rank 0 +2026-04-09 21:41:40.855457: predicting MSWAL_0681 +2026-04-09 21:41:40.889045: MSWAL_0681, shape torch.Size([1, 365, 543, 543]), rank 0 +2026-04-09 21:42:25.551648: predicting MSWAL_0694 +2026-04-09 21:42:25.585023: MSWAL_0694, shape torch.Size([1, 276, 544, 544]), rank 0 +2026-04-09 21:44:52.793602: Validation complete +2026-04-09 21:44:52.796379: Mean Validation Dice: 0.3812345109991798 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/checkpoint_best.pth 
b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..081ef0274252952468534b221886325d50cc9b90 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1508079a1fa76d3ddfea37af18059a0898f826e9e1e78368bd5efddf9fcfb2dd +size 1129421458 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/checkpoint_final.pth b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/checkpoint_final.pth new file mode 100644 index 0000000000000000000000000000000000000000..46c69fed8c4b67a8d9d368b767a08261349fb770 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/checkpoint_final.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0a4cbb4b929e0cc4574dba3c158a8bea77ddf0fce16c917d9bf0f413e23d756b +size 1129849686 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/debug.json b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/debug.json new file mode 100644 index 0000000000000000000000000000000000000000..aad411b34022b3752329b813812b7a5b725eb365 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/debug.json @@ -0,0 +1,53 @@ +{ + "_best_ema": "None", + "batch_size": "2", + "configuration_manager": "{'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': 
False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}", + "configuration_name": "3d_fullres", + "cudnn_version": 90100, + "current_epoch": "0", + "dataloader_train": "", + "dataloader_train.generator": "", + "dataloader_train.num_processes": "12", + "dataloader_train.transform": "None", + "dataloader_val": "", + "dataloader_val.generator": "", + "dataloader_val.num_processes": "6", + "dataloader_val.transform": "None", + "dataset_json": "{'name': 'MSWAL', 'description': ' 3D Multi-class Segmentation of Whole Abdominal Lesions Dataset', 'licence': 'CC BY-NC 4.0', 'relase': 'July 8, 2025', 'tensorImageSize': '3D', 'file_ending': '.nii.gz', 'channel_names': {'0': 'CT'}, 'labels': {'background': 0, 'gallstone': 1, 'kidney stone': 2, 'liver tumor': 3, 'kidney tumor': 4, 'pancreatic cancer': 5, 'liver cyst': 6, 'kidney cyst': 7}, 'numTraining': 484, 'numTest': 
210, 'training': [{'image': './imagesTr/MSWAL_0001_0000.nii.gz', 'label': './labelsTr/MSWAL_0001.nii.gz'}, {'image': './imagesTr/MSWAL_0002_0000.nii.gz', 'label': './labelsTr/MSWAL_0002.nii.gz'}, {'image': './imagesTr/MSWAL_0003_0000.nii.gz', 'label': './labelsTr/MSWAL_0003.nii.gz'}, {'image': './imagesTr/MSWAL_0008_0000.nii.gz', 'label': './labelsTr/MSWAL_0008.nii.gz'}, {'image': './imagesTr/MSWAL_0009_0000.nii.gz', 'label': './labelsTr/MSWAL_0009.nii.gz'}, {'image': './imagesTr/MSWAL_0011_0000.nii.gz', 'label': './labelsTr/MSWAL_0011.nii.gz'}, {'image': './imagesTr/MSWAL_0013_0000.nii.gz', 'label': './labelsTr/MSWAL_0013.nii.gz'}, {'image': './imagesTr/MSWAL_0014_0000.nii.gz', 'label': './labelsTr/MSWAL_0014.nii.gz'}, {'image': './imagesTr/MSWAL_0015_0000.nii.gz', 'label': './labelsTr/MSWAL_0015.nii.gz'}, {'image': './imagesTr/MSWAL_0017_0000.nii.gz', 'label': './labelsTr/MSWAL_0017.nii.gz'}, {'image': './imagesTr/MSWAL_0018_0000.nii.gz', 'label': './labelsTr/MSWAL_0018.nii.gz'}, {'image': './imagesTr/MSWAL_0020_0000.nii.gz', 'label': './labelsTr/MSWAL_0020.nii.gz'}, {'image': './imagesTr/MSWAL_0021_0000.nii.gz', 'label': './labelsTr/MSWAL_0021.nii.gz'}, {'image': './imagesTr/MSWAL_0022_0000.nii.gz', 'label': './labelsTr/MSWAL_0022.nii.gz'}, {'image': './imagesTr/MSWAL_0024_0000.nii.gz', 'label': './labelsTr/MSWAL_0024.nii.gz'}, {'image': './imagesTr/MSWAL_0026_0000.nii.gz', 'label': './labelsTr/MSWAL_0026.nii.gz'}, {'image': './imagesTr/MSWAL_0027_0000.nii.gz', 'label': './labelsTr/MSWAL_0027.nii.gz'}, {'image': './imagesTr/MSWAL_0028_0000.nii.gz', 'label': './labelsTr/MSWAL_0028.nii.gz'}, {'image': './imagesTr/MSWAL_0029_0000.nii.gz', 'label': './labelsTr/MSWAL_0029.nii.gz'}, {'image': './imagesTr/MSWAL_0031_0000.nii.gz', 'label': './labelsTr/MSWAL_0031.nii.gz'}, {'image': './imagesTr/MSWAL_0032_0000.nii.gz', 'label': './labelsTr/MSWAL_0032.nii.gz'}, {'image': './imagesTr/MSWAL_0033_0000.nii.gz', 'label': './labelsTr/MSWAL_0033.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0034_0000.nii.gz', 'label': './labelsTr/MSWAL_0034.nii.gz'}, {'image': './imagesTr/MSWAL_0035_0000.nii.gz', 'label': './labelsTr/MSWAL_0035.nii.gz'}, {'image': './imagesTr/MSWAL_0037_0000.nii.gz', 'label': './labelsTr/MSWAL_0037.nii.gz'}, {'image': './imagesTr/MSWAL_0038_0000.nii.gz', 'label': './labelsTr/MSWAL_0038.nii.gz'}, {'image': './imagesTr/MSWAL_0039_0000.nii.gz', 'label': './labelsTr/MSWAL_0039.nii.gz'}, {'image': './imagesTr/MSWAL_0040_0000.nii.gz', 'label': './labelsTr/MSWAL_0040.nii.gz'}, {'image': './imagesTr/MSWAL_0041_0000.nii.gz', 'label': './labelsTr/MSWAL_0041.nii.gz'}, {'image': './imagesTr/MSWAL_0042_0000.nii.gz', 'label': './labelsTr/MSWAL_0042.nii.gz'}, {'image': './imagesTr/MSWAL_0045_0000.nii.gz', 'label': './labelsTr/MSWAL_0045.nii.gz'}, {'image': './imagesTr/MSWAL_0046_0000.nii.gz', 'label': './labelsTr/MSWAL_0046.nii.gz'}, {'image': './imagesTr/MSWAL_0049_0000.nii.gz', 'label': './labelsTr/MSWAL_0049.nii.gz'}, {'image': './imagesTr/MSWAL_0050_0000.nii.gz', 'label': './labelsTr/MSWAL_0050.nii.gz'}, {'image': './imagesTr/MSWAL_0051_0000.nii.gz', 'label': './labelsTr/MSWAL_0051.nii.gz'}, {'image': './imagesTr/MSWAL_0052_0000.nii.gz', 'label': './labelsTr/MSWAL_0052.nii.gz'}, {'image': './imagesTr/MSWAL_0054_0000.nii.gz', 'label': './labelsTr/MSWAL_0054.nii.gz'}, {'image': './imagesTr/MSWAL_0055_0000.nii.gz', 'label': './labelsTr/MSWAL_0055.nii.gz'}, {'image': './imagesTr/MSWAL_0056_0000.nii.gz', 'label': './labelsTr/MSWAL_0056.nii.gz'}, {'image': './imagesTr/MSWAL_0057_0000.nii.gz', 'label': './labelsTr/MSWAL_0057.nii.gz'}, {'image': './imagesTr/MSWAL_0059_0000.nii.gz', 'label': './labelsTr/MSWAL_0059.nii.gz'}, {'image': './imagesTr/MSWAL_0060_0000.nii.gz', 'label': './labelsTr/MSWAL_0060.nii.gz'}, {'image': './imagesTr/MSWAL_0061_0000.nii.gz', 'label': './labelsTr/MSWAL_0061.nii.gz'}, {'image': './imagesTr/MSWAL_0063_0000.nii.gz', 'label': './labelsTr/MSWAL_0063.nii.gz'}, {'image': './imagesTr/MSWAL_0064_0000.nii.gz', 
'label': './labelsTr/MSWAL_0064.nii.gz'}, {'image': './imagesTr/MSWAL_0065_0000.nii.gz', 'label': './labelsTr/MSWAL_0065.nii.gz'}, {'image': './imagesTr/MSWAL_0066_0000.nii.gz', 'label': './labelsTr/MSWAL_0066.nii.gz'}, {'image': './imagesTr/MSWAL_0067_0000.nii.gz', 'label': './labelsTr/MSWAL_0067.nii.gz'}, {'image': './imagesTr/MSWAL_0069_0000.nii.gz', 'label': './labelsTr/MSWAL_0069.nii.gz'}, {'image': './imagesTr/MSWAL_0072_0000.nii.gz', 'label': './labelsTr/MSWAL_0072.nii.gz'}, {'image': './imagesTr/MSWAL_0075_0000.nii.gz', 'label': './labelsTr/MSWAL_0075.nii.gz'}, {'image': './imagesTr/MSWAL_0077_0000.nii.gz', 'label': './labelsTr/MSWAL_0077.nii.gz'}, {'image': './imagesTr/MSWAL_0080_0000.nii.gz', 'label': './labelsTr/MSWAL_0080.nii.gz'}, {'image': './imagesTr/MSWAL_0082_0000.nii.gz', 'label': './labelsTr/MSWAL_0082.nii.gz'}, {'image': './imagesTr/MSWAL_0083_0000.nii.gz', 'label': './labelsTr/MSWAL_0083.nii.gz'}, {'image': './imagesTr/MSWAL_0084_0000.nii.gz', 'label': './labelsTr/MSWAL_0084.nii.gz'}, {'image': './imagesTr/MSWAL_0085_0000.nii.gz', 'label': './labelsTr/MSWAL_0085.nii.gz'}, {'image': './imagesTr/MSWAL_0086_0000.nii.gz', 'label': './labelsTr/MSWAL_0086.nii.gz'}, {'image': './imagesTr/MSWAL_0088_0000.nii.gz', 'label': './labelsTr/MSWAL_0088.nii.gz'}, {'image': './imagesTr/MSWAL_0089_0000.nii.gz', 'label': './labelsTr/MSWAL_0089.nii.gz'}, {'image': './imagesTr/MSWAL_0092_0000.nii.gz', 'label': './labelsTr/MSWAL_0092.nii.gz'}, {'image': './imagesTr/MSWAL_0093_0000.nii.gz', 'label': './labelsTr/MSWAL_0093.nii.gz'}, {'image': './imagesTr/MSWAL_0094_0000.nii.gz', 'label': './labelsTr/MSWAL_0094.nii.gz'}, {'image': './imagesTr/MSWAL_0095_0000.nii.gz', 'label': './labelsTr/MSWAL_0095.nii.gz'}, {'image': './imagesTr/MSWAL_0096_0000.nii.gz', 'label': './labelsTr/MSWAL_0096.nii.gz'}, {'image': './imagesTr/MSWAL_0098_0000.nii.gz', 'label': './labelsTr/MSWAL_0098.nii.gz'}, {'image': './imagesTr/MSWAL_0099_0000.nii.gz', 'label': './labelsTr/MSWAL_0099.nii.gz'}, 
{'image': './imagesTr/MSWAL_0101_0000.nii.gz', 'label': './labelsTr/MSWAL_0101.nii.gz'}, {'image': './imagesTr/MSWAL_0102_0000.nii.gz', 'label': './labelsTr/MSWAL_0102.nii.gz'}, {'image': './imagesTr/MSWAL_0103_0000.nii.gz', 'label': './labelsTr/MSWAL_0103.nii.gz'}, {'image': './imagesTr/MSWAL_0104_0000.nii.gz', 'label': './labelsTr/MSWAL_0104.nii.gz'}, {'image': './imagesTr/MSWAL_0105_0000.nii.gz', 'label': './labelsTr/MSWAL_0105.nii.gz'}, {'image': './imagesTr/MSWAL_0106_0000.nii.gz', 'label': './labelsTr/MSWAL_0106.nii.gz'}, {'image': './imagesTr/MSWAL_0108_0000.nii.gz', 'label': './labelsTr/MSWAL_0108.nii.gz'}, {'image': './imagesTr/MSWAL_0109_0000.nii.gz', 'label': './labelsTr/MSWAL_0109.nii.gz'}, {'image': './imagesTr/MSWAL_0110_0000.nii.gz', 'label': './labelsTr/MSWAL_0110.nii.gz'}, {'image': './imagesTr/MSWAL_0111_0000.nii.gz', 'label': './labelsTr/MSWAL_0111.nii.gz'}, {'image': './imagesTr/MSWAL_0112_0000.nii.gz', 'label': './labelsTr/MSWAL_0112.nii.gz'}, {'image': './imagesTr/MSWAL_0113_0000.nii.gz', 'label': './labelsTr/MSWAL_0113.nii.gz'}, {'image': './imagesTr/MSWAL_0114_0000.nii.gz', 'label': './labelsTr/MSWAL_0114.nii.gz'}, {'image': './imagesTr/MSWAL_0117_0000.nii.gz', 'label': './labelsTr/MSWAL_0117.nii.gz'}, {'image': './imagesTr/MSWAL_0119_0000.nii.gz', 'label': './labelsTr/MSWAL_0119.nii.gz'}, {'image': './imagesTr/MSWAL_0120_0000.nii.gz', 'label': './labelsTr/MSWAL_0120.nii.gz'}, {'image': './imagesTr/MSWAL_0122_0000.nii.gz', 'label': './labelsTr/MSWAL_0122.nii.gz'}, {'image': './imagesTr/MSWAL_0124_0000.nii.gz', 'label': './labelsTr/MSWAL_0124.nii.gz'}, {'image': './imagesTr/MSWAL_0125_0000.nii.gz', 'label': './labelsTr/MSWAL_0125.nii.gz'}, {'image': './imagesTr/MSWAL_0126_0000.nii.gz', 'label': './labelsTr/MSWAL_0126.nii.gz'}, {'image': './imagesTr/MSWAL_0127_0000.nii.gz', 'label': './labelsTr/MSWAL_0127.nii.gz'}, {'image': './imagesTr/MSWAL_0128_0000.nii.gz', 'label': './labelsTr/MSWAL_0128.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0129_0000.nii.gz', 'label': './labelsTr/MSWAL_0129.nii.gz'}, {'image': './imagesTr/MSWAL_0130_0000.nii.gz', 'label': './labelsTr/MSWAL_0130.nii.gz'}, {'image': './imagesTr/MSWAL_0132_0000.nii.gz', 'label': './labelsTr/MSWAL_0132.nii.gz'}, {'image': './imagesTr/MSWAL_0133_0000.nii.gz', 'label': './labelsTr/MSWAL_0133.nii.gz'}, {'image': './imagesTr/MSWAL_0134_0000.nii.gz', 'label': './labelsTr/MSWAL_0134.nii.gz'}, {'image': './imagesTr/MSWAL_0136_0000.nii.gz', 'label': './labelsTr/MSWAL_0136.nii.gz'}, {'image': './imagesTr/MSWAL_0138_0000.nii.gz', 'label': './labelsTr/MSWAL_0138.nii.gz'}, {'image': './imagesTr/MSWAL_0139_0000.nii.gz', 'label': './labelsTr/MSWAL_0139.nii.gz'}, {'image': './imagesTr/MSWAL_0140_0000.nii.gz', 'label': './labelsTr/MSWAL_0140.nii.gz'}, {'image': './imagesTr/MSWAL_0141_0000.nii.gz', 'label': './labelsTr/MSWAL_0141.nii.gz'}, {'image': './imagesTr/MSWAL_0142_0000.nii.gz', 'label': './labelsTr/MSWAL_0142.nii.gz'}, {'image': './imagesTr/MSWAL_0143_0000.nii.gz', 'label': './labelsTr/MSWAL_0143.nii.gz'}, {'image': './imagesTr/MSWAL_0145_0000.nii.gz', 'label': './labelsTr/MSWAL_0145.nii.gz'}, {'image': './imagesTr/MSWAL_0147_0000.nii.gz', 'label': './labelsTr/MSWAL_0147.nii.gz'}, {'image': './imagesTr/MSWAL_0148_0000.nii.gz', 'label': './labelsTr/MSWAL_0148.nii.gz'}, {'image': './imagesTr/MSWAL_0149_0000.nii.gz', 'label': './labelsTr/MSWAL_0149.nii.gz'}, {'image': './imagesTr/MSWAL_0150_0000.nii.gz', 'label': './labelsTr/MSWAL_0150.nii.gz'}, {'image': './imagesTr/MSWAL_0151_0000.nii.gz', 'label': './labelsTr/MSWAL_0151.nii.gz'}, {'image': './imagesTr/MSWAL_0152_0000.nii.gz', 'label': './labelsTr/MSWAL_0152.nii.gz'}, {'image': './imagesTr/MSWAL_0157_0000.nii.gz', 'label': './labelsTr/MSWAL_0157.nii.gz'}, {'image': './imagesTr/MSWAL_0159_0000.nii.gz', 'label': './labelsTr/MSWAL_0159.nii.gz'}, {'image': './imagesTr/MSWAL_0162_0000.nii.gz', 'label': './labelsTr/MSWAL_0162.nii.gz'}, {'image': './imagesTr/MSWAL_0163_0000.nii.gz', 
'label': './labelsTr/MSWAL_0163.nii.gz'}, {'image': './imagesTr/MSWAL_0165_0000.nii.gz', 'label': './labelsTr/MSWAL_0165.nii.gz'}, {'image': './imagesTr/MSWAL_0166_0000.nii.gz', 'label': './labelsTr/MSWAL_0166.nii.gz'}, {'image': './imagesTr/MSWAL_0167_0000.nii.gz', 'label': './labelsTr/MSWAL_0167.nii.gz'}, {'image': './imagesTr/MSWAL_0168_0000.nii.gz', 'label': './labelsTr/MSWAL_0168.nii.gz'}, {'image': './imagesTr/MSWAL_0169_0000.nii.gz', 'label': './labelsTr/MSWAL_0169.nii.gz'}, {'image': './imagesTr/MSWAL_0170_0000.nii.gz', 'label': './labelsTr/MSWAL_0170.nii.gz'}, {'image': './imagesTr/MSWAL_0171_0000.nii.gz', 'label': './labelsTr/MSWAL_0171.nii.gz'}, {'image': './imagesTr/MSWAL_0172_0000.nii.gz', 'label': './labelsTr/MSWAL_0172.nii.gz'}, {'image': './imagesTr/MSWAL_0173_0000.nii.gz', 'label': './labelsTr/MSWAL_0173.nii.gz'}, {'image': './imagesTr/MSWAL_0174_0000.nii.gz', 'label': './labelsTr/MSWAL_0174.nii.gz'}, {'image': './imagesTr/MSWAL_0175_0000.nii.gz', 'label': './labelsTr/MSWAL_0175.nii.gz'}, {'image': './imagesTr/MSWAL_0176_0000.nii.gz', 'label': './labelsTr/MSWAL_0176.nii.gz'}, {'image': './imagesTr/MSWAL_0177_0000.nii.gz', 'label': './labelsTr/MSWAL_0177.nii.gz'}, {'image': './imagesTr/MSWAL_0178_0000.nii.gz', 'label': './labelsTr/MSWAL_0178.nii.gz'}, {'image': './imagesTr/MSWAL_0179_0000.nii.gz', 'label': './labelsTr/MSWAL_0179.nii.gz'}, {'image': './imagesTr/MSWAL_0180_0000.nii.gz', 'label': './labelsTr/MSWAL_0180.nii.gz'}, {'image': './imagesTr/MSWAL_0182_0000.nii.gz', 'label': './labelsTr/MSWAL_0182.nii.gz'}, {'image': './imagesTr/MSWAL_0183_0000.nii.gz', 'label': './labelsTr/MSWAL_0183.nii.gz'}, {'image': './imagesTr/MSWAL_0184_0000.nii.gz', 'label': './labelsTr/MSWAL_0184.nii.gz'}, {'image': './imagesTr/MSWAL_0185_0000.nii.gz', 'label': './labelsTr/MSWAL_0185.nii.gz'}, {'image': './imagesTr/MSWAL_0186_0000.nii.gz', 'label': './labelsTr/MSWAL_0186.nii.gz'}, {'image': './imagesTr/MSWAL_0187_0000.nii.gz', 'label': './labelsTr/MSWAL_0187.nii.gz'}, 
{'image': './imagesTr/MSWAL_0188_0000.nii.gz', 'label': './labelsTr/MSWAL_0188.nii.gz'}, {'image': './imagesTr/MSWAL_0189_0000.nii.gz', 'label': './labelsTr/MSWAL_0189.nii.gz'}, {'image': './imagesTr/MSWAL_0193_0000.nii.gz', 'label': './labelsTr/MSWAL_0193.nii.gz'}, {'image': './imagesTr/MSWAL_0194_0000.nii.gz', 'label': './labelsTr/MSWAL_0194.nii.gz'}, {'image': './imagesTr/MSWAL_0195_0000.nii.gz', 'label': './labelsTr/MSWAL_0195.nii.gz'}, {'image': './imagesTr/MSWAL_0199_0000.nii.gz', 'label': './labelsTr/MSWAL_0199.nii.gz'}, {'image': './imagesTr/MSWAL_0201_0000.nii.gz', 'label': './labelsTr/MSWAL_0201.nii.gz'}, {'image': './imagesTr/MSWAL_0202_0000.nii.gz', 'label': './labelsTr/MSWAL_0202.nii.gz'}, {'image': './imagesTr/MSWAL_0203_0000.nii.gz', 'label': './labelsTr/MSWAL_0203.nii.gz'}, {'image': './imagesTr/MSWAL_0204_0000.nii.gz', 'label': './labelsTr/MSWAL_0204.nii.gz'}, {'image': './imagesTr/MSWAL_0207_0000.nii.gz', 'label': './labelsTr/MSWAL_0207.nii.gz'}, {'image': './imagesTr/MSWAL_0208_0000.nii.gz', 'label': './labelsTr/MSWAL_0208.nii.gz'}, {'image': './imagesTr/MSWAL_0209_0000.nii.gz', 'label': './labelsTr/MSWAL_0209.nii.gz'}, {'image': './imagesTr/MSWAL_0214_0000.nii.gz', 'label': './labelsTr/MSWAL_0214.nii.gz'}, {'image': './imagesTr/MSWAL_0217_0000.nii.gz', 'label': './labelsTr/MSWAL_0217.nii.gz'}, {'image': './imagesTr/MSWAL_0218_0000.nii.gz', 'label': './labelsTr/MSWAL_0218.nii.gz'}, {'image': './imagesTr/MSWAL_0219_0000.nii.gz', 'label': './labelsTr/MSWAL_0219.nii.gz'}, {'image': './imagesTr/MSWAL_0220_0000.nii.gz', 'label': './labelsTr/MSWAL_0220.nii.gz'}, {'image': './imagesTr/MSWAL_0221_0000.nii.gz', 'label': './labelsTr/MSWAL_0221.nii.gz'}, {'image': './imagesTr/MSWAL_0222_0000.nii.gz', 'label': './labelsTr/MSWAL_0222.nii.gz'}, {'image': './imagesTr/MSWAL_0223_0000.nii.gz', 'label': './labelsTr/MSWAL_0223.nii.gz'}, {'image': './imagesTr/MSWAL_0224_0000.nii.gz', 'label': './labelsTr/MSWAL_0224.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0225_0000.nii.gz', 'label': './labelsTr/MSWAL_0225.nii.gz'}, {'image': './imagesTr/MSWAL_0226_0000.nii.gz', 'label': './labelsTr/MSWAL_0226.nii.gz'}, {'image': './imagesTr/MSWAL_0227_0000.nii.gz', 'label': './labelsTr/MSWAL_0227.nii.gz'}, {'image': './imagesTr/MSWAL_0228_0000.nii.gz', 'label': './labelsTr/MSWAL_0228.nii.gz'}, {'image': './imagesTr/MSWAL_0229_0000.nii.gz', 'label': './labelsTr/MSWAL_0229.nii.gz'}, {'image': './imagesTr/MSWAL_0230_0000.nii.gz', 'label': './labelsTr/MSWAL_0230.nii.gz'}, {'image': './imagesTr/MSWAL_0233_0000.nii.gz', 'label': './labelsTr/MSWAL_0233.nii.gz'}, {'image': './imagesTr/MSWAL_0234_0000.nii.gz', 'label': './labelsTr/MSWAL_0234.nii.gz'}, {'image': './imagesTr/MSWAL_0238_0000.nii.gz', 'label': './labelsTr/MSWAL_0238.nii.gz'}, {'image': './imagesTr/MSWAL_0241_0000.nii.gz', 'label': './labelsTr/MSWAL_0241.nii.gz'}, {'image': './imagesTr/MSWAL_0242_0000.nii.gz', 'label': './labelsTr/MSWAL_0242.nii.gz'}, {'image': './imagesTr/MSWAL_0243_0000.nii.gz', 'label': './labelsTr/MSWAL_0243.nii.gz'}, {'image': './imagesTr/MSWAL_0245_0000.nii.gz', 'label': './labelsTr/MSWAL_0245.nii.gz'}, {'image': './imagesTr/MSWAL_0246_0000.nii.gz', 'label': './labelsTr/MSWAL_0246.nii.gz'}, {'image': './imagesTr/MSWAL_0247_0000.nii.gz', 'label': './labelsTr/MSWAL_0247.nii.gz'}, {'image': './imagesTr/MSWAL_0248_0000.nii.gz', 'label': './labelsTr/MSWAL_0248.nii.gz'}, {'image': './imagesTr/MSWAL_0251_0000.nii.gz', 'label': './labelsTr/MSWAL_0251.nii.gz'}, {'image': './imagesTr/MSWAL_0252_0000.nii.gz', 'label': './labelsTr/MSWAL_0252.nii.gz'}, {'image': './imagesTr/MSWAL_0253_0000.nii.gz', 'label': './labelsTr/MSWAL_0253.nii.gz'}, {'image': './imagesTr/MSWAL_0254_0000.nii.gz', 'label': './labelsTr/MSWAL_0254.nii.gz'}, {'image': './imagesTr/MSWAL_0255_0000.nii.gz', 'label': './labelsTr/MSWAL_0255.nii.gz'}, {'image': './imagesTr/MSWAL_0256_0000.nii.gz', 'label': './labelsTr/MSWAL_0256.nii.gz'}, {'image': './imagesTr/MSWAL_0257_0000.nii.gz', 
'label': './labelsTr/MSWAL_0257.nii.gz'}, {'image': './imagesTr/MSWAL_0258_0000.nii.gz', 'label': './labelsTr/MSWAL_0258.nii.gz'}, {'image': './imagesTr/MSWAL_0259_0000.nii.gz', 'label': './labelsTr/MSWAL_0259.nii.gz'}, {'image': './imagesTr/MSWAL_0260_0000.nii.gz', 'label': './labelsTr/MSWAL_0260.nii.gz'}, {'image': './imagesTr/MSWAL_0261_0000.nii.gz', 'label': './labelsTr/MSWAL_0261.nii.gz'}, {'image': './imagesTr/MSWAL_0262_0000.nii.gz', 'label': './labelsTr/MSWAL_0262.nii.gz'}, {'image': './imagesTr/MSWAL_0263_0000.nii.gz', 'label': './labelsTr/MSWAL_0263.nii.gz'}, {'image': './imagesTr/MSWAL_0264_0000.nii.gz', 'label': './labelsTr/MSWAL_0264.nii.gz'}, {'image': './imagesTr/MSWAL_0265_0000.nii.gz', 'label': './labelsTr/MSWAL_0265.nii.gz'}, {'image': './imagesTr/MSWAL_0267_0000.nii.gz', 'label': './labelsTr/MSWAL_0267.nii.gz'}, {'image': './imagesTr/MSWAL_0270_0000.nii.gz', 'label': './labelsTr/MSWAL_0270.nii.gz'}, {'image': './imagesTr/MSWAL_0271_0000.nii.gz', 'label': './labelsTr/MSWAL_0271.nii.gz'}, {'image': './imagesTr/MSWAL_0272_0000.nii.gz', 'label': './labelsTr/MSWAL_0272.nii.gz'}, {'image': './imagesTr/MSWAL_0273_0000.nii.gz', 'label': './labelsTr/MSWAL_0273.nii.gz'}, {'image': './imagesTr/MSWAL_0274_0000.nii.gz', 'label': './labelsTr/MSWAL_0274.nii.gz'}, {'image': './imagesTr/MSWAL_0275_0000.nii.gz', 'label': './labelsTr/MSWAL_0275.nii.gz'}, {'image': './imagesTr/MSWAL_0276_0000.nii.gz', 'label': './labelsTr/MSWAL_0276.nii.gz'}, {'image': './imagesTr/MSWAL_0277_0000.nii.gz', 'label': './labelsTr/MSWAL_0277.nii.gz'}, {'image': './imagesTr/MSWAL_0278_0000.nii.gz', 'label': './labelsTr/MSWAL_0278.nii.gz'}, {'image': './imagesTr/MSWAL_0279_0000.nii.gz', 'label': './labelsTr/MSWAL_0279.nii.gz'}, {'image': './imagesTr/MSWAL_0281_0000.nii.gz', 'label': './labelsTr/MSWAL_0281.nii.gz'}, {'image': './imagesTr/MSWAL_0282_0000.nii.gz', 'label': './labelsTr/MSWAL_0282.nii.gz'}, {'image': './imagesTr/MSWAL_0283_0000.nii.gz', 'label': './labelsTr/MSWAL_0283.nii.gz'}, 
{'image': './imagesTr/MSWAL_0284_0000.nii.gz', 'label': './labelsTr/MSWAL_0284.nii.gz'}, {'image': './imagesTr/MSWAL_0285_0000.nii.gz', 'label': './labelsTr/MSWAL_0285.nii.gz'}, {'image': './imagesTr/MSWAL_0288_0000.nii.gz', 'label': './labelsTr/MSWAL_0288.nii.gz'}, {'image': './imagesTr/MSWAL_0289_0000.nii.gz', 'label': './labelsTr/MSWAL_0289.nii.gz'}, {'image': './imagesTr/MSWAL_0290_0000.nii.gz', 'label': './labelsTr/MSWAL_0290.nii.gz'}, {'image': './imagesTr/MSWAL_0293_0000.nii.gz', 'label': './labelsTr/MSWAL_0293.nii.gz'}, {'image': './imagesTr/MSWAL_0296_0000.nii.gz', 'label': './labelsTr/MSWAL_0296.nii.gz'}, {'image': './imagesTr/MSWAL_0297_0000.nii.gz', 'label': './labelsTr/MSWAL_0297.nii.gz'}, {'image': './imagesTr/MSWAL_0301_0000.nii.gz', 'label': './labelsTr/MSWAL_0301.nii.gz'}, {'image': './imagesTr/MSWAL_0302_0000.nii.gz', 'label': './labelsTr/MSWAL_0302.nii.gz'}, {'image': './imagesTr/MSWAL_0303_0000.nii.gz', 'label': './labelsTr/MSWAL_0303.nii.gz'}, {'image': './imagesTr/MSWAL_0306_0000.nii.gz', 'label': './labelsTr/MSWAL_0306.nii.gz'}, {'image': './imagesTr/MSWAL_0307_0000.nii.gz', 'label': './labelsTr/MSWAL_0307.nii.gz'}, {'image': './imagesTr/MSWAL_0308_0000.nii.gz', 'label': './labelsTr/MSWAL_0308.nii.gz'}, {'image': './imagesTr/MSWAL_0311_0000.nii.gz', 'label': './labelsTr/MSWAL_0311.nii.gz'}, {'image': './imagesTr/MSWAL_0312_0000.nii.gz', 'label': './labelsTr/MSWAL_0312.nii.gz'}, {'image': './imagesTr/MSWAL_0313_0000.nii.gz', 'label': './labelsTr/MSWAL_0313.nii.gz'}, {'image': './imagesTr/MSWAL_0314_0000.nii.gz', 'label': './labelsTr/MSWAL_0314.nii.gz'}, {'image': './imagesTr/MSWAL_0316_0000.nii.gz', 'label': './labelsTr/MSWAL_0316.nii.gz'}, {'image': './imagesTr/MSWAL_0317_0000.nii.gz', 'label': './labelsTr/MSWAL_0317.nii.gz'}, {'image': './imagesTr/MSWAL_0318_0000.nii.gz', 'label': './labelsTr/MSWAL_0318.nii.gz'}, {'image': './imagesTr/MSWAL_0320_0000.nii.gz', 'label': './labelsTr/MSWAL_0320.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0323_0000.nii.gz', 'label': './labelsTr/MSWAL_0323.nii.gz'}, {'image': './imagesTr/MSWAL_0324_0000.nii.gz', 'label': './labelsTr/MSWAL_0324.nii.gz'}, {'image': './imagesTr/MSWAL_0326_0000.nii.gz', 'label': './labelsTr/MSWAL_0326.nii.gz'}, {'image': './imagesTr/MSWAL_0327_0000.nii.gz', 'label': './labelsTr/MSWAL_0327.nii.gz'}, {'image': './imagesTr/MSWAL_0328_0000.nii.gz', 'label': './labelsTr/MSWAL_0328.nii.gz'}, {'image': './imagesTr/MSWAL_0330_0000.nii.gz', 'label': './labelsTr/MSWAL_0330.nii.gz'}, {'image': './imagesTr/MSWAL_0331_0000.nii.gz', 'label': './labelsTr/MSWAL_0331.nii.gz'}, {'image': './imagesTr/MSWAL_0332_0000.nii.gz', 'label': './labelsTr/MSWAL_0332.nii.gz'}, {'image': './imagesTr/MSWAL_0333_0000.nii.gz', 'label': './labelsTr/MSWAL_0333.nii.gz'}, {'image': './imagesTr/MSWAL_0334_0000.nii.gz', 'label': './labelsTr/MSWAL_0334.nii.gz'}, {'image': './imagesTr/MSWAL_0335_0000.nii.gz', 'label': './labelsTr/MSWAL_0335.nii.gz'}, {'image': './imagesTr/MSWAL_0336_0000.nii.gz', 'label': './labelsTr/MSWAL_0336.nii.gz'}, {'image': './imagesTr/MSWAL_0337_0000.nii.gz', 'label': './labelsTr/MSWAL_0337.nii.gz'}, {'image': './imagesTr/MSWAL_0338_0000.nii.gz', 'label': './labelsTr/MSWAL_0338.nii.gz'}, {'image': './imagesTr/MSWAL_0341_0000.nii.gz', 'label': './labelsTr/MSWAL_0341.nii.gz'}, {'image': './imagesTr/MSWAL_0342_0000.nii.gz', 'label': './labelsTr/MSWAL_0342.nii.gz'}, {'image': './imagesTr/MSWAL_0343_0000.nii.gz', 'label': './labelsTr/MSWAL_0343.nii.gz'}, {'image': './imagesTr/MSWAL_0344_0000.nii.gz', 'label': './labelsTr/MSWAL_0344.nii.gz'}, {'image': './imagesTr/MSWAL_0345_0000.nii.gz', 'label': './labelsTr/MSWAL_0345.nii.gz'}, {'image': './imagesTr/MSWAL_0346_0000.nii.gz', 'label': './labelsTr/MSWAL_0346.nii.gz'}, {'image': './imagesTr/MSWAL_0348_0000.nii.gz', 'label': './labelsTr/MSWAL_0348.nii.gz'}, {'image': './imagesTr/MSWAL_0353_0000.nii.gz', 'label': './labelsTr/MSWAL_0353.nii.gz'}, {'image': './imagesTr/MSWAL_0354_0000.nii.gz', 
'label': './labelsTr/MSWAL_0354.nii.gz'}, {'image': './imagesTr/MSWAL_0355_0000.nii.gz', 'label': './labelsTr/MSWAL_0355.nii.gz'}, {'image': './imagesTr/MSWAL_0356_0000.nii.gz', 'label': './labelsTr/MSWAL_0356.nii.gz'}, {'image': './imagesTr/MSWAL_0357_0000.nii.gz', 'label': './labelsTr/MSWAL_0357.nii.gz'}, {'image': './imagesTr/MSWAL_0360_0000.nii.gz', 'label': './labelsTr/MSWAL_0360.nii.gz'}, {'image': './imagesTr/MSWAL_0361_0000.nii.gz', 'label': './labelsTr/MSWAL_0361.nii.gz'}, {'image': './imagesTr/MSWAL_0362_0000.nii.gz', 'label': './labelsTr/MSWAL_0362.nii.gz'}, {'image': './imagesTr/MSWAL_0363_0000.nii.gz', 'label': './labelsTr/MSWAL_0363.nii.gz'}, {'image': './imagesTr/MSWAL_0365_0000.nii.gz', 'label': './labelsTr/MSWAL_0365.nii.gz'}, {'image': './imagesTr/MSWAL_0366_0000.nii.gz', 'label': './labelsTr/MSWAL_0366.nii.gz'}, {'image': './imagesTr/MSWAL_0369_0000.nii.gz', 'label': './labelsTr/MSWAL_0369.nii.gz'}, {'image': './imagesTr/MSWAL_0370_0000.nii.gz', 'label': './labelsTr/MSWAL_0370.nii.gz'}, {'image': './imagesTr/MSWAL_0373_0000.nii.gz', 'label': './labelsTr/MSWAL_0373.nii.gz'}, {'image': './imagesTr/MSWAL_0374_0000.nii.gz', 'label': './labelsTr/MSWAL_0374.nii.gz'}, {'image': './imagesTr/MSWAL_0375_0000.nii.gz', 'label': './labelsTr/MSWAL_0375.nii.gz'}, {'image': './imagesTr/MSWAL_0376_0000.nii.gz', 'label': './labelsTr/MSWAL_0376.nii.gz'}, {'image': './imagesTr/MSWAL_0378_0000.nii.gz', 'label': './labelsTr/MSWAL_0378.nii.gz'}, {'image': './imagesTr/MSWAL_0379_0000.nii.gz', 'label': './labelsTr/MSWAL_0379.nii.gz'}, {'image': './imagesTr/MSWAL_0380_0000.nii.gz', 'label': './labelsTr/MSWAL_0380.nii.gz'}, {'image': './imagesTr/MSWAL_0381_0000.nii.gz', 'label': './labelsTr/MSWAL_0381.nii.gz'}, {'image': './imagesTr/MSWAL_0382_0000.nii.gz', 'label': './labelsTr/MSWAL_0382.nii.gz'}, {'image': './imagesTr/MSWAL_0387_0000.nii.gz', 'label': './labelsTr/MSWAL_0387.nii.gz'}, {'image': './imagesTr/MSWAL_0388_0000.nii.gz', 'label': './labelsTr/MSWAL_0388.nii.gz'}, 
{'image': './imagesTr/MSWAL_0389_0000.nii.gz', 'label': './labelsTr/MSWAL_0389.nii.gz'}, {'image': './imagesTr/MSWAL_0390_0000.nii.gz', 'label': './labelsTr/MSWAL_0390.nii.gz'}, {'image': './imagesTr/MSWAL_0391_0000.nii.gz', 'label': './labelsTr/MSWAL_0391.nii.gz'}, {'image': './imagesTr/MSWAL_0392_0000.nii.gz', 'label': './labelsTr/MSWAL_0392.nii.gz'}, {'image': './imagesTr/MSWAL_0393_0000.nii.gz', 'label': './labelsTr/MSWAL_0393.nii.gz'}, {'image': './imagesTr/MSWAL_0397_0000.nii.gz', 'label': './labelsTr/MSWAL_0397.nii.gz'}, {'image': './imagesTr/MSWAL_0398_0000.nii.gz', 'label': './labelsTr/MSWAL_0398.nii.gz'}, {'image': './imagesTr/MSWAL_0399_0000.nii.gz', 'label': './labelsTr/MSWAL_0399.nii.gz'}, {'image': './imagesTr/MSWAL_0400_0000.nii.gz', 'label': './labelsTr/MSWAL_0400.nii.gz'}, {'image': './imagesTr/MSWAL_0402_0000.nii.gz', 'label': './labelsTr/MSWAL_0402.nii.gz'}, {'image': './imagesTr/MSWAL_0403_0000.nii.gz', 'label': './labelsTr/MSWAL_0403.nii.gz'}, {'image': './imagesTr/MSWAL_0407_0000.nii.gz', 'label': './labelsTr/MSWAL_0407.nii.gz'}, {'image': './imagesTr/MSWAL_0409_0000.nii.gz', 'label': './labelsTr/MSWAL_0409.nii.gz'}, {'image': './imagesTr/MSWAL_0410_0000.nii.gz', 'label': './labelsTr/MSWAL_0410.nii.gz'}, {'image': './imagesTr/MSWAL_0411_0000.nii.gz', 'label': './labelsTr/MSWAL_0411.nii.gz'}, {'image': './imagesTr/MSWAL_0412_0000.nii.gz', 'label': './labelsTr/MSWAL_0412.nii.gz'}, {'image': './imagesTr/MSWAL_0414_0000.nii.gz', 'label': './labelsTr/MSWAL_0414.nii.gz'}, {'image': './imagesTr/MSWAL_0415_0000.nii.gz', 'label': './labelsTr/MSWAL_0415.nii.gz'}, {'image': './imagesTr/MSWAL_0416_0000.nii.gz', 'label': './labelsTr/MSWAL_0416.nii.gz'}, {'image': './imagesTr/MSWAL_0417_0000.nii.gz', 'label': './labelsTr/MSWAL_0417.nii.gz'}, {'image': './imagesTr/MSWAL_0418_0000.nii.gz', 'label': './labelsTr/MSWAL_0418.nii.gz'}, {'image': './imagesTr/MSWAL_0419_0000.nii.gz', 'label': './labelsTr/MSWAL_0419.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0420_0000.nii.gz', 'label': './labelsTr/MSWAL_0420.nii.gz'}, {'image': './imagesTr/MSWAL_0421_0000.nii.gz', 'label': './labelsTr/MSWAL_0421.nii.gz'}, {'image': './imagesTr/MSWAL_0422_0000.nii.gz', 'label': './labelsTr/MSWAL_0422.nii.gz'}, {'image': './imagesTr/MSWAL_0423_0000.nii.gz', 'label': './labelsTr/MSWAL_0423.nii.gz'}, {'image': './imagesTr/MSWAL_0425_0000.nii.gz', 'label': './labelsTr/MSWAL_0425.nii.gz'}, {'image': './imagesTr/MSWAL_0426_0000.nii.gz', 'label': './labelsTr/MSWAL_0426.nii.gz'}, {'image': './imagesTr/MSWAL_0427_0000.nii.gz', 'label': './labelsTr/MSWAL_0427.nii.gz'}, {'image': './imagesTr/MSWAL_0428_0000.nii.gz', 'label': './labelsTr/MSWAL_0428.nii.gz'}, {'image': './imagesTr/MSWAL_0429_0000.nii.gz', 'label': './labelsTr/MSWAL_0429.nii.gz'}, {'image': './imagesTr/MSWAL_0430_0000.nii.gz', 'label': './labelsTr/MSWAL_0430.nii.gz'}, {'image': './imagesTr/MSWAL_0431_0000.nii.gz', 'label': './labelsTr/MSWAL_0431.nii.gz'}, {'image': './imagesTr/MSWAL_0432_0000.nii.gz', 'label': './labelsTr/MSWAL_0432.nii.gz'}, {'image': './imagesTr/MSWAL_0434_0000.nii.gz', 'label': './labelsTr/MSWAL_0434.nii.gz'}, {'image': './imagesTr/MSWAL_0435_0000.nii.gz', 'label': './labelsTr/MSWAL_0435.nii.gz'}, {'image': './imagesTr/MSWAL_0436_0000.nii.gz', 'label': './labelsTr/MSWAL_0436.nii.gz'}, {'image': './imagesTr/MSWAL_0437_0000.nii.gz', 'label': './labelsTr/MSWAL_0437.nii.gz'}, {'image': './imagesTr/MSWAL_0438_0000.nii.gz', 'label': './labelsTr/MSWAL_0438.nii.gz'}, {'image': './imagesTr/MSWAL_0439_0000.nii.gz', 'label': './labelsTr/MSWAL_0439.nii.gz'}, {'image': './imagesTr/MSWAL_0440_0000.nii.gz', 'label': './labelsTr/MSWAL_0440.nii.gz'}, {'image': './imagesTr/MSWAL_0442_0000.nii.gz', 'label': './labelsTr/MSWAL_0442.nii.gz'}, {'image': './imagesTr/MSWAL_0446_0000.nii.gz', 'label': './labelsTr/MSWAL_0446.nii.gz'}, {'image': './imagesTr/MSWAL_0447_0000.nii.gz', 'label': './labelsTr/MSWAL_0447.nii.gz'}, {'image': './imagesTr/MSWAL_0452_0000.nii.gz', 
'label': './labelsTr/MSWAL_0452.nii.gz'}, {'image': './imagesTr/MSWAL_0453_0000.nii.gz', 'label': './labelsTr/MSWAL_0453.nii.gz'}, {'image': './imagesTr/MSWAL_0455_0000.nii.gz', 'label': './labelsTr/MSWAL_0455.nii.gz'}, {'image': './imagesTr/MSWAL_0457_0000.nii.gz', 'label': './labelsTr/MSWAL_0457.nii.gz'}, {'image': './imagesTr/MSWAL_0460_0000.nii.gz', 'label': './labelsTr/MSWAL_0460.nii.gz'}, {'image': './imagesTr/MSWAL_0461_0000.nii.gz', 'label': './labelsTr/MSWAL_0461.nii.gz'}, {'image': './imagesTr/MSWAL_0463_0000.nii.gz', 'label': './labelsTr/MSWAL_0463.nii.gz'}, {'image': './imagesTr/MSWAL_0464_0000.nii.gz', 'label': './labelsTr/MSWAL_0464.nii.gz'}, {'image': './imagesTr/MSWAL_0465_0000.nii.gz', 'label': './labelsTr/MSWAL_0465.nii.gz'}, {'image': './imagesTr/MSWAL_0466_0000.nii.gz', 'label': './labelsTr/MSWAL_0466.nii.gz'}, {'image': './imagesTr/MSWAL_0468_0000.nii.gz', 'label': './labelsTr/MSWAL_0468.nii.gz'}, {'image': './imagesTr/MSWAL_0470_0000.nii.gz', 'label': './labelsTr/MSWAL_0470.nii.gz'}, {'image': './imagesTr/MSWAL_0471_0000.nii.gz', 'label': './labelsTr/MSWAL_0471.nii.gz'}, {'image': './imagesTr/MSWAL_0473_0000.nii.gz', 'label': './labelsTr/MSWAL_0473.nii.gz'}, {'image': './imagesTr/MSWAL_0474_0000.nii.gz', 'label': './labelsTr/MSWAL_0474.nii.gz'}, {'image': './imagesTr/MSWAL_0475_0000.nii.gz', 'label': './labelsTr/MSWAL_0475.nii.gz'}, {'image': './imagesTr/MSWAL_0476_0000.nii.gz', 'label': './labelsTr/MSWAL_0476.nii.gz'}, {'image': './imagesTr/MSWAL_0477_0000.nii.gz', 'label': './labelsTr/MSWAL_0477.nii.gz'}, {'image': './imagesTr/MSWAL_0479_0000.nii.gz', 'label': './labelsTr/MSWAL_0479.nii.gz'}, {'image': './imagesTr/MSWAL_0480_0000.nii.gz', 'label': './labelsTr/MSWAL_0480.nii.gz'}, {'image': './imagesTr/MSWAL_0482_0000.nii.gz', 'label': './labelsTr/MSWAL_0482.nii.gz'}, {'image': './imagesTr/MSWAL_0483_0000.nii.gz', 'label': './labelsTr/MSWAL_0483.nii.gz'}, {'image': './imagesTr/MSWAL_0484_0000.nii.gz', 'label': './labelsTr/MSWAL_0484.nii.gz'}, 
{'image': './imagesTr/MSWAL_0485_0000.nii.gz', 'label': './labelsTr/MSWAL_0485.nii.gz'}, {'image': './imagesTr/MSWAL_0486_0000.nii.gz', 'label': './labelsTr/MSWAL_0486.nii.gz'}, {'image': './imagesTr/MSWAL_0487_0000.nii.gz', 'label': './labelsTr/MSWAL_0487.nii.gz'}, {'image': './imagesTr/MSWAL_0488_0000.nii.gz', 'label': './labelsTr/MSWAL_0488.nii.gz'}, {'image': './imagesTr/MSWAL_0489_0000.nii.gz', 'label': './labelsTr/MSWAL_0489.nii.gz'}, {'image': './imagesTr/MSWAL_0490_0000.nii.gz', 'label': './labelsTr/MSWAL_0490.nii.gz'}, {'image': './imagesTr/MSWAL_0491_0000.nii.gz', 'label': './labelsTr/MSWAL_0491.nii.gz'}, {'image': './imagesTr/MSWAL_0492_0000.nii.gz', 'label': './labelsTr/MSWAL_0492.nii.gz'}, {'image': './imagesTr/MSWAL_0493_0000.nii.gz', 'label': './labelsTr/MSWAL_0493.nii.gz'}, {'image': './imagesTr/MSWAL_0495_0000.nii.gz', 'label': './labelsTr/MSWAL_0495.nii.gz'}, {'image': './imagesTr/MSWAL_0497_0000.nii.gz', 'label': './labelsTr/MSWAL_0497.nii.gz'}, {'image': './imagesTr/MSWAL_0498_0000.nii.gz', 'label': './labelsTr/MSWAL_0498.nii.gz'}, {'image': './imagesTr/MSWAL_0500_0000.nii.gz', 'label': './labelsTr/MSWAL_0500.nii.gz'}, {'image': './imagesTr/MSWAL_0501_0000.nii.gz', 'label': './labelsTr/MSWAL_0501.nii.gz'}, {'image': './imagesTr/MSWAL_0504_0000.nii.gz', 'label': './labelsTr/MSWAL_0504.nii.gz'}, {'image': './imagesTr/MSWAL_0505_0000.nii.gz', 'label': './labelsTr/MSWAL_0505.nii.gz'}, {'image': './imagesTr/MSWAL_0506_0000.nii.gz', 'label': './labelsTr/MSWAL_0506.nii.gz'}, {'image': './imagesTr/MSWAL_0507_0000.nii.gz', 'label': './labelsTr/MSWAL_0507.nii.gz'}, {'image': './imagesTr/MSWAL_0508_0000.nii.gz', 'label': './labelsTr/MSWAL_0508.nii.gz'}, {'image': './imagesTr/MSWAL_0509_0000.nii.gz', 'label': './labelsTr/MSWAL_0509.nii.gz'}, {'image': './imagesTr/MSWAL_0510_0000.nii.gz', 'label': './labelsTr/MSWAL_0510.nii.gz'}, {'image': './imagesTr/MSWAL_0512_0000.nii.gz', 'label': './labelsTr/MSWAL_0512.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0516_0000.nii.gz', 'label': './labelsTr/MSWAL_0516.nii.gz'}, {'image': './imagesTr/MSWAL_0518_0000.nii.gz', 'label': './labelsTr/MSWAL_0518.nii.gz'}, {'image': './imagesTr/MSWAL_0519_0000.nii.gz', 'label': './labelsTr/MSWAL_0519.nii.gz'}, {'image': './imagesTr/MSWAL_0521_0000.nii.gz', 'label': './labelsTr/MSWAL_0521.nii.gz'}, {'image': './imagesTr/MSWAL_0522_0000.nii.gz', 'label': './labelsTr/MSWAL_0522.nii.gz'}, {'image': './imagesTr/MSWAL_0523_0000.nii.gz', 'label': './labelsTr/MSWAL_0523.nii.gz'}, {'image': './imagesTr/MSWAL_0524_0000.nii.gz', 'label': './labelsTr/MSWAL_0524.nii.gz'}, {'image': './imagesTr/MSWAL_0526_0000.nii.gz', 'label': './labelsTr/MSWAL_0526.nii.gz'}, {'image': './imagesTr/MSWAL_0527_0000.nii.gz', 'label': './labelsTr/MSWAL_0527.nii.gz'}, {'image': './imagesTr/MSWAL_0530_0000.nii.gz', 'label': './labelsTr/MSWAL_0530.nii.gz'}, {'image': './imagesTr/MSWAL_0531_0000.nii.gz', 'label': './labelsTr/MSWAL_0531.nii.gz'}, {'image': './imagesTr/MSWAL_0534_0000.nii.gz', 'label': './labelsTr/MSWAL_0534.nii.gz'}, {'image': './imagesTr/MSWAL_0535_0000.nii.gz', 'label': './labelsTr/MSWAL_0535.nii.gz'}, {'image': './imagesTr/MSWAL_0536_0000.nii.gz', 'label': './labelsTr/MSWAL_0536.nii.gz'}, {'image': './imagesTr/MSWAL_0538_0000.nii.gz', 'label': './labelsTr/MSWAL_0538.nii.gz'}, {'image': './imagesTr/MSWAL_0539_0000.nii.gz', 'label': './labelsTr/MSWAL_0539.nii.gz'}, {'image': './imagesTr/MSWAL_0540_0000.nii.gz', 'label': './labelsTr/MSWAL_0540.nii.gz'}, {'image': './imagesTr/MSWAL_0542_0000.nii.gz', 'label': './labelsTr/MSWAL_0542.nii.gz'}, {'image': './imagesTr/MSWAL_0544_0000.nii.gz', 'label': './labelsTr/MSWAL_0544.nii.gz'}, {'image': './imagesTr/MSWAL_0545_0000.nii.gz', 'label': './labelsTr/MSWAL_0545.nii.gz'}, {'image': './imagesTr/MSWAL_0546_0000.nii.gz', 'label': './labelsTr/MSWAL_0546.nii.gz'}, {'image': './imagesTr/MSWAL_0547_0000.nii.gz', 'label': './labelsTr/MSWAL_0547.nii.gz'}, {'image': './imagesTr/MSWAL_0548_0000.nii.gz', 
'label': './labelsTr/MSWAL_0548.nii.gz'}, {'image': './imagesTr/MSWAL_0549_0000.nii.gz', 'label': './labelsTr/MSWAL_0549.nii.gz'}, {'image': './imagesTr/MSWAL_0550_0000.nii.gz', 'label': './labelsTr/MSWAL_0550.nii.gz'}, {'image': './imagesTr/MSWAL_0551_0000.nii.gz', 'label': './labelsTr/MSWAL_0551.nii.gz'}, {'image': './imagesTr/MSWAL_0552_0000.nii.gz', 'label': './labelsTr/MSWAL_0552.nii.gz'}, {'image': './imagesTr/MSWAL_0553_0000.nii.gz', 'label': './labelsTr/MSWAL_0553.nii.gz'}, {'image': './imagesTr/MSWAL_0554_0000.nii.gz', 'label': './labelsTr/MSWAL_0554.nii.gz'}, {'image': './imagesTr/MSWAL_0555_0000.nii.gz', 'label': './labelsTr/MSWAL_0555.nii.gz'}, {'image': './imagesTr/MSWAL_0556_0000.nii.gz', 'label': './labelsTr/MSWAL_0556.nii.gz'}, {'image': './imagesTr/MSWAL_0557_0000.nii.gz', 'label': './labelsTr/MSWAL_0557.nii.gz'}, {'image': './imagesTr/MSWAL_0558_0000.nii.gz', 'label': './labelsTr/MSWAL_0558.nii.gz'}, {'image': './imagesTr/MSWAL_0559_0000.nii.gz', 'label': './labelsTr/MSWAL_0559.nii.gz'}, {'image': './imagesTr/MSWAL_0561_0000.nii.gz', 'label': './labelsTr/MSWAL_0561.nii.gz'}, {'image': './imagesTr/MSWAL_0562_0000.nii.gz', 'label': './labelsTr/MSWAL_0562.nii.gz'}, {'image': './imagesTr/MSWAL_0563_0000.nii.gz', 'label': './labelsTr/MSWAL_0563.nii.gz'}, {'image': './imagesTr/MSWAL_0564_0000.nii.gz', 'label': './labelsTr/MSWAL_0564.nii.gz'}, {'image': './imagesTr/MSWAL_0566_0000.nii.gz', 'label': './labelsTr/MSWAL_0566.nii.gz'}, {'image': './imagesTr/MSWAL_0567_0000.nii.gz', 'label': './labelsTr/MSWAL_0567.nii.gz'}, {'image': './imagesTr/MSWAL_0568_0000.nii.gz', 'label': './labelsTr/MSWAL_0568.nii.gz'}, {'image': './imagesTr/MSWAL_0571_0000.nii.gz', 'label': './labelsTr/MSWAL_0571.nii.gz'}, {'image': './imagesTr/MSWAL_0573_0000.nii.gz', 'label': './labelsTr/MSWAL_0573.nii.gz'}, {'image': './imagesTr/MSWAL_0574_0000.nii.gz', 'label': './labelsTr/MSWAL_0574.nii.gz'}, {'image': './imagesTr/MSWAL_0575_0000.nii.gz', 'label': './labelsTr/MSWAL_0575.nii.gz'}, 
{'image': './imagesTr/MSWAL_0577_0000.nii.gz', 'label': './labelsTr/MSWAL_0577.nii.gz'}, {'image': './imagesTr/MSWAL_0578_0000.nii.gz', 'label': './labelsTr/MSWAL_0578.nii.gz'}, {'image': './imagesTr/MSWAL_0579_0000.nii.gz', 'label': './labelsTr/MSWAL_0579.nii.gz'}, {'image': './imagesTr/MSWAL_0580_0000.nii.gz', 'label': './labelsTr/MSWAL_0580.nii.gz'}, {'image': './imagesTr/MSWAL_0581_0000.nii.gz', 'label': './labelsTr/MSWAL_0581.nii.gz'}, {'image': './imagesTr/MSWAL_0582_0000.nii.gz', 'label': './labelsTr/MSWAL_0582.nii.gz'}, {'image': './imagesTr/MSWAL_0583_0000.nii.gz', 'label': './labelsTr/MSWAL_0583.nii.gz'}, {'image': './imagesTr/MSWAL_0584_0000.nii.gz', 'label': './labelsTr/MSWAL_0584.nii.gz'}, {'image': './imagesTr/MSWAL_0586_0000.nii.gz', 'label': './labelsTr/MSWAL_0586.nii.gz'}, {'image': './imagesTr/MSWAL_0590_0000.nii.gz', 'label': './labelsTr/MSWAL_0590.nii.gz'}, {'image': './imagesTr/MSWAL_0591_0000.nii.gz', 'label': './labelsTr/MSWAL_0591.nii.gz'}, {'image': './imagesTr/MSWAL_0592_0000.nii.gz', 'label': './labelsTr/MSWAL_0592.nii.gz'}, {'image': './imagesTr/MSWAL_0593_0000.nii.gz', 'label': './labelsTr/MSWAL_0593.nii.gz'}, {'image': './imagesTr/MSWAL_0595_0000.nii.gz', 'label': './labelsTr/MSWAL_0595.nii.gz'}, {'image': './imagesTr/MSWAL_0596_0000.nii.gz', 'label': './labelsTr/MSWAL_0596.nii.gz'}, {'image': './imagesTr/MSWAL_0597_0000.nii.gz', 'label': './labelsTr/MSWAL_0597.nii.gz'}, {'image': './imagesTr/MSWAL_0598_0000.nii.gz', 'label': './labelsTr/MSWAL_0598.nii.gz'}, {'image': './imagesTr/MSWAL_0599_0000.nii.gz', 'label': './labelsTr/MSWAL_0599.nii.gz'}, {'image': './imagesTr/MSWAL_0600_0000.nii.gz', 'label': './labelsTr/MSWAL_0600.nii.gz'}, {'image': './imagesTr/MSWAL_0601_0000.nii.gz', 'label': './labelsTr/MSWAL_0601.nii.gz'}, {'image': './imagesTr/MSWAL_0602_0000.nii.gz', 'label': './labelsTr/MSWAL_0602.nii.gz'}, {'image': './imagesTr/MSWAL_0604_0000.nii.gz', 'label': './labelsTr/MSWAL_0604.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0605_0000.nii.gz', 'label': './labelsTr/MSWAL_0605.nii.gz'}, {'image': './imagesTr/MSWAL_0608_0000.nii.gz', 'label': './labelsTr/MSWAL_0608.nii.gz'}, {'image': './imagesTr/MSWAL_0612_0000.nii.gz', 'label': './labelsTr/MSWAL_0612.nii.gz'}, {'image': './imagesTr/MSWAL_0614_0000.nii.gz', 'label': './labelsTr/MSWAL_0614.nii.gz'}, {'image': './imagesTr/MSWAL_0615_0000.nii.gz', 'label': './labelsTr/MSWAL_0615.nii.gz'}, {'image': './imagesTr/MSWAL_0616_0000.nii.gz', 'label': './labelsTr/MSWAL_0616.nii.gz'}, {'image': './imagesTr/MSWAL_0617_0000.nii.gz', 'label': './labelsTr/MSWAL_0617.nii.gz'}, {'image': './imagesTr/MSWAL_0621_0000.nii.gz', 'label': './labelsTr/MSWAL_0621.nii.gz'}, {'image': './imagesTr/MSWAL_0623_0000.nii.gz', 'label': './labelsTr/MSWAL_0623.nii.gz'}, {'image': './imagesTr/MSWAL_0625_0000.nii.gz', 'label': './labelsTr/MSWAL_0625.nii.gz'}, {'image': './imagesTr/MSWAL_0626_0000.nii.gz', 'label': './labelsTr/MSWAL_0626.nii.gz'}, {'image': './imagesTr/MSWAL_0627_0000.nii.gz', 'label': './labelsTr/MSWAL_0627.nii.gz'}, {'image': './imagesTr/MSWAL_0628_0000.nii.gz', 'label': './labelsTr/MSWAL_0628.nii.gz'}, {'image': './imagesTr/MSWAL_0629_0000.nii.gz', 'label': './labelsTr/MSWAL_0629.nii.gz'}, {'image': './imagesTr/MSWAL_0630_0000.nii.gz', 'label': './labelsTr/MSWAL_0630.nii.gz'}, {'image': './imagesTr/MSWAL_0632_0000.nii.gz', 'label': './labelsTr/MSWAL_0632.nii.gz'}, {'image': './imagesTr/MSWAL_0635_0000.nii.gz', 'label': './labelsTr/MSWAL_0635.nii.gz'}, {'image': './imagesTr/MSWAL_0636_0000.nii.gz', 'label': './labelsTr/MSWAL_0636.nii.gz'}, {'image': './imagesTr/MSWAL_0638_0000.nii.gz', 'label': './labelsTr/MSWAL_0638.nii.gz'}, {'image': './imagesTr/MSWAL_0640_0000.nii.gz', 'label': './labelsTr/MSWAL_0640.nii.gz'}, {'image': './imagesTr/MSWAL_0641_0000.nii.gz', 'label': './labelsTr/MSWAL_0641.nii.gz'}, {'image': './imagesTr/MSWAL_0643_0000.nii.gz', 'label': './labelsTr/MSWAL_0643.nii.gz'}, {'image': './imagesTr/MSWAL_0644_0000.nii.gz', 
'label': './labelsTr/MSWAL_0644.nii.gz'}, {'image': './imagesTr/MSWAL_0646_0000.nii.gz', 'label': './labelsTr/MSWAL_0646.nii.gz'}, {'image': './imagesTr/MSWAL_0648_0000.nii.gz', 'label': './labelsTr/MSWAL_0648.nii.gz'}, {'image': './imagesTr/MSWAL_0649_0000.nii.gz', 'label': './labelsTr/MSWAL_0649.nii.gz'}, {'image': './imagesTr/MSWAL_0650_0000.nii.gz', 'label': './labelsTr/MSWAL_0650.nii.gz'}, {'image': './imagesTr/MSWAL_0651_0000.nii.gz', 'label': './labelsTr/MSWAL_0651.nii.gz'}, {'image': './imagesTr/MSWAL_0653_0000.nii.gz', 'label': './labelsTr/MSWAL_0653.nii.gz'}, {'image': './imagesTr/MSWAL_0654_0000.nii.gz', 'label': './labelsTr/MSWAL_0654.nii.gz'}, {'image': './imagesTr/MSWAL_0655_0000.nii.gz', 'label': './labelsTr/MSWAL_0655.nii.gz'}, {'image': './imagesTr/MSWAL_0656_0000.nii.gz', 'label': './labelsTr/MSWAL_0656.nii.gz'}, {'image': './imagesTr/MSWAL_0658_0000.nii.gz', 'label': './labelsTr/MSWAL_0658.nii.gz'}, {'image': './imagesTr/MSWAL_0660_0000.nii.gz', 'label': './labelsTr/MSWAL_0660.nii.gz'}, {'image': './imagesTr/MSWAL_0661_0000.nii.gz', 'label': './labelsTr/MSWAL_0661.nii.gz'}, {'image': './imagesTr/MSWAL_0662_0000.nii.gz', 'label': './labelsTr/MSWAL_0662.nii.gz'}, {'image': './imagesTr/MSWAL_0663_0000.nii.gz', 'label': './labelsTr/MSWAL_0663.nii.gz'}, {'image': './imagesTr/MSWAL_0666_0000.nii.gz', 'label': './labelsTr/MSWAL_0666.nii.gz'}, {'image': './imagesTr/MSWAL_0667_0000.nii.gz', 'label': './labelsTr/MSWAL_0667.nii.gz'}, {'image': './imagesTr/MSWAL_0668_0000.nii.gz', 'label': './labelsTr/MSWAL_0668.nii.gz'}, {'image': './imagesTr/MSWAL_0669_0000.nii.gz', 'label': './labelsTr/MSWAL_0669.nii.gz'}, {'image': './imagesTr/MSWAL_0670_0000.nii.gz', 'label': './labelsTr/MSWAL_0670.nii.gz'}, {'image': './imagesTr/MSWAL_0671_0000.nii.gz', 'label': './labelsTr/MSWAL_0671.nii.gz'}, {'image': './imagesTr/MSWAL_0673_0000.nii.gz', 'label': './labelsTr/MSWAL_0673.nii.gz'}, {'image': './imagesTr/MSWAL_0674_0000.nii.gz', 'label': './labelsTr/MSWAL_0674.nii.gz'}, 
{'image': './imagesTr/MSWAL_0675_0000.nii.gz', 'label': './labelsTr/MSWAL_0675.nii.gz'}, {'image': './imagesTr/MSWAL_0676_0000.nii.gz', 'label': './labelsTr/MSWAL_0676.nii.gz'}, {'image': './imagesTr/MSWAL_0677_0000.nii.gz', 'label': './labelsTr/MSWAL_0677.nii.gz'}, {'image': './imagesTr/MSWAL_0679_0000.nii.gz', 'label': './labelsTr/MSWAL_0679.nii.gz'}, {'image': './imagesTr/MSWAL_0680_0000.nii.gz', 'label': './labelsTr/MSWAL_0680.nii.gz'}, {'image': './imagesTr/MSWAL_0681_0000.nii.gz', 'label': './labelsTr/MSWAL_0681.nii.gz'}, {'image': './imagesTr/MSWAL_0682_0000.nii.gz', 'label': './labelsTr/MSWAL_0682.nii.gz'}, {'image': './imagesTr/MSWAL_0685_0000.nii.gz', 'label': './labelsTr/MSWAL_0685.nii.gz'}, {'image': './imagesTr/MSWAL_0686_0000.nii.gz', 'label': './labelsTr/MSWAL_0686.nii.gz'}, {'image': './imagesTr/MSWAL_0687_0000.nii.gz', 'label': './labelsTr/MSWAL_0687.nii.gz'}, {'image': './imagesTr/MSWAL_0688_0000.nii.gz', 'label': './labelsTr/MSWAL_0688.nii.gz'}, {'image': './imagesTr/MSWAL_0690_0000.nii.gz', 'label': './labelsTr/MSWAL_0690.nii.gz'}, {'image': './imagesTr/MSWAL_0692_0000.nii.gz', 'label': './labelsTr/MSWAL_0692.nii.gz'}, {'image': './imagesTr/MSWAL_0693_0000.nii.gz', 'label': './labelsTr/MSWAL_0693.nii.gz'}, {'image': './imagesTr/MSWAL_0694_0000.nii.gz', 'label': './labelsTr/MSWAL_0694.nii.gz'}], 'test': [{'image': './imagesTs/MSWAL_0004_0000.nii.gz', 'label': './labelsTs/MSWAL_0004.nii.gz'}, {'image': './imagesTs/MSWAL_0005_0000.nii.gz', 'label': './labelsTs/MSWAL_0005.nii.gz'}, {'image': './imagesTs/MSWAL_0006_0000.nii.gz', 'label': './labelsTs/MSWAL_0006.nii.gz'}, {'image': './imagesTs/MSWAL_0007_0000.nii.gz', 'label': './labelsTs/MSWAL_0007.nii.gz'}, {'image': './imagesTs/MSWAL_0010_0000.nii.gz', 'label': './labelsTs/MSWAL_0010.nii.gz'}, {'image': './imagesTs/MSWAL_0012_0000.nii.gz', 'label': './labelsTs/MSWAL_0012.nii.gz'}, {'image': './imagesTs/MSWAL_0016_0000.nii.gz', 'label': './labelsTs/MSWAL_0016.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0019_0000.nii.gz', 'label': './labelsTs/MSWAL_0019.nii.gz'}, {'image': './imagesTs/MSWAL_0023_0000.nii.gz', 'label': './labelsTs/MSWAL_0023.nii.gz'}, {'image': './imagesTs/MSWAL_0025_0000.nii.gz', 'label': './labelsTs/MSWAL_0025.nii.gz'}, {'image': './imagesTs/MSWAL_0030_0000.nii.gz', 'label': './labelsTs/MSWAL_0030.nii.gz'}, {'image': './imagesTs/MSWAL_0036_0000.nii.gz', 'label': './labelsTs/MSWAL_0036.nii.gz'}, {'image': './imagesTs/MSWAL_0043_0000.nii.gz', 'label': './labelsTs/MSWAL_0043.nii.gz'}, {'image': './imagesTs/MSWAL_0044_0000.nii.gz', 'label': './labelsTs/MSWAL_0044.nii.gz'}, {'image': './imagesTs/MSWAL_0047_0000.nii.gz', 'label': './labelsTs/MSWAL_0047.nii.gz'}, {'image': './imagesTs/MSWAL_0048_0000.nii.gz', 'label': './labelsTs/MSWAL_0048.nii.gz'}, {'image': './imagesTs/MSWAL_0053_0000.nii.gz', 'label': './labelsTs/MSWAL_0053.nii.gz'}, {'image': './imagesTs/MSWAL_0058_0000.nii.gz', 'label': './labelsTs/MSWAL_0058.nii.gz'}, {'image': './imagesTs/MSWAL_0062_0000.nii.gz', 'label': './labelsTs/MSWAL_0062.nii.gz'}, {'image': './imagesTs/MSWAL_0068_0000.nii.gz', 'label': './labelsTs/MSWAL_0068.nii.gz'}, {'image': './imagesTs/MSWAL_0070_0000.nii.gz', 'label': './labelsTs/MSWAL_0070.nii.gz'}, {'image': './imagesTs/MSWAL_0071_0000.nii.gz', 'label': './labelsTs/MSWAL_0071.nii.gz'}, {'image': './imagesTs/MSWAL_0073_0000.nii.gz', 'label': './labelsTs/MSWAL_0073.nii.gz'}, {'image': './imagesTs/MSWAL_0074_0000.nii.gz', 'label': './labelsTs/MSWAL_0074.nii.gz'}, {'image': './imagesTs/MSWAL_0076_0000.nii.gz', 'label': './labelsTs/MSWAL_0076.nii.gz'}, {'image': './imagesTs/MSWAL_0078_0000.nii.gz', 'label': './labelsTs/MSWAL_0078.nii.gz'}, {'image': './imagesTs/MSWAL_0079_0000.nii.gz', 'label': './labelsTs/MSWAL_0079.nii.gz'}, {'image': './imagesTs/MSWAL_0081_0000.nii.gz', 'label': './labelsTs/MSWAL_0081.nii.gz'}, {'image': './imagesTs/MSWAL_0087_0000.nii.gz', 'label': './labelsTs/MSWAL_0087.nii.gz'}, {'image': './imagesTs/MSWAL_0090_0000.nii.gz', 
'label': './labelsTs/MSWAL_0090.nii.gz'}, {'image': './imagesTs/MSWAL_0091_0000.nii.gz', 'label': './labelsTs/MSWAL_0091.nii.gz'}, {'image': './imagesTs/MSWAL_0097_0000.nii.gz', 'label': './labelsTs/MSWAL_0097.nii.gz'}, {'image': './imagesTs/MSWAL_0100_0000.nii.gz', 'label': './labelsTs/MSWAL_0100.nii.gz'}, {'image': './imagesTs/MSWAL_0107_0000.nii.gz', 'label': './labelsTs/MSWAL_0107.nii.gz'}, {'image': './imagesTs/MSWAL_0115_0000.nii.gz', 'label': './labelsTs/MSWAL_0115.nii.gz'}, {'image': './imagesTs/MSWAL_0116_0000.nii.gz', 'label': './labelsTs/MSWAL_0116.nii.gz'}, {'image': './imagesTs/MSWAL_0118_0000.nii.gz', 'label': './labelsTs/MSWAL_0118.nii.gz'}, {'image': './imagesTs/MSWAL_0121_0000.nii.gz', 'label': './labelsTs/MSWAL_0121.nii.gz'}, {'image': './imagesTs/MSWAL_0123_0000.nii.gz', 'label': './labelsTs/MSWAL_0123.nii.gz'}, {'image': './imagesTs/MSWAL_0131_0000.nii.gz', 'label': './labelsTs/MSWAL_0131.nii.gz'}, {'image': './imagesTs/MSWAL_0135_0000.nii.gz', 'label': './labelsTs/MSWAL_0135.nii.gz'}, {'image': './imagesTs/MSWAL_0137_0000.nii.gz', 'label': './labelsTs/MSWAL_0137.nii.gz'}, {'image': './imagesTs/MSWAL_0144_0000.nii.gz', 'label': './labelsTs/MSWAL_0144.nii.gz'}, {'image': './imagesTs/MSWAL_0146_0000.nii.gz', 'label': './labelsTs/MSWAL_0146.nii.gz'}, {'image': './imagesTs/MSWAL_0153_0000.nii.gz', 'label': './labelsTs/MSWAL_0153.nii.gz'}, {'image': './imagesTs/MSWAL_0154_0000.nii.gz', 'label': './labelsTs/MSWAL_0154.nii.gz'}, {'image': './imagesTs/MSWAL_0155_0000.nii.gz', 'label': './labelsTs/MSWAL_0155.nii.gz'}, {'image': './imagesTs/MSWAL_0156_0000.nii.gz', 'label': './labelsTs/MSWAL_0156.nii.gz'}, {'image': './imagesTs/MSWAL_0158_0000.nii.gz', 'label': './labelsTs/MSWAL_0158.nii.gz'}, {'image': './imagesTs/MSWAL_0160_0000.nii.gz', 'label': './labelsTs/MSWAL_0160.nii.gz'}, {'image': './imagesTs/MSWAL_0161_0000.nii.gz', 'label': './labelsTs/MSWAL_0161.nii.gz'}, {'image': './imagesTs/MSWAL_0164_0000.nii.gz', 'label': './labelsTs/MSWAL_0164.nii.gz'}, 
{'image': './imagesTs/MSWAL_0181_0000.nii.gz', 'label': './labelsTs/MSWAL_0181.nii.gz'}, {'image': './imagesTs/MSWAL_0190_0000.nii.gz', 'label': './labelsTs/MSWAL_0190.nii.gz'}, {'image': './imagesTs/MSWAL_0191_0000.nii.gz', 'label': './labelsTs/MSWAL_0191.nii.gz'}, {'image': './imagesTs/MSWAL_0192_0000.nii.gz', 'label': './labelsTs/MSWAL_0192.nii.gz'}, {'image': './imagesTs/MSWAL_0196_0000.nii.gz', 'label': './labelsTs/MSWAL_0196.nii.gz'}, {'image': './imagesTs/MSWAL_0197_0000.nii.gz', 'label': './labelsTs/MSWAL_0197.nii.gz'}, {'image': './imagesTs/MSWAL_0198_0000.nii.gz', 'label': './labelsTs/MSWAL_0198.nii.gz'}, {'image': './imagesTs/MSWAL_0200_0000.nii.gz', 'label': './labelsTs/MSWAL_0200.nii.gz'}, {'image': './imagesTs/MSWAL_0205_0000.nii.gz', 'label': './labelsTs/MSWAL_0205.nii.gz'}, {'image': './imagesTs/MSWAL_0206_0000.nii.gz', 'label': './labelsTs/MSWAL_0206.nii.gz'}, {'image': './imagesTs/MSWAL_0210_0000.nii.gz', 'label': './labelsTs/MSWAL_0210.nii.gz'}, {'image': './imagesTs/MSWAL_0211_0000.nii.gz', 'label': './labelsTs/MSWAL_0211.nii.gz'}, {'image': './imagesTs/MSWAL_0212_0000.nii.gz', 'label': './labelsTs/MSWAL_0212.nii.gz'}, {'image': './imagesTs/MSWAL_0213_0000.nii.gz', 'label': './labelsTs/MSWAL_0213.nii.gz'}, {'image': './imagesTs/MSWAL_0215_0000.nii.gz', 'label': './labelsTs/MSWAL_0215.nii.gz'}, {'image': './imagesTs/MSWAL_0216_0000.nii.gz', 'label': './labelsTs/MSWAL_0216.nii.gz'}, {'image': './imagesTs/MSWAL_0231_0000.nii.gz', 'label': './labelsTs/MSWAL_0231.nii.gz'}, {'image': './imagesTs/MSWAL_0232_0000.nii.gz', 'label': './labelsTs/MSWAL_0232.nii.gz'}, {'image': './imagesTs/MSWAL_0235_0000.nii.gz', 'label': './labelsTs/MSWAL_0235.nii.gz'}, {'image': './imagesTs/MSWAL_0236_0000.nii.gz', 'label': './labelsTs/MSWAL_0236.nii.gz'}, {'image': './imagesTs/MSWAL_0237_0000.nii.gz', 'label': './labelsTs/MSWAL_0237.nii.gz'}, {'image': './imagesTs/MSWAL_0239_0000.nii.gz', 'label': './labelsTs/MSWAL_0239.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0240_0000.nii.gz', 'label': './labelsTs/MSWAL_0240.nii.gz'}, {'image': './imagesTs/MSWAL_0244_0000.nii.gz', 'label': './labelsTs/MSWAL_0244.nii.gz'}, {'image': './imagesTs/MSWAL_0249_0000.nii.gz', 'label': './labelsTs/MSWAL_0249.nii.gz'}, {'image': './imagesTs/MSWAL_0250_0000.nii.gz', 'label': './labelsTs/MSWAL_0250.nii.gz'}, {'image': './imagesTs/MSWAL_0266_0000.nii.gz', 'label': './labelsTs/MSWAL_0266.nii.gz'}, {'image': './imagesTs/MSWAL_0268_0000.nii.gz', 'label': './labelsTs/MSWAL_0268.nii.gz'}, {'image': './imagesTs/MSWAL_0269_0000.nii.gz', 'label': './labelsTs/MSWAL_0269.nii.gz'}, {'image': './imagesTs/MSWAL_0280_0000.nii.gz', 'label': './labelsTs/MSWAL_0280.nii.gz'}, {'image': './imagesTs/MSWAL_0286_0000.nii.gz', 'label': './labelsTs/MSWAL_0286.nii.gz'}, {'image': './imagesTs/MSWAL_0287_0000.nii.gz', 'label': './labelsTs/MSWAL_0287.nii.gz'}, {'image': './imagesTs/MSWAL_0291_0000.nii.gz', 'label': './labelsTs/MSWAL_0291.nii.gz'}, {'image': './imagesTs/MSWAL_0292_0000.nii.gz', 'label': './labelsTs/MSWAL_0292.nii.gz'}, {'image': './imagesTs/MSWAL_0294_0000.nii.gz', 'label': './labelsTs/MSWAL_0294.nii.gz'}, {'image': './imagesTs/MSWAL_0295_0000.nii.gz', 'label': './labelsTs/MSWAL_0295.nii.gz'}, {'image': './imagesTs/MSWAL_0298_0000.nii.gz', 'label': './labelsTs/MSWAL_0298.nii.gz'}, {'image': './imagesTs/MSWAL_0299_0000.nii.gz', 'label': './labelsTs/MSWAL_0299.nii.gz'}, {'image': './imagesTs/MSWAL_0300_0000.nii.gz', 'label': './labelsTs/MSWAL_0300.nii.gz'}, {'image': './imagesTs/MSWAL_0304_0000.nii.gz', 'label': './labelsTs/MSWAL_0304.nii.gz'}, {'image': './imagesTs/MSWAL_0305_0000.nii.gz', 'label': './labelsTs/MSWAL_0305.nii.gz'}, {'image': './imagesTs/MSWAL_0309_0000.nii.gz', 'label': './labelsTs/MSWAL_0309.nii.gz'}, {'image': './imagesTs/MSWAL_0310_0000.nii.gz', 'label': './labelsTs/MSWAL_0310.nii.gz'}, {'image': './imagesTs/MSWAL_0315_0000.nii.gz', 'label': './labelsTs/MSWAL_0315.nii.gz'}, {'image': './imagesTs/MSWAL_0319_0000.nii.gz', 
'label': './labelsTs/MSWAL_0319.nii.gz'}, {'image': './imagesTs/MSWAL_0321_0000.nii.gz', 'label': './labelsTs/MSWAL_0321.nii.gz'}, {'image': './imagesTs/MSWAL_0322_0000.nii.gz', 'label': './labelsTs/MSWAL_0322.nii.gz'}, {'image': './imagesTs/MSWAL_0325_0000.nii.gz', 'label': './labelsTs/MSWAL_0325.nii.gz'}, {'image': './imagesTs/MSWAL_0329_0000.nii.gz', 'label': './labelsTs/MSWAL_0329.nii.gz'}, {'image': './imagesTs/MSWAL_0339_0000.nii.gz', 'label': './labelsTs/MSWAL_0339.nii.gz'}, {'image': './imagesTs/MSWAL_0340_0000.nii.gz', 'label': './labelsTs/MSWAL_0340.nii.gz'}, {'image': './imagesTs/MSWAL_0347_0000.nii.gz', 'label': './labelsTs/MSWAL_0347.nii.gz'}, {'image': './imagesTs/MSWAL_0349_0000.nii.gz', 'label': './labelsTs/MSWAL_0349.nii.gz'}, {'image': './imagesTs/MSWAL_0350_0000.nii.gz', 'label': './labelsTs/MSWAL_0350.nii.gz'}, {'image': './imagesTs/MSWAL_0351_0000.nii.gz', 'label': './labelsTs/MSWAL_0351.nii.gz'}, {'image': './imagesTs/MSWAL_0352_0000.nii.gz', 'label': './labelsTs/MSWAL_0352.nii.gz'}, {'image': './imagesTs/MSWAL_0358_0000.nii.gz', 'label': './labelsTs/MSWAL_0358.nii.gz'}, {'image': './imagesTs/MSWAL_0359_0000.nii.gz', 'label': './labelsTs/MSWAL_0359.nii.gz'}, {'image': './imagesTs/MSWAL_0364_0000.nii.gz', 'label': './labelsTs/MSWAL_0364.nii.gz'}, {'image': './imagesTs/MSWAL_0367_0000.nii.gz', 'label': './labelsTs/MSWAL_0367.nii.gz'}, {'image': './imagesTs/MSWAL_0368_0000.nii.gz', 'label': './labelsTs/MSWAL_0368.nii.gz'}, {'image': './imagesTs/MSWAL_0371_0000.nii.gz', 'label': './labelsTs/MSWAL_0371.nii.gz'}, {'image': './imagesTs/MSWAL_0372_0000.nii.gz', 'label': './labelsTs/MSWAL_0372.nii.gz'}, {'image': './imagesTs/MSWAL_0377_0000.nii.gz', 'label': './labelsTs/MSWAL_0377.nii.gz'}, {'image': './imagesTs/MSWAL_0383_0000.nii.gz', 'label': './labelsTs/MSWAL_0383.nii.gz'}, {'image': './imagesTs/MSWAL_0384_0000.nii.gz', 'label': './labelsTs/MSWAL_0384.nii.gz'}, {'image': './imagesTs/MSWAL_0385_0000.nii.gz', 'label': './labelsTs/MSWAL_0385.nii.gz'}, 
{'image': './imagesTs/MSWAL_0386_0000.nii.gz', 'label': './labelsTs/MSWAL_0386.nii.gz'}, {'image': './imagesTs/MSWAL_0394_0000.nii.gz', 'label': './labelsTs/MSWAL_0394.nii.gz'}, {'image': './imagesTs/MSWAL_0395_0000.nii.gz', 'label': './labelsTs/MSWAL_0395.nii.gz'}, {'image': './imagesTs/MSWAL_0396_0000.nii.gz', 'label': './labelsTs/MSWAL_0396.nii.gz'}, {'image': './imagesTs/MSWAL_0401_0000.nii.gz', 'label': './labelsTs/MSWAL_0401.nii.gz'}, {'image': './imagesTs/MSWAL_0404_0000.nii.gz', 'label': './labelsTs/MSWAL_0404.nii.gz'}, {'image': './imagesTs/MSWAL_0405_0000.nii.gz', 'label': './labelsTs/MSWAL_0405.nii.gz'}, {'image': './imagesTs/MSWAL_0406_0000.nii.gz', 'label': './labelsTs/MSWAL_0406.nii.gz'}, {'image': './imagesTs/MSWAL_0408_0000.nii.gz', 'label': './labelsTs/MSWAL_0408.nii.gz'}, {'image': './imagesTs/MSWAL_0413_0000.nii.gz', 'label': './labelsTs/MSWAL_0413.nii.gz'}, {'image': './imagesTs/MSWAL_0424_0000.nii.gz', 'label': './labelsTs/MSWAL_0424.nii.gz'}, {'image': './imagesTs/MSWAL_0433_0000.nii.gz', 'label': './labelsTs/MSWAL_0433.nii.gz'}, {'image': './imagesTs/MSWAL_0441_0000.nii.gz', 'label': './labelsTs/MSWAL_0441.nii.gz'}, {'image': './imagesTs/MSWAL_0443_0000.nii.gz', 'label': './labelsTs/MSWAL_0443.nii.gz'}, {'image': './imagesTs/MSWAL_0444_0000.nii.gz', 'label': './labelsTs/MSWAL_0444.nii.gz'}, {'image': './imagesTs/MSWAL_0445_0000.nii.gz', 'label': './labelsTs/MSWAL_0445.nii.gz'}, {'image': './imagesTs/MSWAL_0448_0000.nii.gz', 'label': './labelsTs/MSWAL_0448.nii.gz'}, {'image': './imagesTs/MSWAL_0449_0000.nii.gz', 'label': './labelsTs/MSWAL_0449.nii.gz'}, {'image': './imagesTs/MSWAL_0450_0000.nii.gz', 'label': './labelsTs/MSWAL_0450.nii.gz'}, {'image': './imagesTs/MSWAL_0451_0000.nii.gz', 'label': './labelsTs/MSWAL_0451.nii.gz'}, {'image': './imagesTs/MSWAL_0454_0000.nii.gz', 'label': './labelsTs/MSWAL_0454.nii.gz'}, {'image': './imagesTs/MSWAL_0456_0000.nii.gz', 'label': './labelsTs/MSWAL_0456.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0458_0000.nii.gz', 'label': './labelsTs/MSWAL_0458.nii.gz'}, {'image': './imagesTs/MSWAL_0459_0000.nii.gz', 'label': './labelsTs/MSWAL_0459.nii.gz'}, {'image': './imagesTs/MSWAL_0462_0000.nii.gz', 'label': './labelsTs/MSWAL_0462.nii.gz'}, {'image': './imagesTs/MSWAL_0467_0000.nii.gz', 'label': './labelsTs/MSWAL_0467.nii.gz'}, {'image': './imagesTs/MSWAL_0469_0000.nii.gz', 'label': './labelsTs/MSWAL_0469.nii.gz'}, {'image': './imagesTs/MSWAL_0472_0000.nii.gz', 'label': './labelsTs/MSWAL_0472.nii.gz'}, {'image': './imagesTs/MSWAL_0478_0000.nii.gz', 'label': './labelsTs/MSWAL_0478.nii.gz'}, {'image': './imagesTs/MSWAL_0481_0000.nii.gz', 'label': './labelsTs/MSWAL_0481.nii.gz'}, {'image': './imagesTs/MSWAL_0494_0000.nii.gz', 'label': './labelsTs/MSWAL_0494.nii.gz'}, {'image': './imagesTs/MSWAL_0496_0000.nii.gz', 'label': './labelsTs/MSWAL_0496.nii.gz'}, {'image': './imagesTs/MSWAL_0499_0000.nii.gz', 'label': './labelsTs/MSWAL_0499.nii.gz'}, {'image': './imagesTs/MSWAL_0502_0000.nii.gz', 'label': './labelsTs/MSWAL_0502.nii.gz'}, {'image': './imagesTs/MSWAL_0503_0000.nii.gz', 'label': './labelsTs/MSWAL_0503.nii.gz'}, {'image': './imagesTs/MSWAL_0511_0000.nii.gz', 'label': './labelsTs/MSWAL_0511.nii.gz'}, {'image': './imagesTs/MSWAL_0513_0000.nii.gz', 'label': './labelsTs/MSWAL_0513.nii.gz'}, {'image': './imagesTs/MSWAL_0514_0000.nii.gz', 'label': './labelsTs/MSWAL_0514.nii.gz'}, {'image': './imagesTs/MSWAL_0515_0000.nii.gz', 'label': './labelsTs/MSWAL_0515.nii.gz'}, {'image': './imagesTs/MSWAL_0517_0000.nii.gz', 'label': './labelsTs/MSWAL_0517.nii.gz'}, {'image': './imagesTs/MSWAL_0520_0000.nii.gz', 'label': './labelsTs/MSWAL_0520.nii.gz'}, {'image': './imagesTs/MSWAL_0525_0000.nii.gz', 'label': './labelsTs/MSWAL_0525.nii.gz'}, {'image': './imagesTs/MSWAL_0528_0000.nii.gz', 'label': './labelsTs/MSWAL_0528.nii.gz'}, {'image': './imagesTs/MSWAL_0529_0000.nii.gz', 'label': './labelsTs/MSWAL_0529.nii.gz'}, {'image': './imagesTs/MSWAL_0532_0000.nii.gz', 
'label': './labelsTs/MSWAL_0532.nii.gz'}, {'image': './imagesTs/MSWAL_0533_0000.nii.gz', 'label': './labelsTs/MSWAL_0533.nii.gz'}, {'image': './imagesTs/MSWAL_0537_0000.nii.gz', 'label': './labelsTs/MSWAL_0537.nii.gz'}, {'image': './imagesTs/MSWAL_0541_0000.nii.gz', 'label': './labelsTs/MSWAL_0541.nii.gz'}, {'image': './imagesTs/MSWAL_0543_0000.nii.gz', 'label': './labelsTs/MSWAL_0543.nii.gz'}, {'image': './imagesTs/MSWAL_0560_0000.nii.gz', 'label': './labelsTs/MSWAL_0560.nii.gz'}, {'image': './imagesTs/MSWAL_0565_0000.nii.gz', 'label': './labelsTs/MSWAL_0565.nii.gz'}, {'image': './imagesTs/MSWAL_0569_0000.nii.gz', 'label': './labelsTs/MSWAL_0569.nii.gz'}, {'image': './imagesTs/MSWAL_0570_0000.nii.gz', 'label': './labelsTs/MSWAL_0570.nii.gz'}, {'image': './imagesTs/MSWAL_0572_0000.nii.gz', 'label': './labelsTs/MSWAL_0572.nii.gz'}, {'image': './imagesTs/MSWAL_0576_0000.nii.gz', 'label': './labelsTs/MSWAL_0576.nii.gz'}, {'image': './imagesTs/MSWAL_0585_0000.nii.gz', 'label': './labelsTs/MSWAL_0585.nii.gz'}, {'image': './imagesTs/MSWAL_0587_0000.nii.gz', 'label': './labelsTs/MSWAL_0587.nii.gz'}, {'image': './imagesTs/MSWAL_0588_0000.nii.gz', 'label': './labelsTs/MSWAL_0588.nii.gz'}, {'image': './imagesTs/MSWAL_0589_0000.nii.gz', 'label': './labelsTs/MSWAL_0589.nii.gz'}, {'image': './imagesTs/MSWAL_0594_0000.nii.gz', 'label': './labelsTs/MSWAL_0594.nii.gz'}, {'image': './imagesTs/MSWAL_0603_0000.nii.gz', 'label': './labelsTs/MSWAL_0603.nii.gz'}, {'image': './imagesTs/MSWAL_0606_0000.nii.gz', 'label': './labelsTs/MSWAL_0606.nii.gz'}, {'image': './imagesTs/MSWAL_0607_0000.nii.gz', 'label': './labelsTs/MSWAL_0607.nii.gz'}, {'image': './imagesTs/MSWAL_0609_0000.nii.gz', 'label': './labelsTs/MSWAL_0609.nii.gz'}, {'image': './imagesTs/MSWAL_0610_0000.nii.gz', 'label': './labelsTs/MSWAL_0610.nii.gz'}, {'image': './imagesTs/MSWAL_0611_0000.nii.gz', 'label': './labelsTs/MSWAL_0611.nii.gz'}, {'image': './imagesTs/MSWAL_0613_0000.nii.gz', 'label': './labelsTs/MSWAL_0613.nii.gz'}, 
{'image': './imagesTs/MSWAL_0618_0000.nii.gz', 'label': './labelsTs/MSWAL_0618.nii.gz'}, {'image': './imagesTs/MSWAL_0619_0000.nii.gz', 'label': './labelsTs/MSWAL_0619.nii.gz'}, {'image': './imagesTs/MSWAL_0620_0000.nii.gz', 'label': './labelsTs/MSWAL_0620.nii.gz'}, {'image': './imagesTs/MSWAL_0622_0000.nii.gz', 'label': './labelsTs/MSWAL_0622.nii.gz'}, {'image': './imagesTs/MSWAL_0624_0000.nii.gz', 'label': './labelsTs/MSWAL_0624.nii.gz'}, {'image': './imagesTs/MSWAL_0631_0000.nii.gz', 'label': './labelsTs/MSWAL_0631.nii.gz'}, {'image': './imagesTs/MSWAL_0633_0000.nii.gz', 'label': './labelsTs/MSWAL_0633.nii.gz'}, {'image': './imagesTs/MSWAL_0634_0000.nii.gz', 'label': './labelsTs/MSWAL_0634.nii.gz'}, {'image': './imagesTs/MSWAL_0637_0000.nii.gz', 'label': './labelsTs/MSWAL_0637.nii.gz'}, {'image': './imagesTs/MSWAL_0639_0000.nii.gz', 'label': './labelsTs/MSWAL_0639.nii.gz'}, {'image': './imagesTs/MSWAL_0642_0000.nii.gz', 'label': './labelsTs/MSWAL_0642.nii.gz'}, {'image': './imagesTs/MSWAL_0645_0000.nii.gz', 'label': './labelsTs/MSWAL_0645.nii.gz'}, {'image': './imagesTs/MSWAL_0647_0000.nii.gz', 'label': './labelsTs/MSWAL_0647.nii.gz'}, {'image': './imagesTs/MSWAL_0652_0000.nii.gz', 'label': './labelsTs/MSWAL_0652.nii.gz'}, {'image': './imagesTs/MSWAL_0657_0000.nii.gz', 'label': './labelsTs/MSWAL_0657.nii.gz'}, {'image': './imagesTs/MSWAL_0659_0000.nii.gz', 'label': './labelsTs/MSWAL_0659.nii.gz'}, {'image': './imagesTs/MSWAL_0664_0000.nii.gz', 'label': './labelsTs/MSWAL_0664.nii.gz'}, {'image': './imagesTs/MSWAL_0665_0000.nii.gz', 'label': './labelsTs/MSWAL_0665.nii.gz'}, {'image': './imagesTs/MSWAL_0672_0000.nii.gz', 'label': './labelsTs/MSWAL_0672.nii.gz'}, {'image': './imagesTs/MSWAL_0678_0000.nii.gz', 'label': './labelsTs/MSWAL_0678.nii.gz'}, {'image': './imagesTs/MSWAL_0683_0000.nii.gz', 'label': './labelsTs/MSWAL_0683.nii.gz'}, {'image': './imagesTs/MSWAL_0684_0000.nii.gz', 'label': './labelsTs/MSWAL_0684.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0689_0000.nii.gz', 'label': './labelsTs/MSWAL_0689.nii.gz'}, {'image': './imagesTs/MSWAL_0691_0000.nii.gz', 'label': './labelsTs/MSWAL_0691.nii.gz'}]}", + "device": "cuda:0", + "disable_checkpointing": "False", + "enable_deep_supervision": "True", + "fold": "2", + "folder_with_segs_from_previous_stage": "None", + "gpu_name": "NVIDIA A100-SXM4-80GB", + "grad_scaler": "", + "hostname": "cn0072", + "inference_allowed_mirroring_axes": "(0, 1, 2)", + "initial_lr": "0.01", + "is_cascaded": "False", + "is_ddp": "False", + "label_manager": "", + "local_rank": "0", + "log_file": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/training_log_2026_4_8_15_55_49.txt", + "logger": "", + "loss": "DeepSupervisionWrapper(\n (loss): DC_and_CE_loss(\n (ce): RobustCrossEntropyLoss()\n (dc): OptimizedModule(\n (_orig_mod): MemoryEfficientSoftDiceLoss()\n )\n )\n)", + "lr_scheduler": "", + "my_init_kwargs": "{'plans': {'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'configurations': {'2d': {'data_identifier': 'nnUNetPlans_2d', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 35, 'patch_size': [512, 512], 'median_image_size_in_voxels': [512.0, 512.0], 'spacing': [0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': 
{'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 8, 'features_per_stage': [32, 64, 128, 256, 512, 512, 512, 512], 'conv_op': 'torch.nn.modules.conv.Conv2d', 'kernel_sizes': [[3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3]], 'strides': [[1, 1], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm2d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_lowres': {'data_identifier': 'nnUNetResEncUNetLPlans_3d_lowres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [190, 381, 381], 'spacing': [1.6798954741801528, 1.0079372845080916, 1.0079372845080916], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 
'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': False, 'next_stage': '3d_cascade_fullres'}, '3d_fullres': {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 
'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_cascade_fullres': {'inherits_from': '3d_fullres', 'previous_stage': '3d_lowres'}}, 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}}, 'configuration': '3d_fullres', 'fold': 2, 'dataset_json': {'name': 'MSWAL', 'description': ' 3D Multi-class Segmentation of Whole Abdominal Lesions Dataset', 'licence': 'CC BY-NC 4.0', 'relase': 'July 8, 2025', 'tensorImageSize': '3D', 'file_ending': '.nii.gz', 'channel_names': {'0': 'CT'}, 'labels': {'background': 0, 'gallstone': 1, 'kidney stone': 2, 'liver tumor': 3, 'kidney tumor': 4, 'pancreatic cancer': 5, 'liver cyst': 6, 'kidney cyst': 7}, 'numTraining': 484, 'numTest': 210, 'training': [{'image': './imagesTr/MSWAL_0001_0000.nii.gz', 'label': './labelsTr/MSWAL_0001.nii.gz'}, {'image': './imagesTr/MSWAL_0002_0000.nii.gz', 'label': './labelsTr/MSWAL_0002.nii.gz'}, {'image': './imagesTr/MSWAL_0003_0000.nii.gz', 'label': './labelsTr/MSWAL_0003.nii.gz'}, {'image': './imagesTr/MSWAL_0008_0000.nii.gz', 'label': './labelsTr/MSWAL_0008.nii.gz'}, {'image': './imagesTr/MSWAL_0009_0000.nii.gz', 'label': './labelsTr/MSWAL_0009.nii.gz'}, {'image': './imagesTr/MSWAL_0011_0000.nii.gz', 'label': './labelsTr/MSWAL_0011.nii.gz'}, {'image': './imagesTr/MSWAL_0013_0000.nii.gz', 'label': './labelsTr/MSWAL_0013.nii.gz'}, {'image': './imagesTr/MSWAL_0014_0000.nii.gz', 'label': './labelsTr/MSWAL_0014.nii.gz'}, {'image': './imagesTr/MSWAL_0015_0000.nii.gz', 'label': './labelsTr/MSWAL_0015.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0017_0000.nii.gz', 'label': './labelsTr/MSWAL_0017.nii.gz'}, {'image': './imagesTr/MSWAL_0018_0000.nii.gz', 'label': './labelsTr/MSWAL_0018.nii.gz'}, {'image': './imagesTr/MSWAL_0020_0000.nii.gz', 'label': './labelsTr/MSWAL_0020.nii.gz'}, {'image': './imagesTr/MSWAL_0021_0000.nii.gz', 'label': './labelsTr/MSWAL_0021.nii.gz'}, {'image': './imagesTr/MSWAL_0022_0000.nii.gz', 'label': './labelsTr/MSWAL_0022.nii.gz'}, {'image': './imagesTr/MSWAL_0024_0000.nii.gz', 'label': './labelsTr/MSWAL_0024.nii.gz'}, {'image': './imagesTr/MSWAL_0026_0000.nii.gz', 'label': './labelsTr/MSWAL_0026.nii.gz'}, {'image': './imagesTr/MSWAL_0027_0000.nii.gz', 'label': './labelsTr/MSWAL_0027.nii.gz'}, {'image': './imagesTr/MSWAL_0028_0000.nii.gz', 'label': './labelsTr/MSWAL_0028.nii.gz'}, {'image': './imagesTr/MSWAL_0029_0000.nii.gz', 'label': './labelsTr/MSWAL_0029.nii.gz'}, {'image': './imagesTr/MSWAL_0031_0000.nii.gz', 'label': './labelsTr/MSWAL_0031.nii.gz'}, {'image': './imagesTr/MSWAL_0032_0000.nii.gz', 'label': './labelsTr/MSWAL_0032.nii.gz'}, {'image': './imagesTr/MSWAL_0033_0000.nii.gz', 'label': './labelsTr/MSWAL_0033.nii.gz'}, {'image': './imagesTr/MSWAL_0034_0000.nii.gz', 'label': './labelsTr/MSWAL_0034.nii.gz'}, {'image': './imagesTr/MSWAL_0035_0000.nii.gz', 'label': './labelsTr/MSWAL_0035.nii.gz'}, {'image': './imagesTr/MSWAL_0037_0000.nii.gz', 'label': './labelsTr/MSWAL_0037.nii.gz'}, {'image': './imagesTr/MSWAL_0038_0000.nii.gz', 'label': './labelsTr/MSWAL_0038.nii.gz'}, {'image': './imagesTr/MSWAL_0039_0000.nii.gz', 'label': './labelsTr/MSWAL_0039.nii.gz'}, {'image': './imagesTr/MSWAL_0040_0000.nii.gz', 'label': './labelsTr/MSWAL_0040.nii.gz'}, {'image': './imagesTr/MSWAL_0041_0000.nii.gz', 'label': './labelsTr/MSWAL_0041.nii.gz'}, {'image': './imagesTr/MSWAL_0042_0000.nii.gz', 'label': './labelsTr/MSWAL_0042.nii.gz'}, {'image': './imagesTr/MSWAL_0045_0000.nii.gz', 'label': './labelsTr/MSWAL_0045.nii.gz'}, {'image': './imagesTr/MSWAL_0046_0000.nii.gz', 
'label': './labelsTr/MSWAL_0046.nii.gz'}, {'image': './imagesTr/MSWAL_0049_0000.nii.gz', 'label': './labelsTr/MSWAL_0049.nii.gz'}, {'image': './imagesTr/MSWAL_0050_0000.nii.gz', 'label': './labelsTr/MSWAL_0050.nii.gz'}, {'image': './imagesTr/MSWAL_0051_0000.nii.gz', 'label': './labelsTr/MSWAL_0051.nii.gz'}, {'image': './imagesTr/MSWAL_0052_0000.nii.gz', 'label': './labelsTr/MSWAL_0052.nii.gz'}, {'image': './imagesTr/MSWAL_0054_0000.nii.gz', 'label': './labelsTr/MSWAL_0054.nii.gz'}, {'image': './imagesTr/MSWAL_0055_0000.nii.gz', 'label': './labelsTr/MSWAL_0055.nii.gz'}, {'image': './imagesTr/MSWAL_0056_0000.nii.gz', 'label': './labelsTr/MSWAL_0056.nii.gz'}, {'image': './imagesTr/MSWAL_0057_0000.nii.gz', 'label': './labelsTr/MSWAL_0057.nii.gz'}, {'image': './imagesTr/MSWAL_0059_0000.nii.gz', 'label': './labelsTr/MSWAL_0059.nii.gz'}, {'image': './imagesTr/MSWAL_0060_0000.nii.gz', 'label': './labelsTr/MSWAL_0060.nii.gz'}, {'image': './imagesTr/MSWAL_0061_0000.nii.gz', 'label': './labelsTr/MSWAL_0061.nii.gz'}, {'image': './imagesTr/MSWAL_0063_0000.nii.gz', 'label': './labelsTr/MSWAL_0063.nii.gz'}, {'image': './imagesTr/MSWAL_0064_0000.nii.gz', 'label': './labelsTr/MSWAL_0064.nii.gz'}, {'image': './imagesTr/MSWAL_0065_0000.nii.gz', 'label': './labelsTr/MSWAL_0065.nii.gz'}, {'image': './imagesTr/MSWAL_0066_0000.nii.gz', 'label': './labelsTr/MSWAL_0066.nii.gz'}, {'image': './imagesTr/MSWAL_0067_0000.nii.gz', 'label': './labelsTr/MSWAL_0067.nii.gz'}, {'image': './imagesTr/MSWAL_0069_0000.nii.gz', 'label': './labelsTr/MSWAL_0069.nii.gz'}, {'image': './imagesTr/MSWAL_0072_0000.nii.gz', 'label': './labelsTr/MSWAL_0072.nii.gz'}, {'image': './imagesTr/MSWAL_0075_0000.nii.gz', 'label': './labelsTr/MSWAL_0075.nii.gz'}, {'image': './imagesTr/MSWAL_0077_0000.nii.gz', 'label': './labelsTr/MSWAL_0077.nii.gz'}, {'image': './imagesTr/MSWAL_0080_0000.nii.gz', 'label': './labelsTr/MSWAL_0080.nii.gz'}, {'image': './imagesTr/MSWAL_0082_0000.nii.gz', 'label': './labelsTr/MSWAL_0082.nii.gz'}, 
{'image': './imagesTr/MSWAL_0083_0000.nii.gz', 'label': './labelsTr/MSWAL_0083.nii.gz'}, {'image': './imagesTr/MSWAL_0084_0000.nii.gz', 'label': './labelsTr/MSWAL_0084.nii.gz'}, {'image': './imagesTr/MSWAL_0085_0000.nii.gz', 'label': './labelsTr/MSWAL_0085.nii.gz'}, {'image': './imagesTr/MSWAL_0086_0000.nii.gz', 'label': './labelsTr/MSWAL_0086.nii.gz'}, {'image': './imagesTr/MSWAL_0088_0000.nii.gz', 'label': './labelsTr/MSWAL_0088.nii.gz'}, {'image': './imagesTr/MSWAL_0089_0000.nii.gz', 'label': './labelsTr/MSWAL_0089.nii.gz'}, {'image': './imagesTr/MSWAL_0092_0000.nii.gz', 'label': './labelsTr/MSWAL_0092.nii.gz'}, {'image': './imagesTr/MSWAL_0093_0000.nii.gz', 'label': './labelsTr/MSWAL_0093.nii.gz'}, {'image': './imagesTr/MSWAL_0094_0000.nii.gz', 'label': './labelsTr/MSWAL_0094.nii.gz'}, {'image': './imagesTr/MSWAL_0095_0000.nii.gz', 'label': './labelsTr/MSWAL_0095.nii.gz'}, {'image': './imagesTr/MSWAL_0096_0000.nii.gz', 'label': './labelsTr/MSWAL_0096.nii.gz'}, {'image': './imagesTr/MSWAL_0098_0000.nii.gz', 'label': './labelsTr/MSWAL_0098.nii.gz'}, {'image': './imagesTr/MSWAL_0099_0000.nii.gz', 'label': './labelsTr/MSWAL_0099.nii.gz'}, {'image': './imagesTr/MSWAL_0101_0000.nii.gz', 'label': './labelsTr/MSWAL_0101.nii.gz'}, {'image': './imagesTr/MSWAL_0102_0000.nii.gz', 'label': './labelsTr/MSWAL_0102.nii.gz'}, {'image': './imagesTr/MSWAL_0103_0000.nii.gz', 'label': './labelsTr/MSWAL_0103.nii.gz'}, {'image': './imagesTr/MSWAL_0104_0000.nii.gz', 'label': './labelsTr/MSWAL_0104.nii.gz'}, {'image': './imagesTr/MSWAL_0105_0000.nii.gz', 'label': './labelsTr/MSWAL_0105.nii.gz'}, {'image': './imagesTr/MSWAL_0106_0000.nii.gz', 'label': './labelsTr/MSWAL_0106.nii.gz'}, {'image': './imagesTr/MSWAL_0108_0000.nii.gz', 'label': './labelsTr/MSWAL_0108.nii.gz'}, {'image': './imagesTr/MSWAL_0109_0000.nii.gz', 'label': './labelsTr/MSWAL_0109.nii.gz'}, {'image': './imagesTr/MSWAL_0110_0000.nii.gz', 'label': './labelsTr/MSWAL_0110.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0111_0000.nii.gz', 'label': './labelsTr/MSWAL_0111.nii.gz'}, {'image': './imagesTr/MSWAL_0112_0000.nii.gz', 'label': './labelsTr/MSWAL_0112.nii.gz'}, {'image': './imagesTr/MSWAL_0113_0000.nii.gz', 'label': './labelsTr/MSWAL_0113.nii.gz'}, {'image': './imagesTr/MSWAL_0114_0000.nii.gz', 'label': './labelsTr/MSWAL_0114.nii.gz'}, {'image': './imagesTr/MSWAL_0117_0000.nii.gz', 'label': './labelsTr/MSWAL_0117.nii.gz'}, {'image': './imagesTr/MSWAL_0119_0000.nii.gz', 'label': './labelsTr/MSWAL_0119.nii.gz'}, {'image': './imagesTr/MSWAL_0120_0000.nii.gz', 'label': './labelsTr/MSWAL_0120.nii.gz'}, {'image': './imagesTr/MSWAL_0122_0000.nii.gz', 'label': './labelsTr/MSWAL_0122.nii.gz'}, {'image': './imagesTr/MSWAL_0124_0000.nii.gz', 'label': './labelsTr/MSWAL_0124.nii.gz'}, {'image': './imagesTr/MSWAL_0125_0000.nii.gz', 'label': './labelsTr/MSWAL_0125.nii.gz'}, {'image': './imagesTr/MSWAL_0126_0000.nii.gz', 'label': './labelsTr/MSWAL_0126.nii.gz'}, {'image': './imagesTr/MSWAL_0127_0000.nii.gz', 'label': './labelsTr/MSWAL_0127.nii.gz'}, {'image': './imagesTr/MSWAL_0128_0000.nii.gz', 'label': './labelsTr/MSWAL_0128.nii.gz'}, {'image': './imagesTr/MSWAL_0129_0000.nii.gz', 'label': './labelsTr/MSWAL_0129.nii.gz'}, {'image': './imagesTr/MSWAL_0130_0000.nii.gz', 'label': './labelsTr/MSWAL_0130.nii.gz'}, {'image': './imagesTr/MSWAL_0132_0000.nii.gz', 'label': './labelsTr/MSWAL_0132.nii.gz'}, {'image': './imagesTr/MSWAL_0133_0000.nii.gz', 'label': './labelsTr/MSWAL_0133.nii.gz'}, {'image': './imagesTr/MSWAL_0134_0000.nii.gz', 'label': './labelsTr/MSWAL_0134.nii.gz'}, {'image': './imagesTr/MSWAL_0136_0000.nii.gz', 'label': './labelsTr/MSWAL_0136.nii.gz'}, {'image': './imagesTr/MSWAL_0138_0000.nii.gz', 'label': './labelsTr/MSWAL_0138.nii.gz'}, {'image': './imagesTr/MSWAL_0139_0000.nii.gz', 'label': './labelsTr/MSWAL_0139.nii.gz'}, {'image': './imagesTr/MSWAL_0140_0000.nii.gz', 'label': './labelsTr/MSWAL_0140.nii.gz'}, {'image': './imagesTr/MSWAL_0141_0000.nii.gz', 
'label': './labelsTr/MSWAL_0141.nii.gz'}, {'image': './imagesTr/MSWAL_0142_0000.nii.gz', 'label': './labelsTr/MSWAL_0142.nii.gz'}, {'image': './imagesTr/MSWAL_0143_0000.nii.gz', 'label': './labelsTr/MSWAL_0143.nii.gz'}, {'image': './imagesTr/MSWAL_0145_0000.nii.gz', 'label': './labelsTr/MSWAL_0145.nii.gz'}, {'image': './imagesTr/MSWAL_0147_0000.nii.gz', 'label': './labelsTr/MSWAL_0147.nii.gz'}, {'image': './imagesTr/MSWAL_0148_0000.nii.gz', 'label': './labelsTr/MSWAL_0148.nii.gz'}, {'image': './imagesTr/MSWAL_0149_0000.nii.gz', 'label': './labelsTr/MSWAL_0149.nii.gz'}, {'image': './imagesTr/MSWAL_0150_0000.nii.gz', 'label': './labelsTr/MSWAL_0150.nii.gz'}, {'image': './imagesTr/MSWAL_0151_0000.nii.gz', 'label': './labelsTr/MSWAL_0151.nii.gz'}, {'image': './imagesTr/MSWAL_0152_0000.nii.gz', 'label': './labelsTr/MSWAL_0152.nii.gz'}, {'image': './imagesTr/MSWAL_0157_0000.nii.gz', 'label': './labelsTr/MSWAL_0157.nii.gz'}, {'image': './imagesTr/MSWAL_0159_0000.nii.gz', 'label': './labelsTr/MSWAL_0159.nii.gz'}, {'image': './imagesTr/MSWAL_0162_0000.nii.gz', 'label': './labelsTr/MSWAL_0162.nii.gz'}, {'image': './imagesTr/MSWAL_0163_0000.nii.gz', 'label': './labelsTr/MSWAL_0163.nii.gz'}, {'image': './imagesTr/MSWAL_0165_0000.nii.gz', 'label': './labelsTr/MSWAL_0165.nii.gz'}, {'image': './imagesTr/MSWAL_0166_0000.nii.gz', 'label': './labelsTr/MSWAL_0166.nii.gz'}, {'image': './imagesTr/MSWAL_0167_0000.nii.gz', 'label': './labelsTr/MSWAL_0167.nii.gz'}, {'image': './imagesTr/MSWAL_0168_0000.nii.gz', 'label': './labelsTr/MSWAL_0168.nii.gz'}, {'image': './imagesTr/MSWAL_0169_0000.nii.gz', 'label': './labelsTr/MSWAL_0169.nii.gz'}, {'image': './imagesTr/MSWAL_0170_0000.nii.gz', 'label': './labelsTr/MSWAL_0170.nii.gz'}, {'image': './imagesTr/MSWAL_0171_0000.nii.gz', 'label': './labelsTr/MSWAL_0171.nii.gz'}, {'image': './imagesTr/MSWAL_0172_0000.nii.gz', 'label': './labelsTr/MSWAL_0172.nii.gz'}, {'image': './imagesTr/MSWAL_0173_0000.nii.gz', 'label': './labelsTr/MSWAL_0173.nii.gz'}, 
{'image': './imagesTr/MSWAL_0174_0000.nii.gz', 'label': './labelsTr/MSWAL_0174.nii.gz'}, {'image': './imagesTr/MSWAL_0175_0000.nii.gz', 'label': './labelsTr/MSWAL_0175.nii.gz'}, {'image': './imagesTr/MSWAL_0176_0000.nii.gz', 'label': './labelsTr/MSWAL_0176.nii.gz'}, {'image': './imagesTr/MSWAL_0177_0000.nii.gz', 'label': './labelsTr/MSWAL_0177.nii.gz'}, {'image': './imagesTr/MSWAL_0178_0000.nii.gz', 'label': './labelsTr/MSWAL_0178.nii.gz'}, {'image': './imagesTr/MSWAL_0179_0000.nii.gz', 'label': './labelsTr/MSWAL_0179.nii.gz'}, {'image': './imagesTr/MSWAL_0180_0000.nii.gz', 'label': './labelsTr/MSWAL_0180.nii.gz'}, {'image': './imagesTr/MSWAL_0182_0000.nii.gz', 'label': './labelsTr/MSWAL_0182.nii.gz'}, {'image': './imagesTr/MSWAL_0183_0000.nii.gz', 'label': './labelsTr/MSWAL_0183.nii.gz'}, {'image': './imagesTr/MSWAL_0184_0000.nii.gz', 'label': './labelsTr/MSWAL_0184.nii.gz'}, {'image': './imagesTr/MSWAL_0185_0000.nii.gz', 'label': './labelsTr/MSWAL_0185.nii.gz'}, {'image': './imagesTr/MSWAL_0186_0000.nii.gz', 'label': './labelsTr/MSWAL_0186.nii.gz'}, {'image': './imagesTr/MSWAL_0187_0000.nii.gz', 'label': './labelsTr/MSWAL_0187.nii.gz'}, {'image': './imagesTr/MSWAL_0188_0000.nii.gz', 'label': './labelsTr/MSWAL_0188.nii.gz'}, {'image': './imagesTr/MSWAL_0189_0000.nii.gz', 'label': './labelsTr/MSWAL_0189.nii.gz'}, {'image': './imagesTr/MSWAL_0193_0000.nii.gz', 'label': './labelsTr/MSWAL_0193.nii.gz'}, {'image': './imagesTr/MSWAL_0194_0000.nii.gz', 'label': './labelsTr/MSWAL_0194.nii.gz'}, {'image': './imagesTr/MSWAL_0195_0000.nii.gz', 'label': './labelsTr/MSWAL_0195.nii.gz'}, {'image': './imagesTr/MSWAL_0199_0000.nii.gz', 'label': './labelsTr/MSWAL_0199.nii.gz'}, {'image': './imagesTr/MSWAL_0201_0000.nii.gz', 'label': './labelsTr/MSWAL_0201.nii.gz'}, {'image': './imagesTr/MSWAL_0202_0000.nii.gz', 'label': './labelsTr/MSWAL_0202.nii.gz'}, {'image': './imagesTr/MSWAL_0203_0000.nii.gz', 'label': './labelsTr/MSWAL_0203.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0204_0000.nii.gz', 'label': './labelsTr/MSWAL_0204.nii.gz'}, {'image': './imagesTr/MSWAL_0207_0000.nii.gz', 'label': './labelsTr/MSWAL_0207.nii.gz'}, {'image': './imagesTr/MSWAL_0208_0000.nii.gz', 'label': './labelsTr/MSWAL_0208.nii.gz'}, {'image': './imagesTr/MSWAL_0209_0000.nii.gz', 'label': './labelsTr/MSWAL_0209.nii.gz'}, {'image': './imagesTr/MSWAL_0214_0000.nii.gz', 'label': './labelsTr/MSWAL_0214.nii.gz'}, {'image': './imagesTr/MSWAL_0217_0000.nii.gz', 'label': './labelsTr/MSWAL_0217.nii.gz'}, {'image': './imagesTr/MSWAL_0218_0000.nii.gz', 'label': './labelsTr/MSWAL_0218.nii.gz'}, {'image': './imagesTr/MSWAL_0219_0000.nii.gz', 'label': './labelsTr/MSWAL_0219.nii.gz'}, {'image': './imagesTr/MSWAL_0220_0000.nii.gz', 'label': './labelsTr/MSWAL_0220.nii.gz'}, {'image': './imagesTr/MSWAL_0221_0000.nii.gz', 'label': './labelsTr/MSWAL_0221.nii.gz'}, {'image': './imagesTr/MSWAL_0222_0000.nii.gz', 'label': './labelsTr/MSWAL_0222.nii.gz'}, {'image': './imagesTr/MSWAL_0223_0000.nii.gz', 'label': './labelsTr/MSWAL_0223.nii.gz'}, {'image': './imagesTr/MSWAL_0224_0000.nii.gz', 'label': './labelsTr/MSWAL_0224.nii.gz'}, {'image': './imagesTr/MSWAL_0225_0000.nii.gz', 'label': './labelsTr/MSWAL_0225.nii.gz'}, {'image': './imagesTr/MSWAL_0226_0000.nii.gz', 'label': './labelsTr/MSWAL_0226.nii.gz'}, {'image': './imagesTr/MSWAL_0227_0000.nii.gz', 'label': './labelsTr/MSWAL_0227.nii.gz'}, {'image': './imagesTr/MSWAL_0228_0000.nii.gz', 'label': './labelsTr/MSWAL_0228.nii.gz'}, {'image': './imagesTr/MSWAL_0229_0000.nii.gz', 'label': './labelsTr/MSWAL_0229.nii.gz'}, {'image': './imagesTr/MSWAL_0230_0000.nii.gz', 'label': './labelsTr/MSWAL_0230.nii.gz'}, {'image': './imagesTr/MSWAL_0233_0000.nii.gz', 'label': './labelsTr/MSWAL_0233.nii.gz'}, {'image': './imagesTr/MSWAL_0234_0000.nii.gz', 'label': './labelsTr/MSWAL_0234.nii.gz'}, {'image': './imagesTr/MSWAL_0238_0000.nii.gz', 'label': './labelsTr/MSWAL_0238.nii.gz'}, {'image': './imagesTr/MSWAL_0241_0000.nii.gz', 
'label': './labelsTr/MSWAL_0241.nii.gz'}, {'image': './imagesTr/MSWAL_0242_0000.nii.gz', 'label': './labelsTr/MSWAL_0242.nii.gz'}, {'image': './imagesTr/MSWAL_0243_0000.nii.gz', 'label': './labelsTr/MSWAL_0243.nii.gz'}, {'image': './imagesTr/MSWAL_0245_0000.nii.gz', 'label': './labelsTr/MSWAL_0245.nii.gz'}, {'image': './imagesTr/MSWAL_0246_0000.nii.gz', 'label': './labelsTr/MSWAL_0246.nii.gz'}, {'image': './imagesTr/MSWAL_0247_0000.nii.gz', 'label': './labelsTr/MSWAL_0247.nii.gz'}, {'image': './imagesTr/MSWAL_0248_0000.nii.gz', 'label': './labelsTr/MSWAL_0248.nii.gz'}, {'image': './imagesTr/MSWAL_0251_0000.nii.gz', 'label': './labelsTr/MSWAL_0251.nii.gz'}, {'image': './imagesTr/MSWAL_0252_0000.nii.gz', 'label': './labelsTr/MSWAL_0252.nii.gz'}, {'image': './imagesTr/MSWAL_0253_0000.nii.gz', 'label': './labelsTr/MSWAL_0253.nii.gz'}, {'image': './imagesTr/MSWAL_0254_0000.nii.gz', 'label': './labelsTr/MSWAL_0254.nii.gz'}, {'image': './imagesTr/MSWAL_0255_0000.nii.gz', 'label': './labelsTr/MSWAL_0255.nii.gz'}, {'image': './imagesTr/MSWAL_0256_0000.nii.gz', 'label': './labelsTr/MSWAL_0256.nii.gz'}, {'image': './imagesTr/MSWAL_0257_0000.nii.gz', 'label': './labelsTr/MSWAL_0257.nii.gz'}, {'image': './imagesTr/MSWAL_0258_0000.nii.gz', 'label': './labelsTr/MSWAL_0258.nii.gz'}, {'image': './imagesTr/MSWAL_0259_0000.nii.gz', 'label': './labelsTr/MSWAL_0259.nii.gz'}, {'image': './imagesTr/MSWAL_0260_0000.nii.gz', 'label': './labelsTr/MSWAL_0260.nii.gz'}, {'image': './imagesTr/MSWAL_0261_0000.nii.gz', 'label': './labelsTr/MSWAL_0261.nii.gz'}, {'image': './imagesTr/MSWAL_0262_0000.nii.gz', 'label': './labelsTr/MSWAL_0262.nii.gz'}, {'image': './imagesTr/MSWAL_0263_0000.nii.gz', 'label': './labelsTr/MSWAL_0263.nii.gz'}, {'image': './imagesTr/MSWAL_0264_0000.nii.gz', 'label': './labelsTr/MSWAL_0264.nii.gz'}, {'image': './imagesTr/MSWAL_0265_0000.nii.gz', 'label': './labelsTr/MSWAL_0265.nii.gz'}, {'image': './imagesTr/MSWAL_0267_0000.nii.gz', 'label': './labelsTr/MSWAL_0267.nii.gz'}, 
{'image': './imagesTr/MSWAL_0270_0000.nii.gz', 'label': './labelsTr/MSWAL_0270.nii.gz'}, {'image': './imagesTr/MSWAL_0271_0000.nii.gz', 'label': './labelsTr/MSWAL_0271.nii.gz'}, {'image': './imagesTr/MSWAL_0272_0000.nii.gz', 'label': './labelsTr/MSWAL_0272.nii.gz'}, {'image': './imagesTr/MSWAL_0273_0000.nii.gz', 'label': './labelsTr/MSWAL_0273.nii.gz'}, {'image': './imagesTr/MSWAL_0274_0000.nii.gz', 'label': './labelsTr/MSWAL_0274.nii.gz'}, {'image': './imagesTr/MSWAL_0275_0000.nii.gz', 'label': './labelsTr/MSWAL_0275.nii.gz'}, {'image': './imagesTr/MSWAL_0276_0000.nii.gz', 'label': './labelsTr/MSWAL_0276.nii.gz'}, {'image': './imagesTr/MSWAL_0277_0000.nii.gz', 'label': './labelsTr/MSWAL_0277.nii.gz'}, {'image': './imagesTr/MSWAL_0278_0000.nii.gz', 'label': './labelsTr/MSWAL_0278.nii.gz'}, {'image': './imagesTr/MSWAL_0279_0000.nii.gz', 'label': './labelsTr/MSWAL_0279.nii.gz'}, {'image': './imagesTr/MSWAL_0281_0000.nii.gz', 'label': './labelsTr/MSWAL_0281.nii.gz'}, {'image': './imagesTr/MSWAL_0282_0000.nii.gz', 'label': './labelsTr/MSWAL_0282.nii.gz'}, {'image': './imagesTr/MSWAL_0283_0000.nii.gz', 'label': './labelsTr/MSWAL_0283.nii.gz'}, {'image': './imagesTr/MSWAL_0284_0000.nii.gz', 'label': './labelsTr/MSWAL_0284.nii.gz'}, {'image': './imagesTr/MSWAL_0285_0000.nii.gz', 'label': './labelsTr/MSWAL_0285.nii.gz'}, {'image': './imagesTr/MSWAL_0288_0000.nii.gz', 'label': './labelsTr/MSWAL_0288.nii.gz'}, {'image': './imagesTr/MSWAL_0289_0000.nii.gz', 'label': './labelsTr/MSWAL_0289.nii.gz'}, {'image': './imagesTr/MSWAL_0290_0000.nii.gz', 'label': './labelsTr/MSWAL_0290.nii.gz'}, {'image': './imagesTr/MSWAL_0293_0000.nii.gz', 'label': './labelsTr/MSWAL_0293.nii.gz'}, {'image': './imagesTr/MSWAL_0296_0000.nii.gz', 'label': './labelsTr/MSWAL_0296.nii.gz'}, {'image': './imagesTr/MSWAL_0297_0000.nii.gz', 'label': './labelsTr/MSWAL_0297.nii.gz'}, {'image': './imagesTr/MSWAL_0301_0000.nii.gz', 'label': './labelsTr/MSWAL_0301.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0302_0000.nii.gz', 'label': './labelsTr/MSWAL_0302.nii.gz'}, {'image': './imagesTr/MSWAL_0303_0000.nii.gz', 'label': './labelsTr/MSWAL_0303.nii.gz'}, {'image': './imagesTr/MSWAL_0306_0000.nii.gz', 'label': './labelsTr/MSWAL_0306.nii.gz'}, {'image': './imagesTr/MSWAL_0307_0000.nii.gz', 'label': './labelsTr/MSWAL_0307.nii.gz'}, {'image': './imagesTr/MSWAL_0308_0000.nii.gz', 'label': './labelsTr/MSWAL_0308.nii.gz'}, {'image': './imagesTr/MSWAL_0311_0000.nii.gz', 'label': './labelsTr/MSWAL_0311.nii.gz'}, {'image': './imagesTr/MSWAL_0312_0000.nii.gz', 'label': './labelsTr/MSWAL_0312.nii.gz'}, {'image': './imagesTr/MSWAL_0313_0000.nii.gz', 'label': './labelsTr/MSWAL_0313.nii.gz'}, {'image': './imagesTr/MSWAL_0314_0000.nii.gz', 'label': './labelsTr/MSWAL_0314.nii.gz'}, {'image': './imagesTr/MSWAL_0316_0000.nii.gz', 'label': './labelsTr/MSWAL_0316.nii.gz'}, {'image': './imagesTr/MSWAL_0317_0000.nii.gz', 'label': './labelsTr/MSWAL_0317.nii.gz'}, {'image': './imagesTr/MSWAL_0318_0000.nii.gz', 'label': './labelsTr/MSWAL_0318.nii.gz'}, {'image': './imagesTr/MSWAL_0320_0000.nii.gz', 'label': './labelsTr/MSWAL_0320.nii.gz'}, {'image': './imagesTr/MSWAL_0323_0000.nii.gz', 'label': './labelsTr/MSWAL_0323.nii.gz'}, {'image': './imagesTr/MSWAL_0324_0000.nii.gz', 'label': './labelsTr/MSWAL_0324.nii.gz'}, {'image': './imagesTr/MSWAL_0326_0000.nii.gz', 'label': './labelsTr/MSWAL_0326.nii.gz'}, {'image': './imagesTr/MSWAL_0327_0000.nii.gz', 'label': './labelsTr/MSWAL_0327.nii.gz'}, {'image': './imagesTr/MSWAL_0328_0000.nii.gz', 'label': './labelsTr/MSWAL_0328.nii.gz'}, {'image': './imagesTr/MSWAL_0330_0000.nii.gz', 'label': './labelsTr/MSWAL_0330.nii.gz'}, {'image': './imagesTr/MSWAL_0331_0000.nii.gz', 'label': './labelsTr/MSWAL_0331.nii.gz'}, {'image': './imagesTr/MSWAL_0332_0000.nii.gz', 'label': './labelsTr/MSWAL_0332.nii.gz'}, {'image': './imagesTr/MSWAL_0333_0000.nii.gz', 'label': './labelsTr/MSWAL_0333.nii.gz'}, {'image': './imagesTr/MSWAL_0334_0000.nii.gz', 
'label': './labelsTr/MSWAL_0334.nii.gz'}, {'image': './imagesTr/MSWAL_0335_0000.nii.gz', 'label': './labelsTr/MSWAL_0335.nii.gz'}, {'image': './imagesTr/MSWAL_0336_0000.nii.gz', 'label': './labelsTr/MSWAL_0336.nii.gz'}, {'image': './imagesTr/MSWAL_0337_0000.nii.gz', 'label': './labelsTr/MSWAL_0337.nii.gz'}, {'image': './imagesTr/MSWAL_0338_0000.nii.gz', 'label': './labelsTr/MSWAL_0338.nii.gz'}, {'image': './imagesTr/MSWAL_0341_0000.nii.gz', 'label': './labelsTr/MSWAL_0341.nii.gz'}, {'image': './imagesTr/MSWAL_0342_0000.nii.gz', 'label': './labelsTr/MSWAL_0342.nii.gz'}, {'image': './imagesTr/MSWAL_0343_0000.nii.gz', 'label': './labelsTr/MSWAL_0343.nii.gz'}, {'image': './imagesTr/MSWAL_0344_0000.nii.gz', 'label': './labelsTr/MSWAL_0344.nii.gz'}, {'image': './imagesTr/MSWAL_0345_0000.nii.gz', 'label': './labelsTr/MSWAL_0345.nii.gz'}, {'image': './imagesTr/MSWAL_0346_0000.nii.gz', 'label': './labelsTr/MSWAL_0346.nii.gz'}, {'image': './imagesTr/MSWAL_0348_0000.nii.gz', 'label': './labelsTr/MSWAL_0348.nii.gz'}, {'image': './imagesTr/MSWAL_0353_0000.nii.gz', 'label': './labelsTr/MSWAL_0353.nii.gz'}, {'image': './imagesTr/MSWAL_0354_0000.nii.gz', 'label': './labelsTr/MSWAL_0354.nii.gz'}, {'image': './imagesTr/MSWAL_0355_0000.nii.gz', 'label': './labelsTr/MSWAL_0355.nii.gz'}, {'image': './imagesTr/MSWAL_0356_0000.nii.gz', 'label': './labelsTr/MSWAL_0356.nii.gz'}, {'image': './imagesTr/MSWAL_0357_0000.nii.gz', 'label': './labelsTr/MSWAL_0357.nii.gz'}, {'image': './imagesTr/MSWAL_0360_0000.nii.gz', 'label': './labelsTr/MSWAL_0360.nii.gz'}, {'image': './imagesTr/MSWAL_0361_0000.nii.gz', 'label': './labelsTr/MSWAL_0361.nii.gz'}, {'image': './imagesTr/MSWAL_0362_0000.nii.gz', 'label': './labelsTr/MSWAL_0362.nii.gz'}, {'image': './imagesTr/MSWAL_0363_0000.nii.gz', 'label': './labelsTr/MSWAL_0363.nii.gz'}, {'image': './imagesTr/MSWAL_0365_0000.nii.gz', 'label': './labelsTr/MSWAL_0365.nii.gz'}, {'image': './imagesTr/MSWAL_0366_0000.nii.gz', 'label': './labelsTr/MSWAL_0366.nii.gz'}, 
{'image': './imagesTr/MSWAL_0369_0000.nii.gz', 'label': './labelsTr/MSWAL_0369.nii.gz'}, {'image': './imagesTr/MSWAL_0370_0000.nii.gz', 'label': './labelsTr/MSWAL_0370.nii.gz'}, {'image': './imagesTr/MSWAL_0373_0000.nii.gz', 'label': './labelsTr/MSWAL_0373.nii.gz'}, {'image': './imagesTr/MSWAL_0374_0000.nii.gz', 'label': './labelsTr/MSWAL_0374.nii.gz'}, {'image': './imagesTr/MSWAL_0375_0000.nii.gz', 'label': './labelsTr/MSWAL_0375.nii.gz'}, {'image': './imagesTr/MSWAL_0376_0000.nii.gz', 'label': './labelsTr/MSWAL_0376.nii.gz'}, {'image': './imagesTr/MSWAL_0378_0000.nii.gz', 'label': './labelsTr/MSWAL_0378.nii.gz'}, {'image': './imagesTr/MSWAL_0379_0000.nii.gz', 'label': './labelsTr/MSWAL_0379.nii.gz'}, {'image': './imagesTr/MSWAL_0380_0000.nii.gz', 'label': './labelsTr/MSWAL_0380.nii.gz'}, {'image': './imagesTr/MSWAL_0381_0000.nii.gz', 'label': './labelsTr/MSWAL_0381.nii.gz'}, {'image': './imagesTr/MSWAL_0382_0000.nii.gz', 'label': './labelsTr/MSWAL_0382.nii.gz'}, {'image': './imagesTr/MSWAL_0387_0000.nii.gz', 'label': './labelsTr/MSWAL_0387.nii.gz'}, {'image': './imagesTr/MSWAL_0388_0000.nii.gz', 'label': './labelsTr/MSWAL_0388.nii.gz'}, {'image': './imagesTr/MSWAL_0389_0000.nii.gz', 'label': './labelsTr/MSWAL_0389.nii.gz'}, {'image': './imagesTr/MSWAL_0390_0000.nii.gz', 'label': './labelsTr/MSWAL_0390.nii.gz'}, {'image': './imagesTr/MSWAL_0391_0000.nii.gz', 'label': './labelsTr/MSWAL_0391.nii.gz'}, {'image': './imagesTr/MSWAL_0392_0000.nii.gz', 'label': './labelsTr/MSWAL_0392.nii.gz'}, {'image': './imagesTr/MSWAL_0393_0000.nii.gz', 'label': './labelsTr/MSWAL_0393.nii.gz'}, {'image': './imagesTr/MSWAL_0397_0000.nii.gz', 'label': './labelsTr/MSWAL_0397.nii.gz'}, {'image': './imagesTr/MSWAL_0398_0000.nii.gz', 'label': './labelsTr/MSWAL_0398.nii.gz'}, {'image': './imagesTr/MSWAL_0399_0000.nii.gz', 'label': './labelsTr/MSWAL_0399.nii.gz'}, {'image': './imagesTr/MSWAL_0400_0000.nii.gz', 'label': './labelsTr/MSWAL_0400.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0402_0000.nii.gz', 'label': './labelsTr/MSWAL_0402.nii.gz'}, {'image': './imagesTr/MSWAL_0403_0000.nii.gz', 'label': './labelsTr/MSWAL_0403.nii.gz'}, {'image': './imagesTr/MSWAL_0407_0000.nii.gz', 'label': './labelsTr/MSWAL_0407.nii.gz'}, {'image': './imagesTr/MSWAL_0409_0000.nii.gz', 'label': './labelsTr/MSWAL_0409.nii.gz'}, {'image': './imagesTr/MSWAL_0410_0000.nii.gz', 'label': './labelsTr/MSWAL_0410.nii.gz'}, {'image': './imagesTr/MSWAL_0411_0000.nii.gz', 'label': './labelsTr/MSWAL_0411.nii.gz'}, {'image': './imagesTr/MSWAL_0412_0000.nii.gz', 'label': './labelsTr/MSWAL_0412.nii.gz'}, {'image': './imagesTr/MSWAL_0414_0000.nii.gz', 'label': './labelsTr/MSWAL_0414.nii.gz'}, {'image': './imagesTr/MSWAL_0415_0000.nii.gz', 'label': './labelsTr/MSWAL_0415.nii.gz'}, {'image': './imagesTr/MSWAL_0416_0000.nii.gz', 'label': './labelsTr/MSWAL_0416.nii.gz'}, {'image': './imagesTr/MSWAL_0417_0000.nii.gz', 'label': './labelsTr/MSWAL_0417.nii.gz'}, {'image': './imagesTr/MSWAL_0418_0000.nii.gz', 'label': './labelsTr/MSWAL_0418.nii.gz'}, {'image': './imagesTr/MSWAL_0419_0000.nii.gz', 'label': './labelsTr/MSWAL_0419.nii.gz'}, {'image': './imagesTr/MSWAL_0420_0000.nii.gz', 'label': './labelsTr/MSWAL_0420.nii.gz'}, {'image': './imagesTr/MSWAL_0421_0000.nii.gz', 'label': './labelsTr/MSWAL_0421.nii.gz'}, {'image': './imagesTr/MSWAL_0422_0000.nii.gz', 'label': './labelsTr/MSWAL_0422.nii.gz'}, {'image': './imagesTr/MSWAL_0423_0000.nii.gz', 'label': './labelsTr/MSWAL_0423.nii.gz'}, {'image': './imagesTr/MSWAL_0425_0000.nii.gz', 'label': './labelsTr/MSWAL_0425.nii.gz'}, {'image': './imagesTr/MSWAL_0426_0000.nii.gz', 'label': './labelsTr/MSWAL_0426.nii.gz'}, {'image': './imagesTr/MSWAL_0427_0000.nii.gz', 'label': './labelsTr/MSWAL_0427.nii.gz'}, {'image': './imagesTr/MSWAL_0428_0000.nii.gz', 'label': './labelsTr/MSWAL_0428.nii.gz'}, {'image': './imagesTr/MSWAL_0429_0000.nii.gz', 'label': './labelsTr/MSWAL_0429.nii.gz'}, {'image': './imagesTr/MSWAL_0430_0000.nii.gz', 
'label': './labelsTr/MSWAL_0430.nii.gz'}, {'image': './imagesTr/MSWAL_0431_0000.nii.gz', 'label': './labelsTr/MSWAL_0431.nii.gz'}, {'image': './imagesTr/MSWAL_0432_0000.nii.gz', 'label': './labelsTr/MSWAL_0432.nii.gz'}, {'image': './imagesTr/MSWAL_0434_0000.nii.gz', 'label': './labelsTr/MSWAL_0434.nii.gz'}, {'image': './imagesTr/MSWAL_0435_0000.nii.gz', 'label': './labelsTr/MSWAL_0435.nii.gz'}, {'image': './imagesTr/MSWAL_0436_0000.nii.gz', 'label': './labelsTr/MSWAL_0436.nii.gz'}, {'image': './imagesTr/MSWAL_0437_0000.nii.gz', 'label': './labelsTr/MSWAL_0437.nii.gz'}, {'image': './imagesTr/MSWAL_0438_0000.nii.gz', 'label': './labelsTr/MSWAL_0438.nii.gz'}, {'image': './imagesTr/MSWAL_0439_0000.nii.gz', 'label': './labelsTr/MSWAL_0439.nii.gz'}, {'image': './imagesTr/MSWAL_0440_0000.nii.gz', 'label': './labelsTr/MSWAL_0440.nii.gz'}, {'image': './imagesTr/MSWAL_0442_0000.nii.gz', 'label': './labelsTr/MSWAL_0442.nii.gz'}, {'image': './imagesTr/MSWAL_0446_0000.nii.gz', 'label': './labelsTr/MSWAL_0446.nii.gz'}, {'image': './imagesTr/MSWAL_0447_0000.nii.gz', 'label': './labelsTr/MSWAL_0447.nii.gz'}, {'image': './imagesTr/MSWAL_0452_0000.nii.gz', 'label': './labelsTr/MSWAL_0452.nii.gz'}, {'image': './imagesTr/MSWAL_0453_0000.nii.gz', 'label': './labelsTr/MSWAL_0453.nii.gz'}, {'image': './imagesTr/MSWAL_0455_0000.nii.gz', 'label': './labelsTr/MSWAL_0455.nii.gz'}, {'image': './imagesTr/MSWAL_0457_0000.nii.gz', 'label': './labelsTr/MSWAL_0457.nii.gz'}, {'image': './imagesTr/MSWAL_0460_0000.nii.gz', 'label': './labelsTr/MSWAL_0460.nii.gz'}, {'image': './imagesTr/MSWAL_0461_0000.nii.gz', 'label': './labelsTr/MSWAL_0461.nii.gz'}, {'image': './imagesTr/MSWAL_0463_0000.nii.gz', 'label': './labelsTr/MSWAL_0463.nii.gz'}, {'image': './imagesTr/MSWAL_0464_0000.nii.gz', 'label': './labelsTr/MSWAL_0464.nii.gz'}, {'image': './imagesTr/MSWAL_0465_0000.nii.gz', 'label': './labelsTr/MSWAL_0465.nii.gz'}, {'image': './imagesTr/MSWAL_0466_0000.nii.gz', 'label': './labelsTr/MSWAL_0466.nii.gz'}, 
{'image': './imagesTr/MSWAL_0468_0000.nii.gz', 'label': './labelsTr/MSWAL_0468.nii.gz'}, {'image': './imagesTr/MSWAL_0470_0000.nii.gz', 'label': './labelsTr/MSWAL_0470.nii.gz'}, {'image': './imagesTr/MSWAL_0471_0000.nii.gz', 'label': './labelsTr/MSWAL_0471.nii.gz'}, {'image': './imagesTr/MSWAL_0473_0000.nii.gz', 'label': './labelsTr/MSWAL_0473.nii.gz'}, {'image': './imagesTr/MSWAL_0474_0000.nii.gz', 'label': './labelsTr/MSWAL_0474.nii.gz'}, {'image': './imagesTr/MSWAL_0475_0000.nii.gz', 'label': './labelsTr/MSWAL_0475.nii.gz'}, {'image': './imagesTr/MSWAL_0476_0000.nii.gz', 'label': './labelsTr/MSWAL_0476.nii.gz'}, {'image': './imagesTr/MSWAL_0477_0000.nii.gz', 'label': './labelsTr/MSWAL_0477.nii.gz'}, {'image': './imagesTr/MSWAL_0479_0000.nii.gz', 'label': './labelsTr/MSWAL_0479.nii.gz'}, {'image': './imagesTr/MSWAL_0480_0000.nii.gz', 'label': './labelsTr/MSWAL_0480.nii.gz'}, {'image': './imagesTr/MSWAL_0482_0000.nii.gz', 'label': './labelsTr/MSWAL_0482.nii.gz'}, {'image': './imagesTr/MSWAL_0483_0000.nii.gz', 'label': './labelsTr/MSWAL_0483.nii.gz'}, {'image': './imagesTr/MSWAL_0484_0000.nii.gz', 'label': './labelsTr/MSWAL_0484.nii.gz'}, {'image': './imagesTr/MSWAL_0485_0000.nii.gz', 'label': './labelsTr/MSWAL_0485.nii.gz'}, {'image': './imagesTr/MSWAL_0486_0000.nii.gz', 'label': './labelsTr/MSWAL_0486.nii.gz'}, {'image': './imagesTr/MSWAL_0487_0000.nii.gz', 'label': './labelsTr/MSWAL_0487.nii.gz'}, {'image': './imagesTr/MSWAL_0488_0000.nii.gz', 'label': './labelsTr/MSWAL_0488.nii.gz'}, {'image': './imagesTr/MSWAL_0489_0000.nii.gz', 'label': './labelsTr/MSWAL_0489.nii.gz'}, {'image': './imagesTr/MSWAL_0490_0000.nii.gz', 'label': './labelsTr/MSWAL_0490.nii.gz'}, {'image': './imagesTr/MSWAL_0491_0000.nii.gz', 'label': './labelsTr/MSWAL_0491.nii.gz'}, {'image': './imagesTr/MSWAL_0492_0000.nii.gz', 'label': './labelsTr/MSWAL_0492.nii.gz'}, {'image': './imagesTr/MSWAL_0493_0000.nii.gz', 'label': './labelsTr/MSWAL_0493.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0495_0000.nii.gz', 'label': './labelsTr/MSWAL_0495.nii.gz'}, {'image': './imagesTr/MSWAL_0497_0000.nii.gz', 'label': './labelsTr/MSWAL_0497.nii.gz'}, {'image': './imagesTr/MSWAL_0498_0000.nii.gz', 'label': './labelsTr/MSWAL_0498.nii.gz'}, {'image': './imagesTr/MSWAL_0500_0000.nii.gz', 'label': './labelsTr/MSWAL_0500.nii.gz'}, {'image': './imagesTr/MSWAL_0501_0000.nii.gz', 'label': './labelsTr/MSWAL_0501.nii.gz'}, {'image': './imagesTr/MSWAL_0504_0000.nii.gz', 'label': './labelsTr/MSWAL_0504.nii.gz'}, {'image': './imagesTr/MSWAL_0505_0000.nii.gz', 'label': './labelsTr/MSWAL_0505.nii.gz'}, {'image': './imagesTr/MSWAL_0506_0000.nii.gz', 'label': './labelsTr/MSWAL_0506.nii.gz'}, {'image': './imagesTr/MSWAL_0507_0000.nii.gz', 'label': './labelsTr/MSWAL_0507.nii.gz'}, {'image': './imagesTr/MSWAL_0508_0000.nii.gz', 'label': './labelsTr/MSWAL_0508.nii.gz'}, {'image': './imagesTr/MSWAL_0509_0000.nii.gz', 'label': './labelsTr/MSWAL_0509.nii.gz'}, {'image': './imagesTr/MSWAL_0510_0000.nii.gz', 'label': './labelsTr/MSWAL_0510.nii.gz'}, {'image': './imagesTr/MSWAL_0512_0000.nii.gz', 'label': './labelsTr/MSWAL_0512.nii.gz'}, {'image': './imagesTr/MSWAL_0516_0000.nii.gz', 'label': './labelsTr/MSWAL_0516.nii.gz'}, {'image': './imagesTr/MSWAL_0518_0000.nii.gz', 'label': './labelsTr/MSWAL_0518.nii.gz'}, {'image': './imagesTr/MSWAL_0519_0000.nii.gz', 'label': './labelsTr/MSWAL_0519.nii.gz'}, {'image': './imagesTr/MSWAL_0521_0000.nii.gz', 'label': './labelsTr/MSWAL_0521.nii.gz'}, {'image': './imagesTr/MSWAL_0522_0000.nii.gz', 'label': './labelsTr/MSWAL_0522.nii.gz'}, {'image': './imagesTr/MSWAL_0523_0000.nii.gz', 'label': './labelsTr/MSWAL_0523.nii.gz'}, {'image': './imagesTr/MSWAL_0524_0000.nii.gz', 'label': './labelsTr/MSWAL_0524.nii.gz'}, {'image': './imagesTr/MSWAL_0526_0000.nii.gz', 'label': './labelsTr/MSWAL_0526.nii.gz'}, {'image': './imagesTr/MSWAL_0527_0000.nii.gz', 'label': './labelsTr/MSWAL_0527.nii.gz'}, {'image': './imagesTr/MSWAL_0530_0000.nii.gz', 
'label': './labelsTr/MSWAL_0530.nii.gz'}, {'image': './imagesTr/MSWAL_0531_0000.nii.gz', 'label': './labelsTr/MSWAL_0531.nii.gz'}, {'image': './imagesTr/MSWAL_0534_0000.nii.gz', 'label': './labelsTr/MSWAL_0534.nii.gz'}, {'image': './imagesTr/MSWAL_0535_0000.nii.gz', 'label': './labelsTr/MSWAL_0535.nii.gz'}, {'image': './imagesTr/MSWAL_0536_0000.nii.gz', 'label': './labelsTr/MSWAL_0536.nii.gz'}, {'image': './imagesTr/MSWAL_0538_0000.nii.gz', 'label': './labelsTr/MSWAL_0538.nii.gz'}, {'image': './imagesTr/MSWAL_0539_0000.nii.gz', 'label': './labelsTr/MSWAL_0539.nii.gz'}, {'image': './imagesTr/MSWAL_0540_0000.nii.gz', 'label': './labelsTr/MSWAL_0540.nii.gz'}, {'image': './imagesTr/MSWAL_0542_0000.nii.gz', 'label': './labelsTr/MSWAL_0542.nii.gz'}, {'image': './imagesTr/MSWAL_0544_0000.nii.gz', 'label': './labelsTr/MSWAL_0544.nii.gz'}, {'image': './imagesTr/MSWAL_0545_0000.nii.gz', 'label': './labelsTr/MSWAL_0545.nii.gz'}, {'image': './imagesTr/MSWAL_0546_0000.nii.gz', 'label': './labelsTr/MSWAL_0546.nii.gz'}, {'image': './imagesTr/MSWAL_0547_0000.nii.gz', 'label': './labelsTr/MSWAL_0547.nii.gz'}, {'image': './imagesTr/MSWAL_0548_0000.nii.gz', 'label': './labelsTr/MSWAL_0548.nii.gz'}, {'image': './imagesTr/MSWAL_0549_0000.nii.gz', 'label': './labelsTr/MSWAL_0549.nii.gz'}, {'image': './imagesTr/MSWAL_0550_0000.nii.gz', 'label': './labelsTr/MSWAL_0550.nii.gz'}, {'image': './imagesTr/MSWAL_0551_0000.nii.gz', 'label': './labelsTr/MSWAL_0551.nii.gz'}, {'image': './imagesTr/MSWAL_0552_0000.nii.gz', 'label': './labelsTr/MSWAL_0552.nii.gz'}, {'image': './imagesTr/MSWAL_0553_0000.nii.gz', 'label': './labelsTr/MSWAL_0553.nii.gz'}, {'image': './imagesTr/MSWAL_0554_0000.nii.gz', 'label': './labelsTr/MSWAL_0554.nii.gz'}, {'image': './imagesTr/MSWAL_0555_0000.nii.gz', 'label': './labelsTr/MSWAL_0555.nii.gz'}, {'image': './imagesTr/MSWAL_0556_0000.nii.gz', 'label': './labelsTr/MSWAL_0556.nii.gz'}, {'image': './imagesTr/MSWAL_0557_0000.nii.gz', 'label': './labelsTr/MSWAL_0557.nii.gz'}, 
{'image': './imagesTr/MSWAL_0558_0000.nii.gz', 'label': './labelsTr/MSWAL_0558.nii.gz'}, {'image': './imagesTr/MSWAL_0559_0000.nii.gz', 'label': './labelsTr/MSWAL_0559.nii.gz'}, {'image': './imagesTr/MSWAL_0561_0000.nii.gz', 'label': './labelsTr/MSWAL_0561.nii.gz'}, {'image': './imagesTr/MSWAL_0562_0000.nii.gz', 'label': './labelsTr/MSWAL_0562.nii.gz'}, {'image': './imagesTr/MSWAL_0563_0000.nii.gz', 'label': './labelsTr/MSWAL_0563.nii.gz'}, {'image': './imagesTr/MSWAL_0564_0000.nii.gz', 'label': './labelsTr/MSWAL_0564.nii.gz'}, {'image': './imagesTr/MSWAL_0566_0000.nii.gz', 'label': './labelsTr/MSWAL_0566.nii.gz'}, {'image': './imagesTr/MSWAL_0567_0000.nii.gz', 'label': './labelsTr/MSWAL_0567.nii.gz'}, {'image': './imagesTr/MSWAL_0568_0000.nii.gz', 'label': './labelsTr/MSWAL_0568.nii.gz'}, {'image': './imagesTr/MSWAL_0571_0000.nii.gz', 'label': './labelsTr/MSWAL_0571.nii.gz'}, {'image': './imagesTr/MSWAL_0573_0000.nii.gz', 'label': './labelsTr/MSWAL_0573.nii.gz'}, {'image': './imagesTr/MSWAL_0574_0000.nii.gz', 'label': './labelsTr/MSWAL_0574.nii.gz'}, {'image': './imagesTr/MSWAL_0575_0000.nii.gz', 'label': './labelsTr/MSWAL_0575.nii.gz'}, {'image': './imagesTr/MSWAL_0577_0000.nii.gz', 'label': './labelsTr/MSWAL_0577.nii.gz'}, {'image': './imagesTr/MSWAL_0578_0000.nii.gz', 'label': './labelsTr/MSWAL_0578.nii.gz'}, {'image': './imagesTr/MSWAL_0579_0000.nii.gz', 'label': './labelsTr/MSWAL_0579.nii.gz'}, {'image': './imagesTr/MSWAL_0580_0000.nii.gz', 'label': './labelsTr/MSWAL_0580.nii.gz'}, {'image': './imagesTr/MSWAL_0581_0000.nii.gz', 'label': './labelsTr/MSWAL_0581.nii.gz'}, {'image': './imagesTr/MSWAL_0582_0000.nii.gz', 'label': './labelsTr/MSWAL_0582.nii.gz'}, {'image': './imagesTr/MSWAL_0583_0000.nii.gz', 'label': './labelsTr/MSWAL_0583.nii.gz'}, {'image': './imagesTr/MSWAL_0584_0000.nii.gz', 'label': './labelsTr/MSWAL_0584.nii.gz'}, {'image': './imagesTr/MSWAL_0586_0000.nii.gz', 'label': './labelsTr/MSWAL_0586.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0590_0000.nii.gz', 'label': './labelsTr/MSWAL_0590.nii.gz'}, {'image': './imagesTr/MSWAL_0591_0000.nii.gz', 'label': './labelsTr/MSWAL_0591.nii.gz'}, {'image': './imagesTr/MSWAL_0592_0000.nii.gz', 'label': './labelsTr/MSWAL_0592.nii.gz'}, {'image': './imagesTr/MSWAL_0593_0000.nii.gz', 'label': './labelsTr/MSWAL_0593.nii.gz'}, {'image': './imagesTr/MSWAL_0595_0000.nii.gz', 'label': './labelsTr/MSWAL_0595.nii.gz'}, {'image': './imagesTr/MSWAL_0596_0000.nii.gz', 'label': './labelsTr/MSWAL_0596.nii.gz'}, {'image': './imagesTr/MSWAL_0597_0000.nii.gz', 'label': './labelsTr/MSWAL_0597.nii.gz'}, {'image': './imagesTr/MSWAL_0598_0000.nii.gz', 'label': './labelsTr/MSWAL_0598.nii.gz'}, {'image': './imagesTr/MSWAL_0599_0000.nii.gz', 'label': './labelsTr/MSWAL_0599.nii.gz'}, {'image': './imagesTr/MSWAL_0600_0000.nii.gz', 'label': './labelsTr/MSWAL_0600.nii.gz'}, {'image': './imagesTr/MSWAL_0601_0000.nii.gz', 'label': './labelsTr/MSWAL_0601.nii.gz'}, {'image': './imagesTr/MSWAL_0602_0000.nii.gz', 'label': './labelsTr/MSWAL_0602.nii.gz'}, {'image': './imagesTr/MSWAL_0604_0000.nii.gz', 'label': './labelsTr/MSWAL_0604.nii.gz'}, {'image': './imagesTr/MSWAL_0605_0000.nii.gz', 'label': './labelsTr/MSWAL_0605.nii.gz'}, {'image': './imagesTr/MSWAL_0608_0000.nii.gz', 'label': './labelsTr/MSWAL_0608.nii.gz'}, {'image': './imagesTr/MSWAL_0612_0000.nii.gz', 'label': './labelsTr/MSWAL_0612.nii.gz'}, {'image': './imagesTr/MSWAL_0614_0000.nii.gz', 'label': './labelsTr/MSWAL_0614.nii.gz'}, {'image': './imagesTr/MSWAL_0615_0000.nii.gz', 'label': './labelsTr/MSWAL_0615.nii.gz'}, {'image': './imagesTr/MSWAL_0616_0000.nii.gz', 'label': './labelsTr/MSWAL_0616.nii.gz'}, {'image': './imagesTr/MSWAL_0617_0000.nii.gz', 'label': './labelsTr/MSWAL_0617.nii.gz'}, {'image': './imagesTr/MSWAL_0621_0000.nii.gz', 'label': './labelsTr/MSWAL_0621.nii.gz'}, {'image': './imagesTr/MSWAL_0623_0000.nii.gz', 'label': './labelsTr/MSWAL_0623.nii.gz'}, {'image': './imagesTr/MSWAL_0625_0000.nii.gz', 
'label': './labelsTr/MSWAL_0625.nii.gz'}, {'image': './imagesTr/MSWAL_0626_0000.nii.gz', 'label': './labelsTr/MSWAL_0626.nii.gz'}, {'image': './imagesTr/MSWAL_0627_0000.nii.gz', 'label': './labelsTr/MSWAL_0627.nii.gz'}, {'image': './imagesTr/MSWAL_0628_0000.nii.gz', 'label': './labelsTr/MSWAL_0628.nii.gz'}, {'image': './imagesTr/MSWAL_0629_0000.nii.gz', 'label': './labelsTr/MSWAL_0629.nii.gz'}, {'image': './imagesTr/MSWAL_0630_0000.nii.gz', 'label': './labelsTr/MSWAL_0630.nii.gz'}, {'image': './imagesTr/MSWAL_0632_0000.nii.gz', 'label': './labelsTr/MSWAL_0632.nii.gz'}, {'image': './imagesTr/MSWAL_0635_0000.nii.gz', 'label': './labelsTr/MSWAL_0635.nii.gz'}, {'image': './imagesTr/MSWAL_0636_0000.nii.gz', 'label': './labelsTr/MSWAL_0636.nii.gz'}, {'image': './imagesTr/MSWAL_0638_0000.nii.gz', 'label': './labelsTr/MSWAL_0638.nii.gz'}, {'image': './imagesTr/MSWAL_0640_0000.nii.gz', 'label': './labelsTr/MSWAL_0640.nii.gz'}, {'image': './imagesTr/MSWAL_0641_0000.nii.gz', 'label': './labelsTr/MSWAL_0641.nii.gz'}, {'image': './imagesTr/MSWAL_0643_0000.nii.gz', 'label': './labelsTr/MSWAL_0643.nii.gz'}, {'image': './imagesTr/MSWAL_0644_0000.nii.gz', 'label': './labelsTr/MSWAL_0644.nii.gz'}, {'image': './imagesTr/MSWAL_0646_0000.nii.gz', 'label': './labelsTr/MSWAL_0646.nii.gz'}, {'image': './imagesTr/MSWAL_0648_0000.nii.gz', 'label': './labelsTr/MSWAL_0648.nii.gz'}, {'image': './imagesTr/MSWAL_0649_0000.nii.gz', 'label': './labelsTr/MSWAL_0649.nii.gz'}, {'image': './imagesTr/MSWAL_0650_0000.nii.gz', 'label': './labelsTr/MSWAL_0650.nii.gz'}, {'image': './imagesTr/MSWAL_0651_0000.nii.gz', 'label': './labelsTr/MSWAL_0651.nii.gz'}, {'image': './imagesTr/MSWAL_0653_0000.nii.gz', 'label': './labelsTr/MSWAL_0653.nii.gz'}, {'image': './imagesTr/MSWAL_0654_0000.nii.gz', 'label': './labelsTr/MSWAL_0654.nii.gz'}, {'image': './imagesTr/MSWAL_0655_0000.nii.gz', 'label': './labelsTr/MSWAL_0655.nii.gz'}, {'image': './imagesTr/MSWAL_0656_0000.nii.gz', 'label': './labelsTr/MSWAL_0656.nii.gz'}, 
{'image': './imagesTr/MSWAL_0658_0000.nii.gz', 'label': './labelsTr/MSWAL_0658.nii.gz'}, {'image': './imagesTr/MSWAL_0660_0000.nii.gz', 'label': './labelsTr/MSWAL_0660.nii.gz'}, {'image': './imagesTr/MSWAL_0661_0000.nii.gz', 'label': './labelsTr/MSWAL_0661.nii.gz'}, {'image': './imagesTr/MSWAL_0662_0000.nii.gz', 'label': './labelsTr/MSWAL_0662.nii.gz'}, {'image': './imagesTr/MSWAL_0663_0000.nii.gz', 'label': './labelsTr/MSWAL_0663.nii.gz'}, {'image': './imagesTr/MSWAL_0666_0000.nii.gz', 'label': './labelsTr/MSWAL_0666.nii.gz'}, {'image': './imagesTr/MSWAL_0667_0000.nii.gz', 'label': './labelsTr/MSWAL_0667.nii.gz'}, {'image': './imagesTr/MSWAL_0668_0000.nii.gz', 'label': './labelsTr/MSWAL_0668.nii.gz'}, {'image': './imagesTr/MSWAL_0669_0000.nii.gz', 'label': './labelsTr/MSWAL_0669.nii.gz'}, {'image': './imagesTr/MSWAL_0670_0000.nii.gz', 'label': './labelsTr/MSWAL_0670.nii.gz'}, {'image': './imagesTr/MSWAL_0671_0000.nii.gz', 'label': './labelsTr/MSWAL_0671.nii.gz'}, {'image': './imagesTr/MSWAL_0673_0000.nii.gz', 'label': './labelsTr/MSWAL_0673.nii.gz'}, {'image': './imagesTr/MSWAL_0674_0000.nii.gz', 'label': './labelsTr/MSWAL_0674.nii.gz'}, {'image': './imagesTr/MSWAL_0675_0000.nii.gz', 'label': './labelsTr/MSWAL_0675.nii.gz'}, {'image': './imagesTr/MSWAL_0676_0000.nii.gz', 'label': './labelsTr/MSWAL_0676.nii.gz'}, {'image': './imagesTr/MSWAL_0677_0000.nii.gz', 'label': './labelsTr/MSWAL_0677.nii.gz'}, {'image': './imagesTr/MSWAL_0679_0000.nii.gz', 'label': './labelsTr/MSWAL_0679.nii.gz'}, {'image': './imagesTr/MSWAL_0680_0000.nii.gz', 'label': './labelsTr/MSWAL_0680.nii.gz'}, {'image': './imagesTr/MSWAL_0681_0000.nii.gz', 'label': './labelsTr/MSWAL_0681.nii.gz'}, {'image': './imagesTr/MSWAL_0682_0000.nii.gz', 'label': './labelsTr/MSWAL_0682.nii.gz'}, {'image': './imagesTr/MSWAL_0685_0000.nii.gz', 'label': './labelsTr/MSWAL_0685.nii.gz'}, {'image': './imagesTr/MSWAL_0686_0000.nii.gz', 'label': './labelsTr/MSWAL_0686.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0687_0000.nii.gz', 'label': './labelsTr/MSWAL_0687.nii.gz'}, {'image': './imagesTr/MSWAL_0688_0000.nii.gz', 'label': './labelsTr/MSWAL_0688.nii.gz'}, {'image': './imagesTr/MSWAL_0690_0000.nii.gz', 'label': './labelsTr/MSWAL_0690.nii.gz'}, {'image': './imagesTr/MSWAL_0692_0000.nii.gz', 'label': './labelsTr/MSWAL_0692.nii.gz'}, {'image': './imagesTr/MSWAL_0693_0000.nii.gz', 'label': './labelsTr/MSWAL_0693.nii.gz'}, {'image': './imagesTr/MSWAL_0694_0000.nii.gz', 'label': './labelsTr/MSWAL_0694.nii.gz'}], 'test': [{'image': './imagesTs/MSWAL_0004_0000.nii.gz', 'label': './labelsTs/MSWAL_0004.nii.gz'}, {'image': './imagesTs/MSWAL_0005_0000.nii.gz', 'label': './labelsTs/MSWAL_0005.nii.gz'}, {'image': './imagesTs/MSWAL_0006_0000.nii.gz', 'label': './labelsTs/MSWAL_0006.nii.gz'}, {'image': './imagesTs/MSWAL_0007_0000.nii.gz', 'label': './labelsTs/MSWAL_0007.nii.gz'}, {'image': './imagesTs/MSWAL_0010_0000.nii.gz', 'label': './labelsTs/MSWAL_0010.nii.gz'}, {'image': './imagesTs/MSWAL_0012_0000.nii.gz', 'label': './labelsTs/MSWAL_0012.nii.gz'}, {'image': './imagesTs/MSWAL_0016_0000.nii.gz', 'label': './labelsTs/MSWAL_0016.nii.gz'}, {'image': './imagesTs/MSWAL_0019_0000.nii.gz', 'label': './labelsTs/MSWAL_0019.nii.gz'}, {'image': './imagesTs/MSWAL_0023_0000.nii.gz', 'label': './labelsTs/MSWAL_0023.nii.gz'}, {'image': './imagesTs/MSWAL_0025_0000.nii.gz', 'label': './labelsTs/MSWAL_0025.nii.gz'}, {'image': './imagesTs/MSWAL_0030_0000.nii.gz', 'label': './labelsTs/MSWAL_0030.nii.gz'}, {'image': './imagesTs/MSWAL_0036_0000.nii.gz', 'label': './labelsTs/MSWAL_0036.nii.gz'}, {'image': './imagesTs/MSWAL_0043_0000.nii.gz', 'label': './labelsTs/MSWAL_0043.nii.gz'}, {'image': './imagesTs/MSWAL_0044_0000.nii.gz', 'label': './labelsTs/MSWAL_0044.nii.gz'}, {'image': './imagesTs/MSWAL_0047_0000.nii.gz', 'label': './labelsTs/MSWAL_0047.nii.gz'}, {'image': './imagesTs/MSWAL_0048_0000.nii.gz', 'label': './labelsTs/MSWAL_0048.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0053_0000.nii.gz', 'label': './labelsTs/MSWAL_0053.nii.gz'}, {'image': './imagesTs/MSWAL_0058_0000.nii.gz', 'label': './labelsTs/MSWAL_0058.nii.gz'}, {'image': './imagesTs/MSWAL_0062_0000.nii.gz', 'label': './labelsTs/MSWAL_0062.nii.gz'}, {'image': './imagesTs/MSWAL_0068_0000.nii.gz', 'label': './labelsTs/MSWAL_0068.nii.gz'}, {'image': './imagesTs/MSWAL_0070_0000.nii.gz', 'label': './labelsTs/MSWAL_0070.nii.gz'}, {'image': './imagesTs/MSWAL_0071_0000.nii.gz', 'label': './labelsTs/MSWAL_0071.nii.gz'}, {'image': './imagesTs/MSWAL_0073_0000.nii.gz', 'label': './labelsTs/MSWAL_0073.nii.gz'}, {'image': './imagesTs/MSWAL_0074_0000.nii.gz', 'label': './labelsTs/MSWAL_0074.nii.gz'}, {'image': './imagesTs/MSWAL_0076_0000.nii.gz', 'label': './labelsTs/MSWAL_0076.nii.gz'}, {'image': './imagesTs/MSWAL_0078_0000.nii.gz', 'label': './labelsTs/MSWAL_0078.nii.gz'}, {'image': './imagesTs/MSWAL_0079_0000.nii.gz', 'label': './labelsTs/MSWAL_0079.nii.gz'}, {'image': './imagesTs/MSWAL_0081_0000.nii.gz', 'label': './labelsTs/MSWAL_0081.nii.gz'}, {'image': './imagesTs/MSWAL_0087_0000.nii.gz', 'label': './labelsTs/MSWAL_0087.nii.gz'}, {'image': './imagesTs/MSWAL_0090_0000.nii.gz', 'label': './labelsTs/MSWAL_0090.nii.gz'}, {'image': './imagesTs/MSWAL_0091_0000.nii.gz', 'label': './labelsTs/MSWAL_0091.nii.gz'}, {'image': './imagesTs/MSWAL_0097_0000.nii.gz', 'label': './labelsTs/MSWAL_0097.nii.gz'}, {'image': './imagesTs/MSWAL_0100_0000.nii.gz', 'label': './labelsTs/MSWAL_0100.nii.gz'}, {'image': './imagesTs/MSWAL_0107_0000.nii.gz', 'label': './labelsTs/MSWAL_0107.nii.gz'}, {'image': './imagesTs/MSWAL_0115_0000.nii.gz', 'label': './labelsTs/MSWAL_0115.nii.gz'}, {'image': './imagesTs/MSWAL_0116_0000.nii.gz', 'label': './labelsTs/MSWAL_0116.nii.gz'}, {'image': './imagesTs/MSWAL_0118_0000.nii.gz', 'label': './labelsTs/MSWAL_0118.nii.gz'}, {'image': './imagesTs/MSWAL_0121_0000.nii.gz', 'label': './labelsTs/MSWAL_0121.nii.gz'}, {'image': './imagesTs/MSWAL_0123_0000.nii.gz', 
'label': './labelsTs/MSWAL_0123.nii.gz'}, {'image': './imagesTs/MSWAL_0131_0000.nii.gz', 'label': './labelsTs/MSWAL_0131.nii.gz'}, {'image': './imagesTs/MSWAL_0135_0000.nii.gz', 'label': './labelsTs/MSWAL_0135.nii.gz'}, {'image': './imagesTs/MSWAL_0137_0000.nii.gz', 'label': './labelsTs/MSWAL_0137.nii.gz'}, {'image': './imagesTs/MSWAL_0144_0000.nii.gz', 'label': './labelsTs/MSWAL_0144.nii.gz'}, {'image': './imagesTs/MSWAL_0146_0000.nii.gz', 'label': './labelsTs/MSWAL_0146.nii.gz'}, {'image': './imagesTs/MSWAL_0153_0000.nii.gz', 'label': './labelsTs/MSWAL_0153.nii.gz'}, {'image': './imagesTs/MSWAL_0154_0000.nii.gz', 'label': './labelsTs/MSWAL_0154.nii.gz'}, {'image': './imagesTs/MSWAL_0155_0000.nii.gz', 'label': './labelsTs/MSWAL_0155.nii.gz'}, {'image': './imagesTs/MSWAL_0156_0000.nii.gz', 'label': './labelsTs/MSWAL_0156.nii.gz'}, {'image': './imagesTs/MSWAL_0158_0000.nii.gz', 'label': './labelsTs/MSWAL_0158.nii.gz'}, {'image': './imagesTs/MSWAL_0160_0000.nii.gz', 'label': './labelsTs/MSWAL_0160.nii.gz'}, {'image': './imagesTs/MSWAL_0161_0000.nii.gz', 'label': './labelsTs/MSWAL_0161.nii.gz'}, {'image': './imagesTs/MSWAL_0164_0000.nii.gz', 'label': './labelsTs/MSWAL_0164.nii.gz'}, {'image': './imagesTs/MSWAL_0181_0000.nii.gz', 'label': './labelsTs/MSWAL_0181.nii.gz'}, {'image': './imagesTs/MSWAL_0190_0000.nii.gz', 'label': './labelsTs/MSWAL_0190.nii.gz'}, {'image': './imagesTs/MSWAL_0191_0000.nii.gz', 'label': './labelsTs/MSWAL_0191.nii.gz'}, {'image': './imagesTs/MSWAL_0192_0000.nii.gz', 'label': './labelsTs/MSWAL_0192.nii.gz'}, {'image': './imagesTs/MSWAL_0196_0000.nii.gz', 'label': './labelsTs/MSWAL_0196.nii.gz'}, {'image': './imagesTs/MSWAL_0197_0000.nii.gz', 'label': './labelsTs/MSWAL_0197.nii.gz'}, {'image': './imagesTs/MSWAL_0198_0000.nii.gz', 'label': './labelsTs/MSWAL_0198.nii.gz'}, {'image': './imagesTs/MSWAL_0200_0000.nii.gz', 'label': './labelsTs/MSWAL_0200.nii.gz'}, {'image': './imagesTs/MSWAL_0205_0000.nii.gz', 'label': './labelsTs/MSWAL_0205.nii.gz'}, 
{'image': './imagesTs/MSWAL_0206_0000.nii.gz', 'label': './labelsTs/MSWAL_0206.nii.gz'}, {'image': './imagesTs/MSWAL_0210_0000.nii.gz', 'label': './labelsTs/MSWAL_0210.nii.gz'}, {'image': './imagesTs/MSWAL_0211_0000.nii.gz', 'label': './labelsTs/MSWAL_0211.nii.gz'}, {'image': './imagesTs/MSWAL_0212_0000.nii.gz', 'label': './labelsTs/MSWAL_0212.nii.gz'}, {'image': './imagesTs/MSWAL_0213_0000.nii.gz', 'label': './labelsTs/MSWAL_0213.nii.gz'}, {'image': './imagesTs/MSWAL_0215_0000.nii.gz', 'label': './labelsTs/MSWAL_0215.nii.gz'}, {'image': './imagesTs/MSWAL_0216_0000.nii.gz', 'label': './labelsTs/MSWAL_0216.nii.gz'}, {'image': './imagesTs/MSWAL_0231_0000.nii.gz', 'label': './labelsTs/MSWAL_0231.nii.gz'}, {'image': './imagesTs/MSWAL_0232_0000.nii.gz', 'label': './labelsTs/MSWAL_0232.nii.gz'}, {'image': './imagesTs/MSWAL_0235_0000.nii.gz', 'label': './labelsTs/MSWAL_0235.nii.gz'}, {'image': './imagesTs/MSWAL_0236_0000.nii.gz', 'label': './labelsTs/MSWAL_0236.nii.gz'}, {'image': './imagesTs/MSWAL_0237_0000.nii.gz', 'label': './labelsTs/MSWAL_0237.nii.gz'}, {'image': './imagesTs/MSWAL_0239_0000.nii.gz', 'label': './labelsTs/MSWAL_0239.nii.gz'}, {'image': './imagesTs/MSWAL_0240_0000.nii.gz', 'label': './labelsTs/MSWAL_0240.nii.gz'}, {'image': './imagesTs/MSWAL_0244_0000.nii.gz', 'label': './labelsTs/MSWAL_0244.nii.gz'}, {'image': './imagesTs/MSWAL_0249_0000.nii.gz', 'label': './labelsTs/MSWAL_0249.nii.gz'}, {'image': './imagesTs/MSWAL_0250_0000.nii.gz', 'label': './labelsTs/MSWAL_0250.nii.gz'}, {'image': './imagesTs/MSWAL_0266_0000.nii.gz', 'label': './labelsTs/MSWAL_0266.nii.gz'}, {'image': './imagesTs/MSWAL_0268_0000.nii.gz', 'label': './labelsTs/MSWAL_0268.nii.gz'}, {'image': './imagesTs/MSWAL_0269_0000.nii.gz', 'label': './labelsTs/MSWAL_0269.nii.gz'}, {'image': './imagesTs/MSWAL_0280_0000.nii.gz', 'label': './labelsTs/MSWAL_0280.nii.gz'}, {'image': './imagesTs/MSWAL_0286_0000.nii.gz', 'label': './labelsTs/MSWAL_0286.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0287_0000.nii.gz', 'label': './labelsTs/MSWAL_0287.nii.gz'}, {'image': './imagesTs/MSWAL_0291_0000.nii.gz', 'label': './labelsTs/MSWAL_0291.nii.gz'}, {'image': './imagesTs/MSWAL_0292_0000.nii.gz', 'label': './labelsTs/MSWAL_0292.nii.gz'}, {'image': './imagesTs/MSWAL_0294_0000.nii.gz', 'label': './labelsTs/MSWAL_0294.nii.gz'}, {'image': './imagesTs/MSWAL_0295_0000.nii.gz', 'label': './labelsTs/MSWAL_0295.nii.gz'}, {'image': './imagesTs/MSWAL_0298_0000.nii.gz', 'label': './labelsTs/MSWAL_0298.nii.gz'}, {'image': './imagesTs/MSWAL_0299_0000.nii.gz', 'label': './labelsTs/MSWAL_0299.nii.gz'}, {'image': './imagesTs/MSWAL_0300_0000.nii.gz', 'label': './labelsTs/MSWAL_0300.nii.gz'}, {'image': './imagesTs/MSWAL_0304_0000.nii.gz', 'label': './labelsTs/MSWAL_0304.nii.gz'}, {'image': './imagesTs/MSWAL_0305_0000.nii.gz', 'label': './labelsTs/MSWAL_0305.nii.gz'}, {'image': './imagesTs/MSWAL_0309_0000.nii.gz', 'label': './labelsTs/MSWAL_0309.nii.gz'}, {'image': './imagesTs/MSWAL_0310_0000.nii.gz', 'label': './labelsTs/MSWAL_0310.nii.gz'}, {'image': './imagesTs/MSWAL_0315_0000.nii.gz', 'label': './labelsTs/MSWAL_0315.nii.gz'}, {'image': './imagesTs/MSWAL_0319_0000.nii.gz', 'label': './labelsTs/MSWAL_0319.nii.gz'}, {'image': './imagesTs/MSWAL_0321_0000.nii.gz', 'label': './labelsTs/MSWAL_0321.nii.gz'}, {'image': './imagesTs/MSWAL_0322_0000.nii.gz', 'label': './labelsTs/MSWAL_0322.nii.gz'}, {'image': './imagesTs/MSWAL_0325_0000.nii.gz', 'label': './labelsTs/MSWAL_0325.nii.gz'}, {'image': './imagesTs/MSWAL_0329_0000.nii.gz', 'label': './labelsTs/MSWAL_0329.nii.gz'}, {'image': './imagesTs/MSWAL_0339_0000.nii.gz', 'label': './labelsTs/MSWAL_0339.nii.gz'}, {'image': './imagesTs/MSWAL_0340_0000.nii.gz', 'label': './labelsTs/MSWAL_0340.nii.gz'}, {'image': './imagesTs/MSWAL_0347_0000.nii.gz', 'label': './labelsTs/MSWAL_0347.nii.gz'}, {'image': './imagesTs/MSWAL_0349_0000.nii.gz', 'label': './labelsTs/MSWAL_0349.nii.gz'}, {'image': './imagesTs/MSWAL_0350_0000.nii.gz', 
'label': './labelsTs/MSWAL_0350.nii.gz'}, {'image': './imagesTs/MSWAL_0351_0000.nii.gz', 'label': './labelsTs/MSWAL_0351.nii.gz'}, {'image': './imagesTs/MSWAL_0352_0000.nii.gz', 'label': './labelsTs/MSWAL_0352.nii.gz'}, {'image': './imagesTs/MSWAL_0358_0000.nii.gz', 'label': './labelsTs/MSWAL_0358.nii.gz'}, {'image': './imagesTs/MSWAL_0359_0000.nii.gz', 'label': './labelsTs/MSWAL_0359.nii.gz'}, {'image': './imagesTs/MSWAL_0364_0000.nii.gz', 'label': './labelsTs/MSWAL_0364.nii.gz'}, {'image': './imagesTs/MSWAL_0367_0000.nii.gz', 'label': './labelsTs/MSWAL_0367.nii.gz'}, {'image': './imagesTs/MSWAL_0368_0000.nii.gz', 'label': './labelsTs/MSWAL_0368.nii.gz'}, {'image': './imagesTs/MSWAL_0371_0000.nii.gz', 'label': './labelsTs/MSWAL_0371.nii.gz'}, {'image': './imagesTs/MSWAL_0372_0000.nii.gz', 'label': './labelsTs/MSWAL_0372.nii.gz'}, {'image': './imagesTs/MSWAL_0377_0000.nii.gz', 'label': './labelsTs/MSWAL_0377.nii.gz'}, {'image': './imagesTs/MSWAL_0383_0000.nii.gz', 'label': './labelsTs/MSWAL_0383.nii.gz'}, {'image': './imagesTs/MSWAL_0384_0000.nii.gz', 'label': './labelsTs/MSWAL_0384.nii.gz'}, {'image': './imagesTs/MSWAL_0385_0000.nii.gz', 'label': './labelsTs/MSWAL_0385.nii.gz'}, {'image': './imagesTs/MSWAL_0386_0000.nii.gz', 'label': './labelsTs/MSWAL_0386.nii.gz'}, {'image': './imagesTs/MSWAL_0394_0000.nii.gz', 'label': './labelsTs/MSWAL_0394.nii.gz'}, {'image': './imagesTs/MSWAL_0395_0000.nii.gz', 'label': './labelsTs/MSWAL_0395.nii.gz'}, {'image': './imagesTs/MSWAL_0396_0000.nii.gz', 'label': './labelsTs/MSWAL_0396.nii.gz'}, {'image': './imagesTs/MSWAL_0401_0000.nii.gz', 'label': './labelsTs/MSWAL_0401.nii.gz'}, {'image': './imagesTs/MSWAL_0404_0000.nii.gz', 'label': './labelsTs/MSWAL_0404.nii.gz'}, {'image': './imagesTs/MSWAL_0405_0000.nii.gz', 'label': './labelsTs/MSWAL_0405.nii.gz'}, {'image': './imagesTs/MSWAL_0406_0000.nii.gz', 'label': './labelsTs/MSWAL_0406.nii.gz'}, {'image': './imagesTs/MSWAL_0408_0000.nii.gz', 'label': './labelsTs/MSWAL_0408.nii.gz'}, 
{'image': './imagesTs/MSWAL_0413_0000.nii.gz', 'label': './labelsTs/MSWAL_0413.nii.gz'}, {'image': './imagesTs/MSWAL_0424_0000.nii.gz', 'label': './labelsTs/MSWAL_0424.nii.gz'}, {'image': './imagesTs/MSWAL_0433_0000.nii.gz', 'label': './labelsTs/MSWAL_0433.nii.gz'}, {'image': './imagesTs/MSWAL_0441_0000.nii.gz', 'label': './labelsTs/MSWAL_0441.nii.gz'}, {'image': './imagesTs/MSWAL_0443_0000.nii.gz', 'label': './labelsTs/MSWAL_0443.nii.gz'}, {'image': './imagesTs/MSWAL_0444_0000.nii.gz', 'label': './labelsTs/MSWAL_0444.nii.gz'}, {'image': './imagesTs/MSWAL_0445_0000.nii.gz', 'label': './labelsTs/MSWAL_0445.nii.gz'}, {'image': './imagesTs/MSWAL_0448_0000.nii.gz', 'label': './labelsTs/MSWAL_0448.nii.gz'}, {'image': './imagesTs/MSWAL_0449_0000.nii.gz', 'label': './labelsTs/MSWAL_0449.nii.gz'}, {'image': './imagesTs/MSWAL_0450_0000.nii.gz', 'label': './labelsTs/MSWAL_0450.nii.gz'}, {'image': './imagesTs/MSWAL_0451_0000.nii.gz', 'label': './labelsTs/MSWAL_0451.nii.gz'}, {'image': './imagesTs/MSWAL_0454_0000.nii.gz', 'label': './labelsTs/MSWAL_0454.nii.gz'}, {'image': './imagesTs/MSWAL_0456_0000.nii.gz', 'label': './labelsTs/MSWAL_0456.nii.gz'}, {'image': './imagesTs/MSWAL_0458_0000.nii.gz', 'label': './labelsTs/MSWAL_0458.nii.gz'}, {'image': './imagesTs/MSWAL_0459_0000.nii.gz', 'label': './labelsTs/MSWAL_0459.nii.gz'}, {'image': './imagesTs/MSWAL_0462_0000.nii.gz', 'label': './labelsTs/MSWAL_0462.nii.gz'}, {'image': './imagesTs/MSWAL_0467_0000.nii.gz', 'label': './labelsTs/MSWAL_0467.nii.gz'}, {'image': './imagesTs/MSWAL_0469_0000.nii.gz', 'label': './labelsTs/MSWAL_0469.nii.gz'}, {'image': './imagesTs/MSWAL_0472_0000.nii.gz', 'label': './labelsTs/MSWAL_0472.nii.gz'}, {'image': './imagesTs/MSWAL_0478_0000.nii.gz', 'label': './labelsTs/MSWAL_0478.nii.gz'}, {'image': './imagesTs/MSWAL_0481_0000.nii.gz', 'label': './labelsTs/MSWAL_0481.nii.gz'}, {'image': './imagesTs/MSWAL_0494_0000.nii.gz', 'label': './labelsTs/MSWAL_0494.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0496_0000.nii.gz', 'label': './labelsTs/MSWAL_0496.nii.gz'}, {'image': './imagesTs/MSWAL_0499_0000.nii.gz', 'label': './labelsTs/MSWAL_0499.nii.gz'}, {'image': './imagesTs/MSWAL_0502_0000.nii.gz', 'label': './labelsTs/MSWAL_0502.nii.gz'}, {'image': './imagesTs/MSWAL_0503_0000.nii.gz', 'label': './labelsTs/MSWAL_0503.nii.gz'}, {'image': './imagesTs/MSWAL_0511_0000.nii.gz', 'label': './labelsTs/MSWAL_0511.nii.gz'}, {'image': './imagesTs/MSWAL_0513_0000.nii.gz', 'label': './labelsTs/MSWAL_0513.nii.gz'}, {'image': './imagesTs/MSWAL_0514_0000.nii.gz', 'label': './labelsTs/MSWAL_0514.nii.gz'}, {'image': './imagesTs/MSWAL_0515_0000.nii.gz', 'label': './labelsTs/MSWAL_0515.nii.gz'}, {'image': './imagesTs/MSWAL_0517_0000.nii.gz', 'label': './labelsTs/MSWAL_0517.nii.gz'}, {'image': './imagesTs/MSWAL_0520_0000.nii.gz', 'label': './labelsTs/MSWAL_0520.nii.gz'}, {'image': './imagesTs/MSWAL_0525_0000.nii.gz', 'label': './labelsTs/MSWAL_0525.nii.gz'}, {'image': './imagesTs/MSWAL_0528_0000.nii.gz', 'label': './labelsTs/MSWAL_0528.nii.gz'}, {'image': './imagesTs/MSWAL_0529_0000.nii.gz', 'label': './labelsTs/MSWAL_0529.nii.gz'}, {'image': './imagesTs/MSWAL_0532_0000.nii.gz', 'label': './labelsTs/MSWAL_0532.nii.gz'}, {'image': './imagesTs/MSWAL_0533_0000.nii.gz', 'label': './labelsTs/MSWAL_0533.nii.gz'}, {'image': './imagesTs/MSWAL_0537_0000.nii.gz', 'label': './labelsTs/MSWAL_0537.nii.gz'}, {'image': './imagesTs/MSWAL_0541_0000.nii.gz', 'label': './labelsTs/MSWAL_0541.nii.gz'}, {'image': './imagesTs/MSWAL_0543_0000.nii.gz', 'label': './labelsTs/MSWAL_0543.nii.gz'}, {'image': './imagesTs/MSWAL_0560_0000.nii.gz', 'label': './labelsTs/MSWAL_0560.nii.gz'}, {'image': './imagesTs/MSWAL_0565_0000.nii.gz', 'label': './labelsTs/MSWAL_0565.nii.gz'}, {'image': './imagesTs/MSWAL_0569_0000.nii.gz', 'label': './labelsTs/MSWAL_0569.nii.gz'}, {'image': './imagesTs/MSWAL_0570_0000.nii.gz', 'label': './labelsTs/MSWAL_0570.nii.gz'}, {'image': './imagesTs/MSWAL_0572_0000.nii.gz', 
'label': './labelsTs/MSWAL_0572.nii.gz'}, {'image': './imagesTs/MSWAL_0576_0000.nii.gz', 'label': './labelsTs/MSWAL_0576.nii.gz'}, {'image': './imagesTs/MSWAL_0585_0000.nii.gz', 'label': './labelsTs/MSWAL_0585.nii.gz'}, {'image': './imagesTs/MSWAL_0587_0000.nii.gz', 'label': './labelsTs/MSWAL_0587.nii.gz'}, {'image': './imagesTs/MSWAL_0588_0000.nii.gz', 'label': './labelsTs/MSWAL_0588.nii.gz'}, {'image': './imagesTs/MSWAL_0589_0000.nii.gz', 'label': './labelsTs/MSWAL_0589.nii.gz'}, {'image': './imagesTs/MSWAL_0594_0000.nii.gz', 'label': './labelsTs/MSWAL_0594.nii.gz'}, {'image': './imagesTs/MSWAL_0603_0000.nii.gz', 'label': './labelsTs/MSWAL_0603.nii.gz'}, {'image': './imagesTs/MSWAL_0606_0000.nii.gz', 'label': './labelsTs/MSWAL_0606.nii.gz'}, {'image': './imagesTs/MSWAL_0607_0000.nii.gz', 'label': './labelsTs/MSWAL_0607.nii.gz'}, {'image': './imagesTs/MSWAL_0609_0000.nii.gz', 'label': './labelsTs/MSWAL_0609.nii.gz'}, {'image': './imagesTs/MSWAL_0610_0000.nii.gz', 'label': './labelsTs/MSWAL_0610.nii.gz'}, {'image': './imagesTs/MSWAL_0611_0000.nii.gz', 'label': './labelsTs/MSWAL_0611.nii.gz'}, {'image': './imagesTs/MSWAL_0613_0000.nii.gz', 'label': './labelsTs/MSWAL_0613.nii.gz'}, {'image': './imagesTs/MSWAL_0618_0000.nii.gz', 'label': './labelsTs/MSWAL_0618.nii.gz'}, {'image': './imagesTs/MSWAL_0619_0000.nii.gz', 'label': './labelsTs/MSWAL_0619.nii.gz'}, {'image': './imagesTs/MSWAL_0620_0000.nii.gz', 'label': './labelsTs/MSWAL_0620.nii.gz'}, {'image': './imagesTs/MSWAL_0622_0000.nii.gz', 'label': './labelsTs/MSWAL_0622.nii.gz'}, {'image': './imagesTs/MSWAL_0624_0000.nii.gz', 'label': './labelsTs/MSWAL_0624.nii.gz'}, {'image': './imagesTs/MSWAL_0631_0000.nii.gz', 'label': './labelsTs/MSWAL_0631.nii.gz'}, {'image': './imagesTs/MSWAL_0633_0000.nii.gz', 'label': './labelsTs/MSWAL_0633.nii.gz'}, {'image': './imagesTs/MSWAL_0634_0000.nii.gz', 'label': './labelsTs/MSWAL_0634.nii.gz'}, {'image': './imagesTs/MSWAL_0637_0000.nii.gz', 'label': './labelsTs/MSWAL_0637.nii.gz'}, 
{'image': './imagesTs/MSWAL_0639_0000.nii.gz', 'label': './labelsTs/MSWAL_0639.nii.gz'}, {'image': './imagesTs/MSWAL_0642_0000.nii.gz', 'label': './labelsTs/MSWAL_0642.nii.gz'}, {'image': './imagesTs/MSWAL_0645_0000.nii.gz', 'label': './labelsTs/MSWAL_0645.nii.gz'}, {'image': './imagesTs/MSWAL_0647_0000.nii.gz', 'label': './labelsTs/MSWAL_0647.nii.gz'}, {'image': './imagesTs/MSWAL_0652_0000.nii.gz', 'label': './labelsTs/MSWAL_0652.nii.gz'}, {'image': './imagesTs/MSWAL_0657_0000.nii.gz', 'label': './labelsTs/MSWAL_0657.nii.gz'}, {'image': './imagesTs/MSWAL_0659_0000.nii.gz', 'label': './labelsTs/MSWAL_0659.nii.gz'}, {'image': './imagesTs/MSWAL_0664_0000.nii.gz', 'label': './labelsTs/MSWAL_0664.nii.gz'}, {'image': './imagesTs/MSWAL_0665_0000.nii.gz', 'label': './labelsTs/MSWAL_0665.nii.gz'}, {'image': './imagesTs/MSWAL_0672_0000.nii.gz', 'label': './labelsTs/MSWAL_0672.nii.gz'}, {'image': './imagesTs/MSWAL_0678_0000.nii.gz', 'label': './labelsTs/MSWAL_0678.nii.gz'}, {'image': './imagesTs/MSWAL_0683_0000.nii.gz', 'label': './labelsTs/MSWAL_0683.nii.gz'}, {'image': './imagesTs/MSWAL_0684_0000.nii.gz', 'label': './labelsTs/MSWAL_0684.nii.gz'}, {'image': './imagesTs/MSWAL_0689_0000.nii.gz', 'label': './labelsTs/MSWAL_0689.nii.gz'}, {'image': './imagesTs/MSWAL_0691_0000.nii.gz', 'label': './labelsTs/MSWAL_0691.nii.gz'}]}, 'unpack_dataset': True, 'device': device(type='cuda')}", + "network": "OptimizedModule", + "num_epochs": "1000", + "num_input_channels": "1", + "num_iterations_per_epoch": "250", + "num_val_iterations_per_epoch": "50", + "optimizer": "SGD (\nParameter Group 0\n dampening: 0\n differentiable: False\n foreach: None\n fused: None\n initial_lr: 0.01\n lr: 0.01\n maximize: False\n momentum: 0.99\n nesterov: True\n weight_decay: 3e-05\n)", + "output_folder": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2", + "output_folder_base": 
"/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres", + "oversample_foreground_percent": "0.33", + "plans_manager": "{'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'configurations': {'2d': {'data_identifier': 'nnUNetPlans_2d', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 35, 'patch_size': [512, 512], 'median_image_size_in_voxels': [512.0, 512.0], 'spacing': [0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 8, 'features_per_stage': [32, 64, 128, 256, 512, 512, 512, 512], 'conv_op': 'torch.nn.modules.conv.Conv2d', 'kernel_sizes': [[3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3]], 'strides': [[1, 1], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm2d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, 
'_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_lowres': {'data_identifier': 'nnUNetResEncUNetLPlans_3d_lowres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [190, 381, 381], 'spacing': [1.6798954741801528, 1.0079372845080916, 1.0079372845080916], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': False, 'next_stage': '3d_cascade_fullres'}, '3d_fullres': {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 
'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_cascade_fullres': {'inherits_from': '3d_fullres', 'previous_stage': '3d_lowres'}}, 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}}", + "preprocessed_dataset_folder": "/data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/nnUNetPlans_3d_fullres", + 
"preprocessed_dataset_folder_base": "/data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL", + "save_every": "50", + "torch_version": "2.5.0+cu121", + "unpack_dataset": "True", + "was_initialized": "True", + "weight_decay": "3e-05" +} \ No newline at end of file diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/progress.png b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/progress.png new file mode 100644 index 0000000000000000000000000000000000000000..5d2a49934e01aebe570d56be289c3100dcf8691a --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/progress.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:927c3e73a704dd39720abfa5351a03ea5b8b1691f38a5f7fd05834571b7034b9 +size 1163846 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/training_log_2026_4_8_15_28_32.txt b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/training_log_2026_4_8_15_28_32.txt new file mode 100644 index 0000000000000000000000000000000000000000..9bf35666ebf6df7c10711e64b8ab54d84de328c8 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/training_log_2026_4_8_15_28_32.txt @@ -0,0 +1,11 @@ + +####################################################################### +Please cite the following paper when using nnU-Net: +Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. Nature methods, 18(2), 203-211. +####################################################################### + +2026-04-08 15:28:32.557918: do_dummy_2d_data_aug: False +2026-04-08 15:28:32.605994: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-08 15:28:32.609672: The split file contains 5 splits. 
+2026-04-08 15:28:32.611791: Desired fold for training: 2 +2026-04-08 15:28:32.613461: This split has 387 training and 97 validation cases. diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/training_log_2026_4_8_15_55_49.txt b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/training_log_2026_4_8_15_55_49.txt new file mode 100644 index 0000000000000000000000000000000000000000..e7b641a35af689b82024196f1511f9c3f58a672c --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_2/training_log_2026_4_8_15_55_49.txt @@ -0,0 +1,7337 @@ + +####################################################################### +Please cite the following paper when using nnU-Net: +Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. Nature methods, 18(2), 203-211. +####################################################################### + +2026-04-08 15:55:49.684839: do_dummy_2d_data_aug: False +2026-04-08 15:55:49.826821: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-08 15:55:49.830982: The split file contains 5 splits. +2026-04-08 15:55:49.832647: Desired fold for training: 2 +2026-04-08 15:55:49.833853: This split has 387 training and 97 validation cases. +2026-04-08 15:55:57.633119: Using torch.compile... 
+ +This is the configuration used by this training: +Configuration name: 3d_fullres + {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True} + +These are the global plan.json settings: + {'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 
'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}} + +2026-04-08 15:55:58.948042: unpacking dataset... +2026-04-08 15:56:05.716245: unpacking done... +2026-04-08 15:56:05.737395: Unable to plot network architecture: nnUNet_compile is enabled! +2026-04-08 15:56:05.795969: +2026-04-08 15:56:05.797714: Epoch 0 +2026-04-08 15:56:05.799463: Current learning rate: 0.01 +2026-04-08 16:00:02.630732: train_loss 0.1846 +2026-04-08 16:00:02.636461: val_loss 0.06 +2026-04-08 16:00:02.638552: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:00:02.642105: Epoch time: 236.84 s +2026-04-08 16:00:02.645235: Yayy! New best EMA pseudo Dice: 0.0 +2026-04-08 16:00:05.479555: +2026-04-08 16:00:05.481367: Epoch 1 +2026-04-08 16:00:05.482647: Current learning rate: 0.00999 +2026-04-08 16:01:47.229754: train_loss 0.0775 +2026-04-08 16:01:47.236583: val_loss 0.0498 +2026-04-08 16:01:47.238399: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:01:47.241532: Epoch time: 101.75 s +2026-04-08 16:01:48.304128: +2026-04-08 16:01:48.306158: Epoch 2 +2026-04-08 16:01:48.307824: Current learning rate: 0.00998 +2026-04-08 16:03:29.646310: train_loss 0.0645 +2026-04-08 16:03:29.656206: val_loss 0.0533 +2026-04-08 16:03:29.658428: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:03:29.660442: Epoch time: 101.35 s +2026-04-08 16:03:30.760572: +2026-04-08 16:03:30.762775: Epoch 3 +2026-04-08 16:03:30.764277: Current learning rate: 0.00997 +2026-04-08 16:05:12.359827: train_loss 0.0584 +2026-04-08 16:05:12.368515: val_loss 0.0407 +2026-04-08 16:05:12.370662: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:05:12.372673: Epoch time: 101.6 s +2026-04-08 
16:05:13.467230: +2026-04-08 16:05:13.468749: Epoch 4 +2026-04-08 16:05:13.470631: Current learning rate: 0.00996 +2026-04-08 16:06:54.988762: train_loss 0.0516 +2026-04-08 16:06:55.001237: val_loss 0.0541 +2026-04-08 16:06:55.006440: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:06:55.010601: Epoch time: 101.52 s +2026-04-08 16:06:56.233441: +2026-04-08 16:06:56.238562: Epoch 5 +2026-04-08 16:06:56.258869: Current learning rate: 0.00995 +2026-04-08 16:08:38.980795: train_loss 0.0527 +2026-04-08 16:08:38.988058: val_loss 0.0393 +2026-04-08 16:08:38.989880: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:08:38.992290: Epoch time: 102.75 s +2026-04-08 16:08:40.063224: +2026-04-08 16:08:40.065578: Epoch 6 +2026-04-08 16:08:40.067131: Current learning rate: 0.00995 +2026-04-08 16:10:21.646041: train_loss 0.0514 +2026-04-08 16:10:21.651804: val_loss 0.0399 +2026-04-08 16:10:21.654457: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:10:21.656889: Epoch time: 101.58 s +2026-04-08 16:10:22.750712: +2026-04-08 16:10:22.752632: Epoch 7 +2026-04-08 16:10:22.754183: Current learning rate: 0.00994 +2026-04-08 16:12:04.627841: train_loss 0.0606 +2026-04-08 16:12:04.634768: val_loss 0.0405 +2026-04-08 16:12:04.637888: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:12:04.639650: Epoch time: 101.88 s +2026-04-08 16:12:05.752108: +2026-04-08 16:12:05.753803: Epoch 8 +2026-04-08 16:12:05.755643: Current learning rate: 0.00993 +2026-04-08 16:13:47.914842: train_loss 0.0542 +2026-04-08 16:13:47.923522: val_loss 0.0363 +2026-04-08 16:13:47.925449: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:13:47.929904: Epoch time: 102.17 s +2026-04-08 16:13:49.020734: +2026-04-08 16:13:49.022791: Epoch 9 +2026-04-08 16:13:49.024583: Current learning rate: 0.00992 +2026-04-08 16:15:31.052562: train_loss 0.0543 +2026-04-08 16:15:31.059811: val_loss 0.0417 +2026-04-08 16:15:31.062804: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0] +2026-04-08 16:15:31.065203: Epoch time: 102.03 s +2026-04-08 16:15:32.151212: +2026-04-08 16:15:32.153028: Epoch 10 +2026-04-08 16:15:32.154629: Current learning rate: 0.00991 +2026-04-08 16:17:13.869867: train_loss 0.0545 +2026-04-08 16:17:13.880204: val_loss 0.0457 +2026-04-08 16:17:13.882835: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:17:13.887550: Epoch time: 101.72 s +2026-04-08 16:17:14.945672: +2026-04-08 16:17:14.947245: Epoch 11 +2026-04-08 16:17:14.948548: Current learning rate: 0.0099 +2026-04-08 16:18:56.672856: train_loss 0.0562 +2026-04-08 16:18:56.680002: val_loss 0.0432 +2026-04-08 16:18:56.681947: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:18:56.683635: Epoch time: 101.73 s +2026-04-08 16:18:57.726286: +2026-04-08 16:18:57.727998: Epoch 12 +2026-04-08 16:18:57.729965: Current learning rate: 0.00989 +2026-04-08 16:20:40.456145: train_loss 0.0551 +2026-04-08 16:20:40.465082: val_loss 0.0666 +2026-04-08 16:20:40.466965: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:20:40.469178: Epoch time: 102.73 s +2026-04-08 16:20:41.542823: +2026-04-08 16:20:41.544408: Epoch 13 +2026-04-08 16:20:41.546244: Current learning rate: 0.00988 +2026-04-08 16:22:24.599322: train_loss 0.063 +2026-04-08 16:22:24.607436: val_loss 0.0394 +2026-04-08 16:22:24.610777: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:22:24.614579: Epoch time: 103.06 s +2026-04-08 16:22:25.671833: +2026-04-08 16:22:25.681375: Epoch 14 +2026-04-08 16:22:25.683764: Current learning rate: 0.00987 +2026-04-08 16:24:08.114678: train_loss 0.0592 +2026-04-08 16:24:08.124618: val_loss 0.0342 +2026-04-08 16:24:08.126626: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:24:08.129196: Epoch time: 102.44 s +2026-04-08 16:24:09.235329: +2026-04-08 16:24:09.239555: Epoch 15 +2026-04-08 16:24:09.244067: Current learning rate: 0.00986 +2026-04-08 16:25:51.536138: train_loss 0.053 +2026-04-08 16:25:51.542370: val_loss 
0.0398 +2026-04-08 16:25:51.544022: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:25:51.546164: Epoch time: 102.3 s +2026-04-08 16:25:52.651662: +2026-04-08 16:25:52.653296: Epoch 16 +2026-04-08 16:25:52.655065: Current learning rate: 0.00986 +2026-04-08 16:27:36.735077: train_loss 0.048 +2026-04-08 16:27:36.745296: val_loss 0.0476 +2026-04-08 16:27:36.747913: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:27:36.749902: Epoch time: 104.09 s +2026-04-08 16:27:37.886916: +2026-04-08 16:27:37.888685: Epoch 17 +2026-04-08 16:27:37.890212: Current learning rate: 0.00985 +2026-04-08 16:29:20.121344: train_loss 0.0496 +2026-04-08 16:29:20.130950: val_loss 0.0284 +2026-04-08 16:29:20.132719: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:29:20.134915: Epoch time: 102.24 s +2026-04-08 16:29:22.521121: +2026-04-08 16:29:22.523479: Epoch 18 +2026-04-08 16:29:22.525213: Current learning rate: 0.00984 +2026-04-08 16:31:06.060899: train_loss 0.055 +2026-04-08 16:31:06.069236: val_loss 0.0284 +2026-04-08 16:31:06.073184: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:31:06.077613: Epoch time: 103.54 s +2026-04-08 16:31:07.172629: +2026-04-08 16:31:07.174399: Epoch 19 +2026-04-08 16:31:07.175766: Current learning rate: 0.00983 +2026-04-08 16:32:49.996801: train_loss 0.0319 +2026-04-08 16:32:50.011835: val_loss 0.0431 +2026-04-08 16:32:50.013851: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:32:50.016915: Epoch time: 102.83 s +2026-04-08 16:32:51.096678: +2026-04-08 16:32:51.098926: Epoch 20 +2026-04-08 16:32:51.100460: Current learning rate: 0.00982 +2026-04-08 16:34:33.585078: train_loss 0.0351 +2026-04-08 16:34:33.594700: val_loss 0.0198 +2026-04-08 16:34:33.596341: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:34:33.598252: Epoch time: 102.49 s +2026-04-08 16:34:34.719921: +2026-04-08 16:34:34.723093: Epoch 21 +2026-04-08 16:34:34.725369: Current learning rate: 0.00981 +2026-04-08 
16:36:17.991890: train_loss 0.0477 +2026-04-08 16:36:17.998330: val_loss 0.0483 +2026-04-08 16:36:18.000178: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:36:18.002447: Epoch time: 103.28 s +2026-04-08 16:36:19.016563: +2026-04-08 16:36:19.020204: Epoch 22 +2026-04-08 16:36:19.026149: Current learning rate: 0.0098 +2026-04-08 16:38:01.546047: train_loss 0.0399 +2026-04-08 16:38:01.554347: val_loss 0.039 +2026-04-08 16:38:01.556633: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:38:01.558798: Epoch time: 102.53 s +2026-04-08 16:38:02.592194: +2026-04-08 16:38:02.593688: Epoch 23 +2026-04-08 16:38:02.595527: Current learning rate: 0.00979 +2026-04-08 16:39:46.312006: train_loss 0.0606 +2026-04-08 16:39:46.320707: val_loss 0.048 +2026-04-08 16:39:46.323195: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:39:46.326190: Epoch time: 103.72 s +2026-04-08 16:39:47.410885: +2026-04-08 16:39:47.413438: Epoch 24 +2026-04-08 16:39:47.415461: Current learning rate: 0.00978 +2026-04-08 16:41:30.581201: train_loss 0.0479 +2026-04-08 16:41:30.587849: val_loss 0.0369 +2026-04-08 16:41:30.589968: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:41:30.591902: Epoch time: 103.17 s +2026-04-08 16:41:31.695360: +2026-04-08 16:41:31.697213: Epoch 25 +2026-04-08 16:41:31.699512: Current learning rate: 0.00977 +2026-04-08 16:43:14.209451: train_loss 0.054 +2026-04-08 16:43:14.215964: val_loss 0.0325 +2026-04-08 16:43:14.217459: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:43:14.219306: Epoch time: 102.52 s +2026-04-08 16:43:15.256613: +2026-04-08 16:43:15.260170: Epoch 26 +2026-04-08 16:43:15.261981: Current learning rate: 0.00977 +2026-04-08 16:44:58.338963: train_loss 0.0456 +2026-04-08 16:44:58.347783: val_loss 0.0427 +2026-04-08 16:44:58.350293: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:44:58.352427: Epoch time: 103.08 s +2026-04-08 16:44:59.428044: +2026-04-08 16:44:59.430351: Epoch 27 
+2026-04-08 16:44:59.432006: Current learning rate: 0.00976 +2026-04-08 16:46:41.318248: train_loss 0.0488 +2026-04-08 16:46:41.326276: val_loss 0.0393 +2026-04-08 16:46:41.329259: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:46:41.331473: Epoch time: 101.89 s +2026-04-08 16:46:42.371472: +2026-04-08 16:46:42.374040: Epoch 28 +2026-04-08 16:46:42.376945: Current learning rate: 0.00975 +2026-04-08 16:48:24.833114: train_loss 0.0531 +2026-04-08 16:48:24.840276: val_loss 0.0298 +2026-04-08 16:48:24.842021: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:48:24.843807: Epoch time: 102.46 s +2026-04-08 16:48:25.917714: +2026-04-08 16:48:25.919472: Epoch 29 +2026-04-08 16:48:25.921056: Current learning rate: 0.00974 +2026-04-08 16:50:08.871840: train_loss 0.0435 +2026-04-08 16:50:08.878391: val_loss 0.044 +2026-04-08 16:50:08.879835: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:50:08.881939: Epoch time: 102.96 s +2026-04-08 16:50:09.941501: +2026-04-08 16:50:09.943501: Epoch 30 +2026-04-08 16:50:09.946599: Current learning rate: 0.00973 +2026-04-08 16:51:53.035265: train_loss 0.0371 +2026-04-08 16:51:53.050890: val_loss 0.0402 +2026-04-08 16:51:53.053465: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:51:53.055541: Epoch time: 103.1 s +2026-04-08 16:51:54.167745: +2026-04-08 16:51:54.170108: Epoch 31 +2026-04-08 16:51:54.172561: Current learning rate: 0.00972 +2026-04-08 16:53:37.111190: train_loss 0.0445 +2026-04-08 16:53:37.122520: val_loss 0.034 +2026-04-08 16:53:37.126542: Pseudo dice [0.0, 0.0, 0.0675, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:53:37.136288: Epoch time: 102.95 s +2026-04-08 16:53:37.143284: Yayy! 
New best EMA pseudo Dice: 0.001 +2026-04-08 16:53:40.161600: +2026-04-08 16:53:40.164093: Epoch 32 +2026-04-08 16:53:40.165951: Current learning rate: 0.00971 +2026-04-08 16:55:22.554992: train_loss 0.0419 +2026-04-08 16:55:22.565254: val_loss 0.0304 +2026-04-08 16:55:22.569147: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:55:22.572022: Epoch time: 102.4 s +2026-04-08 16:55:23.675289: +2026-04-08 16:55:23.677899: Epoch 33 +2026-04-08 16:55:23.680762: Current learning rate: 0.0097 +2026-04-08 16:57:06.134548: train_loss 0.0326 +2026-04-08 16:57:06.141549: val_loss 0.0279 +2026-04-08 16:57:06.144143: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:57:06.146900: Epoch time: 102.46 s +2026-04-08 16:57:07.315507: +2026-04-08 16:57:07.317156: Epoch 34 +2026-04-08 16:57:07.319414: Current learning rate: 0.00969 +2026-04-08 16:58:49.404612: train_loss 0.0285 +2026-04-08 16:58:49.412503: val_loss 0.0339 +2026-04-08 16:58:49.414520: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:58:49.416897: Epoch time: 102.09 s +2026-04-08 16:58:50.490150: +2026-04-08 16:58:50.492377: Epoch 35 +2026-04-08 16:58:50.495090: Current learning rate: 0.00968 +2026-04-08 17:00:32.546186: train_loss 0.0335 +2026-04-08 17:00:32.553118: val_loss 0.0296 +2026-04-08 17:00:32.554764: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:00:32.557211: Epoch time: 102.06 s +2026-04-08 17:00:33.666539: +2026-04-08 17:00:33.669306: Epoch 36 +2026-04-08 17:00:33.670610: Current learning rate: 0.00968 +2026-04-08 17:02:16.663011: train_loss 0.045 +2026-04-08 17:02:16.679777: val_loss 0.0417 +2026-04-08 17:02:16.684142: Pseudo dice [0.0, 0.0, 0.0334, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:02:16.688267: Epoch time: 103.0 s +2026-04-08 17:02:16.692303: Yayy! 
New best EMA pseudo Dice: 0.001 +2026-04-08 17:02:19.680003: +2026-04-08 17:02:19.682317: Epoch 37 +2026-04-08 17:02:19.683723: Current learning rate: 0.00967 +2026-04-08 17:04:02.665658: train_loss 0.0377 +2026-04-08 17:04:02.672173: val_loss 0.0227 +2026-04-08 17:04:02.674100: Pseudo dice [0.0, 0.0, 0.1404, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:04:02.676410: Epoch time: 102.99 s +2026-04-08 17:04:02.679215: Yayy! New best EMA pseudo Dice: 0.0029 +2026-04-08 17:04:06.702322: +2026-04-08 17:04:06.703997: Epoch 38 +2026-04-08 17:04:06.705462: Current learning rate: 0.00966 +2026-04-08 17:05:49.570253: train_loss 0.0357 +2026-04-08 17:05:49.578290: val_loss 0.0305 +2026-04-08 17:05:49.581322: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:05:49.583381: Epoch time: 102.87 s +2026-04-08 17:05:50.714803: +2026-04-08 17:05:50.716462: Epoch 39 +2026-04-08 17:05:50.720047: Current learning rate: 0.00965 +2026-04-08 17:07:33.668510: train_loss 0.0412 +2026-04-08 17:07:33.679188: val_loss 0.0245 +2026-04-08 17:07:33.680913: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:07:33.684738: Epoch time: 102.96 s +2026-04-08 17:07:34.794217: +2026-04-08 17:07:34.799339: Epoch 40 +2026-04-08 17:07:34.803738: Current learning rate: 0.00964 +2026-04-08 17:09:17.566776: train_loss 0.0394 +2026-04-08 17:09:17.573194: val_loss 0.0231 +2026-04-08 17:09:17.575377: Pseudo dice [0.0, 0.0, 0.0017, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:09:17.578364: Epoch time: 102.78 s +2026-04-08 17:09:18.690116: +2026-04-08 17:09:18.691818: Epoch 41 +2026-04-08 17:09:18.695505: Current learning rate: 0.00963 +2026-04-08 17:11:00.705721: train_loss 0.0377 +2026-04-08 17:11:00.712525: val_loss 0.0225 +2026-04-08 17:11:00.714282: Pseudo dice [0.0, 0.0, 0.008, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:11:00.716681: Epoch time: 102.02 s +2026-04-08 17:11:01.786720: +2026-04-08 17:11:01.788645: Epoch 42 +2026-04-08 17:11:01.790692: Current learning rate: 0.00962 +2026-04-08 17:12:44.947653: 
train_loss 0.0195 +2026-04-08 17:12:44.953979: val_loss 0.024 +2026-04-08 17:12:44.955388: Pseudo dice [0.0, 0.0, 0.2423, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:12:44.958195: Epoch time: 103.16 s +2026-04-08 17:12:44.959796: Yayy! New best EMA pseudo Dice: 0.0053 +2026-04-08 17:12:47.801121: +2026-04-08 17:12:47.802622: Epoch 43 +2026-04-08 17:12:47.803966: Current learning rate: 0.00961 +2026-04-08 17:14:29.984115: train_loss 0.0316 +2026-04-08 17:14:29.989795: val_loss 0.0166 +2026-04-08 17:14:29.992488: Pseudo dice [0.0, 0.0, 0.1044, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:14:29.995339: Epoch time: 102.19 s +2026-04-08 17:14:29.997765: Yayy! New best EMA pseudo Dice: 0.0063 +2026-04-08 17:14:32.778938: +2026-04-08 17:14:32.780343: Epoch 44 +2026-04-08 17:14:32.781581: Current learning rate: 0.0096 +2026-04-08 17:16:15.496941: train_loss 0.0231 +2026-04-08 17:16:15.503122: val_loss 0.0153 +2026-04-08 17:16:15.505780: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:16:15.508017: Epoch time: 102.72 s +2026-04-08 17:16:16.538314: +2026-04-08 17:16:16.540441: Epoch 45 +2026-04-08 17:16:16.542103: Current learning rate: 0.00959 +2026-04-08 17:17:58.748376: train_loss 0.0215 +2026-04-08 17:17:58.755235: val_loss 0.0272 +2026-04-08 17:17:58.757518: Pseudo dice [0.0, 0.0, 0.3516, 0.0, 0.0, 0.0, 0.0308] +2026-04-08 17:17:58.759372: Epoch time: 102.21 s +2026-04-08 17:17:58.760529: Yayy! New best EMA pseudo Dice: 0.0106 +2026-04-08 17:18:01.614461: +2026-04-08 17:18:01.615939: Epoch 46 +2026-04-08 17:18:01.617296: Current learning rate: 0.00959 +2026-04-08 17:19:45.008134: train_loss 0.0329 +2026-04-08 17:19:45.015547: val_loss 0.0366 +2026-04-08 17:19:45.017378: Pseudo dice [0.0, 0.0, 0.2551, 0.0, 0.0, 0.0, 0.0072] +2026-04-08 17:19:45.019996: Epoch time: 103.4 s +2026-04-08 17:19:45.021569: Yayy! 
New best EMA pseudo Dice: 0.0132 +2026-04-08 17:19:47.817399: +2026-04-08 17:19:47.819082: Epoch 47 +2026-04-08 17:19:47.820844: Current learning rate: 0.00958 +2026-04-08 17:21:30.778538: train_loss 0.0214 +2026-04-08 17:21:30.785101: val_loss 0.0282 +2026-04-08 17:21:30.787204: Pseudo dice [0.0, 0.0, 0.3199, 0.0, 0.0, 0.0, 0.1434] +2026-04-08 17:21:30.789296: Epoch time: 102.96 s +2026-04-08 17:21:30.791999: Yayy! New best EMA pseudo Dice: 0.0185 +2026-04-08 17:21:33.550432: +2026-04-08 17:21:33.552349: Epoch 48 +2026-04-08 17:21:33.553964: Current learning rate: 0.00957 +2026-04-08 17:23:15.942644: train_loss 0.0206 +2026-04-08 17:23:15.949734: val_loss 0.0267 +2026-04-08 17:23:15.951802: Pseudo dice [0.0, 0.0, 0.1229, 0.0, 0.0, 0.0, 0.1556] +2026-04-08 17:23:15.953984: Epoch time: 102.39 s +2026-04-08 17:23:15.955527: Yayy! New best EMA pseudo Dice: 0.0207 +2026-04-08 17:23:18.753283: +2026-04-08 17:23:18.755233: Epoch 49 +2026-04-08 17:23:18.756445: Current learning rate: 0.00956 +2026-04-08 17:25:00.943944: train_loss 0.0043 +2026-04-08 17:25:00.950833: val_loss 0.016 +2026-04-08 17:25:00.952973: Pseudo dice [0.0, 0.0, 0.2049, 0.0, 0.0, 0.0, 0.3318] +2026-04-08 17:25:00.955606: Epoch time: 102.19 s +2026-04-08 17:25:02.686566: Yayy! New best EMA pseudo Dice: 0.0263 +2026-04-08 17:25:05.487158: +2026-04-08 17:25:05.488941: Epoch 50 +2026-04-08 17:25:05.490109: Current learning rate: 0.00955 +2026-04-08 17:26:48.014781: train_loss 0.0173 +2026-04-08 17:26:48.023112: val_loss 0.0096 +2026-04-08 17:26:48.024671: Pseudo dice [0.0, 0.0, 0.1614, 0.0, 0.0, 0.0061, 0.278] +2026-04-08 17:26:48.027266: Epoch time: 102.53 s +2026-04-08 17:26:48.028893: Yayy! 
New best EMA pseudo Dice: 0.03 +2026-04-08 17:26:50.840445: +2026-04-08 17:26:50.841782: Epoch 51 +2026-04-08 17:26:50.842978: Current learning rate: 0.00954 +2026-04-08 17:28:33.318690: train_loss 0.0098 +2026-04-08 17:28:33.325640: val_loss 0.0059 +2026-04-08 17:28:33.328165: Pseudo dice [0.0, 0.0, 0.4533, 0.0, 0.0, 0.0751, 0.2882] +2026-04-08 17:28:33.330900: Epoch time: 102.48 s +2026-04-08 17:28:33.332567: Yayy! New best EMA pseudo Dice: 0.0387 +2026-04-08 17:28:36.220842: +2026-04-08 17:28:36.222821: Epoch 52 +2026-04-08 17:28:36.224559: Current learning rate: 0.00953 +2026-04-08 17:30:18.185854: train_loss 0.0212 +2026-04-08 17:30:18.192452: val_loss 0.009 +2026-04-08 17:30:18.194053: Pseudo dice [0.0, 0.0, 0.4377, 0.0, 0.0, 0.1136, 0.189] +2026-04-08 17:30:18.196013: Epoch time: 101.97 s +2026-04-08 17:30:18.197543: Yayy! New best EMA pseudo Dice: 0.0454 +2026-04-08 17:30:20.872809: +2026-04-08 17:30:20.874239: Epoch 53 +2026-04-08 17:30:20.875610: Current learning rate: 0.00952 +2026-04-08 17:32:02.766163: train_loss 0.0086 +2026-04-08 17:32:02.774841: val_loss 0.0186 +2026-04-08 17:32:02.776559: Pseudo dice [0.0, 0.0, 0.1141, 0.0, 0.0, 0.047, 0.2775] +2026-04-08 17:32:02.778898: Epoch time: 101.9 s +2026-04-08 17:32:02.780219: Yayy! New best EMA pseudo Dice: 0.0471 +2026-04-08 17:32:05.535441: +2026-04-08 17:32:05.537029: Epoch 54 +2026-04-08 17:32:05.538309: Current learning rate: 0.00951 +2026-04-08 17:33:48.255144: train_loss 0.0011 +2026-04-08 17:33:48.263772: val_loss 0.0041 +2026-04-08 17:33:48.266751: Pseudo dice [0.0, 0.0, 0.5235, 0.0, 0.0, 0.1364, 0.4573] +2026-04-08 17:33:48.270137: Epoch time: 102.72 s +2026-04-08 17:33:48.271914: Yayy! 
New best EMA pseudo Dice: 0.0584 +2026-04-08 17:33:51.015294: +2026-04-08 17:33:51.017210: Epoch 55 +2026-04-08 17:33:51.018427: Current learning rate: 0.0095 +2026-04-08 17:35:34.350497: train_loss 0.0009 +2026-04-08 17:35:34.357399: val_loss -0.0118 +2026-04-08 17:35:34.360234: Pseudo dice [0.0, 0.0, 0.5356, 0.0, 0.0, 0.086, 0.1057] +2026-04-08 17:35:34.362452: Epoch time: 103.34 s +2026-04-08 17:35:34.364278: Yayy! New best EMA pseudo Dice: 0.0629 +2026-04-08 17:35:37.162364: +2026-04-08 17:35:37.164040: Epoch 56 +2026-04-08 17:35:37.165648: Current learning rate: 0.00949 +2026-04-08 17:37:19.594434: train_loss 0.0072 +2026-04-08 17:37:19.602467: val_loss -0.013 +2026-04-08 17:37:19.605079: Pseudo dice [0.0, 0.0, 0.4277, 0.0, 0.0, 0.1846, 0.2504] +2026-04-08 17:37:19.607907: Epoch time: 102.43 s +2026-04-08 17:37:19.609596: Yayy! New best EMA pseudo Dice: 0.0689 +2026-04-08 17:37:22.355871: +2026-04-08 17:37:22.358093: Epoch 57 +2026-04-08 17:37:22.359881: Current learning rate: 0.00949 +2026-04-08 17:39:04.484835: train_loss 0.0055 +2026-04-08 17:39:04.493609: val_loss -0.0134 +2026-04-08 17:39:04.495478: Pseudo dice [0.0, 0.0, 0.4665, 0.0, 0.0, 0.1647, 0.4677] +2026-04-08 17:39:04.497555: Epoch time: 102.13 s +2026-04-08 17:39:04.499306: Yayy! New best EMA pseudo Dice: 0.0777 +2026-04-08 17:39:07.249693: +2026-04-08 17:39:07.251267: Epoch 58 +2026-04-08 17:39:07.252649: Current learning rate: 0.00948 +2026-04-08 17:40:50.202669: train_loss -0.0073 +2026-04-08 17:40:50.208582: val_loss -0.0158 +2026-04-08 17:40:50.210203: Pseudo dice [0.0, 0.0, 0.3602, 0.0, 0.0, 0.2832, 0.3854] +2026-04-08 17:40:50.212179: Epoch time: 102.96 s +2026-04-08 17:40:50.214168: Yayy! 
New best EMA pseudo Dice: 0.0847 +2026-04-08 17:40:52.981878: +2026-04-08 17:40:52.983439: Epoch 59 +2026-04-08 17:40:52.984735: Current learning rate: 0.00947 +2026-04-08 17:42:35.122458: train_loss -0.0006 +2026-04-08 17:42:35.130325: val_loss -0.0095 +2026-04-08 17:42:35.131932: Pseudo dice [0.0, 0.0, 0.3851, 0.0, 0.0, 0.2104, 0.3104] +2026-04-08 17:42:35.134375: Epoch time: 102.14 s +2026-04-08 17:42:35.135843: Yayy! New best EMA pseudo Dice: 0.0891 +2026-04-08 17:42:37.975730: +2026-04-08 17:42:37.977218: Epoch 60 +2026-04-08 17:42:37.979393: Current learning rate: 0.00946 +2026-04-08 17:44:19.807606: train_loss -0.0061 +2026-04-08 17:44:19.813151: val_loss 0.0002 +2026-04-08 17:44:19.814746: Pseudo dice [0.0, 0.0, 0.4328, 0.0, 0.0, 0.1344, 0.4813] +2026-04-08 17:44:19.816922: Epoch time: 101.83 s +2026-04-08 17:44:19.818495: Yayy! New best EMA pseudo Dice: 0.0952 +2026-04-08 17:44:22.674351: +2026-04-08 17:44:22.675675: Epoch 61 +2026-04-08 17:44:22.677561: Current learning rate: 0.00945 +2026-04-08 17:46:05.064297: train_loss 0.0 +2026-04-08 17:46:05.084764: val_loss -0.001 +2026-04-08 17:46:05.087268: Pseudo dice [0.0, 0.0, 0.3415, 0.0, 0.0, 0.1398, 0.2784] +2026-04-08 17:46:05.103177: Epoch time: 102.39 s +2026-04-08 17:46:05.111780: Yayy! 
New best EMA pseudo Dice: 0.0965 +2026-04-08 17:46:08.216296: +2026-04-08 17:46:08.218019: Epoch 62 +2026-04-08 17:46:08.219229: Current learning rate: 0.00944 +2026-04-08 17:47:50.334110: train_loss -0.0065 +2026-04-08 17:47:50.341231: val_loss -0.0091 +2026-04-08 17:47:50.343605: Pseudo dice [0.0, 0.0, 0.3422, 0.0, 0.0, 0.1194, 0.0403] +2026-04-08 17:47:50.345633: Epoch time: 102.12 s +2026-04-08 17:47:51.459291: +2026-04-08 17:47:51.461148: Epoch 63 +2026-04-08 17:47:51.462518: Current learning rate: 0.00943 +2026-04-08 17:49:33.317369: train_loss -0.009 +2026-04-08 17:49:33.323191: val_loss -0.0015 +2026-04-08 17:49:33.324769: Pseudo dice [0.0, 0.0, 0.0235, 0.0, 0.0, 0.0966, 0.3646] +2026-04-08 17:49:33.326988: Epoch time: 101.86 s +2026-04-08 17:49:34.449960: +2026-04-08 17:49:34.451965: Epoch 64 +2026-04-08 17:49:34.454471: Current learning rate: 0.00942 +2026-04-08 17:51:16.159966: train_loss -0.0049 +2026-04-08 17:51:16.166933: val_loss -0.0046 +2026-04-08 17:51:16.169396: Pseudo dice [0.0, 0.0, 0.4462, 0.0, 0.0, 0.2855, 0.2259] +2026-04-08 17:51:16.173474: Epoch time: 101.71 s +2026-04-08 17:51:17.243985: +2026-04-08 17:51:17.245562: Epoch 65 +2026-04-08 17:51:17.247084: Current learning rate: 0.00941 +2026-04-08 17:52:59.363744: train_loss -0.0167 +2026-04-08 17:52:59.368866: val_loss -0.029 +2026-04-08 17:52:59.370436: Pseudo dice [0.0, 0.0, 0.4524, 0.0, 0.0, 0.3918, 0.2834] +2026-04-08 17:52:59.372438: Epoch time: 102.12 s +2026-04-08 17:52:59.373962: Yayy! New best EMA pseudo Dice: 0.1026 +2026-04-08 17:53:02.150832: +2026-04-08 17:53:02.152491: Epoch 66 +2026-04-08 17:53:02.154016: Current learning rate: 0.0094 +2026-04-08 17:54:44.080906: train_loss -1e-04 +2026-04-08 17:54:44.089178: val_loss -0.0204 +2026-04-08 17:54:44.091694: Pseudo dice [0.0095, 0.0, 0.5734, 0.0, 0.0, 0.1566, 0.6041] +2026-04-08 17:54:44.093958: Epoch time: 101.93 s +2026-04-08 17:54:44.095184: Yayy! 
New best EMA pseudo Dice: 0.1115 +2026-04-08 17:54:47.048913: +2026-04-08 17:54:47.050319: Epoch 67 +2026-04-08 17:54:47.051446: Current learning rate: 0.00939 +2026-04-08 17:56:28.690172: train_loss -0.0161 +2026-04-08 17:56:28.697510: val_loss -0.0305 +2026-04-08 17:56:28.699092: Pseudo dice [0.0462, 0.0, 0.6708, 0.0, 0.0, 0.4288, 0.3293] +2026-04-08 17:56:28.700951: Epoch time: 101.64 s +2026-04-08 17:56:28.702492: Yayy! New best EMA pseudo Dice: 0.1215 +2026-04-08 17:56:31.456349: +2026-04-08 17:56:31.457913: Epoch 68 +2026-04-08 17:56:31.459070: Current learning rate: 0.00939 +2026-04-08 17:58:13.357610: train_loss -0.0121 +2026-04-08 17:58:13.365510: val_loss -0.003 +2026-04-08 17:58:13.367269: Pseudo dice [0.0527, 0.0, 0.1061, 0.0, 0.0, 0.1495, 0.1534] +2026-04-08 17:58:13.369014: Epoch time: 101.9 s +2026-04-08 17:58:14.456730: +2026-04-08 17:58:14.458024: Epoch 69 +2026-04-08 17:58:14.459260: Current learning rate: 0.00938 +2026-04-08 17:59:56.359491: train_loss -0.0183 +2026-04-08 17:59:56.365309: val_loss -0.0148 +2026-04-08 17:59:56.366696: Pseudo dice [0.1213, 0.0, 0.3907, 0.0, 0.0, 0.2535, 0.2312] +2026-04-08 17:59:56.368907: Epoch time: 101.91 s +2026-04-08 17:59:57.444048: +2026-04-08 17:59:57.445549: Epoch 70 +2026-04-08 17:59:57.446738: Current learning rate: 0.00937 +2026-04-08 18:01:39.316313: train_loss -0.0121 +2026-04-08 18:01:39.322457: val_loss -0.0106 +2026-04-08 18:01:39.323922: Pseudo dice [0.0691, 0.0, 0.0767, 0.0, 0.0, 0.0678, 0.2958] +2026-04-08 18:01:39.325655: Epoch time: 101.87 s +2026-04-08 18:01:40.428519: +2026-04-08 18:01:40.430141: Epoch 71 +2026-04-08 18:01:40.431392: Current learning rate: 0.00936 +2026-04-08 18:03:22.523817: train_loss -0.0219 +2026-04-08 18:03:22.529912: val_loss -0.0291 +2026-04-08 18:03:22.531583: Pseudo dice [0.0271, 0.0, 0.6973, 0.0, 0.0, 0.3554, 0.4] +2026-04-08 18:03:22.533554: Epoch time: 102.1 s +2026-04-08 18:03:22.535897: Yayy! 
New best EMA pseudo Dice: 0.1237 +2026-04-08 18:03:25.305359: +2026-04-08 18:03:25.306915: Epoch 72 +2026-04-08 18:03:25.308120: Current learning rate: 0.00935 +2026-04-08 18:05:07.322802: train_loss -0.0219 +2026-04-08 18:05:07.333230: val_loss -0.0285 +2026-04-08 18:05:07.336132: Pseudo dice [0.1286, 0.0, 0.4716, 0.0, 0.0, 0.2207, 0.5085] +2026-04-08 18:05:07.338220: Epoch time: 102.02 s +2026-04-08 18:05:07.340135: Yayy! New best EMA pseudo Dice: 0.1303 +2026-04-08 18:05:10.083661: +2026-04-08 18:05:10.084850: Epoch 73 +2026-04-08 18:05:10.085991: Current learning rate: 0.00934 +2026-04-08 18:06:52.975162: train_loss -0.0301 +2026-04-08 18:06:52.981990: val_loss -0.0007 +2026-04-08 18:06:52.984215: Pseudo dice [0.1304, 0.0, 0.3753, 0.0, 0.0, 0.3332, 0.5552] +2026-04-08 18:06:52.986159: Epoch time: 102.89 s +2026-04-08 18:06:52.987743: Yayy! New best EMA pseudo Dice: 0.1372 +2026-04-08 18:06:55.788122: +2026-04-08 18:06:55.789662: Epoch 74 +2026-04-08 18:06:55.790897: Current learning rate: 0.00933 +2026-04-08 18:08:38.289623: train_loss -0.0224 +2026-04-08 18:08:38.295171: val_loss -0.0408 +2026-04-08 18:08:38.296777: Pseudo dice [0.0082, 0.0, 0.6133, 0.0, 0.0, 0.3573, 0.4709] +2026-04-08 18:08:38.298642: Epoch time: 102.5 s +2026-04-08 18:08:38.301169: Yayy! 
New best EMA pseudo Dice: 0.1442 +2026-04-08 18:08:40.982399: +2026-04-08 18:08:40.985343: Epoch 75 +2026-04-08 18:08:40.986522: Current learning rate: 0.00932 +2026-04-08 18:10:23.117101: train_loss -0.0102 +2026-04-08 18:10:23.122869: val_loss -0.0141 +2026-04-08 18:10:23.124493: Pseudo dice [0.1219, 0.0, 0.1999, 0.0, 0.0, 0.2707, 0.2336] +2026-04-08 18:10:23.126390: Epoch time: 102.14 s +2026-04-08 18:10:24.171989: +2026-04-08 18:10:24.173878: Epoch 76 +2026-04-08 18:10:24.175004: Current learning rate: 0.00931 +2026-04-08 18:12:05.930941: train_loss -0.0142 +2026-04-08 18:12:05.936819: val_loss -0.0162 +2026-04-08 18:12:05.938650: Pseudo dice [0.0317, 0.0173, 0.1093, 0.0, 0.0, 0.1475, 0.5313] +2026-04-08 18:12:05.941087: Epoch time: 101.76 s +2026-04-08 18:12:06.996763: +2026-04-08 18:12:07.002156: Epoch 77 +2026-04-08 18:12:07.006459: Current learning rate: 0.0093 +2026-04-08 18:13:48.748571: train_loss -0.0233 +2026-04-08 18:13:48.754795: val_loss -0.0145 +2026-04-08 18:13:48.756146: Pseudo dice [0.0769, 0.0295, 0.4014, 0.0, 0.0, 0.2355, 0.1041] +2026-04-08 18:13:48.758216: Epoch time: 101.76 s +2026-04-08 18:13:49.832911: +2026-04-08 18:13:49.834131: Epoch 78 +2026-04-08 18:13:49.835225: Current learning rate: 0.0093 +2026-04-08 18:15:32.805543: train_loss -0.0101 +2026-04-08 18:15:32.822930: val_loss -0.0363 +2026-04-08 18:15:32.826663: Pseudo dice [0.1875, 0.0351, 0.5603, 0.0, 0.0, 0.3068, 0.598] +2026-04-08 18:15:32.830233: Epoch time: 102.98 s +2026-04-08 18:15:32.872798: Yayy! New best EMA pseudo Dice: 0.1479 +2026-04-08 18:15:35.802107: +2026-04-08 18:15:35.804754: Epoch 79 +2026-04-08 18:15:35.823586: Current learning rate: 0.00929 +2026-04-08 18:17:17.829911: train_loss -0.0361 +2026-04-08 18:17:17.835191: val_loss -0.0172 +2026-04-08 18:17:17.836617: Pseudo dice [0.1978, 0.1167, 0.4225, 0.0, 0.0, 0.0783, 0.4657] +2026-04-08 18:17:17.838783: Epoch time: 102.03 s +2026-04-08 18:17:17.840258: Yayy! 
New best EMA pseudo Dice: 0.1514 +2026-04-08 18:17:20.587045: +2026-04-08 18:17:20.589447: Epoch 80 +2026-04-08 18:17:20.590841: Current learning rate: 0.00928 +2026-04-08 18:19:02.578069: train_loss -0.0384 +2026-04-08 18:19:02.582095: val_loss -0.0443 +2026-04-08 18:19:02.583643: Pseudo dice [0.2345, 0.2728, 0.5536, 0.0, 0.0, 0.2578, 0.491] +2026-04-08 18:19:02.585330: Epoch time: 101.99 s +2026-04-08 18:19:02.586785: Yayy! New best EMA pseudo Dice: 0.1621 +2026-04-08 18:19:05.200410: +2026-04-08 18:19:05.202276: Epoch 81 +2026-04-08 18:19:05.203558: Current learning rate: 0.00927 +2026-04-08 18:20:47.066233: train_loss -0.0461 +2026-04-08 18:20:47.070916: val_loss -0.0659 +2026-04-08 18:20:47.072259: Pseudo dice [0.1742, 0.0156, 0.4062, 0.0, 0.0, 0.2549, 0.5865] +2026-04-08 18:20:47.074009: Epoch time: 101.87 s +2026-04-08 18:20:47.075229: Yayy! New best EMA pseudo Dice: 0.1665 +2026-04-08 18:20:49.769183: +2026-04-08 18:20:49.771378: Epoch 82 +2026-04-08 18:20:49.772453: Current learning rate: 0.00926 +2026-04-08 18:22:31.516086: train_loss -0.0515 +2026-04-08 18:22:31.521088: val_loss -0.0656 +2026-04-08 18:22:31.522491: Pseudo dice [0.2733, 0.0924, 0.7174, 0.0, 0.0, 0.5432, 0.3835] +2026-04-08 18:22:31.524844: Epoch time: 101.75 s +2026-04-08 18:22:31.526295: Yayy! New best EMA pseudo Dice: 0.1785 +2026-04-08 18:22:33.882609: +2026-04-08 18:22:33.884803: Epoch 83 +2026-04-08 18:22:33.886039: Current learning rate: 0.00925 +2026-04-08 18:24:15.579387: train_loss -0.0322 +2026-04-08 18:24:15.583382: val_loss -0.0502 +2026-04-08 18:24:15.584560: Pseudo dice [0.146, 0.4322, 0.6868, 0.0, 0.0, 0.4101, 0.2478] +2026-04-08 18:24:15.586788: Epoch time: 101.7 s +2026-04-08 18:24:15.588273: Yayy! 
New best EMA pseudo Dice: 0.1881 +2026-04-08 18:24:18.148028: +2026-04-08 18:24:18.150541: Epoch 84 +2026-04-08 18:24:18.151797: Current learning rate: 0.00924 +2026-04-08 18:25:59.677946: train_loss -0.043 +2026-04-08 18:25:59.683527: val_loss -0.0599 +2026-04-08 18:25:59.685249: Pseudo dice [0.3541, 0.2966, 0.5613, 0.0, 0.0, 0.2078, 0.493] +2026-04-08 18:25:59.687195: Epoch time: 101.53 s +2026-04-08 18:25:59.688681: Yayy! New best EMA pseudo Dice: 0.1966 +2026-04-08 18:26:02.266845: +2026-04-08 18:26:02.269216: Epoch 85 +2026-04-08 18:26:02.270351: Current learning rate: 0.00923 +2026-04-08 18:27:44.065092: train_loss -0.0495 +2026-04-08 18:27:44.071859: val_loss -0.0645 +2026-04-08 18:27:44.074223: Pseudo dice [0.2779, 0.2033, 0.6154, 0.0, 0.0, 0.3138, 0.6946] +2026-04-08 18:27:44.076876: Epoch time: 101.8 s +2026-04-08 18:27:44.078488: Yayy! New best EMA pseudo Dice: 0.2071 +2026-04-08 18:27:46.833714: +2026-04-08 18:27:46.836123: Epoch 86 +2026-04-08 18:27:46.837378: Current learning rate: 0.00922 +2026-04-08 18:29:28.511680: train_loss -0.0547 +2026-04-08 18:29:28.521178: val_loss -0.0728 +2026-04-08 18:29:28.523764: Pseudo dice [0.3, 0.2391, 0.5008, 0.0, 0.0, 0.3736, 0.5781] +2026-04-08 18:29:28.527357: Epoch time: 101.68 s +2026-04-08 18:29:28.529571: Yayy! New best EMA pseudo Dice: 0.2148 +2026-04-08 18:29:31.266666: +2026-04-08 18:29:31.268936: Epoch 87 +2026-04-08 18:29:31.270242: Current learning rate: 0.00921 +2026-04-08 18:31:13.173443: train_loss -0.0533 +2026-04-08 18:31:13.177586: val_loss -0.0461 +2026-04-08 18:31:13.179790: Pseudo dice [0.25, 0.3822, 0.5718, 0.0, 0.0, 0.2062, 0.3446] +2026-04-08 18:31:13.181941: Epoch time: 101.91 s +2026-04-08 18:31:13.183547: Yayy! 
New best EMA pseudo Dice: 0.2184 +2026-04-08 18:31:15.989175: +2026-04-08 18:31:15.991739: Epoch 88 +2026-04-08 18:31:15.992919: Current learning rate: 0.0092 +2026-04-08 18:32:57.644109: train_loss -0.0378 +2026-04-08 18:32:57.647981: val_loss -0.0445 +2026-04-08 18:32:57.649631: Pseudo dice [0.0628, 0.0342, 0.0677, 0.0, 0.0, 0.3652, 0.6058] +2026-04-08 18:32:57.651560: Epoch time: 101.66 s +2026-04-08 18:32:58.662297: +2026-04-08 18:32:58.663664: Epoch 89 +2026-04-08 18:32:58.665014: Current learning rate: 0.0092 +2026-04-08 18:34:40.196746: train_loss -0.041 +2026-04-08 18:34:40.202349: val_loss -0.053 +2026-04-08 18:34:40.203848: Pseudo dice [0.0887, 0.1481, 0.4631, 0.0, 0.0, 0.384, 0.3618] +2026-04-08 18:34:40.205492: Epoch time: 101.54 s +2026-04-08 18:34:41.197975: +2026-04-08 18:34:41.199309: Epoch 90 +2026-04-08 18:34:41.200499: Current learning rate: 0.00919 +2026-04-08 18:36:23.771654: train_loss -0.0577 +2026-04-08 18:36:23.775548: val_loss -0.0654 +2026-04-08 18:36:23.776990: Pseudo dice [0.3643, 0.0714, 0.6459, 0.0, 0.0, 0.1365, 0.4172] +2026-04-08 18:36:23.778671: Epoch time: 102.58 s +2026-04-08 18:36:24.804741: +2026-04-08 18:36:24.806319: Epoch 91 +2026-04-08 18:36:24.807714: Current learning rate: 0.00918 +2026-04-08 18:38:06.758606: train_loss -0.065 +2026-04-08 18:38:06.764267: val_loss -0.0615 +2026-04-08 18:38:06.765701: Pseudo dice [0.4555, 0.5943, 0.698, 0.0, 0.0, 0.3961, 0.4534] +2026-04-08 18:38:06.768803: Epoch time: 101.96 s +2026-04-08 18:38:06.770257: Yayy! New best EMA pseudo Dice: 0.23 +2026-04-08 18:38:09.230371: +2026-04-08 18:38:09.233440: Epoch 92 +2026-04-08 18:38:09.234688: Current learning rate: 0.00917 +2026-04-08 18:39:50.958374: train_loss -0.0564 +2026-04-08 18:39:50.962937: val_loss -0.0615 +2026-04-08 18:39:50.964542: Pseudo dice [0.4601, 0.2278, 0.6343, 0.0, 0.0, 0.3548, 0.1122] +2026-04-08 18:39:50.966687: Epoch time: 101.73 s +2026-04-08 18:39:50.968039: Yayy! 
New best EMA pseudo Dice: 0.2325 +2026-04-08 18:39:53.692730: +2026-04-08 18:39:53.695193: Epoch 93 +2026-04-08 18:39:53.698855: Current learning rate: 0.00916 +2026-04-08 18:41:35.367990: train_loss -0.0603 +2026-04-08 18:41:35.372391: val_loss -0.0574 +2026-04-08 18:41:35.373946: Pseudo dice [0.32, 0.2166, 0.6517, 0.0, 0.0, 0.3566, 0.5706] +2026-04-08 18:41:35.375837: Epoch time: 101.68 s +2026-04-08 18:41:35.377561: Yayy! New best EMA pseudo Dice: 0.2395 +2026-04-08 18:41:37.801983: +2026-04-08 18:41:37.803689: Epoch 94 +2026-04-08 18:41:37.804732: Current learning rate: 0.00915 +2026-04-08 18:43:19.640832: train_loss -0.0391 +2026-04-08 18:43:19.645945: val_loss -0.0247 +2026-04-08 18:43:19.647559: Pseudo dice [0.299, 0.7322, 0.3653, 0.0, 0.0, 0.1813, 0.4331] +2026-04-08 18:43:19.649203: Epoch time: 101.84 s +2026-04-08 18:43:19.650596: Yayy! New best EMA pseudo Dice: 0.2443 +2026-04-08 18:43:22.285276: +2026-04-08 18:43:22.286789: Epoch 95 +2026-04-08 18:43:22.287925: Current learning rate: 0.00914 +2026-04-08 18:45:04.092145: train_loss -0.0573 +2026-04-08 18:45:04.104145: val_loss -0.057 +2026-04-08 18:45:04.105650: Pseudo dice [0.2968, 0.3085, 0.6763, 0.0, 0.0, 0.2547, 0.229] +2026-04-08 18:45:04.108906: Epoch time: 101.81 s +2026-04-08 18:45:04.110327: Yayy! New best EMA pseudo Dice: 0.2451 +2026-04-08 18:45:06.592882: +2026-04-08 18:45:06.595066: Epoch 96 +2026-04-08 18:45:06.596144: Current learning rate: 0.00913 +2026-04-08 18:46:48.410605: train_loss -0.0686 +2026-04-08 18:46:48.418731: val_loss -0.0568 +2026-04-08 18:46:48.420010: Pseudo dice [0.2583, 0.2788, 0.6118, 0.0044, 0.0, 0.4914, 0.3807] +2026-04-08 18:46:48.424268: Epoch time: 101.82 s +2026-04-08 18:46:48.425839: Yayy! 
New best EMA pseudo Dice: 0.2495 +2026-04-08 18:46:51.025536: +2026-04-08 18:46:51.028368: Epoch 97 +2026-04-08 18:46:51.029684: Current learning rate: 0.00912 +2026-04-08 18:48:32.755405: train_loss -0.0485 +2026-04-08 18:48:32.760302: val_loss -0.0584 +2026-04-08 18:48:32.763911: Pseudo dice [0.347, 0.5235, 0.4757, 0.0056, 0.0, 0.3261, 0.4099] +2026-04-08 18:48:32.767692: Epoch time: 101.73 s +2026-04-08 18:48:32.769062: Yayy! New best EMA pseudo Dice: 0.2544 +2026-04-08 18:48:35.344537: +2026-04-08 18:48:35.346682: Epoch 98 +2026-04-08 18:48:35.347856: Current learning rate: 0.00911 +2026-04-08 18:50:17.006817: train_loss -0.0617 +2026-04-08 18:50:17.011625: val_loss -0.0777 +2026-04-08 18:50:17.013520: Pseudo dice [0.4146, 0.7359, 0.6932, 0.0149, 0.0, 0.3214, 0.5281] +2026-04-08 18:50:17.018256: Epoch time: 101.67 s +2026-04-08 18:50:17.020090: Yayy! New best EMA pseudo Dice: 0.2676 +2026-04-08 18:50:19.697861: +2026-04-08 18:50:19.699778: Epoch 99 +2026-04-08 18:50:19.701240: Current learning rate: 0.0091 +2026-04-08 18:52:01.329328: train_loss -0.0493 +2026-04-08 18:52:01.334479: val_loss -0.0752 +2026-04-08 18:52:01.336153: Pseudo dice [0.4122, 0.5012, 0.6459, 0.0, 0.0, 0.3031, 0.2289] +2026-04-08 18:52:01.338565: Epoch time: 101.63 s +2026-04-08 18:52:02.758145: Yayy! 
New best EMA pseudo Dice: 0.2707 +2026-04-08 18:52:05.376756: +2026-04-08 18:52:05.377997: Epoch 100 +2026-04-08 18:52:05.379081: Current learning rate: 0.0091 +2026-04-08 18:53:47.075543: train_loss -0.0706 +2026-04-08 18:53:47.081037: val_loss -0.0612 +2026-04-08 18:53:47.083231: Pseudo dice [0.1847, 0.1369, 0.4743, 0.0104, 0.0, 0.4721, 0.6028] +2026-04-08 18:53:47.085609: Epoch time: 101.7 s +2026-04-08 18:53:48.124174: +2026-04-08 18:53:48.125629: Epoch 101 +2026-04-08 18:53:48.127513: Current learning rate: 0.00909 +2026-04-08 18:55:29.807334: train_loss -0.0774 +2026-04-08 18:55:29.811518: val_loss -0.0767 +2026-04-08 18:55:29.812992: Pseudo dice [0.3917, 0.0329, 0.5987, 0.0, 0.0, 0.4962, 0.3116] +2026-04-08 18:55:29.814654: Epoch time: 101.69 s +2026-04-08 18:55:30.856562: +2026-04-08 18:55:30.857941: Epoch 102 +2026-04-08 18:55:30.859136: Current learning rate: 0.00908 +2026-04-08 18:57:12.442182: train_loss -0.0546 +2026-04-08 18:57:12.447146: val_loss -0.0573 +2026-04-08 18:57:12.449366: Pseudo dice [0.2152, 0.3378, 0.5754, 0.0419, 0.0, 0.1565, 0.6098] +2026-04-08 18:57:12.451980: Epoch time: 101.59 s +2026-04-08 18:57:13.480115: +2026-04-08 18:57:13.481458: Epoch 103 +2026-04-08 18:57:13.482636: Current learning rate: 0.00907 +2026-04-08 18:58:55.199577: train_loss -0.0633 +2026-04-08 18:58:55.204188: val_loss -0.0687 +2026-04-08 18:58:55.205811: Pseudo dice [0.0823, 0.2373, 0.4813, 0.1206, 0.0, 0.2799, 0.494] +2026-04-08 18:58:55.207679: Epoch time: 101.72 s +2026-04-08 18:58:56.242379: +2026-04-08 18:58:56.243709: Epoch 104 +2026-04-08 18:58:56.244824: Current learning rate: 0.00906 +2026-04-08 19:00:37.943172: train_loss -0.0786 +2026-04-08 19:00:37.947836: val_loss -0.065 +2026-04-08 19:00:37.949406: Pseudo dice [0.2614, 0.4307, 0.6169, 0.0133, 0.0, 0.4146, 0.2847] +2026-04-08 19:00:37.951474: Epoch time: 101.7 s +2026-04-08 19:00:38.959004: +2026-04-08 19:00:38.960376: Epoch 105 +2026-04-08 19:00:38.961534: Current learning rate: 0.00905 +2026-04-08 
19:02:20.740643: train_loss -0.0671 +2026-04-08 19:02:20.745074: val_loss -0.067 +2026-04-08 19:02:20.746615: Pseudo dice [0.3632, 0.4957, 0.4171, 0.0064, 0.0, 0.2088, 0.4038] +2026-04-08 19:02:20.748284: Epoch time: 101.78 s +2026-04-08 19:02:21.775413: +2026-04-08 19:02:21.789575: Epoch 106 +2026-04-08 19:02:21.790865: Current learning rate: 0.00904 +2026-04-08 19:04:03.403375: train_loss -0.0732 +2026-04-08 19:04:03.411879: val_loss -0.0614 +2026-04-08 19:04:03.428295: Pseudo dice [0.0914, 0.1019, 0.5057, 0.1161, 0.0, 0.7656, 0.5271] +2026-04-08 19:04:03.434606: Epoch time: 101.63 s +2026-04-08 19:04:03.438262: Yayy! New best EMA pseudo Dice: 0.2729 +2026-04-08 19:04:06.156814: +2026-04-08 19:04:06.159065: Epoch 107 +2026-04-08 19:04:06.160512: Current learning rate: 0.00903 +2026-04-08 19:05:47.934172: train_loss -0.0651 +2026-04-08 19:05:47.939005: val_loss -0.0573 +2026-04-08 19:05:47.941020: Pseudo dice [0.3387, 0.1068, 0.4872, 0.0623, 0.0, 0.4065, 0.0842] +2026-04-08 19:05:47.945745: Epoch time: 101.78 s +2026-04-08 19:05:49.004360: +2026-04-08 19:05:49.005759: Epoch 108 +2026-04-08 19:05:49.007071: Current learning rate: 0.00902 +2026-04-08 19:07:30.840792: train_loss -0.0626 +2026-04-08 19:07:30.845008: val_loss -0.0434 +2026-04-08 19:07:30.846641: Pseudo dice [0.4545, 0.3117, 0.2232, 0.0371, 0.0, 0.1303, 0.1814] +2026-04-08 19:07:30.848570: Epoch time: 101.84 s +2026-04-08 19:07:33.089786: +2026-04-08 19:07:33.093674: Epoch 109 +2026-04-08 19:07:33.095119: Current learning rate: 0.00901 +2026-04-08 19:09:14.845311: train_loss -0.0633 +2026-04-08 19:09:14.849702: val_loss -0.067 +2026-04-08 19:09:14.851414: Pseudo dice [0.3834, 0.6257, 0.5714, 0.0011, 0.0, 0.1101, 0.4663] +2026-04-08 19:09:14.855136: Epoch time: 101.76 s +2026-04-08 19:09:15.891951: +2026-04-08 19:09:15.896442: Epoch 110 +2026-04-08 19:09:15.904552: Current learning rate: 0.009 +2026-04-08 19:10:57.679096: train_loss -0.0724 +2026-04-08 19:10:57.685511: val_loss -0.0796 +2026-04-08 
19:10:57.686770: Pseudo dice [0.2656, 0.0638, 0.7456, 0.1068, 0.0, 0.2597, 0.6165] +2026-04-08 19:10:57.688332: Epoch time: 101.79 s +2026-04-08 19:10:58.707758: +2026-04-08 19:10:58.709586: Epoch 111 +2026-04-08 19:10:58.710803: Current learning rate: 0.009 +2026-04-08 19:12:40.492432: train_loss -0.0779 +2026-04-08 19:12:40.497997: val_loss -0.0716 +2026-04-08 19:12:40.499725: Pseudo dice [0.2342, 0.2497, 0.6096, 0.3019, 0.0, 0.1898, 0.6577] +2026-04-08 19:12:40.501561: Epoch time: 101.79 s +2026-04-08 19:12:41.695407: +2026-04-08 19:12:41.697196: Epoch 112 +2026-04-08 19:12:41.698888: Current learning rate: 0.00899 +2026-04-08 19:14:23.236307: train_loss -0.0843 +2026-04-08 19:14:23.240365: val_loss -0.0771 +2026-04-08 19:14:23.241843: Pseudo dice [0.2335, 0.505, 0.6857, 0.0316, 0.0, 0.3553, 0.4857] +2026-04-08 19:14:23.243581: Epoch time: 101.54 s +2026-04-08 19:14:23.244842: Yayy! New best EMA pseudo Dice: 0.278 +2026-04-08 19:14:25.868961: +2026-04-08 19:14:25.871457: Epoch 113 +2026-04-08 19:14:25.872719: Current learning rate: 0.00898 +2026-04-08 19:16:07.524421: train_loss -0.0794 +2026-04-08 19:16:07.528945: val_loss -0.0565 +2026-04-08 19:16:07.530581: Pseudo dice [0.2511, 0.7246, 0.5906, 0.0049, 0.0, 0.2836, 0.7437] +2026-04-08 19:16:07.532875: Epoch time: 101.66 s +2026-04-08 19:16:07.534073: Yayy! New best EMA pseudo Dice: 0.2874 +2026-04-08 19:16:10.067365: +2026-04-08 19:16:10.069707: Epoch 114 +2026-04-08 19:16:10.070955: Current learning rate: 0.00897 +2026-04-08 19:17:51.659537: train_loss -0.0851 +2026-04-08 19:17:51.665475: val_loss -0.0983 +2026-04-08 19:17:51.666765: Pseudo dice [0.2215, 0.2851, 0.617, 0.0001, 0.0, 0.4203, 0.707] +2026-04-08 19:17:51.668530: Epoch time: 101.6 s +2026-04-08 19:17:51.669732: Yayy! 
New best EMA pseudo Dice: 0.2908 +2026-04-08 19:17:54.288952: +2026-04-08 19:17:54.291323: Epoch 115 +2026-04-08 19:17:54.292466: Current learning rate: 0.00896 +2026-04-08 19:19:36.030744: train_loss -0.0748 +2026-04-08 19:19:36.035847: val_loss -0.0891 +2026-04-08 19:19:36.037322: Pseudo dice [0.419, 0.2545, 0.7614, 0.1638, 0.0, 0.2955, 0.3878] +2026-04-08 19:19:36.039145: Epoch time: 101.74 s +2026-04-08 19:19:36.040700: Yayy! New best EMA pseudo Dice: 0.2943 +2026-04-08 19:19:38.659376: +2026-04-08 19:19:38.661505: Epoch 116 +2026-04-08 19:19:38.662596: Current learning rate: 0.00895 +2026-04-08 19:21:21.219409: train_loss -0.0819 +2026-04-08 19:21:21.224513: val_loss -0.0896 +2026-04-08 19:21:21.226531: Pseudo dice [0.5358, 0.5781, 0.5341, 0.0028, 0.0, 0.435, 0.6045] +2026-04-08 19:21:21.228631: Epoch time: 102.56 s +2026-04-08 19:21:21.229937: Yayy! New best EMA pseudo Dice: 0.3033 +2026-04-08 19:21:23.663621: +2026-04-08 19:21:23.666160: Epoch 117 +2026-04-08 19:21:23.667298: Current learning rate: 0.00894 +2026-04-08 19:23:05.405430: train_loss -0.074 +2026-04-08 19:23:05.409925: val_loss -0.0681 +2026-04-08 19:23:05.411262: Pseudo dice [0.3573, 0.5319, 0.613, 0.0043, 0.0, 0.2734, 0.3891] +2026-04-08 19:23:05.413444: Epoch time: 101.75 s +2026-04-08 19:23:05.414931: Yayy! New best EMA pseudo Dice: 0.304 +2026-04-08 19:23:08.084805: +2026-04-08 19:23:08.098045: Epoch 118 +2026-04-08 19:23:08.104742: Current learning rate: 0.00893 +2026-04-08 19:24:49.716904: train_loss -0.0825 +2026-04-08 19:24:49.721395: val_loss -0.0445 +2026-04-08 19:24:49.723183: Pseudo dice [0.3602, 0.4947, 0.6091, 0.0365, 0.0, 0.2558, 0.6783] +2026-04-08 19:24:49.725042: Epoch time: 101.64 s +2026-04-08 19:24:49.726422: Yayy! 
New best EMA pseudo Dice: 0.3083 +2026-04-08 19:24:52.334278: +2026-04-08 19:24:52.340341: Epoch 119 +2026-04-08 19:24:52.342082: Current learning rate: 0.00892 +2026-04-08 19:26:33.987988: train_loss -0.0841 +2026-04-08 19:26:33.995301: val_loss -0.0901 +2026-04-08 19:26:33.997308: Pseudo dice [0.1689, 0.6407, 0.5899, 0.2676, 0.0, 0.6236, 0.5607] +2026-04-08 19:26:33.999634: Epoch time: 101.66 s +2026-04-08 19:26:34.001163: Yayy! New best EMA pseudo Dice: 0.3182 +2026-04-08 19:26:36.781424: +2026-04-08 19:26:36.783498: Epoch 120 +2026-04-08 19:26:36.785117: Current learning rate: 0.00891 +2026-04-08 19:28:18.854276: train_loss -0.0796 +2026-04-08 19:28:18.860877: val_loss -0.092 +2026-04-08 19:28:18.863105: Pseudo dice [0.3237, 0.6075, 0.6045, 0.392, 0.0, 0.4127, 0.4372] +2026-04-08 19:28:18.867309: Epoch time: 102.08 s +2026-04-08 19:28:18.869127: Yayy! New best EMA pseudo Dice: 0.3261 +2026-04-08 19:28:21.703105: +2026-04-08 19:28:21.705923: Epoch 121 +2026-04-08 19:28:21.707189: Current learning rate: 0.0089 +2026-04-08 19:30:03.350329: train_loss -0.0921 +2026-04-08 19:30:03.354515: val_loss -0.0894 +2026-04-08 19:30:03.356120: Pseudo dice [0.4365, 0.1445, 0.539, 0.2724, 0.0, 0.3895, 0.6377] +2026-04-08 19:30:03.358325: Epoch time: 101.65 s +2026-04-08 19:30:03.359689: Yayy! 
New best EMA pseudo Dice: 0.3281 +2026-04-08 19:30:06.059858: +2026-04-08 19:30:06.061852: Epoch 122 +2026-04-08 19:30:06.063179: Current learning rate: 0.00889 +2026-04-08 19:31:47.870937: train_loss -0.0886 +2026-04-08 19:31:47.876554: val_loss -0.0934 +2026-04-08 19:31:47.878269: Pseudo dice [0.3529, 0.0847, 0.4133, 0.1168, 0.0, 0.4379, 0.502] +2026-04-08 19:31:47.880872: Epoch time: 101.81 s +2026-04-08 19:31:48.926359: +2026-04-08 19:31:48.927827: Epoch 123 +2026-04-08 19:31:48.929063: Current learning rate: 0.00889 +2026-04-08 19:33:30.756193: train_loss -0.0708 +2026-04-08 19:33:30.765486: val_loss -0.0733 +2026-04-08 19:33:30.767451: Pseudo dice [0.2286, 0.1916, 0.6819, 0.2372, 0.0, 0.4585, 0.6774] +2026-04-08 19:33:30.769376: Epoch time: 101.83 s +2026-04-08 19:33:31.859414: +2026-04-08 19:33:31.863009: Epoch 124 +2026-04-08 19:33:31.866261: Current learning rate: 0.00888 +2026-04-08 19:35:13.643757: train_loss -0.0748 +2026-04-08 19:35:13.649798: val_loss -0.0831 +2026-04-08 19:35:13.651518: Pseudo dice [0.2719, 0.4783, 0.7259, 0.1983, 0.0, 0.6893, 0.2533] +2026-04-08 19:35:13.654403: Epoch time: 101.79 s +2026-04-08 19:35:13.656128: Yayy! New best EMA pseudo Dice: 0.3304 +2026-04-08 19:35:16.455338: +2026-04-08 19:35:16.461109: Epoch 125 +2026-04-08 19:35:16.462352: Current learning rate: 0.00887 +2026-04-08 19:36:58.378923: train_loss -0.0827 +2026-04-08 19:36:58.384390: val_loss -0.1013 +2026-04-08 19:36:58.386283: Pseudo dice [0.3605, 0.5995, 0.5372, 0.3443, 0.0, 0.7493, 0.6458] +2026-04-08 19:36:58.389016: Epoch time: 101.93 s +2026-04-08 19:36:58.390862: Yayy! 
New best EMA pseudo Dice: 0.3436 +2026-04-08 19:37:02.090334: +2026-04-08 19:37:02.092322: Epoch 126 +2026-04-08 19:37:02.093938: Current learning rate: 0.00886 +2026-04-08 19:38:44.148154: train_loss -0.0872 +2026-04-08 19:38:44.153947: val_loss -0.0722 +2026-04-08 19:38:44.155857: Pseudo dice [0.1989, 0.7975, 0.2099, 0.0218, 0.0, 0.4072, 0.5571] +2026-04-08 19:38:44.157906: Epoch time: 102.06 s +2026-04-08 19:38:45.187889: +2026-04-08 19:38:45.189294: Epoch 127 +2026-04-08 19:38:45.190552: Current learning rate: 0.00885 +2026-04-08 19:40:27.597361: train_loss -0.0893 +2026-04-08 19:40:27.603502: val_loss -0.0893 +2026-04-08 19:40:27.605466: Pseudo dice [0.5384, 0.718, 0.5564, 0.4526, 0.0, 0.5682, 0.6057] +2026-04-08 19:40:27.608778: Epoch time: 102.41 s +2026-04-08 19:40:27.611100: Yayy! New best EMA pseudo Dice: 0.3557 +2026-04-08 19:40:30.129370: +2026-04-08 19:40:30.131557: Epoch 128 +2026-04-08 19:40:30.132775: Current learning rate: 0.00884 +2026-04-08 19:42:11.776213: train_loss -0.0882 +2026-04-08 19:42:11.780547: val_loss -0.1003 +2026-04-08 19:42:11.782509: Pseudo dice [0.52, 0.3737, 0.6457, 0.1031, 0.0, 0.6109, 0.7057] +2026-04-08 19:42:11.784428: Epoch time: 101.65 s +2026-04-08 19:42:11.786007: Yayy! 
New best EMA pseudo Dice: 0.3624 +2026-04-08 19:42:14.319710: +2026-04-08 19:42:14.321928: Epoch 129 +2026-04-08 19:42:14.323142: Current learning rate: 0.00883 +2026-04-08 19:43:56.263828: train_loss -0.0974 +2026-04-08 19:43:56.268976: val_loss -0.1116 +2026-04-08 19:43:56.270550: Pseudo dice [0.1814, 0.375, 0.7408, 0.3332, 0.0273, 0.3436, 0.3705] +2026-04-08 19:43:56.272793: Epoch time: 101.95 s +2026-04-08 19:43:57.383853: +2026-04-08 19:43:57.385294: Epoch 130 +2026-04-08 19:43:57.386423: Current learning rate: 0.00882 +2026-04-08 19:45:39.527501: train_loss -0.0936 +2026-04-08 19:45:39.532163: val_loss -0.0904 +2026-04-08 19:45:39.533948: Pseudo dice [0.2107, 0.2456, 0.5495, 0.1823, 0.2074, 0.5048, 0.5001] +2026-04-08 19:45:39.537532: Epoch time: 102.15 s +2026-04-08 19:45:40.599400: +2026-04-08 19:45:40.601756: Epoch 131 +2026-04-08 19:45:40.603119: Current learning rate: 0.00881 +2026-04-08 19:47:22.606724: train_loss -0.0855 +2026-04-08 19:47:22.611741: val_loss -0.0411 +2026-04-08 19:47:22.614190: Pseudo dice [0.3956, 0.213, 0.1868, 0.4204, 0.1952, 0.1707, 0.3269] +2026-04-08 19:47:22.618533: Epoch time: 102.01 s +2026-04-08 19:47:23.696853: +2026-04-08 19:47:23.699127: Epoch 132 +2026-04-08 19:47:23.700498: Current learning rate: 0.0088 +2026-04-08 19:49:05.842233: train_loss -0.0862 +2026-04-08 19:49:05.846574: val_loss -0.0949 +2026-04-08 19:49:05.848160: Pseudo dice [0.35, 0.387, 0.608, 0.0075, 0.2299, 0.3915, 0.5706] +2026-04-08 19:49:05.849886: Epoch time: 102.15 s +2026-04-08 19:49:06.906630: +2026-04-08 19:49:06.907955: Epoch 133 +2026-04-08 19:49:06.909157: Current learning rate: 0.00879 +2026-04-08 19:50:48.821359: train_loss -0.0976 +2026-04-08 19:50:48.828620: val_loss -0.0952 +2026-04-08 19:50:48.830410: Pseudo dice [0.1078, 0.3469, 0.6009, 0.1608, 0.3824, 0.5133, 0.851] +2026-04-08 19:50:48.833375: Epoch time: 101.92 s +2026-04-08 19:50:49.899534: +2026-04-08 19:50:49.901063: Epoch 134 +2026-04-08 19:50:49.902248: Current learning rate: 
0.00879 +2026-04-08 19:52:32.539875: train_loss -0.0977 +2026-04-08 19:52:32.553912: val_loss -0.1099 +2026-04-08 19:52:32.555743: Pseudo dice [0.4756, 0.4172, 0.7826, 0.0698, 0.2806, 0.4699, 0.8205] +2026-04-08 19:52:32.559774: Epoch time: 102.64 s +2026-04-08 19:52:32.565004: Yayy! New best EMA pseudo Dice: 0.3699 +2026-04-08 19:52:35.085549: +2026-04-08 19:52:35.088022: Epoch 135 +2026-04-08 19:52:35.089359: Current learning rate: 0.00878 +2026-04-08 19:54:17.234681: train_loss -0.0854 +2026-04-08 19:54:17.239838: val_loss -0.1073 +2026-04-08 19:54:17.243091: Pseudo dice [0.1735, 0.6758, 0.7723, 0.0372, 0.3361, 0.5091, 0.4864] +2026-04-08 19:54:17.248221: Epoch time: 102.15 s +2026-04-08 19:54:17.249643: Yayy! New best EMA pseudo Dice: 0.3756 +2026-04-08 19:54:19.737370: +2026-04-08 19:54:19.739664: Epoch 136 +2026-04-08 19:54:19.740962: Current learning rate: 0.00877 +2026-04-08 19:56:01.498427: train_loss -0.1102 +2026-04-08 19:56:01.503729: val_loss -0.1053 +2026-04-08 19:56:01.505418: Pseudo dice [0.4696, 0.3834, 0.3061, 0.3242, 0.2112, 0.5199, 0.5034] +2026-04-08 19:56:01.507361: Epoch time: 101.76 s +2026-04-08 19:56:01.508952: Yayy! New best EMA pseudo Dice: 0.3769 +2026-04-08 19:56:04.324831: +2026-04-08 19:56:04.327818: Epoch 137 +2026-04-08 19:56:04.329073: Current learning rate: 0.00876 +2026-04-08 19:57:46.041103: train_loss -0.0972 +2026-04-08 19:57:46.046520: val_loss -0.1178 +2026-04-08 19:57:46.048979: Pseudo dice [0.5269, 0.3646, 0.4541, 0.1028, 0.326, 0.2265, 0.5661] +2026-04-08 19:57:46.051130: Epoch time: 101.72 s +2026-04-08 19:57:47.095574: +2026-04-08 19:57:47.104185: Epoch 138 +2026-04-08 19:57:47.108539: Current learning rate: 0.00875 +2026-04-08 19:59:29.507591: train_loss -0.0973 +2026-04-08 19:59:29.512664: val_loss -0.0979 +2026-04-08 19:59:29.515257: Pseudo dice [0.1965, 0.1494, 0.686, 0.4747, 0.2934, 0.5872, 0.3915] +2026-04-08 19:59:29.517086: Epoch time: 102.42 s +2026-04-08 19:59:29.521989: Yayy! 
New best EMA pseudo Dice: 0.378 +2026-04-08 19:59:32.273844: +2026-04-08 19:59:32.276121: Epoch 139 +2026-04-08 19:59:32.277379: Current learning rate: 0.00874 +2026-04-08 20:01:14.062296: train_loss -0.1031 +2026-04-08 20:01:14.066459: val_loss -0.1247 +2026-04-08 20:01:14.067791: Pseudo dice [0.4409, 0.1375, 0.6818, 0.5253, 0.2643, 0.5142, 0.6024] +2026-04-08 20:01:14.071605: Epoch time: 101.79 s +2026-04-08 20:01:14.073288: Yayy! New best EMA pseudo Dice: 0.3854 +2026-04-08 20:01:16.684962: +2026-04-08 20:01:16.686859: Epoch 140 +2026-04-08 20:01:16.688073: Current learning rate: 0.00873 +2026-04-08 20:02:58.415261: train_loss -0.0989 +2026-04-08 20:02:58.420250: val_loss -0.1049 +2026-04-08 20:02:58.421716: Pseudo dice [0.2574, 0.499, 0.4909, 0.4877, 0.2522, 0.6096, 0.7095] +2026-04-08 20:02:58.423937: Epoch time: 101.73 s +2026-04-08 20:02:58.425338: Yayy! New best EMA pseudo Dice: 0.3941 +2026-04-08 20:03:01.068273: +2026-04-08 20:03:01.071387: Epoch 141 +2026-04-08 20:03:01.072675: Current learning rate: 0.00872 +2026-04-08 20:04:42.662180: train_loss -0.1094 +2026-04-08 20:04:42.667449: val_loss -0.0912 +2026-04-08 20:04:42.668811: Pseudo dice [0.4216, 0.175, 0.6251, 0.1798, 0.3234, 0.2692, 0.3451] +2026-04-08 20:04:42.670832: Epoch time: 101.6 s +2026-04-08 20:04:43.737256: +2026-04-08 20:04:43.747464: Epoch 142 +2026-04-08 20:04:43.757125: Current learning rate: 0.00871 +2026-04-08 20:06:25.547496: train_loss -0.1109 +2026-04-08 20:06:25.552498: val_loss -0.0902 +2026-04-08 20:06:25.554826: Pseudo dice [0.5318, 0.4318, 0.2103, 0.2129, 0.1815, 0.5122, 0.3863] +2026-04-08 20:06:25.557823: Epoch time: 101.81 s +2026-04-08 20:06:26.636194: +2026-04-08 20:06:26.637954: Epoch 143 +2026-04-08 20:06:26.639328: Current learning rate: 0.0087 +2026-04-08 20:08:08.144555: train_loss -0.1065 +2026-04-08 20:08:08.167647: val_loss -0.114 +2026-04-08 20:08:08.169491: Pseudo dice [0.5751, 0.7328, 0.3569, 0.3828, 0.1952, 0.3622, 0.6566] +2026-04-08 20:08:08.171551: Epoch 
time: 101.51 s +2026-04-08 20:08:10.318702: +2026-04-08 20:08:10.320334: Epoch 144 +2026-04-08 20:08:10.321562: Current learning rate: 0.00869 +2026-04-08 20:09:52.034837: train_loss -0.1031 +2026-04-08 20:09:52.039913: val_loss -0.0943 +2026-04-08 20:09:52.043058: Pseudo dice [0.3668, 0.3827, 0.1901, 0.242, 0.3808, 0.5673, 0.5788] +2026-04-08 20:09:52.045756: Epoch time: 101.72 s +2026-04-08 20:09:53.140464: +2026-04-08 20:09:53.141973: Epoch 145 +2026-04-08 20:09:53.143440: Current learning rate: 0.00868 +2026-04-08 20:11:35.222605: train_loss -0.095 +2026-04-08 20:11:35.227321: val_loss -0.1345 +2026-04-08 20:11:35.228946: Pseudo dice [0.4162, 0.663, 0.6475, 0.2948, 0.3177, 0.6417, 0.4687] +2026-04-08 20:11:35.230966: Epoch time: 102.09 s +2026-04-08 20:11:35.232699: Yayy! New best EMA pseudo Dice: 0.4022 +2026-04-08 20:11:37.894113: +2026-04-08 20:11:37.896413: Epoch 146 +2026-04-08 20:11:37.897742: Current learning rate: 0.00868 +2026-04-08 20:13:19.883332: train_loss -0.0912 +2026-04-08 20:13:19.888038: val_loss -0.0986 +2026-04-08 20:13:19.889791: Pseudo dice [0.5688, 0.6189, 0.5635, 0.3195, 0.1749, 0.4219, 0.7827] +2026-04-08 20:13:19.892442: Epoch time: 101.99 s +2026-04-08 20:13:19.894015: Yayy! New best EMA pseudo Dice: 0.4112 +2026-04-08 20:13:22.335884: +2026-04-08 20:13:22.339477: Epoch 147 +2026-04-08 20:13:22.340902: Current learning rate: 0.00867 +2026-04-08 20:15:04.074435: train_loss -0.1038 +2026-04-08 20:15:04.078911: val_loss -0.1056 +2026-04-08 20:15:04.080408: Pseudo dice [0.5067, 0.5687, 0.5883, 0.2647, 0.2346, 0.4101, 0.7567] +2026-04-08 20:15:04.082574: Epoch time: 101.74 s +2026-04-08 20:15:04.084116: Yayy! 
New best EMA pseudo Dice: 0.4177 +2026-04-08 20:15:06.754717: +2026-04-08 20:15:06.757067: Epoch 148 +2026-04-08 20:15:06.758261: Current learning rate: 0.00866 +2026-04-08 20:16:48.271842: train_loss -0.1073 +2026-04-08 20:16:48.276286: val_loss -0.0923 +2026-04-08 20:16:48.278027: Pseudo dice [0.3388, 0.2466, 0.5322, 0.5522, 0.3951, 0.3498, 0.7166] +2026-04-08 20:16:48.280521: Epoch time: 101.52 s +2026-04-08 20:16:48.282201: Yayy! New best EMA pseudo Dice: 0.4207 +2026-04-08 20:16:50.610312: +2026-04-08 20:16:50.614088: Epoch 149 +2026-04-08 20:16:50.615473: Current learning rate: 0.00865 +2026-04-08 20:18:32.259336: train_loss -0.1019 +2026-04-08 20:18:32.264142: val_loss -0.1211 +2026-04-08 20:18:32.266226: Pseudo dice [0.6042, 0.3829, 0.6009, 0.5188, 0.4207, 0.3353, 0.4014] +2026-04-08 20:18:32.268342: Epoch time: 101.65 s +2026-04-08 20:18:33.847606: Yayy! New best EMA pseudo Dice: 0.4252 +2026-04-08 20:18:36.457492: +2026-04-08 20:18:36.458766: Epoch 150 +2026-04-08 20:18:36.459972: Current learning rate: 0.00864 +2026-04-08 20:20:18.201482: train_loss -0.1238 +2026-04-08 20:20:18.205787: val_loss -0.1075 +2026-04-08 20:20:18.207272: Pseudo dice [0.1907, 0.4032, 0.5775, 0.5102, 0.3659, 0.4391, 0.4814] +2026-04-08 20:20:18.210991: Epoch time: 101.75 s +2026-04-08 20:20:19.282929: +2026-04-08 20:20:19.284704: Epoch 151 +2026-04-08 20:20:19.285953: Current learning rate: 0.00863 +2026-04-08 20:22:01.241056: train_loss -0.1248 +2026-04-08 20:22:01.246588: val_loss -0.1086 +2026-04-08 20:22:01.248718: Pseudo dice [0.1358, 0.4088, 0.7244, 0.5617, 0.4008, 0.3569, 0.6888] +2026-04-08 20:22:01.253341: Epoch time: 101.96 s +2026-04-08 20:22:01.258714: Yayy! 
New best EMA pseudo Dice: 0.4294 +2026-04-08 20:22:03.816194: +2026-04-08 20:22:03.818389: Epoch 152 +2026-04-08 20:22:03.823514: Current learning rate: 0.00862 +2026-04-08 20:23:46.540033: train_loss -0.1117 +2026-04-08 20:23:46.544464: val_loss -0.1151 +2026-04-08 20:23:46.546039: Pseudo dice [0.5002, 0.2447, 0.5887, 0.5336, 0.3866, 0.6891, 0.8346] +2026-04-08 20:23:46.547827: Epoch time: 102.73 s +2026-04-08 20:23:46.549139: Yayy! New best EMA pseudo Dice: 0.4404 +2026-04-08 20:23:49.087125: +2026-04-08 20:23:49.089613: Epoch 153 +2026-04-08 20:23:49.090818: Current learning rate: 0.00861 +2026-04-08 20:25:30.851166: train_loss -0.1204 +2026-04-08 20:25:30.855643: val_loss -0.1075 +2026-04-08 20:25:30.857544: Pseudo dice [0.4693, 0.1007, 0.7219, 0.1908, 0.2606, 0.279, 0.8215] +2026-04-08 20:25:30.859582: Epoch time: 101.77 s +2026-04-08 20:25:31.984855: +2026-04-08 20:25:31.986215: Epoch 154 +2026-04-08 20:25:31.987571: Current learning rate: 0.0086 +2026-04-08 20:27:13.964895: train_loss -0.1214 +2026-04-08 20:27:13.970574: val_loss -0.1194 +2026-04-08 20:27:13.972228: Pseudo dice [0.6307, 0.0708, 0.831, 0.6865, 0.4252, 0.2596, 0.7202] +2026-04-08 20:27:13.976090: Epoch time: 101.98 s +2026-04-08 20:27:13.977999: Yayy! 
New best EMA pseudo Dice: 0.4451 +2026-04-08 20:27:16.496770: +2026-04-08 20:27:16.514736: Epoch 155 +2026-04-08 20:27:16.528988: Current learning rate: 0.00859 +2026-04-08 20:28:58.786683: train_loss -0.1092 +2026-04-08 20:28:58.791424: val_loss -0.0857 +2026-04-08 20:28:58.792793: Pseudo dice [0.1611, 0.2392, 0.7507, 0.3051, 0.3799, 0.2568, 0.2529] +2026-04-08 20:28:58.794785: Epoch time: 102.29 s +2026-04-08 20:28:59.893040: +2026-04-08 20:28:59.895426: Epoch 156 +2026-04-08 20:28:59.896879: Current learning rate: 0.00858 +2026-04-08 20:30:41.597072: train_loss -0.1047 +2026-04-08 20:30:41.601895: val_loss -0.0982 +2026-04-08 20:30:41.603440: Pseudo dice [0.5424, 0.5332, 0.6569, 0.3655, 0.254, 0.3023, 0.5059] +2026-04-08 20:30:41.605368: Epoch time: 101.71 s +2026-04-08 20:30:42.701178: +2026-04-08 20:30:42.702704: Epoch 157 +2026-04-08 20:30:42.704026: Current learning rate: 0.00858 +2026-04-08 20:32:24.422848: train_loss -0.1034 +2026-04-08 20:32:24.428052: val_loss -0.1082 +2026-04-08 20:32:24.429728: Pseudo dice [0.3869, 0.3141, 0.6076, 0.2596, 0.2756, 0.1786, 0.4502] +2026-04-08 20:32:24.434304: Epoch time: 101.72 s +2026-04-08 20:32:25.530346: +2026-04-08 20:32:25.532311: Epoch 158 +2026-04-08 20:32:25.533902: Current learning rate: 0.00857 +2026-04-08 20:34:07.299212: train_loss -0.1171 +2026-04-08 20:34:07.303956: val_loss -0.0913 +2026-04-08 20:34:07.305519: Pseudo dice [0.314, 0.5765, 0.4771, 0.0703, 0.4835, 0.3902, 0.653] +2026-04-08 20:34:07.309250: Epoch time: 101.77 s +2026-04-08 20:34:08.418754: +2026-04-08 20:34:08.420338: Epoch 159 +2026-04-08 20:34:08.421554: Current learning rate: 0.00856 +2026-04-08 20:35:50.324675: train_loss -0.1078 +2026-04-08 20:35:50.330397: val_loss -0.1111 +2026-04-08 20:35:50.333888: Pseudo dice [0.4435, 0.1375, 0.5018, 0.3186, 0.4114, 0.5707, 0.7539] +2026-04-08 20:35:50.335986: Epoch time: 101.91 s +2026-04-08 20:35:51.443492: +2026-04-08 20:35:51.445300: Epoch 160 +2026-04-08 20:35:51.446617: Current learning rate: 
0.00855 +2026-04-08 20:37:33.181701: train_loss -0.1343 +2026-04-08 20:37:33.187600: val_loss -0.1061 +2026-04-08 20:37:33.193947: Pseudo dice [0.4204, 0.2532, 0.6085, 0.3401, 0.3316, 0.5155, 0.5152] +2026-04-08 20:37:33.196086: Epoch time: 101.74 s +2026-04-08 20:37:34.289624: +2026-04-08 20:37:34.290998: Epoch 161 +2026-04-08 20:37:34.292167: Current learning rate: 0.00854 +2026-04-08 20:39:17.358633: train_loss -0.1277 +2026-04-08 20:39:17.363619: val_loss -0.1214 +2026-04-08 20:39:17.365435: Pseudo dice [0.6324, 0.2029, 0.6543, 0.6335, 0.3646, 0.4214, 0.546] +2026-04-08 20:39:17.367296: Epoch time: 103.07 s +2026-04-08 20:39:18.462034: +2026-04-08 20:39:18.463402: Epoch 162 +2026-04-08 20:39:18.464635: Current learning rate: 0.00853 +2026-04-08 20:41:00.302570: train_loss -0.1323 +2026-04-08 20:41:00.308929: val_loss -0.1054 +2026-04-08 20:41:00.310525: Pseudo dice [0.3984, 0.668, 0.6451, 0.5099, 0.4849, 0.3763, 0.6321] +2026-04-08 20:41:00.314258: Epoch time: 101.84 s +2026-04-08 20:41:01.419935: +2026-04-08 20:41:01.421442: Epoch 163 +2026-04-08 20:41:01.422669: Current learning rate: 0.00852 +2026-04-08 20:42:43.120645: train_loss -0.1253 +2026-04-08 20:42:43.125782: val_loss -0.0994 +2026-04-08 20:42:43.127399: Pseudo dice [0.7239, 0.7209, 0.7651, 0.3437, 0.284, 0.6643, 0.4067] +2026-04-08 20:42:43.129926: Epoch time: 101.7 s +2026-04-08 20:42:43.131484: Yayy! 
New best EMA pseudo Dice: 0.4563 +2026-04-08 20:42:45.813085: +2026-04-08 20:42:45.815796: Epoch 164 +2026-04-08 20:42:45.817019: Current learning rate: 0.00851 +2026-04-08 20:44:27.439758: train_loss -0.1331 +2026-04-08 20:44:27.447211: val_loss -0.1124 +2026-04-08 20:44:27.449040: Pseudo dice [0.3608, 0.0352, 0.7636, 0.3475, 0.3059, 0.3488, 0.6205] +2026-04-08 20:44:27.451235: Epoch time: 101.63 s +2026-04-08 20:44:28.507767: +2026-04-08 20:44:28.509275: Epoch 165 +2026-04-08 20:44:28.510575: Current learning rate: 0.0085 +2026-04-08 20:46:10.259049: train_loss -0.1035 +2026-04-08 20:46:10.268758: val_loss -0.0808 +2026-04-08 20:46:10.271485: Pseudo dice [0.3756, 0.0528, 0.5379, 0.3244, 0.2944, 0.4416, 0.364] +2026-04-08 20:46:10.275577: Epoch time: 101.75 s +2026-04-08 20:46:11.335373: +2026-04-08 20:46:11.337364: Epoch 166 +2026-04-08 20:46:11.339075: Current learning rate: 0.00849 +2026-04-08 20:47:53.018212: train_loss -0.1188 +2026-04-08 20:47:53.023569: val_loss -0.1294 +2026-04-08 20:47:53.025290: Pseudo dice [0.3955, 0.5378, 0.6149, 0.0986, 0.3862, 0.7551, 0.7092] +2026-04-08 20:47:53.027646: Epoch time: 101.69 s +2026-04-08 20:47:54.096118: +2026-04-08 20:47:54.097711: Epoch 167 +2026-04-08 20:47:54.099221: Current learning rate: 0.00848 +2026-04-08 20:49:35.766854: train_loss -0.1158 +2026-04-08 20:49:35.772115: val_loss -0.1297 +2026-04-08 20:49:35.773664: Pseudo dice [0.3463, 0.3403, 0.7564, 0.1667, 0.3883, 0.3988, 0.795] +2026-04-08 20:49:35.775355: Epoch time: 101.67 s +2026-04-08 20:49:36.832960: +2026-04-08 20:49:36.834878: Epoch 168 +2026-04-08 20:49:36.836048: Current learning rate: 0.00847 +2026-04-08 20:51:18.633768: train_loss -0.1238 +2026-04-08 20:51:18.639016: val_loss -0.1041 +2026-04-08 20:51:18.640996: Pseudo dice [0.496, 0.1985, 0.621, 0.0002, 0.3436, 0.557, 0.6343] +2026-04-08 20:51:18.644653: Epoch time: 101.8 s +2026-04-08 20:51:19.738400: +2026-04-08 20:51:19.739791: Epoch 169 +2026-04-08 20:51:19.740975: Current learning rate: 
0.00847 +2026-04-08 20:53:01.531368: train_loss -0.1196 +2026-04-08 20:53:01.536952: val_loss -0.1136 +2026-04-08 20:53:01.538541: Pseudo dice [0.6594, 0.1157, 0.6301, 0.624, 0.3871, 0.2455, 0.3805] +2026-04-08 20:53:01.540976: Epoch time: 101.8 s +2026-04-08 20:53:02.615544: +2026-04-08 20:53:02.617573: Epoch 170 +2026-04-08 20:53:02.619385: Current learning rate: 0.00846 +2026-04-08 20:54:44.417282: train_loss -0.1312 +2026-04-08 20:54:44.422912: val_loss -0.1067 +2026-04-08 20:54:44.424606: Pseudo dice [0.5498, 0.6953, 0.7159, 0.3072, 0.2212, 0.6232, 0.5277] +2026-04-08 20:54:44.426610: Epoch time: 101.8 s +2026-04-08 20:54:45.493821: +2026-04-08 20:54:45.495167: Epoch 171 +2026-04-08 20:54:45.496559: Current learning rate: 0.00845 +2026-04-08 20:56:27.212924: train_loss -0.1264 +2026-04-08 20:56:27.219578: val_loss -0.1181 +2026-04-08 20:56:27.221153: Pseudo dice [0.6198, 0.7701, 0.6132, 0.1286, 0.3693, 0.451, 0.7737] +2026-04-08 20:56:27.222976: Epoch time: 101.72 s +2026-04-08 20:56:27.224418: Yayy! New best EMA pseudo Dice: 0.4579 +2026-04-08 20:56:30.060320: +2026-04-08 20:56:30.062852: Epoch 172 +2026-04-08 20:56:30.064777: Current learning rate: 0.00844 +2026-04-08 20:58:12.291564: train_loss -0.119 +2026-04-08 20:58:12.296457: val_loss -0.1229 +2026-04-08 20:58:12.298032: Pseudo dice [0.5733, 0.7274, 0.7521, 0.2088, 0.3047, 0.7063, 0.4475] +2026-04-08 20:58:12.299917: Epoch time: 102.23 s +2026-04-08 20:58:12.301564: Yayy! New best EMA pseudo Dice: 0.4653 +2026-04-08 20:58:14.749382: +2026-04-08 20:58:14.751240: Epoch 173 +2026-04-08 20:58:14.752604: Current learning rate: 0.00843 +2026-04-08 20:59:57.689616: train_loss -0.1113 +2026-04-08 20:59:57.694861: val_loss -0.1424 +2026-04-08 20:59:57.696863: Pseudo dice [0.2986, 0.4433, 0.7738, 0.664, 0.5333, 0.7546, 0.578] +2026-04-08 20:59:57.700849: Epoch time: 102.94 s +2026-04-08 20:59:57.702981: Yayy! 
New best EMA pseudo Dice: 0.4765 +2026-04-08 21:00:00.188242: +2026-04-08 21:00:00.190433: Epoch 174 +2026-04-08 21:00:00.191764: Current learning rate: 0.00842 +2026-04-08 21:01:42.046483: train_loss -0.1149 +2026-04-08 21:01:42.050824: val_loss -0.1182 +2026-04-08 21:01:42.052729: Pseudo dice [0.6491, 0.2397, 0.528, 0.0588, 0.2749, 0.7066, 0.8296] +2026-04-08 21:01:42.054986: Epoch time: 101.86 s +2026-04-08 21:01:43.148211: +2026-04-08 21:01:43.149687: Epoch 175 +2026-04-08 21:01:43.151044: Current learning rate: 0.00841 +2026-04-08 21:03:24.926729: train_loss -0.1113 +2026-04-08 21:03:24.932210: val_loss -0.1136 +2026-04-08 21:03:24.933968: Pseudo dice [0.4871, 0.4473, 0.6077, 0.483, 0.45, 0.239, 0.6616] +2026-04-08 21:03:24.935989: Epoch time: 101.78 s +2026-04-08 21:03:26.012173: +2026-04-08 21:03:26.017527: Epoch 176 +2026-04-08 21:03:26.020995: Current learning rate: 0.0084 +2026-04-08 21:05:07.791167: train_loss -0.1253 +2026-04-08 21:05:07.795303: val_loss -0.1445 +2026-04-08 21:05:07.797095: Pseudo dice [0.6651, 0.7729, 0.6559, 0.6777, 0.5209, 0.4561, 0.8502] +2026-04-08 21:05:07.798823: Epoch time: 101.78 s +2026-04-08 21:05:07.800564: Yayy! New best EMA pseudo Dice: 0.4945 +2026-04-08 21:05:10.190012: +2026-04-08 21:05:10.192858: Epoch 177 +2026-04-08 21:05:10.194209: Current learning rate: 0.00839 +2026-04-08 21:06:52.239335: train_loss -0.1313 +2026-04-08 21:06:52.245507: val_loss -0.1073 +2026-04-08 21:06:52.247620: Pseudo dice [0.5595, 0.3743, 0.6788, 0.6725, 0.4375, 0.6828, 0.7193] +2026-04-08 21:06:52.249220: Epoch time: 102.05 s +2026-04-08 21:06:52.250718: Yayy! 
New best EMA pseudo Dice: 0.504 +2026-04-08 21:06:54.672901: +2026-04-08 21:06:54.675352: Epoch 178 +2026-04-08 21:06:54.677294: Current learning rate: 0.00838 +2026-04-08 21:08:37.530179: train_loss -0.1153 +2026-04-08 21:08:37.534484: val_loss -0.1364 +2026-04-08 21:08:37.535918: Pseudo dice [0.6785, 0.7126, 0.711, 0.143, 0.4878, 0.4878, 0.638] +2026-04-08 21:08:37.537641: Epoch time: 102.86 s +2026-04-08 21:08:37.539122: Yayy! New best EMA pseudo Dice: 0.5087 +2026-04-08 21:08:40.836955: +2026-04-08 21:08:40.839306: Epoch 179 +2026-04-08 21:08:40.840696: Current learning rate: 0.00837 +2026-04-08 21:10:22.397751: train_loss -0.1416 +2026-04-08 21:10:22.403092: val_loss -0.151 +2026-04-08 21:10:22.405146: Pseudo dice [0.478, 0.7932, 0.7997, 0.6566, 0.48, 0.5175, 0.6797] +2026-04-08 21:10:22.407242: Epoch time: 101.56 s +2026-04-08 21:10:22.409030: Yayy! New best EMA pseudo Dice: 0.5208 +2026-04-08 21:10:24.836552: +2026-04-08 21:10:24.838943: Epoch 180 +2026-04-08 21:10:24.840081: Current learning rate: 0.00836 +2026-04-08 21:12:06.495372: train_loss -0.1545 +2026-04-08 21:12:06.502671: val_loss -0.1388 +2026-04-08 21:12:06.504588: Pseudo dice [0.6289, 0.545, 0.8003, 0.319, 0.4292, 0.7098, 0.7417] +2026-04-08 21:12:06.507220: Epoch time: 101.66 s +2026-04-08 21:12:06.508739: Yayy! New best EMA pseudo Dice: 0.5283 +2026-04-08 21:12:08.977792: +2026-04-08 21:12:08.979639: Epoch 181 +2026-04-08 21:12:08.980811: Current learning rate: 0.00836 +2026-04-08 21:13:50.808568: train_loss -0.123 +2026-04-08 21:13:50.813272: val_loss -0.1307 +2026-04-08 21:13:50.814805: Pseudo dice [0.7202, 0.749, 0.8577, 0.6533, 0.3084, 0.205, 0.6896] +2026-04-08 21:13:50.818791: Epoch time: 101.83 s +2026-04-08 21:13:50.820535: Yayy! 
New best EMA pseudo Dice: 0.5353 +2026-04-08 21:13:53.192863: +2026-04-08 21:13:53.194590: Epoch 182 +2026-04-08 21:13:53.195778: Current learning rate: 0.00835 +2026-04-08 21:15:35.006321: train_loss -0.1278 +2026-04-08 21:15:35.012264: val_loss -0.1054 +2026-04-08 21:15:35.014983: Pseudo dice [0.5369, 0.1902, 0.7361, 0.5406, 0.3503, 0.4478, 0.69] +2026-04-08 21:15:35.018875: Epoch time: 101.82 s +2026-04-08 21:15:36.102412: +2026-04-08 21:15:36.103762: Epoch 183 +2026-04-08 21:15:36.104986: Current learning rate: 0.00834 +2026-04-08 21:17:17.816714: train_loss -0.1244 +2026-04-08 21:17:17.821026: val_loss -0.0892 +2026-04-08 21:17:17.822719: Pseudo dice [0.4049, 0.5881, 0.4806, 0.615, 0.3398, 0.5904, 0.3074] +2026-04-08 21:17:17.824498: Epoch time: 101.72 s +2026-04-08 21:17:18.903414: +2026-04-08 21:17:18.905075: Epoch 184 +2026-04-08 21:17:18.906360: Current learning rate: 0.00833 +2026-04-08 21:19:00.691870: train_loss -0.1362 +2026-04-08 21:19:00.697913: val_loss -0.1162 +2026-04-08 21:19:00.699912: Pseudo dice [0.4871, 0.5453, 0.5263, 0.13, 0.3541, 0.3045, 0.7649] +2026-04-08 21:19:00.701859: Epoch time: 101.79 s +2026-04-08 21:19:01.788497: +2026-04-08 21:19:01.789820: Epoch 185 +2026-04-08 21:19:01.791139: Current learning rate: 0.00832 +2026-04-08 21:20:43.785331: train_loss -0.138 +2026-04-08 21:20:43.790198: val_loss -0.1248 +2026-04-08 21:20:43.791972: Pseudo dice [0.6928, 0.4518, 0.7873, 0.1535, 0.4494, 0.3061, 0.6217] +2026-04-08 21:20:43.794487: Epoch time: 102.0 s +2026-04-08 21:20:44.885015: +2026-04-08 21:20:44.887447: Epoch 186 +2026-04-08 21:20:44.889141: Current learning rate: 0.00831 +2026-04-08 21:22:26.556914: train_loss -0.1302 +2026-04-08 21:22:26.561313: val_loss -0.117 +2026-04-08 21:22:26.563113: Pseudo dice [0.4627, 0.773, 0.5181, 0.408, 0.4784, 0.1786, 0.1167] +2026-04-08 21:22:26.565790: Epoch time: 101.68 s +2026-04-08 21:22:27.642915: +2026-04-08 21:22:27.644454: Epoch 187 +2026-04-08 21:22:27.645712: Current learning rate: 0.0083 
+2026-04-08 21:24:09.811386: train_loss -0.1278 +2026-04-08 21:24:09.815536: val_loss -0.1164 +2026-04-08 21:24:09.817707: Pseudo dice [0.7007, 0.8113, 0.3756, 0.4211, 0.477, 0.3037, 0.5979] +2026-04-08 21:24:09.819674: Epoch time: 102.17 s +2026-04-08 21:24:10.929469: +2026-04-08 21:24:10.931097: Epoch 188 +2026-04-08 21:24:10.932700: Current learning rate: 0.00829 +2026-04-08 21:25:52.981560: train_loss -0.1131 +2026-04-08 21:25:52.988122: val_loss -0.119 +2026-04-08 21:25:52.997880: Pseudo dice [0.7048, 0.7479, 0.6725, 0.1179, 0.2726, 0.1732, 0.3171] +2026-04-08 21:25:52.999701: Epoch time: 102.06 s +2026-04-08 21:25:54.094423: +2026-04-08 21:25:54.096095: Epoch 189 +2026-04-08 21:25:54.098412: Current learning rate: 0.00828 +2026-04-08 21:27:35.937178: train_loss -0.1165 +2026-04-08 21:27:35.944405: val_loss -0.095 +2026-04-08 21:27:35.946925: Pseudo dice [0.4993, 0.6498, 0.4495, 0.0628, 0.2835, 0.5981, 0.2702] +2026-04-08 21:27:35.949238: Epoch time: 101.85 s +2026-04-08 21:27:37.030534: +2026-04-08 21:27:37.032952: Epoch 190 +2026-04-08 21:27:37.034421: Current learning rate: 0.00827 +2026-04-08 21:29:19.342276: train_loss -0.1222 +2026-04-08 21:29:19.347704: val_loss -0.101 +2026-04-08 21:29:19.349707: Pseudo dice [0.3871, 0.0645, 0.7384, 0.5535, 0.3041, 0.5227, 0.5265] +2026-04-08 21:29:19.351782: Epoch time: 102.31 s +2026-04-08 21:29:20.430505: +2026-04-08 21:29:20.432813: Epoch 191 +2026-04-08 21:29:20.434771: Current learning rate: 0.00826 +2026-04-08 21:31:02.850211: train_loss -0.1233 +2026-04-08 21:31:02.856671: val_loss -0.1132 +2026-04-08 21:31:02.858210: Pseudo dice [0.5466, 0.7057, 0.7028, 0.3373, 0.322, 0.333, 0.7989] +2026-04-08 21:31:02.860098: Epoch time: 102.42 s +2026-04-08 21:31:04.011008: +2026-04-08 21:31:04.014010: Epoch 192 +2026-04-08 21:31:04.016139: Current learning rate: 0.00825 +2026-04-08 21:32:45.634156: train_loss -0.1257 +2026-04-08 21:32:45.639674: val_loss -0.1431 +2026-04-08 21:32:45.641529: Pseudo dice [0.4989, 0.2009, 
0.718, 0.5793, 0.5021, 0.4369, 0.6883] +2026-04-08 21:32:45.644083: Epoch time: 101.63 s +2026-04-08 21:32:46.725951: +2026-04-08 21:32:46.727356: Epoch 193 +2026-04-08 21:32:46.728536: Current learning rate: 0.00824 +2026-04-08 21:34:28.354831: train_loss -0.1346 +2026-04-08 21:34:28.360273: val_loss -0.1185 +2026-04-08 21:34:28.362060: Pseudo dice [0.8023, 0.0772, 0.58, 0.4961, 0.4978, 0.2938, 0.7064] +2026-04-08 21:34:28.363917: Epoch time: 101.63 s +2026-04-08 21:34:29.450488: +2026-04-08 21:34:29.451843: Epoch 194 +2026-04-08 21:34:29.453176: Current learning rate: 0.00824 +2026-04-08 21:36:11.141571: train_loss -0.1097 +2026-04-08 21:36:11.146872: val_loss -0.1157 +2026-04-08 21:36:11.148579: Pseudo dice [0.7318, 0.5883, 0.6649, 0.2833, 0.5244, 0.4794, 0.4458] +2026-04-08 21:36:11.151082: Epoch time: 101.69 s +2026-04-08 21:36:12.251364: +2026-04-08 21:36:12.252807: Epoch 195 +2026-04-08 21:36:12.253981: Current learning rate: 0.00823 +2026-04-08 21:37:53.863537: train_loss -0.123 +2026-04-08 21:37:53.869459: val_loss -0.135 +2026-04-08 21:37:53.871095: Pseudo dice [0.2702, 0.2239, 0.8357, 0.6366, 0.4661, 0.4091, 0.7396] +2026-04-08 21:37:53.875363: Epoch time: 101.62 s +2026-04-08 21:37:54.979563: +2026-04-08 21:37:54.981826: Epoch 196 +2026-04-08 21:37:54.983266: Current learning rate: 0.00822 +2026-04-08 21:39:36.657283: train_loss -0.1335 +2026-04-08 21:39:36.661926: val_loss -0.1413 +2026-04-08 21:39:36.663778: Pseudo dice [0.6458, 0.3765, 0.7524, 0.5448, 0.4365, 0.6755, 0.7525] +2026-04-08 21:39:36.666450: Epoch time: 101.68 s +2026-04-08 21:39:37.760142: +2026-04-08 21:39:37.761510: Epoch 197 +2026-04-08 21:39:37.762773: Current learning rate: 0.00821 +2026-04-08 21:41:19.486296: train_loss -0.1451 +2026-04-08 21:41:19.491851: val_loss -0.1429 +2026-04-08 21:41:19.494149: Pseudo dice [0.619, 0.6766, 0.8024, 0.0155, 0.378, 0.4087, 0.8388] +2026-04-08 21:41:19.496594: Epoch time: 101.73 s +2026-04-08 21:41:21.715266: +2026-04-08 21:41:21.717048: Epoch 
198 +2026-04-08 21:41:21.718557: Current learning rate: 0.0082 +2026-04-08 21:43:03.434365: train_loss -0.1362 +2026-04-08 21:43:03.441534: val_loss -0.1659 +2026-04-08 21:43:03.443944: Pseudo dice [0.7095, 0.8275, 0.6868, 0.3406, 0.5049, 0.5789, 0.847] +2026-04-08 21:43:03.446580: Epoch time: 101.72 s +2026-04-08 21:43:04.542846: +2026-04-08 21:43:04.545140: Epoch 199 +2026-04-08 21:43:04.547492: Current learning rate: 0.00819 +2026-04-08 21:44:46.334797: train_loss -0.1336 +2026-04-08 21:44:46.340991: val_loss -0.1149 +2026-04-08 21:44:46.343471: Pseudo dice [0.4097, 0.802, 0.8106, 0.5222, 0.3764, 0.2908, 0.4526] +2026-04-08 21:44:46.345745: Epoch time: 101.79 s +2026-04-08 21:44:48.819931: +2026-04-08 21:44:48.822133: Epoch 200 +2026-04-08 21:44:48.823897: Current learning rate: 0.00818 +2026-04-08 21:46:30.681634: train_loss -0.1384 +2026-04-08 21:46:30.686606: val_loss -0.1272 +2026-04-08 21:46:30.688356: Pseudo dice [0.4846, 0.5637, 0.6203, 0.2969, 0.3998, 0.3574, 0.6638] +2026-04-08 21:46:30.692897: Epoch time: 101.86 s +2026-04-08 21:46:31.801896: +2026-04-08 21:46:31.803589: Epoch 201 +2026-04-08 21:46:31.804959: Current learning rate: 0.00817 +2026-04-08 21:48:13.609976: train_loss -0.1372 +2026-04-08 21:48:13.616510: val_loss -0.1346 +2026-04-08 21:48:13.618457: Pseudo dice [0.3491, 0.4849, 0.8037, 0.3297, 0.4159, 0.7276, 0.7079] +2026-04-08 21:48:13.622313: Epoch time: 101.81 s +2026-04-08 21:48:14.734658: +2026-04-08 21:48:14.736211: Epoch 202 +2026-04-08 21:48:14.737410: Current learning rate: 0.00816 +2026-04-08 21:49:56.992248: train_loss -0.1482 +2026-04-08 21:49:56.996980: val_loss -0.1257 +2026-04-08 21:49:56.999362: Pseudo dice [0.651, 0.5225, 0.7255, 0.443, 0.4829, 0.6042, 0.7649] +2026-04-08 21:49:57.001930: Epoch time: 102.26 s +2026-04-08 21:49:58.111382: +2026-04-08 21:49:58.113434: Epoch 203 +2026-04-08 21:49:58.114717: Current learning rate: 0.00815 +2026-04-08 21:51:40.776411: train_loss -0.1197 +2026-04-08 21:51:40.781755: val_loss 
-0.1128 +2026-04-08 21:51:40.783507: Pseudo dice [0.4057, 0.679, 0.7101, 0.0015, 0.4811, 0.3752, 0.673] +2026-04-08 21:51:40.785544: Epoch time: 102.67 s +2026-04-08 21:51:41.876416: +2026-04-08 21:51:41.880297: Epoch 204 +2026-04-08 21:51:41.890083: Current learning rate: 0.00814 +2026-04-08 21:53:23.887146: train_loss -0.1177 +2026-04-08 21:53:23.891538: val_loss -0.1405 +2026-04-08 21:53:23.893237: Pseudo dice [0.8153, 0.7459, 0.6948, 0.5151, 0.4562, 0.5034, 0.7285] +2026-04-08 21:53:23.895193: Epoch time: 102.01 s +2026-04-08 21:53:23.896784: Yayy! New best EMA pseudo Dice: 0.5359 +2026-04-08 21:53:26.398758: +2026-04-08 21:53:26.401148: Epoch 205 +2026-04-08 21:53:26.402377: Current learning rate: 0.00813 +2026-04-08 21:55:08.178782: train_loss -0.1245 +2026-04-08 21:55:08.196168: val_loss -0.1321 +2026-04-08 21:55:08.199273: Pseudo dice [0.7537, 0.7582, 0.4834, 0.2985, 0.439, 0.5038, 0.6333] +2026-04-08 21:55:08.204694: Epoch time: 101.78 s +2026-04-08 21:55:08.207246: Yayy! New best EMA pseudo Dice: 0.5376 +2026-04-08 21:55:10.640729: +2026-04-08 21:55:10.642617: Epoch 206 +2026-04-08 21:55:10.643891: Current learning rate: 0.00813 +2026-04-08 21:56:52.885502: train_loss -0.1277 +2026-04-08 21:56:52.890023: val_loss -0.1471 +2026-04-08 21:56:52.891674: Pseudo dice [0.7691, 0.8567, 0.735, 0.3475, 0.5927, 0.4136, 0.3171] +2026-04-08 21:56:52.893524: Epoch time: 102.25 s +2026-04-08 21:56:52.895533: Yayy! New best EMA pseudo Dice: 0.5415 +2026-04-08 21:56:55.569887: +2026-04-08 21:56:55.572881: Epoch 207 +2026-04-08 21:56:55.574224: Current learning rate: 0.00812 +2026-04-08 21:58:37.355912: train_loss -0.1412 +2026-04-08 21:58:37.361184: val_loss -0.137 +2026-04-08 21:58:37.363932: Pseudo dice [0.6173, 0.7061, 0.6688, 0.5904, 0.2575, 0.5448, 0.6738] +2026-04-08 21:58:37.372169: Epoch time: 101.79 s +2026-04-08 21:58:37.373672: Yayy! 
New best EMA pseudo Dice: 0.5453 +2026-04-08 21:58:39.828564: +2026-04-08 21:58:39.830687: Epoch 208 +2026-04-08 21:58:39.832674: Current learning rate: 0.00811 +2026-04-08 22:00:22.081524: train_loss -0.1445 +2026-04-08 22:00:22.086511: val_loss -0.13 +2026-04-08 22:00:22.088202: Pseudo dice [0.644, 0.1747, 0.6577, 0.4917, 0.4053, 0.7286, 0.6887] +2026-04-08 22:00:22.090259: Epoch time: 102.26 s +2026-04-08 22:00:23.168093: +2026-04-08 22:00:23.169847: Epoch 209 +2026-04-08 22:00:23.171240: Current learning rate: 0.0081 +2026-04-08 22:02:04.777397: train_loss -0.1403 +2026-04-08 22:02:04.783524: val_loss -0.1195 +2026-04-08 22:02:04.785673: Pseudo dice [0.44, 0.805, 0.709, 0.5657, 0.3295, 0.6877, 0.6066] +2026-04-08 22:02:04.789624: Epoch time: 101.61 s +2026-04-08 22:02:04.791224: Yayy! New best EMA pseudo Dice: 0.5496 +2026-04-08 22:02:07.524362: +2026-04-08 22:02:07.526963: Epoch 210 +2026-04-08 22:02:07.528212: Current learning rate: 0.00809 +2026-04-08 22:03:49.093303: train_loss -0.1483 +2026-04-08 22:03:49.099657: val_loss -0.1092 +2026-04-08 22:03:49.101862: Pseudo dice [0.632, 0.2261, 0.4798, 0.3398, 0.3882, 0.4867, 0.6139] +2026-04-08 22:03:49.104094: Epoch time: 101.57 s +2026-04-08 22:03:50.140966: +2026-04-08 22:03:50.142961: Epoch 211 +2026-04-08 22:03:50.144533: Current learning rate: 0.00808 +2026-04-08 22:05:31.977267: train_loss -0.1398 +2026-04-08 22:05:31.982159: val_loss -0.1284 +2026-04-08 22:05:31.985278: Pseudo dice [0.4268, 0.3721, 0.6852, 0.5063, 0.271, 0.646, 0.7791] +2026-04-08 22:05:31.987813: Epoch time: 101.84 s +2026-04-08 22:05:33.105261: +2026-04-08 22:05:33.107477: Epoch 212 +2026-04-08 22:05:33.108866: Current learning rate: 0.00807 +2026-04-08 22:07:14.791536: train_loss -0.1402 +2026-04-08 22:07:14.796861: val_loss -0.1411 +2026-04-08 22:07:14.798410: Pseudo dice [0.6166, 0.3431, 0.6995, 0.1927, 0.483, 0.6337, 0.7654] +2026-04-08 22:07:14.800685: Epoch time: 101.69 s +2026-04-08 22:07:15.833647: +2026-04-08 22:07:15.835305: 
Epoch 213 +2026-04-08 22:07:15.836769: Current learning rate: 0.00806 +2026-04-08 22:08:58.515581: train_loss -0.1326 +2026-04-08 22:08:58.524077: val_loss -0.131 +2026-04-08 22:08:58.528156: Pseudo dice [0.6054, 0.3047, 0.6338, 0.5124, 0.6192, 0.4872, 0.7123] +2026-04-08 22:08:58.534277: Epoch time: 102.69 s +2026-04-08 22:08:59.592193: +2026-04-08 22:08:59.594152: Epoch 214 +2026-04-08 22:08:59.595802: Current learning rate: 0.00805 +2026-04-08 22:10:41.942641: train_loss -0.1366 +2026-04-08 22:10:41.947891: val_loss -0.1286 +2026-04-08 22:10:41.949762: Pseudo dice [0.7142, 0.1513, 0.7234, 0.509, 0.2665, 0.3247, 0.5446] +2026-04-08 22:10:41.952229: Epoch time: 102.35 s +2026-04-08 22:10:42.989790: +2026-04-08 22:10:42.991646: Epoch 215 +2026-04-08 22:10:42.993230: Current learning rate: 0.00804 +2026-04-08 22:12:24.765865: train_loss -0.134 +2026-04-08 22:12:24.773667: val_loss -0.1377 +2026-04-08 22:12:24.775039: Pseudo dice [0.6884, 0.685, 0.802, 0.5268, 0.4641, 0.5695, 0.6771] +2026-04-08 22:12:24.777149: Epoch time: 101.78 s +2026-04-08 22:12:26.955929: +2026-04-08 22:12:26.957242: Epoch 216 +2026-04-08 22:12:26.958452: Current learning rate: 0.00803 +2026-04-08 22:14:08.786474: train_loss -0.1583 +2026-04-08 22:14:08.792185: val_loss -0.1432 +2026-04-08 22:14:08.794190: Pseudo dice [0.3152, 0.7878, 0.8121, 0.4319, 0.396, 0.6278, 0.697] +2026-04-08 22:14:08.796715: Epoch time: 101.83 s +2026-04-08 22:14:09.844876: +2026-04-08 22:14:09.846258: Epoch 217 +2026-04-08 22:14:09.848010: Current learning rate: 0.00802 +2026-04-08 22:15:51.783257: train_loss -0.1474 +2026-04-08 22:15:51.794159: val_loss -0.1613 +2026-04-08 22:15:51.797874: Pseudo dice [0.5403, 0.8046, 0.742, 0.7415, 0.5258, 0.6913, 0.8171] +2026-04-08 22:15:51.801609: Epoch time: 101.94 s +2026-04-08 22:15:51.805099: Yayy! 
New best EMA pseudo Dice: 0.5605 +2026-04-08 22:15:54.319690: +2026-04-08 22:15:54.322041: Epoch 218 +2026-04-08 22:15:54.323411: Current learning rate: 0.00801 +2026-04-08 22:17:36.056083: train_loss -0.1386 +2026-04-08 22:17:36.061763: val_loss -0.1432 +2026-04-08 22:17:36.064842: Pseudo dice [0.5928, 0.2219, 0.8263, 0.6647, 0.4178, 0.278, 0.8143] +2026-04-08 22:17:36.070121: Epoch time: 101.74 s +2026-04-08 22:17:37.098659: +2026-04-08 22:17:37.100929: Epoch 219 +2026-04-08 22:17:37.102822: Current learning rate: 0.00801 +2026-04-08 22:19:18.838994: train_loss -0.1604 +2026-04-08 22:19:18.844570: val_loss -0.1298 +2026-04-08 22:19:18.846280: Pseudo dice [0.4048, 0.5237, 0.8155, 0.4829, 0.2086, 0.4291, 0.7437] +2026-04-08 22:19:18.848790: Epoch time: 101.74 s +2026-04-08 22:19:19.884938: +2026-04-08 22:19:19.886740: Epoch 220 +2026-04-08 22:19:19.888223: Current learning rate: 0.008 +2026-04-08 22:21:01.776295: train_loss -0.1313 +2026-04-08 22:21:01.781564: val_loss -0.1087 +2026-04-08 22:21:01.783566: Pseudo dice [0.3974, 0.1796, 0.6649, 0.5591, 0.2193, 0.7122, 0.5484] +2026-04-08 22:21:01.788553: Epoch time: 101.89 s +2026-04-08 22:21:02.847504: +2026-04-08 22:21:02.850338: Epoch 221 +2026-04-08 22:21:02.852344: Current learning rate: 0.00799 +2026-04-08 22:22:44.572769: train_loss -0.1362 +2026-04-08 22:22:44.577765: val_loss -0.1305 +2026-04-08 22:22:44.579786: Pseudo dice [0.562, 0.1768, 0.7941, 0.0027, 0.4227, 0.6147, 0.7702] +2026-04-08 22:22:44.582030: Epoch time: 101.73 s +2026-04-08 22:22:45.607980: +2026-04-08 22:22:45.609567: Epoch 222 +2026-04-08 22:22:45.610878: Current learning rate: 0.00798 +2026-04-08 22:24:27.403006: train_loss -0.1519 +2026-04-08 22:24:27.408416: val_loss -0.1271 +2026-04-08 22:24:27.412035: Pseudo dice [0.7532, 0.6835, 0.7028, 0.2152, 0.5084, 0.3731, 0.6138] +2026-04-08 22:24:27.416994: Epoch time: 101.8 s +2026-04-08 22:24:28.464628: +2026-04-08 22:24:28.468207: Epoch 223 +2026-04-08 22:24:28.471351: Current learning rate: 
0.00797 +2026-04-08 22:26:10.381922: train_loss -0.14 +2026-04-08 22:26:10.386933: val_loss -0.1231 +2026-04-08 22:26:10.388951: Pseudo dice [0.6088, 0.7436, 0.772, 0.1727, 0.3391, 0.2436, 0.555] +2026-04-08 22:26:10.391171: Epoch time: 101.92 s +2026-04-08 22:26:11.432919: +2026-04-08 22:26:11.435425: Epoch 224 +2026-04-08 22:26:11.436799: Current learning rate: 0.00796 +2026-04-08 22:27:53.082284: train_loss -0.1407 +2026-04-08 22:27:53.087104: val_loss -0.1246 +2026-04-08 22:27:53.090600: Pseudo dice [0.8272, 0.6151, 0.6863, 0.0592, 0.2374, 0.7159, 0.7017] +2026-04-08 22:27:53.093127: Epoch time: 101.65 s +2026-04-08 22:27:54.128841: +2026-04-08 22:27:54.130456: Epoch 225 +2026-04-08 22:27:54.131796: Current learning rate: 0.00795 +2026-04-08 22:29:35.764849: train_loss -0.1481 +2026-04-08 22:29:35.770173: val_loss -0.1519 +2026-04-08 22:29:35.773130: Pseudo dice [0.5126, 0.8891, 0.6269, 0.1455, 0.4811, 0.5958, 0.7243] +2026-04-08 22:29:35.775430: Epoch time: 101.64 s +2026-04-08 22:29:36.793338: +2026-04-08 22:29:36.794982: Epoch 226 +2026-04-08 22:29:36.796469: Current learning rate: 0.00794 +2026-04-08 22:31:19.658009: train_loss -0.1645 +2026-04-08 22:31:19.662723: val_loss -0.1493 +2026-04-08 22:31:19.665879: Pseudo dice [0.6727, 0.4423, 0.8134, 0.4972, 0.5016, 0.7932, 0.8221] +2026-04-08 22:31:19.671883: Epoch time: 102.87 s +2026-04-08 22:31:20.699042: +2026-04-08 22:31:20.700773: Epoch 227 +2026-04-08 22:31:20.701986: Current learning rate: 0.00793 +2026-04-08 22:33:02.753356: train_loss -0.1601 +2026-04-08 22:33:02.759274: val_loss -0.0954 +2026-04-08 22:33:02.760947: Pseudo dice [0.6635, 0.8016, 0.6095, 0.6505, 0.2451, 0.372, 0.2967] +2026-04-08 22:33:02.765623: Epoch time: 102.06 s +2026-04-08 22:33:03.811130: +2026-04-08 22:33:03.812937: Epoch 228 +2026-04-08 22:33:03.814498: Current learning rate: 0.00792 +2026-04-08 22:34:45.309829: train_loss -0.1391 +2026-04-08 22:34:45.316151: val_loss -0.1489 +2026-04-08 22:34:45.318029: Pseudo dice [0.7811, 
0.1386, 0.6969, 0.4649, 0.4823, 0.7018, 0.7712] +2026-04-08 22:34:45.321715: Epoch time: 101.5 s +2026-04-08 22:34:46.329825: +2026-04-08 22:34:46.331528: Epoch 229 +2026-04-08 22:34:46.332987: Current learning rate: 0.00791 +2026-04-08 22:36:28.360132: train_loss -0.1294 +2026-04-08 22:36:28.371438: val_loss -0.1297 +2026-04-08 22:36:28.373777: Pseudo dice [0.6223, 0.6415, 0.8158, 0.62, 0.5209, 0.4586, 0.7809] +2026-04-08 22:36:28.375750: Epoch time: 102.03 s +2026-04-08 22:36:29.399894: +2026-04-08 22:36:29.401564: Epoch 230 +2026-04-08 22:36:29.403104: Current learning rate: 0.0079 +2026-04-08 22:38:11.400741: train_loss -0.1296 +2026-04-08 22:38:11.405153: val_loss -0.1359 +2026-04-08 22:38:11.406783: Pseudo dice [0.6962, 0.554, 0.7397, 0.231, 0.4142, 0.5126, 0.8246] +2026-04-08 22:38:11.410990: Epoch time: 102.0 s +2026-04-08 22:38:12.433935: +2026-04-08 22:38:12.441344: Epoch 231 +2026-04-08 22:38:12.442690: Current learning rate: 0.00789 +2026-04-08 22:39:54.258687: train_loss -0.1574 +2026-04-08 22:39:54.263365: val_loss -0.1365 +2026-04-08 22:39:54.264899: Pseudo dice [0.7603, 0.3464, 0.7279, 0.3891, 0.5453, 0.5708, 0.571] +2026-04-08 22:39:54.267033: Epoch time: 101.83 s +2026-04-08 22:39:55.290953: +2026-04-08 22:39:55.292585: Epoch 232 +2026-04-08 22:39:55.294010: Current learning rate: 0.00789 +2026-04-08 22:41:37.091035: train_loss -0.1242 +2026-04-08 22:41:37.096340: val_loss -0.1424 +2026-04-08 22:41:37.098073: Pseudo dice [0.6967, 0.1108, 0.6499, 0.146, 0.5332, 0.5998, 0.8013] +2026-04-08 22:41:37.099789: Epoch time: 101.8 s +2026-04-08 22:41:38.118981: +2026-04-08 22:41:38.120214: Epoch 233 +2026-04-08 22:41:38.121445: Current learning rate: 0.00788 +2026-04-08 22:43:19.920123: train_loss -0.1151 +2026-04-08 22:43:19.926566: val_loss -0.1138 +2026-04-08 22:43:19.930656: Pseudo dice [0.5557, 0.7705, 0.3516, 0.1693, 0.3798, 0.5713, 0.6483] +2026-04-08 22:43:19.932589: Epoch time: 101.8 s +2026-04-08 22:43:20.942427: +2026-04-08 22:43:20.944039: 
Epoch 234 +2026-04-08 22:43:20.945339: Current learning rate: 0.00787 +2026-04-08 22:45:02.797553: train_loss -0.1286 +2026-04-08 22:45:02.809275: val_loss -0.1185 +2026-04-08 22:45:02.813836: Pseudo dice [0.6225, 0.599, 0.7799, 0.5305, 0.4221, 0.3725, 0.6852] +2026-04-08 22:45:02.817976: Epoch time: 101.86 s +2026-04-08 22:45:03.845173: +2026-04-08 22:45:03.846856: Epoch 235 +2026-04-08 22:45:03.850536: Current learning rate: 0.00786 +2026-04-08 22:46:46.178030: train_loss -0.1338 +2026-04-08 22:46:46.184500: val_loss -0.1422 +2026-04-08 22:46:46.186278: Pseudo dice [0.4428, 0.8033, 0.6488, 0.0855, 0.5417, 0.7807, 0.7351] +2026-04-08 22:46:46.188319: Epoch time: 102.34 s +2026-04-08 22:46:47.225772: +2026-04-08 22:46:47.227283: Epoch 236 +2026-04-08 22:46:47.228609: Current learning rate: 0.00785 +2026-04-08 22:48:30.356116: train_loss -0.1566 +2026-04-08 22:48:30.360900: val_loss -0.1192 +2026-04-08 22:48:30.362589: Pseudo dice [0.5954, 0.485, 0.6961, 0.551, 0.5032, 0.4602, 0.7132] +2026-04-08 22:48:30.364307: Epoch time: 103.13 s +2026-04-08 22:48:31.380297: +2026-04-08 22:48:31.382191: Epoch 237 +2026-04-08 22:48:31.383862: Current learning rate: 0.00784 +2026-04-08 22:50:15.376373: train_loss -0.1556 +2026-04-08 22:50:15.382346: val_loss -0.1363 +2026-04-08 22:50:15.384756: Pseudo dice [0.6803, 0.7159, 0.5133, 0.0484, 0.4556, 0.4366, 0.7415] +2026-04-08 22:50:15.387931: Epoch time: 104.0 s +2026-04-08 22:50:16.439295: +2026-04-08 22:50:16.440828: Epoch 238 +2026-04-08 22:50:16.442281: Current learning rate: 0.00783 +2026-04-08 22:51:57.900649: train_loss -0.1371 +2026-04-08 22:51:57.904843: val_loss -0.1369 +2026-04-08 22:51:57.906618: Pseudo dice [0.3819, 0.2038, 0.8454, 0.4188, 0.4821, 0.555, 0.697] +2026-04-08 22:51:57.908417: Epoch time: 101.46 s +2026-04-08 22:51:58.940512: +2026-04-08 22:51:58.942691: Epoch 239 +2026-04-08 22:51:58.944495: Current learning rate: 0.00782 +2026-04-08 22:53:40.243524: train_loss -0.1447 +2026-04-08 22:53:40.248776: val_loss 
-0.1498 +2026-04-08 22:53:40.250599: Pseudo dice [0.7548, 0.1823, 0.669, 0.7057, 0.4272, 0.3413, 0.6852] +2026-04-08 22:53:40.254382: Epoch time: 101.31 s +2026-04-08 22:53:41.288221: +2026-04-08 22:53:41.290125: Epoch 240 +2026-04-08 22:53:41.291770: Current learning rate: 0.00781 +2026-04-08 22:55:22.816501: train_loss -0.1441 +2026-04-08 22:55:22.821613: val_loss -0.1369 +2026-04-08 22:55:22.823322: Pseudo dice [0.4397, 0.4464, 0.672, 0.3443, 0.3984, 0.7851, 0.8383] +2026-04-08 22:55:22.825150: Epoch time: 101.53 s +2026-04-08 22:55:23.876860: +2026-04-08 22:55:23.879519: Epoch 241 +2026-04-08 22:55:23.890154: Current learning rate: 0.0078 +2026-04-08 22:57:05.279085: train_loss -0.1661 +2026-04-08 22:57:05.283745: val_loss -0.1324 +2026-04-08 22:57:05.285619: Pseudo dice [0.706, 0.337, 0.7887, 0.4702, 0.4778, 0.3972, 0.8366] +2026-04-08 22:57:05.289569: Epoch time: 101.41 s +2026-04-08 22:57:06.326998: +2026-04-08 22:57:06.328462: Epoch 242 +2026-04-08 22:57:06.329634: Current learning rate: 0.00779 +2026-04-08 22:58:47.751244: train_loss -0.1626 +2026-04-08 22:58:47.756378: val_loss -0.1383 +2026-04-08 22:58:47.757967: Pseudo dice [0.5693, 0.7756, 0.6908, 0.4895, 0.5139, 0.6073, 0.716] +2026-04-08 22:58:47.760293: Epoch time: 101.43 s +2026-04-08 22:58:48.804251: +2026-04-08 22:58:48.805685: Epoch 243 +2026-04-08 22:58:48.807287: Current learning rate: 0.00778 +2026-04-08 23:00:30.379005: train_loss -0.1524 +2026-04-08 23:00:30.383846: val_loss -0.1516 +2026-04-08 23:00:30.385451: Pseudo dice [0.6024, 0.1474, 0.7723, 0.5516, 0.5983, 0.661, 0.8233] +2026-04-08 23:00:30.387398: Epoch time: 101.58 s +2026-04-08 23:00:30.389534: Yayy! 
New best EMA pseudo Dice: 0.5611 +2026-04-08 23:00:32.770726: +2026-04-08 23:00:32.773021: Epoch 244 +2026-04-08 23:00:32.774403: Current learning rate: 0.00777 +2026-04-08 23:02:14.238313: train_loss -0.1616 +2026-04-08 23:02:14.243644: val_loss -0.1392 +2026-04-08 23:02:14.244806: Pseudo dice [0.5859, 0.8497, 0.6564, 0.1669, 0.5847, 0.5809, 0.8052] +2026-04-08 23:02:14.246416: Epoch time: 101.47 s +2026-04-08 23:02:14.247997: Yayy! New best EMA pseudo Dice: 0.5654 +2026-04-08 23:02:16.705557: +2026-04-08 23:02:16.707911: Epoch 245 +2026-04-08 23:02:16.710539: Current learning rate: 0.00777 +2026-04-08 23:03:58.758187: train_loss -0.1493 +2026-04-08 23:03:58.763518: val_loss -0.1379 +2026-04-08 23:03:58.766294: Pseudo dice [0.4497, 0.7816, 0.7774, 0.6281, 0.4601, 0.3955, 0.87] +2026-04-08 23:03:58.770548: Epoch time: 102.06 s +2026-04-08 23:03:58.772574: Yayy! New best EMA pseudo Dice: 0.5712 +2026-04-08 23:04:01.653760: +2026-04-08 23:04:01.657395: Epoch 246 +2026-04-08 23:04:01.658562: Current learning rate: 0.00776 +2026-04-08 23:05:42.975996: train_loss -0.1508 +2026-04-08 23:05:42.985943: val_loss -0.13 +2026-04-08 23:05:42.987636: Pseudo dice [0.7198, 0.578, 0.4882, 0.2297, 0.3528, 0.5588, 0.7439] +2026-04-08 23:05:42.989564: Epoch time: 101.33 s +2026-04-08 23:05:44.042457: +2026-04-08 23:05:44.043810: Epoch 247 +2026-04-08 23:05:44.045200: Current learning rate: 0.00775 +2026-04-08 23:07:25.425109: train_loss -0.1533 +2026-04-08 23:07:25.429188: val_loss -0.122 +2026-04-08 23:07:25.430573: Pseudo dice [0.6479, 0.4339, 0.5727, 0.687, 0.4461, 0.4158, 0.7752] +2026-04-08 23:07:25.432150: Epoch time: 101.39 s +2026-04-08 23:07:26.477857: +2026-04-08 23:07:26.479903: Epoch 248 +2026-04-08 23:07:26.481656: Current learning rate: 0.00774 +2026-04-08 23:09:07.937410: train_loss -0.1626 +2026-04-08 23:09:07.942272: val_loss -0.1343 +2026-04-08 23:09:07.944084: Pseudo dice [0.4142, 0.0345, 0.6587, 0.5037, 0.4323, 0.7114, 0.7787] +2026-04-08 23:09:07.948134: Epoch 
time: 101.46 s +2026-04-08 23:09:08.993134: +2026-04-08 23:09:08.994541: Epoch 249 +2026-04-08 23:09:08.995875: Current learning rate: 0.00773 +2026-04-08 23:10:50.905767: train_loss -0.167 +2026-04-08 23:10:50.910475: val_loss -0.1404 +2026-04-08 23:10:50.912635: Pseudo dice [0.7715, 0.3181, 0.6376, 0.4842, 0.4456, 0.3504, 0.7591] +2026-04-08 23:10:50.917461: Epoch time: 101.92 s +2026-04-08 23:10:53.426879: +2026-04-08 23:10:53.430104: Epoch 250 +2026-04-08 23:10:53.432102: Current learning rate: 0.00772 +2026-04-08 23:12:34.832512: train_loss -0.1641 +2026-04-08 23:12:34.839068: val_loss -0.1565 +2026-04-08 23:12:34.840914: Pseudo dice [0.5006, 0.7673, 0.7778, 0.3731, 0.5667, 0.2657, 0.921] +2026-04-08 23:12:34.844842: Epoch time: 101.41 s +2026-04-08 23:12:35.897893: +2026-04-08 23:12:35.908867: Epoch 251 +2026-04-08 23:12:35.911116: Current learning rate: 0.00771 +2026-04-08 23:14:17.330997: train_loss -0.1557 +2026-04-08 23:14:17.336783: val_loss -0.1624 +2026-04-08 23:14:17.338483: Pseudo dice [0.7221, 0.8171, 0.7035, 0.6693, 0.4669, 0.6363, 0.8109] +2026-04-08 23:14:17.340781: Epoch time: 101.44 s +2026-04-08 23:14:17.342209: Yayy! New best EMA pseudo Dice: 0.5748 +2026-04-08 23:14:19.715898: +2026-04-08 23:14:19.718493: Epoch 252 +2026-04-08 23:14:19.720018: Current learning rate: 0.0077 +2026-04-08 23:16:01.498479: train_loss -0.1393 +2026-04-08 23:16:01.503583: val_loss -0.1399 +2026-04-08 23:16:01.505477: Pseudo dice [0.7375, 0.7849, 0.7673, 0.7277, 0.5526, 0.7659, 0.6809] +2026-04-08 23:16:01.508096: Epoch time: 101.79 s +2026-04-08 23:16:01.509682: Yayy! 
New best EMA pseudo Dice: 0.589 +2026-04-08 23:16:04.146744: +2026-04-08 23:16:04.149713: Epoch 253 +2026-04-08 23:16:04.150913: Current learning rate: 0.00769 +2026-04-08 23:17:45.516354: train_loss -0.1639 +2026-04-08 23:17:45.524591: val_loss -0.152 +2026-04-08 23:17:45.528275: Pseudo dice [0.5094, 0.3622, 0.8249, 0.4671, 0.4361, 0.3791, 0.7468] +2026-04-08 23:17:45.531548: Epoch time: 101.37 s +2026-04-08 23:17:47.626546: +2026-04-08 23:17:47.629136: Epoch 254 +2026-04-08 23:17:47.630511: Current learning rate: 0.00768 +2026-04-08 23:19:29.158215: train_loss -0.1439 +2026-04-08 23:19:29.163399: val_loss -0.1622 +2026-04-08 23:19:29.165857: Pseudo dice [0.6112, 0.3073, 0.7705, 0.5239, 0.5863, 0.698, 0.8376] +2026-04-08 23:19:29.168705: Epoch time: 101.54 s +2026-04-08 23:19:30.211708: +2026-04-08 23:19:30.213180: Epoch 255 +2026-04-08 23:19:30.214682: Current learning rate: 0.00767 +2026-04-08 23:21:11.868333: train_loss -0.1577 +2026-04-08 23:21:11.874750: val_loss -0.1441 +2026-04-08 23:21:11.876801: Pseudo dice [0.4826, 0.282, 0.7653, 0.0963, 0.6236, 0.5398, 0.7538] +2026-04-08 23:21:11.880390: Epoch time: 101.66 s +2026-04-08 23:21:12.914199: +2026-04-08 23:21:12.915917: Epoch 256 +2026-04-08 23:21:12.917679: Current learning rate: 0.00766 +2026-04-08 23:22:54.562478: train_loss -0.1565 +2026-04-08 23:22:54.569994: val_loss -0.1164 +2026-04-08 23:22:54.572705: Pseudo dice [0.6685, 0.5661, 0.769, 0.5863, 0.3029, 0.5478, 0.8773] +2026-04-08 23:22:54.577623: Epoch time: 101.65 s +2026-04-08 23:22:55.653539: +2026-04-08 23:22:55.655222: Epoch 257 +2026-04-08 23:22:55.657067: Current learning rate: 0.00765 +2026-04-08 23:24:37.380101: train_loss -0.1643 +2026-04-08 23:24:37.384879: val_loss -0.1719 +2026-04-08 23:24:37.386685: Pseudo dice [0.5571, 0.6348, 0.8665, 0.0145, 0.601, 0.7016, 0.8746] +2026-04-08 23:24:37.389361: Epoch time: 101.73 s +2026-04-08 23:24:38.440665: +2026-04-08 23:24:38.442189: Epoch 258 +2026-04-08 23:24:38.443490: Current learning rate: 
0.00764 +2026-04-08 23:26:20.187621: train_loss -0.1742 +2026-04-08 23:26:20.200675: val_loss -0.1514 +2026-04-08 23:26:20.203778: Pseudo dice [0.4425, 0.2125, 0.4987, 0.565, 0.4872, 0.7057, 0.8763] +2026-04-08 23:26:20.210747: Epoch time: 101.75 s +2026-04-08 23:26:21.276760: +2026-04-08 23:26:21.279005: Epoch 259 +2026-04-08 23:26:21.280775: Current learning rate: 0.00764 +2026-04-08 23:28:02.867918: train_loss -0.1671 +2026-04-08 23:28:02.873665: val_loss -0.1446 +2026-04-08 23:28:02.875669: Pseudo dice [0.6619, 0.399, 0.5514, 0.5613, 0.4556, 0.6842, 0.6671] +2026-04-08 23:28:02.877873: Epoch time: 101.59 s +2026-04-08 23:28:03.907670: +2026-04-08 23:28:03.910078: Epoch 260 +2026-04-08 23:28:03.911982: Current learning rate: 0.00763 +2026-04-08 23:29:45.262343: train_loss -0.1556 +2026-04-08 23:29:45.267081: val_loss -0.1379 +2026-04-08 23:29:45.268765: Pseudo dice [0.6669, 0.8185, 0.6064, 0.1946, 0.5125, 0.4686, 0.8082] +2026-04-08 23:29:45.270892: Epoch time: 101.36 s +2026-04-08 23:29:46.323751: +2026-04-08 23:29:46.330382: Epoch 261 +2026-04-08 23:29:46.331830: Current learning rate: 0.00762 +2026-04-08 23:31:27.663020: train_loss -0.1639 +2026-04-08 23:31:27.667712: val_loss -0.1397 +2026-04-08 23:31:27.669312: Pseudo dice [0.4512, 0.8438, 0.8756, 0.507, 0.4431, 0.708, 0.6782] +2026-04-08 23:31:27.671700: Epoch time: 101.34 s +2026-04-08 23:31:28.703877: +2026-04-08 23:31:28.705641: Epoch 262 +2026-04-08 23:31:28.707027: Current learning rate: 0.00761 +2026-04-08 23:33:10.077243: train_loss -0.1479 +2026-04-08 23:33:10.082524: val_loss -0.1394 +2026-04-08 23:33:10.084347: Pseudo dice [0.7549, 0.2118, 0.3995, 0.6022, 0.4488, 0.446, 0.4584] +2026-04-08 23:33:10.087184: Epoch time: 101.38 s +2026-04-08 23:33:11.131181: +2026-04-08 23:33:11.132566: Epoch 263 +2026-04-08 23:33:11.133822: Current learning rate: 0.0076 +2026-04-08 23:34:52.569403: train_loss -0.1549 +2026-04-08 23:34:52.575769: val_loss -0.1337 +2026-04-08 23:34:52.577100: Pseudo dice [0.6969, 
0.697, 0.6515, 0.183, 0.4053, 0.6489, 0.494] +2026-04-08 23:34:52.578917: Epoch time: 101.44 s +2026-04-08 23:34:53.615011: +2026-04-08 23:34:53.616277: Epoch 264 +2026-04-08 23:34:53.617411: Current learning rate: 0.00759 +2026-04-08 23:36:35.207402: train_loss -0.1415 +2026-04-08 23:36:35.217235: val_loss -0.1072 +2026-04-08 23:36:35.219098: Pseudo dice [0.4351, 0.0352, 0.6711, 0.5661, 0.4611, 0.2983, 0.3138] +2026-04-08 23:36:35.221108: Epoch time: 101.6 s +2026-04-08 23:36:36.259390: +2026-04-08 23:36:36.261786: Epoch 265 +2026-04-08 23:36:36.264673: Current learning rate: 0.00758 +2026-04-08 23:38:18.436129: train_loss -0.1585 +2026-04-08 23:38:18.448683: val_loss -0.1571 +2026-04-08 23:38:18.452589: Pseudo dice [0.4713, 0.7158, 0.7795, 0.5795, 0.485, 0.6448, 0.7454] +2026-04-08 23:38:18.457510: Epoch time: 102.18 s +2026-04-08 23:38:19.502989: +2026-04-08 23:38:19.505433: Epoch 266 +2026-04-08 23:38:19.507223: Current learning rate: 0.00757 +2026-04-08 23:40:01.699541: train_loss -0.1664 +2026-04-08 23:40:01.704599: val_loss -0.1628 +2026-04-08 23:40:01.707721: Pseudo dice [0.4736, 0.2903, 0.8489, 0.5467, 0.5382, 0.7807, 0.8197] +2026-04-08 23:40:01.709865: Epoch time: 102.2 s +2026-04-08 23:40:02.752172: +2026-04-08 23:40:02.760598: Epoch 267 +2026-04-08 23:40:02.767144: Current learning rate: 0.00756 +2026-04-08 23:41:44.381980: train_loss -0.1522 +2026-04-08 23:41:44.386529: val_loss -0.1229 +2026-04-08 23:41:44.389135: Pseudo dice [0.6788, 0.6002, 0.6025, 0.5646, 0.5592, 0.2619, 0.2875] +2026-04-08 23:41:44.393342: Epoch time: 101.63 s +2026-04-08 23:41:45.436254: +2026-04-08 23:41:45.437800: Epoch 268 +2026-04-08 23:41:45.439126: Current learning rate: 0.00755 +2026-04-08 23:43:27.183137: train_loss -0.1403 +2026-04-08 23:43:27.194476: val_loss -0.1535 +2026-04-08 23:43:27.198157: Pseudo dice [0.3611, 0.1614, 0.7391, 0.7648, 0.3237, 0.2739, 0.5781] +2026-04-08 23:43:27.202590: Epoch time: 101.75 s +2026-04-08 23:43:28.236293: +2026-04-08 23:43:28.237609: 
Epoch 269 +2026-04-08 23:43:28.238838: Current learning rate: 0.00754 +2026-04-08 23:45:09.677422: train_loss -0.1538 +2026-04-08 23:45:09.682420: val_loss -0.1476 +2026-04-08 23:45:09.684330: Pseudo dice [0.7562, 0.2122, 0.7167, 0.7184, 0.2845, 0.6613, 0.6152] +2026-04-08 23:45:09.686918: Epoch time: 101.44 s +2026-04-08 23:45:10.740149: +2026-04-08 23:45:10.742065: Epoch 270 +2026-04-08 23:45:10.743564: Current learning rate: 0.00753 +2026-04-08 23:46:52.774562: train_loss -0.1656 +2026-04-08 23:46:52.780083: val_loss -0.1401 +2026-04-08 23:46:52.782134: Pseudo dice [0.5844, 0.687, 0.812, 0.4293, 0.5816, 0.4917, 0.3731] +2026-04-08 23:46:52.784664: Epoch time: 102.04 s +2026-04-08 23:46:53.843363: +2026-04-08 23:46:53.846755: Epoch 271 +2026-04-08 23:46:53.849477: Current learning rate: 0.00752 +2026-04-08 23:48:35.457560: train_loss -0.1566 +2026-04-08 23:48:35.463542: val_loss -0.1255 +2026-04-08 23:48:35.466363: Pseudo dice [0.5226, 0.3237, 0.7609, 0.5103, 0.5009, 0.5247, 0.585] +2026-04-08 23:48:35.468520: Epoch time: 101.62 s +2026-04-08 23:48:36.516843: +2026-04-08 23:48:36.518536: Epoch 272 +2026-04-08 23:48:36.520079: Current learning rate: 0.00751 +2026-04-08 23:50:18.034179: train_loss -0.1574 +2026-04-08 23:50:18.038970: val_loss -0.119 +2026-04-08 23:50:18.040570: Pseudo dice [0.6038, 0.0968, 0.5669, 0.7187, 0.2664, 0.4079, 0.8045] +2026-04-08 23:50:18.042884: Epoch time: 101.52 s +2026-04-08 23:50:19.070527: +2026-04-08 23:50:19.071907: Epoch 273 +2026-04-08 23:50:19.073173: Current learning rate: 0.00751 +2026-04-08 23:52:00.639219: train_loss -0.1463 +2026-04-08 23:52:00.647928: val_loss -0.133 +2026-04-08 23:52:00.650717: Pseudo dice [0.5383, 0.3035, 0.8169, 0.5976, 0.2403, 0.5802, 0.8623] +2026-04-08 23:52:00.655852: Epoch time: 101.57 s +2026-04-08 23:52:01.712947: +2026-04-08 23:52:01.714360: Epoch 274 +2026-04-08 23:52:01.717722: Current learning rate: 0.0075 +2026-04-08 23:53:43.274018: train_loss -0.1508 +2026-04-08 23:53:43.278702: val_loss 
-0.1676 +2026-04-08 23:53:43.280142: Pseudo dice [0.6262, 0.7266, 0.8258, 0.1709, 0.4767, 0.2804, 0.6116] +2026-04-08 23:53:43.282127: Epoch time: 101.56 s +2026-04-08 23:53:45.383075: +2026-04-08 23:53:45.384618: Epoch 275 +2026-04-08 23:53:45.386143: Current learning rate: 0.00749 +2026-04-08 23:55:26.889704: train_loss -0.1548 +2026-04-08 23:55:26.894112: val_loss -0.157 +2026-04-08 23:55:26.896068: Pseudo dice [0.7288, 0.5662, 0.6806, 0.629, 0.2627, 0.7076, 0.7886] +2026-04-08 23:55:26.898341: Epoch time: 101.51 s +2026-04-08 23:55:27.943479: +2026-04-08 23:55:27.945408: Epoch 276 +2026-04-08 23:55:27.946963: Current learning rate: 0.00748 +2026-04-08 23:57:09.833894: train_loss -0.1444 +2026-04-08 23:57:09.839203: val_loss -0.1248 +2026-04-08 23:57:09.841069: Pseudo dice [0.7813, 0.1651, 0.3847, 0.4731, 0.3453, 0.3062, 0.3955] +2026-04-08 23:57:09.843493: Epoch time: 101.89 s +2026-04-08 23:57:10.916642: +2026-04-08 23:57:10.918751: Epoch 277 +2026-04-08 23:57:10.920206: Current learning rate: 0.00747 +2026-04-08 23:58:52.823969: train_loss -0.1517 +2026-04-08 23:58:52.828759: val_loss -0.1362 +2026-04-08 23:58:52.830498: Pseudo dice [0.5635, 0.7698, 0.7904, 0.2718, 0.4268, 0.5609, 0.3071] +2026-04-08 23:58:52.834100: Epoch time: 101.91 s +2026-04-08 23:58:53.897447: +2026-04-08 23:58:53.898907: Epoch 278 +2026-04-08 23:58:53.900019: Current learning rate: 0.00746 +2026-04-09 00:00:35.738311: train_loss -0.1623 +2026-04-09 00:00:35.746035: val_loss -0.1411 +2026-04-09 00:00:35.748180: Pseudo dice [0.663, 0.8397, 0.7822, 0.5457, 0.4239, 0.5562, 0.7862] +2026-04-09 00:00:35.751063: Epoch time: 101.84 s +2026-04-09 00:00:36.801642: +2026-04-09 00:00:36.803505: Epoch 279 +2026-04-09 00:00:36.805298: Current learning rate: 0.00745 +2026-04-09 00:02:18.574021: train_loss -0.1653 +2026-04-09 00:02:18.578937: val_loss -0.1545 +2026-04-09 00:02:18.580939: Pseudo dice [0.47, 0.5133, 0.7251, 0.7173, 0.4641, 0.4795, 0.9187] +2026-04-09 00:02:18.583010: Epoch time: 101.78 
s +2026-04-09 00:02:19.649988: +2026-04-09 00:02:19.652234: Epoch 280 +2026-04-09 00:02:19.654707: Current learning rate: 0.00744 +2026-04-09 00:04:01.566052: train_loss -0.1635 +2026-04-09 00:04:01.571283: val_loss -0.1458 +2026-04-09 00:04:01.573877: Pseudo dice [0.6618, 0.3935, 0.6585, 0.5009, 0.3907, 0.6045, 0.6084] +2026-04-09 00:04:01.576238: Epoch time: 101.92 s +2026-04-09 00:04:02.628454: +2026-04-09 00:04:02.630262: Epoch 281 +2026-04-09 00:04:02.632195: Current learning rate: 0.00743 +2026-04-09 00:05:44.914901: train_loss -0.1514 +2026-04-09 00:05:44.920423: val_loss -0.1097 +2026-04-09 00:05:44.922250: Pseudo dice [0.5711, 0.7468, 0.5413, 0.7939, 0.5903, 0.5847, 0.4408] +2026-04-09 00:05:44.923982: Epoch time: 102.29 s +2026-04-09 00:05:45.980525: +2026-04-09 00:05:45.982124: Epoch 282 +2026-04-09 00:05:45.983635: Current learning rate: 0.00742 +2026-04-09 00:07:29.778716: train_loss -0.1488 +2026-04-09 00:07:29.783770: val_loss -0.1467 +2026-04-09 00:07:29.785592: Pseudo dice [0.4552, 0.1456, 0.8324, 0.4615, 0.4347, 0.4621, 0.6508] +2026-04-09 00:07:29.787345: Epoch time: 103.8 s +2026-04-09 00:07:30.840359: +2026-04-09 00:07:30.843904: Epoch 283 +2026-04-09 00:07:30.847145: Current learning rate: 0.00741 +2026-04-09 00:09:12.485107: train_loss -0.157 +2026-04-09 00:09:12.491381: val_loss -0.1361 +2026-04-09 00:09:12.494096: Pseudo dice [0.6619, 0.8839, 0.8233, 0.5795, 0.5674, 0.4702, 0.7111] +2026-04-09 00:09:12.496576: Epoch time: 101.65 s +2026-04-09 00:09:13.529229: +2026-04-09 00:09:13.530906: Epoch 284 +2026-04-09 00:09:13.535055: Current learning rate: 0.0074 +2026-04-09 00:10:55.335618: train_loss -0.1539 +2026-04-09 00:10:55.347411: val_loss -0.1417 +2026-04-09 00:10:55.350606: Pseudo dice [0.5068, 0.6768, 0.5795, 0.6414, 0.3333, 0.4658, 0.8421] +2026-04-09 00:10:55.354354: Epoch time: 101.81 s +2026-04-09 00:10:56.402862: +2026-04-09 00:10:56.404731: Epoch 285 +2026-04-09 00:10:56.406836: Current learning rate: 0.00739 +2026-04-09 
00:12:38.212365: train_loss -0.1556 +2026-04-09 00:12:38.217296: val_loss -0.1539 +2026-04-09 00:12:38.219395: Pseudo dice [0.6321, 0.8345, 0.6912, 0.6977, 0.4206, 0.3718, 0.8863] +2026-04-09 00:12:38.221170: Epoch time: 101.81 s +2026-04-09 00:12:39.281291: +2026-04-09 00:12:39.282867: Epoch 286 +2026-04-09 00:12:39.284109: Current learning rate: 0.00738 +2026-04-09 00:14:21.107306: train_loss -0.1611 +2026-04-09 00:14:21.112355: val_loss -0.1685 +2026-04-09 00:14:21.114183: Pseudo dice [0.5071, 0.5014, 0.7873, 0.3704, 0.5316, 0.6016, 0.8734] +2026-04-09 00:14:21.116794: Epoch time: 101.83 s +2026-04-09 00:14:22.201358: +2026-04-09 00:14:22.203326: Epoch 287 +2026-04-09 00:14:22.205110: Current learning rate: 0.00738 +2026-04-09 00:16:03.715095: train_loss -0.1706 +2026-04-09 00:16:03.719306: val_loss -0.1563 +2026-04-09 00:16:03.720773: Pseudo dice [0.5896, 0.281, 0.6952, 0.4192, 0.5951, 0.6774, 0.7465] +2026-04-09 00:16:03.722585: Epoch time: 101.52 s +2026-04-09 00:16:04.783260: +2026-04-09 00:16:04.786219: Epoch 288 +2026-04-09 00:16:04.788822: Current learning rate: 0.00737 +2026-04-09 00:17:46.167721: train_loss -0.1597 +2026-04-09 00:17:46.173388: val_loss -0.1278 +2026-04-09 00:17:46.175065: Pseudo dice [0.4627, 0.6539, 0.709, 0.5777, 0.5262, 0.8656, 0.4676] +2026-04-09 00:17:46.176847: Epoch time: 101.39 s +2026-04-09 00:17:47.230288: +2026-04-09 00:17:47.231970: Epoch 289 +2026-04-09 00:17:47.233604: Current learning rate: 0.00736 +2026-04-09 00:19:28.736098: train_loss -0.1621 +2026-04-09 00:19:28.741907: val_loss -0.153 +2026-04-09 00:19:28.743713: Pseudo dice [0.5469, 0.0685, 0.6786, 0.3732, 0.589, 0.5664, 0.8889] +2026-04-09 00:19:28.745709: Epoch time: 101.51 s +2026-04-09 00:19:29.801556: +2026-04-09 00:19:29.803706: Epoch 290 +2026-04-09 00:19:29.805480: Current learning rate: 0.00735 +2026-04-09 00:21:11.312649: train_loss -0.1631 +2026-04-09 00:21:11.317771: val_loss -0.1268 +2026-04-09 00:21:11.320116: Pseudo dice [0.6111, 0.7271, 0.6315, 
0.0118, 0.3474, 0.7505, 0.8429] +2026-04-09 00:21:11.322227: Epoch time: 101.51 s +2026-04-09 00:21:12.374151: +2026-04-09 00:21:12.375533: Epoch 291 +2026-04-09 00:21:12.376796: Current learning rate: 0.00734 +2026-04-09 00:22:53.974115: train_loss -0.1532 +2026-04-09 00:22:53.983300: val_loss -0.1204 +2026-04-09 00:22:53.984990: Pseudo dice [0.4959, 0.7401, 0.1359, 0.7187, 0.4538, 0.0895, 0.6511] +2026-04-09 00:22:53.987158: Epoch time: 101.6 s +2026-04-09 00:22:55.042614: +2026-04-09 00:22:55.043933: Epoch 292 +2026-04-09 00:22:55.045966: Current learning rate: 0.00733 +2026-04-09 00:24:36.546228: train_loss -0.1555 +2026-04-09 00:24:36.552156: val_loss -0.1459 +2026-04-09 00:24:36.553591: Pseudo dice [0.644, 0.1768, 0.797, 0.6068, 0.6124, 0.694, 0.6221] +2026-04-09 00:24:36.555788: Epoch time: 101.51 s +2026-04-09 00:24:37.625511: +2026-04-09 00:24:37.626872: Epoch 293 +2026-04-09 00:24:37.628321: Current learning rate: 0.00732 +2026-04-09 00:26:19.148955: train_loss -0.1447 +2026-04-09 00:26:19.156451: val_loss -0.1583 +2026-04-09 00:26:19.157970: Pseudo dice [0.6361, 0.8171, 0.8497, 0.6039, 0.285, 0.5035, 0.8854] +2026-04-09 00:26:19.159760: Epoch time: 101.53 s +2026-04-09 00:26:20.214796: +2026-04-09 00:26:20.216392: Epoch 294 +2026-04-09 00:26:20.217692: Current learning rate: 0.00731 +2026-04-09 00:28:01.589256: train_loss -0.1461 +2026-04-09 00:28:01.593715: val_loss -0.1421 +2026-04-09 00:28:01.596164: Pseudo dice [0.5646, 0.7793, 0.7988, 0.4985, 0.4136, 0.2775, 0.796] +2026-04-09 00:28:01.598474: Epoch time: 101.38 s +2026-04-09 00:28:02.662552: +2026-04-09 00:28:02.663792: Epoch 295 +2026-04-09 00:28:02.665097: Current learning rate: 0.0073 +2026-04-09 00:29:45.051676: train_loss -0.1635 +2026-04-09 00:29:45.057097: val_loss -0.1435 +2026-04-09 00:29:45.058771: Pseudo dice [0.6862, 0.7212, 0.8354, 0.6915, 0.5297, 0.4237, 0.4232] +2026-04-09 00:29:45.060874: Epoch time: 102.39 s +2026-04-09 00:29:46.135770: +2026-04-09 00:29:46.139020: Epoch 296 
+2026-04-09 00:29:46.142610: Current learning rate: 0.00729 +2026-04-09 00:31:27.666414: train_loss -0.1689 +2026-04-09 00:31:27.671312: val_loss -0.1382 +2026-04-09 00:31:27.672580: Pseudo dice [0.407, 0.3489, 0.648, 0.6614, 0.5051, 0.7083, 0.8726] +2026-04-09 00:31:27.674518: Epoch time: 101.53 s +2026-04-09 00:31:28.729722: +2026-04-09 00:31:28.733279: Epoch 297 +2026-04-09 00:31:28.734513: Current learning rate: 0.00728 +2026-04-09 00:33:10.292703: train_loss -0.1605 +2026-04-09 00:33:10.298266: val_loss -0.1384 +2026-04-09 00:33:10.300134: Pseudo dice [0.7825, 0.8496, 0.5272, 0.5087, 0.4523, 0.5196, 0.7427] +2026-04-09 00:33:10.302344: Epoch time: 101.57 s +2026-04-09 00:33:11.381412: +2026-04-09 00:33:11.383030: Epoch 298 +2026-04-09 00:33:11.384404: Current learning rate: 0.00727 +2026-04-09 00:34:52.999604: train_loss -0.1676 +2026-04-09 00:34:53.004548: val_loss -0.1227 +2026-04-09 00:34:53.005854: Pseudo dice [0.7523, 0.367, 0.6534, 0.6218, 0.4098, 0.2607, 0.819] +2026-04-09 00:34:53.007479: Epoch time: 101.62 s +2026-04-09 00:34:54.080726: +2026-04-09 00:34:54.083339: Epoch 299 +2026-04-09 00:34:54.085311: Current learning rate: 0.00726 +2026-04-09 00:36:35.767916: train_loss -0.1595 +2026-04-09 00:36:35.773017: val_loss -0.1718 +2026-04-09 00:36:35.774706: Pseudo dice [0.3756, 0.5767, 0.7592, 0.632, 0.6311, 0.6091, 0.6654] +2026-04-09 00:36:35.776854: Epoch time: 101.69 s +2026-04-09 00:36:38.290684: +2026-04-09 00:36:38.293070: Epoch 300 +2026-04-09 00:36:38.294242: Current learning rate: 0.00725 +2026-04-09 00:38:20.029074: train_loss -0.1559 +2026-04-09 00:38:20.038173: val_loss -0.125 +2026-04-09 00:38:20.041907: Pseudo dice [0.3441, 0.5467, 0.812, 0.0311, 0.4798, 0.4205, 0.7573] +2026-04-09 00:38:20.045503: Epoch time: 101.74 s +2026-04-09 00:38:21.122978: +2026-04-09 00:38:21.125177: Epoch 301 +2026-04-09 00:38:21.126474: Current learning rate: 0.00724 +2026-04-09 00:40:02.754224: train_loss -0.1603 +2026-04-09 00:40:02.761832: val_loss -0.1298 
+2026-04-09 00:40:02.763617: Pseudo dice [0.6994, 0.2316, 0.7315, 0.1302, 0.4027, 0.2838, 0.5396] +2026-04-09 00:40:02.766255: Epoch time: 101.63 s +2026-04-09 00:40:03.878443: +2026-04-09 00:40:03.880235: Epoch 302 +2026-04-09 00:40:03.881695: Current learning rate: 0.00724 +2026-04-09 00:41:45.433955: train_loss -0.1672 +2026-04-09 00:41:45.458459: val_loss -0.1505 +2026-04-09 00:41:45.460112: Pseudo dice [0.7639, 0.8363, 0.6048, 0.222, 0.4946, 0.2433, 0.8717] +2026-04-09 00:41:45.461879: Epoch time: 101.56 s +2026-04-09 00:41:46.537677: +2026-04-09 00:41:46.539682: Epoch 303 +2026-04-09 00:41:46.540876: Current learning rate: 0.00723 +2026-04-09 00:43:28.087179: train_loss -0.1544 +2026-04-09 00:43:28.092978: val_loss -0.1228 +2026-04-09 00:43:28.094627: Pseudo dice [0.7924, 0.5593, 0.5743, 0.1441, 0.3476, 0.524, 0.7486] +2026-04-09 00:43:28.096420: Epoch time: 101.55 s +2026-04-09 00:43:29.154157: +2026-04-09 00:43:29.155530: Epoch 304 +2026-04-09 00:43:29.156739: Current learning rate: 0.00722 +2026-04-09 00:45:10.867232: train_loss -0.1583 +2026-04-09 00:45:10.875944: val_loss -0.1623 +2026-04-09 00:45:10.878374: Pseudo dice [0.7289, 0.7071, 0.5404, 0.5101, 0.4136, 0.653, 0.9154] +2026-04-09 00:45:10.880262: Epoch time: 101.72 s +2026-04-09 00:45:11.945049: +2026-04-09 00:45:11.947190: Epoch 305 +2026-04-09 00:45:11.948678: Current learning rate: 0.00721 +2026-04-09 00:46:53.392348: train_loss -0.1798 +2026-04-09 00:46:53.397044: val_loss -0.1537 +2026-04-09 00:46:53.398631: Pseudo dice [0.4711, 0.7762, 0.6935, 0.7281, 0.4704, 0.716, 0.9415] +2026-04-09 00:46:53.400446: Epoch time: 101.45 s +2026-04-09 00:46:54.452591: +2026-04-09 00:46:54.454294: Epoch 306 +2026-04-09 00:46:54.455576: Current learning rate: 0.0072 +2026-04-09 00:48:36.002066: train_loss -0.1657 +2026-04-09 00:48:36.006578: val_loss -0.1491 +2026-04-09 00:48:36.008288: Pseudo dice [0.5461, 0.564, 0.7514, 0.7546, 0.4082, 0.7544, 0.5922] +2026-04-09 00:48:36.010270: Epoch time: 101.55 s 
+2026-04-09 00:48:37.290900: +2026-04-09 00:48:37.297974: Epoch 307 +2026-04-09 00:48:37.299344: Current learning rate: 0.00719 +2026-04-09 00:50:18.655513: train_loss -0.1711 +2026-04-09 00:50:18.660656: val_loss -0.1752 +2026-04-09 00:50:18.662413: Pseudo dice [0.5619, 0.4076, 0.852, 0.6243, 0.6734, 0.6358, 0.8429] +2026-04-09 00:50:18.664261: Epoch time: 101.37 s +2026-04-09 00:50:18.665668: Yayy! New best EMA pseudo Dice: 0.5906 +2026-04-09 00:50:21.278225: +2026-04-09 00:50:21.280713: Epoch 308 +2026-04-09 00:50:21.282350: Current learning rate: 0.00718 +2026-04-09 00:52:02.891769: train_loss -0.1581 +2026-04-09 00:52:02.896183: val_loss -0.1478 +2026-04-09 00:52:02.897900: Pseudo dice [0.2812, 0.6835, 0.4698, 0.633, 0.5109, 0.4847, 0.7087] +2026-04-09 00:52:02.900172: Epoch time: 101.62 s +2026-04-09 00:52:03.939142: +2026-04-09 00:52:03.940473: Epoch 309 +2026-04-09 00:52:03.941624: Current learning rate: 0.00717 +2026-04-09 00:53:45.238891: train_loss -0.1597 +2026-04-09 00:53:45.243412: val_loss -0.1668 +2026-04-09 00:53:45.245319: Pseudo dice [0.7855, 0.4348, 0.6937, 0.156, 0.5458, 0.8216, 0.8375] +2026-04-09 00:53:45.247068: Epoch time: 101.3 s +2026-04-09 00:53:46.311022: +2026-04-09 00:53:46.312317: Epoch 310 +2026-04-09 00:53:46.313457: Current learning rate: 0.00716 +2026-04-09 00:55:27.870502: train_loss -0.1681 +2026-04-09 00:55:27.875742: val_loss -0.1638 +2026-04-09 00:55:27.877818: Pseudo dice [0.6842, 0.7443, 0.7338, 0.6459, 0.592, 0.4807, 0.9407] +2026-04-09 00:55:27.879754: Epoch time: 101.56 s +2026-04-09 00:55:27.881657: Yayy! 
New best EMA pseudo Dice: 0.598 +2026-04-09 00:55:30.449854: +2026-04-09 00:55:30.452085: Epoch 311 +2026-04-09 00:55:30.453479: Current learning rate: 0.00715 +2026-04-09 00:57:12.356111: train_loss -0.1757 +2026-04-09 00:57:12.360895: val_loss -0.1449 +2026-04-09 00:57:12.362449: Pseudo dice [0.5308, 0.7213, 0.7422, 0.7556, 0.4783, 0.7684, 0.9106] +2026-04-09 00:57:12.364613: Epoch time: 101.91 s +2026-04-09 00:57:12.366049: Yayy! New best EMA pseudo Dice: 0.6083 +2026-04-09 00:57:14.751993: +2026-04-09 00:57:14.754361: Epoch 312 +2026-04-09 00:57:14.755523: Current learning rate: 0.00714 +2026-04-09 00:58:56.324967: train_loss -0.1433 +2026-04-09 00:58:56.329599: val_loss -0.1435 +2026-04-09 00:58:56.330814: Pseudo dice [0.5863, 0.7613, 0.6538, 0.453, 0.3744, 0.5554, 0.276] +2026-04-09 00:58:56.334701: Epoch time: 101.58 s +2026-04-09 00:58:57.390562: +2026-04-09 00:58:57.392172: Epoch 313 +2026-04-09 00:58:57.393431: Current learning rate: 0.00713 +2026-04-09 01:00:39.036034: train_loss -0.1549 +2026-04-09 01:00:39.055174: val_loss -0.1394 +2026-04-09 01:00:39.066163: Pseudo dice [0.6809, 0.0275, 0.8101, 0.595, 0.432, 0.7609, 0.8395] +2026-04-09 01:00:39.069220: Epoch time: 101.65 s +2026-04-09 01:00:41.155215: +2026-04-09 01:00:41.157262: Epoch 314 +2026-04-09 01:00:41.158943: Current learning rate: 0.00712 +2026-04-09 01:02:22.928249: train_loss -0.1785 +2026-04-09 01:02:22.932712: val_loss -0.142 +2026-04-09 01:02:22.934082: Pseudo dice [0.5243, 0.8105, 0.7597, 0.1512, 0.4419, 0.605, 0.5594] +2026-04-09 01:02:22.936121: Epoch time: 101.78 s +2026-04-09 01:02:23.998326: +2026-04-09 01:02:23.999830: Epoch 315 +2026-04-09 01:02:24.001399: Current learning rate: 0.00711 +2026-04-09 01:04:05.701947: train_loss -0.1628 +2026-04-09 01:04:05.706107: val_loss -0.1794 +2026-04-09 01:04:05.707511: Pseudo dice [0.7409, 0.6595, 0.8009, 0.6262, 0.4137, 0.6567, 0.8199] +2026-04-09 01:04:05.709191: Epoch time: 101.71 s +2026-04-09 01:04:06.762490: +2026-04-09 
01:04:06.764061: Epoch 316 +2026-04-09 01:04:06.765242: Current learning rate: 0.0071 +2026-04-09 01:05:48.308306: train_loss -0.1637 +2026-04-09 01:05:48.313306: val_loss -0.1391 +2026-04-09 01:05:48.315203: Pseudo dice [0.6931, 0.299, 0.3342, 0.1469, 0.3829, 0.7516, 0.9268] +2026-04-09 01:05:48.317296: Epoch time: 101.55 s +2026-04-09 01:05:49.402212: +2026-04-09 01:05:49.403936: Epoch 317 +2026-04-09 01:05:49.405778: Current learning rate: 0.0071 +2026-04-09 01:07:31.595164: train_loss -0.1373 +2026-04-09 01:07:31.601473: val_loss -0.1558 +2026-04-09 01:07:31.603881: Pseudo dice [0.5925, 0.7269, 0.6727, 0.6571, 0.3753, 0.6288, 0.8592] +2026-04-09 01:07:31.608238: Epoch time: 102.2 s +2026-04-09 01:07:32.684187: +2026-04-09 01:07:32.685852: Epoch 318 +2026-04-09 01:07:32.687051: Current learning rate: 0.00709 +2026-04-09 01:09:14.002287: train_loss -0.1636 +2026-04-09 01:09:14.007144: val_loss -0.1448 +2026-04-09 01:09:14.008896: Pseudo dice [0.4941, 0.0907, 0.7851, 0.6696, 0.5236, 0.4551, 0.842] +2026-04-09 01:09:14.010770: Epoch time: 101.32 s +2026-04-09 01:09:15.100509: +2026-04-09 01:09:15.102314: Epoch 319 +2026-04-09 01:09:15.103585: Current learning rate: 0.00708 +2026-04-09 01:10:56.649688: train_loss -0.173 +2026-04-09 01:10:56.654441: val_loss -0.1485 +2026-04-09 01:10:56.655901: Pseudo dice [0.7216, 0.1933, 0.5537, 0.6266, 0.4537, 0.4056, 0.4479] +2026-04-09 01:10:56.657907: Epoch time: 101.55 s +2026-04-09 01:10:57.729898: +2026-04-09 01:10:57.731275: Epoch 320 +2026-04-09 01:10:57.732436: Current learning rate: 0.00707 +2026-04-09 01:12:39.397598: train_loss -0.1479 +2026-04-09 01:12:39.402075: val_loss -0.1249 +2026-04-09 01:12:39.403843: Pseudo dice [0.2285, 0.5766, 0.7181, 0.7021, 0.5842, 0.6265, 0.6874] +2026-04-09 01:12:39.405616: Epoch time: 101.67 s +2026-04-09 01:12:40.478982: +2026-04-09 01:12:40.480471: Epoch 321 +2026-04-09 01:12:40.481980: Current learning rate: 0.00706 +2026-04-09 01:14:21.909651: train_loss -0.1751 +2026-04-09 
01:14:21.914343: val_loss -0.1478 +2026-04-09 01:14:21.915749: Pseudo dice [0.6839, 0.8655, 0.8407, 0.5712, 0.4342, 0.5233, 0.8855] +2026-04-09 01:14:21.917521: Epoch time: 101.43 s +2026-04-09 01:14:22.993754: +2026-04-09 01:14:22.995467: Epoch 322 +2026-04-09 01:14:22.997012: Current learning rate: 0.00705 +2026-04-09 01:16:04.455442: train_loss -0.1684 +2026-04-09 01:16:04.460574: val_loss -0.1452 +2026-04-09 01:16:04.462686: Pseudo dice [0.637, 0.6678, 0.8242, 0.5817, 0.4869, 0.6831, 0.752] +2026-04-09 01:16:04.465445: Epoch time: 101.46 s +2026-04-09 01:16:05.545680: +2026-04-09 01:16:05.547266: Epoch 323 +2026-04-09 01:16:05.548426: Current learning rate: 0.00704 +2026-04-09 01:17:47.089643: train_loss -0.1821 +2026-04-09 01:17:47.097462: val_loss -0.1538 +2026-04-09 01:17:47.098910: Pseudo dice [0.7648, 0.4882, 0.7058, 0.2475, 0.413, 0.4319, 0.8478] +2026-04-09 01:17:47.101535: Epoch time: 101.55 s +2026-04-09 01:17:48.196282: +2026-04-09 01:17:48.197814: Epoch 324 +2026-04-09 01:17:48.199052: Current learning rate: 0.00703 +2026-04-09 01:19:29.817017: train_loss -0.1597 +2026-04-09 01:19:29.821838: val_loss -0.1494 +2026-04-09 01:19:29.823718: Pseudo dice [0.5808, 0.2951, 0.8365, 0.7667, 0.5015, 0.11, 0.8393] +2026-04-09 01:19:29.825536: Epoch time: 101.62 s +2026-04-09 01:19:30.889059: +2026-04-09 01:19:30.890419: Epoch 325 +2026-04-09 01:19:30.891649: Current learning rate: 0.00702 +2026-04-09 01:21:12.405451: train_loss -0.1655 +2026-04-09 01:21:12.409807: val_loss -0.1594 +2026-04-09 01:21:12.411576: Pseudo dice [0.7189, 0.7525, 0.5991, 0.6021, 0.3497, 0.3545, 0.7812] +2026-04-09 01:21:12.413304: Epoch time: 101.52 s +2026-04-09 01:21:13.489368: +2026-04-09 01:21:13.491001: Epoch 326 +2026-04-09 01:21:13.492528: Current learning rate: 0.00701 +2026-04-09 01:22:54.952222: train_loss -0.1694 +2026-04-09 01:22:54.956795: val_loss -0.1233 +2026-04-09 01:22:54.958805: Pseudo dice [0.6551, 0.3147, 0.7766, 0.2533, 0.4599, 0.6229, 0.5945] +2026-04-09 
01:22:54.960623: Epoch time: 101.47 s +2026-04-09 01:22:56.039021: +2026-04-09 01:22:56.040892: Epoch 327 +2026-04-09 01:22:56.042105: Current learning rate: 0.007 +2026-04-09 01:24:37.493328: train_loss -0.178 +2026-04-09 01:24:37.503747: val_loss -0.1525 +2026-04-09 01:24:37.506939: Pseudo dice [0.7285, 0.3654, 0.5649, 0.598, 0.487, 0.6271, 0.8306] +2026-04-09 01:24:37.510560: Epoch time: 101.46 s +2026-04-09 01:24:38.590331: +2026-04-09 01:24:38.591924: Epoch 328 +2026-04-09 01:24:38.593484: Current learning rate: 0.00699 +2026-04-09 01:26:20.360055: train_loss -0.1604 +2026-04-09 01:26:20.364269: val_loss -0.1521 +2026-04-09 01:26:20.365781: Pseudo dice [0.6867, 0.7265, 0.793, 0.7692, 0.6387, 0.7705, 0.858] +2026-04-09 01:26:20.367808: Epoch time: 101.77 s +2026-04-09 01:26:21.438006: +2026-04-09 01:26:21.439420: Epoch 329 +2026-04-09 01:26:21.440830: Current learning rate: 0.00698 +2026-04-09 01:28:03.035366: train_loss -0.1638 +2026-04-09 01:28:03.040442: val_loss -0.1444 +2026-04-09 01:28:03.042859: Pseudo dice [0.3537, 0.697, 0.74, 0.7298, 0.4476, 0.3545, 0.6839] +2026-04-09 01:28:03.045027: Epoch time: 101.6 s +2026-04-09 01:28:04.127471: +2026-04-09 01:28:04.128815: Epoch 330 +2026-04-09 01:28:04.130021: Current learning rate: 0.00697 +2026-04-09 01:29:45.529950: train_loss -0.1661 +2026-04-09 01:29:45.534225: val_loss -0.132 +2026-04-09 01:29:45.536018: Pseudo dice [0.6763, 0.8488, 0.6505, 0.6763, 0.4841, 0.5958, 0.1759] +2026-04-09 01:29:45.538312: Epoch time: 101.41 s +2026-04-09 01:29:46.596859: +2026-04-09 01:29:46.598080: Epoch 331 +2026-04-09 01:29:46.599446: Current learning rate: 0.00696 +2026-04-09 01:31:28.143102: train_loss -0.1665 +2026-04-09 01:31:28.147664: val_loss -0.1305 +2026-04-09 01:31:28.149099: Pseudo dice [0.575, 0.7698, 0.523, 0.3681, 0.4192, 0.406, 0.5751] +2026-04-09 01:31:28.151342: Epoch time: 101.55 s +2026-04-09 01:31:29.231773: +2026-04-09 01:31:29.243358: Epoch 332 +2026-04-09 01:31:29.247501: Current learning rate: 
0.00696 +2026-04-09 01:33:10.522954: train_loss -0.1631 +2026-04-09 01:33:10.528651: val_loss -0.1565 +2026-04-09 01:33:10.530266: Pseudo dice [0.4723, 0.7633, 0.5166, 0.6973, 0.4063, 0.3239, 0.8827] +2026-04-09 01:33:10.532393: Epoch time: 101.29 s +2026-04-09 01:33:11.602027: +2026-04-09 01:33:11.603778: Epoch 333 +2026-04-09 01:33:11.605479: Current learning rate: 0.00695 +2026-04-09 01:34:52.991272: train_loss -0.1554 +2026-04-09 01:34:52.995845: val_loss -0.1213 +2026-04-09 01:34:52.998718: Pseudo dice [0.3853, 0.2947, 0.6486, 0.6081, 0.4331, 0.3906, 0.3796] +2026-04-09 01:34:53.001338: Epoch time: 101.39 s +2026-04-09 01:34:55.175067: +2026-04-09 01:34:55.176568: Epoch 334 +2026-04-09 01:34:55.177893: Current learning rate: 0.00694 +2026-04-09 01:36:36.648155: train_loss -0.1662 +2026-04-09 01:36:36.653562: val_loss -0.1501 +2026-04-09 01:36:36.656278: Pseudo dice [0.8555, 0.6153, 0.6948, 0.5322, 0.4798, 0.4834, 0.8175] +2026-04-09 01:36:36.658345: Epoch time: 101.48 s +2026-04-09 01:36:37.753255: +2026-04-09 01:36:37.754671: Epoch 335 +2026-04-09 01:36:37.755881: Current learning rate: 0.00693 +2026-04-09 01:38:19.199300: train_loss -0.1781 +2026-04-09 01:38:19.204567: val_loss -0.1636 +2026-04-09 01:38:19.206331: Pseudo dice [0.5827, 0.1669, 0.6922, 0.1213, 0.4897, 0.6385, 0.6132] +2026-04-09 01:38:19.208365: Epoch time: 101.45 s +2026-04-09 01:38:20.306863: +2026-04-09 01:38:20.308306: Epoch 336 +2026-04-09 01:38:20.309552: Current learning rate: 0.00692 +2026-04-09 01:40:02.087652: train_loss -0.1697 +2026-04-09 01:40:02.093242: val_loss -0.1198 +2026-04-09 01:40:02.094885: Pseudo dice [0.742, 0.3375, 0.6694, 0.1254, 0.4458, 0.2293, 0.5813] +2026-04-09 01:40:02.098291: Epoch time: 101.78 s +2026-04-09 01:40:03.201101: +2026-04-09 01:40:03.202491: Epoch 337 +2026-04-09 01:40:03.204033: Current learning rate: 0.00691 +2026-04-09 01:41:44.919664: train_loss -0.1731 +2026-04-09 01:41:44.923932: val_loss -0.1533 +2026-04-09 01:41:44.925434: Pseudo dice 
[0.6348, 0.3908, 0.7357, 0.6216, 0.564, 0.4558, 0.8766] +2026-04-09 01:41:44.927055: Epoch time: 101.72 s +2026-04-09 01:41:46.013956: +2026-04-09 01:41:46.015498: Epoch 338 +2026-04-09 01:41:46.016708: Current learning rate: 0.0069 +2026-04-09 01:43:27.689928: train_loss -0.1558 +2026-04-09 01:43:27.694321: val_loss -0.1355 +2026-04-09 01:43:27.695672: Pseudo dice [0.5205, 0.7677, 0.7479, 0.1647, 0.4814, 0.3118, 0.1538] +2026-04-09 01:43:27.697557: Epoch time: 101.68 s +2026-04-09 01:43:28.784385: +2026-04-09 01:43:28.786133: Epoch 339 +2026-04-09 01:43:28.787978: Current learning rate: 0.00689 +2026-04-09 01:45:10.334592: train_loss -0.1645 +2026-04-09 01:45:10.338913: val_loss -0.1645 +2026-04-09 01:45:10.340536: Pseudo dice [0.8019, 0.6999, 0.6691, 0.2309, 0.4961, 0.5186, 0.8308] +2026-04-09 01:45:10.342540: Epoch time: 101.55 s +2026-04-09 01:45:11.438307: +2026-04-09 01:45:11.440189: Epoch 340 +2026-04-09 01:45:11.441461: Current learning rate: 0.00688 +2026-04-09 01:46:53.046227: train_loss -0.1782 +2026-04-09 01:46:53.050689: val_loss -0.1302 +2026-04-09 01:46:53.052758: Pseudo dice [0.7495, 0.8045, 0.7714, 0.4231, 0.3949, 0.3727, 0.821] +2026-04-09 01:46:53.054978: Epoch time: 101.61 s +2026-04-09 01:46:54.154834: +2026-04-09 01:46:54.156223: Epoch 341 +2026-04-09 01:46:54.157466: Current learning rate: 0.00687 +2026-04-09 01:48:35.579735: train_loss -0.1726 +2026-04-09 01:48:35.585594: val_loss -0.1625 +2026-04-09 01:48:35.587638: Pseudo dice [0.8006, 0.6078, 0.807, 0.5359, 0.3999, 0.6195, 0.6062] +2026-04-09 01:48:35.591643: Epoch time: 101.43 s +2026-04-09 01:48:36.695731: +2026-04-09 01:48:36.697241: Epoch 342 +2026-04-09 01:48:36.698488: Current learning rate: 0.00686 +2026-04-09 01:50:18.177113: train_loss -0.1762 +2026-04-09 01:50:18.182404: val_loss -0.1149 +2026-04-09 01:50:18.184281: Pseudo dice [0.5229, 0.266, 0.7405, 0.4523, 0.5099, 0.1198, 0.2485] +2026-04-09 01:50:18.186473: Epoch time: 101.48 s +2026-04-09 01:50:19.287459: +2026-04-09 
01:50:19.289062: Epoch 343 +2026-04-09 01:50:19.290255: Current learning rate: 0.00685 +2026-04-09 01:52:00.828816: train_loss -0.1705 +2026-04-09 01:52:00.833622: val_loss -0.1468 +2026-04-09 01:52:00.835493: Pseudo dice [0.7875, 0.533, 0.3549, 0.5178, 0.5049, 0.6499, 0.7324] +2026-04-09 01:52:00.838107: Epoch time: 101.54 s +2026-04-09 01:52:01.939896: +2026-04-09 01:52:01.941299: Epoch 344 +2026-04-09 01:52:01.942438: Current learning rate: 0.00684 +2026-04-09 01:53:43.450551: train_loss -0.1571 +2026-04-09 01:53:43.455034: val_loss -0.152 +2026-04-09 01:53:43.456876: Pseudo dice [0.7355, 0.4238, 0.8241, 0.8364, 0.4089, 0.2866, 0.7011] +2026-04-09 01:53:43.458618: Epoch time: 101.51 s +2026-04-09 01:53:44.570708: +2026-04-09 01:53:44.572281: Epoch 345 +2026-04-09 01:53:44.573493: Current learning rate: 0.00683 +2026-04-09 01:55:25.991210: train_loss -0.1783 +2026-04-09 01:55:25.995916: val_loss -0.121 +2026-04-09 01:55:25.997466: Pseudo dice [0.7021, 0.5008, 0.6605, 0.0063, 0.3526, 0.3626, 0.6982] +2026-04-09 01:55:25.999233: Epoch time: 101.42 s +2026-04-09 01:55:27.092924: +2026-04-09 01:55:27.094371: Epoch 346 +2026-04-09 01:55:27.095422: Current learning rate: 0.00682 +2026-04-09 01:57:08.659084: train_loss -0.1671 +2026-04-09 01:57:08.664073: val_loss -0.1294 +2026-04-09 01:57:08.666034: Pseudo dice [0.7289, 0.706, 0.7787, 0.244, 0.651, 0.2749, 0.88] +2026-04-09 01:57:08.668155: Epoch time: 101.57 s +2026-04-09 01:57:09.787164: +2026-04-09 01:57:09.788635: Epoch 347 +2026-04-09 01:57:09.790014: Current learning rate: 0.00681 +2026-04-09 01:58:51.330083: train_loss -0.168 +2026-04-09 01:58:51.334186: val_loss -0.1236 +2026-04-09 01:58:51.336356: Pseudo dice [0.651, 0.8483, 0.5356, 0.2847, 0.3023, 0.3933, 0.4606] +2026-04-09 01:58:51.338311: Epoch time: 101.55 s +2026-04-09 01:58:52.450905: +2026-04-09 01:58:52.452215: Epoch 348 +2026-04-09 01:58:52.453478: Current learning rate: 0.0068 +2026-04-09 02:00:34.024221: train_loss -0.1615 +2026-04-09 
02:00:34.028685: val_loss -0.1496 +2026-04-09 02:00:34.030529: Pseudo dice [0.3888, 0.7869, 0.4983, 0.5245, 0.5341, 0.7425, 0.6213] +2026-04-09 02:00:34.032281: Epoch time: 101.58 s +2026-04-09 02:00:35.127011: +2026-04-09 02:00:35.128324: Epoch 349 +2026-04-09 02:00:35.130015: Current learning rate: 0.0068 +2026-04-09 02:02:16.693610: train_loss -0.1746 +2026-04-09 02:02:16.699359: val_loss -0.163 +2026-04-09 02:02:16.701080: Pseudo dice [0.7705, 0.5496, 0.7747, 0.6282, 0.5179, 0.5453, 0.7815] +2026-04-09 02:02:16.702984: Epoch time: 101.57 s +2026-04-09 02:02:19.166360: +2026-04-09 02:02:19.168763: Epoch 350 +2026-04-09 02:02:19.170000: Current learning rate: 0.00679 +2026-04-09 02:04:00.601651: train_loss -0.1777 +2026-04-09 02:04:00.613760: val_loss -0.13 +2026-04-09 02:04:00.617044: Pseudo dice [0.5859, 0.4773, 0.5765, 0.3139, 0.3959, 0.6461, 0.3862] +2026-04-09 02:04:00.620431: Epoch time: 101.44 s +2026-04-09 02:04:01.935554: +2026-04-09 02:04:01.936978: Epoch 351 +2026-04-09 02:04:01.938216: Current learning rate: 0.00678 +2026-04-09 02:05:43.546558: train_loss -0.1651 +2026-04-09 02:05:43.551202: val_loss -0.1544 +2026-04-09 02:05:43.554257: Pseudo dice [0.229, 0.8677, 0.833, 0.3454, 0.5736, 0.4538, 0.7081] +2026-04-09 02:05:43.556607: Epoch time: 101.61 s +2026-04-09 02:05:44.643826: +2026-04-09 02:05:44.645201: Epoch 352 +2026-04-09 02:05:44.646567: Current learning rate: 0.00677 +2026-04-09 02:07:26.150683: train_loss -0.1682 +2026-04-09 02:07:26.156419: val_loss -0.134 +2026-04-09 02:07:26.157659: Pseudo dice [0.8334, 0.2116, 0.6907, 0.2233, 0.3031, 0.3234, 0.741] +2026-04-09 02:07:26.159886: Epoch time: 101.51 s +2026-04-09 02:07:28.360628: +2026-04-09 02:07:28.362346: Epoch 353 +2026-04-09 02:07:28.363841: Current learning rate: 0.00676 +2026-04-09 02:09:09.955394: train_loss -0.1778 +2026-04-09 02:09:09.959597: val_loss -0.1506 +2026-04-09 02:09:09.961431: Pseudo dice [0.6673, 0.7661, 0.7088, 0.5011, 0.3117, 0.3905, 0.7508] +2026-04-09 
02:09:09.963239: Epoch time: 101.6 s +2026-04-09 02:09:11.057709: +2026-04-09 02:09:11.059050: Epoch 354 +2026-04-09 02:09:11.060240: Current learning rate: 0.00675 +2026-04-09 02:10:52.536999: train_loss -0.1823 +2026-04-09 02:10:52.540805: val_loss -0.1717 +2026-04-09 02:10:52.542216: Pseudo dice [0.3298, 0.7873, 0.7472, 0.0086, 0.5546, 0.7448, 0.7737] +2026-04-09 02:10:52.543912: Epoch time: 101.48 s +2026-04-09 02:10:53.647629: +2026-04-09 02:10:53.649149: Epoch 355 +2026-04-09 02:10:53.650676: Current learning rate: 0.00674 +2026-04-09 02:12:35.430682: train_loss -0.1636 +2026-04-09 02:12:35.435289: val_loss -0.1459 +2026-04-09 02:12:35.436897: Pseudo dice [0.8243, 0.3122, 0.7199, 0.4971, 0.3893, 0.4059, 0.7498] +2026-04-09 02:12:35.439798: Epoch time: 101.79 s +2026-04-09 02:12:36.535049: +2026-04-09 02:12:36.540372: Epoch 356 +2026-04-09 02:12:36.547964: Current learning rate: 0.00673 +2026-04-09 02:14:18.288490: train_loss -0.162 +2026-04-09 02:14:18.295334: val_loss -0.1188 +2026-04-09 02:14:18.297339: Pseudo dice [0.5659, 0.1911, 0.6901, 0.6766, 0.2368, 0.1984, 0.6417] +2026-04-09 02:14:18.299236: Epoch time: 101.76 s +2026-04-09 02:14:19.399909: +2026-04-09 02:14:19.401434: Epoch 357 +2026-04-09 02:14:19.402945: Current learning rate: 0.00672 +2026-04-09 02:16:00.946564: train_loss -0.1813 +2026-04-09 02:16:00.952424: val_loss -0.1753 +2026-04-09 02:16:00.954377: Pseudo dice [0.3027, 0.3726, 0.5855, 0.6607, 0.5593, 0.7817, 0.8099] +2026-04-09 02:16:00.956372: Epoch time: 101.55 s +2026-04-09 02:16:02.081170: +2026-04-09 02:16:02.082548: Epoch 358 +2026-04-09 02:16:02.083780: Current learning rate: 0.00671 +2026-04-09 02:17:43.733529: train_loss -0.1684 +2026-04-09 02:17:43.738063: val_loss -0.1409 +2026-04-09 02:17:43.739760: Pseudo dice [0.6156, 0.3209, 0.6337, 0.7849, 0.4597, 0.365, 0.8506] +2026-04-09 02:17:43.741705: Epoch time: 101.66 s +2026-04-09 02:17:44.833673: +2026-04-09 02:17:44.835118: Epoch 359 +2026-04-09 02:17:44.836376: Current learning 
rate: 0.0067 +2026-04-09 02:19:26.476861: train_loss -0.1815 +2026-04-09 02:19:26.481303: val_loss -0.1238 +2026-04-09 02:19:26.482878: Pseudo dice [0.8639, 0.5433, 0.6383, 0.1336, 0.5325, 0.4562, 0.8203] +2026-04-09 02:19:26.484682: Epoch time: 101.65 s +2026-04-09 02:19:27.582299: +2026-04-09 02:19:27.587909: Epoch 360 +2026-04-09 02:19:27.589245: Current learning rate: 0.00669 +2026-04-09 02:21:09.134139: train_loss -0.1779 +2026-04-09 02:21:09.146567: val_loss -0.1577 +2026-04-09 02:21:09.148101: Pseudo dice [0.8265, 0.8314, 0.5565, 0.6908, 0.2987, 0.2093, 0.8396] +2026-04-09 02:21:09.150845: Epoch time: 101.55 s +2026-04-09 02:21:10.243447: +2026-04-09 02:21:10.245128: Epoch 361 +2026-04-09 02:21:10.246406: Current learning rate: 0.00668 +2026-04-09 02:22:51.838255: train_loss -0.1822 +2026-04-09 02:22:51.842427: val_loss -0.1382 +2026-04-09 02:22:51.844171: Pseudo dice [0.6571, 0.2418, 0.7418, 0.6102, 0.5381, 0.4878, 0.865] +2026-04-09 02:22:51.845897: Epoch time: 101.6 s +2026-04-09 02:22:52.966081: +2026-04-09 02:22:52.967718: Epoch 362 +2026-04-09 02:22:52.969640: Current learning rate: 0.00667 +2026-04-09 02:24:34.375259: train_loss -0.1731 +2026-04-09 02:24:34.380060: val_loss -0.1382 +2026-04-09 02:24:34.382432: Pseudo dice [0.4965, 0.2829, 0.5589, 0.5601, 0.3527, 0.5192, 0.7327] +2026-04-09 02:24:34.384391: Epoch time: 101.41 s +2026-04-09 02:24:35.486086: +2026-04-09 02:24:35.487407: Epoch 363 +2026-04-09 02:24:35.488796: Current learning rate: 0.00666 +2026-04-09 02:26:17.007267: train_loss -0.173 +2026-04-09 02:26:17.012306: val_loss -0.1525 +2026-04-09 02:26:17.013730: Pseudo dice [0.5278, 0.4902, 0.7296, 0.6039, 0.3687, 0.4605, 0.9157] +2026-04-09 02:26:17.015727: Epoch time: 101.52 s +2026-04-09 02:26:18.127977: +2026-04-09 02:26:18.135614: Epoch 364 +2026-04-09 02:26:18.137465: Current learning rate: 0.00665 +2026-04-09 02:27:59.918609: train_loss -0.1785 +2026-04-09 02:27:59.922728: val_loss -0.1395 +2026-04-09 02:27:59.923986: Pseudo dice 
[0.5677, 0.1316, 0.8308, 0.7423, 0.4114, 0.6572, 0.4084] +2026-04-09 02:27:59.925625: Epoch time: 101.79 s +2026-04-09 02:28:01.019941: +2026-04-09 02:28:01.021395: Epoch 365 +2026-04-09 02:28:01.022552: Current learning rate: 0.00665 +2026-04-09 02:29:42.546098: train_loss -0.1581 +2026-04-09 02:29:42.551172: val_loss -0.1476 +2026-04-09 02:29:42.552738: Pseudo dice [0.6418, 0.3155, 0.8436, 0.6739, 0.5738, 0.6203, 0.7378] +2026-04-09 02:29:42.554892: Epoch time: 101.53 s +2026-04-09 02:29:43.663960: +2026-04-09 02:29:43.665309: Epoch 366 +2026-04-09 02:29:43.666482: Current learning rate: 0.00664 +2026-04-09 02:31:25.139930: train_loss -0.1585 +2026-04-09 02:31:25.144387: val_loss -0.1431 +2026-04-09 02:31:25.145740: Pseudo dice [0.5638, 0.1843, 0.8093, 0.8342, 0.4935, 0.6884, 0.8468] +2026-04-09 02:31:25.147527: Epoch time: 101.48 s +2026-04-09 02:31:26.255154: +2026-04-09 02:31:26.256764: Epoch 367 +2026-04-09 02:31:26.258772: Current learning rate: 0.00663 +2026-04-09 02:33:07.598923: train_loss -0.1617 +2026-04-09 02:33:07.606550: val_loss -0.1209 +2026-04-09 02:33:07.608449: Pseudo dice [0.4679, 0.6221, 0.7941, 0.4258, 0.5749, 0.0671, 0.3774] +2026-04-09 02:33:07.611017: Epoch time: 101.35 s +2026-04-09 02:33:08.705808: +2026-04-09 02:33:08.707239: Epoch 368 +2026-04-09 02:33:08.708586: Current learning rate: 0.00662 +2026-04-09 02:34:50.197881: train_loss -0.1605 +2026-04-09 02:34:50.203547: val_loss -0.1354 +2026-04-09 02:34:50.205091: Pseudo dice [0.5823, 0.3537, 0.7659, 0.4694, 0.5023, 0.6217, 0.7078] +2026-04-09 02:34:50.207115: Epoch time: 101.5 s +2026-04-09 02:34:51.323852: +2026-04-09 02:34:51.325914: Epoch 369 +2026-04-09 02:34:51.327806: Current learning rate: 0.00661 +2026-04-09 02:36:32.841761: train_loss -0.1729 +2026-04-09 02:36:32.846679: val_loss -0.1572 +2026-04-09 02:36:32.848499: Pseudo dice [0.6715, 0.1729, 0.5482, 0.5318, 0.4528, 0.72, 0.6892] +2026-04-09 02:36:32.850371: Epoch time: 101.52 s +2026-04-09 02:36:33.947925: +2026-04-09 
02:36:33.949648: Epoch 370 +2026-04-09 02:36:33.951158: Current learning rate: 0.0066 +2026-04-09 02:38:15.326644: train_loss -0.1656 +2026-04-09 02:38:15.330796: val_loss -0.1421 +2026-04-09 02:38:15.332413: Pseudo dice [0.548, 0.1056, 0.868, 0.209, 0.5242, 0.6467, 0.489] +2026-04-09 02:38:15.334902: Epoch time: 101.38 s +2026-04-09 02:38:16.431871: +2026-04-09 02:38:16.433210: Epoch 371 +2026-04-09 02:38:16.435360: Current learning rate: 0.00659 +2026-04-09 02:39:57.779082: train_loss -0.1885 +2026-04-09 02:39:57.783716: val_loss -0.1436 +2026-04-09 02:39:57.785533: Pseudo dice [0.6675, 0.5852, 0.8395, 0.5059, 0.4579, 0.658, 0.8637] +2026-04-09 02:39:57.787753: Epoch time: 101.35 s +2026-04-09 02:39:58.884530: +2026-04-09 02:39:58.885806: Epoch 372 +2026-04-09 02:39:58.887264: Current learning rate: 0.00658 +2026-04-09 02:41:40.420226: train_loss -0.175 +2026-04-09 02:41:40.425412: val_loss -0.1293 +2026-04-09 02:41:40.426848: Pseudo dice [0.5501, 0.2042, 0.353, 0.512, 0.3186, 0.4482, 0.8916] +2026-04-09 02:41:40.428456: Epoch time: 101.54 s +2026-04-09 02:41:42.632973: +2026-04-09 02:41:42.634486: Epoch 373 +2026-04-09 02:41:42.635683: Current learning rate: 0.00657 +2026-04-09 02:43:24.041115: train_loss -0.1751 +2026-04-09 02:43:24.046869: val_loss -0.1564 +2026-04-09 02:43:24.049108: Pseudo dice [0.3087, 0.6302, 0.8767, 0.5577, 0.5944, 0.6658, 0.6356] +2026-04-09 02:43:24.051177: Epoch time: 101.41 s +2026-04-09 02:43:25.151745: +2026-04-09 02:43:25.153238: Epoch 374 +2026-04-09 02:43:25.154724: Current learning rate: 0.00656 +2026-04-09 02:45:07.021270: train_loss -0.1797 +2026-04-09 02:45:07.025489: val_loss -0.1735 +2026-04-09 02:45:07.027303: Pseudo dice [0.5074, 0.4581, 0.7788, 0.5416, 0.5234, 0.5399, 0.8028] +2026-04-09 02:45:07.029107: Epoch time: 101.87 s +2026-04-09 02:45:08.139433: +2026-04-09 02:45:08.142788: Epoch 375 +2026-04-09 02:45:08.145226: Current learning rate: 0.00655 +2026-04-09 02:46:49.781336: train_loss -0.1796 +2026-04-09 
02:46:49.786149: val_loss -0.1686 +2026-04-09 02:46:49.787436: Pseudo dice [0.3044, 0.8234, 0.8333, 0.6967, 0.5272, 0.8497, 0.6456] +2026-04-09 02:46:49.789290: Epoch time: 101.64 s +2026-04-09 02:46:50.913054: +2026-04-09 02:46:50.914482: Epoch 376 +2026-04-09 02:46:50.915717: Current learning rate: 0.00654 +2026-04-09 02:48:32.483492: train_loss -0.1657 +2026-04-09 02:48:32.488737: val_loss -0.1448 +2026-04-09 02:48:32.490864: Pseudo dice [0.7279, 0.6971, 0.7475, 0.3763, 0.4106, 0.285, 0.7459] +2026-04-09 02:48:32.493656: Epoch time: 101.57 s +2026-04-09 02:48:33.593750: +2026-04-09 02:48:33.595216: Epoch 377 +2026-04-09 02:48:33.596482: Current learning rate: 0.00653 +2026-04-09 02:50:15.153257: train_loss -0.1669 +2026-04-09 02:50:15.157599: val_loss -0.1424 +2026-04-09 02:50:15.158998: Pseudo dice [0.6692, 0.6469, 0.6116, 0.3462, 0.3924, 0.4392, 0.7086] +2026-04-09 02:50:15.161072: Epoch time: 101.56 s +2026-04-09 02:50:16.246611: +2026-04-09 02:50:16.248238: Epoch 378 +2026-04-09 02:50:16.249671: Current learning rate: 0.00652 +2026-04-09 02:51:57.770619: train_loss -0.1649 +2026-04-09 02:51:57.775516: val_loss -0.0955 +2026-04-09 02:51:57.777131: Pseudo dice [0.6182, 0.6054, 0.4381, 0.1315, 0.2287, 0.4981, 0.5891] +2026-04-09 02:51:57.780967: Epoch time: 101.53 s +2026-04-09 02:51:58.884199: +2026-04-09 02:51:58.885460: Epoch 379 +2026-04-09 02:51:58.886863: Current learning rate: 0.00651 +2026-04-09 02:53:40.507679: train_loss -0.1588 +2026-04-09 02:53:40.511858: val_loss -0.1227 +2026-04-09 02:53:40.513294: Pseudo dice [0.6035, 0.6786, 0.8033, 0.2692, 0.2863, 0.5943, 0.6116] +2026-04-09 02:53:40.515413: Epoch time: 101.63 s +2026-04-09 02:53:41.607356: +2026-04-09 02:53:41.609056: Epoch 380 +2026-04-09 02:53:41.610937: Current learning rate: 0.0065 +2026-04-09 02:55:23.079540: train_loss -0.1655 +2026-04-09 02:55:23.084125: val_loss -0.1542 +2026-04-09 02:55:23.086661: Pseudo dice [0.4443, 0.8222, 0.7609, 0.1713, 0.5536, 0.286, 0.8416] +2026-04-09 
02:55:23.090954: Epoch time: 101.48 s +2026-04-09 02:55:24.191067: +2026-04-09 02:55:24.193879: Epoch 381 +2026-04-09 02:55:24.195484: Current learning rate: 0.00649 +2026-04-09 02:57:05.779160: train_loss -0.1716 +2026-04-09 02:57:05.783340: val_loss -0.161 +2026-04-09 02:57:05.785170: Pseudo dice [0.8274, 0.2421, 0.7551, 0.567, 0.3172, 0.6656, 0.7342] +2026-04-09 02:57:05.786930: Epoch time: 101.59 s +2026-04-09 02:57:06.908476: +2026-04-09 02:57:06.910608: Epoch 382 +2026-04-09 02:57:06.911897: Current learning rate: 0.00648 +2026-04-09 02:58:48.402609: train_loss -0.1727 +2026-04-09 02:58:48.408949: val_loss -0.1724 +2026-04-09 02:58:48.410604: Pseudo dice [0.8095, 0.8386, 0.7994, 0.683, 0.5564, 0.487, 0.8823] +2026-04-09 02:58:48.412745: Epoch time: 101.5 s +2026-04-09 02:58:49.521358: +2026-04-09 02:58:49.523043: Epoch 383 +2026-04-09 02:58:49.524537: Current learning rate: 0.00648 +2026-04-09 03:00:30.826354: train_loss -0.1753 +2026-04-09 03:00:30.831363: val_loss -0.1548 +2026-04-09 03:00:30.832890: Pseudo dice [0.5837, 0.2768, 0.721, 0.8443, 0.3944, 0.793, 0.8057] +2026-04-09 03:00:30.835013: Epoch time: 101.31 s +2026-04-09 03:00:31.948899: +2026-04-09 03:00:31.951060: Epoch 384 +2026-04-09 03:00:31.952336: Current learning rate: 0.00647 +2026-04-09 03:02:13.569617: train_loss -0.1856 +2026-04-09 03:02:13.573553: val_loss -0.1434 +2026-04-09 03:02:13.575110: Pseudo dice [0.7233, 0.8145, 0.6425, 0.5471, 0.5003, 0.4809, 0.2724] +2026-04-09 03:02:13.576921: Epoch time: 101.62 s +2026-04-09 03:02:14.708797: +2026-04-09 03:02:14.710838: Epoch 385 +2026-04-09 03:02:14.712061: Current learning rate: 0.00646 +2026-04-09 03:03:56.105038: train_loss -0.1661 +2026-04-09 03:03:56.110996: val_loss -0.1368 +2026-04-09 03:03:56.112302: Pseudo dice [0.7908, 0.3403, 0.7194, 0.5484, 0.3879, 0.7566, 0.6132] +2026-04-09 03:03:56.115054: Epoch time: 101.4 s +2026-04-09 03:03:57.260364: +2026-04-09 03:03:57.261952: Epoch 386 +2026-04-09 03:03:57.263301: Current learning rate: 
0.00645 +2026-04-09 03:05:38.893208: train_loss -0.1743 +2026-04-09 03:05:38.898766: val_loss -0.1598 +2026-04-09 03:05:38.900204: Pseudo dice [0.6201, 0.4603, 0.8049, 0.5292, 0.6168, 0.391, 0.6826] +2026-04-09 03:05:38.902080: Epoch time: 101.64 s +2026-04-09 03:05:40.032792: +2026-04-09 03:05:40.034628: Epoch 387 +2026-04-09 03:05:40.036273: Current learning rate: 0.00644 +2026-04-09 03:07:21.821452: train_loss -0.1727 +2026-04-09 03:07:21.825668: val_loss -0.1516 +2026-04-09 03:07:21.827752: Pseudo dice [0.6754, 0.7825, 0.6691, 0.6856, 0.5321, 0.5204, 0.8903] +2026-04-09 03:07:21.830282: Epoch time: 101.79 s +2026-04-09 03:07:22.943435: +2026-04-09 03:07:22.944883: Epoch 388 +2026-04-09 03:07:22.946127: Current learning rate: 0.00643 +2026-04-09 03:09:04.451268: train_loss -0.163 +2026-04-09 03:09:04.459108: val_loss -0.1258 +2026-04-09 03:09:04.460881: Pseudo dice [0.4895, 0.0818, 0.8248, 0.6003, 0.554, 0.6284, 0.5053] +2026-04-09 03:09:04.462660: Epoch time: 101.51 s +2026-04-09 03:09:05.587507: +2026-04-09 03:09:05.589810: Epoch 389 +2026-04-09 03:09:05.591167: Current learning rate: 0.00642 +2026-04-09 03:10:47.144158: train_loss -0.1763 +2026-04-09 03:10:47.148704: val_loss -0.1678 +2026-04-09 03:10:47.150214: Pseudo dice [0.6933, 0.5964, 0.7355, 0.4477, 0.6074, 0.33, 0.7898] +2026-04-09 03:10:47.152344: Epoch time: 101.56 s +2026-04-09 03:10:48.281705: +2026-04-09 03:10:48.283022: Epoch 390 +2026-04-09 03:10:48.284160: Current learning rate: 0.00641 +2026-04-09 03:12:29.648392: train_loss -0.1792 +2026-04-09 03:12:29.653738: val_loss -0.1596 +2026-04-09 03:12:29.655875: Pseudo dice [0.404, 0.8626, 0.8303, 0.6964, 0.3691, 0.5638, 0.743] +2026-04-09 03:12:29.657888: Epoch time: 101.37 s +2026-04-09 03:12:30.778939: +2026-04-09 03:12:30.781275: Epoch 391 +2026-04-09 03:12:30.782567: Current learning rate: 0.0064 +2026-04-09 03:14:12.523744: train_loss -0.1785 +2026-04-09 03:14:12.536091: val_loss -0.1513 +2026-04-09 03:14:12.539284: Pseudo dice [0.7252, 
0.8522, 0.787, 0.6397, 0.4401, 0.4847, 0.7189] +2026-04-09 03:14:12.543787: Epoch time: 101.75 s +2026-04-09 03:14:13.672714: +2026-04-09 03:14:13.677442: Epoch 392 +2026-04-09 03:14:13.684187: Current learning rate: 0.00639 +2026-04-09 03:15:55.999818: train_loss -0.1794 +2026-04-09 03:15:56.009284: val_loss -0.1458 +2026-04-09 03:15:56.012660: Pseudo dice [0.6059, 0.6388, 0.6096, 0.7386, 0.5047, 0.3581, 0.8227] +2026-04-09 03:15:56.015837: Epoch time: 102.33 s +2026-04-09 03:15:57.147439: +2026-04-09 03:15:57.148919: Epoch 393 +2026-04-09 03:15:57.150135: Current learning rate: 0.00638 +2026-04-09 03:17:39.197195: train_loss -0.1747 +2026-04-09 03:17:39.201886: val_loss -0.1457 +2026-04-09 03:17:39.203521: Pseudo dice [0.6313, 0.7871, 0.791, 0.233, 0.5855, 0.5289, 0.8656] +2026-04-09 03:17:39.206094: Epoch time: 102.05 s +2026-04-09 03:17:40.306455: +2026-04-09 03:17:40.308580: Epoch 394 +2026-04-09 03:17:40.310034: Current learning rate: 0.00637 +2026-04-09 03:19:21.980345: train_loss -0.1798 +2026-04-09 03:19:21.984834: val_loss -0.1802 +2026-04-09 03:19:21.986662: Pseudo dice [0.7813, 0.667, 0.7453, 0.7411, 0.5157, 0.714, 0.7617] +2026-04-09 03:19:21.988604: Epoch time: 101.68 s +2026-04-09 03:19:21.990069: Yayy! New best EMA pseudo Dice: 0.6134 +2026-04-09 03:19:24.482413: +2026-04-09 03:19:24.485193: Epoch 395 +2026-04-09 03:19:24.486654: Current learning rate: 0.00636 +2026-04-09 03:21:06.058759: train_loss -0.1714 +2026-04-09 03:21:06.063406: val_loss -0.1332 +2026-04-09 03:21:06.065102: Pseudo dice [0.7104, 0.7918, 0.7852, 0.5804, 0.4517, 0.2943, 0.7092] +2026-04-09 03:21:06.068458: Epoch time: 101.58 s +2026-04-09 03:21:06.069890: Yayy! 
New best EMA pseudo Dice: 0.6138 +2026-04-09 03:21:08.849222: +2026-04-09 03:21:08.851706: Epoch 396 +2026-04-09 03:21:08.852975: Current learning rate: 0.00635 +2026-04-09 03:22:50.460352: train_loss -0.175 +2026-04-09 03:22:50.465206: val_loss -0.1459 +2026-04-09 03:22:50.467365: Pseudo dice [0.7753, 0.1808, 0.7377, 0.7707, 0.4156, 0.2833, 0.8209] +2026-04-09 03:22:50.470186: Epoch time: 101.61 s +2026-04-09 03:22:51.590342: +2026-04-09 03:22:51.591733: Epoch 397 +2026-04-09 03:22:51.592888: Current learning rate: 0.00634 +2026-04-09 03:24:33.288707: train_loss -0.1688 +2026-04-09 03:24:33.293483: val_loss -0.1476 +2026-04-09 03:24:33.295024: Pseudo dice [0.7895, 0.6849, 0.7443, 0.3914, 0.3528, 0.4966, 0.654] +2026-04-09 03:24:33.296521: Epoch time: 101.7 s +2026-04-09 03:24:34.404677: +2026-04-09 03:24:34.406003: Epoch 398 +2026-04-09 03:24:34.407167: Current learning rate: 0.00633 +2026-04-09 03:26:16.011813: train_loss -0.163 +2026-04-09 03:26:16.015786: val_loss -0.155 +2026-04-09 03:26:16.017415: Pseudo dice [0.6014, 0.6315, 0.5759, 0.1661, 0.5687, 0.4169, 0.7466] +2026-04-09 03:26:16.019444: Epoch time: 101.61 s +2026-04-09 03:26:17.144815: +2026-04-09 03:26:17.146322: Epoch 399 +2026-04-09 03:26:17.147521: Current learning rate: 0.00632 +2026-04-09 03:27:58.887707: train_loss -0.1792 +2026-04-09 03:27:58.893558: val_loss -0.1461 +2026-04-09 03:27:58.895211: Pseudo dice [0.611, 0.6122, 0.5395, 0.6427, 0.547, 0.4392, 0.8926] +2026-04-09 03:27:58.897553: Epoch time: 101.75 s +2026-04-09 03:28:01.649589: +2026-04-09 03:28:01.652229: Epoch 400 +2026-04-09 03:28:01.653751: Current learning rate: 0.00631 +2026-04-09 03:29:43.177270: train_loss -0.1725 +2026-04-09 03:29:43.181836: val_loss -0.1393 +2026-04-09 03:29:43.183324: Pseudo dice [0.7419, 0.5041, 0.655, 0.5671, 0.6223, 0.8536, 0.9305] +2026-04-09 03:29:43.185132: Epoch time: 101.53 s +2026-04-09 03:29:44.312442: +2026-04-09 03:29:44.313842: Epoch 401 +2026-04-09 03:29:44.315371: Current learning rate: 
0.0063 +2026-04-09 03:31:26.573866: train_loss -0.1633 +2026-04-09 03:31:26.581519: val_loss -0.1658 +2026-04-09 03:31:26.583312: Pseudo dice [0.7462, 0.8542, 0.8133, 0.5827, 0.5353, 0.7401, 0.4093] +2026-04-09 03:31:26.585482: Epoch time: 102.26 s +2026-04-09 03:31:26.588431: Yayy! New best EMA pseudo Dice: 0.6161 +2026-04-09 03:31:29.321237: +2026-04-09 03:31:29.325393: Epoch 402 +2026-04-09 03:31:29.326805: Current learning rate: 0.0063 +2026-04-09 03:33:10.947176: train_loss -0.1661 +2026-04-09 03:33:10.952724: val_loss -0.1583 +2026-04-09 03:33:10.955990: Pseudo dice [0.6778, 0.8362, 0.8808, 0.0898, 0.5246, 0.6348, 0.6074] +2026-04-09 03:33:10.957937: Epoch time: 101.63 s +2026-04-09 03:33:12.062872: +2026-04-09 03:33:12.064713: Epoch 403 +2026-04-09 03:33:12.066040: Current learning rate: 0.00629 +2026-04-09 03:34:53.576887: train_loss -0.1583 +2026-04-09 03:34:53.581831: val_loss -0.1391 +2026-04-09 03:34:53.585538: Pseudo dice [0.6494, 0.0696, 0.7787, 0.3843, 0.5287, 0.4282, 0.8479] +2026-04-09 03:34:53.589423: Epoch time: 101.52 s +2026-04-09 03:34:54.716036: +2026-04-09 03:34:54.717595: Epoch 404 +2026-04-09 03:34:54.718945: Current learning rate: 0.00628 +2026-04-09 03:36:36.094044: train_loss -0.1683 +2026-04-09 03:36:36.099724: val_loss -0.1579 +2026-04-09 03:36:36.101275: Pseudo dice [0.7621, 0.6318, 0.7496, 0.5005, 0.4436, 0.5069, 0.7859] +2026-04-09 03:36:36.102943: Epoch time: 101.38 s +2026-04-09 03:36:37.221618: +2026-04-09 03:36:37.223180: Epoch 405 +2026-04-09 03:36:37.224349: Current learning rate: 0.00627 +2026-04-09 03:38:18.850812: train_loss -0.1775 +2026-04-09 03:38:18.855918: val_loss -0.1785 +2026-04-09 03:38:18.857521: Pseudo dice [0.779, 0.5243, 0.8322, 0.5056, 0.6143, 0.517, 0.7131] +2026-04-09 03:38:18.859482: Epoch time: 101.63 s +2026-04-09 03:38:19.967587: +2026-04-09 03:38:19.968841: Epoch 406 +2026-04-09 03:38:19.971063: Current learning rate: 0.00626 +2026-04-09 03:40:01.565461: train_loss -0.1835 +2026-04-09 03:40:01.570735: 
val_loss -0.1216 +2026-04-09 03:40:01.572487: Pseudo dice [0.484, 0.0366, 0.7331, 0.5009, 0.1558, 0.4411, 0.9018] +2026-04-09 03:40:01.574161: Epoch time: 101.6 s +2026-04-09 03:40:02.705926: +2026-04-09 03:40:02.707410: Epoch 407 +2026-04-09 03:40:02.708621: Current learning rate: 0.00625 +2026-04-09 03:41:44.212460: train_loss -0.1759 +2026-04-09 03:41:44.216796: val_loss -0.1374 +2026-04-09 03:41:44.218058: Pseudo dice [0.7674, 0.2613, 0.6405, 0.6385, 0.4693, 0.6975, 0.7366] +2026-04-09 03:41:44.219794: Epoch time: 101.51 s +2026-04-09 03:41:45.346089: +2026-04-09 03:41:45.351497: Epoch 408 +2026-04-09 03:41:45.352756: Current learning rate: 0.00624 +2026-04-09 03:43:26.902665: train_loss -0.1787 +2026-04-09 03:43:26.929265: val_loss -0.1582 +2026-04-09 03:43:26.931235: Pseudo dice [0.5444, 0.6124, 0.7645, 0.334, 0.4873, 0.4393, 0.7826] +2026-04-09 03:43:26.933204: Epoch time: 101.56 s +2026-04-09 03:43:28.039856: +2026-04-09 03:43:28.041811: Epoch 409 +2026-04-09 03:43:28.043321: Current learning rate: 0.00623 +2026-04-09 03:45:09.556364: train_loss -0.1926 +2026-04-09 03:45:09.560963: val_loss -0.1695 +2026-04-09 03:45:09.562976: Pseudo dice [0.7796, 0.6731, 0.6079, 0.6914, 0.5831, 0.7702, 0.7909] +2026-04-09 03:45:09.567911: Epoch time: 101.52 s +2026-04-09 03:45:11.888272: +2026-04-09 03:45:11.889919: Epoch 410 +2026-04-09 03:45:11.891209: Current learning rate: 0.00622 +2026-04-09 03:46:53.461262: train_loss -0.1875 +2026-04-09 03:46:53.466861: val_loss -0.1564 +2026-04-09 03:46:53.468282: Pseudo dice [0.8324, 0.3465, 0.819, 0.5985, 0.4656, 0.741, 0.7466] +2026-04-09 03:46:53.472459: Epoch time: 101.58 s +2026-04-09 03:46:54.539042: +2026-04-09 03:46:54.540684: Epoch 411 +2026-04-09 03:46:54.542006: Current learning rate: 0.00621 +2026-04-09 03:48:36.097450: train_loss -0.1941 +2026-04-09 03:48:36.102194: val_loss -0.1658 +2026-04-09 03:48:36.104204: Pseudo dice [0.5987, 0.7989, 0.7405, 0.394, 0.508, 0.5904, 0.7094] +2026-04-09 03:48:36.106402: Epoch time: 
101.56 s +2026-04-09 03:48:37.159928: +2026-04-09 03:48:37.161423: Epoch 412 +2026-04-09 03:48:37.162613: Current learning rate: 0.0062 +2026-04-09 03:50:18.919426: train_loss -0.1933 +2026-04-09 03:50:18.924627: val_loss -0.1413 +2026-04-09 03:50:18.926760: Pseudo dice [0.7603, 0.804, 0.7084, 0.6559, 0.3088, 0.598, 0.5044] +2026-04-09 03:50:18.928938: Epoch time: 101.76 s +2026-04-09 03:50:19.976224: +2026-04-09 03:50:19.977739: Epoch 413 +2026-04-09 03:50:19.978963: Current learning rate: 0.00619 +2026-04-09 03:52:01.516692: train_loss -0.1954 +2026-04-09 03:52:01.521194: val_loss -0.1295 +2026-04-09 03:52:01.522708: Pseudo dice [0.5474, 0.6351, 0.7898, 0.4521, 0.2746, 0.2959, 0.7887] +2026-04-09 03:52:01.524576: Epoch time: 101.54 s +2026-04-09 03:52:02.587522: +2026-04-09 03:52:02.588989: Epoch 414 +2026-04-09 03:52:02.590797: Current learning rate: 0.00618 +2026-04-09 03:53:44.403100: train_loss -0.1875 +2026-04-09 03:53:44.411431: val_loss -0.1367 +2026-04-09 03:53:44.413288: Pseudo dice [0.5714, 0.4309, 0.6189, 0.7761, 0.407, 0.6138, 0.8841] +2026-04-09 03:53:44.415241: Epoch time: 101.82 s +2026-04-09 03:53:45.463113: +2026-04-09 03:53:45.464943: Epoch 415 +2026-04-09 03:53:45.466077: Current learning rate: 0.00617 +2026-04-09 03:55:27.126099: train_loss -0.1832 +2026-04-09 03:55:27.132438: val_loss -0.1331 +2026-04-09 03:55:27.134577: Pseudo dice [0.4276, 0.3823, 0.7565, 0.4332, 0.5357, 0.7679, 0.4997] +2026-04-09 03:55:27.141362: Epoch time: 101.67 s +2026-04-09 03:55:28.201813: +2026-04-09 03:55:28.203321: Epoch 416 +2026-04-09 03:55:28.204582: Current learning rate: 0.00616 +2026-04-09 03:57:09.739538: train_loss -0.1947 +2026-04-09 03:57:09.744834: val_loss -0.1677 +2026-04-09 03:57:09.746497: Pseudo dice [0.666, 0.8162, 0.832, 0.4874, 0.4046, 0.684, 0.8677] +2026-04-09 03:57:09.748998: Epoch time: 101.54 s +2026-04-09 03:57:10.821486: +2026-04-09 03:57:10.823120: Epoch 417 +2026-04-09 03:57:10.824468: Current learning rate: 0.00615 +2026-04-09 
03:58:52.523984: train_loss -0.1786 +2026-04-09 03:58:52.531532: val_loss -0.1494 +2026-04-09 03:58:52.533494: Pseudo dice [0.7688, 0.6894, 0.7771, 0.4903, 0.5453, 0.7662, 0.4432] +2026-04-09 03:58:52.535895: Epoch time: 101.71 s +2026-04-09 03:58:53.594488: +2026-04-09 03:58:53.595839: Epoch 418 +2026-04-09 03:58:53.597086: Current learning rate: 0.00614 +2026-04-09 04:00:35.226569: train_loss -0.1841 +2026-04-09 04:00:35.234602: val_loss -0.1549 +2026-04-09 04:00:35.237163: Pseudo dice [0.6989, 0.7748, 0.5427, 0.5344, 0.4657, 0.8509, 0.8835] +2026-04-09 04:00:35.239733: Epoch time: 101.64 s +2026-04-09 04:00:35.241481: Yayy! New best EMA pseudo Dice: 0.6173 +2026-04-09 04:00:38.009059: +2026-04-09 04:00:38.011396: Epoch 419 +2026-04-09 04:00:38.013088: Current learning rate: 0.00613 +2026-04-09 04:02:19.715141: train_loss -0.1765 +2026-04-09 04:02:19.722751: val_loss -0.1458 +2026-04-09 04:02:19.724797: Pseudo dice [0.732, 0.8828, 0.6988, 0.6777, 0.4923, 0.5058, 0.7325] +2026-04-09 04:02:19.726766: Epoch time: 101.71 s +2026-04-09 04:02:19.728417: Yayy! New best EMA pseudo Dice: 0.623 +2026-04-09 04:02:22.350152: +2026-04-09 04:02:22.353536: Epoch 420 +2026-04-09 04:02:22.354814: Current learning rate: 0.00612 +2026-04-09 04:04:03.838295: train_loss -0.1845 +2026-04-09 04:04:03.844515: val_loss -0.1582 +2026-04-09 04:04:03.846988: Pseudo dice [0.5591, 0.8193, 0.6876, 0.6572, 0.5475, 0.6088, 0.7245] +2026-04-09 04:04:03.848965: Epoch time: 101.49 s +2026-04-09 04:04:03.851017: Yayy! New best EMA pseudo Dice: 0.6265 +2026-04-09 04:04:06.263065: +2026-04-09 04:04:06.266511: Epoch 421 +2026-04-09 04:04:06.267711: Current learning rate: 0.00612 +2026-04-09 04:05:47.918891: train_loss -0.1951 +2026-04-09 04:05:47.924150: val_loss -0.1763 +2026-04-09 04:05:47.925745: Pseudo dice [0.546, 0.3591, 0.7074, 0.7464, 0.6485, 0.7091, 0.69] +2026-04-09 04:05:47.929370: Epoch time: 101.66 s +2026-04-09 04:05:47.930927: Yayy! 
New best EMA pseudo Dice: 0.6268 +2026-04-09 04:05:50.574066: +2026-04-09 04:05:50.575837: Epoch 422 +2026-04-09 04:05:50.577100: Current learning rate: 0.00611 +2026-04-09 04:07:32.237657: train_loss -0.1743 +2026-04-09 04:07:32.244751: val_loss -0.1469 +2026-04-09 04:07:32.246710: Pseudo dice [0.5509, 0.1806, 0.6927, 0.5932, 0.3999, 0.7901, 0.877] +2026-04-09 04:07:32.251207: Epoch time: 101.67 s +2026-04-09 04:07:33.337261: +2026-04-09 04:07:33.338638: Epoch 423 +2026-04-09 04:07:33.339816: Current learning rate: 0.0061 +2026-04-09 04:09:15.039011: train_loss -0.1837 +2026-04-09 04:09:15.044183: val_loss -0.1585 +2026-04-09 04:09:15.045820: Pseudo dice [0.784, 0.6962, 0.7539, 0.5478, 0.5994, 0.5691, 0.8191] +2026-04-09 04:09:15.049811: Epoch time: 101.71 s +2026-04-09 04:09:15.051517: Yayy! New best EMA pseudo Dice: 0.6283 +2026-04-09 04:09:17.895273: +2026-04-09 04:09:17.897019: Epoch 424 +2026-04-09 04:09:17.898351: Current learning rate: 0.00609 +2026-04-09 04:10:59.600172: train_loss -0.1802 +2026-04-09 04:10:59.607760: val_loss -0.1535 +2026-04-09 04:10:59.609393: Pseudo dice [0.7655, 0.8288, 0.7888, 0.7282, 0.5447, 0.5847, 0.8005] +2026-04-09 04:10:59.611562: Epoch time: 101.71 s +2026-04-09 04:10:59.613113: Yayy! New best EMA pseudo Dice: 0.6375 +2026-04-09 04:11:02.303151: +2026-04-09 04:11:02.305786: Epoch 425 +2026-04-09 04:11:02.307150: Current learning rate: 0.00608 +2026-04-09 04:12:44.245584: train_loss -0.1785 +2026-04-09 04:12:44.252406: val_loss -0.155 +2026-04-09 04:12:44.254037: Pseudo dice [0.6414, 0.7779, 0.8864, 0.589, 0.2865, 0.7402, 0.9402] +2026-04-09 04:12:44.256385: Epoch time: 101.95 s +2026-04-09 04:12:44.258041: Yayy! 
New best EMA pseudo Dice: 0.6432 +2026-04-09 04:12:47.012810: +2026-04-09 04:12:47.014233: Epoch 426 +2026-04-09 04:12:47.015634: Current learning rate: 0.00607 +2026-04-09 04:14:29.099562: train_loss -0.1701 +2026-04-09 04:14:29.105776: val_loss -0.1737 +2026-04-09 04:14:29.107810: Pseudo dice [0.6934, 0.8123, 0.8344, 0.4237, 0.6333, 0.566, 0.848] +2026-04-09 04:14:29.109845: Epoch time: 102.09 s +2026-04-09 04:14:29.111419: Yayy! New best EMA pseudo Dice: 0.6476 +2026-04-09 04:14:31.940851: +2026-04-09 04:14:31.943543: Epoch 427 +2026-04-09 04:14:31.944968: Current learning rate: 0.00606 +2026-04-09 04:16:15.211913: train_loss -0.2129 +2026-04-09 04:16:15.219747: val_loss -0.211 +2026-04-09 04:16:15.222135: Pseudo dice [0.0, 0.0, 0.857, 0.5605, 0.451, 0.2994, 0.5256] +2026-04-09 04:16:15.224271: Epoch time: 103.27 s +2026-04-09 04:16:16.369143: +2026-04-09 04:16:16.371824: Epoch 428 +2026-04-09 04:16:16.374666: Current learning rate: 0.00605 +2026-04-09 04:17:58.364669: train_loss -0.2669 +2026-04-09 04:17:58.371742: val_loss -0.2088 +2026-04-09 04:17:58.373567: Pseudo dice [0.0, 0.0, 0.6279, 0.5755, 0.4211, 0.5387, 0.3812] +2026-04-09 04:17:58.375952: Epoch time: 102.0 s +2026-04-09 04:17:59.460775: +2026-04-09 04:17:59.462101: Epoch 429 +2026-04-09 04:17:59.463479: Current learning rate: 0.00604 +2026-04-09 04:19:41.339619: train_loss -0.2724 +2026-04-09 04:19:41.345828: val_loss -0.2124 +2026-04-09 04:19:41.347458: Pseudo dice [0.0, 0.0, 0.6822, 0.2094, 0.4486, 0.5236, 0.8624] +2026-04-09 04:19:41.349406: Epoch time: 101.88 s +2026-04-09 04:19:42.397825: +2026-04-09 04:19:42.406355: Epoch 430 +2026-04-09 04:19:42.407971: Current learning rate: 0.00603 +2026-04-09 04:21:24.271507: train_loss -0.2617 +2026-04-09 04:21:24.278844: val_loss -0.2252 +2026-04-09 04:21:24.280440: Pseudo dice [0.3402, 0.0, 0.8414, 0.0852, 0.1856, 0.3451, 0.7466] +2026-04-09 04:21:24.282960: Epoch time: 101.88 s +2026-04-09 04:21:25.332753: +2026-04-09 04:21:25.334174: Epoch 431 
+2026-04-09 04:21:25.335341: Current learning rate: 0.00602 +2026-04-09 04:23:07.148565: train_loss -0.2684 +2026-04-09 04:23:07.154988: val_loss -0.2405 +2026-04-09 04:23:07.158233: Pseudo dice [0.0496, 0.0, 0.7683, 0.1992, 0.3118, 0.6551, 0.8123] +2026-04-09 04:23:07.161910: Epoch time: 101.82 s +2026-04-09 04:23:08.200129: +2026-04-09 04:23:08.201698: Epoch 432 +2026-04-09 04:23:08.202870: Current learning rate: 0.00601 +2026-04-09 04:24:50.135175: train_loss -0.2838 +2026-04-09 04:24:50.141709: val_loss -0.2178 +2026-04-09 04:24:50.143376: Pseudo dice [0.1325, 0.0, 0.75, 0.7671, 0.5304, 0.6429, 0.731] +2026-04-09 04:24:50.147267: Epoch time: 101.94 s +2026-04-09 04:24:51.207555: +2026-04-09 04:24:51.209116: Epoch 433 +2026-04-09 04:24:51.210522: Current learning rate: 0.006 +2026-04-09 04:26:33.401891: train_loss -0.2643 +2026-04-09 04:26:33.409460: val_loss -0.2343 +2026-04-09 04:26:33.411099: Pseudo dice [0.0, 0.0, 0.7273, 0.7528, 0.4341, 0.531, 0.4653] +2026-04-09 04:26:33.413233: Epoch time: 102.2 s +2026-04-09 04:26:34.813617: +2026-04-09 04:26:34.815494: Epoch 434 +2026-04-09 04:26:34.821319: Current learning rate: 0.00599 +2026-04-09 04:28:16.612936: train_loss -0.2672 +2026-04-09 04:28:16.619888: val_loss -0.2536 +2026-04-09 04:28:16.621846: Pseudo dice [0.0, 0.0, 0.787, 0.476, 0.4825, 0.5719, 0.903] +2026-04-09 04:28:16.623845: Epoch time: 101.8 s +2026-04-09 04:28:17.681776: +2026-04-09 04:28:17.684293: Epoch 435 +2026-04-09 04:28:17.685598: Current learning rate: 0.00598 +2026-04-09 04:29:59.941084: train_loss -0.2715 +2026-04-09 04:29:59.955511: val_loss -0.2011 +2026-04-09 04:29:59.958392: Pseudo dice [0.0, 0.0, 0.4954, 0.3878, 0.599, 0.6091, 0.6752] +2026-04-09 04:29:59.960409: Epoch time: 102.26 s +2026-04-09 04:30:01.039440: +2026-04-09 04:30:01.041207: Epoch 436 +2026-04-09 04:30:01.042481: Current learning rate: 0.00597 +2026-04-09 04:31:43.017278: train_loss -0.2702 +2026-04-09 04:31:43.023343: val_loss -0.2618 +2026-04-09 04:31:43.024932: 
Pseudo dice [0.0, 0.0, 0.7531, 0.8104, 0.5606, 0.3086, 0.8259] +2026-04-09 04:31:43.026863: Epoch time: 101.98 s +2026-04-09 04:31:44.085944: +2026-04-09 04:31:44.087276: Epoch 437 +2026-04-09 04:31:44.088411: Current learning rate: 0.00596 +2026-04-09 04:33:25.863595: train_loss -0.2818 +2026-04-09 04:33:25.869109: val_loss -0.2198 +2026-04-09 04:33:25.870797: Pseudo dice [0.0, 0.0, 0.4529, 0.7698, 0.456, 0.75, 0.8669] +2026-04-09 04:33:25.872806: Epoch time: 101.78 s +2026-04-09 04:33:26.913103: +2026-04-09 04:33:26.914337: Epoch 438 +2026-04-09 04:33:26.915544: Current learning rate: 0.00595 +2026-04-09 04:35:09.192524: train_loss -0.2842 +2026-04-09 04:35:09.198562: val_loss -0.2409 +2026-04-09 04:35:09.200184: Pseudo dice [0.0413, 0.0, 0.8115, 0.5632, 0.3114, 0.4528, 0.8682] +2026-04-09 04:35:09.202653: Epoch time: 102.28 s +2026-04-09 04:35:10.277369: +2026-04-09 04:35:10.279473: Epoch 439 +2026-04-09 04:35:10.280895: Current learning rate: 0.00594 +2026-04-09 04:36:52.594592: train_loss -0.2842 +2026-04-09 04:36:52.600681: val_loss -0.2293 +2026-04-09 04:36:52.603151: Pseudo dice [0.0, 0.0, 0.5109, 0.207, 0.4279, 0.3885, 0.7825] +2026-04-09 04:36:52.605449: Epoch time: 102.32 s +2026-04-09 04:36:53.659954: +2026-04-09 04:36:53.662470: Epoch 440 +2026-04-09 04:36:53.664257: Current learning rate: 0.00593 +2026-04-09 04:38:35.662846: train_loss -0.2769 +2026-04-09 04:38:35.668879: val_loss -0.2105 +2026-04-09 04:38:35.670252: Pseudo dice [0.0, 0.0, 0.7839, 0.4616, 0.3752, 0.1457, 0.5873] +2026-04-09 04:38:35.672100: Epoch time: 102.01 s +2026-04-09 04:38:36.702372: +2026-04-09 04:38:36.703747: Epoch 441 +2026-04-09 04:38:36.705087: Current learning rate: 0.00592 +2026-04-09 04:40:18.472727: train_loss -0.2773 +2026-04-09 04:40:18.478587: val_loss -0.239 +2026-04-09 04:40:18.480913: Pseudo dice [0.0, 0.0, 0.7832, 0.7143, 0.614, 0.2847, 0.8691] +2026-04-09 04:40:18.482825: Epoch time: 101.77 s +2026-04-09 04:40:19.499226: +2026-04-09 04:40:19.500950: Epoch 442 
+2026-04-09 04:40:19.502347: Current learning rate: 0.00592 +2026-04-09 04:42:01.212026: train_loss -0.2791 +2026-04-09 04:42:01.218168: val_loss -0.2154 +2026-04-09 04:42:01.219850: Pseudo dice [0.0, 0.0, 0.6589, 0.2062, 0.5866, 0.5546, 0.8005] +2026-04-09 04:42:01.222101: Epoch time: 101.72 s +2026-04-09 04:42:02.267298: +2026-04-09 04:42:02.268779: Epoch 443 +2026-04-09 04:42:02.270177: Current learning rate: 0.00591 +2026-04-09 04:43:44.018812: train_loss -0.2859 +2026-04-09 04:43:44.045189: val_loss -0.2476 +2026-04-09 04:43:44.047163: Pseudo dice [0.0, 0.0, 0.8001, 0.061, 0.618, 0.4732, 0.8704] +2026-04-09 04:43:44.049179: Epoch time: 101.75 s +2026-04-09 04:43:45.101685: +2026-04-09 04:43:45.103722: Epoch 444 +2026-04-09 04:43:45.105105: Current learning rate: 0.0059 +2026-04-09 04:45:27.095586: train_loss -0.2947 +2026-04-09 04:45:27.100505: val_loss -0.2053 +2026-04-09 04:45:27.102476: Pseudo dice [0.0, 0.0, 0.7179, 0.0884, 0.317, 0.7413, 0.5979] +2026-04-09 04:45:27.105089: Epoch time: 102.0 s +2026-04-09 04:45:28.157433: +2026-04-09 04:45:28.161596: Epoch 445 +2026-04-09 04:45:28.165168: Current learning rate: 0.00589 +2026-04-09 04:47:09.797752: train_loss -0.2595 +2026-04-09 04:47:09.802448: val_loss -0.2082 +2026-04-09 04:47:09.803834: Pseudo dice [0.0, 0.0, 0.5842, 0.2752, 0.4123, 0.4636, 0.3082] +2026-04-09 04:47:09.805606: Epoch time: 101.64 s +2026-04-09 04:47:10.855092: +2026-04-09 04:47:10.856467: Epoch 446 +2026-04-09 04:47:10.857702: Current learning rate: 0.00588 +2026-04-09 04:48:52.542568: train_loss -0.2554 +2026-04-09 04:48:52.547728: val_loss -0.2326 +2026-04-09 04:48:52.549426: Pseudo dice [0.0, 0.0, 0.8321, 0.8052, 0.4668, 0.6355, 0.7504] +2026-04-09 04:48:52.551136: Epoch time: 101.69 s +2026-04-09 04:48:53.576144: +2026-04-09 04:48:53.577429: Epoch 447 +2026-04-09 04:48:53.578796: Current learning rate: 0.00587 +2026-04-09 04:50:35.396348: train_loss -0.2677 +2026-04-09 04:50:35.400580: val_loss -0.2482 +2026-04-09 04:50:35.402195: 
Pseudo dice [0.0, 0.0, 0.8221, 0.3318, 0.5553, 0.7586, 0.7811] +2026-04-09 04:50:35.403760: Epoch time: 101.82 s +2026-04-09 04:50:37.623411: +2026-04-09 04:50:37.625050: Epoch 448 +2026-04-09 04:50:37.626552: Current learning rate: 0.00586 +2026-04-09 04:52:19.544336: train_loss -0.2824 +2026-04-09 04:52:19.549530: val_loss -0.2407 +2026-04-09 04:52:19.551092: Pseudo dice [0.7979, 0.0, 0.7389, 0.5577, 0.4205, 0.5464, 0.9172] +2026-04-09 04:52:19.552941: Epoch time: 101.92 s +2026-04-09 04:52:20.595817: +2026-04-09 04:52:20.597317: Epoch 449 +2026-04-09 04:52:20.598555: Current learning rate: 0.00585 +2026-04-09 04:54:02.863670: train_loss -0.2476 +2026-04-09 04:54:02.868522: val_loss -0.2346 +2026-04-09 04:54:02.871173: Pseudo dice [0.0002, 0.0, 0.8581, 0.5407, 0.5784, 0.7105, 0.672] +2026-04-09 04:54:02.873051: Epoch time: 102.27 s +2026-04-09 04:54:05.568405: +2026-04-09 04:54:05.569914: Epoch 450 +2026-04-09 04:54:05.571271: Current learning rate: 0.00584 +2026-04-09 04:55:47.440737: train_loss -0.2604 +2026-04-09 04:55:47.448408: val_loss -0.2382 +2026-04-09 04:55:47.450609: Pseudo dice [0.0, 0.0, 0.6832, 0.6149, 0.5187, 0.7989, 0.8733] +2026-04-09 04:55:47.454344: Epoch time: 101.88 s +2026-04-09 04:55:48.488196: +2026-04-09 04:55:48.489793: Epoch 451 +2026-04-09 04:55:48.491440: Current learning rate: 0.00583 +2026-04-09 04:57:30.272017: train_loss -0.2741 +2026-04-09 04:57:30.276338: val_loss -0.2359 +2026-04-09 04:57:30.278960: Pseudo dice [0.3481, 0.0, 0.6716, 0.6085, 0.5191, 0.3204, 0.8686] +2026-04-09 04:57:30.282030: Epoch time: 101.79 s +2026-04-09 04:57:31.330555: +2026-04-09 04:57:31.332005: Epoch 452 +2026-04-09 04:57:31.333345: Current learning rate: 0.00582 +2026-04-09 04:59:13.172127: train_loss -0.2533 +2026-04-09 04:59:13.177476: val_loss -0.2151 +2026-04-09 04:59:13.179787: Pseudo dice [0.018, 0.0, 0.8422, 0.689, 0.4587, 0.6689, 0.913] +2026-04-09 04:59:13.183121: Epoch time: 101.84 s +2026-04-09 04:59:14.233091: +2026-04-09 04:59:14.234823: 
Epoch 453 +2026-04-09 04:59:14.236063: Current learning rate: 0.00581 +2026-04-09 05:00:56.210186: train_loss -0.2613 +2026-04-09 05:00:56.214914: val_loss -0.2312 +2026-04-09 05:00:56.217044: Pseudo dice [0.0317, 0.0, 0.7247, 0.3445, 0.3965, 0.6347, 0.4678] +2026-04-09 05:00:56.219277: Epoch time: 101.98 s +2026-04-09 05:00:57.268545: +2026-04-09 05:00:57.274408: Epoch 454 +2026-04-09 05:00:57.276051: Current learning rate: 0.0058 +2026-04-09 05:02:39.096896: train_loss -0.2886 +2026-04-09 05:02:39.101794: val_loss -0.2485 +2026-04-09 05:02:39.104094: Pseudo dice [0.6356, 0.0, 0.8764, 0.2933, 0.39, 0.2821, 0.7491] +2026-04-09 05:02:39.108063: Epoch time: 101.83 s +2026-04-09 05:02:40.141136: +2026-04-09 05:02:40.142567: Epoch 455 +2026-04-09 05:02:40.143935: Current learning rate: 0.00579 +2026-04-09 05:04:21.987152: train_loss -0.2604 +2026-04-09 05:04:21.992092: val_loss -0.2379 +2026-04-09 05:04:21.993612: Pseudo dice [0.4903, 0.0, 0.7857, 0.4853, 0.5378, 0.4523, 0.924] +2026-04-09 05:04:21.995348: Epoch time: 101.85 s +2026-04-09 05:04:23.081478: +2026-04-09 05:04:23.082962: Epoch 456 +2026-04-09 05:04:23.084490: Current learning rate: 0.00578 +2026-04-09 05:06:04.997831: train_loss -0.2771 +2026-04-09 05:06:05.002526: val_loss -0.2384 +2026-04-09 05:06:05.004185: Pseudo dice [0.5035, 0.0, 0.7559, 0.6572, 0.3261, 0.1472, 0.6168] +2026-04-09 05:06:05.005865: Epoch time: 101.92 s +2026-04-09 05:06:06.017459: +2026-04-09 05:06:06.018714: Epoch 457 +2026-04-09 05:06:06.019922: Current learning rate: 0.00577 +2026-04-09 05:07:47.766895: train_loss -0.2551 +2026-04-09 05:07:47.772068: val_loss -0.2301 +2026-04-09 05:07:47.774024: Pseudo dice [0.0, 0.0, 0.8069, 0.2128, 0.3042, 0.4849, 0.8802] +2026-04-09 05:07:47.775983: Epoch time: 101.75 s +2026-04-09 05:07:48.819284: +2026-04-09 05:07:48.820832: Epoch 458 +2026-04-09 05:07:48.822254: Current learning rate: 0.00576 +2026-04-09 05:09:30.705367: train_loss -0.2599 +2026-04-09 05:09:30.709565: val_loss -0.2139 
+2026-04-09 05:09:30.711156: Pseudo dice [0.0048, 0.0, 0.6545, 0.5043, 0.4096, 0.6002, 0.3906] +2026-04-09 05:09:30.713037: Epoch time: 101.89 s +2026-04-09 05:09:31.731998: +2026-04-09 05:09:31.734221: Epoch 459 +2026-04-09 05:09:31.735842: Current learning rate: 0.00575 +2026-04-09 05:11:13.712157: train_loss -0.2766 +2026-04-09 05:11:13.721366: val_loss -0.2534 +2026-04-09 05:11:13.723323: Pseudo dice [0.2418, 0.0, 0.656, 0.712, 0.5858, 0.6499, 0.8951] +2026-04-09 05:11:13.726380: Epoch time: 101.98 s +2026-04-09 05:11:14.758546: +2026-04-09 05:11:14.760169: Epoch 460 +2026-04-09 05:11:14.761288: Current learning rate: 0.00574 +2026-04-09 05:12:56.630587: train_loss -0.2706 +2026-04-09 05:12:56.636387: val_loss -0.2403 +2026-04-09 05:12:56.637781: Pseudo dice [0.5673, 0.0, 0.7265, 0.1486, 0.5664, 0.371, 0.6771] +2026-04-09 05:12:56.639539: Epoch time: 101.88 s +2026-04-09 05:12:57.661208: +2026-04-09 05:12:57.662649: Epoch 461 +2026-04-09 05:12:57.663810: Current learning rate: 0.00573 +2026-04-09 05:14:39.385512: train_loss -0.2625 +2026-04-09 05:14:39.389586: val_loss -0.24 +2026-04-09 05:14:39.391139: Pseudo dice [0.4222, 0.0, 0.8091, 0.5844, 0.377, 0.5734, 0.8618] +2026-04-09 05:14:39.392724: Epoch time: 101.73 s +2026-04-09 05:14:40.422845: +2026-04-09 05:14:40.424393: Epoch 462 +2026-04-09 05:14:40.425729: Current learning rate: 0.00572 +2026-04-09 05:16:22.311802: train_loss -0.287 +2026-04-09 05:16:22.316941: val_loss -0.1979 +2026-04-09 05:16:22.318784: Pseudo dice [0.6013, 0.0, 0.7936, 0.2671, 0.3677, 0.5037, 0.4128] +2026-04-09 05:16:22.320639: Epoch time: 101.89 s +2026-04-09 05:16:23.359831: +2026-04-09 05:16:23.361291: Epoch 463 +2026-04-09 05:16:23.362573: Current learning rate: 0.00571 +2026-04-09 05:18:05.315995: train_loss -0.2634 +2026-04-09 05:18:05.320803: val_loss -0.2395 +2026-04-09 05:18:05.322086: Pseudo dice [0.3709, 0.0, 0.7745, 0.7462, 0.4983, 0.4466, 0.8422] +2026-04-09 05:18:05.323939: Epoch time: 101.96 s +2026-04-09 
05:18:06.364395: +2026-04-09 05:18:06.365777: Epoch 464 +2026-04-09 05:18:06.367004: Current learning rate: 0.0057 +2026-04-09 05:19:48.162045: train_loss -0.2799 +2026-04-09 05:19:48.167306: val_loss -0.2031 +2026-04-09 05:19:48.169169: Pseudo dice [0.0, 0.0, 0.6184, 0.2929, 0.5143, 0.7849, 0.7946] +2026-04-09 05:19:48.171260: Epoch time: 101.8 s +2026-04-09 05:19:49.233999: +2026-04-09 05:19:49.235910: Epoch 465 +2026-04-09 05:19:49.237408: Current learning rate: 0.0057 +2026-04-09 05:21:31.112443: train_loss -0.2561 +2026-04-09 05:21:31.118474: val_loss -0.2194 +2026-04-09 05:21:31.120117: Pseudo dice [0.148, 0.0, 0.6662, 0.3602, 0.5231, 0.6389, 0.9027] +2026-04-09 05:21:31.122571: Epoch time: 101.88 s +2026-04-09 05:21:32.163506: +2026-04-09 05:21:32.168554: Epoch 466 +2026-04-09 05:21:32.187359: Current learning rate: 0.00569 +2026-04-09 05:23:14.076117: train_loss -0.2691 +2026-04-09 05:23:14.081580: val_loss -0.2299 +2026-04-09 05:23:14.083657: Pseudo dice [0.148, 0.0, 0.874, 0.6105, 0.5416, 0.6856, 0.4129] +2026-04-09 05:23:14.085904: Epoch time: 101.92 s +2026-04-09 05:23:15.138386: +2026-04-09 05:23:15.139908: Epoch 467 +2026-04-09 05:23:15.141149: Current learning rate: 0.00568 +2026-04-09 05:24:56.962393: train_loss -0.2661 +2026-04-09 05:24:56.966539: val_loss -0.2143 +2026-04-09 05:24:56.968063: Pseudo dice [0.4136, 0.0, 0.7962, 0.7811, 0.2135, 0.3817, 0.5079] +2026-04-09 05:24:56.969807: Epoch time: 101.83 s +2026-04-09 05:24:58.003912: +2026-04-09 05:24:58.005465: Epoch 468 +2026-04-09 05:24:58.006721: Current learning rate: 0.00567 +2026-04-09 05:26:39.913832: train_loss -0.2771 +2026-04-09 05:26:39.920875: val_loss -0.2456 +2026-04-09 05:26:39.922397: Pseudo dice [0.7738, 0.0, 0.871, 0.7029, 0.4191, 0.5434, 0.606] +2026-04-09 05:26:39.924040: Epoch time: 101.91 s +2026-04-09 05:26:42.172540: +2026-04-09 05:26:42.173941: Epoch 469 +2026-04-09 05:26:42.175230: Current learning rate: 0.00566 +2026-04-09 05:28:24.271074: train_loss -0.2739 +2026-04-09 
05:28:24.275761: val_loss -0.1963 +2026-04-09 05:28:24.277385: Pseudo dice [0.0216, 0.0, 0.8577, 0.5325, 0.3167, 0.3162, 0.2359] +2026-04-09 05:28:24.279163: Epoch time: 102.1 s +2026-04-09 05:28:25.316111: +2026-04-09 05:28:25.317708: Epoch 470 +2026-04-09 05:28:25.318910: Current learning rate: 0.00565 +2026-04-09 05:30:07.101706: train_loss -0.2709 +2026-04-09 05:30:07.106037: val_loss -0.2183 +2026-04-09 05:30:07.107169: Pseudo dice [0.2833, 0.0, 0.7865, 0.2421, 0.4103, 0.6579, 0.5896] +2026-04-09 05:30:07.111145: Epoch time: 101.79 s +2026-04-09 05:30:08.140611: +2026-04-09 05:30:08.142193: Epoch 471 +2026-04-09 05:30:08.143436: Current learning rate: 0.00564 +2026-04-09 05:31:50.032661: train_loss -0.2567 +2026-04-09 05:31:50.036822: val_loss -0.2522 +2026-04-09 05:31:50.038900: Pseudo dice [0.5191, 0.0, 0.7719, 0.1053, 0.4764, 0.6654, 0.8494] +2026-04-09 05:31:50.041395: Epoch time: 101.9 s +2026-04-09 05:31:51.074132: +2026-04-09 05:31:51.075497: Epoch 472 +2026-04-09 05:31:51.076726: Current learning rate: 0.00563 +2026-04-09 05:33:32.863913: train_loss -0.2716 +2026-04-09 05:33:32.869069: val_loss -0.2422 +2026-04-09 05:33:32.870616: Pseudo dice [0.2459, 0.0, 0.7525, 0.4287, 0.6292, 0.5768, 0.8305] +2026-04-09 05:33:32.872565: Epoch time: 101.79 s +2026-04-09 05:33:33.892214: +2026-04-09 05:33:33.893810: Epoch 473 +2026-04-09 05:33:33.895044: Current learning rate: 0.00562 +2026-04-09 05:35:15.769656: train_loss -0.2685 +2026-04-09 05:35:15.773930: val_loss -0.2322 +2026-04-09 05:35:15.775323: Pseudo dice [0.2507, 0.0, 0.7393, 0.4693, 0.5749, 0.2016, 0.7297] +2026-04-09 05:35:15.776938: Epoch time: 101.88 s +2026-04-09 05:35:16.817934: +2026-04-09 05:35:16.822571: Epoch 474 +2026-04-09 05:35:16.825092: Current learning rate: 0.00561 +2026-04-09 05:36:58.685981: train_loss -0.2826 +2026-04-09 05:36:58.692725: val_loss -0.2124 +2026-04-09 05:36:58.694731: Pseudo dice [0.1626, 0.0, 0.7915, 0.451, 0.4268, 0.4797, 0.654] +2026-04-09 05:36:58.697106: Epoch 
time: 101.87 s +2026-04-09 05:36:59.757629: +2026-04-09 05:36:59.759317: Epoch 475 +2026-04-09 05:36:59.761222: Current learning rate: 0.0056 +2026-04-09 05:38:41.440039: train_loss -0.2681 +2026-04-09 05:38:41.445268: val_loss -0.2563 +2026-04-09 05:38:41.447170: Pseudo dice [0.3692, 0.0, 0.7987, 0.3129, 0.4683, 0.674, 0.756] +2026-04-09 05:38:41.448998: Epoch time: 101.69 s +2026-04-09 05:38:42.487663: +2026-04-09 05:38:42.488928: Epoch 476 +2026-04-09 05:38:42.490118: Current learning rate: 0.00559 +2026-04-09 05:40:24.384077: train_loss -0.278 +2026-04-09 05:40:24.390402: val_loss -0.2169 +2026-04-09 05:40:24.392510: Pseudo dice [0.0, 0.0, 0.7079, 0.5859, 0.2467, 0.471, 0.8092] +2026-04-09 05:40:24.398125: Epoch time: 101.9 s +2026-04-09 05:40:25.435526: +2026-04-09 05:40:25.437112: Epoch 477 +2026-04-09 05:40:25.438851: Current learning rate: 0.00558 +2026-04-09 05:42:07.277607: train_loss -0.2455 +2026-04-09 05:42:07.283555: val_loss -0.2244 +2026-04-09 05:42:07.285319: Pseudo dice [0.0, 0.0, 0.7597, 0.4773, 0.4033, 0.7356, 0.7532] +2026-04-09 05:42:07.287172: Epoch time: 101.85 s +2026-04-09 05:42:08.315130: +2026-04-09 05:42:08.316525: Epoch 478 +2026-04-09 05:42:08.317622: Current learning rate: 0.00557 +2026-04-09 05:43:50.388306: train_loss -0.2476 +2026-04-09 05:43:50.392949: val_loss -0.2231 +2026-04-09 05:43:50.394577: Pseudo dice [0.0, 0.0, 0.8258, 0.4162, 0.6101, 0.6757, 0.7643] +2026-04-09 05:43:50.396666: Epoch time: 102.08 s +2026-04-09 05:43:51.464352: +2026-04-09 05:43:51.465797: Epoch 479 +2026-04-09 05:43:51.466957: Current learning rate: 0.00556 +2026-04-09 05:45:33.216233: train_loss -0.2811 +2026-04-09 05:45:33.220660: val_loss -0.2416 +2026-04-09 05:45:33.222579: Pseudo dice [0.4285, 0.0, 0.8285, 0.3756, 0.3696, 0.4296, 0.8756] +2026-04-09 05:45:33.224483: Epoch time: 101.76 s +2026-04-09 05:45:34.284017: +2026-04-09 05:45:34.285711: Epoch 480 +2026-04-09 05:45:34.286936: Current learning rate: 0.00555 +2026-04-09 05:47:16.115944: 
train_loss -0.298 +2026-04-09 05:47:16.119756: val_loss -0.2383 +2026-04-09 05:47:16.121239: Pseudo dice [0.3686, 0.0, 0.7582, 0.4731, 0.4846, 0.7863, 0.8659] +2026-04-09 05:47:16.123214: Epoch time: 101.84 s +2026-04-09 05:47:17.184188: +2026-04-09 05:47:17.185738: Epoch 481 +2026-04-09 05:47:17.187132: Current learning rate: 0.00554 +2026-04-09 05:48:59.069991: train_loss -0.2861 +2026-04-09 05:48:59.075437: val_loss -0.2367 +2026-04-09 05:48:59.077822: Pseudo dice [0.1605, 0.0, 0.5969, 0.6096, 0.494, 0.6712, 0.9] +2026-04-09 05:48:59.079820: Epoch time: 101.89 s +2026-04-09 05:49:00.181868: +2026-04-09 05:49:00.183234: Epoch 482 +2026-04-09 05:49:00.184492: Current learning rate: 0.00553 +2026-04-09 05:50:42.013810: train_loss -0.2698 +2026-04-09 05:50:42.018307: val_loss -0.2306 +2026-04-09 05:50:42.019899: Pseudo dice [0.3455, 0.0, 0.3695, 0.5255, 0.4831, 0.3856, 0.5344] +2026-04-09 05:50:42.022065: Epoch time: 101.84 s +2026-04-09 05:50:43.077783: +2026-04-09 05:50:43.079501: Epoch 483 +2026-04-09 05:50:43.080878: Current learning rate: 0.00552 +2026-04-09 05:52:25.092494: train_loss -0.2603 +2026-04-09 05:52:25.096665: val_loss -0.252 +2026-04-09 05:52:25.098227: Pseudo dice [0.3398, 0.0, 0.7929, 0.6917, 0.5748, 0.3956, 0.6276] +2026-04-09 05:52:25.100660: Epoch time: 102.02 s +2026-04-09 05:52:26.135872: +2026-04-09 05:52:26.137341: Epoch 484 +2026-04-09 05:52:26.138520: Current learning rate: 0.00551 +2026-04-09 05:54:08.225667: train_loss -0.279 +2026-04-09 05:54:08.231473: val_loss -0.2185 +2026-04-09 05:54:08.233113: Pseudo dice [0.7504, 0.0, 0.5989, 0.0291, 0.4842, 0.3877, 0.8062] +2026-04-09 05:54:08.235348: Epoch time: 102.09 s +2026-04-09 05:54:09.278160: +2026-04-09 05:54:09.280969: Epoch 485 +2026-04-09 05:54:09.283594: Current learning rate: 0.0055 +2026-04-09 05:55:51.210801: train_loss -0.2715 +2026-04-09 05:55:51.216334: val_loss -0.2408 +2026-04-09 05:55:51.218184: Pseudo dice [0.7593, 0.0, 0.7873, 0.3926, 0.5244, 0.4955, 0.8215] +2026-04-09 
05:55:51.221057: Epoch time: 101.94 s +2026-04-09 05:55:52.251990: +2026-04-09 05:55:52.254832: Epoch 486 +2026-04-09 05:55:52.257868: Current learning rate: 0.00549 +2026-04-09 05:57:34.459491: train_loss -0.2871 +2026-04-09 05:57:34.464331: val_loss -0.2067 +2026-04-09 05:57:34.466038: Pseudo dice [0.6054, 0.0, 0.6731, 0.7151, 0.5228, 0.3099, 0.8462] +2026-04-09 05:57:34.467864: Epoch time: 102.21 s +2026-04-09 05:57:35.523780: +2026-04-09 05:57:35.525260: Epoch 487 +2026-04-09 05:57:35.526642: Current learning rate: 0.00548 +2026-04-09 05:59:17.571757: train_loss -0.2833 +2026-04-09 05:59:17.576632: val_loss -0.2294 +2026-04-09 05:59:17.578977: Pseudo dice [0.2779, 0.0, 0.6258, 0.4648, 0.474, 0.4137, 0.8755] +2026-04-09 05:59:17.580833: Epoch time: 102.05 s +2026-04-09 05:59:18.653882: +2026-04-09 05:59:18.661438: Epoch 488 +2026-04-09 05:59:18.665922: Current learning rate: 0.00547 +2026-04-09 06:01:00.646463: train_loss -0.2565 +2026-04-09 06:01:00.651141: val_loss -0.2252 +2026-04-09 06:01:00.652880: Pseudo dice [0.4259, 0.0, 0.7617, 0.5477, 0.447, 0.2623, 0.7035] +2026-04-09 06:01:00.655399: Epoch time: 102.0 s +2026-04-09 06:01:01.697061: +2026-04-09 06:01:01.699121: Epoch 489 +2026-04-09 06:01:01.708083: Current learning rate: 0.00546 +2026-04-09 06:02:43.305100: train_loss -0.2592 +2026-04-09 06:02:43.311229: val_loss -0.2499 +2026-04-09 06:02:43.312776: Pseudo dice [0.3183, 0.0, 0.7291, 0.772, 0.4459, 0.4093, 0.3901] +2026-04-09 06:02:43.316980: Epoch time: 101.61 s +2026-04-09 06:02:45.363168: +2026-04-09 06:02:45.364627: Epoch 490 +2026-04-09 06:02:45.365865: Current learning rate: 0.00546 +2026-04-09 06:04:27.005935: train_loss -0.2751 +2026-04-09 06:04:27.010788: val_loss -0.2386 +2026-04-09 06:04:27.012148: Pseudo dice [0.6678, 0.0, 0.8018, 0.6966, 0.5343, 0.7456, 0.5632] +2026-04-09 06:04:27.013927: Epoch time: 101.65 s +2026-04-09 06:04:28.088807: +2026-04-09 06:04:28.091387: Epoch 491 +2026-04-09 06:04:28.093310: Current learning rate: 0.00545 
+2026-04-09 06:06:09.900508: train_loss -0.2924 +2026-04-09 06:06:09.918426: val_loss -0.2506 +2026-04-09 06:06:09.920415: Pseudo dice [0.402, 0.0, 0.7745, 0.7014, 0.4927, 0.6715, 0.8723] +2026-04-09 06:06:09.925383: Epoch time: 101.81 s +2026-04-09 06:06:10.983449: +2026-04-09 06:06:10.984976: Epoch 492 +2026-04-09 06:06:10.986322: Current learning rate: 0.00544 +2026-04-09 06:07:52.888793: train_loss -0.2902 +2026-04-09 06:07:52.894108: val_loss -0.2287 +2026-04-09 06:07:52.895738: Pseudo dice [0.4104, 0.0, 0.7455, 0.5126, 0.409, 0.4638, 0.7035] +2026-04-09 06:07:52.898152: Epoch time: 101.91 s +2026-04-09 06:07:53.963070: +2026-04-09 06:07:53.964557: Epoch 493 +2026-04-09 06:07:53.965830: Current learning rate: 0.00543 +2026-04-09 06:09:35.716542: train_loss -0.278 +2026-04-09 06:09:35.722312: val_loss -0.2384 +2026-04-09 06:09:35.723763: Pseudo dice [0.0, 0.0, 0.6743, 0.686, 0.5922, 0.3203, 0.6265] +2026-04-09 06:09:35.726281: Epoch time: 101.76 s +2026-04-09 06:09:36.782220: +2026-04-09 06:09:36.784006: Epoch 494 +2026-04-09 06:09:36.785378: Current learning rate: 0.00542 +2026-04-09 06:11:18.457375: train_loss -0.2665 +2026-04-09 06:11:18.462519: val_loss -0.2404 +2026-04-09 06:11:18.464690: Pseudo dice [0.0, 0.0, 0.847, 0.5144, 0.4901, 0.3871, 0.9282] +2026-04-09 06:11:18.466732: Epoch time: 101.68 s +2026-04-09 06:11:19.513302: +2026-04-09 06:11:19.514810: Epoch 495 +2026-04-09 06:11:19.516123: Current learning rate: 0.00541 +2026-04-09 06:13:01.542839: train_loss -0.2697 +2026-04-09 06:13:01.547989: val_loss -0.2452 +2026-04-09 06:13:01.549982: Pseudo dice [0.0, 0.0, 0.8481, 0.6215, 0.5027, 0.5813, 0.8318] +2026-04-09 06:13:01.552560: Epoch time: 102.03 s +2026-04-09 06:13:02.615768: +2026-04-09 06:13:02.617294: Epoch 496 +2026-04-09 06:13:02.618427: Current learning rate: 0.0054 +2026-04-09 06:14:44.468018: train_loss -0.2768 +2026-04-09 06:14:44.472776: val_loss -0.1691 +2026-04-09 06:14:44.474216: Pseudo dice [0.0, 0.0, 0.5235, 0.1541, 0.5476, 0.2549, 
0.1573] +2026-04-09 06:14:44.476106: Epoch time: 101.86 s +2026-04-09 06:14:45.521748: +2026-04-09 06:14:45.523040: Epoch 497 +2026-04-09 06:14:45.524365: Current learning rate: 0.00539 +2026-04-09 06:16:27.201169: train_loss -0.2668 +2026-04-09 06:16:27.205286: val_loss -0.2318 +2026-04-09 06:16:27.207351: Pseudo dice [0.0, 0.0, 0.5982, 0.3279, 0.4656, 0.571, 0.8965] +2026-04-09 06:16:27.209338: Epoch time: 101.68 s +2026-04-09 06:16:28.261226: +2026-04-09 06:16:28.262750: Epoch 498 +2026-04-09 06:16:28.263899: Current learning rate: 0.00538 +2026-04-09 06:18:10.132425: train_loss -0.2762 +2026-04-09 06:18:10.136480: val_loss -0.2442 +2026-04-09 06:18:10.138076: Pseudo dice [0.0, 0.0, 0.8673, 0.3747, 0.481, 0.54, 0.8639] +2026-04-09 06:18:10.140132: Epoch time: 101.87 s +2026-04-09 06:18:11.234442: +2026-04-09 06:18:11.235900: Epoch 499 +2026-04-09 06:18:11.237149: Current learning rate: 0.00537 +2026-04-09 06:19:52.834516: train_loss -0.2756 +2026-04-09 06:19:52.840141: val_loss -0.2302 +2026-04-09 06:19:52.842615: Pseudo dice [0.0, 0.0, 0.6355, 0.6119, 0.5457, 0.7761, 0.5246] +2026-04-09 06:19:52.844535: Epoch time: 101.6 s +2026-04-09 06:19:55.503021: +2026-04-09 06:19:55.504410: Epoch 500 +2026-04-09 06:19:55.505664: Current learning rate: 0.00536 +2026-04-09 06:21:37.390020: train_loss -0.271 +2026-04-09 06:21:37.394131: val_loss -0.2395 +2026-04-09 06:21:37.396103: Pseudo dice [0.0, 0.0, 0.8842, 0.7733, 0.6084, 0.5556, 0.6998] +2026-04-09 06:21:37.397969: Epoch time: 101.89 s +2026-04-09 06:21:38.531682: +2026-04-09 06:21:38.533111: Epoch 501 +2026-04-09 06:21:38.534334: Current learning rate: 0.00535 +2026-04-09 06:23:20.359838: train_loss -0.2694 +2026-04-09 06:23:20.367322: val_loss -0.2486 +2026-04-09 06:23:20.369844: Pseudo dice [0.6937, 0.0, 0.781, 0.7978, 0.5785, 0.5075, 0.7602] +2026-04-09 06:23:20.372081: Epoch time: 101.83 s +2026-04-09 06:23:21.432850: +2026-04-09 06:23:21.434204: Epoch 502 +2026-04-09 06:23:21.435454: Current learning rate: 
0.00534 +2026-04-09 06:25:03.170397: train_loss -0.276 +2026-04-09 06:25:03.178391: val_loss -0.2375 +2026-04-09 06:25:03.180835: Pseudo dice [0.368, 0.0, 0.7682, 0.0013, 0.6102, 0.6783, 0.7498] +2026-04-09 06:25:03.184018: Epoch time: 101.74 s +2026-04-09 06:25:04.228062: +2026-04-09 06:25:04.229360: Epoch 503 +2026-04-09 06:25:04.230595: Current learning rate: 0.00533 +2026-04-09 06:26:46.051526: train_loss -0.2778 +2026-04-09 06:26:46.056983: val_loss -0.2216 +2026-04-09 06:26:46.058548: Pseudo dice [0.1441, 0.0, 0.6104, 0.6391, 0.5716, 0.6346, 0.8225] +2026-04-09 06:26:46.060718: Epoch time: 101.83 s +2026-04-09 06:26:47.098163: +2026-04-09 06:26:47.099393: Epoch 504 +2026-04-09 06:26:47.100604: Current learning rate: 0.00532 +2026-04-09 06:28:28.795658: train_loss -0.2692 +2026-04-09 06:28:28.800476: val_loss -0.2109 +2026-04-09 06:28:28.803735: Pseudo dice [0.4998, 0.0, 0.5847, 0.0619, 0.5305, 0.2487, 0.8289] +2026-04-09 06:28:28.805605: Epoch time: 101.7 s +2026-04-09 06:28:29.883096: +2026-04-09 06:28:29.884525: Epoch 505 +2026-04-09 06:28:29.885792: Current learning rate: 0.00531 +2026-04-09 06:30:11.663006: train_loss -0.2804 +2026-04-09 06:30:11.667829: val_loss -0.2531 +2026-04-09 06:30:11.669227: Pseudo dice [0.5577, 0.0, 0.7282, 0.5113, 0.3969, 0.5649, 0.6187] +2026-04-09 06:30:11.670917: Epoch time: 101.78 s +2026-04-09 06:30:12.718851: +2026-04-09 06:30:12.720213: Epoch 506 +2026-04-09 06:30:12.721546: Current learning rate: 0.0053 +2026-04-09 06:31:54.568054: train_loss -0.268 +2026-04-09 06:31:54.578077: val_loss -0.2347 +2026-04-09 06:31:54.581703: Pseudo dice [0.0, 0.0, 0.8021, 0.5552, 0.5685, 0.478, 0.9202] +2026-04-09 06:31:54.586889: Epoch time: 101.85 s +2026-04-09 06:31:55.649264: +2026-04-09 06:31:55.651998: Epoch 507 +2026-04-09 06:31:55.655071: Current learning rate: 0.00529 +2026-04-09 06:33:37.264954: train_loss -0.286 +2026-04-09 06:33:37.269345: val_loss -0.2554 +2026-04-09 06:33:37.270777: Pseudo dice [0.237, 0.0, 0.7241, 0.6049, 
0.4523, 0.4622, 0.8911] +2026-04-09 06:33:37.272614: Epoch time: 101.62 s +2026-04-09 06:33:38.299436: +2026-04-09 06:33:38.300756: Epoch 508 +2026-04-09 06:33:38.301961: Current learning rate: 0.00528 +2026-04-09 06:35:19.970790: train_loss -0.2733 +2026-04-09 06:35:19.975677: val_loss -0.2642 +2026-04-09 06:35:19.976994: Pseudo dice [0.6385, 0.0, 0.7806, 0.3893, 0.552, 0.8253, 0.8197] +2026-04-09 06:35:19.981869: Epoch time: 101.67 s +2026-04-09 06:35:21.026939: +2026-04-09 06:35:21.028243: Epoch 509 +2026-04-09 06:35:21.029402: Current learning rate: 0.00527 +2026-04-09 06:37:02.757342: train_loss -0.2717 +2026-04-09 06:37:02.761731: val_loss -0.2498 +2026-04-09 06:37:02.763297: Pseudo dice [0.5283, 0.0, 0.8969, 0.4756, 0.4716, 0.7859, 0.8522] +2026-04-09 06:37:02.765197: Epoch time: 101.73 s +2026-04-09 06:37:04.827558: +2026-04-09 06:37:04.830908: Epoch 510 +2026-04-09 06:37:04.832045: Current learning rate: 0.00526 +2026-04-09 06:38:46.524990: train_loss -0.2909 +2026-04-09 06:38:46.529668: val_loss -0.2653 +2026-04-09 06:38:46.531606: Pseudo dice [0.6331, 0.0, 0.7944, 0.57, 0.4187, 0.5362, 0.8178] +2026-04-09 06:38:46.533553: Epoch time: 101.7 s +2026-04-09 06:38:47.579746: +2026-04-09 06:38:47.581187: Epoch 511 +2026-04-09 06:38:47.582494: Current learning rate: 0.00525 +2026-04-09 06:40:29.213445: train_loss -0.2749 +2026-04-09 06:40:29.219360: val_loss -0.2546 +2026-04-09 06:40:29.220760: Pseudo dice [0.7346, 0.0, 0.8221, 0.6931, 0.5357, 0.6375, 0.929] +2026-04-09 06:40:29.222413: Epoch time: 101.64 s +2026-04-09 06:40:30.266996: +2026-04-09 06:40:30.268481: Epoch 512 +2026-04-09 06:40:30.269684: Current learning rate: 0.00524 +2026-04-09 06:42:11.835364: train_loss -0.2815 +2026-04-09 06:42:11.841284: val_loss -0.2263 +2026-04-09 06:42:11.843173: Pseudo dice [0.1796, 0.0, 0.645, 0.2558, 0.4878, 0.6927, 0.8338] +2026-04-09 06:42:11.845234: Epoch time: 101.57 s +2026-04-09 06:42:12.885144: +2026-04-09 06:42:12.886763: Epoch 513 +2026-04-09 06:42:12.887974: 
Current learning rate: 0.00523 +2026-04-09 06:43:54.390140: train_loss -0.2687 +2026-04-09 06:43:54.395421: val_loss -0.2445 +2026-04-09 06:43:54.397046: Pseudo dice [0.6837, 0.0, 0.4901, 0.5944, 0.3681, 0.6134, 0.8824] +2026-04-09 06:43:54.399363: Epoch time: 101.51 s +2026-04-09 06:43:55.456933: +2026-04-09 06:43:55.458406: Epoch 514 +2026-04-09 06:43:55.459528: Current learning rate: 0.00522 +2026-04-09 06:45:37.223463: train_loss -0.2681 +2026-04-09 06:45:37.228205: val_loss -0.24 +2026-04-09 06:45:37.229479: Pseudo dice [0.4667, 0.0, 0.8001, 0.5484, 0.6429, 0.759, 0.8486] +2026-04-09 06:45:37.231373: Epoch time: 101.77 s +2026-04-09 06:45:38.271113: +2026-04-09 06:45:38.272428: Epoch 515 +2026-04-09 06:45:38.273646: Current learning rate: 0.00521 +2026-04-09 06:47:20.022321: train_loss -0.2725 +2026-04-09 06:47:20.028203: val_loss -0.2546 +2026-04-09 06:47:20.031058: Pseudo dice [0.3656, 0.0, 0.774, 0.3164, 0.4957, 0.2023, 0.7914] +2026-04-09 06:47:20.033177: Epoch time: 101.75 s +2026-04-09 06:47:21.094668: +2026-04-09 06:47:21.096253: Epoch 516 +2026-04-09 06:47:21.097555: Current learning rate: 0.0052 +2026-04-09 06:49:02.806482: train_loss -0.2887 +2026-04-09 06:49:02.811387: val_loss -0.2711 +2026-04-09 06:49:02.812961: Pseudo dice [0.686, 0.0, 0.6574, 0.4776, 0.6185, 0.7016, 0.7369] +2026-04-09 06:49:02.815369: Epoch time: 101.71 s +2026-04-09 06:49:04.095341: +2026-04-09 06:49:04.096632: Epoch 517 +2026-04-09 06:49:04.098411: Current learning rate: 0.00519 +2026-04-09 06:50:45.963677: train_loss -0.2664 +2026-04-09 06:50:45.968184: val_loss -0.23 +2026-04-09 06:50:45.969666: Pseudo dice [0.4866, 0.0, 0.6456, 0.4372, 0.4375, 0.5702, 0.7856] +2026-04-09 06:50:45.971366: Epoch time: 101.87 s +2026-04-09 06:50:47.014782: +2026-04-09 06:50:47.016590: Epoch 518 +2026-04-09 06:50:47.017903: Current learning rate: 0.00518 +2026-04-09 06:52:28.820113: train_loss -0.2758 +2026-04-09 06:52:28.825179: val_loss -0.2253 +2026-04-09 06:52:28.826889: Pseudo dice 
[0.6389, 0.0, 0.642, 0.2886, 0.4845, 0.42, 0.8355] +2026-04-09 06:52:28.828965: Epoch time: 101.81 s +2026-04-09 06:52:29.886896: +2026-04-09 06:52:29.888493: Epoch 519 +2026-04-09 06:52:29.889730: Current learning rate: 0.00518 +2026-04-09 06:54:11.553337: train_loss -0.2812 +2026-04-09 06:54:11.558810: val_loss -0.2329 +2026-04-09 06:54:11.560440: Pseudo dice [0.2409, 0.0, 0.7449, 0.2747, 0.5368, 0.658, 0.9229] +2026-04-09 06:54:11.563090: Epoch time: 101.67 s +2026-04-09 06:54:12.610834: +2026-04-09 06:54:12.612067: Epoch 520 +2026-04-09 06:54:12.613254: Current learning rate: 0.00517 +2026-04-09 06:55:54.300188: train_loss -0.27 +2026-04-09 06:55:54.306356: val_loss -0.2554 +2026-04-09 06:55:54.308237: Pseudo dice [0.5006, 0.0, 0.8331, 0.4503, 0.3599, 0.679, 0.8568] +2026-04-09 06:55:54.309991: Epoch time: 101.69 s +2026-04-09 06:55:55.454571: +2026-04-09 06:55:55.456297: Epoch 521 +2026-04-09 06:55:55.457492: Current learning rate: 0.00516 +2026-04-09 06:57:37.129192: train_loss -0.2857 +2026-04-09 06:57:37.134293: val_loss -0.2451 +2026-04-09 06:57:37.135604: Pseudo dice [0.8433, 0.0, 0.8169, 0.5638, 0.621, 0.7177, 0.7419] +2026-04-09 06:57:37.139657: Epoch time: 101.68 s +2026-04-09 06:57:38.175548: +2026-04-09 06:57:38.176927: Epoch 522 +2026-04-09 06:57:38.178209: Current learning rate: 0.00515 +2026-04-09 06:59:20.062436: train_loss -0.2813 +2026-04-09 06:59:20.067130: val_loss -0.2484 +2026-04-09 06:59:20.068670: Pseudo dice [0.044, 0.0, 0.7837, 0.4132, 0.4962, 0.6992, 0.8037] +2026-04-09 06:59:20.070470: Epoch time: 101.89 s +2026-04-09 06:59:21.142798: +2026-04-09 06:59:21.144141: Epoch 523 +2026-04-09 06:59:21.145631: Current learning rate: 0.00514 +2026-04-09 07:01:02.833565: train_loss -0.2812 +2026-04-09 07:01:02.838526: val_loss -0.2179 +2026-04-09 07:01:02.840739: Pseudo dice [0.1058, 0.0, 0.7369, 0.4754, 0.2474, 0.6077, 0.8006] +2026-04-09 07:01:02.842814: Epoch time: 101.69 s +2026-04-09 07:01:03.872160: +2026-04-09 07:01:03.873619: Epoch 524 
+2026-04-09 07:01:03.874873: Current learning rate: 0.00513 +2026-04-09 07:02:45.653381: train_loss -0.2901 +2026-04-09 07:02:45.659734: val_loss -0.2562 +2026-04-09 07:02:45.661556: Pseudo dice [0.3788, 0.0, 0.6822, 0.6911, 0.5778, 0.7728, 0.7797] +2026-04-09 07:02:45.663533: Epoch time: 101.78 s +2026-04-09 07:02:46.693351: +2026-04-09 07:02:46.694943: Epoch 525 +2026-04-09 07:02:46.696154: Current learning rate: 0.00512 +2026-04-09 07:04:28.850836: train_loss -0.2807 +2026-04-09 07:04:28.855239: val_loss -0.2447 +2026-04-09 07:04:28.857008: Pseudo dice [0.6059, 0.0, 0.8172, 0.631, 0.4781, 0.5446, 0.8105] +2026-04-09 07:04:28.859183: Epoch time: 102.16 s +2026-04-09 07:04:29.956454: +2026-04-09 07:04:29.958419: Epoch 526 +2026-04-09 07:04:29.959874: Current learning rate: 0.00511 +2026-04-09 07:06:11.995797: train_loss -0.288 +2026-04-09 07:06:12.000301: val_loss -0.2117 +2026-04-09 07:06:12.002205: Pseudo dice [0.3156, 0.0, 0.7959, 0.707, 0.5597, 0.4376, 0.9146] +2026-04-09 07:06:12.004110: Epoch time: 102.04 s +2026-04-09 07:06:13.056280: +2026-04-09 07:06:13.057954: Epoch 527 +2026-04-09 07:06:13.062013: Current learning rate: 0.0051 +2026-04-09 07:07:54.935690: train_loss -0.2866 +2026-04-09 07:07:54.940746: val_loss -0.2173 +2026-04-09 07:07:54.942189: Pseudo dice [0.3941, 0.0, 0.6848, 0.259, 0.3911, 0.5485, 0.8553] +2026-04-09 07:07:54.943660: Epoch time: 101.88 s +2026-04-09 07:07:56.043645: +2026-04-09 07:07:56.045000: Epoch 528 +2026-04-09 07:07:56.046247: Current learning rate: 0.00509 +2026-04-09 07:09:37.811813: train_loss -0.274 +2026-04-09 07:09:37.816330: val_loss -0.2006 +2026-04-09 07:09:37.817879: Pseudo dice [0.0345, 0.0, 0.7689, 0.0003, 0.5613, 0.4041, 0.5904] +2026-04-09 07:09:37.819965: Epoch time: 101.77 s +2026-04-09 07:09:38.868042: +2026-04-09 07:09:38.870881: Epoch 529 +2026-04-09 07:09:38.872302: Current learning rate: 0.00508 +2026-04-09 07:11:20.473900: train_loss -0.2935 +2026-04-09 07:11:20.478619: val_loss -0.2334 +2026-04-09 
07:11:20.481261: Pseudo dice [0.5713, 0.0, 0.8073, 0.6525, 0.5017, 0.376, 0.6307] +2026-04-09 07:11:20.483485: Epoch time: 101.61 s +2026-04-09 07:11:21.524576: +2026-04-09 07:11:21.525989: Epoch 530 +2026-04-09 07:11:21.528013: Current learning rate: 0.00507 +2026-04-09 07:13:03.135392: train_loss -0.3001 +2026-04-09 07:13:03.139441: val_loss -0.2732 +2026-04-09 07:13:03.140712: Pseudo dice [0.3731, 0.0, 0.8198, 0.7588, 0.5625, 0.666, 0.8619] +2026-04-09 07:13:03.142400: Epoch time: 101.61 s +2026-04-09 07:13:05.192848: +2026-04-09 07:13:05.194181: Epoch 531 +2026-04-09 07:13:05.195405: Current learning rate: 0.00506 +2026-04-09 07:14:46.923029: train_loss -0.2824 +2026-04-09 07:14:46.931479: val_loss -0.2133 +2026-04-09 07:14:46.933768: Pseudo dice [0.1765, 0.0, 0.8376, 0.3546, 0.3456, 0.0906, 0.9207] +2026-04-09 07:14:46.937851: Epoch time: 101.73 s +2026-04-09 07:14:47.998926: +2026-04-09 07:14:48.000435: Epoch 532 +2026-04-09 07:14:48.002056: Current learning rate: 0.00505 +2026-04-09 07:16:29.807376: train_loss -0.2882 +2026-04-09 07:16:29.813375: val_loss -0.2567 +2026-04-09 07:16:29.815230: Pseudo dice [0.5116, 0.0, 0.7983, 0.8076, 0.3746, 0.5371, 0.9054] +2026-04-09 07:16:29.818542: Epoch time: 101.81 s +2026-04-09 07:16:30.873831: +2026-04-09 07:16:30.875123: Epoch 533 +2026-04-09 07:16:30.877126: Current learning rate: 0.00504 +2026-04-09 07:18:12.581969: train_loss -0.2778 +2026-04-09 07:18:12.594064: val_loss -0.2561 +2026-04-09 07:18:12.597314: Pseudo dice [0.4357, 0.0, 0.8979, 0.8414, 0.5723, 0.4544, 0.6889] +2026-04-09 07:18:12.600799: Epoch time: 101.71 s +2026-04-09 07:18:13.655805: +2026-04-09 07:18:13.657379: Epoch 534 +2026-04-09 07:18:13.659059: Current learning rate: 0.00503 +2026-04-09 07:19:55.360815: train_loss -0.2793 +2026-04-09 07:19:55.364679: val_loss -0.2448 +2026-04-09 07:19:55.366264: Pseudo dice [0.3762, 0.0, 0.6638, 0.7576, 0.5502, 0.582, 0.8346] +2026-04-09 07:19:55.368247: Epoch time: 101.71 s +2026-04-09 07:19:56.404272: 
+2026-04-09 07:19:56.406034: Epoch 535 +2026-04-09 07:19:56.407422: Current learning rate: 0.00502 +2026-04-09 07:21:38.058882: train_loss -0.2643 +2026-04-09 07:21:38.064359: val_loss -0.2174 +2026-04-09 07:21:38.066108: Pseudo dice [0.0005, 0.0, 0.7476, 0.6653, 0.3149, 0.7305, 0.7416] +2026-04-09 07:21:38.068210: Epoch time: 101.66 s +2026-04-09 07:21:39.124928: +2026-04-09 07:21:39.126371: Epoch 536 +2026-04-09 07:21:39.127717: Current learning rate: 0.00501 +2026-04-09 07:23:20.775299: train_loss -0.27 +2026-04-09 07:23:20.780363: val_loss -0.2427 +2026-04-09 07:23:20.781938: Pseudo dice [0.1731, 0.0, 0.8366, 0.7875, 0.4883, 0.4032, 0.8115] +2026-04-09 07:23:20.783632: Epoch time: 101.65 s +2026-04-09 07:23:21.837955: +2026-04-09 07:23:21.839491: Epoch 537 +2026-04-09 07:23:21.840897: Current learning rate: 0.005 +2026-04-09 07:25:03.654132: train_loss -0.2744 +2026-04-09 07:25:03.658435: val_loss -0.2387 +2026-04-09 07:25:03.660295: Pseudo dice [0.4246, 0.0, 0.829, 0.7234, 0.3549, 0.4433, 0.9353] +2026-04-09 07:25:03.662541: Epoch time: 101.82 s +2026-04-09 07:25:04.697198: +2026-04-09 07:25:04.701045: Epoch 538 +2026-04-09 07:25:04.703956: Current learning rate: 0.00499 +2026-04-09 07:26:46.532093: train_loss -0.2773 +2026-04-09 07:26:46.536813: val_loss -0.2696 +2026-04-09 07:26:46.539123: Pseudo dice [0.6564, 0.0, 0.7879, 0.7247, 0.4606, 0.8232, 0.8896] +2026-04-09 07:26:46.541091: Epoch time: 101.84 s +2026-04-09 07:26:47.604112: +2026-04-09 07:26:47.605988: Epoch 539 +2026-04-09 07:26:47.607434: Current learning rate: 0.00498 +2026-04-09 07:28:29.486767: train_loss -0.2901 +2026-04-09 07:28:29.491579: val_loss -0.2165 +2026-04-09 07:28:29.492911: Pseudo dice [0.5301, 0.0, 0.807, 0.3344, 0.4706, 0.5321, 0.8819] +2026-04-09 07:28:29.494794: Epoch time: 101.89 s +2026-04-09 07:28:30.539900: +2026-04-09 07:28:30.542989: Epoch 540 +2026-04-09 07:28:30.544990: Current learning rate: 0.00497 +2026-04-09 07:30:12.397979: train_loss -0.2737 +2026-04-09 
07:30:12.401964: val_loss -0.2461 +2026-04-09 07:30:12.403350: Pseudo dice [0.1301, 0.0, 0.833, 0.6396, 0.549, 0.465, 0.8114] +2026-04-09 07:30:12.405157: Epoch time: 101.86 s +2026-04-09 07:30:13.449919: +2026-04-09 07:30:13.451595: Epoch 541 +2026-04-09 07:30:13.452882: Current learning rate: 0.00496 +2026-04-09 07:31:55.154196: train_loss -0.2718 +2026-04-09 07:31:55.158949: val_loss -0.2141 +2026-04-09 07:31:55.160873: Pseudo dice [0.4227, 0.0, 0.7295, 0.7793, 0.4966, 0.3353, 0.2693] +2026-04-09 07:31:55.162400: Epoch time: 101.71 s +2026-04-09 07:31:56.213689: +2026-04-09 07:31:56.215134: Epoch 542 +2026-04-09 07:31:56.216214: Current learning rate: 0.00495 +2026-04-09 07:33:37.835329: train_loss -0.272 +2026-04-09 07:33:37.840413: val_loss -0.2202 +2026-04-09 07:33:37.841685: Pseudo dice [0.8518, 0.0, 0.5048, 0.3165, 0.3786, 0.5017, 0.6124] +2026-04-09 07:33:37.843454: Epoch time: 101.62 s +2026-04-09 07:33:38.889634: +2026-04-09 07:33:38.891257: Epoch 543 +2026-04-09 07:33:38.892411: Current learning rate: 0.00494 +2026-04-09 07:35:20.602578: train_loss -0.294 +2026-04-09 07:35:20.607346: val_loss -0.2283 +2026-04-09 07:35:20.609024: Pseudo dice [0.5109, 0.0, 0.6984, 0.3734, 0.5034, 0.3445, 0.7496] +2026-04-09 07:35:20.610719: Epoch time: 101.72 s +2026-04-09 07:35:21.660396: +2026-04-09 07:35:21.661649: Epoch 544 +2026-04-09 07:35:21.662791: Current learning rate: 0.00493 +2026-04-09 07:37:03.540118: train_loss -0.2814 +2026-04-09 07:37:03.545043: val_loss -0.2484 +2026-04-09 07:37:03.547032: Pseudo dice [0.7439, 0.0, 0.8184, 0.7972, 0.3758, 0.7306, 0.8282] +2026-04-09 07:37:03.548812: Epoch time: 101.88 s +2026-04-09 07:37:04.601038: +2026-04-09 07:37:04.602346: Epoch 545 +2026-04-09 07:37:04.603623: Current learning rate: 0.00492 +2026-04-09 07:38:46.415751: train_loss -0.2911 +2026-04-09 07:38:46.421574: val_loss -0.253 +2026-04-09 07:38:46.423254: Pseudo dice [0.4868, 0.0, 0.8263, 0.0921, 0.1566, 0.4631, 0.6917] +2026-04-09 07:38:46.426808: Epoch time: 
101.82 s +2026-04-09 07:38:47.474595: +2026-04-09 07:38:47.475989: Epoch 546 +2026-04-09 07:38:47.477160: Current learning rate: 0.00491 +2026-04-09 07:40:29.328497: train_loss -0.2844 +2026-04-09 07:40:29.332868: val_loss -0.2514 +2026-04-09 07:40:29.334236: Pseudo dice [0.6601, 0.0, 0.7553, 0.7364, 0.4053, 0.8042, 0.7936] +2026-04-09 07:40:29.336229: Epoch time: 101.86 s +2026-04-09 07:40:30.382047: +2026-04-09 07:40:30.383761: Epoch 547 +2026-04-09 07:40:30.384984: Current learning rate: 0.0049 +2026-04-09 07:42:12.188409: train_loss -0.275 +2026-04-09 07:42:12.193524: val_loss -0.2414 +2026-04-09 07:42:12.195881: Pseudo dice [0.2891, 0.0, 0.6169, 0.4354, 0.5281, 0.6614, 0.7674] +2026-04-09 07:42:12.198440: Epoch time: 101.81 s +2026-04-09 07:42:13.245532: +2026-04-09 07:42:13.246843: Epoch 548 +2026-04-09 07:42:13.247926: Current learning rate: 0.00489 +2026-04-09 07:43:55.130603: train_loss -0.276 +2026-04-09 07:43:55.136001: val_loss -0.2487 +2026-04-09 07:43:55.138180: Pseudo dice [0.2029, 0.0, 0.8573, 0.8622, 0.4988, 0.5743, 0.8916] +2026-04-09 07:43:55.140859: Epoch time: 101.89 s +2026-04-09 07:43:56.188135: +2026-04-09 07:43:56.191251: Epoch 549 +2026-04-09 07:43:56.193068: Current learning rate: 0.00488 +2026-04-09 07:45:37.899118: train_loss -0.275 +2026-04-09 07:45:37.903801: val_loss -0.2362 +2026-04-09 07:45:37.905288: Pseudo dice [0.1724, 0.0, 0.814, 0.6803, 0.7178, 0.4932, 0.564] +2026-04-09 07:45:37.907170: Epoch time: 101.71 s +2026-04-09 07:45:40.550573: +2026-04-09 07:45:40.551728: Epoch 550 +2026-04-09 07:45:40.552904: Current learning rate: 0.00487 +2026-04-09 07:47:22.286867: train_loss -0.2878 +2026-04-09 07:47:22.293066: val_loss -0.2173 +2026-04-09 07:47:22.294736: Pseudo dice [0.2818, 0.0, 0.7377, 0.6853, 0.5535, 0.5417, 0.8456] +2026-04-09 07:47:22.296506: Epoch time: 101.74 s +2026-04-09 07:47:23.352139: +2026-04-09 07:47:23.353459: Epoch 551 +2026-04-09 07:47:23.354694: Current learning rate: 0.00486 +2026-04-09 07:49:06.239936: 
train_loss -0.2861 +2026-04-09 07:49:06.248676: val_loss -0.2602 +2026-04-09 07:49:06.250919: Pseudo dice [0.5059, 0.0, 0.6986, 0.5603, 0.4453, 0.6161, 0.914] +2026-04-09 07:49:06.253670: Epoch time: 102.89 s +2026-04-09 07:49:07.307926: +2026-04-09 07:49:07.313222: Epoch 552 +2026-04-09 07:49:07.314448: Current learning rate: 0.00485 +2026-04-09 07:50:49.197473: train_loss -0.2784 +2026-04-09 07:50:49.201951: val_loss -0.2507 +2026-04-09 07:50:49.203242: Pseudo dice [0.4041, 0.0, 0.8129, 0.4716, 0.3829, 0.5115, 0.911] +2026-04-09 07:50:49.205462: Epoch time: 101.89 s +2026-04-09 07:50:50.237503: +2026-04-09 07:50:50.238791: Epoch 553 +2026-04-09 07:50:50.239972: Current learning rate: 0.00484 +2026-04-09 07:52:32.050217: train_loss -0.2868 +2026-04-09 07:52:32.054395: val_loss -0.2176 +2026-04-09 07:52:32.056036: Pseudo dice [0.2085, 0.0, 0.8748, 0.4426, 0.5073, 0.4847, 0.8418] +2026-04-09 07:52:32.058091: Epoch time: 101.82 s +2026-04-09 07:52:33.161672: +2026-04-09 07:52:33.163570: Epoch 554 +2026-04-09 07:52:33.164737: Current learning rate: 0.00484 +2026-04-09 07:54:14.952019: train_loss -0.2927 +2026-04-09 07:54:14.956254: val_loss -0.2604 +2026-04-09 07:54:14.957861: Pseudo dice [0.5742, 0.0, 0.7334, 0.7565, 0.5907, 0.5567, 0.8515] +2026-04-09 07:54:14.959707: Epoch time: 101.79 s +2026-04-09 07:54:15.997945: +2026-04-09 07:54:16.000108: Epoch 555 +2026-04-09 07:54:16.001220: Current learning rate: 0.00483 +2026-04-09 07:55:57.663973: train_loss -0.2794 +2026-04-09 07:55:57.669061: val_loss -0.2413 +2026-04-09 07:55:57.670519: Pseudo dice [0.4154, 0.0, 0.7618, 0.6181, 0.3511, 0.642, 0.8911] +2026-04-09 07:55:57.672551: Epoch time: 101.67 s +2026-04-09 07:55:58.711560: +2026-04-09 07:55:58.713124: Epoch 556 +2026-04-09 07:55:58.714423: Current learning rate: 0.00482 +2026-04-09 07:57:40.556480: train_loss -0.2933 +2026-04-09 07:57:40.560629: val_loss -0.2375 +2026-04-09 07:57:40.562048: Pseudo dice [0.8363, 0.0, 0.7886, 0.6235, 0.5609, 0.5275, 0.7268] 
+2026-04-09 07:57:40.564017: Epoch time: 101.85 s +2026-04-09 07:57:41.614064: +2026-04-09 07:57:41.615523: Epoch 557 +2026-04-09 07:57:41.616802: Current learning rate: 0.00481 +2026-04-09 07:59:23.273922: train_loss -0.2966 +2026-04-09 07:59:23.277930: val_loss -0.2742 +2026-04-09 07:59:23.279170: Pseudo dice [0.639, 0.0, 0.6903, 0.8448, 0.6323, 0.6022, 0.8725] +2026-04-09 07:59:23.280692: Epoch time: 101.66 s +2026-04-09 07:59:24.317846: +2026-04-09 07:59:24.320473: Epoch 558 +2026-04-09 07:59:24.322085: Current learning rate: 0.0048 +2026-04-09 08:01:05.974756: train_loss -0.2864 +2026-04-09 08:01:05.979088: val_loss -0.2271 +2026-04-09 08:01:05.981127: Pseudo dice [0.348, 0.0, 0.7113, 0.7985, 0.5809, 0.6703, 0.9306] +2026-04-09 08:01:05.983086: Epoch time: 101.66 s +2026-04-09 08:01:07.030653: +2026-04-09 08:01:07.033470: Epoch 559 +2026-04-09 08:01:07.034734: Current learning rate: 0.00479 +2026-04-09 08:02:48.785700: train_loss -0.2781 +2026-04-09 08:02:48.790094: val_loss -0.2271 +2026-04-09 08:02:48.792055: Pseudo dice [0.0252, 0.0, 0.7994, 0.5952, 0.4025, 0.8048, 0.8696] +2026-04-09 08:02:48.793941: Epoch time: 101.76 s +2026-04-09 08:02:49.842544: +2026-04-09 08:02:49.843957: Epoch 560 +2026-04-09 08:02:49.845176: Current learning rate: 0.00478 +2026-04-09 08:04:31.476796: train_loss -0.2717 +2026-04-09 08:04:31.481342: val_loss -0.2591 +2026-04-09 08:04:31.482988: Pseudo dice [0.4149, 0.0, 0.7576, 0.8415, 0.5917, 0.5775, 0.7731] +2026-04-09 08:04:31.484894: Epoch time: 101.64 s +2026-04-09 08:04:32.529538: +2026-04-09 08:04:32.530913: Epoch 561 +2026-04-09 08:04:32.532387: Current learning rate: 0.00477 +2026-04-09 08:06:14.111395: train_loss -0.2949 +2026-04-09 08:06:14.116714: val_loss -0.2406 +2026-04-09 08:06:14.118274: Pseudo dice [0.8171, 0.0, 0.4408, 0.2549, 0.148, 0.5301, 0.8995] +2026-04-09 08:06:14.120581: Epoch time: 101.58 s +2026-04-09 08:06:15.183864: +2026-04-09 08:06:15.185468: Epoch 562 +2026-04-09 08:06:15.186834: Current learning 
rate: 0.00476 +2026-04-09 08:07:56.985170: train_loss -0.2826 +2026-04-09 08:07:56.989983: val_loss -0.208 +2026-04-09 08:07:56.992017: Pseudo dice [0.8353, 0.0, 0.6586, 0.0581, 0.3452, 0.4153, 0.2578] +2026-04-09 08:07:56.994463: Epoch time: 101.8 s +2026-04-09 08:07:58.053924: +2026-04-09 08:07:58.055639: Epoch 563 +2026-04-09 08:07:58.056913: Current learning rate: 0.00475 +2026-04-09 08:09:39.891154: train_loss -0.2651 +2026-04-09 08:09:39.895229: val_loss -0.2429 +2026-04-09 08:09:39.897007: Pseudo dice [0.4843, 0.0, 0.6364, 0.7263, 0.3376, 0.6709, 0.6468] +2026-04-09 08:09:39.898918: Epoch time: 101.84 s +2026-04-09 08:09:40.978748: +2026-04-09 08:09:40.979903: Epoch 564 +2026-04-09 08:09:40.981022: Current learning rate: 0.00474 +2026-04-09 08:11:22.681974: train_loss -0.2937 +2026-04-09 08:11:22.686516: val_loss -0.2517 +2026-04-09 08:11:22.688113: Pseudo dice [0.273, 0.0, 0.8659, 0.4946, 0.1732, 0.7066, 0.6845] +2026-04-09 08:11:22.689867: Epoch time: 101.71 s +2026-04-09 08:11:23.743970: +2026-04-09 08:11:23.745169: Epoch 565 +2026-04-09 08:11:23.746349: Current learning rate: 0.00473 +2026-04-09 08:13:05.413091: train_loss -0.2892 +2026-04-09 08:13:05.418059: val_loss -0.2487 +2026-04-09 08:13:05.419979: Pseudo dice [0.787, 0.0, 0.7519, 0.5455, 0.4385, 0.6809, 0.6819] +2026-04-09 08:13:05.422092: Epoch time: 101.67 s +2026-04-09 08:13:06.497033: +2026-04-09 08:13:06.499367: Epoch 566 +2026-04-09 08:13:06.500708: Current learning rate: 0.00472 +2026-04-09 08:14:48.154711: train_loss -0.296 +2026-04-09 08:14:48.160696: val_loss -0.228 +2026-04-09 08:14:48.162524: Pseudo dice [0.5889, 0.0, 0.876, 0.4709, 0.412, 0.6485, 0.6656] +2026-04-09 08:14:48.164604: Epoch time: 101.66 s +2026-04-09 08:14:49.236743: +2026-04-09 08:14:49.238121: Epoch 567 +2026-04-09 08:14:49.239372: Current learning rate: 0.00471 +2026-04-09 08:16:31.098085: train_loss -0.2835 +2026-04-09 08:16:31.102212: val_loss -0.2328 +2026-04-09 08:16:31.103765: Pseudo dice [0.6747, 0.0, 0.5721, 
0.8028, 0.2946, 0.6957, 0.8017] +2026-04-09 08:16:31.105962: Epoch time: 101.86 s +2026-04-09 08:16:32.156151: +2026-04-09 08:16:32.157861: Epoch 568 +2026-04-09 08:16:32.159257: Current learning rate: 0.0047 +2026-04-09 08:18:13.943670: train_loss -0.2951 +2026-04-09 08:18:13.947918: val_loss -0.2658 +2026-04-09 08:18:13.949305: Pseudo dice [0.7673, 0.0, 0.8245, 0.8323, 0.502, 0.6635, 0.9187] +2026-04-09 08:18:13.950958: Epoch time: 101.79 s +2026-04-09 08:18:14.999310: +2026-04-09 08:18:15.000877: Epoch 569 +2026-04-09 08:18:15.002500: Current learning rate: 0.00469 +2026-04-09 08:19:56.738906: train_loss -0.2903 +2026-04-09 08:19:56.744137: val_loss -0.2444 +2026-04-09 08:19:56.745656: Pseudo dice [0.77, 0.0, 0.5842, 0.612, 0.3329, 0.8354, 0.6372] +2026-04-09 08:19:56.747973: Epoch time: 101.74 s +2026-04-09 08:19:57.786120: +2026-04-09 08:19:57.806634: Epoch 570 +2026-04-09 08:19:57.808053: Current learning rate: 0.00468 +2026-04-09 08:21:39.605073: train_loss -0.2803 +2026-04-09 08:21:39.609891: val_loss -0.2173 +2026-04-09 08:21:39.611350: Pseudo dice [0.1605, 0.0, 0.7434, 0.4657, 0.395, 0.4102, 0.6133] +2026-04-09 08:21:39.613447: Epoch time: 101.82 s +2026-04-09 08:21:40.670476: +2026-04-09 08:21:40.672930: Epoch 571 +2026-04-09 08:21:40.674060: Current learning rate: 0.00467 +2026-04-09 08:23:22.390522: train_loss -0.3046 +2026-04-09 08:23:22.395192: val_loss -0.2445 +2026-04-09 08:23:22.396705: Pseudo dice [0.7284, 0.0, 0.7397, 0.116, 0.3611, 0.784, 0.7302] +2026-04-09 08:23:22.398785: Epoch time: 101.72 s +2026-04-09 08:23:24.471501: +2026-04-09 08:23:24.472869: Epoch 572 +2026-04-09 08:23:24.474144: Current learning rate: 0.00466 +2026-04-09 08:25:06.167481: train_loss -0.2891 +2026-04-09 08:25:06.171764: val_loss -0.235 +2026-04-09 08:25:06.173635: Pseudo dice [0.6592, 0.0, 0.7396, 0.5925, 0.5091, 0.3606, 0.5813] +2026-04-09 08:25:06.175399: Epoch time: 101.7 s +2026-04-09 08:25:07.249226: +2026-04-09 08:25:07.250764: Epoch 573 +2026-04-09 
08:25:07.251910: Current learning rate: 0.00465 +2026-04-09 08:26:49.063182: train_loss -0.2805 +2026-04-09 08:26:49.068293: val_loss -0.2357 +2026-04-09 08:26:49.069948: Pseudo dice [0.5945, 0.0, 0.7025, 0.2625, 0.5885, 0.6056, 0.8578] +2026-04-09 08:26:49.071699: Epoch time: 101.82 s +2026-04-09 08:26:50.159230: +2026-04-09 08:26:50.160475: Epoch 574 +2026-04-09 08:26:50.161563: Current learning rate: 0.00464 +2026-04-09 08:28:32.021758: train_loss -0.2912 +2026-04-09 08:28:32.025929: val_loss -0.2912 +2026-04-09 08:28:32.027192: Pseudo dice [0.7328, 0.0, 0.8458, 0.7793, 0.5314, 0.6921, 0.9393] +2026-04-09 08:28:32.029027: Epoch time: 101.87 s +2026-04-09 08:28:33.092818: +2026-04-09 08:28:33.094086: Epoch 575 +2026-04-09 08:28:33.095372: Current learning rate: 0.00463 +2026-04-09 08:30:14.936303: train_loss -0.293 +2026-04-09 08:30:14.941676: val_loss -0.2451 +2026-04-09 08:30:14.943152: Pseudo dice [0.6745, 0.0, 0.4973, 0.3551, 0.5779, 0.6495, 0.8019] +2026-04-09 08:30:14.945749: Epoch time: 101.85 s +2026-04-09 08:30:16.018066: +2026-04-09 08:30:16.019600: Epoch 576 +2026-04-09 08:30:16.020777: Current learning rate: 0.00462 +2026-04-09 08:31:57.900121: train_loss -0.2876 +2026-04-09 08:31:57.904265: val_loss -0.2275 +2026-04-09 08:31:57.906300: Pseudo dice [0.5254, 0.0, 0.7071, 0.7306, 0.5482, 0.6347, 0.4811] +2026-04-09 08:31:57.908535: Epoch time: 101.89 s +2026-04-09 08:31:58.981426: +2026-04-09 08:31:58.982757: Epoch 577 +2026-04-09 08:31:58.983824: Current learning rate: 0.00461 +2026-04-09 08:33:40.780648: train_loss -0.2917 +2026-04-09 08:33:40.784807: val_loss -0.2411 +2026-04-09 08:33:40.786336: Pseudo dice [0.4877, 0.0, 0.7944, 0.394, 0.6629, 0.472, 0.9339] +2026-04-09 08:33:40.787989: Epoch time: 101.8 s +2026-04-09 08:33:41.846993: +2026-04-09 08:33:41.849399: Epoch 578 +2026-04-09 08:33:41.850583: Current learning rate: 0.0046 +2026-04-09 08:35:23.673114: train_loss -0.291 +2026-04-09 08:35:23.677905: val_loss -0.26 +2026-04-09 08:35:23.679625: 
Pseudo dice [0.7496, 0.0, 0.8077, 0.7069, 0.5083, 0.6894, 0.6978] +2026-04-09 08:35:23.681765: Epoch time: 101.83 s +2026-04-09 08:35:24.767578: +2026-04-09 08:35:24.768949: Epoch 579 +2026-04-09 08:35:24.771014: Current learning rate: 0.00459 +2026-04-09 08:37:06.429525: train_loss -0.2814 +2026-04-09 08:37:06.434151: val_loss -0.241 +2026-04-09 08:37:06.435398: Pseudo dice [0.6864, 0.0, 0.796, 0.5013, 0.3714, 0.3054, 0.9] +2026-04-09 08:37:06.437407: Epoch time: 101.67 s +2026-04-09 08:37:07.540561: +2026-04-09 08:37:07.541884: Epoch 580 +2026-04-09 08:37:07.543180: Current learning rate: 0.00458 +2026-04-09 08:38:49.318632: train_loss -0.2972 +2026-04-09 08:38:49.322467: val_loss -0.251 +2026-04-09 08:38:49.324172: Pseudo dice [0.4452, 0.0, 0.7971, 0.641, 0.5735, 0.6508, 0.509] +2026-04-09 08:38:49.326199: Epoch time: 101.78 s +2026-04-09 08:38:50.387440: +2026-04-09 08:38:50.388874: Epoch 581 +2026-04-09 08:38:50.390110: Current learning rate: 0.00457 +2026-04-09 08:40:31.821455: train_loss -0.2718 +2026-04-09 08:40:31.826228: val_loss -0.2349 +2026-04-09 08:40:31.827897: Pseudo dice [0.7369, 0.0, 0.6735, 0.5927, 0.5474, 0.7265, 0.886] +2026-04-09 08:40:31.830035: Epoch time: 101.44 s +2026-04-09 08:40:32.921592: +2026-04-09 08:40:32.923064: Epoch 582 +2026-04-09 08:40:32.924510: Current learning rate: 0.00456 +2026-04-09 08:42:14.575569: train_loss -0.2934 +2026-04-09 08:42:14.579942: val_loss -0.2396 +2026-04-09 08:42:14.581835: Pseudo dice [0.3595, 0.0, 0.8277, 0.3468, 0.51, 0.5189, 0.736] +2026-04-09 08:42:14.583789: Epoch time: 101.66 s +2026-04-09 08:42:15.660626: +2026-04-09 08:42:15.662107: Epoch 583 +2026-04-09 08:42:15.663428: Current learning rate: 0.00455 +2026-04-09 08:43:57.220748: train_loss -0.3007 +2026-04-09 08:43:57.226075: val_loss -0.2668 +2026-04-09 08:43:57.227583: Pseudo dice [0.5501, 0.0, 0.8306, 0.6812, 0.5561, 0.7742, 0.8589] +2026-04-09 08:43:57.229213: Epoch time: 101.56 s +2026-04-09 08:43:58.291967: +2026-04-09 08:43:58.293282: 
Epoch 584 +2026-04-09 08:43:58.294549: Current learning rate: 0.00454 +2026-04-09 08:45:39.693002: train_loss -0.288 +2026-04-09 08:45:39.716623: val_loss -0.247 +2026-04-09 08:45:39.718179: Pseudo dice [0.2626, 0.0, 0.8572, 0.8051, 0.559, 0.6088, 0.9156] +2026-04-09 08:45:39.721656: Epoch time: 101.4 s +2026-04-09 08:45:40.777049: +2026-04-09 08:45:40.778266: Epoch 585 +2026-04-09 08:45:40.779500: Current learning rate: 0.00453 +2026-04-09 08:47:22.286811: train_loss -0.293 +2026-04-09 08:47:22.290679: val_loss -0.255 +2026-04-09 08:47:22.292118: Pseudo dice [0.5059, 0.0, 0.838, 0.0822, 0.5905, 0.4512, 0.6925] +2026-04-09 08:47:22.298843: Epoch time: 101.51 s +2026-04-09 08:47:23.355556: +2026-04-09 08:47:23.356847: Epoch 586 +2026-04-09 08:47:23.357965: Current learning rate: 0.00452 +2026-04-09 08:49:04.784176: train_loss -0.2859 +2026-04-09 08:49:04.789527: val_loss -0.259 +2026-04-09 08:49:04.790896: Pseudo dice [0.7222, 0.0, 0.6686, 0.418, 0.4254, 0.6282, 0.9203] +2026-04-09 08:49:04.794674: Epoch time: 101.43 s +2026-04-09 08:49:05.875422: +2026-04-09 08:49:05.877113: Epoch 587 +2026-04-09 08:49:05.878416: Current learning rate: 0.00451 +2026-04-09 08:50:47.281427: train_loss -0.2939 +2026-04-09 08:50:47.286243: val_loss -0.2254 +2026-04-09 08:50:47.287830: Pseudo dice [0.4292, 0.0, 0.8326, 0.7676, 0.5068, 0.7059, 0.5616] +2026-04-09 08:50:47.289464: Epoch time: 101.41 s +2026-04-09 08:50:48.365165: +2026-04-09 08:50:48.366532: Epoch 588 +2026-04-09 08:50:48.367724: Current learning rate: 0.0045 +2026-04-09 08:52:29.886306: train_loss -0.2744 +2026-04-09 08:52:29.890341: val_loss -0.2556 +2026-04-09 08:52:29.891873: Pseudo dice [0.5171, 0.0, 0.7686, 0.6043, 0.569, 0.7255, 0.8479] +2026-04-09 08:52:29.893899: Epoch time: 101.52 s +2026-04-09 08:52:30.973214: +2026-04-09 08:52:30.985062: Epoch 589 +2026-04-09 08:52:30.998920: Current learning rate: 0.00449 +2026-04-09 08:54:12.671282: train_loss -0.2897 +2026-04-09 08:54:12.676161: val_loss -0.2471 +2026-04-09 
08:54:12.677453: Pseudo dice [0.4102, 0.0, 0.7865, 0.3099, 0.387, 0.8429, 0.7807] +2026-04-09 08:54:12.679533: Epoch time: 101.7 s +2026-04-09 08:54:13.761907: +2026-04-09 08:54:13.764397: Epoch 590 +2026-04-09 08:54:13.765740: Current learning rate: 0.00448 +2026-04-09 08:55:55.429355: train_loss -0.2861 +2026-04-09 08:55:55.436387: val_loss -0.2814 +2026-04-09 08:55:55.438180: Pseudo dice [0.6397, 0.0, 0.8319, 0.5663, 0.567, 0.7203, 0.6904] +2026-04-09 08:55:55.440758: Epoch time: 101.67 s +2026-04-09 08:55:56.505553: +2026-04-09 08:55:56.508711: Epoch 591 +2026-04-09 08:55:56.512438: Current learning rate: 0.00447 +2026-04-09 08:57:38.162111: train_loss -0.2957 +2026-04-09 08:57:38.167010: val_loss -0.2632 +2026-04-09 08:57:38.168612: Pseudo dice [0.774, 0.0, 0.8218, 0.435, 0.4819, 0.7603, 0.8854] +2026-04-09 08:57:38.172273: Epoch time: 101.66 s +2026-04-09 08:57:39.223603: +2026-04-09 08:57:39.225038: Epoch 592 +2026-04-09 08:57:39.226150: Current learning rate: 0.00446 +2026-04-09 08:59:20.695351: train_loss -0.2933 +2026-04-09 08:59:20.700022: val_loss -0.2287 +2026-04-09 08:59:20.701837: Pseudo dice [0.203, 0.0, 0.79, 0.8623, 0.5676, 0.6478, 0.9224] +2026-04-09 08:59:20.703855: Epoch time: 101.47 s +2026-04-09 08:59:22.848576: +2026-04-09 08:59:22.849891: Epoch 593 +2026-04-09 08:59:22.851115: Current learning rate: 0.00445 +2026-04-09 09:01:04.592563: train_loss -0.2894 +2026-04-09 09:01:04.598147: val_loss -0.2189 +2026-04-09 09:01:04.611518: Pseudo dice [0.6579, 0.0, 0.2989, 0.2802, 0.2772, 0.8382, 0.8364] +2026-04-09 09:01:04.613576: Epoch time: 101.75 s +2026-04-09 09:01:05.691885: +2026-04-09 09:01:05.693345: Epoch 594 +2026-04-09 09:01:05.694544: Current learning rate: 0.00444 +2026-04-09 09:02:47.410899: train_loss -0.3109 +2026-04-09 09:02:47.415345: val_loss -0.253 +2026-04-09 09:02:47.417594: Pseudo dice [0.385, 0.0, 0.8244, 0.7319, 0.5785, 0.554, 0.9212] +2026-04-09 09:02:47.419813: Epoch time: 101.72 s +2026-04-09 09:02:48.480985: +2026-04-09 
09:02:48.482326: Epoch 595 +2026-04-09 09:02:48.483454: Current learning rate: 0.00443 +2026-04-09 09:04:29.998226: train_loss -0.293 +2026-04-09 09:04:30.002499: val_loss -0.2654 +2026-04-09 09:04:30.004230: Pseudo dice [0.3441, 0.0, 0.8419, 0.8222, 0.5841, 0.8798, 0.8969] +2026-04-09 09:04:30.006383: Epoch time: 101.52 s +2026-04-09 09:04:31.078579: +2026-04-09 09:04:31.080765: Epoch 596 +2026-04-09 09:04:31.082070: Current learning rate: 0.00442 +2026-04-09 09:06:12.839597: train_loss -0.2911 +2026-04-09 09:06:12.843583: val_loss -0.2721 +2026-04-09 09:06:12.845179: Pseudo dice [0.5782, 0.0, 0.6776, 0.7023, 0.3971, 0.8121, 0.9292] +2026-04-09 09:06:12.847196: Epoch time: 101.76 s +2026-04-09 09:06:14.144367: +2026-04-09 09:06:14.145614: Epoch 597 +2026-04-09 09:06:14.146681: Current learning rate: 0.00441 +2026-04-09 09:07:55.808692: train_loss -0.2995 +2026-04-09 09:07:55.814396: val_loss -0.2305 +2026-04-09 09:07:55.816585: Pseudo dice [0.2802, 0.0, 0.7755, 0.1875, 0.3657, 0.7719, 0.8731] +2026-04-09 09:07:55.818791: Epoch time: 101.67 s +2026-04-09 09:07:56.894318: +2026-04-09 09:07:56.895675: Epoch 598 +2026-04-09 09:07:56.896917: Current learning rate: 0.0044 +2026-04-09 09:09:38.648522: train_loss -0.2721 +2026-04-09 09:09:38.652449: val_loss -0.2237 +2026-04-09 09:09:38.654134: Pseudo dice [0.2111, 0.0, 0.6741, 0.5415, 0.2915, 0.4786, 0.7189] +2026-04-09 09:09:38.655934: Epoch time: 101.76 s +2026-04-09 09:09:39.714175: +2026-04-09 09:09:39.715388: Epoch 599 +2026-04-09 09:09:39.716474: Current learning rate: 0.00439 +2026-04-09 09:11:21.491683: train_loss -0.2909 +2026-04-09 09:11:21.496359: val_loss -0.2171 +2026-04-09 09:11:21.498225: Pseudo dice [0.5019, 0.0, 0.7849, 0.7349, 0.421, 0.7736, 0.8116] +2026-04-09 09:11:21.500364: Epoch time: 101.78 s +2026-04-09 09:11:24.231676: +2026-04-09 09:11:24.233251: Epoch 600 +2026-04-09 09:11:24.234478: Current learning rate: 0.00438 +2026-04-09 09:13:05.879308: train_loss -0.2922 +2026-04-09 09:13:05.883420: 
val_loss -0.2527 +2026-04-09 09:13:05.885142: Pseudo dice [0.4652, 0.0, 0.7663, 0.7796, 0.5476, 0.5776, 0.8544] +2026-04-09 09:13:05.887151: Epoch time: 101.65 s +2026-04-09 09:13:06.981447: +2026-04-09 09:13:06.982906: Epoch 601 +2026-04-09 09:13:06.984177: Current learning rate: 0.00437 +2026-04-09 09:14:48.652184: train_loss -0.3023 +2026-04-09 09:14:48.655669: val_loss -0.2468 +2026-04-09 09:14:48.657742: Pseudo dice [0.2216, 0.0, 0.8511, 0.4395, 0.5035, 0.5386, 0.4108] +2026-04-09 09:14:48.659249: Epoch time: 101.67 s +2026-04-09 09:14:49.740628: +2026-04-09 09:14:49.741985: Epoch 602 +2026-04-09 09:14:49.743142: Current learning rate: 0.00436 +2026-04-09 09:16:31.463626: train_loss -0.2829 +2026-04-09 09:16:31.467370: val_loss -0.2375 +2026-04-09 09:16:31.468683: Pseudo dice [0.5942, 0.0, 0.5629, 0.8291, 0.4694, 0.52, 0.7862] +2026-04-09 09:16:31.470002: Epoch time: 101.73 s +2026-04-09 09:16:32.547794: +2026-04-09 09:16:32.549081: Epoch 603 +2026-04-09 09:16:32.550248: Current learning rate: 0.00435 +2026-04-09 09:18:14.375543: train_loss -0.2955 +2026-04-09 09:18:14.380873: val_loss -0.2663 +2026-04-09 09:18:14.387568: Pseudo dice [0.5233, 0.0, 0.8372, 0.6939, 0.5561, 0.7071, 0.8929] +2026-04-09 09:18:14.389188: Epoch time: 101.83 s +2026-04-09 09:18:15.476622: +2026-04-09 09:18:15.478036: Epoch 604 +2026-04-09 09:18:15.479306: Current learning rate: 0.00434 +2026-04-09 09:19:57.260857: train_loss -0.2882 +2026-04-09 09:19:57.265064: val_loss -0.2771 +2026-04-09 09:19:57.266523: Pseudo dice [0.7414, 0.0, 0.7785, 0.7295, 0.581, 0.3708, 0.744] +2026-04-09 09:19:57.268030: Epoch time: 101.79 s +2026-04-09 09:19:58.339792: +2026-04-09 09:19:58.341172: Epoch 605 +2026-04-09 09:19:58.342401: Current learning rate: 0.00433 +2026-04-09 09:21:40.029799: train_loss -0.2846 +2026-04-09 09:21:40.033324: val_loss -0.2379 +2026-04-09 09:21:40.034869: Pseudo dice [0.4115, 0.0, 0.7954, 0.1677, 0.6421, 0.492, 0.8346] +2026-04-09 09:21:40.036383: Epoch time: 101.69 s 
+2026-04-09 09:21:41.107682: +2026-04-09 09:21:41.109305: Epoch 606 +2026-04-09 09:21:41.111092: Current learning rate: 0.00432 +2026-04-09 09:23:22.855404: train_loss -0.2887 +2026-04-09 09:23:22.859588: val_loss -0.2209 +2026-04-09 09:23:22.861278: Pseudo dice [0.7166, 0.0, 0.8294, 0.6759, 0.4448, 0.2319, 0.8679] +2026-04-09 09:23:22.862592: Epoch time: 101.75 s +2026-04-09 09:23:23.940236: +2026-04-09 09:23:23.941430: Epoch 607 +2026-04-09 09:23:23.942718: Current learning rate: 0.00431 +2026-04-09 09:25:05.584237: train_loss -0.2855 +2026-04-09 09:25:05.589067: val_loss -0.2396 +2026-04-09 09:25:05.591017: Pseudo dice [0.5815, 0.0, 0.7037, 0.2586, 0.5601, 0.3407, 0.8011] +2026-04-09 09:25:05.592517: Epoch time: 101.65 s +2026-04-09 09:25:06.664099: +2026-04-09 09:25:06.665634: Epoch 608 +2026-04-09 09:25:06.667000: Current learning rate: 0.0043 +2026-04-09 09:26:48.554619: train_loss -0.2884 +2026-04-09 09:26:48.559005: val_loss -0.2495 +2026-04-09 09:26:48.561561: Pseudo dice [0.5411, 0.0, 0.7463, 0.1818, 0.5074, 0.582, 0.8649] +2026-04-09 09:26:48.563224: Epoch time: 101.89 s +2026-04-09 09:26:49.653297: +2026-04-09 09:26:49.654502: Epoch 609 +2026-04-09 09:26:49.655592: Current learning rate: 0.00429 +2026-04-09 09:28:31.369110: train_loss -0.2835 +2026-04-09 09:28:31.373242: val_loss -0.2232 +2026-04-09 09:28:31.374661: Pseudo dice [0.4507, 0.0, 0.718, 0.5486, 0.5813, 0.6361, 0.9406] +2026-04-09 09:28:31.375999: Epoch time: 101.72 s +2026-04-09 09:28:32.441687: +2026-04-09 09:28:32.443015: Epoch 610 +2026-04-09 09:28:32.444158: Current learning rate: 0.00429 +2026-04-09 09:30:14.111347: train_loss -0.2809 +2026-04-09 09:30:14.114719: val_loss -0.2458 +2026-04-09 09:30:14.116089: Pseudo dice [0.2228, 0.0, 0.5886, 0.3794, 0.6016, 0.7647, 0.743] +2026-04-09 09:30:14.117988: Epoch time: 101.67 s +2026-04-09 09:30:15.221270: +2026-04-09 09:30:15.222764: Epoch 611 +2026-04-09 09:30:15.223920: Current learning rate: 0.00428 +2026-04-09 09:31:57.046446: train_loss 
-0.2843 +2026-04-09 09:31:57.050862: val_loss -0.2436 +2026-04-09 09:31:57.052378: Pseudo dice [0.5815, 0.0, 0.7672, 0.7653, 0.4423, 0.2715, 0.7354] +2026-04-09 09:31:57.054468: Epoch time: 101.83 s +2026-04-09 09:31:58.124131: +2026-04-09 09:31:58.125742: Epoch 612 +2026-04-09 09:31:58.127140: Current learning rate: 0.00427 +2026-04-09 09:33:39.930067: train_loss -0.2791 +2026-04-09 09:33:39.934276: val_loss -0.2544 +2026-04-09 09:33:39.935761: Pseudo dice [0.9037, 0.0, 0.7815, 0.6698, 0.5279, 0.6464, 0.7937] +2026-04-09 09:33:39.937109: Epoch time: 101.81 s +2026-04-09 09:33:42.012807: +2026-04-09 09:33:42.014255: Epoch 613 +2026-04-09 09:33:42.015639: Current learning rate: 0.00426 +2026-04-09 09:35:23.953186: train_loss -0.2922 +2026-04-09 09:35:23.958756: val_loss -0.2331 +2026-04-09 09:35:23.960656: Pseudo dice [0.7838, 0.0, 0.8819, 0.223, 0.4939, 0.2033, 0.3873] +2026-04-09 09:35:23.962339: Epoch time: 101.94 s +2026-04-09 09:35:25.036528: +2026-04-09 09:35:25.038216: Epoch 614 +2026-04-09 09:35:25.039679: Current learning rate: 0.00425 +2026-04-09 09:37:06.792752: train_loss -0.2803 +2026-04-09 09:37:06.797101: val_loss -0.2416 +2026-04-09 09:37:06.798524: Pseudo dice [0.627, 0.0, 0.6969, 0.8444, 0.4539, 0.639, 0.8054] +2026-04-09 09:37:06.799945: Epoch time: 101.76 s +2026-04-09 09:37:07.874525: +2026-04-09 09:37:07.876066: Epoch 615 +2026-04-09 09:37:07.877435: Current learning rate: 0.00424 +2026-04-09 09:38:49.603391: train_loss -0.2894 +2026-04-09 09:38:49.607445: val_loss -0.208 +2026-04-09 09:38:49.608986: Pseudo dice [0.2666, 0.0, 0.8501, 0.4783, 0.311, 0.6541, 0.9197] +2026-04-09 09:38:49.610309: Epoch time: 101.73 s +2026-04-09 09:38:50.689469: +2026-04-09 09:38:50.691277: Epoch 616 +2026-04-09 09:38:50.692842: Current learning rate: 0.00423 +2026-04-09 09:40:32.297718: train_loss -0.2793 +2026-04-09 09:40:32.303441: val_loss -0.2455 +2026-04-09 09:40:32.305194: Pseudo dice [0.2663, 0.0, 0.843, 0.5787, 0.632, 0.5593, 0.7973] +2026-04-09 
09:40:32.307188: Epoch time: 101.61 s +2026-04-09 09:40:33.416096: +2026-04-09 09:40:33.418612: Epoch 617 +2026-04-09 09:40:33.420411: Current learning rate: 0.00422 +2026-04-09 09:42:15.160836: train_loss -0.2935 +2026-04-09 09:42:15.165873: val_loss -0.258 +2026-04-09 09:42:15.167185: Pseudo dice [0.6576, 0.0, 0.7695, 0.7303, 0.5532, 0.6071, 0.8779] +2026-04-09 09:42:15.168674: Epoch time: 101.75 s +2026-04-09 09:42:16.240238: +2026-04-09 09:42:16.241611: Epoch 618 +2026-04-09 09:42:16.242977: Current learning rate: 0.00421 +2026-04-09 09:43:57.875438: train_loss -0.279 +2026-04-09 09:43:57.879575: val_loss -0.2491 +2026-04-09 09:43:57.881061: Pseudo dice [0.3194, 0.0, 0.7663, 0.5793, 0.3823, 0.8051, 0.8975] +2026-04-09 09:43:57.882616: Epoch time: 101.64 s +2026-04-09 09:43:58.970380: +2026-04-09 09:43:58.971595: Epoch 619 +2026-04-09 09:43:58.972624: Current learning rate: 0.0042 +2026-04-09 09:45:40.591282: train_loss -0.2765 +2026-04-09 09:45:40.595151: val_loss -0.2478 +2026-04-09 09:45:40.596759: Pseudo dice [0.4784, 0.0, 0.44, 0.4335, 0.4113, 0.623, 0.9185] +2026-04-09 09:45:40.598755: Epoch time: 101.62 s +2026-04-09 09:45:41.678410: +2026-04-09 09:45:41.680230: Epoch 620 +2026-04-09 09:45:41.681378: Current learning rate: 0.00419 +2026-04-09 09:47:23.396833: train_loss -0.2739 +2026-04-09 09:47:23.400375: val_loss -0.2276 +2026-04-09 09:47:23.402116: Pseudo dice [0.0, 0.0, 0.7107, 0.4923, 0.4922, 0.4865, 0.832] +2026-04-09 09:47:23.404529: Epoch time: 101.72 s +2026-04-09 09:47:24.511096: +2026-04-09 09:47:24.512508: Epoch 621 +2026-04-09 09:47:24.514312: Current learning rate: 0.00418 +2026-04-09 09:49:06.142425: train_loss -0.2811 +2026-04-09 09:49:06.145924: val_loss -0.2202 +2026-04-09 09:49:06.147352: Pseudo dice [0.0463, 0.0, 0.7864, 0.8674, 0.6098, 0.4204, 0.8643] +2026-04-09 09:49:06.149056: Epoch time: 101.63 s +2026-04-09 09:49:07.206704: +2026-04-09 09:49:07.209134: Epoch 622 +2026-04-09 09:49:07.210800: Current learning rate: 0.00417 
+2026-04-09 09:50:48.713208: train_loss -0.2742 +2026-04-09 09:50:48.717560: val_loss -0.2401 +2026-04-09 09:50:48.718920: Pseudo dice [0.2648, 0.0, 0.8069, 0.0157, 0.5459, 0.4316, 0.8281] +2026-04-09 09:50:48.720273: Epoch time: 101.51 s +2026-04-09 09:50:49.809485: +2026-04-09 09:50:49.810959: Epoch 623 +2026-04-09 09:50:49.812060: Current learning rate: 0.00416 +2026-04-09 09:52:31.372153: train_loss -0.2912 +2026-04-09 09:52:31.377114: val_loss -0.2471 +2026-04-09 09:52:31.378898: Pseudo dice [0.4083, 0.0, 0.809, 0.6251, 0.5145, 0.8053, 0.6724] +2026-04-09 09:52:31.380373: Epoch time: 101.57 s +2026-04-09 09:52:32.450131: +2026-04-09 09:52:32.451506: Epoch 624 +2026-04-09 09:52:32.452794: Current learning rate: 0.00415 +2026-04-09 09:54:13.931884: train_loss -0.288 +2026-04-09 09:54:13.936428: val_loss -0.2357 +2026-04-09 09:54:13.938270: Pseudo dice [0.206, 0.0, 0.8666, 0.2302, 0.5738, 0.4961, 0.7809] +2026-04-09 09:54:13.940340: Epoch time: 101.48 s +2026-04-09 09:54:15.008421: +2026-04-09 09:54:15.010318: Epoch 625 +2026-04-09 09:54:15.011661: Current learning rate: 0.00414 +2026-04-09 09:55:56.517144: train_loss -0.2878 +2026-04-09 09:55:56.521660: val_loss -0.2448 +2026-04-09 09:55:56.523259: Pseudo dice [0.7069, 0.0, 0.7803, 0.737, 0.4378, 0.7176, 0.8531] +2026-04-09 09:55:56.525075: Epoch time: 101.51 s +2026-04-09 09:55:57.618519: +2026-04-09 09:55:57.619744: Epoch 626 +2026-04-09 09:55:57.620947: Current learning rate: 0.00413 +2026-04-09 09:57:39.391102: train_loss -0.2991 +2026-04-09 09:57:39.402210: val_loss -0.2334 +2026-04-09 09:57:39.405929: Pseudo dice [0.5937, 0.0, 0.5326, 0.0713, 0.4939, 0.8521, 0.8855] +2026-04-09 09:57:39.409546: Epoch time: 101.78 s +2026-04-09 09:57:40.501378: +2026-04-09 09:57:40.502938: Epoch 627 +2026-04-09 09:57:40.504269: Current learning rate: 0.00412 +2026-04-09 09:59:22.151555: train_loss -0.2706 +2026-04-09 09:59:22.155117: val_loss -0.2524 +2026-04-09 09:59:22.156337: Pseudo dice [0.3478, 0.0, 0.7964, 0.001, 
0.5899, 0.598, 0.9099] +2026-04-09 09:59:22.157516: Epoch time: 101.65 s +2026-04-09 09:59:23.235707: +2026-04-09 09:59:23.237106: Epoch 628 +2026-04-09 09:59:23.238441: Current learning rate: 0.00411 +2026-04-09 10:01:05.144609: train_loss -0.2737 +2026-04-09 10:01:05.148186: val_loss -0.242 +2026-04-09 10:01:05.149698: Pseudo dice [0.7514, 0.0, 0.6537, 0.8204, 0.6366, 0.7177, 0.9028] +2026-04-09 10:01:05.151371: Epoch time: 101.91 s +2026-04-09 10:01:06.221119: +2026-04-09 10:01:06.222552: Epoch 629 +2026-04-09 10:01:06.223950: Current learning rate: 0.0041 +2026-04-09 10:02:47.872865: train_loss -0.2766 +2026-04-09 10:02:47.877960: val_loss -0.2413 +2026-04-09 10:02:47.879894: Pseudo dice [0.3423, 0.0, 0.8667, 0.6731, 0.4747, 0.6471, 0.7137] +2026-04-09 10:02:47.881419: Epoch time: 101.65 s +2026-04-09 10:02:48.961389: +2026-04-09 10:02:48.962842: Epoch 630 +2026-04-09 10:02:48.964125: Current learning rate: 0.00409 +2026-04-09 10:04:30.794054: train_loss -0.2759 +2026-04-09 10:04:30.798268: val_loss -0.2422 +2026-04-09 10:04:30.799857: Pseudo dice [0.3055, 0.0, 0.8428, 0.2922, 0.4697, 0.6383, 0.8616] +2026-04-09 10:04:30.801687: Epoch time: 101.84 s +2026-04-09 10:04:31.865362: +2026-04-09 10:04:31.866566: Epoch 631 +2026-04-09 10:04:31.867836: Current learning rate: 0.00408 +2026-04-09 10:06:13.673550: train_loss -0.2958 +2026-04-09 10:06:13.677540: val_loss -0.249 +2026-04-09 10:06:13.678985: Pseudo dice [0.7791, 0.0, 0.6313, 0.5737, 0.6, 0.3788, 0.8981] +2026-04-09 10:06:13.680474: Epoch time: 101.81 s +2026-04-09 10:06:14.746780: +2026-04-09 10:06:14.748546: Epoch 632 +2026-04-09 10:06:14.749810: Current learning rate: 0.00407 +2026-04-09 10:07:56.554177: train_loss -0.298 +2026-04-09 10:07:56.557731: val_loss -0.2617 +2026-04-09 10:07:56.559705: Pseudo dice [0.7748, 0.0, 0.8191, 0.5779, 0.5843, 0.6108, 0.4063] +2026-04-09 10:07:56.561474: Epoch time: 101.81 s +2026-04-09 10:07:57.635892: +2026-04-09 10:07:57.637271: Epoch 633 +2026-04-09 10:07:57.638537: 
Current learning rate: 0.00406 +2026-04-09 10:09:40.626577: train_loss -0.2994 +2026-04-09 10:09:40.632958: val_loss -0.2536 +2026-04-09 10:09:40.636822: Pseudo dice [0.718, 0.0, 0.8417, 0.8346, 0.5983, 0.7929, 0.8304] +2026-04-09 10:09:40.638657: Epoch time: 102.99 s +2026-04-09 10:09:41.733398: +2026-04-09 10:09:41.737730: Epoch 634 +2026-04-09 10:09:41.741018: Current learning rate: 0.00405 +2026-04-09 10:11:26.107884: train_loss -0.2903 +2026-04-09 10:11:26.119370: val_loss -0.2565 +2026-04-09 10:11:26.123681: Pseudo dice [0.4464, 0.0, 0.8114, 0.6349, 0.3534, 0.5649, 0.8291] +2026-04-09 10:11:26.129638: Epoch time: 104.38 s +2026-04-09 10:11:27.234552: +2026-04-09 10:11:27.237713: Epoch 635 +2026-04-09 10:11:27.241132: Current learning rate: 0.00404 +2026-04-09 10:13:08.832173: train_loss -0.3002 +2026-04-09 10:13:08.836162: val_loss -0.2497 +2026-04-09 10:13:08.837723: Pseudo dice [0.6336, 0.0, 0.7734, 0.7049, 0.3149, 0.7376, 0.856] +2026-04-09 10:13:08.838986: Epoch time: 101.6 s +2026-04-09 10:13:09.896299: +2026-04-09 10:13:09.897562: Epoch 636 +2026-04-09 10:13:09.899128: Current learning rate: 0.00403 +2026-04-09 10:14:51.782754: train_loss -0.2872 +2026-04-09 10:14:51.786626: val_loss -0.2604 +2026-04-09 10:14:51.788128: Pseudo dice [0.3419, 0.0, 0.8067, 0.2619, 0.4972, 0.5766, 0.9104] +2026-04-09 10:14:51.789408: Epoch time: 101.89 s +2026-04-09 10:14:52.874014: +2026-04-09 10:14:52.875872: Epoch 637 +2026-04-09 10:14:52.877291: Current learning rate: 0.00402 +2026-04-09 10:16:34.449534: train_loss -0.2874 +2026-04-09 10:16:34.455674: val_loss -0.2254 +2026-04-09 10:16:34.457407: Pseudo dice [0.0493, 0.0, 0.8145, 0.8037, 0.5384, 0.6362, 0.8771] +2026-04-09 10:16:34.459970: Epoch time: 101.58 s +2026-04-09 10:16:35.553345: +2026-04-09 10:16:35.554940: Epoch 638 +2026-04-09 10:16:35.556483: Current learning rate: 0.00401 +2026-04-09 10:18:17.824057: train_loss -0.2764 +2026-04-09 10:18:17.827877: val_loss -0.2468 +2026-04-09 10:18:17.829340: Pseudo dice 
[0.5804, 0.0, 0.5409, 0.6616, 0.6033, 0.4545, 0.9412] +2026-04-09 10:18:17.831059: Epoch time: 102.27 s +2026-04-09 10:18:18.897792: +2026-04-09 10:18:18.899300: Epoch 639 +2026-04-09 10:18:18.900469: Current learning rate: 0.004 +2026-04-09 10:20:00.621754: train_loss -0.2891 +2026-04-09 10:20:00.627238: val_loss -0.2449 +2026-04-09 10:20:00.629172: Pseudo dice [0.3106, 0.0, 0.6486, 0.5284, 0.4575, 0.5423, 0.8732] +2026-04-09 10:20:00.631033: Epoch time: 101.73 s +2026-04-09 10:20:01.707733: +2026-04-09 10:20:01.709313: Epoch 640 +2026-04-09 10:20:01.710495: Current learning rate: 0.00399 +2026-04-09 10:21:43.540467: train_loss -0.2912 +2026-04-09 10:21:43.545577: val_loss -0.2332 +2026-04-09 10:21:43.547014: Pseudo dice [0.5368, 0.0, 0.8243, 0.5072, 0.4655, 0.5354, 0.6132] +2026-04-09 10:21:43.548411: Epoch time: 101.84 s +2026-04-09 10:21:44.644933: +2026-04-09 10:21:44.647595: Epoch 641 +2026-04-09 10:21:44.649065: Current learning rate: 0.00398 +2026-04-09 10:23:26.333222: train_loss -0.2884 +2026-04-09 10:23:26.337091: val_loss -0.2403 +2026-04-09 10:23:26.339173: Pseudo dice [0.5057, 0.0, 0.6181, 0.288, 0.528, 0.7031, 0.7696] +2026-04-09 10:23:26.340569: Epoch time: 101.69 s +2026-04-09 10:23:27.397081: +2026-04-09 10:23:27.399160: Epoch 642 +2026-04-09 10:23:27.400835: Current learning rate: 0.00397 +2026-04-09 10:25:09.443442: train_loss -0.2784 +2026-04-09 10:25:09.449186: val_loss -0.2531 +2026-04-09 10:25:09.452262: Pseudo dice [0.4062, 0.0, 0.8973, 0.4452, 0.636, 0.6157, 0.8494] +2026-04-09 10:25:09.454199: Epoch time: 102.05 s +2026-04-09 10:25:10.618052: +2026-04-09 10:25:10.619518: Epoch 643 +2026-04-09 10:25:10.620891: Current learning rate: 0.00396 +2026-04-09 10:26:51.885309: train_loss -0.2856 +2026-04-09 10:26:51.888803: val_loss -0.2426 +2026-04-09 10:26:51.890110: Pseudo dice [0.4685, 0.0, 0.6197, 0.1344, 0.4172, 0.659, 0.2653] +2026-04-09 10:26:51.891738: Epoch time: 101.27 s +2026-04-09 10:26:52.975705: +2026-04-09 10:26:52.977147: Epoch 
644 +2026-04-09 10:26:52.978376: Current learning rate: 0.00395 +2026-04-09 10:28:34.551950: train_loss -0.2937 +2026-04-09 10:28:34.557288: val_loss -0.2336 +2026-04-09 10:28:34.559343: Pseudo dice [0.637, 0.0, 0.7679, 0.5927, 0.5253, 0.5287, 0.8851] +2026-04-09 10:28:34.561291: Epoch time: 101.58 s +2026-04-09 10:28:35.640856: +2026-04-09 10:28:35.642707: Epoch 645 +2026-04-09 10:28:35.644234: Current learning rate: 0.00394 +2026-04-09 10:30:17.178897: train_loss -0.3003 +2026-04-09 10:30:17.183289: val_loss -0.2515 +2026-04-09 10:30:17.184881: Pseudo dice [0.563, 0.0, 0.772, 0.8761, 0.5336, 0.6454, 0.6965] +2026-04-09 10:30:17.186434: Epoch time: 101.54 s +2026-04-09 10:30:18.300670: +2026-04-09 10:30:18.302149: Epoch 646 +2026-04-09 10:30:18.303259: Current learning rate: 0.00393 +2026-04-09 10:31:59.845512: train_loss -0.2966 +2026-04-09 10:31:59.850092: val_loss -0.2424 +2026-04-09 10:31:59.851724: Pseudo dice [0.7287, 0.0, 0.7745, 0.4087, 0.5279, 0.7275, 0.4851] +2026-04-09 10:31:59.853250: Epoch time: 101.55 s +2026-04-09 10:32:00.936220: +2026-04-09 10:32:00.937634: Epoch 647 +2026-04-09 10:32:00.938930: Current learning rate: 0.00392 +2026-04-09 10:33:42.718613: train_loss -0.2777 +2026-04-09 10:33:42.722042: val_loss -0.2286 +2026-04-09 10:33:42.723904: Pseudo dice [0.0113, 0.0, 0.7116, 0.4125, 0.4505, 0.8172, 0.7492] +2026-04-09 10:33:42.725395: Epoch time: 101.79 s +2026-04-09 10:33:43.806723: +2026-04-09 10:33:43.808519: Epoch 648 +2026-04-09 10:33:43.809713: Current learning rate: 0.00391 +2026-04-09 10:35:25.612181: train_loss -0.2771 +2026-04-09 10:35:25.617238: val_loss -0.2544 +2026-04-09 10:35:25.618640: Pseudo dice [0.4091, 0.0, 0.6669, 0.6115, 0.4364, 0.4351, 0.9048] +2026-04-09 10:35:25.619826: Epoch time: 101.81 s +2026-04-09 10:35:26.700539: +2026-04-09 10:35:26.701815: Epoch 649 +2026-04-09 10:35:26.702928: Current learning rate: 0.0039 +2026-04-09 10:37:08.554273: train_loss -0.2933 +2026-04-09 10:37:08.558034: val_loss -0.2062 
+2026-04-09 10:37:08.559311: Pseudo dice [0.375, 0.0, 0.6834, 0.093, 0.5219, 0.5027, 0.7519] +2026-04-09 10:37:08.560774: Epoch time: 101.86 s +2026-04-09 10:37:11.254335: +2026-04-09 10:37:11.258128: Epoch 650 +2026-04-09 10:37:11.259351: Current learning rate: 0.00389 +2026-04-09 10:38:52.730227: train_loss -0.2845 +2026-04-09 10:38:52.734497: val_loss -0.2311 +2026-04-09 10:38:52.736093: Pseudo dice [0.5382, 0.0, 0.6373, 0.6172, 0.5399, 0.7029, 0.7988] +2026-04-09 10:38:52.738170: Epoch time: 101.48 s +2026-04-09 10:38:53.814736: +2026-04-09 10:38:53.816241: Epoch 651 +2026-04-09 10:38:53.817579: Current learning rate: 0.00388 +2026-04-09 10:40:35.351071: train_loss -0.3056 +2026-04-09 10:40:35.354637: val_loss -0.2648 +2026-04-09 10:40:35.355913: Pseudo dice [0.6432, 0.0, 0.7742, 0.3185, 0.5396, 0.8707, 0.6375] +2026-04-09 10:40:35.357105: Epoch time: 101.54 s +2026-04-09 10:40:36.443372: +2026-04-09 10:40:36.444937: Epoch 652 +2026-04-09 10:40:36.446173: Current learning rate: 0.00387 +2026-04-09 10:42:17.927467: train_loss -0.2883 +2026-04-09 10:42:17.931869: val_loss -0.2485 +2026-04-09 10:42:17.933550: Pseudo dice [0.7385, 0.0, 0.7242, 0.3554, 0.4987, 0.4128, 0.8115] +2026-04-09 10:42:17.935291: Epoch time: 101.49 s +2026-04-09 10:42:19.002092: +2026-04-09 10:42:19.004089: Epoch 653 +2026-04-09 10:42:19.005280: Current learning rate: 0.00386 +2026-04-09 10:44:01.631424: train_loss -0.3059 +2026-04-09 10:44:01.634921: val_loss -0.2269 +2026-04-09 10:44:01.636636: Pseudo dice [0.6048, 0.0, 0.6036, 0.8708, 0.4907, 0.4268, 0.8815] +2026-04-09 10:44:01.638492: Epoch time: 102.63 s +2026-04-09 10:44:02.810440: +2026-04-09 10:44:02.812139: Epoch 654 +2026-04-09 10:44:02.813651: Current learning rate: 0.00385 +2026-04-09 10:45:44.052948: train_loss -0.2983 +2026-04-09 10:45:44.056859: val_loss -0.2686 +2026-04-09 10:45:44.058241: Pseudo dice [0.7785, 0.0, 0.7232, 0.7285, 0.4994, 0.7383, 0.9353] +2026-04-09 10:45:44.059885: Epoch time: 101.25 s +2026-04-09 
10:45:45.134644: +2026-04-09 10:45:45.136684: Epoch 655 +2026-04-09 10:45:45.138200: Current learning rate: 0.00384 +2026-04-09 10:47:26.628841: train_loss -0.2975 +2026-04-09 10:47:26.632435: val_loss -0.2546 +2026-04-09 10:47:26.634078: Pseudo dice [0.55, 0.0, 0.664, 0.0748, 0.5195, 0.8491, 0.9262] +2026-04-09 10:47:26.635648: Epoch time: 101.5 s +2026-04-09 10:47:27.703661: +2026-04-09 10:47:27.705039: Epoch 656 +2026-04-09 10:47:27.706252: Current learning rate: 0.00383 +2026-04-09 10:49:08.990520: train_loss -0.2736 +2026-04-09 10:49:08.994854: val_loss -0.2626 +2026-04-09 10:49:08.996340: Pseudo dice [0.5656, 0.0, 0.7577, 0.7675, 0.5911, 0.8041, 0.8402] +2026-04-09 10:49:08.997916: Epoch time: 101.29 s +2026-04-09 10:49:10.060384: +2026-04-09 10:49:10.061853: Epoch 657 +2026-04-09 10:49:10.063603: Current learning rate: 0.00382 +2026-04-09 10:50:51.551627: train_loss -0.2874 +2026-04-09 10:50:51.555742: val_loss -0.271 +2026-04-09 10:50:51.557816: Pseudo dice [0.8201, 0.0, 0.8462, 0.7168, 0.6471, 0.747, 0.9269] +2026-04-09 10:50:51.560323: Epoch time: 101.49 s +2026-04-09 10:50:52.647305: +2026-04-09 10:50:52.648663: Epoch 658 +2026-04-09 10:50:52.649862: Current learning rate: 0.00381 +2026-04-09 10:52:33.968527: train_loss -0.3088 +2026-04-09 10:52:33.972728: val_loss -0.2462 +2026-04-09 10:52:33.974941: Pseudo dice [0.801, 0.0, 0.4453, 0.6813, 0.5321, 0.8522, 0.8396] +2026-04-09 10:52:33.976481: Epoch time: 101.32 s +2026-04-09 10:52:35.079628: +2026-04-09 10:52:35.081105: Epoch 659 +2026-04-09 10:52:35.082325: Current learning rate: 0.0038 +2026-04-09 10:54:16.723796: train_loss -0.3117 +2026-04-09 10:54:16.728204: val_loss -0.2639 +2026-04-09 10:54:16.729594: Pseudo dice [0.7018, 0.0, 0.8279, 0.7811, 0.5883, 0.7923, 0.9365] +2026-04-09 10:54:16.731200: Epoch time: 101.65 s +2026-04-09 10:54:17.799990: +2026-04-09 10:54:17.801539: Epoch 660 +2026-04-09 10:54:17.802900: Current learning rate: 0.00379 +2026-04-09 10:55:59.579609: train_loss -0.3076 
+2026-04-09 10:55:59.584401: val_loss -0.2441 +2026-04-09 10:55:59.586030: Pseudo dice [0.8071, 0.0, 0.7208, 0.1522, 0.4194, 0.6537, 0.8705] +2026-04-09 10:55:59.588511: Epoch time: 101.78 s +2026-04-09 10:56:00.672595: +2026-04-09 10:56:00.673843: Epoch 661 +2026-04-09 10:56:00.674944: Current learning rate: 0.00378 +2026-04-09 10:57:42.332305: train_loss -0.2989 +2026-04-09 10:57:42.337960: val_loss -0.2368 +2026-04-09 10:57:42.339259: Pseudo dice [0.4802, 0.0, 0.645, 0.3057, 0.4929, 0.3835, 0.8322] +2026-04-09 10:57:42.340637: Epoch time: 101.66 s +2026-04-09 10:57:43.418555: +2026-04-09 10:57:43.419830: Epoch 662 +2026-04-09 10:57:43.421142: Current learning rate: 0.00377 +2026-04-09 10:59:24.975175: train_loss -0.2911 +2026-04-09 10:59:24.979548: val_loss -0.2056 +2026-04-09 10:59:24.981560: Pseudo dice [0.7957, 0.0, 0.7088, 0.6786, 0.525, 0.1882, 0.7377] +2026-04-09 10:59:24.984097: Epoch time: 101.56 s +2026-04-09 10:59:26.058518: +2026-04-09 10:59:26.059978: Epoch 663 +2026-04-09 10:59:26.061373: Current learning rate: 0.00376 +2026-04-09 11:01:07.850171: train_loss -0.2901 +2026-04-09 11:01:07.854511: val_loss -0.2486 +2026-04-09 11:01:07.856385: Pseudo dice [0.1357, 0.0, 0.8375, 0.733, 0.6898, 0.893, 0.9154] +2026-04-09 11:01:07.858276: Epoch time: 101.79 s +2026-04-09 11:01:08.925570: +2026-04-09 11:01:08.927312: Epoch 664 +2026-04-09 11:01:08.929190: Current learning rate: 0.00375 +2026-04-09 11:02:50.648550: train_loss -0.2903 +2026-04-09 11:02:50.652984: val_loss -0.2364 +2026-04-09 11:02:50.654889: Pseudo dice [0.7864, 0.0, 0.7137, 0.4957, 0.5271, 0.4654, 0.8218] +2026-04-09 11:02:50.657053: Epoch time: 101.73 s +2026-04-09 11:02:51.743502: +2026-04-09 11:02:51.745237: Epoch 665 +2026-04-09 11:02:51.746614: Current learning rate: 0.00374 +2026-04-09 11:04:33.313079: train_loss -0.2896 +2026-04-09 11:04:33.317589: val_loss -0.2498 +2026-04-09 11:04:33.319263: Pseudo dice [0.6614, 0.0, 0.7128, 0.1793, 0.4478, 0.8409, 0.6601] +2026-04-09 
11:04:33.321519: Epoch time: 101.57 s +2026-04-09 11:04:34.404413: +2026-04-09 11:04:34.405744: Epoch 666 +2026-04-09 11:04:34.406795: Current learning rate: 0.00373 +2026-04-09 11:06:16.214637: train_loss -0.2663 +2026-04-09 11:06:16.218761: val_loss -0.2472 +2026-04-09 11:06:16.220449: Pseudo dice [0.3948, 0.0, 0.7288, 0.4874, 0.4556, 0.5441, 0.8604] +2026-04-09 11:06:16.221709: Epoch time: 101.81 s +2026-04-09 11:06:17.317195: +2026-04-09 11:06:17.318832: Epoch 667 +2026-04-09 11:06:17.320598: Current learning rate: 0.00372 +2026-04-09 11:07:59.124492: train_loss -0.295 +2026-04-09 11:07:59.128784: val_loss -0.25 +2026-04-09 11:07:59.130438: Pseudo dice [0.5341, 0.0, 0.7762, 0.7669, 0.5801, 0.7017, 0.688] +2026-04-09 11:07:59.133732: Epoch time: 101.81 s +2026-04-09 11:08:00.237428: +2026-04-09 11:08:00.239014: Epoch 668 +2026-04-09 11:08:00.240783: Current learning rate: 0.00371 +2026-04-09 11:09:42.470392: train_loss -0.3112 +2026-04-09 11:09:42.474369: val_loss -0.2602 +2026-04-09 11:09:42.476088: Pseudo dice [0.421, 0.0, 0.7424, 0.4726, 0.626, 0.6517, 0.7032] +2026-04-09 11:09:42.477590: Epoch time: 102.24 s +2026-04-09 11:09:43.580795: +2026-04-09 11:09:43.582424: Epoch 669 +2026-04-09 11:09:43.584218: Current learning rate: 0.0037 +2026-04-09 11:11:25.267256: train_loss -0.2951 +2026-04-09 11:11:25.271970: val_loss -0.2445 +2026-04-09 11:11:25.273310: Pseudo dice [0.1763, 0.0, 0.5646, 0.5938, 0.3881, 0.7205, 0.8801] +2026-04-09 11:11:25.274578: Epoch time: 101.69 s +2026-04-09 11:11:26.376804: +2026-04-09 11:11:26.380698: Epoch 670 +2026-04-09 11:11:26.384142: Current learning rate: 0.00369 +2026-04-09 11:13:08.017300: train_loss -0.2893 +2026-04-09 11:13:08.020941: val_loss -0.2649 +2026-04-09 11:13:08.022463: Pseudo dice [0.6253, 0.0, 0.8595, 0.8288, 0.5664, 0.6109, 0.8456] +2026-04-09 11:13:08.023919: Epoch time: 101.64 s +2026-04-09 11:13:09.116858: +2026-04-09 11:13:09.118555: Epoch 671 +2026-04-09 11:13:09.120372: Current learning rate: 0.00368 
+2026-04-09 11:14:50.835756: train_loss -0.3066 +2026-04-09 11:14:50.840104: val_loss -0.2335 +2026-04-09 11:14:50.841944: Pseudo dice [0.1612, 0.0, 0.7115, 0.4991, 0.5507, 0.7882, 0.796] +2026-04-09 11:14:50.843866: Epoch time: 101.72 s +2026-04-09 11:14:51.940897: +2026-04-09 11:14:51.956299: Epoch 672 +2026-04-09 11:14:51.967160: Current learning rate: 0.00367 +2026-04-09 11:16:33.898307: train_loss -0.287 +2026-04-09 11:16:33.901889: val_loss -0.2332 +2026-04-09 11:16:33.903303: Pseudo dice [0.2573, 0.0, 0.7911, 0.6728, 0.5079, 0.778, 0.604] +2026-04-09 11:16:33.904766: Epoch time: 101.96 s +2026-04-09 11:16:35.003714: +2026-04-09 11:16:35.005591: Epoch 673 +2026-04-09 11:16:35.006970: Current learning rate: 0.00366 +2026-04-09 11:18:17.827422: train_loss -0.2852 +2026-04-09 11:18:17.832442: val_loss -0.2432 +2026-04-09 11:18:17.834190: Pseudo dice [0.6576, 0.0, 0.7561, 0.5405, 0.5269, 0.7068, 0.9069] +2026-04-09 11:18:17.835689: Epoch time: 102.83 s +2026-04-09 11:18:18.991064: +2026-04-09 11:18:18.992878: Epoch 674 +2026-04-09 11:18:18.994591: Current learning rate: 0.00365 +2026-04-09 11:20:00.750536: train_loss -0.29 +2026-04-09 11:20:00.754830: val_loss -0.2634 +2026-04-09 11:20:00.756612: Pseudo dice [0.6551, 0.0, 0.8094, 0.6694, 0.6037, 0.6067, 0.9358] +2026-04-09 11:20:00.758246: Epoch time: 101.76 s +2026-04-09 11:20:01.854577: +2026-04-09 11:20:01.857991: Epoch 675 +2026-04-09 11:20:01.859840: Current learning rate: 0.00364 +2026-04-09 11:21:43.478183: train_loss -0.3147 +2026-04-09 11:21:43.482142: val_loss -0.253 +2026-04-09 11:21:43.483929: Pseudo dice [0.8377, 0.0, 0.7447, 0.3367, 0.5434, 0.4699, 0.8142] +2026-04-09 11:21:43.485401: Epoch time: 101.63 s +2026-04-09 11:21:44.570763: +2026-04-09 11:21:44.572191: Epoch 676 +2026-04-09 11:21:44.573584: Current learning rate: 0.00363 +2026-04-09 11:23:27.807269: train_loss -0.3051 +2026-04-09 11:23:27.812842: val_loss -0.2546 +2026-04-09 11:23:27.814652: Pseudo dice [0.6961, 0.0, 0.819, 0.6702, 0.5334, 
0.842, 0.9127] +2026-04-09 11:23:27.816367: Epoch time: 103.24 s +2026-04-09 11:23:28.943890: +2026-04-09 11:23:28.945638: Epoch 677 +2026-04-09 11:23:28.953585: Current learning rate: 0.00362 +2026-04-09 11:25:10.769219: train_loss -0.292 +2026-04-09 11:25:10.787245: val_loss -0.2506 +2026-04-09 11:25:10.790140: Pseudo dice [0.2754, 0.0, 0.8765, 0.561, 0.6235, 0.617, 0.8445] +2026-04-09 11:25:10.793504: Epoch time: 101.83 s +2026-04-09 11:25:11.880727: +2026-04-09 11:25:11.882372: Epoch 678 +2026-04-09 11:25:11.883864: Current learning rate: 0.00361 +2026-04-09 11:26:53.806073: train_loss -0.3066 +2026-04-09 11:26:53.809971: val_loss -0.2405 +2026-04-09 11:26:53.811427: Pseudo dice [0.367, 0.0, 0.7926, 0.7272, 0.5537, 0.4652, 0.7803] +2026-04-09 11:26:53.813389: Epoch time: 101.93 s +2026-04-09 11:26:54.922791: +2026-04-09 11:26:54.924547: Epoch 679 +2026-04-09 11:26:54.925699: Current learning rate: 0.0036 +2026-04-09 11:28:36.793753: train_loss -0.3001 +2026-04-09 11:28:36.797456: val_loss -0.2564 +2026-04-09 11:28:36.800210: Pseudo dice [0.7655, 0.0, 0.7568, 0.5162, 0.4492, 0.622, 0.9206] +2026-04-09 11:28:36.801935: Epoch time: 101.87 s +2026-04-09 11:28:37.949280: +2026-04-09 11:28:37.950832: Epoch 680 +2026-04-09 11:28:37.952042: Current learning rate: 0.00359 +2026-04-09 11:30:20.773757: train_loss -0.2915 +2026-04-09 11:30:20.777894: val_loss -0.2028 +2026-04-09 11:30:20.779626: Pseudo dice [0.2998, 0.0, 0.7254, 0.5595, 0.3517, 0.71, 0.7105] +2026-04-09 11:30:20.782003: Epoch time: 102.83 s +2026-04-09 11:30:21.897654: +2026-04-09 11:30:21.899147: Epoch 681 +2026-04-09 11:30:21.900528: Current learning rate: 0.00358 +2026-04-09 11:32:03.676817: train_loss -0.2866 +2026-04-09 11:32:03.681151: val_loss -0.2468 +2026-04-09 11:32:03.682671: Pseudo dice [0.7599, 0.0, 0.6185, 0.4072, 0.2994, 0.5549, 0.8205] +2026-04-09 11:32:03.684427: Epoch time: 101.78 s +2026-04-09 11:32:04.780828: +2026-04-09 11:32:04.789168: Epoch 682 +2026-04-09 11:32:04.794687: Current 
learning rate: 0.00357 +2026-04-09 11:33:47.962222: train_loss -0.3035 +2026-04-09 11:33:47.968017: val_loss -0.2441 +2026-04-09 11:33:47.970394: Pseudo dice [0.678, 0.0, 0.9034, 0.652, 0.5577, 0.7681, 0.8452] +2026-04-09 11:33:47.973189: Epoch time: 103.18 s +2026-04-09 11:33:49.094456: +2026-04-09 11:33:49.095798: Epoch 683 +2026-04-09 11:33:49.097010: Current learning rate: 0.00356 +2026-04-09 11:35:30.689357: train_loss -0.2995 +2026-04-09 11:35:30.693103: val_loss -0.2511 +2026-04-09 11:35:30.695328: Pseudo dice [0.4116, 0.0, 0.8766, 0.7771, 0.5338, 0.7709, 0.5504] +2026-04-09 11:35:30.697194: Epoch time: 101.6 s +2026-04-09 11:35:31.778786: +2026-04-09 11:35:31.780100: Epoch 684 +2026-04-09 11:35:31.781212: Current learning rate: 0.00355 +2026-04-09 11:37:13.280808: train_loss -0.3024 +2026-04-09 11:37:13.284729: val_loss -0.2543 +2026-04-09 11:37:13.286565: Pseudo dice [0.6187, 0.0, 0.8528, 0.6844, 0.4716, 0.8247, 0.8429] +2026-04-09 11:37:13.288226: Epoch time: 101.51 s +2026-04-09 11:37:14.379221: +2026-04-09 11:37:14.381645: Epoch 685 +2026-04-09 11:37:14.383008: Current learning rate: 0.00354 +2026-04-09 11:38:56.161324: train_loss -0.3076 +2026-04-09 11:38:56.166245: val_loss -0.2548 +2026-04-09 11:38:56.167807: Pseudo dice [0.6079, 0.0, 0.8539, 0.4564, 0.2846, 0.7259, 0.9252] +2026-04-09 11:38:56.169297: Epoch time: 101.79 s +2026-04-09 11:38:57.240775: +2026-04-09 11:38:57.243245: Epoch 686 +2026-04-09 11:38:57.244758: Current learning rate: 0.00353 +2026-04-09 11:40:38.823374: train_loss -0.2969 +2026-04-09 11:40:38.831657: val_loss -0.2642 +2026-04-09 11:40:38.834909: Pseudo dice [0.7565, 0.0, 0.8111, 0.7103, 0.5295, 0.5475, 0.7548] +2026-04-09 11:40:38.840239: Epoch time: 101.59 s +2026-04-09 11:40:39.947653: +2026-04-09 11:40:39.950148: Epoch 687 +2026-04-09 11:40:39.953753: Current learning rate: 0.00352 +2026-04-09 11:42:21.668570: train_loss -0.3071 +2026-04-09 11:42:21.672588: val_loss -0.2685 +2026-04-09 11:42:21.674287: Pseudo dice [0.5725, 
0.0, 0.7377, 0.7598, 0.4953, 0.7264, 0.8745] +2026-04-09 11:42:21.675638: Epoch time: 101.72 s +2026-04-09 11:42:22.752651: +2026-04-09 11:42:22.754002: Epoch 688 +2026-04-09 11:42:22.755216: Current learning rate: 0.00351 +2026-04-09 11:44:04.267274: train_loss -0.3078 +2026-04-09 11:44:04.271310: val_loss -0.2381 +2026-04-09 11:44:04.272834: Pseudo dice [0.5622, 0.0, 0.5984, 0.5545, 0.5569, 0.4774, 0.9399] +2026-04-09 11:44:04.274457: Epoch time: 101.52 s +2026-04-09 11:44:05.381416: +2026-04-09 11:44:05.383016: Epoch 689 +2026-04-09 11:44:05.384332: Current learning rate: 0.0035 +2026-04-09 11:45:47.172763: train_loss -0.2997 +2026-04-09 11:45:47.177098: val_loss -0.2505 +2026-04-09 11:45:47.178507: Pseudo dice [0.5872, 0.0, 0.8505, 0.1492, 0.5069, 0.7266, 0.9128] +2026-04-09 11:45:47.179849: Epoch time: 101.79 s +2026-04-09 11:45:48.275711: +2026-04-09 11:45:48.277257: Epoch 690 +2026-04-09 11:45:48.278405: Current learning rate: 0.00349 +2026-04-09 11:47:30.079377: train_loss -0.2993 +2026-04-09 11:47:30.083639: val_loss -0.2796 +2026-04-09 11:47:30.085043: Pseudo dice [0.6994, 0.0, 0.8539, 0.4796, 0.3979, 0.8084, 0.8778] +2026-04-09 11:47:30.086797: Epoch time: 101.81 s +2026-04-09 11:47:31.378997: +2026-04-09 11:47:31.381754: Epoch 691 +2026-04-09 11:47:31.383054: Current learning rate: 0.00348 +2026-04-09 11:49:13.217541: train_loss -0.3104 +2026-04-09 11:49:13.221697: val_loss -0.2453 +2026-04-09 11:49:13.223426: Pseudo dice [0.4261, 0.0, 0.8731, 0.7038, 0.5338, 0.7495, 0.9202] +2026-04-09 11:49:13.224806: Epoch time: 101.84 s +2026-04-09 11:49:14.332451: +2026-04-09 11:49:14.333799: Epoch 692 +2026-04-09 11:49:14.335091: Current learning rate: 0.00346 +2026-04-09 11:50:56.252322: train_loss -0.3079 +2026-04-09 11:50:56.256440: val_loss -0.2659 +2026-04-09 11:50:56.258087: Pseudo dice [0.3811, 0.0, 0.8895, 0.8503, 0.5518, 0.8932, 0.9075] +2026-04-09 11:50:56.259457: Epoch time: 101.92 s +2026-04-09 11:50:58.359874: +2026-04-09 11:50:58.361286: Epoch 693 
+2026-04-09 11:50:58.362669: Current learning rate: 0.00345 +2026-04-09 11:52:40.259650: train_loss -0.3205 +2026-04-09 11:52:40.265090: val_loss -0.2547 +2026-04-09 11:52:40.266650: Pseudo dice [0.7574, 0.0, 0.8525, 0.7635, 0.4706, 0.7681, 0.6712] +2026-04-09 11:52:40.268153: Epoch time: 101.9 s +2026-04-09 11:52:41.360711: +2026-04-09 11:52:41.362716: Epoch 694 +2026-04-09 11:52:41.364452: Current learning rate: 0.00344 +2026-04-09 11:54:22.961219: train_loss -0.2834 +2026-04-09 11:54:22.965309: val_loss -0.2464 +2026-04-09 11:54:22.966848: Pseudo dice [0.6035, 0.0, 0.7494, 0.5479, 0.4965, 0.3845, 0.887] +2026-04-09 11:54:22.969368: Epoch time: 101.6 s +2026-04-09 11:54:24.059205: +2026-04-09 11:54:24.060732: Epoch 695 +2026-04-09 11:54:24.061969: Current learning rate: 0.00343 +2026-04-09 11:56:05.851340: train_loss -0.3045 +2026-04-09 11:56:05.856357: val_loss -0.2539 +2026-04-09 11:56:05.858078: Pseudo dice [0.5001, 0.0, 0.7037, 0.5545, 0.5192, 0.6529, 0.752] +2026-04-09 11:56:05.859478: Epoch time: 101.8 s +2026-04-09 11:56:06.935193: +2026-04-09 11:56:06.936796: Epoch 696 +2026-04-09 11:56:06.938058: Current learning rate: 0.00342 +2026-04-09 11:57:48.540320: train_loss -0.3146 +2026-04-09 11:57:48.544791: val_loss -0.2477 +2026-04-09 11:57:48.546305: Pseudo dice [0.645, 0.0, 0.7909, 0.6985, 0.4706, 0.3724, 0.9506] +2026-04-09 11:57:48.548035: Epoch time: 101.61 s +2026-04-09 11:57:49.632813: +2026-04-09 11:57:49.634017: Epoch 697 +2026-04-09 11:57:49.635131: Current learning rate: 0.00341 +2026-04-09 11:59:31.349595: train_loss -0.3101 +2026-04-09 11:59:31.353371: val_loss -0.2603 +2026-04-09 11:59:31.355041: Pseudo dice [0.3883, 0.0, 0.7618, 0.6821, 0.6577, 0.4198, 0.7141] +2026-04-09 11:59:31.356569: Epoch time: 101.72 s +2026-04-09 11:59:32.528593: +2026-04-09 11:59:32.530156: Epoch 698 +2026-04-09 11:59:32.531759: Current learning rate: 0.0034 +2026-04-09 12:01:14.472727: train_loss -0.3116 +2026-04-09 12:01:14.476562: val_loss -0.2692 +2026-04-09 
12:01:14.478163: Pseudo dice [0.5642, 0.0, 0.7686, 0.4369, 0.5651, 0.7062, 0.9421] +2026-04-09 12:01:14.479671: Epoch time: 101.95 s +2026-04-09 12:01:15.581931: +2026-04-09 12:01:15.583614: Epoch 699 +2026-04-09 12:01:15.585293: Current learning rate: 0.00339 +2026-04-09 12:02:57.395823: train_loss -0.2986 +2026-04-09 12:02:57.400442: val_loss -0.2481 +2026-04-09 12:02:57.402283: Pseudo dice [0.7411, 0.0, 0.7017, 0.6931, 0.7104, 0.5066, 0.8854] +2026-04-09 12:02:57.404133: Epoch time: 101.82 s +2026-04-09 12:03:00.228514: +2026-04-09 12:03:00.230269: Epoch 700 +2026-04-09 12:03:00.232567: Current learning rate: 0.00338 +2026-04-09 12:04:41.834187: train_loss -0.3129 +2026-04-09 12:04:41.839376: val_loss -0.2451 +2026-04-09 12:04:41.841243: Pseudo dice [0.6379, 0.0, 0.8007, 0.3549, 0.5774, 0.6817, 0.7772] +2026-04-09 12:04:41.844002: Epoch time: 101.61 s +2026-04-09 12:04:42.935473: +2026-04-09 12:04:42.937252: Epoch 701 +2026-04-09 12:04:42.938694: Current learning rate: 0.00337 +2026-04-09 12:06:24.809428: train_loss -0.3052 +2026-04-09 12:06:24.814767: val_loss -0.2549 +2026-04-09 12:06:24.816674: Pseudo dice [0.851, 0.0, 0.7335, 0.5351, 0.529, 0.395, 0.935] +2026-04-09 12:06:24.818192: Epoch time: 101.88 s +2026-04-09 12:06:25.918481: +2026-04-09 12:06:25.919836: Epoch 702 +2026-04-09 12:06:25.922184: Current learning rate: 0.00336 +2026-04-09 12:08:07.491626: train_loss -0.2985 +2026-04-09 12:08:07.496007: val_loss -0.2433 +2026-04-09 12:08:07.497925: Pseudo dice [0.5219, 0.0, 0.8687, 0.4881, 0.6361, 0.4771, 0.4233] +2026-04-09 12:08:07.499743: Epoch time: 101.58 s +2026-04-09 12:08:08.588410: +2026-04-09 12:08:08.589893: Epoch 703 +2026-04-09 12:08:08.591188: Current learning rate: 0.00335 +2026-04-09 12:09:50.322003: train_loss -0.3086 +2026-04-09 12:09:50.325774: val_loss -0.2535 +2026-04-09 12:09:50.327420: Pseudo dice [0.7826, 0.0, 0.8071, 0.7937, 0.589, 0.6705, 0.8865] +2026-04-09 12:09:50.328715: Epoch time: 101.74 s +2026-04-09 12:09:51.417120: 
+2026-04-09 12:09:51.418443: Epoch 704 +2026-04-09 12:09:51.419714: Current learning rate: 0.00334 +2026-04-09 12:11:33.076812: train_loss -0.3114 +2026-04-09 12:11:33.080529: val_loss -0.266 +2026-04-09 12:11:33.082212: Pseudo dice [0.407, 0.0, 0.7502, 0.5175, 0.6141, 0.6529, 0.745] +2026-04-09 12:11:33.085124: Epoch time: 101.66 s +2026-04-09 12:11:34.215741: +2026-04-09 12:11:34.225186: Epoch 705 +2026-04-09 12:11:34.226609: Current learning rate: 0.00333 +2026-04-09 12:13:15.817830: train_loss -0.31 +2026-04-09 12:13:15.821871: val_loss -0.2623 +2026-04-09 12:13:15.823419: Pseudo dice [0.4288, 0.0, 0.8474, 0.4752, 0.4904, 0.337, 0.9346] +2026-04-09 12:13:15.824669: Epoch time: 101.61 s +2026-04-09 12:13:16.931913: +2026-04-09 12:13:16.933487: Epoch 706 +2026-04-09 12:13:16.935018: Current learning rate: 0.00332 +2026-04-09 12:14:58.581652: train_loss -0.3004 +2026-04-09 12:14:58.585459: val_loss -0.252 +2026-04-09 12:14:58.586798: Pseudo dice [0.6367, 0.0, 0.8701, 0.7177, 0.4652, 0.7612, 0.8529] +2026-04-09 12:14:58.588355: Epoch time: 101.65 s +2026-04-09 12:14:59.694751: +2026-04-09 12:14:59.696041: Epoch 707 +2026-04-09 12:14:59.697558: Current learning rate: 0.00331 +2026-04-09 12:16:41.301988: train_loss -0.3041 +2026-04-09 12:16:41.313241: val_loss -0.2675 +2026-04-09 12:16:41.314842: Pseudo dice [0.6611, 0.0, 0.893, 0.8267, 0.5207, 0.5955, 0.7769] +2026-04-09 12:16:41.316522: Epoch time: 101.61 s +2026-04-09 12:16:42.421295: +2026-04-09 12:16:42.422832: Epoch 708 +2026-04-09 12:16:42.424189: Current learning rate: 0.0033 +2026-04-09 12:18:24.041141: train_loss -0.3118 +2026-04-09 12:18:24.046109: val_loss -0.2524 +2026-04-09 12:18:24.047941: Pseudo dice [0.7166, 0.0, 0.6853, 0.3689, 0.4744, 0.6645, 0.8883] +2026-04-09 12:18:24.049543: Epoch time: 101.62 s +2026-04-09 12:18:25.130820: +2026-04-09 12:18:25.132277: Epoch 709 +2026-04-09 12:18:25.133565: Current learning rate: 0.00329 +2026-04-09 12:20:06.887482: train_loss -0.2902 +2026-04-09 
12:20:06.891128: val_loss -0.2128 +2026-04-09 12:20:06.892782: Pseudo dice [0.0, 0.0, 0.7888, 0.3594, 0.4982, 0.2275, 0.6923] +2026-04-09 12:20:06.894356: Epoch time: 101.76 s +2026-04-09 12:20:08.000015: +2026-04-09 12:20:08.019311: Epoch 710 +2026-04-09 12:20:08.031197: Current learning rate: 0.00328 +2026-04-09 12:21:50.370261: train_loss -0.2816 +2026-04-09 12:21:50.374943: val_loss -0.2114 +2026-04-09 12:21:50.378660: Pseudo dice [0.1015, 0.0, 0.8482, 0.7495, 0.4011, 0.3057, 0.8525] +2026-04-09 12:21:50.380306: Epoch time: 102.37 s +2026-04-09 12:21:51.509147: +2026-04-09 12:21:51.510800: Epoch 711 +2026-04-09 12:21:51.512660: Current learning rate: 0.00327 +2026-04-09 12:23:33.069433: train_loss -0.2986 +2026-04-09 12:23:33.074375: val_loss -0.2459 +2026-04-09 12:23:33.076172: Pseudo dice [0.5029, 0.0, 0.9236, 0.4491, 0.5705, 0.3822, 0.5146] +2026-04-09 12:23:33.077789: Epoch time: 101.56 s +2026-04-09 12:23:34.186089: +2026-04-09 12:23:34.188072: Epoch 712 +2026-04-09 12:23:34.189246: Current learning rate: 0.00326 +2026-04-09 12:25:15.927226: train_loss -0.2945 +2026-04-09 12:25:15.931658: val_loss -0.2445 +2026-04-09 12:25:15.933377: Pseudo dice [0.6678, 0.0, 0.5224, 0.6379, 0.5772, 0.8035, 0.7989] +2026-04-09 12:25:15.934784: Epoch time: 101.74 s +2026-04-09 12:25:18.017776: +2026-04-09 12:25:18.019434: Epoch 713 +2026-04-09 12:25:18.020708: Current learning rate: 0.00325 +2026-04-09 12:26:59.670003: train_loss -0.3063 +2026-04-09 12:26:59.674631: val_loss -0.2712 +2026-04-09 12:26:59.676575: Pseudo dice [0.5096, 0.0, 0.881, 0.8294, 0.5253, 0.7842, 0.7618] +2026-04-09 12:26:59.677990: Epoch time: 101.66 s +2026-04-09 12:27:00.767307: +2026-04-09 12:27:00.769140: Epoch 714 +2026-04-09 12:27:00.770301: Current learning rate: 0.00324 +2026-04-09 12:28:42.450987: train_loss -0.3135 +2026-04-09 12:28:42.454958: val_loss -0.2421 +2026-04-09 12:28:42.456592: Pseudo dice [0.6577, 0.0, 0.8714, 0.6882, 0.5466, 0.4278, 0.7001] +2026-04-09 12:28:42.458101: Epoch 
time: 101.69 s +2026-04-09 12:28:43.540012: +2026-04-09 12:28:43.541659: Epoch 715 +2026-04-09 12:28:43.543085: Current learning rate: 0.00323 +2026-04-09 12:30:25.161723: train_loss -0.2961 +2026-04-09 12:30:25.166337: val_loss -0.2473 +2026-04-09 12:30:25.167875: Pseudo dice [0.6929, 0.0, 0.807, 0.7036, 0.2587, 0.3721, 0.8428] +2026-04-09 12:30:25.169652: Epoch time: 101.62 s +2026-04-09 12:30:26.297104: +2026-04-09 12:30:26.310663: Epoch 716 +2026-04-09 12:30:26.325511: Current learning rate: 0.00322 +2026-04-09 12:32:08.050508: train_loss -0.3125 +2026-04-09 12:32:08.054105: val_loss -0.2673 +2026-04-09 12:32:08.055639: Pseudo dice [0.5761, 0.0, 0.8142, 0.6344, 0.5034, 0.7798, 0.9321] +2026-04-09 12:32:08.057098: Epoch time: 101.76 s +2026-04-09 12:32:09.149586: +2026-04-09 12:32:09.151554: Epoch 717 +2026-04-09 12:32:09.152681: Current learning rate: 0.00321 +2026-04-09 12:33:51.188499: train_loss -0.3067 +2026-04-09 12:33:51.194777: val_loss -0.2754 +2026-04-09 12:33:51.196785: Pseudo dice [0.5463, 0.0, 0.8127, 0.7693, 0.5975, 0.4804, 0.8833] +2026-04-09 12:33:51.199291: Epoch time: 102.04 s +2026-04-09 12:33:52.293363: +2026-04-09 12:33:52.295256: Epoch 718 +2026-04-09 12:33:52.296448: Current learning rate: 0.0032 +2026-04-09 12:35:33.777987: train_loss -0.3024 +2026-04-09 12:35:33.782132: val_loss -0.2676 +2026-04-09 12:35:33.783831: Pseudo dice [0.686, 0.0, 0.8379, 0.8765, 0.611, 0.5214, 0.8763] +2026-04-09 12:35:33.785553: Epoch time: 101.49 s +2026-04-09 12:35:34.881924: +2026-04-09 12:35:34.883504: Epoch 719 +2026-04-09 12:35:34.884692: Current learning rate: 0.00319 +2026-04-09 12:37:16.524190: train_loss -0.297 +2026-04-09 12:37:16.529460: val_loss -0.2339 +2026-04-09 12:37:16.531610: Pseudo dice [0.8323, 0.0, 0.7152, 0.6353, 0.5185, 0.5048, 0.894] +2026-04-09 12:37:16.534185: Epoch time: 101.65 s +2026-04-09 12:37:17.627504: +2026-04-09 12:37:17.629995: Epoch 720 +2026-04-09 12:37:17.631126: Current learning rate: 0.00318 +2026-04-09 
12:38:59.188404: train_loss -0.3036 +2026-04-09 12:38:59.192176: val_loss -0.2181 +2026-04-09 12:38:59.193614: Pseudo dice [0.8253, 0.0, 0.6114, 0.0839, 0.292, 0.3916, 0.8225] +2026-04-09 12:38:59.195102: Epoch time: 101.56 s +2026-04-09 12:39:00.288862: +2026-04-09 12:39:00.290288: Epoch 721 +2026-04-09 12:39:00.291427: Current learning rate: 0.00317 +2026-04-09 12:40:41.894481: train_loss -0.3064 +2026-04-09 12:40:41.898756: val_loss -0.2268 +2026-04-09 12:40:41.900565: Pseudo dice [0.5017, 0.0, 0.498, 0.6362, 0.5301, 0.7353, 0.6802] +2026-04-09 12:40:41.901847: Epoch time: 101.61 s +2026-04-09 12:40:42.998838: +2026-04-09 12:40:43.000300: Epoch 722 +2026-04-09 12:40:43.001455: Current learning rate: 0.00316 +2026-04-09 12:42:24.552648: train_loss -0.3068 +2026-04-09 12:42:24.556373: val_loss -0.2485 +2026-04-09 12:42:24.558172: Pseudo dice [0.6432, 0.0, 0.8112, 0.7187, 0.4658, 0.6146, 0.7818] +2026-04-09 12:42:24.559728: Epoch time: 101.56 s +2026-04-09 12:42:25.661662: +2026-04-09 12:42:25.672193: Epoch 723 +2026-04-09 12:42:25.677423: Current learning rate: 0.00315 +2026-04-09 12:44:07.312723: train_loss -0.3029 +2026-04-09 12:44:07.316633: val_loss -0.2731 +2026-04-09 12:44:07.318033: Pseudo dice [0.5966, 0.0, 0.7905, 0.7607, 0.3323, 0.5362, 0.9396] +2026-04-09 12:44:07.319661: Epoch time: 101.65 s +2026-04-09 12:44:08.396616: +2026-04-09 12:44:08.398923: Epoch 724 +2026-04-09 12:44:08.400759: Current learning rate: 0.00314 +2026-04-09 12:45:50.176691: train_loss -0.3084 +2026-04-09 12:45:50.180686: val_loss -0.2692 +2026-04-09 12:45:50.182144: Pseudo dice [0.6578, 0.0, 0.6691, 0.7577, 0.4636, 0.75, 0.8714] +2026-04-09 12:45:50.183501: Epoch time: 101.78 s +2026-04-09 12:45:51.285636: +2026-04-09 12:45:51.286901: Epoch 725 +2026-04-09 12:45:51.289151: Current learning rate: 0.00313 +2026-04-09 12:47:33.087144: train_loss -0.3039 +2026-04-09 12:47:33.091383: val_loss -0.2248 +2026-04-09 12:47:33.093640: Pseudo dice [0.1159, 0.0, 0.6912, 0.5269, 0.5731, 0.3594, 
0.8279] +2026-04-09 12:47:33.095055: Epoch time: 101.8 s +2026-04-09 12:47:34.221578: +2026-04-09 12:47:34.226506: Epoch 726 +2026-04-09 12:47:34.227636: Current learning rate: 0.00312 +2026-04-09 12:49:15.898888: train_loss -0.2934 +2026-04-09 12:49:15.904422: val_loss -0.218 +2026-04-09 12:49:15.906759: Pseudo dice [0.3177, 0.0, 0.6296, 0.6455, 0.3565, 0.4274, 0.9279] +2026-04-09 12:49:15.908991: Epoch time: 101.68 s +2026-04-09 12:49:17.030896: +2026-04-09 12:49:17.032611: Epoch 727 +2026-04-09 12:49:17.034261: Current learning rate: 0.00311 +2026-04-09 12:50:58.745729: train_loss -0.3016 +2026-04-09 12:50:58.750878: val_loss -0.2396 +2026-04-09 12:50:58.753215: Pseudo dice [0.3176, 0.0, 0.8575, 0.3559, 0.4572, 0.7264, 0.8074] +2026-04-09 12:50:58.754871: Epoch time: 101.72 s +2026-04-09 12:50:59.868436: +2026-04-09 12:50:59.869927: Epoch 728 +2026-04-09 12:50:59.871285: Current learning rate: 0.0031 +2026-04-09 12:52:41.784055: train_loss -0.2949 +2026-04-09 12:52:41.789559: val_loss -0.2487 +2026-04-09 12:52:41.791430: Pseudo dice [0.067, 0.0, 0.881, 0.7321, 0.5419, 0.7902, 0.9022] +2026-04-09 12:52:41.793748: Epoch time: 101.92 s +2026-04-09 12:52:42.905615: +2026-04-09 12:52:42.907259: Epoch 729 +2026-04-09 12:52:42.908579: Current learning rate: 0.00309 +2026-04-09 12:54:25.559485: train_loss -0.3005 +2026-04-09 12:54:25.563250: val_loss -0.2744 +2026-04-09 12:54:25.564859: Pseudo dice [0.5745, 0.0, 0.8223, 0.7667, 0.5592, 0.7084, 0.9086] +2026-04-09 12:54:25.566607: Epoch time: 102.66 s +2026-04-09 12:54:26.680680: +2026-04-09 12:54:26.682218: Epoch 730 +2026-04-09 12:54:26.683795: Current learning rate: 0.00308 +2026-04-09 12:56:09.181365: train_loss -0.3013 +2026-04-09 12:56:09.184829: val_loss -0.244 +2026-04-09 12:56:09.186535: Pseudo dice [0.3339, 0.0, 0.8367, 0.6735, 0.5054, 0.6685, 0.8805] +2026-04-09 12:56:09.187950: Epoch time: 102.5 s +2026-04-09 12:56:10.292422: +2026-04-09 12:56:10.295905: Epoch 731 +2026-04-09 12:56:10.297176: Current learning 
rate: 0.00307 +2026-04-09 12:57:52.057648: train_loss -0.308 +2026-04-09 12:57:52.062057: val_loss -0.2471 +2026-04-09 12:57:52.063643: Pseudo dice [0.7197, 0.0, 0.5262, 0.5695, 0.4019, 0.5506, 0.7959] +2026-04-09 12:57:52.065007: Epoch time: 101.77 s +2026-04-09 12:57:53.173733: +2026-04-09 12:57:53.175450: Epoch 732 +2026-04-09 12:57:53.176618: Current learning rate: 0.00306 +2026-04-09 12:59:35.070855: train_loss -0.303 +2026-04-09 12:59:35.074453: val_loss -0.2768 +2026-04-09 12:59:35.075927: Pseudo dice [0.2444, 0.0, 0.7544, 0.7352, 0.5301, 0.7753, 0.7322] +2026-04-09 12:59:35.077307: Epoch time: 101.9 s +2026-04-09 12:59:37.234644: +2026-04-09 12:59:37.236123: Epoch 733 +2026-04-09 12:59:37.249277: Current learning rate: 0.00305 +2026-04-09 13:01:19.049871: train_loss -0.3104 +2026-04-09 13:01:19.053952: val_loss -0.26 +2026-04-09 13:01:19.056540: Pseudo dice [0.5198, 0.0, 0.8211, 0.7083, 0.5399, 0.4936, 0.9397] +2026-04-09 13:01:19.058250: Epoch time: 101.82 s +2026-04-09 13:01:20.159955: +2026-04-09 13:01:20.161545: Epoch 734 +2026-04-09 13:01:20.162820: Current learning rate: 0.00304 +2026-04-09 13:03:02.059819: train_loss -0.3011 +2026-04-09 13:03:02.064480: val_loss -0.2731 +2026-04-09 13:03:02.066349: Pseudo dice [0.5448, 0.0, 0.8671, 0.8773, 0.5122, 0.7774, 0.8086] +2026-04-09 13:03:02.067942: Epoch time: 101.9 s +2026-04-09 13:03:03.177747: +2026-04-09 13:03:03.179163: Epoch 735 +2026-04-09 13:03:03.180654: Current learning rate: 0.00303 +2026-04-09 13:04:45.128076: train_loss -0.3098 +2026-04-09 13:04:45.131724: val_loss -0.2551 +2026-04-09 13:04:45.133309: Pseudo dice [0.553, 0.0, 0.696, 0.7852, 0.5284, 0.6742, 0.8309] +2026-04-09 13:04:45.134941: Epoch time: 101.95 s +2026-04-09 13:04:46.222798: +2026-04-09 13:04:46.224347: Epoch 736 +2026-04-09 13:04:46.225507: Current learning rate: 0.00302 +2026-04-09 13:06:28.325303: train_loss -0.2995 +2026-04-09 13:06:28.329108: val_loss -0.2494 +2026-04-09 13:06:28.330551: Pseudo dice [0.409, 0.0, 0.7134, 
0.761, 0.5996, 0.837, 0.5632] +2026-04-09 13:06:28.331998: Epoch time: 102.11 s +2026-04-09 13:06:29.444072: +2026-04-09 13:06:29.445412: Epoch 737 +2026-04-09 13:06:29.446986: Current learning rate: 0.00301 +2026-04-09 13:08:11.482732: train_loss -0.3079 +2026-04-09 13:08:11.486726: val_loss -0.2793 +2026-04-09 13:08:11.488775: Pseudo dice [0.6042, 0.0, 0.8438, 0.5635, 0.2576, 0.4705, 0.7879] +2026-04-09 13:08:11.490477: Epoch time: 102.04 s +2026-04-09 13:08:12.579544: +2026-04-09 13:08:12.580821: Epoch 738 +2026-04-09 13:08:12.582104: Current learning rate: 0.003 +2026-04-09 13:09:55.895686: train_loss -0.3199 +2026-04-09 13:09:55.904178: val_loss -0.2605 +2026-04-09 13:09:55.905941: Pseudo dice [0.8725, 0.0, 0.7936, 0.4417, 0.4795, 0.6995, 0.8629] +2026-04-09 13:09:55.907837: Epoch time: 103.32 s +2026-04-09 13:09:57.039544: +2026-04-09 13:09:57.041162: Epoch 739 +2026-04-09 13:09:57.042727: Current learning rate: 0.00299 +2026-04-09 13:11:39.095708: train_loss -0.3082 +2026-04-09 13:11:39.100574: val_loss -0.2593 +2026-04-09 13:11:39.102281: Pseudo dice [0.4718, 0.0, 0.8203, 0.5673, 0.6246, 0.4927, 0.83] +2026-04-09 13:11:39.103981: Epoch time: 102.06 s +2026-04-09 13:11:40.190206: +2026-04-09 13:11:40.191644: Epoch 740 +2026-04-09 13:11:40.192812: Current learning rate: 0.00297 +2026-04-09 13:13:21.966445: train_loss -0.3061 +2026-04-09 13:13:21.971638: val_loss -0.2684 +2026-04-09 13:13:21.974068: Pseudo dice [0.7638, 0.0, 0.8205, 0.4445, 0.6342, 0.771, 0.8449] +2026-04-09 13:13:21.976131: Epoch time: 101.78 s +2026-04-09 13:13:23.082557: +2026-04-09 13:13:23.083922: Epoch 741 +2026-04-09 13:13:23.085064: Current learning rate: 0.00296 +2026-04-09 13:15:05.333247: train_loss -0.3201 +2026-04-09 13:15:05.337052: val_loss -0.2554 +2026-04-09 13:15:05.339737: Pseudo dice [0.5771, 0.0, 0.827, 0.7952, 0.468, 0.5203, 0.9129] +2026-04-09 13:15:05.340912: Epoch time: 102.25 s +2026-04-09 13:15:06.435283: +2026-04-09 13:15:06.436680: Epoch 742 +2026-04-09 
13:15:06.437915: Current learning rate: 0.00295 +2026-04-09 13:16:48.477784: train_loss -0.3115 +2026-04-09 13:16:48.483023: val_loss -0.2343 +2026-04-09 13:16:48.484937: Pseudo dice [0.8389, 0.0, 0.7001, 0.5168, 0.4787, 0.762, 0.6254] +2026-04-09 13:16:48.486669: Epoch time: 102.05 s +2026-04-09 13:16:49.588457: +2026-04-09 13:16:49.589845: Epoch 743 +2026-04-09 13:16:49.590920: Current learning rate: 0.00294 +2026-04-09 13:18:31.755896: train_loss -0.2936 +2026-04-09 13:18:31.764838: val_loss -0.2546 +2026-04-09 13:18:31.767040: Pseudo dice [0.6724, 0.0, 0.7702, 0.7622, 0.5702, 0.4219, 0.9297] +2026-04-09 13:18:31.770618: Epoch time: 102.17 s +2026-04-09 13:18:32.876500: +2026-04-09 13:18:32.878252: Epoch 744 +2026-04-09 13:18:32.881072: Current learning rate: 0.00293 +2026-04-09 13:20:15.101380: train_loss -0.2892 +2026-04-09 13:20:15.104607: val_loss -0.2493 +2026-04-09 13:20:15.106070: Pseudo dice [0.7126, 0.0, 0.6381, 0.7601, 0.5108, 0.609, 0.95] +2026-04-09 13:20:15.107505: Epoch time: 102.23 s +2026-04-09 13:20:16.206508: +2026-04-09 13:20:16.207863: Epoch 745 +2026-04-09 13:20:16.209014: Current learning rate: 0.00292 +2026-04-09 13:21:58.275915: train_loss -0.3033 +2026-04-09 13:21:58.280136: val_loss -0.2658 +2026-04-09 13:21:58.281650: Pseudo dice [0.7372, 0.0, 0.7008, 0.2164, 0.567, 0.5563, 0.885] +2026-04-09 13:21:58.283073: Epoch time: 102.07 s +2026-04-09 13:21:59.378206: +2026-04-09 13:21:59.379484: Epoch 746 +2026-04-09 13:21:59.380751: Current learning rate: 0.00291 +2026-04-09 13:23:41.580185: train_loss -0.307 +2026-04-09 13:23:41.583691: val_loss -0.2425 +2026-04-09 13:23:41.585036: Pseudo dice [0.6067, 0.0, 0.8125, 0.1563, 0.3931, 0.8427, 0.8136] +2026-04-09 13:23:41.586525: Epoch time: 102.2 s +2026-04-09 13:23:42.681474: +2026-04-09 13:23:42.683142: Epoch 747 +2026-04-09 13:23:42.684341: Current learning rate: 0.0029 +2026-04-09 13:25:24.774288: train_loss -0.3147 +2026-04-09 13:25:24.778781: val_loss -0.2694 +2026-04-09 13:25:24.781237: 
Pseudo dice [0.654, 0.0, 0.8858, 0.8111, 0.4272, 0.6338, 0.9294] +2026-04-09 13:25:24.782885: Epoch time: 102.1 s +2026-04-09 13:25:25.877732: +2026-04-09 13:25:25.892848: Epoch 748 +2026-04-09 13:25:25.895512: Current learning rate: 0.00289 +2026-04-09 13:27:08.122924: train_loss -0.3042 +2026-04-09 13:27:08.126368: val_loss -0.2486 +2026-04-09 13:27:08.128004: Pseudo dice [0.3629, 0.0, 0.6721, 0.3261, 0.6621, 0.8893, 0.9117] +2026-04-09 13:27:08.129312: Epoch time: 102.25 s +2026-04-09 13:27:09.228573: +2026-04-09 13:27:09.231455: Epoch 749 +2026-04-09 13:27:09.232615: Current learning rate: 0.00288 +2026-04-09 13:28:51.660743: train_loss -0.3065 +2026-04-09 13:28:51.666058: val_loss -0.2542 +2026-04-09 13:28:51.668498: Pseudo dice [0.405, 0.0, 0.8334, 0.7104, 0.6407, 0.8953, 0.9303] +2026-04-09 13:28:51.670511: Epoch time: 102.44 s +2026-04-09 13:28:54.499928: +2026-04-09 13:28:54.501354: Epoch 750 +2026-04-09 13:28:54.502640: Current learning rate: 0.00287 +2026-04-09 13:30:36.837065: train_loss -0.3208 +2026-04-09 13:30:36.842427: val_loss -0.2729 +2026-04-09 13:30:36.845630: Pseudo dice [0.4621, 0.0, 0.8875, 0.8397, 0.4649, 0.6088, 0.8836] +2026-04-09 13:30:36.860238: Epoch time: 102.34 s +2026-04-09 13:30:37.982131: +2026-04-09 13:30:37.983416: Epoch 751 +2026-04-09 13:30:37.984501: Current learning rate: 0.00286 +2026-04-09 13:32:20.363115: train_loss -0.312 +2026-04-09 13:32:20.371607: val_loss -0.2421 +2026-04-09 13:32:20.373255: Pseudo dice [0.798, 0.0, 0.6188, 0.1261, 0.5601, 0.4209, 0.785] +2026-04-09 13:32:20.375376: Epoch time: 102.38 s +2026-04-09 13:32:21.463894: +2026-04-09 13:32:21.465486: Epoch 752 +2026-04-09 13:32:21.467593: Current learning rate: 0.00285 +2026-04-09 13:34:03.940841: train_loss -0.314 +2026-04-09 13:34:03.953175: val_loss -0.253 +2026-04-09 13:34:03.955115: Pseudo dice [0.5428, 0.0, 0.6661, 0.2366, 0.5885, 0.8004, 0.9332] +2026-04-09 13:34:03.964613: Epoch time: 102.48 s +2026-04-09 13:34:06.154638: +2026-04-09 
13:34:06.156677: Epoch 753 +2026-04-09 13:34:06.158026: Current learning rate: 0.00284 +2026-04-09 13:35:48.399602: train_loss -0.3049 +2026-04-09 13:35:48.403684: val_loss -0.2865 +2026-04-09 13:35:48.405791: Pseudo dice [0.7245, 0.0, 0.7714, 0.6495, 0.6908, 0.5211, 0.9293] +2026-04-09 13:35:48.407368: Epoch time: 102.25 s +2026-04-09 13:35:49.526937: +2026-04-09 13:35:49.528370: Epoch 754 +2026-04-09 13:35:49.529478: Current learning rate: 0.00283 +2026-04-09 13:37:31.785048: train_loss -0.3117 +2026-04-09 13:37:31.789122: val_loss -0.255 +2026-04-09 13:37:31.791002: Pseudo dice [0.6478, 0.0, 0.8437, 0.8541, 0.4884, 0.8761, 0.9282] +2026-04-09 13:37:31.792779: Epoch time: 102.26 s +2026-04-09 13:37:32.919021: +2026-04-09 13:37:32.920698: Epoch 755 +2026-04-09 13:37:32.922156: Current learning rate: 0.00282 +2026-04-09 13:39:15.450478: train_loss -0.3057 +2026-04-09 13:39:15.454552: val_loss -0.264 +2026-04-09 13:39:15.456371: Pseudo dice [0.4055, 0.0, 0.6521, 0.6698, 0.2094, 0.5081, 0.889] +2026-04-09 13:39:15.458286: Epoch time: 102.53 s +2026-04-09 13:39:16.565897: +2026-04-09 13:39:16.567390: Epoch 756 +2026-04-09 13:39:16.568742: Current learning rate: 0.00281 +2026-04-09 13:40:58.737937: train_loss -0.3088 +2026-04-09 13:40:58.742737: val_loss -0.2902 +2026-04-09 13:40:58.746843: Pseudo dice [0.2438, 0.0, 0.7774, 0.4129, 0.5333, 0.7105, 0.8037] +2026-04-09 13:40:58.749548: Epoch time: 102.18 s +2026-04-09 13:40:59.886467: +2026-04-09 13:40:59.940418: Epoch 757 +2026-04-09 13:40:59.964181: Current learning rate: 0.0028 +2026-04-09 13:42:42.152356: train_loss -0.3017 +2026-04-09 13:42:42.159467: val_loss -0.2719 +2026-04-09 13:42:42.161450: Pseudo dice [0.4948, 0.0, 0.7807, 0.5591, 0.6386, 0.7027, 0.9216] +2026-04-09 13:42:42.163736: Epoch time: 102.27 s +2026-04-09 13:42:43.294582: +2026-04-09 13:42:43.298913: Epoch 758 +2026-04-09 13:42:43.302093: Current learning rate: 0.00279 +2026-04-09 13:44:25.381081: train_loss -0.3207 +2026-04-09 13:44:25.388440: 
val_loss -0.2479 +2026-04-09 13:44:25.389985: Pseudo dice [0.5139, 0.0, 0.8, 0.4701, 0.73, 0.8905, 0.7477] +2026-04-09 13:44:25.396478: Epoch time: 102.09 s +2026-04-09 13:44:26.509171: +2026-04-09 13:44:26.511112: Epoch 759 +2026-04-09 13:44:26.512654: Current learning rate: 0.00278 +2026-04-09 13:46:08.759068: train_loss -0.3172 +2026-04-09 13:46:08.763120: val_loss -0.2601 +2026-04-09 13:46:08.764729: Pseudo dice [0.7201, 0.0, 0.6909, 0.5326, 0.5639, 0.8, 0.9116] +2026-04-09 13:46:08.766079: Epoch time: 102.25 s +2026-04-09 13:46:09.879777: +2026-04-09 13:46:09.881265: Epoch 760 +2026-04-09 13:46:09.882735: Current learning rate: 0.00277 +2026-04-09 13:47:52.218532: train_loss -0.3217 +2026-04-09 13:47:52.222951: val_loss -0.2804 +2026-04-09 13:47:52.224516: Pseudo dice [0.4407, 0.0, 0.7941, 0.739, 0.3916, 0.9084, 0.9374] +2026-04-09 13:47:52.226071: Epoch time: 102.34 s +2026-04-09 13:47:53.355665: +2026-04-09 13:47:53.357467: Epoch 761 +2026-04-09 13:47:53.358887: Current learning rate: 0.00276 +2026-04-09 13:49:35.516428: train_loss -0.3232 +2026-04-09 13:49:35.520640: val_loss -0.2767 +2026-04-09 13:49:35.522286: Pseudo dice [0.5178, 0.0, 0.7555, 0.7676, 0.5269, 0.6007, 0.8589] +2026-04-09 13:49:35.523483: Epoch time: 102.16 s +2026-04-09 13:49:36.623532: +2026-04-09 13:49:36.624720: Epoch 762 +2026-04-09 13:49:36.626009: Current learning rate: 0.00275 +2026-04-09 13:51:18.899988: train_loss -0.3339 +2026-04-09 13:51:18.904275: val_loss -0.2622 +2026-04-09 13:51:18.905644: Pseudo dice [0.6586, 0.0, 0.6968, 0.5324, 0.6029, 0.1701, 0.7795] +2026-04-09 13:51:18.908731: Epoch time: 102.28 s +2026-04-09 13:51:20.033668: +2026-04-09 13:51:20.035387: Epoch 763 +2026-04-09 13:51:20.037050: Current learning rate: 0.00274 +2026-04-09 13:53:02.195003: train_loss -0.3073 +2026-04-09 13:53:02.199445: val_loss -0.2479 +2026-04-09 13:53:02.202177: Pseudo dice [0.7622, 0.0, 0.8424, 0.558, 0.5035, 0.779, 0.9002] +2026-04-09 13:53:02.203626: Epoch time: 102.16 s +2026-04-09 
13:53:03.332154: +2026-04-09 13:53:03.333330: Epoch 764 +2026-04-09 13:53:03.334398: Current learning rate: 0.00273 +2026-04-09 13:54:45.547361: train_loss -0.3231 +2026-04-09 13:54:45.553495: val_loss -0.2789 +2026-04-09 13:54:45.554838: Pseudo dice [0.586, 0.0, 0.7471, 0.8494, 0.5617, 0.7125, 0.8951] +2026-04-09 13:54:45.556250: Epoch time: 102.22 s +2026-04-09 13:54:46.682552: +2026-04-09 13:54:46.683941: Epoch 765 +2026-04-09 13:54:46.685133: Current learning rate: 0.00272 +2026-04-09 13:56:28.850989: train_loss -0.3263 +2026-04-09 13:56:28.855578: val_loss -0.2305 +2026-04-09 13:56:28.856944: Pseudo dice [0.4415, 0.0, 0.8435, 0.6604, 0.5411, 0.518, 0.9179] +2026-04-09 13:56:28.858458: Epoch time: 102.17 s +2026-04-09 13:56:29.966235: +2026-04-09 13:56:29.967577: Epoch 766 +2026-04-09 13:56:29.968693: Current learning rate: 0.00271 +2026-04-09 13:58:12.094593: train_loss -0.3184 +2026-04-09 13:58:12.098852: val_loss -0.2506 +2026-04-09 13:58:12.100471: Pseudo dice [0.6749, 0.0, 0.7441, 0.5269, 0.4802, 0.8765, 0.8717] +2026-04-09 13:58:12.102155: Epoch time: 102.13 s +2026-04-09 13:58:13.219659: +2026-04-09 13:58:13.221647: Epoch 767 +2026-04-09 13:58:13.223165: Current learning rate: 0.0027 +2026-04-09 13:59:55.312282: train_loss -0.3144 +2026-04-09 13:59:55.315846: val_loss -0.2533 +2026-04-09 13:59:55.317198: Pseudo dice [0.5633, 0.0, 0.7955, 0.7766, 0.4709, 0.6749, 0.9061] +2026-04-09 13:59:55.318665: Epoch time: 102.1 s +2026-04-09 13:59:56.446956: +2026-04-09 13:59:56.448273: Epoch 768 +2026-04-09 13:59:56.449425: Current learning rate: 0.00268 +2026-04-09 14:01:38.576870: train_loss -0.2989 +2026-04-09 14:01:38.593701: val_loss -0.2345 +2026-04-09 14:01:38.596793: Pseudo dice [0.5393, 0.0, 0.7404, 0.6453, 0.3996, 0.9003, 0.901] +2026-04-09 14:01:38.600638: Epoch time: 102.13 s +2026-04-09 14:01:39.733086: +2026-04-09 14:01:39.734238: Epoch 769 +2026-04-09 14:01:39.735454: Current learning rate: 0.00267 +2026-04-09 14:03:22.068833: train_loss -0.2911 
+2026-04-09 14:03:22.075478: val_loss -0.2543 +2026-04-09 14:03:22.078674: Pseudo dice [0.2578, 0.0, 0.8149, 0.5772, 0.512, 0.5843, 0.5789] +2026-04-09 14:03:22.082430: Epoch time: 102.34 s +2026-04-09 14:03:23.204636: +2026-04-09 14:03:23.205959: Epoch 770 +2026-04-09 14:03:23.207155: Current learning rate: 0.00266 +2026-04-09 14:05:05.638650: train_loss -0.3047 +2026-04-09 14:05:05.647572: val_loss -0.2309 +2026-04-09 14:05:05.654743: Pseudo dice [0.5064, 0.0, 0.7722, 0.8131, 0.5327, 0.5121, 0.8338] +2026-04-09 14:05:05.656271: Epoch time: 102.44 s +2026-04-09 14:05:06.781219: +2026-04-09 14:05:06.782383: Epoch 771 +2026-04-09 14:05:06.783678: Current learning rate: 0.00265 +2026-04-09 14:06:49.106889: train_loss -0.3074 +2026-04-09 14:06:49.111571: val_loss -0.2336 +2026-04-09 14:06:49.113127: Pseudo dice [0.301, 0.0, 0.8437, 0.7536, 0.5267, 0.6707, 0.8632] +2026-04-09 14:06:49.114589: Epoch time: 102.33 s +2026-04-09 14:06:50.251823: +2026-04-09 14:06:50.253154: Epoch 772 +2026-04-09 14:06:50.254323: Current learning rate: 0.00264 +2026-04-09 14:08:33.432621: train_loss -0.2938 +2026-04-09 14:08:33.436424: val_loss -0.2387 +2026-04-09 14:08:33.437772: Pseudo dice [0.3835, 0.0, 0.8139, 0.6277, 0.3928, 0.6283, 0.8418] +2026-04-09 14:08:33.439123: Epoch time: 103.18 s +2026-04-09 14:08:34.555133: +2026-04-09 14:08:34.557517: Epoch 773 +2026-04-09 14:08:34.560066: Current learning rate: 0.00263 +2026-04-09 14:10:17.089968: train_loss -0.3114 +2026-04-09 14:10:17.093592: val_loss -0.2201 +2026-04-09 14:10:17.095229: Pseudo dice [0.4359, 0.0, 0.8046, 0.7248, 0.4855, 0.4517, 0.8656] +2026-04-09 14:10:17.096846: Epoch time: 102.54 s +2026-04-09 14:10:18.219350: +2026-04-09 14:10:18.220679: Epoch 774 +2026-04-09 14:10:18.221806: Current learning rate: 0.00262 +2026-04-09 14:12:00.517380: train_loss -0.3071 +2026-04-09 14:12:00.522645: val_loss -0.2822 +2026-04-09 14:12:00.524466: Pseudo dice [0.4183, 0.0, 0.7942, 0.3624, 0.571, 0.8343, 0.8489] +2026-04-09 
14:12:00.525627: Epoch time: 102.3 s +2026-04-09 14:12:01.642194: +2026-04-09 14:12:01.643698: Epoch 775 +2026-04-09 14:12:01.645308: Current learning rate: 0.00261 +2026-04-09 14:13:43.958452: train_loss -0.3221 +2026-04-09 14:13:43.961786: val_loss -0.2641 +2026-04-09 14:13:43.963079: Pseudo dice [0.4392, 0.0, 0.6666, 0.1421, 0.331, 0.7453, 0.8505] +2026-04-09 14:13:43.964703: Epoch time: 102.32 s +2026-04-09 14:13:45.071587: +2026-04-09 14:13:45.073095: Epoch 776 +2026-04-09 14:13:45.074323: Current learning rate: 0.0026 +2026-04-09 14:15:27.299268: train_loss -0.3119 +2026-04-09 14:15:27.303947: val_loss -0.2699 +2026-04-09 14:15:27.305331: Pseudo dice [0.5953, 0.0, 0.8069, 0.8064, 0.4108, 0.7477, 0.9482] +2026-04-09 14:15:27.306751: Epoch time: 102.23 s +2026-04-09 14:15:28.452919: +2026-04-09 14:15:28.454531: Epoch 777 +2026-04-09 14:15:28.455782: Current learning rate: 0.00259 +2026-04-09 14:17:10.898827: train_loss -0.3188 +2026-04-09 14:17:10.903332: val_loss -0.2527 +2026-04-09 14:17:10.904921: Pseudo dice [0.1042, 0.0, 0.8387, 0.7511, 0.4529, 0.7866, 0.8831] +2026-04-09 14:17:10.906487: Epoch time: 102.45 s +2026-04-09 14:17:12.022045: +2026-04-09 14:17:12.023176: Epoch 778 +2026-04-09 14:17:12.024400: Current learning rate: 0.00258 +2026-04-09 14:18:54.344954: train_loss -0.3392 +2026-04-09 14:18:54.348453: val_loss -0.2695 +2026-04-09 14:18:54.349866: Pseudo dice [0.5811, 0.0, 0.6503, 0.7356, 0.583, 0.7935, 0.913] +2026-04-09 14:18:54.351212: Epoch time: 102.33 s +2026-04-09 14:18:55.499177: +2026-04-09 14:18:55.508013: Epoch 779 +2026-04-09 14:18:55.509204: Current learning rate: 0.00257 +2026-04-09 14:20:37.726641: train_loss -0.3523 +2026-04-09 14:20:37.733851: val_loss -0.3435 +2026-04-09 14:20:37.735404: Pseudo dice [0.6921, 0.0, 0.7749, 0.7321, 0.5397, 0.6046, 0.9327] +2026-04-09 14:20:37.736948: Epoch time: 102.23 s +2026-04-09 14:20:38.856501: +2026-04-09 14:20:38.857930: Epoch 780 +2026-04-09 14:20:38.859133: Current learning rate: 0.00256 
+2026-04-09 14:22:21.230909: train_loss -0.4228 +2026-04-09 14:22:21.236299: val_loss -0.3793 +2026-04-09 14:22:21.239145: Pseudo dice [0.613, 0.0, 0.7727, 0.8976, 0.4593, 0.8069, 0.9322] +2026-04-09 14:22:21.240640: Epoch time: 102.38 s +2026-04-09 14:22:22.395883: +2026-04-09 14:22:22.397151: Epoch 781 +2026-04-09 14:22:22.398413: Current learning rate: 0.00255 +2026-04-09 14:24:04.642910: train_loss -0.4357 +2026-04-09 14:24:04.646956: val_loss -0.3936 +2026-04-09 14:24:04.648266: Pseudo dice [0.8081, 0.0, 0.8764, 0.7341, 0.0, 0.5794, 0.8583] +2026-04-09 14:24:04.649669: Epoch time: 102.25 s +2026-04-09 14:24:05.759185: +2026-04-09 14:24:05.760285: Epoch 782 +2026-04-09 14:24:05.761393: Current learning rate: 0.00254 +2026-04-09 14:25:48.159119: train_loss -0.4235 +2026-04-09 14:25:48.165066: val_loss -0.3588 +2026-04-09 14:25:48.167026: Pseudo dice [0.5045, 0.0, 0.7055, 0.2325, 0.4198, 0.5611, 0.9098] +2026-04-09 14:25:48.169013: Epoch time: 102.4 s +2026-04-09 14:25:49.303027: +2026-04-09 14:25:49.304994: Epoch 783 +2026-04-09 14:25:49.306178: Current learning rate: 0.00253 +2026-04-09 14:27:31.656157: train_loss -0.428 +2026-04-09 14:27:31.660106: val_loss -0.3865 +2026-04-09 14:27:31.661965: Pseudo dice [0.447, 0.0, 0.7746, 0.7277, 0.5717, 0.7485, 0.8528] +2026-04-09 14:27:31.663466: Epoch time: 102.36 s +2026-04-09 14:27:32.788216: +2026-04-09 14:27:32.789874: Epoch 784 +2026-04-09 14:27:32.791099: Current learning rate: 0.00252 +2026-04-09 14:29:15.164198: train_loss -0.423 +2026-04-09 14:29:15.168048: val_loss -0.3518 +2026-04-09 14:29:15.169568: Pseudo dice [0.5123, 0.0, 0.8623, 0.2143, 0.2763, 0.8076, 0.8673] +2026-04-09 14:29:15.171597: Epoch time: 102.38 s +2026-04-09 14:29:16.305366: +2026-04-09 14:29:16.306817: Epoch 785 +2026-04-09 14:29:16.308260: Current learning rate: 0.00251 +2026-04-09 14:30:58.820531: train_loss -0.4173 +2026-04-09 14:30:58.825660: val_loss -0.3545 +2026-04-09 14:30:58.827144: Pseudo dice [0.6412, 0.0, 0.564, 0.4048, 0.5851, 
0.556, 0.5051] +2026-04-09 14:30:58.828696: Epoch time: 102.52 s +2026-04-09 14:30:59.942064: +2026-04-09 14:30:59.943400: Epoch 786 +2026-04-09 14:30:59.944680: Current learning rate: 0.0025 +2026-04-09 14:32:42.497358: train_loss -0.4265 +2026-04-09 14:32:42.501407: val_loss -0.3918 +2026-04-09 14:32:42.502756: Pseudo dice [0.7218, 0.0, 0.7824, 0.4617, 0.3646, 0.8815, 0.7035] +2026-04-09 14:32:42.504322: Epoch time: 102.56 s +2026-04-09 14:32:43.628682: +2026-04-09 14:32:43.630143: Epoch 787 +2026-04-09 14:32:43.631356: Current learning rate: 0.00249 +2026-04-09 14:34:26.164613: train_loss -0.4281 +2026-04-09 14:34:26.168117: val_loss -0.3661 +2026-04-09 14:34:26.169832: Pseudo dice [0.512, 0.0, 0.7466, 0.6169, 0.6561, 0.5055, 0.9318] +2026-04-09 14:34:26.171189: Epoch time: 102.54 s +2026-04-09 14:34:27.303239: +2026-04-09 14:34:27.304652: Epoch 788 +2026-04-09 14:34:27.306417: Current learning rate: 0.00248 +2026-04-09 14:36:09.864434: train_loss -0.4368 +2026-04-09 14:36:09.869854: val_loss -0.4142 +2026-04-09 14:36:09.871677: Pseudo dice [0.414, 0.0, 0.816, 0.5015, 0.5892, 0.8585, 0.8302] +2026-04-09 14:36:09.873367: Epoch time: 102.56 s +2026-04-09 14:36:11.007130: +2026-04-09 14:36:11.008912: Epoch 789 +2026-04-09 14:36:11.010538: Current learning rate: 0.00247 +2026-04-09 14:37:53.441729: train_loss -0.4229 +2026-04-09 14:37:53.454402: val_loss -0.3233 +2026-04-09 14:37:53.456341: Pseudo dice [0.5183, 0.0, 0.5551, 0.302, 0.6834, 0.7289, 0.8924] +2026-04-09 14:37:53.458497: Epoch time: 102.44 s +2026-04-09 14:37:54.581183: +2026-04-09 14:37:54.582773: Epoch 790 +2026-04-09 14:37:54.584059: Current learning rate: 0.00245 +2026-04-09 14:39:36.887371: train_loss -0.4184 +2026-04-09 14:39:36.893183: val_loss -0.3758 +2026-04-09 14:39:36.894676: Pseudo dice [0.6229, 0.0, 0.6283, 0.6147, 0.347, 0.5927, 0.9288] +2026-04-09 14:39:36.896237: Epoch time: 102.31 s +2026-04-09 14:39:38.042073: +2026-04-09 14:39:38.043832: Epoch 791 +2026-04-09 14:39:38.045868: Current 
learning rate: 0.00244 +2026-04-09 14:41:20.371387: train_loss -0.4337 +2026-04-09 14:41:20.378684: val_loss -0.3835 +2026-04-09 14:41:20.381263: Pseudo dice [0.1136, 0.0, 0.7837, 0.7237, 0.6074, 0.3886, 0.5537] +2026-04-09 14:41:20.383041: Epoch time: 102.33 s +2026-04-09 14:41:22.564396: +2026-04-09 14:41:22.566294: Epoch 792 +2026-04-09 14:41:22.567639: Current learning rate: 0.00243 +2026-04-09 14:43:05.084224: train_loss -0.4483 +2026-04-09 14:43:05.087954: val_loss -0.3986 +2026-04-09 14:43:05.089495: Pseudo dice [0.5775, 0.0, 0.7595, 0.8583, 0.5913, 0.6574, 0.9327] +2026-04-09 14:43:05.091006: Epoch time: 102.52 s +2026-04-09 14:43:06.211489: +2026-04-09 14:43:06.212848: Epoch 793 +2026-04-09 14:43:06.214027: Current learning rate: 0.00242 +2026-04-09 14:44:48.709362: train_loss -0.437 +2026-04-09 14:44:48.714154: val_loss -0.4003 +2026-04-09 14:44:48.716117: Pseudo dice [0.5115, 0.0, 0.6343, 0.6502, 0.4663, 0.7412, 0.9206] +2026-04-09 14:44:48.718313: Epoch time: 102.5 s +2026-04-09 14:44:49.838623: +2026-04-09 14:44:49.839922: Epoch 794 +2026-04-09 14:44:49.841188: Current learning rate: 0.00241 +2026-04-09 14:46:32.228086: train_loss -0.438 +2026-04-09 14:46:32.235317: val_loss -0.3985 +2026-04-09 14:46:32.237192: Pseudo dice [0.3548, 0.0, 0.865, 0.7177, 0.5641, 0.8684, 0.9238] +2026-04-09 14:46:32.238412: Epoch time: 102.39 s +2026-04-09 14:46:33.376509: +2026-04-09 14:46:33.378595: Epoch 795 +2026-04-09 14:46:33.379888: Current learning rate: 0.0024 +2026-04-09 14:48:15.831962: train_loss -0.4296 +2026-04-09 14:48:15.856832: val_loss -0.382 +2026-04-09 14:48:15.858772: Pseudo dice [0.7222, 0.0, 0.7699, 0.6152, 0.5345, 0.8583, 0.8156] +2026-04-09 14:48:15.860601: Epoch time: 102.46 s +2026-04-09 14:48:16.997218: +2026-04-09 14:48:16.998548: Epoch 796 +2026-04-09 14:48:16.999765: Current learning rate: 0.00239 +2026-04-09 14:49:59.507103: train_loss -0.4331 +2026-04-09 14:49:59.511176: val_loss -0.3549 +2026-04-09 14:49:59.512617: Pseudo dice [0.4156, 
0.0, 0.7828, 0.3578, 0.585, 0.6088, 0.8875] +2026-04-09 14:49:59.513809: Epoch time: 102.51 s +2026-04-09 14:50:00.642651: +2026-04-09 14:50:00.643945: Epoch 797 +2026-04-09 14:50:00.645397: Current learning rate: 0.00238 +2026-04-09 14:51:43.052689: train_loss -0.4298 +2026-04-09 14:51:43.058668: val_loss -0.3535 +2026-04-09 14:51:43.060654: Pseudo dice [0.7161, 0.0, 0.8028, 0.5787, 0.4553, 0.3458, 0.9303] +2026-04-09 14:51:43.062482: Epoch time: 102.41 s +2026-04-09 14:51:44.208730: +2026-04-09 14:51:44.210452: Epoch 798 +2026-04-09 14:51:44.211924: Current learning rate: 0.00237 +2026-04-09 14:53:26.677114: train_loss -0.4378 +2026-04-09 14:53:26.681886: val_loss -0.3949 +2026-04-09 14:53:26.683199: Pseudo dice [0.7048, 0.0, 0.6618, 0.3162, 0.5989, 0.7158, 0.6321] +2026-04-09 14:53:26.684764: Epoch time: 102.47 s +2026-04-09 14:53:27.807020: +2026-04-09 14:53:27.808305: Epoch 799 +2026-04-09 14:53:27.809634: Current learning rate: 0.00236 +2026-04-09 14:55:10.318456: train_loss -0.4396 +2026-04-09 14:55:10.323821: val_loss -0.3801 +2026-04-09 14:55:10.325139: Pseudo dice [0.6669, 0.0, 0.8301, 0.6145, 0.5048, 0.6029, 0.9186] +2026-04-09 14:55:10.326806: Epoch time: 102.51 s +2026-04-09 14:55:13.525648: +2026-04-09 14:55:13.527030: Epoch 800 +2026-04-09 14:55:13.528814: Current learning rate: 0.00235 +2026-04-09 14:56:55.922660: train_loss -0.4334 +2026-04-09 14:56:55.927093: val_loss -0.3372 +2026-04-09 14:56:55.928716: Pseudo dice [0.3, 0.0, 0.7667, 0.8476, 0.5268, 0.378, 0.5546] +2026-04-09 14:56:55.930601: Epoch time: 102.4 s +2026-04-09 14:56:57.050890: +2026-04-09 14:56:57.052835: Epoch 801 +2026-04-09 14:56:57.054185: Current learning rate: 0.00234 +2026-04-09 14:58:39.545881: train_loss -0.4119 +2026-04-09 14:58:39.551193: val_loss -0.3667 +2026-04-09 14:58:39.553358: Pseudo dice [0.4907, 0.0, 0.761, 0.7318, 0.5705, 0.5475, 0.5471] +2026-04-09 14:58:39.554892: Epoch time: 102.5 s +2026-04-09 14:58:40.694189: +2026-04-09 14:58:40.698603: Epoch 802 
+2026-04-09 14:58:40.702036: Current learning rate: 0.00233 +2026-04-09 15:00:23.234587: train_loss -0.4346 +2026-04-09 15:00:23.240044: val_loss -0.3844 +2026-04-09 15:00:23.241663: Pseudo dice [0.3732, 0.0, 0.8154, 0.289, 0.2831, 0.3631, 0.453] +2026-04-09 15:00:23.243366: Epoch time: 102.54 s +2026-04-09 15:00:24.371700: +2026-04-09 15:00:24.373214: Epoch 803 +2026-04-09 15:00:24.375099: Current learning rate: 0.00232 +2026-04-09 15:02:07.207202: train_loss -0.4452 +2026-04-09 15:02:07.219736: val_loss -0.3662 +2026-04-09 15:02:07.223190: Pseudo dice [0.3328, 0.0, 0.8054, 0.6484, 0.5788, 0.4928, 0.3734] +2026-04-09 15:02:07.226992: Epoch time: 102.84 s +2026-04-09 15:02:08.355995: +2026-04-09 15:02:08.357605: Epoch 804 +2026-04-09 15:02:08.359104: Current learning rate: 0.00231 +2026-04-09 15:03:50.971492: train_loss -0.4415 +2026-04-09 15:03:50.976772: val_loss -0.377 +2026-04-09 15:03:50.978770: Pseudo dice [0.8165, 0.0, 0.8075, 0.4858, 0.5169, 0.4245, 0.8965] +2026-04-09 15:03:50.980609: Epoch time: 102.62 s +2026-04-09 15:03:52.109736: +2026-04-09 15:03:52.111274: Epoch 805 +2026-04-09 15:03:52.112551: Current learning rate: 0.0023 +2026-04-09 15:05:34.736077: train_loss -0.4461 +2026-04-09 15:05:34.740468: val_loss -0.3924 +2026-04-09 15:05:34.742086: Pseudo dice [0.7502, 0.0, 0.7794, 0.7779, 0.5164, 0.8343, 0.8656] +2026-04-09 15:05:34.743616: Epoch time: 102.63 s +2026-04-09 15:05:35.878605: +2026-04-09 15:05:35.880027: Epoch 806 +2026-04-09 15:05:35.881866: Current learning rate: 0.00229 +2026-04-09 15:07:18.740816: train_loss -0.442 +2026-04-09 15:07:18.744983: val_loss -0.389 +2026-04-09 15:07:18.747054: Pseudo dice [0.5516, 0.0, 0.8359, 0.4781, 0.5029, 0.7144, 0.8895] +2026-04-09 15:07:18.749849: Epoch time: 102.87 s +2026-04-09 15:07:19.886980: +2026-04-09 15:07:19.888342: Epoch 807 +2026-04-09 15:07:19.889471: Current learning rate: 0.00228 +2026-04-09 15:09:02.390132: train_loss -0.4475 +2026-04-09 15:09:02.394291: val_loss -0.3926 +2026-04-09 
15:09:02.396486: Pseudo dice [0.7603, 0.0, 0.7655, 0.2146, 0.4661, 0.7483, 0.9061] +2026-04-09 15:09:02.397727: Epoch time: 102.51 s +2026-04-09 15:09:03.520293: +2026-04-09 15:09:03.521610: Epoch 808 +2026-04-09 15:09:03.522885: Current learning rate: 0.00226 +2026-04-09 15:10:46.107809: train_loss -0.4477 +2026-04-09 15:10:46.112791: val_loss -0.3782 +2026-04-09 15:10:46.114965: Pseudo dice [0.4844, 0.0, 0.7886, 0.6182, 0.3587, 0.8573, 0.6432] +2026-04-09 15:10:46.116925: Epoch time: 102.59 s +2026-04-09 15:10:47.251531: +2026-04-09 15:10:47.253023: Epoch 809 +2026-04-09 15:10:47.254504: Current learning rate: 0.00225 +2026-04-09 15:12:33.883838: train_loss -0.4531 +2026-04-09 15:12:33.888287: val_loss -0.3786 +2026-04-09 15:12:33.889904: Pseudo dice [0.658, 0.0, 0.7663, 0.7786, 0.589, 0.6427, 0.8864] +2026-04-09 15:12:33.891798: Epoch time: 106.64 s +2026-04-09 15:12:35.032106: +2026-04-09 15:12:35.036089: Epoch 810 +2026-04-09 15:12:35.039072: Current learning rate: 0.00224 +2026-04-09 15:14:21.322516: train_loss -0.4354 +2026-04-09 15:14:21.326817: val_loss -0.3376 +2026-04-09 15:14:21.328377: Pseudo dice [0.1916, 0.0, 0.8201, 0.5209, 0.4116, 0.8428, 0.8067] +2026-04-09 15:14:21.329746: Epoch time: 106.29 s +2026-04-09 15:14:22.475096: +2026-04-09 15:14:22.477067: Epoch 811 +2026-04-09 15:14:22.479263: Current learning rate: 0.00223 +2026-04-09 15:16:06.244085: train_loss -0.4294 +2026-04-09 15:16:06.249962: val_loss -0.3143 +2026-04-09 15:16:06.251589: Pseudo dice [0.742, 0.0, 0.554, 0.4306, 0.4343, 0.3186, 0.2707] +2026-04-09 15:16:06.253211: Epoch time: 103.77 s +2026-04-09 15:16:07.371428: +2026-04-09 15:16:07.373129: Epoch 812 +2026-04-09 15:16:07.374750: Current learning rate: 0.00222 +2026-04-09 15:17:50.053143: train_loss -0.4188 +2026-04-09 15:17:50.057804: val_loss -0.3636 +2026-04-09 15:17:50.059702: Pseudo dice [0.5649, 0.0, 0.8154, 0.1504, 0.459, 0.3816, 0.4311] +2026-04-09 15:17:50.061548: Epoch time: 102.68 s +2026-04-09 15:17:51.196256: 
+2026-04-09 15:17:51.197496: Epoch 813 +2026-04-09 15:17:51.198886: Current learning rate: 0.00221 +2026-04-09 15:19:33.781154: train_loss -0.4293 +2026-04-09 15:19:33.785271: val_loss -0.3899 +2026-04-09 15:19:33.787627: Pseudo dice [0.4534, 0.0, 0.8331, 0.4117, 0.5587, 0.509, 0.8272] +2026-04-09 15:19:33.789595: Epoch time: 102.59 s +2026-04-09 15:19:34.933522: +2026-04-09 15:19:34.935077: Epoch 814 +2026-04-09 15:19:34.936329: Current learning rate: 0.0022 +2026-04-09 15:21:17.441946: train_loss -0.435 +2026-04-09 15:21:17.446636: val_loss -0.4004 +2026-04-09 15:21:17.447997: Pseudo dice [0.7248, 0.0, 0.7781, 0.735, 0.2586, 0.7633, 0.9094] +2026-04-09 15:21:17.450087: Epoch time: 102.51 s +2026-04-09 15:21:18.576566: +2026-04-09 15:21:18.577879: Epoch 815 +2026-04-09 15:21:18.579077: Current learning rate: 0.00219 +2026-04-09 15:23:01.306135: train_loss -0.4553 +2026-04-09 15:23:01.310216: val_loss -0.4204 +2026-04-09 15:23:01.311733: Pseudo dice [0.773, 0.0, 0.8061, 0.717, 0.594, 0.6041, 0.7931] +2026-04-09 15:23:01.313267: Epoch time: 102.73 s +2026-04-09 15:23:02.455084: +2026-04-09 15:23:02.456565: Epoch 816 +2026-04-09 15:23:02.457991: Current learning rate: 0.00218 +2026-04-09 15:24:44.972828: train_loss -0.447 +2026-04-09 15:24:44.978672: val_loss -0.3864 +2026-04-09 15:24:44.980775: Pseudo dice [0.689, 0.0, 0.8634, 0.4764, 0.4178, 0.6398, 0.8089] +2026-04-09 15:24:44.983138: Epoch time: 102.52 s +2026-04-09 15:24:46.100295: +2026-04-09 15:24:46.106611: Epoch 817 +2026-04-09 15:24:46.111934: Current learning rate: 0.00217 +2026-04-09 15:26:28.671086: train_loss -0.4348 +2026-04-09 15:26:28.675513: val_loss -0.3728 +2026-04-09 15:26:28.677141: Pseudo dice [0.6438, 0.0, 0.787, 0.721, 0.5147, 0.599, 0.9424] +2026-04-09 15:26:28.678590: Epoch time: 102.57 s +2026-04-09 15:26:29.811869: +2026-04-09 15:26:29.813216: Epoch 818 +2026-04-09 15:26:29.814528: Current learning rate: 0.00216 +2026-04-09 15:28:12.552512: train_loss -0.4395 +2026-04-09 15:28:12.557825: 
val_loss -0.3964 +2026-04-09 15:28:12.559684: Pseudo dice [0.101, 0.0, 0.7741, 0.8147, 0.5292, 0.8785, 0.8782] +2026-04-09 15:28:12.561447: Epoch time: 102.74 s +2026-04-09 15:28:13.690743: +2026-04-09 15:28:13.692176: Epoch 819 +2026-04-09 15:28:13.693685: Current learning rate: 0.00215 +2026-04-09 15:29:56.318355: train_loss -0.44 +2026-04-09 15:29:56.327178: val_loss -0.4064 +2026-04-09 15:29:56.330270: Pseudo dice [0.454, 0.0, 0.8422, 0.6272, 0.4058, 0.7077, 0.6013] +2026-04-09 15:29:56.331858: Epoch time: 102.63 s +2026-04-09 15:29:57.418040: +2026-04-09 15:29:57.419899: Epoch 820 +2026-04-09 15:29:57.421098: Current learning rate: 0.00214 +2026-04-09 15:31:40.310328: train_loss -0.4286 +2026-04-09 15:31:40.321418: val_loss -0.3876 +2026-04-09 15:31:40.323650: Pseudo dice [0.767, 0.0, 0.7917, 0.5107, 0.6848, 0.0714, 0.7182] +2026-04-09 15:31:40.326981: Epoch time: 102.9 s +2026-04-09 15:31:41.439890: +2026-04-09 15:31:41.441267: Epoch 821 +2026-04-09 15:31:41.442599: Current learning rate: 0.00213 +2026-04-09 15:33:25.422078: train_loss -0.4425 +2026-04-09 15:33:25.426239: val_loss -0.3978 +2026-04-09 15:33:25.427922: Pseudo dice [0.8294, 0.0, 0.8047, 0.8688, 0.5393, 0.6978, 0.8588] +2026-04-09 15:33:25.429299: Epoch time: 103.99 s +2026-04-09 15:33:26.498960: +2026-04-09 15:33:26.500289: Epoch 822 +2026-04-09 15:33:26.501465: Current learning rate: 0.00212 +2026-04-09 15:35:09.133674: train_loss -0.4433 +2026-04-09 15:35:09.137912: val_loss -0.3871 +2026-04-09 15:35:09.140085: Pseudo dice [0.8153, 0.0, 0.6464, 0.2306, 0.4434, 0.8591, 0.8496] +2026-04-09 15:35:09.142886: Epoch time: 102.64 s +2026-04-09 15:35:10.214032: +2026-04-09 15:35:10.215508: Epoch 823 +2026-04-09 15:35:10.216898: Current learning rate: 0.0021 +2026-04-09 15:36:53.063087: train_loss -0.4425 +2026-04-09 15:36:53.066787: val_loss -0.4225 +2026-04-09 15:36:53.068474: Pseudo dice [0.592, 0.0, 0.8824, 0.6916, 0.4722, 0.7486, 0.8442] +2026-04-09 15:36:53.069747: Epoch time: 102.85 s 
+2026-04-09 15:36:54.132280: +2026-04-09 15:36:54.134080: Epoch 824 +2026-04-09 15:36:54.135331: Current learning rate: 0.00209 +2026-04-09 15:38:36.738041: train_loss -0.4502 +2026-04-09 15:38:36.742403: val_loss -0.396 +2026-04-09 15:38:36.743957: Pseudo dice [0.5344, 0.0, 0.8268, 0.7929, 0.5901, 0.8076, 0.8734] +2026-04-09 15:38:36.745528: Epoch time: 102.61 s +2026-04-09 15:38:37.829430: +2026-04-09 15:38:37.831595: Epoch 825 +2026-04-09 15:38:37.833329: Current learning rate: 0.00208 +2026-04-09 15:40:20.670716: train_loss -0.4554 +2026-04-09 15:40:20.675498: val_loss -0.4056 +2026-04-09 15:40:20.677408: Pseudo dice [0.7164, 0.0, 0.8026, 0.8318, 0.4237, 0.6927, 0.9422] +2026-04-09 15:40:20.678939: Epoch time: 102.84 s +2026-04-09 15:40:21.736343: +2026-04-09 15:40:21.738111: Epoch 826 +2026-04-09 15:40:21.739347: Current learning rate: 0.00207 +2026-04-09 15:42:04.388228: train_loss -0.4576 +2026-04-09 15:42:04.399827: val_loss -0.3732 +2026-04-09 15:42:04.401448: Pseudo dice [0.4945, 0.0, 0.8011, 0.6143, 0.4331, 0.4022, 0.803] +2026-04-09 15:42:04.403304: Epoch time: 102.65 s +2026-04-09 15:42:05.469474: +2026-04-09 15:42:05.471343: Epoch 827 +2026-04-09 15:42:05.472749: Current learning rate: 0.00206 +2026-04-09 15:43:53.236809: train_loss -0.43 +2026-04-09 15:43:53.242614: val_loss -0.4119 +2026-04-09 15:43:53.244732: Pseudo dice [0.4804, 0.0, 0.8072, 0.3983, 0.4623, 0.6574, 0.9424] +2026-04-09 15:43:53.247148: Epoch time: 107.77 s +2026-04-09 15:43:54.342697: +2026-04-09 15:43:54.345535: Epoch 828 +2026-04-09 15:43:54.348114: Current learning rate: 0.00205 +2026-04-09 15:45:36.787632: train_loss -0.4472 +2026-04-09 15:45:36.793385: val_loss -0.386 +2026-04-09 15:45:36.795012: Pseudo dice [0.5739, 0.0, 0.7784, 0.5667, 0.4742, 0.7355, 0.8637] +2026-04-09 15:45:36.796534: Epoch time: 102.45 s +2026-04-09 15:45:37.853637: +2026-04-09 15:45:37.857480: Epoch 829 +2026-04-09 15:45:37.860893: Current learning rate: 0.00204 +2026-04-09 15:47:20.457566: train_loss 
-0.4577 +2026-04-09 15:47:20.461413: val_loss -0.4433 +2026-04-09 15:47:20.462847: Pseudo dice [0.6076, 0.0, 0.8042, 0.8884, 0.5796, 0.9088, 0.9089] +2026-04-09 15:47:20.464207: Epoch time: 102.61 s +2026-04-09 15:47:21.537605: +2026-04-09 15:47:21.539180: Epoch 830 +2026-04-09 15:47:21.540626: Current learning rate: 0.00203 +2026-04-09 15:49:04.133213: train_loss -0.4667 +2026-04-09 15:49:04.139824: val_loss -0.4216 +2026-04-09 15:49:04.141658: Pseudo dice [0.6956, 0.0, 0.8868, 0.719, 0.394, 0.8393, 0.6791] +2026-04-09 15:49:04.143854: Epoch time: 102.6 s +2026-04-09 15:49:05.201511: +2026-04-09 15:49:05.203303: Epoch 831 +2026-04-09 15:49:05.204649: Current learning rate: 0.00202 +2026-04-09 15:50:48.697183: train_loss -0.4701 +2026-04-09 15:50:48.701575: val_loss -0.4232 +2026-04-09 15:50:48.703346: Pseudo dice [0.785, 0.0, 0.7641, 0.8282, 0.5839, 0.8606, 0.8751] +2026-04-09 15:50:48.704990: Epoch time: 103.5 s +2026-04-09 15:50:49.787077: +2026-04-09 15:50:49.788403: Epoch 832 +2026-04-09 15:50:49.789626: Current learning rate: 0.00201 +2026-04-09 15:52:32.290221: train_loss -0.4517 +2026-04-09 15:52:32.294629: val_loss -0.3735 +2026-04-09 15:52:32.296272: Pseudo dice [0.0, 0.0, 0.8569, 0.8519, 0.254, 0.798, 0.8729] +2026-04-09 15:52:32.297681: Epoch time: 102.51 s +2026-04-09 15:52:33.357668: +2026-04-09 15:52:33.359142: Epoch 833 +2026-04-09 15:52:33.360394: Current learning rate: 0.002 +2026-04-09 15:54:15.924109: train_loss -0.443 +2026-04-09 15:54:15.930376: val_loss -0.4353 +2026-04-09 15:54:15.931705: Pseudo dice [0.7319, 0.0, 0.7497, 0.8118, 0.5667, 0.8656, 0.8445] +2026-04-09 15:54:15.933423: Epoch time: 102.57 s +2026-04-09 15:54:16.992974: +2026-04-09 15:54:16.994325: Epoch 834 +2026-04-09 15:54:16.995636: Current learning rate: 0.00199 +2026-04-09 15:55:59.749286: train_loss -0.4346 +2026-04-09 15:55:59.754337: val_loss -0.3782 +2026-04-09 15:55:59.756362: Pseudo dice [0.5026, 0.0, 0.821, 0.6843, 0.3314, 0.8854, 0.905] +2026-04-09 15:55:59.757698: 
Epoch time: 102.76 s +2026-04-09 15:56:00.831326: +2026-04-09 15:56:00.832857: Epoch 835 +2026-04-09 15:56:00.834192: Current learning rate: 0.00198 +2026-04-09 15:57:43.468333: train_loss -0.4583 +2026-04-09 15:57:43.471978: val_loss -0.3864 +2026-04-09 15:57:43.473850: Pseudo dice [0.5066, 0.0, 0.7235, 0.7485, 0.4458, 0.3052, 0.9223] +2026-04-09 15:57:43.475381: Epoch time: 102.64 s +2026-04-09 15:57:44.557816: +2026-04-09 15:57:44.559258: Epoch 836 +2026-04-09 15:57:44.560507: Current learning rate: 0.00196 +2026-04-09 15:59:27.360151: train_loss -0.4454 +2026-04-09 15:59:27.364778: val_loss -0.3852 +2026-04-09 15:59:27.367009: Pseudo dice [0.6167, 0.0, 0.785, 0.5035, 0.7013, 0.84, 0.7818] +2026-04-09 15:59:27.369125: Epoch time: 102.81 s +2026-04-09 15:59:28.422026: +2026-04-09 15:59:28.424953: Epoch 837 +2026-04-09 15:59:28.426263: Current learning rate: 0.00195 +2026-04-09 16:01:11.148615: train_loss -0.4587 +2026-04-09 16:01:11.153422: val_loss -0.3949 +2026-04-09 16:01:11.155036: Pseudo dice [0.8254, 0.0, 0.7681, 0.7671, 0.4559, 0.7253, 0.8787] +2026-04-09 16:01:11.156574: Epoch time: 102.73 s +2026-04-09 16:01:12.237401: +2026-04-09 16:01:12.238766: Epoch 838 +2026-04-09 16:01:12.240145: Current learning rate: 0.00194 +2026-04-09 16:02:54.968051: train_loss -0.452 +2026-04-09 16:02:54.972178: val_loss -0.4229 +2026-04-09 16:02:54.974277: Pseudo dice [0.7553, 0.0, 0.8279, 0.7751, 0.6442, 0.7839, 0.9065] +2026-04-09 16:02:54.975931: Epoch time: 102.73 s +2026-04-09 16:02:56.046749: +2026-04-09 16:02:56.048269: Epoch 839 +2026-04-09 16:02:56.049755: Current learning rate: 0.00193 +2026-04-09 16:04:38.807351: train_loss -0.4689 +2026-04-09 16:04:38.821786: val_loss -0.3706 +2026-04-09 16:04:38.827232: Pseudo dice [0.5054, 0.0, 0.8298, 0.594, 0.2967, 0.6003, 0.4526] +2026-04-09 16:04:38.833280: Epoch time: 102.76 s +2026-04-09 16:04:39.903386: +2026-04-09 16:04:39.904673: Epoch 840 +2026-04-09 16:04:39.906133: Current learning rate: 0.00192 +2026-04-09 
16:06:22.624039: train_loss -0.4601 +2026-04-09 16:06:22.630858: val_loss -0.3502 +2026-04-09 16:06:22.632537: Pseudo dice [0.5378, 0.0, 0.7961, 0.4015, 0.2141, 0.6363, 0.7873] +2026-04-09 16:06:22.636785: Epoch time: 102.72 s +2026-04-09 16:06:23.716507: +2026-04-09 16:06:23.720714: Epoch 841 +2026-04-09 16:06:23.725512: Current learning rate: 0.00191 +2026-04-09 16:08:06.447980: train_loss -0.4578 +2026-04-09 16:08:06.453309: val_loss -0.4133 +2026-04-09 16:08:06.455341: Pseudo dice [0.6948, 0.0, 0.7344, 0.4333, 0.608, 0.8254, 0.789] +2026-04-09 16:08:06.457026: Epoch time: 102.73 s +2026-04-09 16:08:07.544775: +2026-04-09 16:08:07.546522: Epoch 842 +2026-04-09 16:08:07.548544: Current learning rate: 0.0019 +2026-04-09 16:09:50.154401: train_loss -0.4468 +2026-04-09 16:09:50.158765: val_loss -0.4164 +2026-04-09 16:09:50.161072: Pseudo dice [0.4033, 0.0, 0.6349, 0.7606, 0.3663, 0.7326, 0.9391] +2026-04-09 16:09:50.162584: Epoch time: 102.61 s +2026-04-09 16:09:51.216318: +2026-04-09 16:09:51.217953: Epoch 843 +2026-04-09 16:09:51.219109: Current learning rate: 0.00189 +2026-04-09 16:11:33.880120: train_loss -0.4486 +2026-04-09 16:11:33.885425: val_loss -0.4127 +2026-04-09 16:11:33.887103: Pseudo dice [0.5969, 0.0, 0.7343, 0.76, 0.5592, 0.8, 0.9156] +2026-04-09 16:11:33.888757: Epoch time: 102.67 s +2026-04-09 16:11:34.964670: +2026-04-09 16:11:34.965903: Epoch 844 +2026-04-09 16:11:34.967161: Current learning rate: 0.00188 +2026-04-09 16:13:17.710310: train_loss -0.4604 +2026-04-09 16:13:17.714201: val_loss -0.3851 +2026-04-09 16:13:17.715505: Pseudo dice [0.5969, 0.0, 0.6781, 0.6979, 0.4737, 0.6269, 0.9271] +2026-04-09 16:13:17.716781: Epoch time: 102.75 s +2026-04-09 16:13:18.785311: +2026-04-09 16:13:18.788996: Epoch 845 +2026-04-09 16:13:18.790358: Current learning rate: 0.00187 +2026-04-09 16:15:01.354546: train_loss -0.4573 +2026-04-09 16:15:01.359306: val_loss -0.3952 +2026-04-09 16:15:01.360807: Pseudo dice [0.2848, 0.0, 0.8781, 0.0011, 0.4222, 0.6855, 
0.9068] +2026-04-09 16:15:01.362690: Epoch time: 102.57 s +2026-04-09 16:15:02.435084: +2026-04-09 16:15:02.436317: Epoch 846 +2026-04-09 16:15:02.437541: Current learning rate: 0.00186 +2026-04-09 16:16:44.865577: train_loss -0.449 +2026-04-09 16:16:44.869735: val_loss -0.3845 +2026-04-09 16:16:44.871214: Pseudo dice [0.805, 0.0, 0.7232, 0.7172, 0.5296, 0.866, 0.8942] +2026-04-09 16:16:44.872590: Epoch time: 102.43 s +2026-04-09 16:16:45.943025: +2026-04-09 16:16:45.945200: Epoch 847 +2026-04-09 16:16:45.947294: Current learning rate: 0.00185 +2026-04-09 16:18:28.638140: train_loss -0.4577 +2026-04-09 16:18:28.642931: val_loss -0.4014 +2026-04-09 16:18:28.645136: Pseudo dice [0.3543, 0.0, 0.8725, 0.5696, 0.643, 0.8002, 0.8576] +2026-04-09 16:18:28.646508: Epoch time: 102.7 s +2026-04-09 16:18:29.721792: +2026-04-09 16:18:29.723819: Epoch 848 +2026-04-09 16:18:29.725065: Current learning rate: 0.00184 +2026-04-09 16:20:12.338624: train_loss -0.4643 +2026-04-09 16:20:12.342379: val_loss -0.3506 +2026-04-09 16:20:12.343858: Pseudo dice [0.509, 0.0, 0.8195, 0.6622, 0.3689, 0.5366, 0.5785] +2026-04-09 16:20:12.345888: Epoch time: 102.62 s +2026-04-09 16:20:13.424189: +2026-04-09 16:20:13.426014: Epoch 849 +2026-04-09 16:20:13.427914: Current learning rate: 0.00182 +2026-04-09 16:21:55.848203: train_loss -0.433 +2026-04-09 16:21:55.852834: val_loss -0.4097 +2026-04-09 16:21:55.854334: Pseudo dice [0.7439, 0.0, 0.5662, 0.4175, 0.4258, 0.766, 0.7315] +2026-04-09 16:21:55.855981: Epoch time: 102.43 s +2026-04-09 16:21:58.616967: +2026-04-09 16:21:58.619517: Epoch 850 +2026-04-09 16:21:58.621716: Current learning rate: 0.00181 +2026-04-09 16:23:41.283812: train_loss -0.4559 +2026-04-09 16:23:41.289918: val_loss -0.408 +2026-04-09 16:23:41.291401: Pseudo dice [0.8489, 0.0, 0.6564, 0.6134, 0.5847, 0.7554, 0.9019] +2026-04-09 16:23:41.292710: Epoch time: 102.67 s +2026-04-09 16:23:42.346459: +2026-04-09 16:23:42.350049: Epoch 851 +2026-04-09 16:23:42.354921: Current learning 
rate: 0.0018 +2026-04-09 16:25:26.224313: train_loss -0.475 +2026-04-09 16:25:26.235019: val_loss -0.4234 +2026-04-09 16:25:26.236937: Pseudo dice [0.704, 0.0, 0.8592, 0.8583, 0.5412, 0.8514, 0.924] +2026-04-09 16:25:26.238526: Epoch time: 103.88 s +2026-04-09 16:25:27.305450: +2026-04-09 16:25:27.306966: Epoch 852 +2026-04-09 16:25:27.308295: Current learning rate: 0.00179 +2026-04-09 16:27:09.734886: train_loss -0.462 +2026-04-09 16:27:09.738654: val_loss -0.3869 +2026-04-09 16:27:09.740128: Pseudo dice [0.7844, 0.0, 0.7334, 0.2575, 0.3095, 0.6384, 0.9152] +2026-04-09 16:27:09.741580: Epoch time: 102.43 s +2026-04-09 16:27:10.812069: +2026-04-09 16:27:10.813585: Epoch 853 +2026-04-09 16:27:10.815677: Current learning rate: 0.00178 +2026-04-09 16:28:53.452298: train_loss -0.4454 +2026-04-09 16:28:53.475716: val_loss -0.3923 +2026-04-09 16:28:53.484794: Pseudo dice [0.7418, 0.0, 0.7641, 0.1023, 0.5591, 0.7862, 0.9212] +2026-04-09 16:28:53.486421: Epoch time: 102.64 s +2026-04-09 16:28:54.542514: +2026-04-09 16:28:54.544291: Epoch 854 +2026-04-09 16:28:54.545962: Current learning rate: 0.00177 +2026-04-09 16:30:37.293857: train_loss -0.4432 +2026-04-09 16:30:37.298399: val_loss -0.37 +2026-04-09 16:30:37.300513: Pseudo dice [0.3571, 0.0, 0.7258, 0.8221, 0.4908, 0.3332, 0.8506] +2026-04-09 16:30:37.302511: Epoch time: 102.75 s +2026-04-09 16:30:38.372583: +2026-04-09 16:30:38.373828: Epoch 855 +2026-04-09 16:30:38.375052: Current learning rate: 0.00176 +2026-04-09 16:32:21.210642: train_loss -0.4538 +2026-04-09 16:32:21.214936: val_loss -0.4177 +2026-04-09 16:32:21.216553: Pseudo dice [0.7196, 0.0, 0.7283, 0.8776, 0.4475, 0.8502, 0.7411] +2026-04-09 16:32:21.218351: Epoch time: 102.84 s +2026-04-09 16:32:22.298012: +2026-04-09 16:32:22.299825: Epoch 856 +2026-04-09 16:32:22.301567: Current learning rate: 0.00175 +2026-04-09 16:34:04.691926: train_loss -0.4539 +2026-04-09 16:34:04.695560: val_loss -0.3924 +2026-04-09 16:34:04.697227: Pseudo dice [0.5748, 0.0, 0.2818, 
0.7458, 0.5202, 0.8533, 0.7484] +2026-04-09 16:34:04.698394: Epoch time: 102.4 s +2026-04-09 16:34:05.753096: +2026-04-09 16:34:05.754747: Epoch 857 +2026-04-09 16:34:05.756256: Current learning rate: 0.00174 +2026-04-09 16:35:48.235235: train_loss -0.4624 +2026-04-09 16:35:48.240828: val_loss -0.3924 +2026-04-09 16:35:48.242280: Pseudo dice [0.7759, 0.0, 0.5855, 0.8149, 0.6401, 0.3913, 0.5913] +2026-04-09 16:35:48.244270: Epoch time: 102.49 s +2026-04-09 16:35:49.312734: +2026-04-09 16:35:49.314115: Epoch 858 +2026-04-09 16:35:49.315337: Current learning rate: 0.00173 +2026-04-09 16:37:31.966545: train_loss -0.4461 +2026-04-09 16:37:31.971638: val_loss -0.3814 +2026-04-09 16:37:31.973747: Pseudo dice [0.5811, 0.0, 0.8359, 0.8333, 0.444, 0.8495, 0.6044] +2026-04-09 16:37:31.975562: Epoch time: 102.66 s +2026-04-09 16:37:33.050090: +2026-04-09 16:37:33.051809: Epoch 859 +2026-04-09 16:37:33.053164: Current learning rate: 0.00172 +2026-04-09 16:39:15.605170: train_loss -0.4538 +2026-04-09 16:39:15.609179: val_loss -0.399 +2026-04-09 16:39:15.610613: Pseudo dice [0.2363, 0.0, 0.8664, 0.7844, 0.3661, 0.5856, 0.7986] +2026-04-09 16:39:15.611970: Epoch time: 102.56 s +2026-04-09 16:39:16.673714: +2026-04-09 16:39:16.675434: Epoch 860 +2026-04-09 16:39:16.677089: Current learning rate: 0.0017 +2026-04-09 16:40:59.343933: train_loss -0.4598 +2026-04-09 16:40:59.348469: val_loss -0.3888 +2026-04-09 16:40:59.350759: Pseudo dice [0.6133, 0.0, 0.8188, 0.7807, 0.4969, 0.3756, 0.6232] +2026-04-09 16:40:59.352936: Epoch time: 102.67 s +2026-04-09 16:41:00.437109: +2026-04-09 16:41:00.438448: Epoch 861 +2026-04-09 16:41:00.439678: Current learning rate: 0.00169 +2026-04-09 16:42:42.852612: train_loss -0.461 +2026-04-09 16:42:42.857183: val_loss -0.4261 +2026-04-09 16:42:42.858904: Pseudo dice [0.7344, 0.0, 0.8229, 0.8298, 0.5046, 0.6689, 0.8849] +2026-04-09 16:42:42.860615: Epoch time: 102.42 s +2026-04-09 16:42:43.938026: +2026-04-09 16:42:43.939896: Epoch 862 +2026-04-09 
16:42:43.941409: Current learning rate: 0.00168 +2026-04-09 16:44:26.454355: train_loss -0.4601 +2026-04-09 16:44:26.457838: val_loss -0.4181 +2026-04-09 16:44:26.459199: Pseudo dice [0.5855, 0.0, 0.8599, 0.8098, 0.641, 0.727, 0.9371] +2026-04-09 16:44:26.460811: Epoch time: 102.52 s +2026-04-09 16:44:27.518411: +2026-04-09 16:44:27.519985: Epoch 863 +2026-04-09 16:44:27.521201: Current learning rate: 0.00167 +2026-04-09 16:46:09.998614: train_loss -0.4648 +2026-04-09 16:46:10.002818: val_loss -0.4125 +2026-04-09 16:46:10.004349: Pseudo dice [0.6138, 0.0, 0.8199, 0.6103, 0.5181, 0.8509, 0.8907] +2026-04-09 16:46:10.005810: Epoch time: 102.48 s +2026-04-09 16:46:11.056485: +2026-04-09 16:46:11.058444: Epoch 864 +2026-04-09 16:46:11.059891: Current learning rate: 0.00166 +2026-04-09 16:47:53.651590: train_loss -0.4677 +2026-04-09 16:47:53.656745: val_loss -0.3775 +2026-04-09 16:47:53.658550: Pseudo dice [0.8662, 0.0, 0.655, 0.6182, 0.5025, 0.5621, 0.6498] +2026-04-09 16:47:53.660068: Epoch time: 102.6 s +2026-04-09 16:47:54.725702: +2026-04-09 16:47:54.727263: Epoch 865 +2026-04-09 16:47:54.728513: Current learning rate: 0.00165 +2026-04-09 16:49:37.079108: train_loss -0.4574 +2026-04-09 16:49:37.082569: val_loss -0.3876 +2026-04-09 16:49:37.084331: Pseudo dice [0.7764, 0.0, 0.729, 0.1967, 0.5959, 0.8048, 0.8124] +2026-04-09 16:49:37.085864: Epoch time: 102.36 s +2026-04-09 16:49:38.133020: +2026-04-09 16:49:38.134342: Epoch 866 +2026-04-09 16:49:38.135632: Current learning rate: 0.00164 +2026-04-09 16:51:20.494070: train_loss -0.4669 +2026-04-09 16:51:20.498720: val_loss -0.4243 +2026-04-09 16:51:20.500174: Pseudo dice [0.3795, 0.0, 0.8118, 0.3283, 0.5742, 0.854, 0.9271] +2026-04-09 16:51:20.501887: Epoch time: 102.36 s +2026-04-09 16:51:21.589515: +2026-04-09 16:51:21.594145: Epoch 867 +2026-04-09 16:51:21.600485: Current learning rate: 0.00163 +2026-04-09 16:53:04.565490: train_loss -0.4772 +2026-04-09 16:53:04.569771: val_loss -0.4366 +2026-04-09 16:53:04.571318: 
Pseudo dice [0.6355, 0.0, 0.6668, 0.6363, 0.5704, 0.6746, 0.8288] +2026-04-09 16:53:04.572769: Epoch time: 102.98 s +2026-04-09 16:53:05.631401: +2026-04-09 16:53:05.632895: Epoch 868 +2026-04-09 16:53:05.634540: Current learning rate: 0.00162 +2026-04-09 16:54:47.929286: train_loss -0.464 +2026-04-09 16:54:47.933502: val_loss -0.3933 +2026-04-09 16:54:47.934996: Pseudo dice [0.6067, 0.0, 0.7934, 0.6889, 0.6049, 0.8818, 0.8873] +2026-04-09 16:54:47.936312: Epoch time: 102.3 s +2026-04-09 16:54:49.004641: +2026-04-09 16:54:49.006068: Epoch 869 +2026-04-09 16:54:49.007286: Current learning rate: 0.00161 +2026-04-09 16:56:31.508653: train_loss -0.4628 +2026-04-09 16:56:31.513027: val_loss -0.4162 +2026-04-09 16:56:31.514521: Pseudo dice [0.4757, 0.0, 0.8282, 0.6184, 0.5898, 0.8053, 0.9422] +2026-04-09 16:56:31.516095: Epoch time: 102.51 s +2026-04-09 16:56:32.587098: +2026-04-09 16:56:32.588462: Epoch 870 +2026-04-09 16:56:32.590313: Current learning rate: 0.00159 +2026-04-09 16:58:15.106721: train_loss -0.4646 +2026-04-09 16:58:15.111142: val_loss -0.3932 +2026-04-09 16:58:15.112668: Pseudo dice [0.6752, 0.0, 0.5112, 0.5379, 0.4577, 0.4319, 0.9261] +2026-04-09 16:58:15.114860: Epoch time: 102.52 s +2026-04-09 16:58:16.179996: +2026-04-09 16:58:16.181385: Epoch 871 +2026-04-09 16:58:16.182806: Current learning rate: 0.00158 +2026-04-09 16:59:58.763267: train_loss -0.4676 +2026-04-09 16:59:58.767148: val_loss -0.3876 +2026-04-09 16:59:58.768857: Pseudo dice [0.3703, 0.0, 0.8708, 0.6662, 0.5005, 0.896, 0.9408] +2026-04-09 16:59:58.770381: Epoch time: 102.59 s +2026-04-09 17:00:00.784091: +2026-04-09 17:00:00.785511: Epoch 872 +2026-04-09 17:00:00.786717: Current learning rate: 0.00157 +2026-04-09 17:01:43.211038: train_loss -0.4784 +2026-04-09 17:01:43.215636: val_loss -0.4107 +2026-04-09 17:01:43.217215: Pseudo dice [0.5238, 0.0, 0.8592, 0.7522, 0.5468, 0.6616, 0.8093] +2026-04-09 17:01:43.218821: Epoch time: 102.43 s +2026-04-09 17:01:44.284499: +2026-04-09 
17:01:44.286168: Epoch 873 +2026-04-09 17:01:44.287480: Current learning rate: 0.00156 +2026-04-09 17:03:26.756228: train_loss -0.4607 +2026-04-09 17:03:26.761096: val_loss -0.387 +2026-04-09 17:03:26.762719: Pseudo dice [0.5596, 0.0, 0.7715, 0.0413, 0.5626, 0.6016, 0.8847] +2026-04-09 17:03:26.764155: Epoch time: 102.47 s +2026-04-09 17:03:27.820405: +2026-04-09 17:03:27.821864: Epoch 874 +2026-04-09 17:03:27.823307: Current learning rate: 0.00155 +2026-04-09 17:05:10.321176: train_loss -0.4576 +2026-04-09 17:05:10.326645: val_loss -0.3872 +2026-04-09 17:05:10.328599: Pseudo dice [0.5455, 0.0, 0.887, 0.7612, 0.6205, 0.5976, 0.9364] +2026-04-09 17:05:10.330410: Epoch time: 102.5 s +2026-04-09 17:05:11.380014: +2026-04-09 17:05:11.381498: Epoch 875 +2026-04-09 17:05:11.383218: Current learning rate: 0.00154 +2026-04-09 17:06:53.710362: train_loss -0.4646 +2026-04-09 17:06:53.715585: val_loss -0.4111 +2026-04-09 17:06:53.717511: Pseudo dice [0.5964, 0.0, 0.8415, 0.4484, 0.6865, 0.625, 0.6762] +2026-04-09 17:06:53.719247: Epoch time: 102.33 s +2026-04-09 17:06:54.779408: +2026-04-09 17:06:54.781073: Epoch 876 +2026-04-09 17:06:54.782693: Current learning rate: 0.00153 +2026-04-09 17:08:37.097172: train_loss -0.483 +2026-04-09 17:08:37.100901: val_loss -0.412 +2026-04-09 17:08:37.102473: Pseudo dice [0.6814, 0.0, 0.8414, 0.5339, 0.4796, 0.8919, 0.7094] +2026-04-09 17:08:37.103799: Epoch time: 102.32 s +2026-04-09 17:08:38.173912: +2026-04-09 17:08:38.175544: Epoch 877 +2026-04-09 17:08:38.176978: Current learning rate: 0.00152 +2026-04-09 17:10:20.416687: train_loss -0.4564 +2026-04-09 17:10:20.429553: val_loss -0.4237 +2026-04-09 17:10:20.433384: Pseudo dice [0.5288, 0.0, 0.8691, 0.5863, 0.402, 0.6163, 0.9301] +2026-04-09 17:10:20.436515: Epoch time: 102.25 s +2026-04-09 17:10:21.507540: +2026-04-09 17:10:21.508953: Epoch 878 +2026-04-09 17:10:21.510198: Current learning rate: 0.00151 +2026-04-09 17:12:03.947307: train_loss -0.4712 +2026-04-09 17:12:03.950899: 
val_loss -0.4052 +2026-04-09 17:12:03.952390: Pseudo dice [0.5114, 0.0, 0.8709, 0.8783, 0.4921, 0.8585, 0.8898] +2026-04-09 17:12:03.953881: Epoch time: 102.44 s +2026-04-09 17:12:05.019986: +2026-04-09 17:12:05.022343: Epoch 879 +2026-04-09 17:12:05.024763: Current learning rate: 0.00149 +2026-04-09 17:13:47.448061: train_loss -0.4662 +2026-04-09 17:13:47.452764: val_loss -0.4042 +2026-04-09 17:13:47.454522: Pseudo dice [0.6439, 0.0, 0.7791, 0.8213, 0.5302, 0.704, 0.9569] +2026-04-09 17:13:47.456412: Epoch time: 102.43 s +2026-04-09 17:13:48.540759: +2026-04-09 17:13:48.542238: Epoch 880 +2026-04-09 17:13:48.544138: Current learning rate: 0.00148 +2026-04-09 17:15:31.108621: train_loss -0.455 +2026-04-09 17:15:31.113615: val_loss -0.3987 +2026-04-09 17:15:31.115313: Pseudo dice [0.3425, 0.0, 0.835, 0.5118, 0.5422, 0.4736, 0.8868] +2026-04-09 17:15:31.117132: Epoch time: 102.57 s +2026-04-09 17:15:32.185941: +2026-04-09 17:15:32.187579: Epoch 881 +2026-04-09 17:15:32.189150: Current learning rate: 0.00147 +2026-04-09 17:17:14.847567: train_loss -0.4757 +2026-04-09 17:17:14.853328: val_loss -0.4225 +2026-04-09 17:17:14.855031: Pseudo dice [0.8615, 0.0, 0.8396, 0.7356, 0.5802, 0.4925, 0.9285] +2026-04-09 17:17:14.856667: Epoch time: 102.66 s +2026-04-09 17:17:15.915734: +2026-04-09 17:17:15.923572: Epoch 882 +2026-04-09 17:17:15.931499: Current learning rate: 0.00146 +2026-04-09 17:18:58.482664: train_loss -0.4692 +2026-04-09 17:18:58.486938: val_loss -0.4095 +2026-04-09 17:18:58.488788: Pseudo dice [0.4854, 0.0, 0.7932, 0.8055, 0.5068, 0.6638, 0.9023] +2026-04-09 17:18:58.490402: Epoch time: 102.57 s +2026-04-09 17:18:59.560606: +2026-04-09 17:18:59.562281: Epoch 883 +2026-04-09 17:18:59.563925: Current learning rate: 0.00145 +2026-04-09 17:20:42.164477: train_loss -0.4611 +2026-04-09 17:20:42.169055: val_loss -0.4063 +2026-04-09 17:20:42.171359: Pseudo dice [0.5945, 0.0, 0.8476, 0.7978, 0.3235, 0.9056, 0.8452] +2026-04-09 17:20:42.173589: Epoch time: 102.61 s 
+2026-04-09 17:20:43.241403: +2026-04-09 17:20:43.242775: Epoch 884 +2026-04-09 17:20:43.244137: Current learning rate: 0.00144 +2026-04-09 17:22:26.143668: train_loss -0.4682 +2026-04-09 17:22:26.147426: val_loss -0.3958 +2026-04-09 17:22:26.148992: Pseudo dice [0.7938, 0.0, 0.8102, 0.0192, 0.4882, 0.6409, 0.7561] +2026-04-09 17:22:26.150601: Epoch time: 102.91 s +2026-04-09 17:22:27.202935: +2026-04-09 17:22:27.204393: Epoch 885 +2026-04-09 17:22:27.205666: Current learning rate: 0.00143 +2026-04-09 17:24:09.858093: train_loss -0.47 +2026-04-09 17:24:09.863908: val_loss -0.4265 +2026-04-09 17:24:09.865817: Pseudo dice [0.8362, 0.0, 0.8341, 0.6344, 0.5266, 0.7524, 0.9065] +2026-04-09 17:24:09.867542: Epoch time: 102.66 s +2026-04-09 17:24:10.921991: +2026-04-09 17:24:10.923589: Epoch 886 +2026-04-09 17:24:10.924938: Current learning rate: 0.00142 +2026-04-09 17:25:53.839602: train_loss -0.4805 +2026-04-09 17:25:53.843458: val_loss -0.3902 +2026-04-09 17:25:53.845275: Pseudo dice [0.5534, 0.0, 0.799, 0.8497, 0.247, 0.514, 0.9521] +2026-04-09 17:25:53.848540: Epoch time: 102.92 s +2026-04-09 17:25:54.914361: +2026-04-09 17:25:54.915627: Epoch 887 +2026-04-09 17:25:54.917527: Current learning rate: 0.00141 +2026-04-09 17:27:37.650173: train_loss -0.4675 +2026-04-09 17:27:37.658142: val_loss -0.4126 +2026-04-09 17:27:37.659972: Pseudo dice [0.7166, 0.0, 0.8384, 0.6962, 0.1147, 0.6553, 0.8684] +2026-04-09 17:27:37.662091: Epoch time: 102.74 s +2026-04-09 17:27:38.719958: +2026-04-09 17:27:38.722026: Epoch 888 +2026-04-09 17:27:38.723573: Current learning rate: 0.00139 +2026-04-09 17:29:22.041459: train_loss -0.4811 +2026-04-09 17:29:22.046181: val_loss -0.4219 +2026-04-09 17:29:22.047804: Pseudo dice [0.817, 0.0, 0.8062, 0.7825, 0.5834, 0.6869, 0.9218] +2026-04-09 17:29:22.049461: Epoch time: 103.32 s +2026-04-09 17:29:23.104650: +2026-04-09 17:29:23.106447: Epoch 889 +2026-04-09 17:29:23.115336: Current learning rate: 0.00138 +2026-04-09 17:31:06.155168: train_loss 
-0.4607 +2026-04-09 17:31:06.161398: val_loss -0.4214 +2026-04-09 17:31:06.163836: Pseudo dice [0.7335, 0.0, 0.8585, 0.8702, 0.5662, 0.7245, 0.9559] +2026-04-09 17:31:06.165450: Epoch time: 103.05 s +2026-04-09 17:31:07.231829: +2026-04-09 17:31:07.233358: Epoch 890 +2026-04-09 17:31:07.234915: Current learning rate: 0.00137 +2026-04-09 17:32:49.943942: train_loss -0.4838 +2026-04-09 17:32:49.950226: val_loss -0.3884 +2026-04-09 17:32:49.952895: Pseudo dice [0.4837, 0.0, 0.8333, 0.7862, 0.3907, 0.905, 0.9259] +2026-04-09 17:32:49.954423: Epoch time: 102.72 s +2026-04-09 17:32:51.049687: +2026-04-09 17:32:51.051514: Epoch 891 +2026-04-09 17:32:51.053287: Current learning rate: 0.00136 +2026-04-09 17:34:33.750224: train_loss -0.4818 +2026-04-09 17:34:33.754507: val_loss -0.4025 +2026-04-09 17:34:33.756176: Pseudo dice [0.8054, 0.0, 0.7783, 0.476, 0.553, 0.7657, 0.9341] +2026-04-09 17:34:33.757650: Epoch time: 102.7 s +2026-04-09 17:34:34.819135: +2026-04-09 17:34:34.820639: Epoch 892 +2026-04-09 17:34:34.821817: Current learning rate: 0.00135 +2026-04-09 17:36:17.640269: train_loss -0.4663 +2026-04-09 17:36:17.645147: val_loss -0.4039 +2026-04-09 17:36:17.646776: Pseudo dice [0.6027, 0.0, 0.7638, 0.5352, 0.4996, 0.7514, 0.9102] +2026-04-09 17:36:17.648427: Epoch time: 102.82 s +2026-04-09 17:36:19.732846: +2026-04-09 17:36:19.734300: Epoch 893 +2026-04-09 17:36:19.735561: Current learning rate: 0.00134 +2026-04-09 17:38:02.333704: train_loss -0.46 +2026-04-09 17:38:02.338598: val_loss -0.4161 +2026-04-09 17:38:02.340302: Pseudo dice [0.5168, 0.0, 0.8258, 0.9382, 0.5993, 0.6451, 0.9155] +2026-04-09 17:38:02.341941: Epoch time: 102.6 s +2026-04-09 17:38:03.404997: +2026-04-09 17:38:03.406360: Epoch 894 +2026-04-09 17:38:03.407542: Current learning rate: 0.00133 +2026-04-09 17:39:46.439557: train_loss -0.4713 +2026-04-09 17:39:46.443196: val_loss -0.3913 +2026-04-09 17:39:46.445117: Pseudo dice [0.6112, 0.0, 0.776, 0.3599, 0.6164, 0.3698, 0.8904] +2026-04-09 
17:39:46.446650: Epoch time: 103.04 s +2026-04-09 17:39:47.507445: +2026-04-09 17:39:47.509449: Epoch 895 +2026-04-09 17:39:47.510977: Current learning rate: 0.00132 +2026-04-09 17:41:30.949363: train_loss -0.4727 +2026-04-09 17:41:30.953643: val_loss -0.4141 +2026-04-09 17:41:30.955112: Pseudo dice [0.4735, 0.0, 0.7775, 0.8236, 0.4458, 0.5496, 0.9533] +2026-04-09 17:41:30.956409: Epoch time: 103.45 s +2026-04-09 17:41:32.016073: +2026-04-09 17:41:32.017671: Epoch 896 +2026-04-09 17:41:32.018985: Current learning rate: 0.0013 +2026-04-09 17:43:15.337677: train_loss -0.4744 +2026-04-09 17:43:15.342128: val_loss -0.4191 +2026-04-09 17:43:15.344186: Pseudo dice [0.6712, 0.0, 0.7702, 0.8447, 0.408, 0.8628, 0.902] +2026-04-09 17:43:15.345761: Epoch time: 103.32 s +2026-04-09 17:43:16.391267: +2026-04-09 17:43:16.392760: Epoch 897 +2026-04-09 17:43:16.393990: Current learning rate: 0.00129 +2026-04-09 17:45:00.000270: train_loss -0.477 +2026-04-09 17:45:00.005036: val_loss -0.412 +2026-04-09 17:45:00.007090: Pseudo dice [0.8588, 0.0, 0.8498, 0.3426, 0.6187, 0.6455, 0.6283] +2026-04-09 17:45:00.008454: Epoch time: 103.61 s +2026-04-09 17:45:01.066299: +2026-04-09 17:45:01.068635: Epoch 898 +2026-04-09 17:45:01.070072: Current learning rate: 0.00128 +2026-04-09 17:46:44.221074: train_loss -0.4756 +2026-04-09 17:46:44.233572: val_loss -0.4171 +2026-04-09 17:46:44.235785: Pseudo dice [0.7867, 0.0, 0.8237, 0.8935, 0.5985, 0.7359, 0.6262] +2026-04-09 17:46:44.237618: Epoch time: 103.16 s +2026-04-09 17:46:45.279639: +2026-04-09 17:46:45.280971: Epoch 899 +2026-04-09 17:46:45.282273: Current learning rate: 0.00127 +2026-04-09 17:48:28.138749: train_loss -0.479 +2026-04-09 17:48:28.143431: val_loss -0.3986 +2026-04-09 17:48:28.145041: Pseudo dice [0.7532, 0.0, 0.8491, 0.621, 0.4624, 0.3825, 0.9436] +2026-04-09 17:48:28.146895: Epoch time: 102.86 s +2026-04-09 17:48:31.012012: +2026-04-09 17:48:31.013368: Epoch 900 +2026-04-09 17:48:31.014629: Current learning rate: 0.00126 
+2026-04-09 17:50:14.130941: train_loss -0.4855 +2026-04-09 17:50:14.137394: val_loss -0.3929 +2026-04-09 17:50:14.140761: Pseudo dice [0.73, 0.0, 0.8538, 0.7413, 0.5393, 0.784, 0.829] +2026-04-09 17:50:14.144894: Epoch time: 103.12 s +2026-04-09 17:50:15.228382: +2026-04-09 17:50:15.229679: Epoch 901 +2026-04-09 17:50:15.231030: Current learning rate: 0.00125 +2026-04-09 17:51:57.878570: train_loss -0.4768 +2026-04-09 17:51:57.882611: val_loss -0.4445 +2026-04-09 17:51:57.884290: Pseudo dice [0.5118, 0.0, 0.9225, 0.7804, 0.5328, 0.8583, 0.9414] +2026-04-09 17:51:57.885991: Epoch time: 102.65 s +2026-04-09 17:51:58.938782: +2026-04-09 17:51:58.940114: Epoch 902 +2026-04-09 17:51:58.941393: Current learning rate: 0.00124 +2026-04-09 17:53:41.858546: train_loss -0.4648 +2026-04-09 17:53:41.862937: val_loss -0.4032 +2026-04-09 17:53:41.864740: Pseudo dice [0.4183, 0.0, 0.7817, 0.7786, 0.4515, 0.474, 0.94] +2026-04-09 17:53:41.866491: Epoch time: 102.92 s +2026-04-09 17:53:42.925856: +2026-04-09 17:53:42.927428: Epoch 903 +2026-04-09 17:53:42.928786: Current learning rate: 0.00122 +2026-04-09 17:55:25.710077: train_loss -0.4719 +2026-04-09 17:55:25.713899: val_loss -0.3975 +2026-04-09 17:55:25.715900: Pseudo dice [0.8244, 0.0, 0.833, 0.4549, 0.3994, 0.4412, 0.8402] +2026-04-09 17:55:25.717459: Epoch time: 102.79 s +2026-04-09 17:55:26.767537: +2026-04-09 17:55:26.769300: Epoch 904 +2026-04-09 17:55:26.770927: Current learning rate: 0.00121 +2026-04-09 17:57:09.424374: train_loss -0.4851 +2026-04-09 17:57:09.427774: val_loss -0.4226 +2026-04-09 17:57:09.429266: Pseudo dice [0.7746, 0.0, 0.8593, 0.894, 0.4512, 0.8532, 0.9222] +2026-04-09 17:57:09.430710: Epoch time: 102.66 s +2026-04-09 17:57:10.489656: +2026-04-09 17:57:10.491273: Epoch 905 +2026-04-09 17:57:10.492571: Current learning rate: 0.0012 +2026-04-09 17:58:53.482021: train_loss -0.4735 +2026-04-09 17:58:53.485782: val_loss -0.3977 +2026-04-09 17:58:53.487551: Pseudo dice [0.5027, 0.0, 0.8625, 0.5267, 0.318, 
0.8418, 0.6364] +2026-04-09 17:58:53.489101: Epoch time: 103.0 s +2026-04-09 17:58:54.535722: +2026-04-09 17:58:54.537416: Epoch 906 +2026-04-09 17:58:54.538783: Current learning rate: 0.00119 +2026-04-09 18:00:37.292906: train_loss -0.4636 +2026-04-09 18:00:37.296657: val_loss -0.4167 +2026-04-09 18:00:37.298098: Pseudo dice [0.8135, 0.0, 0.8137, 0.1868, 0.5296, 0.9006, 0.938] +2026-04-09 18:00:37.299562: Epoch time: 102.76 s +2026-04-09 18:00:38.369896: +2026-04-09 18:00:38.371655: Epoch 907 +2026-04-09 18:00:38.373062: Current learning rate: 0.00118 +2026-04-09 18:02:21.011908: train_loss -0.4736 +2026-04-09 18:02:21.017343: val_loss -0.4293 +2026-04-09 18:02:21.018967: Pseudo dice [0.6538, 0.0, 0.8069, 0.5537, 0.4967, 0.8423, 0.945] +2026-04-09 18:02:21.020367: Epoch time: 102.65 s +2026-04-09 18:02:22.099847: +2026-04-09 18:02:22.102240: Epoch 908 +2026-04-09 18:02:22.104331: Current learning rate: 0.00117 +2026-04-09 18:04:04.810874: train_loss -0.4797 +2026-04-09 18:04:04.815389: val_loss -0.3863 +2026-04-09 18:04:04.816936: Pseudo dice [0.8512, 0.0, 0.836, 0.8666, 0.5835, 0.3313, 0.7305] +2026-04-09 18:04:04.818615: Epoch time: 102.71 s +2026-04-09 18:04:05.891207: +2026-04-09 18:04:05.893703: Epoch 909 +2026-04-09 18:04:05.895308: Current learning rate: 0.00116 +2026-04-09 18:05:48.535829: train_loss -0.4748 +2026-04-09 18:05:48.539621: val_loss -0.3977 +2026-04-09 18:05:48.541162: Pseudo dice [0.6159, 0.0, 0.7889, 0.6922, 0.4702, 0.825, 0.8517] +2026-04-09 18:05:48.549857: Epoch time: 102.65 s +2026-04-09 18:05:49.630328: +2026-04-09 18:05:49.631875: Epoch 910 +2026-04-09 18:05:49.633189: Current learning rate: 0.00115 +2026-04-09 18:07:32.084946: train_loss -0.4845 +2026-04-09 18:07:32.088958: val_loss -0.4353 +2026-04-09 18:07:32.090654: Pseudo dice [0.779, 0.0, 0.8466, 0.528, 0.4961, 0.886, 0.9214] +2026-04-09 18:07:32.092100: Epoch time: 102.46 s +2026-04-09 18:07:33.156680: +2026-04-09 18:07:33.157991: Epoch 911 +2026-04-09 18:07:33.159254: Current 
learning rate: 0.00113 +2026-04-09 18:09:16.002327: train_loss -0.4913 +2026-04-09 18:09:16.009530: val_loss -0.4222 +2026-04-09 18:09:16.016711: Pseudo dice [0.8363, 0.0, 0.7842, 0.587, 0.4439, 0.8936, 0.9435] +2026-04-09 18:09:16.019410: Epoch time: 102.85 s +2026-04-09 18:09:17.105687: +2026-04-09 18:09:17.107750: Epoch 912 +2026-04-09 18:09:17.109769: Current learning rate: 0.00112 +2026-04-09 18:11:01.641693: train_loss -0.4819 +2026-04-09 18:11:01.649113: val_loss -0.4397 +2026-04-09 18:11:01.651870: Pseudo dice [0.8254, 0.0, 0.8908, 0.7393, 0.3597, 0.8569, 0.8739] +2026-04-09 18:11:01.654388: Epoch time: 104.54 s +2026-04-09 18:11:02.717788: +2026-04-09 18:11:02.719309: Epoch 913 +2026-04-09 18:11:02.721398: Current learning rate: 0.00111 +2026-04-09 18:12:47.965353: train_loss -0.4817 +2026-04-09 18:12:47.973702: val_loss -0.4165 +2026-04-09 18:12:47.976192: Pseudo dice [0.5224, 0.0, 0.894, 0.7534, 0.4822, 0.8264, 0.9077] +2026-04-09 18:12:47.979208: Epoch time: 105.25 s +2026-04-09 18:12:50.192339: +2026-04-09 18:12:50.195622: Epoch 914 +2026-04-09 18:12:50.198054: Current learning rate: 0.0011 +2026-04-09 18:14:37.971249: train_loss -0.4778 +2026-04-09 18:14:37.987656: val_loss -0.3976 +2026-04-09 18:14:37.991600: Pseudo dice [0.5092, 0.0, 0.8553, 0.5573, 0.535, 0.7565, 0.7469] +2026-04-09 18:14:37.997684: Epoch time: 107.78 s +2026-04-09 18:14:39.085009: +2026-04-09 18:14:39.089159: Epoch 915 +2026-04-09 18:14:39.093749: Current learning rate: 0.00109 +2026-04-09 18:16:21.949186: train_loss -0.4682 +2026-04-09 18:16:21.956143: val_loss -0.4043 +2026-04-09 18:16:21.958502: Pseudo dice [0.767, 0.0, 0.7452, 0.7867, 0.3581, 0.9247, 0.9218] +2026-04-09 18:16:21.961553: Epoch time: 102.87 s +2026-04-09 18:16:23.042979: +2026-04-09 18:16:23.047246: Epoch 916 +2026-04-09 18:16:23.049477: Current learning rate: 0.00108 +2026-04-09 18:18:06.981223: train_loss -0.4717 +2026-04-09 18:18:06.989326: val_loss -0.4028 +2026-04-09 18:18:06.991578: Pseudo dice [0.4872, 
0.0, 0.774, 0.7972, 0.5373, 0.4895, 0.9431] +2026-04-09 18:18:06.993836: Epoch time: 103.94 s +2026-04-09 18:18:08.101947: +2026-04-09 18:18:08.105056: Epoch 917 +2026-04-09 18:18:08.108517: Current learning rate: 0.00106 +2026-04-09 18:19:51.577798: train_loss -0.4766 +2026-04-09 18:19:51.586073: val_loss -0.4173 +2026-04-09 18:19:51.589639: Pseudo dice [0.7989, 0.0, 0.7913, 0.4362, 0.5721, 0.6035, 0.9515] +2026-04-09 18:19:51.593085: Epoch time: 103.48 s +2026-04-09 18:19:52.672179: +2026-04-09 18:19:52.675076: Epoch 918 +2026-04-09 18:19:52.678969: Current learning rate: 0.00105 +2026-04-09 18:21:36.595811: train_loss -0.4857 +2026-04-09 18:21:36.603832: val_loss -0.3975 +2026-04-09 18:21:36.619715: Pseudo dice [0.4655, 0.0, 0.8474, 0.7964, 0.491, 0.6989, 0.9381] +2026-04-09 18:21:36.622421: Epoch time: 103.93 s +2026-04-09 18:21:37.703798: +2026-04-09 18:21:37.709043: Epoch 919 +2026-04-09 18:21:37.711986: Current learning rate: 0.00104 +2026-04-09 18:23:22.118111: train_loss -0.492 +2026-04-09 18:23:22.138002: val_loss -0.4195 +2026-04-09 18:23:22.142747: Pseudo dice [0.551, 0.0, 0.8074, 0.8505, 0.4233, 0.7253, 0.8902] +2026-04-09 18:23:22.152469: Epoch time: 104.42 s +2026-04-09 18:23:23.243903: +2026-04-09 18:23:23.250684: Epoch 920 +2026-04-09 18:23:23.254483: Current learning rate: 0.00103 +2026-04-09 18:25:06.254141: train_loss -0.4778 +2026-04-09 18:25:06.260551: val_loss -0.4112 +2026-04-09 18:25:06.262865: Pseudo dice [0.7587, 0.0, 0.6694, 0.9141, 0.6045, 0.424, 0.9191] +2026-04-09 18:25:06.267664: Epoch time: 103.01 s +2026-04-09 18:25:07.343887: +2026-04-09 18:25:07.346125: Epoch 921 +2026-04-09 18:25:07.348873: Current learning rate: 0.00102 +2026-04-09 18:26:52.369723: train_loss -0.4854 +2026-04-09 18:26:52.375353: val_loss -0.4231 +2026-04-09 18:26:52.377794: Pseudo dice [0.4163, 0.0, 0.9079, 0.8107, 0.5554, 0.6673, 0.8884] +2026-04-09 18:26:52.380166: Epoch time: 105.03 s +2026-04-09 18:26:53.446338: +2026-04-09 18:26:53.449143: Epoch 922 
+2026-04-09 18:26:53.451246: Current learning rate: 0.00101 +2026-04-09 18:28:37.991751: train_loss -0.4804 +2026-04-09 18:28:37.998509: val_loss -0.4263 +2026-04-09 18:28:38.000753: Pseudo dice [0.6981, 0.0, 0.8267, 0.8019, 0.5246, 0.6622, 0.7287] +2026-04-09 18:28:38.004168: Epoch time: 104.55 s +2026-04-09 18:28:39.085423: +2026-04-09 18:28:39.091253: Epoch 923 +2026-04-09 18:28:39.093385: Current learning rate: 0.001 +2026-04-09 18:30:21.920347: train_loss -0.4855 +2026-04-09 18:30:21.928509: val_loss -0.4089 +2026-04-09 18:30:21.931645: Pseudo dice [0.8493, 0.0, 0.9062, 0.6474, 0.3237, 0.6704, 0.9214] +2026-04-09 18:30:21.935675: Epoch time: 102.84 s +2026-04-09 18:30:23.017781: +2026-04-09 18:30:23.021382: Epoch 924 +2026-04-09 18:30:23.025091: Current learning rate: 0.00098 +2026-04-09 18:32:07.183587: train_loss -0.486 +2026-04-09 18:32:07.190205: val_loss -0.4153 +2026-04-09 18:32:07.194002: Pseudo dice [0.8533, 0.0, 0.8565, 0.8375, 0.6212, 0.5381, 0.902] +2026-04-09 18:32:07.196913: Epoch time: 104.17 s +2026-04-09 18:32:08.264299: +2026-04-09 18:32:08.272223: Epoch 925 +2026-04-09 18:32:08.276004: Current learning rate: 0.00097 +2026-04-09 18:33:51.899860: train_loss -0.4641 +2026-04-09 18:33:51.907112: val_loss -0.4156 +2026-04-09 18:33:51.909566: Pseudo dice [0.5895, 0.0, 0.7752, 0.4474, 0.493, 0.8085, 0.9284] +2026-04-09 18:33:51.911983: Epoch time: 103.64 s +2026-04-09 18:33:52.994622: +2026-04-09 18:33:52.996619: Epoch 926 +2026-04-09 18:33:52.999241: Current learning rate: 0.00096 +2026-04-09 18:35:36.641498: train_loss -0.4698 +2026-04-09 18:35:36.647587: val_loss -0.4195 +2026-04-09 18:35:36.649705: Pseudo dice [0.8731, 0.0, 0.8706, 0.5254, 0.5918, 0.871, 0.917] +2026-04-09 18:35:36.651977: Epoch time: 103.65 s +2026-04-09 18:35:37.739777: +2026-04-09 18:35:37.744740: Epoch 927 +2026-04-09 18:35:37.747420: Current learning rate: 0.00095 +2026-04-09 18:37:21.765574: train_loss -0.4846 +2026-04-09 18:37:21.772932: val_loss -0.4062 +2026-04-09 
18:37:21.776811: Pseudo dice [0.6969, 0.0, 0.6941, 0.6895, 0.5245, 0.5325, 0.8397] +2026-04-09 18:37:21.779709: Epoch time: 104.03 s +2026-04-09 18:37:22.861811: +2026-04-09 18:37:22.864501: Epoch 928 +2026-04-09 18:37:22.866541: Current learning rate: 0.00094 +2026-04-09 18:39:07.416102: train_loss -0.4783 +2026-04-09 18:39:07.422814: val_loss -0.4304 +2026-04-09 18:39:07.425637: Pseudo dice [0.5193, 0.0, 0.7221, 0.6653, 0.5512, 0.891, 0.8478] +2026-04-09 18:39:07.428511: Epoch time: 104.56 s +2026-04-09 18:39:08.487581: +2026-04-09 18:39:08.489630: Epoch 929 +2026-04-09 18:39:08.491187: Current learning rate: 0.00092 +2026-04-09 18:40:52.590249: train_loss -0.4829 +2026-04-09 18:40:52.601089: val_loss -0.4106 +2026-04-09 18:40:52.605562: Pseudo dice [0.5424, 0.0, 0.8836, 0.8454, 0.4922, 0.8395, 0.9236] +2026-04-09 18:40:52.609303: Epoch time: 104.11 s +2026-04-09 18:40:53.689991: +2026-04-09 18:40:53.692314: Epoch 930 +2026-04-09 18:40:53.694881: Current learning rate: 0.00091 +2026-04-09 18:42:38.753978: train_loss -0.4933 +2026-04-09 18:42:38.762404: val_loss -0.4239 +2026-04-09 18:42:38.766067: Pseudo dice [0.5917, 0.0, 0.8545, 0.791, 0.3874, 0.8913, 0.8917] +2026-04-09 18:42:38.769239: Epoch time: 105.07 s +2026-04-09 18:42:39.860272: +2026-04-09 18:42:39.866922: Epoch 931 +2026-04-09 18:42:39.869867: Current learning rate: 0.0009 +2026-04-09 18:44:24.189400: train_loss -0.4812 +2026-04-09 18:44:24.201380: val_loss -0.4115 +2026-04-09 18:44:24.206849: Pseudo dice [0.7851, 0.0, 0.8467, 0.7947, 0.437, 0.5714, 0.8728] +2026-04-09 18:44:24.210286: Epoch time: 104.33 s +2026-04-09 18:44:25.323125: +2026-04-09 18:44:25.326054: Epoch 932 +2026-04-09 18:44:25.328883: Current learning rate: 0.00089 +2026-04-09 18:46:09.636252: train_loss -0.4894 +2026-04-09 18:46:09.646683: val_loss -0.4116 +2026-04-09 18:46:09.649938: Pseudo dice [0.4031, 0.0, 0.7701, 0.8216, 0.6247, 0.9107, 0.9165] +2026-04-09 18:46:09.653397: Epoch time: 104.32 s +2026-04-09 18:46:10.755368: 
+2026-04-09 18:46:10.758404: Epoch 933 +2026-04-09 18:46:10.761518: Current learning rate: 0.00088 +2026-04-09 18:47:55.229721: train_loss -0.4858 +2026-04-09 18:47:55.236663: val_loss -0.4325 +2026-04-09 18:47:55.239211: Pseudo dice [0.5603, 0.0, 0.8552, 0.5858, 0.5848, 0.4989, 0.9556] +2026-04-09 18:47:55.248300: Epoch time: 104.48 s +2026-04-09 18:47:56.391850: +2026-04-09 18:47:56.395369: Epoch 934 +2026-04-09 18:47:56.398234: Current learning rate: 0.00087 +2026-04-09 18:49:40.365621: train_loss -0.4915 +2026-04-09 18:49:40.372764: val_loss -0.4182 +2026-04-09 18:49:40.376505: Pseudo dice [0.5273, 0.0, 0.849, 0.7285, 0.5723, 0.7426, 0.861] +2026-04-09 18:49:40.379503: Epoch time: 103.98 s +2026-04-09 18:49:42.570311: +2026-04-09 18:49:42.573703: Epoch 935 +2026-04-09 18:49:42.577383: Current learning rate: 0.00085 +2026-04-09 18:51:25.876070: train_loss -0.4947 +2026-04-09 18:51:25.882652: val_loss -0.4158 +2026-04-09 18:51:25.884660: Pseudo dice [0.8307, 0.0, 0.869, 0.8102, 0.5579, 0.8696, 0.8978] +2026-04-09 18:51:25.886310: Epoch time: 103.31 s +2026-04-09 18:51:26.961262: +2026-04-09 18:51:26.962831: Epoch 936 +2026-04-09 18:51:26.964401: Current learning rate: 0.00084 +2026-04-09 18:53:09.618705: train_loss -0.489 +2026-04-09 18:53:09.623629: val_loss -0.4116 +2026-04-09 18:53:09.625049: Pseudo dice [0.8026, 0.0, 0.6571, 0.4908, 0.666, 0.9022, 0.8841] +2026-04-09 18:53:09.626620: Epoch time: 102.66 s +2026-04-09 18:53:10.776930: +2026-04-09 18:53:10.778419: Epoch 937 +2026-04-09 18:53:10.780432: Current learning rate: 0.00083 +2026-04-09 18:54:53.572916: train_loss -0.4935 +2026-04-09 18:54:53.577656: val_loss -0.4438 +2026-04-09 18:54:53.579512: Pseudo dice [0.8345, 0.0, 0.8135, 0.464, 0.509, 0.8531, 0.9549] +2026-04-09 18:54:53.581744: Epoch time: 102.8 s +2026-04-09 18:54:54.650246: +2026-04-09 18:54:54.653255: Epoch 938 +2026-04-09 18:54:54.655361: Current learning rate: 0.00082 +2026-04-09 18:56:38.234495: train_loss -0.479 +2026-04-09 
18:56:38.242875: val_loss -0.4073 +2026-04-09 18:56:38.245628: Pseudo dice [0.5037, 0.0, 0.7821, 0.1549, 0.3822, 0.8014, 0.924] +2026-04-09 18:56:38.247975: Epoch time: 103.59 s +2026-04-09 18:56:39.334911: +2026-04-09 18:56:39.339576: Epoch 939 +2026-04-09 18:56:39.342193: Current learning rate: 0.00081 +2026-04-09 18:58:22.190551: train_loss -0.4913 +2026-04-09 18:58:22.195029: val_loss -0.4215 +2026-04-09 18:58:22.196736: Pseudo dice [0.5075, 0.0, 0.8648, 0.7101, 0.4735, 0.8628, 0.9259] +2026-04-09 18:58:22.198471: Epoch time: 102.86 s +2026-04-09 18:58:23.273484: +2026-04-09 18:58:23.275244: Epoch 940 +2026-04-09 18:58:23.276642: Current learning rate: 0.00079 +2026-04-09 19:00:06.728392: train_loss -0.4767 +2026-04-09 19:00:06.734843: val_loss -0.4205 +2026-04-09 19:00:06.736901: Pseudo dice [0.7905, 0.0, 0.8169, 0.686, 0.4119, 0.6645, 0.8723] +2026-04-09 19:00:06.738887: Epoch time: 103.46 s +2026-04-09 19:00:07.801042: +2026-04-09 19:00:07.802862: Epoch 941 +2026-04-09 19:00:07.804441: Current learning rate: 0.00078 +2026-04-09 19:01:51.354029: train_loss -0.4839 +2026-04-09 19:01:51.373983: val_loss -0.4316 +2026-04-09 19:01:51.379703: Pseudo dice [0.6895, 0.0, 0.8183, 0.7242, 0.5472, 0.7083, 0.8875] +2026-04-09 19:01:51.398652: Epoch time: 103.56 s +2026-04-09 19:01:52.480385: +2026-04-09 19:01:52.484160: Epoch 942 +2026-04-09 19:01:52.487086: Current learning rate: 0.00077 +2026-04-09 19:03:36.810788: train_loss -0.4901 +2026-04-09 19:03:36.817248: val_loss -0.3747 +2026-04-09 19:03:36.819741: Pseudo dice [0.5211, 0.0, 0.8186, 0.7981, 0.5205, 0.6766, 0.8381] +2026-04-09 19:03:36.823512: Epoch time: 104.33 s +2026-04-09 19:03:37.909243: +2026-04-09 19:03:37.911927: Epoch 943 +2026-04-09 19:03:37.914198: Current learning rate: 0.00076 +2026-04-09 19:05:22.024198: train_loss -0.4877 +2026-04-09 19:05:22.032179: val_loss -0.4384 +2026-04-09 19:05:22.035539: Pseudo dice [0.8643, 0.0, 0.8255, 0.8318, 0.5603, 0.6556, 0.9002] +2026-04-09 19:05:22.039651: Epoch 
time: 104.12 s +2026-04-09 19:05:23.115447: +2026-04-09 19:05:23.117796: Epoch 944 +2026-04-09 19:05:23.120522: Current learning rate: 0.00075 +2026-04-09 19:07:06.928308: train_loss -0.4887 +2026-04-09 19:07:06.939544: val_loss -0.4287 +2026-04-09 19:07:06.943121: Pseudo dice [0.4793, 0.0, 0.8445, 0.8575, 0.6259, 0.6407, 0.907] +2026-04-09 19:07:06.946728: Epoch time: 103.82 s +2026-04-09 19:07:08.031341: +2026-04-09 19:07:08.034146: Epoch 945 +2026-04-09 19:07:08.036270: Current learning rate: 0.00074 +2026-04-09 19:08:51.981509: train_loss -0.4714 +2026-04-09 19:08:51.989015: val_loss -0.4169 +2026-04-09 19:08:51.992484: Pseudo dice [0.6636, 0.0, 0.8175, 0.826, 0.6231, 0.8483, 0.9435] +2026-04-09 19:08:51.994756: Epoch time: 103.95 s +2026-04-09 19:08:53.064514: +2026-04-09 19:08:53.067931: Epoch 946 +2026-04-09 19:08:53.070516: Current learning rate: 0.00072 +2026-04-09 19:10:36.890087: train_loss -0.4954 +2026-04-09 19:10:36.896267: val_loss -0.4346 +2026-04-09 19:10:36.900151: Pseudo dice [0.5956, 0.0, 0.8794, 0.7144, 0.5843, 0.5073, 0.9267] +2026-04-09 19:10:36.902356: Epoch time: 103.83 s +2026-04-09 19:10:38.016729: +2026-04-09 19:10:38.020611: Epoch 947 +2026-04-09 19:10:38.022475: Current learning rate: 0.00071 +2026-04-09 19:12:22.412494: train_loss -0.4836 +2026-04-09 19:12:22.419025: val_loss -0.4161 +2026-04-09 19:12:22.421777: Pseudo dice [0.5622, 0.0, 0.8107, 0.6193, 0.3436, 0.8378, 0.8107] +2026-04-09 19:12:22.424774: Epoch time: 104.4 s +2026-04-09 19:12:23.512447: +2026-04-09 19:12:23.514217: Epoch 948 +2026-04-09 19:12:23.515584: Current learning rate: 0.0007 +2026-04-09 19:14:09.430958: train_loss -0.4901 +2026-04-09 19:14:09.441404: val_loss -0.417 +2026-04-09 19:14:09.444655: Pseudo dice [0.4452, 0.0, 0.8297, 0.7161, 0.4908, 0.9193, 0.9338] +2026-04-09 19:14:09.447329: Epoch time: 105.92 s +2026-04-09 19:14:10.543383: +2026-04-09 19:14:10.545379: Epoch 949 +2026-04-09 19:14:10.547582: Current learning rate: 0.00069 +2026-04-09 
19:15:54.606531: train_loss -0.4959 +2026-04-09 19:15:54.615789: val_loss -0.405 +2026-04-09 19:15:54.618824: Pseudo dice [0.4898, 0.0, 0.8968, 0.7183, 0.6117, 0.8262, 0.9272] +2026-04-09 19:15:54.621548: Epoch time: 104.07 s +2026-04-09 19:15:57.396861: +2026-04-09 19:15:57.398592: Epoch 950 +2026-04-09 19:15:57.400380: Current learning rate: 0.00067 +2026-04-09 19:17:41.899444: train_loss -0.4834 +2026-04-09 19:17:41.907583: val_loss -0.4267 +2026-04-09 19:17:41.910707: Pseudo dice [0.6976, 0.0, 0.3225, 0.8667, 0.5732, 0.5989, 0.9016] +2026-04-09 19:17:41.913069: Epoch time: 104.51 s +2026-04-09 19:17:43.059841: +2026-04-09 19:17:43.063828: Epoch 951 +2026-04-09 19:17:43.066620: Current learning rate: 0.00066 +2026-04-09 19:19:28.740057: train_loss -0.4987 +2026-04-09 19:19:28.746594: val_loss -0.4041 +2026-04-09 19:19:28.748574: Pseudo dice [0.5973, 0.0, 0.7332, 0.7808, 0.5483, 0.5476, 0.2078] +2026-04-09 19:19:28.750865: Epoch time: 105.68 s +2026-04-09 19:19:29.801400: +2026-04-09 19:19:29.803269: Epoch 952 +2026-04-09 19:19:29.806077: Current learning rate: 0.00065 +2026-04-09 19:21:12.728044: train_loss -0.499 +2026-04-09 19:21:12.733380: val_loss -0.4179 +2026-04-09 19:21:12.735650: Pseudo dice [0.6759, 0.0, 0.8509, 0.8154, 0.4419, 0.7337, 0.9045] +2026-04-09 19:21:12.738483: Epoch time: 102.93 s +2026-04-09 19:21:13.851341: +2026-04-09 19:21:13.853085: Epoch 953 +2026-04-09 19:21:13.854703: Current learning rate: 0.00064 +2026-04-09 19:22:58.874262: train_loss -0.4895 +2026-04-09 19:22:58.879214: val_loss -0.4184 +2026-04-09 19:22:58.891825: Pseudo dice [0.6089, 0.0, 0.8455, 0.5136, 0.4945, 0.896, 0.8955] +2026-04-09 19:22:58.895360: Epoch time: 105.03 s +2026-04-09 19:23:00.014063: +2026-04-09 19:23:00.016977: Epoch 954 +2026-04-09 19:23:00.019833: Current learning rate: 0.00063 +2026-04-09 19:24:43.781045: train_loss -0.4982 +2026-04-09 19:24:43.787613: val_loss -0.4381 +2026-04-09 19:24:43.790712: Pseudo dice [0.8464, 0.0, 0.8747, 0.8984, 0.3827, 0.857, 
0.9218] +2026-04-09 19:24:43.793279: Epoch time: 103.77 s +2026-04-09 19:24:44.901546: +2026-04-09 19:24:44.903834: Epoch 955 +2026-04-09 19:24:44.906075: Current learning rate: 0.00061 +2026-04-09 19:26:30.362841: train_loss -0.4943 +2026-04-09 19:26:30.373403: val_loss -0.4031 +2026-04-09 19:26:30.376332: Pseudo dice [0.4694, 0.0, 0.778, 0.804, 0.598, 0.7323, 0.7958] +2026-04-09 19:26:30.379025: Epoch time: 105.46 s +2026-04-09 19:26:31.484762: +2026-04-09 19:26:31.486801: Epoch 956 +2026-04-09 19:26:31.489123: Current learning rate: 0.0006 +2026-04-09 19:28:18.879590: train_loss -0.4938 +2026-04-09 19:28:18.888819: val_loss -0.3998 +2026-04-09 19:28:18.893704: Pseudo dice [0.6842, 0.0, 0.798, 0.8047, 0.5131, 0.5173, 0.9048] +2026-04-09 19:28:18.897039: Epoch time: 107.4 s +2026-04-09 19:28:20.002777: +2026-04-09 19:28:20.005229: Epoch 957 +2026-04-09 19:28:20.007995: Current learning rate: 0.00059 +2026-04-09 19:30:02.439757: train_loss -0.4957 +2026-04-09 19:30:02.448945: val_loss -0.4237 +2026-04-09 19:30:02.453189: Pseudo dice [0.4431, 0.0, 0.8803, 0.8743, 0.5227, 0.8379, 0.738] +2026-04-09 19:30:02.461793: Epoch time: 102.44 s +2026-04-09 19:30:03.548712: +2026-04-09 19:30:03.552789: Epoch 958 +2026-04-09 19:30:03.557621: Current learning rate: 0.00058 +2026-04-09 19:31:46.304902: train_loss -0.4943 +2026-04-09 19:31:46.311334: val_loss -0.3832 +2026-04-09 19:31:46.313720: Pseudo dice [0.7692, 0.0, 0.7617, 0.8558, 0.5602, 0.3232, 0.7084] +2026-04-09 19:31:46.316133: Epoch time: 102.76 s +2026-04-09 19:31:47.401031: +2026-04-09 19:31:47.403914: Epoch 959 +2026-04-09 19:31:47.406855: Current learning rate: 0.00056 +2026-04-09 19:33:31.616683: train_loss -0.4951 +2026-04-09 19:33:31.624871: val_loss -0.4302 +2026-04-09 19:33:31.627491: Pseudo dice [0.6675, 0.0, 0.906, 0.7674, 0.6459, 0.7742, 0.9303] +2026-04-09 19:33:31.630561: Epoch time: 104.22 s +2026-04-09 19:33:32.732784: +2026-04-09 19:33:32.735452: Epoch 960 +2026-04-09 19:33:32.737333: Current learning 
rate: 0.00055 +2026-04-09 19:35:16.687638: train_loss -0.4979 +2026-04-09 19:35:16.694289: val_loss -0.4148 +2026-04-09 19:35:16.696856: Pseudo dice [0.7308, 0.0, 0.847, 0.4427, 0.43, 0.6829, 0.8235] +2026-04-09 19:35:16.698816: Epoch time: 103.96 s +2026-04-09 19:35:17.798445: +2026-04-09 19:35:17.800227: Epoch 961 +2026-04-09 19:35:17.801957: Current learning rate: 0.00054 +2026-04-09 19:37:01.165420: train_loss -0.5005 +2026-04-09 19:37:01.174757: val_loss -0.4083 +2026-04-09 19:37:01.178009: Pseudo dice [0.5009, 0.0, 0.8761, 0.7311, 0.5719, 0.4618, 0.7855] +2026-04-09 19:37:01.181766: Epoch time: 103.37 s +2026-04-09 19:37:02.293863: +2026-04-09 19:37:02.296722: Epoch 962 +2026-04-09 19:37:02.299257: Current learning rate: 0.00053 +2026-04-09 19:38:46.020044: train_loss -0.4907 +2026-04-09 19:38:46.027632: val_loss -0.4189 +2026-04-09 19:38:46.030231: Pseudo dice [0.7942, 0.0, 0.7571, 0.8176, 0.567, 0.8067, 0.8497] +2026-04-09 19:38:46.034147: Epoch time: 103.73 s +2026-04-09 19:38:47.124575: +2026-04-09 19:38:47.129305: Epoch 963 +2026-04-09 19:38:47.132366: Current learning rate: 0.00051 +2026-04-09 19:40:32.093121: train_loss -0.493 +2026-04-09 19:40:32.102141: val_loss -0.423 +2026-04-09 19:40:32.104548: Pseudo dice [0.6768, 0.0, 0.8617, 0.8345, 0.5953, 0.728, 0.9474] +2026-04-09 19:40:32.108999: Epoch time: 104.97 s +2026-04-09 19:40:33.205133: +2026-04-09 19:40:33.208107: Epoch 964 +2026-04-09 19:40:33.210774: Current learning rate: 0.0005 +2026-04-09 19:42:16.740795: train_loss -0.4851 +2026-04-09 19:42:16.746575: val_loss -0.3966 +2026-04-09 19:42:16.748804: Pseudo dice [0.5718, 0.0, 0.7598, 0.5427, 0.5093, 0.8181, 0.8878] +2026-04-09 19:42:16.751535: Epoch time: 103.54 s +2026-04-09 19:42:17.839575: +2026-04-09 19:42:17.841456: Epoch 965 +2026-04-09 19:42:17.843909: Current learning rate: 0.00049 +2026-04-09 19:44:00.903175: train_loss -0.4967 +2026-04-09 19:44:00.911724: val_loss -0.4355 +2026-04-09 19:44:00.914890: Pseudo dice [0.6611, 0.0, 0.9128, 
0.8626, 0.4514, 0.905, 0.7346] +2026-04-09 19:44:00.919244: Epoch time: 103.07 s +2026-04-09 19:44:02.073361: +2026-04-09 19:44:02.076249: Epoch 966 +2026-04-09 19:44:02.079958: Current learning rate: 0.00048 +2026-04-09 19:45:46.417034: train_loss -0.4912 +2026-04-09 19:45:46.425004: val_loss -0.4384 +2026-04-09 19:45:46.428377: Pseudo dice [0.8071, 0.0, 0.802, 0.8387, 0.486, 0.8891, 0.9019] +2026-04-09 19:45:46.431582: Epoch time: 104.35 s +2026-04-09 19:45:47.535998: +2026-04-09 19:45:47.539346: Epoch 967 +2026-04-09 19:45:47.542106: Current learning rate: 0.00046 +2026-04-09 19:47:31.313314: train_loss -0.4989 +2026-04-09 19:47:31.320363: val_loss -0.4294 +2026-04-09 19:47:31.323369: Pseudo dice [0.5288, 0.0, 0.9008, 0.8621, 0.3811, 0.8905, 0.9137] +2026-04-09 19:47:31.326305: Epoch time: 103.78 s +2026-04-09 19:47:32.417924: +2026-04-09 19:47:32.420451: Epoch 968 +2026-04-09 19:47:32.422790: Current learning rate: 0.00045 +2026-04-09 19:49:17.612617: train_loss -0.4883 +2026-04-09 19:49:17.617670: val_loss -0.4352 +2026-04-09 19:49:17.620301: Pseudo dice [0.4937, 0.0, 0.8297, 0.8396, 0.5579, 0.8791, 0.9047] +2026-04-09 19:49:17.622800: Epoch time: 105.2 s +2026-04-09 19:49:18.712145: +2026-04-09 19:49:18.714971: Epoch 969 +2026-04-09 19:49:18.716980: Current learning rate: 0.00044 +2026-04-09 19:51:03.365809: train_loss -0.5006 +2026-04-09 19:51:03.374692: val_loss -0.4061 +2026-04-09 19:51:03.377734: Pseudo dice [0.5821, 0.0, 0.8378, 0.779, 0.373, 0.8036, 0.9202] +2026-04-09 19:51:03.382069: Epoch time: 104.66 s +2026-04-09 19:51:04.487515: +2026-04-09 19:51:04.495724: Epoch 970 +2026-04-09 19:51:04.499370: Current learning rate: 0.00043 +2026-04-09 19:52:46.616806: train_loss -0.5021 +2026-04-09 19:52:46.625891: val_loss -0.4339 +2026-04-09 19:52:46.628123: Pseudo dice [0.6274, 0.0, 0.8977, 0.8324, 0.3975, 0.8085, 0.8996] +2026-04-09 19:52:46.629678: Epoch time: 102.13 s +2026-04-09 19:52:47.712366: +2026-04-09 19:52:47.714592: Epoch 971 +2026-04-09 
19:52:47.718295: Current learning rate: 0.00041 +2026-04-09 19:54:30.296015: train_loss -0.509 +2026-04-09 19:54:30.303416: val_loss -0.3907 +2026-04-09 19:54:30.305578: Pseudo dice [0.6119, 0.0, 0.8335, 0.8282, 0.2538, 0.5194, 0.8346] +2026-04-09 19:54:30.307926: Epoch time: 102.59 s +2026-04-09 19:54:31.402978: +2026-04-09 19:54:31.404867: Epoch 972 +2026-04-09 19:54:31.406524: Current learning rate: 0.0004 +2026-04-09 19:56:14.028598: train_loss -0.5085 +2026-04-09 19:56:14.040587: val_loss -0.4367 +2026-04-09 19:56:14.043092: Pseudo dice [0.4579, 0.0, 0.8446, 0.8549, 0.6661, 0.9139, 0.9473] +2026-04-09 19:56:14.045930: Epoch time: 102.63 s +2026-04-09 19:56:15.154716: +2026-04-09 19:56:15.156797: Epoch 973 +2026-04-09 19:56:15.159484: Current learning rate: 0.00039 +2026-04-09 19:57:57.547640: train_loss -0.4922 +2026-04-09 19:57:57.557348: val_loss -0.4152 +2026-04-09 19:57:57.563154: Pseudo dice [0.692, 0.0, 0.824, 0.8395, 0.3422, 0.7065, 0.9191] +2026-04-09 19:57:57.565610: Epoch time: 102.4 s +2026-04-09 19:57:58.693395: +2026-04-09 19:57:58.695044: Epoch 974 +2026-04-09 19:57:58.696457: Current learning rate: 0.00037 +2026-04-09 19:59:41.429937: train_loss -0.493 +2026-04-09 19:59:41.436256: val_loss -0.4482 +2026-04-09 19:59:41.437927: Pseudo dice [0.8231, 0.0, 0.7304, 0.8563, 0.6534, 0.7765, 0.9417] +2026-04-09 19:59:41.439906: Epoch time: 102.74 s +2026-04-09 19:59:42.527783: +2026-04-09 19:59:42.529863: Epoch 975 +2026-04-09 19:59:42.531856: Current learning rate: 0.00036 +2026-04-09 20:01:26.069211: train_loss -0.4984 +2026-04-09 20:01:26.083184: val_loss -0.4467 +2026-04-09 20:01:26.086873: Pseudo dice [0.3919, 0.0, 0.8927, 0.8196, 0.6714, 0.6552, 0.8647] +2026-04-09 20:01:26.090616: Epoch time: 103.54 s +2026-04-09 20:01:28.250840: +2026-04-09 20:01:28.252307: Epoch 976 +2026-04-09 20:01:28.253820: Current learning rate: 0.00035 +2026-04-09 20:03:10.748426: train_loss -0.5012 +2026-04-09 20:03:10.759939: val_loss -0.3993 +2026-04-09 20:03:10.762614: 
Pseudo dice [0.5018, 0.0, 0.815, 0.8319, 0.5933, 0.789, 0.932] +2026-04-09 20:03:10.765198: Epoch time: 102.5 s +2026-04-09 20:03:11.865330: +2026-04-09 20:03:11.867309: Epoch 977 +2026-04-09 20:03:11.869791: Current learning rate: 0.00034 +2026-04-09 20:04:54.564955: train_loss -0.4913 +2026-04-09 20:04:54.571484: val_loss -0.414 +2026-04-09 20:04:54.573602: Pseudo dice [0.7169, 0.0, 0.6151, 0.9045, 0.4333, 0.6724, 0.8733] +2026-04-09 20:04:54.576609: Epoch time: 102.7 s +2026-04-09 20:04:55.711823: +2026-04-09 20:04:55.714781: Epoch 978 +2026-04-09 20:04:55.717675: Current learning rate: 0.00032 +2026-04-09 20:06:40.351902: train_loss -0.4955 +2026-04-09 20:06:40.357298: val_loss -0.4334 +2026-04-09 20:06:40.359222: Pseudo dice [0.6997, 0.0, 0.8735, 0.8512, 0.5432, 0.4205, 0.8929] +2026-04-09 20:06:40.361310: Epoch time: 104.64 s +2026-04-09 20:06:41.429521: +2026-04-09 20:06:41.434228: Epoch 979 +2026-04-09 20:06:41.438128: Current learning rate: 0.00031 +2026-04-09 20:08:27.192407: train_loss -0.5121 +2026-04-09 20:08:27.204018: val_loss -0.4277 +2026-04-09 20:08:27.210107: Pseudo dice [0.8143, 0.0, 0.8502, 0.683, 0.5594, 0.6797, 0.9294] +2026-04-09 20:08:27.219691: Epoch time: 105.77 s +2026-04-09 20:08:28.319599: +2026-04-09 20:08:28.325757: Epoch 980 +2026-04-09 20:08:28.330221: Current learning rate: 0.0003 +2026-04-09 20:10:11.311420: train_loss -0.5041 +2026-04-09 20:10:11.322814: val_loss -0.4206 +2026-04-09 20:10:11.327084: Pseudo dice [0.7971, 0.0, 0.8752, 0.8254, 0.4505, 0.7105, 0.8192] +2026-04-09 20:10:11.331077: Epoch time: 102.99 s +2026-04-09 20:10:12.404834: +2026-04-09 20:10:12.408776: Epoch 981 +2026-04-09 20:10:12.413371: Current learning rate: 0.00028 +2026-04-09 20:11:56.898131: train_loss -0.4873 +2026-04-09 20:11:56.912156: val_loss -0.4042 +2026-04-09 20:11:56.916635: Pseudo dice [0.7586, 0.0, 0.2234, 0.8256, 0.4659, 0.8216, 0.9232] +2026-04-09 20:11:56.921082: Epoch time: 104.5 s +2026-04-09 20:11:58.020931: +2026-04-09 20:11:58.025371: 
Epoch 982 +2026-04-09 20:11:58.028847: Current learning rate: 0.00027 +2026-04-09 20:13:41.244656: train_loss -0.4906 +2026-04-09 20:13:41.253001: val_loss -0.4186 +2026-04-09 20:13:41.255880: Pseudo dice [0.3896, 0.0, 0.7635, 0.6951, 0.6348, 0.8888, 0.9269] +2026-04-09 20:13:41.258558: Epoch time: 103.23 s +2026-04-09 20:13:42.361329: +2026-04-09 20:13:42.364169: Epoch 983 +2026-04-09 20:13:42.367186: Current learning rate: 0.00026 +2026-04-09 20:15:26.020228: train_loss -0.4884 +2026-04-09 20:15:26.028800: val_loss -0.4166 +2026-04-09 20:15:26.031219: Pseudo dice [0.5004, 0.0, 0.8909, 0.3244, 0.6438, 0.885, 0.9027] +2026-04-09 20:15:26.035279: Epoch time: 103.66 s +2026-04-09 20:15:27.130235: +2026-04-09 20:15:27.133593: Epoch 984 +2026-04-09 20:15:27.135638: Current learning rate: 0.00024 +2026-04-09 20:17:09.385577: train_loss -0.4969 +2026-04-09 20:17:09.391274: val_loss -0.4471 +2026-04-09 20:17:09.393002: Pseudo dice [0.8763, 0.0, 0.9014, 0.8364, 0.5711, 0.8831, 0.9511] +2026-04-09 20:17:09.395501: Epoch time: 102.26 s +2026-04-09 20:17:10.490720: +2026-04-09 20:17:10.492985: Epoch 985 +2026-04-09 20:17:10.494777: Current learning rate: 0.00023 +2026-04-09 20:18:52.967932: train_loss -0.4862 +2026-04-09 20:18:52.972607: val_loss -0.4233 +2026-04-09 20:18:52.974771: Pseudo dice [0.4349, 0.0, 0.8987, 0.7746, 0.5738, 0.8491, 0.9289] +2026-04-09 20:18:52.976655: Epoch time: 102.48 s +2026-04-09 20:18:54.047969: +2026-04-09 20:18:54.050398: Epoch 986 +2026-04-09 20:18:54.052455: Current learning rate: 0.00021 +2026-04-09 20:20:36.072005: train_loss -0.5031 +2026-04-09 20:20:36.077895: val_loss -0.4375 +2026-04-09 20:20:36.081335: Pseudo dice [0.7024, 0.0, 0.8021, 0.8561, 0.6044, 0.8416, 0.8529] +2026-04-09 20:20:36.083862: Epoch time: 102.03 s +2026-04-09 20:20:37.163680: +2026-04-09 20:20:37.166220: Epoch 987 +2026-04-09 20:20:37.168484: Current learning rate: 0.0002 +2026-04-09 20:22:20.166697: train_loss -0.5031 +2026-04-09 20:22:20.172333: val_loss -0.3874 
+2026-04-09 20:22:20.174627: Pseudo dice [0.6869, 0.0, 0.7768, 0.5128, 0.3554, 0.6186, 0.9415] +2026-04-09 20:22:20.176395: Epoch time: 103.01 s +2026-04-09 20:22:21.275840: +2026-04-09 20:22:21.277842: Epoch 988 +2026-04-09 20:22:21.279545: Current learning rate: 0.00019 +2026-04-09 20:24:03.838591: train_loss -0.5003 +2026-04-09 20:24:03.846761: val_loss -0.3885 +2026-04-09 20:24:03.850186: Pseudo dice [0.7078, 0.0, 0.8412, 0.7864, 0.5207, 0.6884, 0.8064] +2026-04-09 20:24:03.852162: Epoch time: 102.57 s +2026-04-09 20:24:04.943388: +2026-04-09 20:24:04.945020: Epoch 989 +2026-04-09 20:24:04.946530: Current learning rate: 0.00017 +2026-04-09 20:25:46.557118: train_loss -0.4963 +2026-04-09 20:25:46.563778: val_loss -0.442 +2026-04-09 20:25:46.566462: Pseudo dice [0.5124, 0.0, 0.8939, 0.7928, 0.6332, 0.7504, 0.9518] +2026-04-09 20:25:46.568810: Epoch time: 101.62 s +2026-04-09 20:25:47.654783: +2026-04-09 20:25:47.656626: Epoch 990 +2026-04-09 20:25:47.658345: Current learning rate: 0.00016 +2026-04-09 20:27:29.773943: train_loss -0.4996 +2026-04-09 20:27:29.778209: val_loss -0.4088 +2026-04-09 20:27:29.779946: Pseudo dice [0.6531, 0.0, 0.5824, 0.7985, 0.4251, 0.8343, 0.9499] +2026-04-09 20:27:29.781361: Epoch time: 102.12 s +2026-04-09 20:27:30.865399: +2026-04-09 20:27:30.867292: Epoch 991 +2026-04-09 20:27:30.869376: Current learning rate: 0.00014 +2026-04-09 20:29:13.973457: train_loss -0.4887 +2026-04-09 20:29:13.979803: val_loss -0.4131 +2026-04-09 20:29:13.981621: Pseudo dice [0.2663, 0.0, 0.8581, 0.8032, 0.5615, 0.6003, 0.872] +2026-04-09 20:29:13.983517: Epoch time: 103.11 s +2026-04-09 20:29:15.063027: +2026-04-09 20:29:15.064596: Epoch 992 +2026-04-09 20:29:15.066341: Current learning rate: 0.00013 +2026-04-09 20:30:56.923945: train_loss -0.4992 +2026-04-09 20:30:56.930548: val_loss -0.412 +2026-04-09 20:30:56.932184: Pseudo dice [0.4676, 0.0, 0.8157, 0.4607, 0.6421, 0.8713, 0.8154] +2026-04-09 20:30:56.933956: Epoch time: 101.86 s +2026-04-09 
20:30:58.012598: +2026-04-09 20:30:58.013969: Epoch 993 +2026-04-09 20:30:58.015309: Current learning rate: 0.00011 +2026-04-09 20:32:39.980160: train_loss -0.5129 +2026-04-09 20:32:39.989414: val_loss -0.413 +2026-04-09 20:32:39.991967: Pseudo dice [0.7246, 0.0, 0.7797, 0.8401, 0.6314, 0.8491, 0.8623] +2026-04-09 20:32:39.994191: Epoch time: 101.97 s +2026-04-09 20:32:41.122280: +2026-04-09 20:32:41.123849: Epoch 994 +2026-04-09 20:32:41.125354: Current learning rate: 0.0001 +2026-04-09 20:34:22.957101: train_loss -0.5064 +2026-04-09 20:34:22.962115: val_loss -0.4327 +2026-04-09 20:34:22.963726: Pseudo dice [0.5801, 0.0, 0.7894, 0.7993, 0.6462, 0.9274, 0.9044] +2026-04-09 20:34:22.965350: Epoch time: 101.84 s +2026-04-09 20:34:24.051425: +2026-04-09 20:34:24.054603: Epoch 995 +2026-04-09 20:34:24.058013: Current learning rate: 8e-05 +2026-04-09 20:36:06.070252: train_loss -0.5035 +2026-04-09 20:36:06.075381: val_loss -0.4187 +2026-04-09 20:36:06.078140: Pseudo dice [0.8359, 0.0, 0.8498, 0.9027, 0.4431, 0.914, 0.9008] +2026-04-09 20:36:06.080651: Epoch time: 102.02 s +2026-04-09 20:36:07.220170: +2026-04-09 20:36:07.222264: Epoch 996 +2026-04-09 20:36:07.224396: Current learning rate: 7e-05 +2026-04-09 20:37:50.022504: train_loss -0.4916 +2026-04-09 20:37:50.027687: val_loss -0.4481 +2026-04-09 20:37:50.030096: Pseudo dice [0.4285, 0.0, 0.8844, 0.8663, 0.6294, 0.7689, 0.8122] +2026-04-09 20:37:50.031853: Epoch time: 102.81 s +2026-04-09 20:37:51.122558: +2026-04-09 20:37:51.124729: Epoch 997 +2026-04-09 20:37:51.126213: Current learning rate: 5e-05 +2026-04-09 20:39:32.805717: train_loss -0.4955 +2026-04-09 20:39:32.813194: val_loss -0.434 +2026-04-09 20:39:32.815284: Pseudo dice [0.6226, 0.0, 0.8995, 0.898, 0.4473, 0.7596, 0.7139] +2026-04-09 20:39:32.817704: Epoch time: 101.69 s +2026-04-09 20:39:33.961487: +2026-04-09 20:39:33.963641: Epoch 998 +2026-04-09 20:39:33.965725: Current learning rate: 4e-05 +2026-04-09 20:41:16.123682: train_loss -0.5088 +2026-04-09 
20:41:16.132566: val_loss -0.4259 +2026-04-09 20:41:16.135487: Pseudo dice [0.6879, 0.0, 0.8613, 0.5986, 0.4751, 0.868, 0.9021] +2026-04-09 20:41:16.140277: Epoch time: 102.17 s +2026-04-09 20:41:17.228348: +2026-04-09 20:41:17.230531: Epoch 999 +2026-04-09 20:41:17.232704: Current learning rate: 2e-05 +2026-04-09 20:42:58.928984: train_loss -0.5122 +2026-04-09 20:42:58.934038: val_loss -0.4529 +2026-04-09 20:42:58.936230: Pseudo dice [0.7979, 0.0, 0.9125, 0.889, 0.3987, 0.9119, 0.9204] +2026-04-09 20:42:58.938122: Epoch time: 101.7 s +2026-04-09 20:43:01.815295: Training done. +2026-04-09 20:43:02.103985: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-09 20:43:02.107848: The split file contains 5 splits. +2026-04-09 20:43:02.109333: Desired fold for training: 2 +2026-04-09 20:43:02.110822: This split has 387 training and 97 validation cases. +2026-04-09 20:43:02.112888: predicting MSWAL_0014 +2026-04-09 20:43:02.121178: MSWAL_0014, shape torch.Size([1, 165, 491, 491]), rank 0 +2026-04-09 20:43:49.715386: predicting MSWAL_0015 +2026-04-09 20:43:49.730164: MSWAL_0015, shape torch.Size([1, 185, 536, 536]), rank 0 +2026-04-09 20:44:11.713019: predicting MSWAL_0022 +2026-04-09 20:44:11.729624: MSWAL_0022, shape torch.Size([1, 205, 549, 549]), rank 0 +2026-04-09 20:44:33.845959: predicting MSWAL_0024 +2026-04-09 20:44:33.871392: MSWAL_0024, shape torch.Size([1, 196, 507, 507]), rank 0 +2026-04-09 20:44:46.640489: predicting MSWAL_0026 +2026-04-09 20:44:46.654646: MSWAL_0026, shape torch.Size([1, 253, 507, 507]), rank 0 +2026-04-09 20:45:03.726678: predicting MSWAL_0033 +2026-04-09 20:45:03.741520: MSWAL_0033, shape torch.Size([1, 165, 507, 507]), rank 0 +2026-04-09 20:45:12.710473: predicting MSWAL_0039 +2026-04-09 20:45:12.735471: MSWAL_0039, shape torch.Size([1, 177, 444, 444]), rank 0 +2026-04-09 20:45:25.567418: predicting MSWAL_0057 +2026-04-09 20:45:25.578621: MSWAL_0057, shape 
torch.Size([1, 205, 535, 535]), rank 0 +2026-04-09 20:45:47.822727: predicting MSWAL_0061 +2026-04-09 20:45:47.844579: MSWAL_0061, shape torch.Size([1, 177, 516, 516]), rank 0 +2026-04-09 20:46:09.826237: predicting MSWAL_0064 +2026-04-09 20:46:09.851449: MSWAL_0064, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 20:46:22.470734: predicting MSWAL_0065 +2026-04-09 20:46:22.491524: MSWAL_0065, shape torch.Size([1, 177, 544, 544]), rank 0 +2026-04-09 20:46:44.516227: predicting MSWAL_0072 +2026-04-09 20:46:44.543172: MSWAL_0072, shape torch.Size([1, 165, 551, 551]), rank 0 +2026-04-09 20:46:59.519233: predicting MSWAL_0082 +2026-04-09 20:46:59.544554: MSWAL_0082, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 20:47:11.993612: predicting MSWAL_0104 +2026-04-09 20:47:12.011869: MSWAL_0104, shape torch.Size([1, 293, 507, 507]), rank 0 +2026-04-09 20:47:33.061620: predicting MSWAL_0129 +2026-04-09 20:47:33.103994: MSWAL_0129, shape torch.Size([1, 157, 507, 507]), rank 0 +2026-04-09 20:47:41.662136: predicting MSWAL_0138 +2026-04-09 20:47:41.672451: MSWAL_0138, shape torch.Size([1, 462, 589, 589]), rank 0 +2026-04-09 20:48:40.828193: predicting MSWAL_0149 +2026-04-09 20:48:40.863308: MSWAL_0149, shape torch.Size([1, 259, 507, 507]), rank 0 +2026-04-09 20:48:57.970402: predicting MSWAL_0170 +2026-04-09 20:48:57.988782: MSWAL_0170, shape torch.Size([1, 286, 511, 511]), rank 0 +2026-04-09 20:49:19.070962: predicting MSWAL_0173 +2026-04-09 20:49:19.112986: MSWAL_0173, shape torch.Size([1, 421, 580, 580]), rank 0 +2026-04-09 20:50:10.831159: predicting MSWAL_0175 +2026-04-09 20:50:10.864231: MSWAL_0175, shape torch.Size([1, 402, 572, 572]), rank 0 +2026-04-09 20:51:02.313677: predicting MSWAL_0176 +2026-04-09 20:51:02.341460: MSWAL_0176, shape torch.Size([1, 469, 648, 648]), rank 0 +2026-04-09 20:52:33.976713: predicting MSWAL_0180 +2026-04-09 20:52:34.022302: MSWAL_0180, shape torch.Size([1, 284, 543, 543]), rank 0 +2026-04-09 20:53:10.954731: predicting 
MSWAL_0193 +2026-04-09 20:53:10.968415: MSWAL_0193, shape torch.Size([1, 350, 537, 537]), rank 0 +2026-04-09 20:53:54.962288: predicting MSWAL_0201 +2026-04-09 20:53:54.999951: MSWAL_0201, shape torch.Size([1, 151, 545, 545]), rank 0 +2026-04-09 20:54:10.037725: predicting MSWAL_0202 +2026-04-09 20:54:10.060606: MSWAL_0202, shape torch.Size([1, 261, 507, 507]), rank 0 +2026-04-09 20:54:27.006611: predicting MSWAL_0208 +2026-04-09 20:54:27.032576: MSWAL_0208, shape torch.Size([1, 318, 580, 580]), rank 0 +2026-04-09 20:55:03.875910: predicting MSWAL_0214 +2026-04-09 20:55:03.897780: MSWAL_0214, shape torch.Size([1, 422, 527, 527]), rank 0 +2026-04-09 20:55:55.323673: predicting MSWAL_0218 +2026-04-09 20:55:55.348465: MSWAL_0218, shape torch.Size([1, 201, 507, 507]), rank 0 +2026-04-09 20:56:08.187974: predicting MSWAL_0221 +2026-04-09 20:56:08.209266: MSWAL_0221, shape torch.Size([1, 305, 507, 507]), rank 0 +2026-04-09 20:56:29.399635: predicting MSWAL_0241 +2026-04-09 20:56:29.423477: MSWAL_0241, shape torch.Size([1, 316, 547, 547]), rank 0 +2026-04-09 20:57:06.192640: predicting MSWAL_0247 +2026-04-09 20:57:06.220225: MSWAL_0247, shape torch.Size([1, 498, 559, 559]), rank 0 +2026-04-09 20:58:05.289366: predicting MSWAL_0252 +2026-04-09 20:58:05.342114: MSWAL_0252, shape torch.Size([1, 542, 569, 569]), rank 0 +2026-04-09 20:59:11.790012: predicting MSWAL_0253 +2026-04-09 20:59:11.839003: MSWAL_0253, shape torch.Size([1, 160, 579, 579]), rank 0 +2026-04-09 20:59:26.947397: predicting MSWAL_0256 +2026-04-09 20:59:26.959062: MSWAL_0256, shape torch.Size([1, 174, 496, 496]), rank 0 +2026-04-09 20:59:40.213342: predicting MSWAL_0258 +2026-04-09 20:59:40.227873: MSWAL_0258, shape torch.Size([1, 307, 496, 496]), rank 0 +2026-04-09 21:00:01.285410: predicting MSWAL_0267 +2026-04-09 21:00:01.325605: MSWAL_0267, shape torch.Size([1, 342, 565, 565]), rank 0 +2026-04-09 21:00:45.947349: predicting MSWAL_0277 +2026-04-09 21:00:45.978024: MSWAL_0277, shape torch.Size([1, 355, 
572, 572]), rank 0 +2026-04-09 21:01:30.242691: predicting MSWAL_0285 +2026-04-09 21:01:30.285139: MSWAL_0285, shape torch.Size([1, 197, 480, 480]), rank 0 +2026-04-09 21:01:42.822472: predicting MSWAL_0301 +2026-04-09 21:01:42.842207: MSWAL_0301, shape torch.Size([1, 373, 565, 565]), rank 0 +2026-04-09 21:02:27.032072: predicting MSWAL_0307 +2026-04-09 21:02:27.067572: MSWAL_0307, shape torch.Size([1, 189, 507, 507]), rank 0 +2026-04-09 21:02:39.835230: predicting MSWAL_0320 +2026-04-09 21:02:39.852583: MSWAL_0320, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:02:52.780772: predicting MSWAL_0341 +2026-04-09 21:02:52.803652: MSWAL_0341, shape torch.Size([1, 437, 605, 605]), rank 0 +2026-04-09 21:03:44.730064: predicting MSWAL_0343 +2026-04-09 21:03:44.776817: MSWAL_0343, shape torch.Size([1, 360, 581, 581]), rank 0 +2026-04-09 21:04:29.397158: predicting MSWAL_0348 +2026-04-09 21:04:29.442578: MSWAL_0348, shape torch.Size([1, 357, 507, 507]), rank 0 +2026-04-09 21:04:54.761932: predicting MSWAL_0360 +2026-04-09 21:04:54.791275: MSWAL_0360, shape torch.Size([1, 377, 580, 580]), rank 0 +2026-04-09 21:05:39.284640: predicting MSWAL_0370 +2026-04-09 21:05:39.311213: MSWAL_0370, shape torch.Size([1, 357, 597, 597]), rank 0 +2026-04-09 21:06:24.079040: predicting MSWAL_0374 +2026-04-09 21:06:24.108935: MSWAL_0374, shape torch.Size([1, 297, 529, 529]), rank 0 +2026-04-09 21:07:01.039928: predicting MSWAL_0379 +2026-04-09 21:07:01.084689: MSWAL_0379, shape torch.Size([1, 415, 549, 549]), rank 0 +2026-04-09 21:07:52.793449: predicting MSWAL_0387 +2026-04-09 21:07:52.824283: MSWAL_0387, shape torch.Size([1, 217, 536, 536]), rank 0 +2026-04-09 21:08:15.147045: predicting MSWAL_0392 +2026-04-09 21:08:15.174253: MSWAL_0392, shape torch.Size([1, 295, 511, 511]), rank 0 +2026-04-09 21:08:36.260515: predicting MSWAL_0397 +2026-04-09 21:08:36.279341: MSWAL_0397, shape torch.Size([1, 325, 507, 507]), rank 0 +2026-04-09 21:08:57.709694: predicting MSWAL_0398 +2026-04-09 
21:08:57.733769: MSWAL_0398, shape torch.Size([1, 295, 504, 504]), rank 0 +2026-04-09 21:09:19.136763: predicting MSWAL_0400 +2026-04-09 21:09:19.166242: MSWAL_0400, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:09:32.327371: predicting MSWAL_0426 +2026-04-09 21:09:32.341989: MSWAL_0426, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:09:45.251151: predicting MSWAL_0427 +2026-04-09 21:09:45.264138: MSWAL_0427, shape torch.Size([1, 309, 543, 543]), rank 0 +2026-04-09 21:10:22.439821: predicting MSWAL_0435 +2026-04-09 21:10:22.475657: MSWAL_0435, shape torch.Size([1, 229, 603, 603]), rank 0 +2026-04-09 21:10:52.332299: predicting MSWAL_0437 +2026-04-09 21:10:52.360558: MSWAL_0437, shape torch.Size([1, 361, 507, 507]), rank 0 +2026-04-09 21:11:17.397835: predicting MSWAL_0438 +2026-04-09 21:11:17.428479: MSWAL_0438, shape torch.Size([1, 325, 507, 507]), rank 0 +2026-04-09 21:11:38.780907: predicting MSWAL_0440 +2026-04-09 21:11:38.809101: MSWAL_0440, shape torch.Size([1, 186, 507, 507]), rank 0 +2026-04-09 21:11:51.800545: predicting MSWAL_0464 +2026-04-09 21:11:51.814118: MSWAL_0464, shape torch.Size([1, 261, 507, 507]), rank 0 +2026-04-09 21:12:08.888658: predicting MSWAL_0470 +2026-04-09 21:12:08.912062: MSWAL_0470, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:12:21.600480: predicting MSWAL_0471 +2026-04-09 21:12:21.620789: MSWAL_0471, shape torch.Size([1, 209, 583, 583]), rank 0 +2026-04-09 21:12:44.536673: predicting MSWAL_0474 +2026-04-09 21:12:44.568064: MSWAL_0474, shape torch.Size([1, 177, 583, 583]), rank 0 +2026-04-09 21:13:06.992692: predicting MSWAL_0477 +2026-04-09 21:13:07.018028: MSWAL_0477, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:13:20.018790: predicting MSWAL_0482 +2026-04-09 21:13:20.038229: MSWAL_0482, shape torch.Size([1, 257, 507, 507]), rank 0 +2026-04-09 21:13:36.840688: predicting MSWAL_0485 +2026-04-09 21:13:36.860751: MSWAL_0485, shape torch.Size([1, 229, 524, 524]), rank 0 
+2026-04-09 21:14:06.325560: predicting MSWAL_0488 +2026-04-09 21:14:06.351949: MSWAL_0488, shape torch.Size([1, 213, 521, 521]), rank 0 +2026-04-09 21:14:28.575341: predicting MSWAL_0490 +2026-04-09 21:14:28.602858: MSWAL_0490, shape torch.Size([1, 177, 575, 575]), rank 0 +2026-04-09 21:14:50.816561: predicting MSWAL_0491 +2026-04-09 21:14:50.845891: MSWAL_0491, shape torch.Size([1, 197, 507, 507]), rank 0 +2026-04-09 21:15:03.575205: predicting MSWAL_0506 +2026-04-09 21:15:03.612779: MSWAL_0506, shape torch.Size([1, 337, 507, 507]), rank 0 +2026-04-09 21:15:29.042125: predicting MSWAL_0522 +2026-04-09 21:15:29.080713: MSWAL_0522, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:15:41.859711: predicting MSWAL_0523 +2026-04-09 21:15:41.882553: MSWAL_0523, shape torch.Size([1, 177, 540, 540]), rank 0 +2026-04-09 21:16:04.081516: predicting MSWAL_0559 +2026-04-09 21:16:04.104700: MSWAL_0559, shape torch.Size([1, 316, 589, 589]), rank 0 +2026-04-09 21:16:41.177467: predicting MSWAL_0562 +2026-04-09 21:16:41.204432: MSWAL_0562, shape torch.Size([1, 329, 625, 625]), rank 0 +2026-04-09 21:17:18.490665: predicting MSWAL_0564 +2026-04-09 21:17:18.531761: MSWAL_0564, shape torch.Size([1, 369, 651, 651]), rank 0 +2026-04-09 21:18:27.801709: predicting MSWAL_0577 +2026-04-09 21:18:27.839007: MSWAL_0577, shape torch.Size([1, 332, 583, 583]), rank 0 +2026-04-09 21:19:05.014149: predicting MSWAL_0582 +2026-04-09 21:19:05.051976: MSWAL_0582, shape torch.Size([1, 326, 520, 520]), rank 0 +2026-04-09 21:19:42.067117: predicting MSWAL_0590 +2026-04-09 21:19:42.090591: MSWAL_0590, shape torch.Size([1, 346, 584, 584]), rank 0 +2026-04-09 21:20:26.526131: predicting MSWAL_0592 +2026-04-09 21:20:26.568911: MSWAL_0592, shape torch.Size([1, 201, 540, 540]), rank 0 +2026-04-09 21:20:48.923165: predicting MSWAL_0595 +2026-04-09 21:20:48.945691: MSWAL_0595, shape torch.Size([1, 174, 480, 480]), rank 0 +2026-04-09 21:21:01.535923: predicting MSWAL_0596 +2026-04-09 21:21:01.597451: 
MSWAL_0596, shape torch.Size([1, 210, 480, 480]), rank 0 +2026-04-09 21:21:14.298949: predicting MSWAL_0597 +2026-04-09 21:21:14.312096: MSWAL_0597, shape torch.Size([1, 336, 579, 579]), rank 0 +2026-04-09 21:21:51.476729: predicting MSWAL_0599 +2026-04-09 21:21:51.513585: MSWAL_0599, shape torch.Size([1, 306, 480, 480]), rank 0 +2026-04-09 21:22:12.573997: predicting MSWAL_0616 +2026-04-09 21:22:12.594192: MSWAL_0616, shape torch.Size([1, 300, 553, 553]), rank 0 +2026-04-09 21:22:49.462396: predicting MSWAL_0623 +2026-04-09 21:22:49.481804: MSWAL_0623, shape torch.Size([1, 316, 467, 467]), rank 0 +2026-04-09 21:23:10.436313: predicting MSWAL_0640 +2026-04-09 21:23:10.456239: MSWAL_0640, shape torch.Size([1, 322, 551, 551]), rank 0 +2026-04-09 21:23:47.576589: predicting MSWAL_0644 +2026-04-09 21:23:47.595590: MSWAL_0644, shape torch.Size([1, 320, 545, 545]), rank 0 +2026-04-09 21:24:24.693629: predicting MSWAL_0646 +2026-04-09 21:24:24.725426: MSWAL_0646, shape torch.Size([1, 409, 579, 579]), rank 0 +2026-04-09 21:25:16.346973: predicting MSWAL_0648 +2026-04-09 21:25:16.375701: MSWAL_0648, shape torch.Size([1, 277, 549, 549]), rank 0 +2026-04-09 21:25:46.012676: predicting MSWAL_0653 +2026-04-09 21:25:46.041017: MSWAL_0653, shape torch.Size([1, 277, 507, 507]), rank 0 +2026-04-09 21:26:03.117139: predicting MSWAL_0660 +2026-04-09 21:26:03.136678: MSWAL_0660, shape torch.Size([1, 405, 511, 511]), rank 0 +2026-04-09 21:26:32.791754: predicting MSWAL_0666 +2026-04-09 21:26:32.816257: MSWAL_0666, shape torch.Size([1, 325, 424, 424]), rank 0 +2026-04-09 21:26:53.693880: predicting MSWAL_0668 +2026-04-09 21:26:53.714711: MSWAL_0668, shape torch.Size([1, 331, 507, 507]), rank 0 +2026-04-09 21:27:14.987580: predicting MSWAL_0676 +2026-04-09 21:27:15.017556: MSWAL_0676, shape torch.Size([1, 259, 467, 467]), rank 0 +2026-04-09 21:27:31.830625: predicting MSWAL_0686 +2026-04-09 21:27:31.841856: MSWAL_0686, shape torch.Size([1, 300, 624, 624]), rank 0 +2026-04-09 
21:28:08.940454: predicting MSWAL_0687 +2026-04-09 21:28:08.977677: MSWAL_0687, shape torch.Size([1, 340, 499, 499]), rank 0 +2026-04-09 21:28:33.983726: predicting MSWAL_0688 +2026-04-09 21:28:34.012999: MSWAL_0688, shape torch.Size([1, 474, 516, 516]), rank 0 +2026-04-09 21:31:31.845248: Validation complete +2026-04-09 21:31:31.848144: Mean Validation Dice: 0.3974539770955491 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/checkpoint_best.pth b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..4d2879f9ca49b38aabecff22924db79516d33785 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e7fc9629cf1b0e421a136d0b7479e5fdb6941f6fd09e1bf391ccacc71d9b81f7 +size 1129840530 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/checkpoint_final.pth b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/checkpoint_final.pth new file mode 100644 index 0000000000000000000000000000000000000000..9388032937bc40882daec386b0b340b54b4512f7 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/checkpoint_final.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:97b910c3c5abdc2cb793e1b3f1c39ea7cf873ea5d00e65aee2d029835b1ff8c1 +size 1129851670 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/debug.json b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/debug.json new file mode 100644 index 0000000000000000000000000000000000000000..2572dcc0bce43874e4004e65faa9e0841539551b --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/debug.json @@ -0,0 +1,53 @@ +{ + "_best_ema": "None", + 
"batch_size": "2", + "configuration_manager": "{'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}", + "configuration_name": "3d_fullres", + "cudnn_version": 90100, + "current_epoch": "0", + "dataloader_train": "", + "dataloader_train.generator": "", + "dataloader_train.num_processes": "12", + "dataloader_train.transform": "None", + "dataloader_val": "", + "dataloader_val.generator": "", + 
"dataloader_val.num_processes": "6", + "dataloader_val.transform": "None", + "dataset_json": "{'name': 'MSWAL', 'description': ' 3D Multi-class Segmentation of Whole Abdominal Lesions Dataset', 'licence': 'CC BY-NC 4.0', 'relase': 'July 8, 2025', 'tensorImageSize': '3D', 'file_ending': '.nii.gz', 'channel_names': {'0': 'CT'}, 'labels': {'background': 0, 'gallstone': 1, 'kidney stone': 2, 'liver tumor': 3, 'kidney tumor': 4, 'pancreatic cancer': 5, 'liver cyst': 6, 'kidney cyst': 7}, 'numTraining': 484, 'numTest': 210, 'training': [{'image': './imagesTr/MSWAL_0001_0000.nii.gz', 'label': './labelsTr/MSWAL_0001.nii.gz'}, {'image': './imagesTr/MSWAL_0002_0000.nii.gz', 'label': './labelsTr/MSWAL_0002.nii.gz'}, {'image': './imagesTr/MSWAL_0003_0000.nii.gz', 'label': './labelsTr/MSWAL_0003.nii.gz'}, {'image': './imagesTr/MSWAL_0008_0000.nii.gz', 'label': './labelsTr/MSWAL_0008.nii.gz'}, {'image': './imagesTr/MSWAL_0009_0000.nii.gz', 'label': './labelsTr/MSWAL_0009.nii.gz'}, {'image': './imagesTr/MSWAL_0011_0000.nii.gz', 'label': './labelsTr/MSWAL_0011.nii.gz'}, {'image': './imagesTr/MSWAL_0013_0000.nii.gz', 'label': './labelsTr/MSWAL_0013.nii.gz'}, {'image': './imagesTr/MSWAL_0014_0000.nii.gz', 'label': './labelsTr/MSWAL_0014.nii.gz'}, {'image': './imagesTr/MSWAL_0015_0000.nii.gz', 'label': './labelsTr/MSWAL_0015.nii.gz'}, {'image': './imagesTr/MSWAL_0017_0000.nii.gz', 'label': './labelsTr/MSWAL_0017.nii.gz'}, {'image': './imagesTr/MSWAL_0018_0000.nii.gz', 'label': './labelsTr/MSWAL_0018.nii.gz'}, {'image': './imagesTr/MSWAL_0020_0000.nii.gz', 'label': './labelsTr/MSWAL_0020.nii.gz'}, {'image': './imagesTr/MSWAL_0021_0000.nii.gz', 'label': './labelsTr/MSWAL_0021.nii.gz'}, {'image': './imagesTr/MSWAL_0022_0000.nii.gz', 'label': './labelsTr/MSWAL_0022.nii.gz'}, {'image': './imagesTr/MSWAL_0024_0000.nii.gz', 'label': './labelsTr/MSWAL_0024.nii.gz'}, {'image': './imagesTr/MSWAL_0026_0000.nii.gz', 'label': './labelsTr/MSWAL_0026.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0027_0000.nii.gz', 'label': './labelsTr/MSWAL_0027.nii.gz'}, {'image': './imagesTr/MSWAL_0028_0000.nii.gz', 'label': './labelsTr/MSWAL_0028.nii.gz'}, {'image': './imagesTr/MSWAL_0029_0000.nii.gz', 'label': './labelsTr/MSWAL_0029.nii.gz'}, {'image': './imagesTr/MSWAL_0031_0000.nii.gz', 'label': './labelsTr/MSWAL_0031.nii.gz'}, {'image': './imagesTr/MSWAL_0032_0000.nii.gz', 'label': './labelsTr/MSWAL_0032.nii.gz'}, {'image': './imagesTr/MSWAL_0033_0000.nii.gz', 'label': './labelsTr/MSWAL_0033.nii.gz'}, {'image': './imagesTr/MSWAL_0034_0000.nii.gz', 'label': './labelsTr/MSWAL_0034.nii.gz'}, {'image': './imagesTr/MSWAL_0035_0000.nii.gz', 'label': './labelsTr/MSWAL_0035.nii.gz'}, {'image': './imagesTr/MSWAL_0037_0000.nii.gz', 'label': './labelsTr/MSWAL_0037.nii.gz'}, {'image': './imagesTr/MSWAL_0038_0000.nii.gz', 'label': './labelsTr/MSWAL_0038.nii.gz'}, {'image': './imagesTr/MSWAL_0039_0000.nii.gz', 'label': './labelsTr/MSWAL_0039.nii.gz'}, {'image': './imagesTr/MSWAL_0040_0000.nii.gz', 'label': './labelsTr/MSWAL_0040.nii.gz'}, {'image': './imagesTr/MSWAL_0041_0000.nii.gz', 'label': './labelsTr/MSWAL_0041.nii.gz'}, {'image': './imagesTr/MSWAL_0042_0000.nii.gz', 'label': './labelsTr/MSWAL_0042.nii.gz'}, {'image': './imagesTr/MSWAL_0045_0000.nii.gz', 'label': './labelsTr/MSWAL_0045.nii.gz'}, {'image': './imagesTr/MSWAL_0046_0000.nii.gz', 'label': './labelsTr/MSWAL_0046.nii.gz'}, {'image': './imagesTr/MSWAL_0049_0000.nii.gz', 'label': './labelsTr/MSWAL_0049.nii.gz'}, {'image': './imagesTr/MSWAL_0050_0000.nii.gz', 'label': './labelsTr/MSWAL_0050.nii.gz'}, {'image': './imagesTr/MSWAL_0051_0000.nii.gz', 'label': './labelsTr/MSWAL_0051.nii.gz'}, {'image': './imagesTr/MSWAL_0052_0000.nii.gz', 'label': './labelsTr/MSWAL_0052.nii.gz'}, {'image': './imagesTr/MSWAL_0054_0000.nii.gz', 'label': './labelsTr/MSWAL_0054.nii.gz'}, {'image': './imagesTr/MSWAL_0055_0000.nii.gz', 'label': './labelsTr/MSWAL_0055.nii.gz'}, {'image': './imagesTr/MSWAL_0056_0000.nii.gz', 
'label': './labelsTr/MSWAL_0056.nii.gz'}, {'image': './imagesTr/MSWAL_0057_0000.nii.gz', 'label': './labelsTr/MSWAL_0057.nii.gz'}, {'image': './imagesTr/MSWAL_0059_0000.nii.gz', 'label': './labelsTr/MSWAL_0059.nii.gz'}, {'image': './imagesTr/MSWAL_0060_0000.nii.gz', 'label': './labelsTr/MSWAL_0060.nii.gz'}, {'image': './imagesTr/MSWAL_0061_0000.nii.gz', 'label': './labelsTr/MSWAL_0061.nii.gz'}, {'image': './imagesTr/MSWAL_0063_0000.nii.gz', 'label': './labelsTr/MSWAL_0063.nii.gz'}, {'image': './imagesTr/MSWAL_0064_0000.nii.gz', 'label': './labelsTr/MSWAL_0064.nii.gz'}, {'image': './imagesTr/MSWAL_0065_0000.nii.gz', 'label': './labelsTr/MSWAL_0065.nii.gz'}, {'image': './imagesTr/MSWAL_0066_0000.nii.gz', 'label': './labelsTr/MSWAL_0066.nii.gz'}, {'image': './imagesTr/MSWAL_0067_0000.nii.gz', 'label': './labelsTr/MSWAL_0067.nii.gz'}, {'image': './imagesTr/MSWAL_0069_0000.nii.gz', 'label': './labelsTr/MSWAL_0069.nii.gz'}, {'image': './imagesTr/MSWAL_0072_0000.nii.gz', 'label': './labelsTr/MSWAL_0072.nii.gz'}, {'image': './imagesTr/MSWAL_0075_0000.nii.gz', 'label': './labelsTr/MSWAL_0075.nii.gz'}, {'image': './imagesTr/MSWAL_0077_0000.nii.gz', 'label': './labelsTr/MSWAL_0077.nii.gz'}, {'image': './imagesTr/MSWAL_0080_0000.nii.gz', 'label': './labelsTr/MSWAL_0080.nii.gz'}, {'image': './imagesTr/MSWAL_0082_0000.nii.gz', 'label': './labelsTr/MSWAL_0082.nii.gz'}, {'image': './imagesTr/MSWAL_0083_0000.nii.gz', 'label': './labelsTr/MSWAL_0083.nii.gz'}, {'image': './imagesTr/MSWAL_0084_0000.nii.gz', 'label': './labelsTr/MSWAL_0084.nii.gz'}, {'image': './imagesTr/MSWAL_0085_0000.nii.gz', 'label': './labelsTr/MSWAL_0085.nii.gz'}, {'image': './imagesTr/MSWAL_0086_0000.nii.gz', 'label': './labelsTr/MSWAL_0086.nii.gz'}, {'image': './imagesTr/MSWAL_0088_0000.nii.gz', 'label': './labelsTr/MSWAL_0088.nii.gz'}, {'image': './imagesTr/MSWAL_0089_0000.nii.gz', 'label': './labelsTr/MSWAL_0089.nii.gz'}, {'image': './imagesTr/MSWAL_0092_0000.nii.gz', 'label': './labelsTr/MSWAL_0092.nii.gz'}, 
{'image': './imagesTr/MSWAL_0093_0000.nii.gz', 'label': './labelsTr/MSWAL_0093.nii.gz'}, {'image': './imagesTr/MSWAL_0094_0000.nii.gz', 'label': './labelsTr/MSWAL_0094.nii.gz'}, {'image': './imagesTr/MSWAL_0095_0000.nii.gz', 'label': './labelsTr/MSWAL_0095.nii.gz'}, {'image': './imagesTr/MSWAL_0096_0000.nii.gz', 'label': './labelsTr/MSWAL_0096.nii.gz'}, {'image': './imagesTr/MSWAL_0098_0000.nii.gz', 'label': './labelsTr/MSWAL_0098.nii.gz'}, {'image': './imagesTr/MSWAL_0099_0000.nii.gz', 'label': './labelsTr/MSWAL_0099.nii.gz'}, {'image': './imagesTr/MSWAL_0101_0000.nii.gz', 'label': './labelsTr/MSWAL_0101.nii.gz'}, {'image': './imagesTr/MSWAL_0102_0000.nii.gz', 'label': './labelsTr/MSWAL_0102.nii.gz'}, {'image': './imagesTr/MSWAL_0103_0000.nii.gz', 'label': './labelsTr/MSWAL_0103.nii.gz'}, {'image': './imagesTr/MSWAL_0104_0000.nii.gz', 'label': './labelsTr/MSWAL_0104.nii.gz'}, {'image': './imagesTr/MSWAL_0105_0000.nii.gz', 'label': './labelsTr/MSWAL_0105.nii.gz'}, {'image': './imagesTr/MSWAL_0106_0000.nii.gz', 'label': './labelsTr/MSWAL_0106.nii.gz'}, {'image': './imagesTr/MSWAL_0108_0000.nii.gz', 'label': './labelsTr/MSWAL_0108.nii.gz'}, {'image': './imagesTr/MSWAL_0109_0000.nii.gz', 'label': './labelsTr/MSWAL_0109.nii.gz'}, {'image': './imagesTr/MSWAL_0110_0000.nii.gz', 'label': './labelsTr/MSWAL_0110.nii.gz'}, {'image': './imagesTr/MSWAL_0111_0000.nii.gz', 'label': './labelsTr/MSWAL_0111.nii.gz'}, {'image': './imagesTr/MSWAL_0112_0000.nii.gz', 'label': './labelsTr/MSWAL_0112.nii.gz'}, {'image': './imagesTr/MSWAL_0113_0000.nii.gz', 'label': './labelsTr/MSWAL_0113.nii.gz'}, {'image': './imagesTr/MSWAL_0114_0000.nii.gz', 'label': './labelsTr/MSWAL_0114.nii.gz'}, {'image': './imagesTr/MSWAL_0117_0000.nii.gz', 'label': './labelsTr/MSWAL_0117.nii.gz'}, {'image': './imagesTr/MSWAL_0119_0000.nii.gz', 'label': './labelsTr/MSWAL_0119.nii.gz'}, {'image': './imagesTr/MSWAL_0120_0000.nii.gz', 'label': './labelsTr/MSWAL_0120.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0122_0000.nii.gz', 'label': './labelsTr/MSWAL_0122.nii.gz'}, {'image': './imagesTr/MSWAL_0124_0000.nii.gz', 'label': './labelsTr/MSWAL_0124.nii.gz'}, {'image': './imagesTr/MSWAL_0125_0000.nii.gz', 'label': './labelsTr/MSWAL_0125.nii.gz'}, {'image': './imagesTr/MSWAL_0126_0000.nii.gz', 'label': './labelsTr/MSWAL_0126.nii.gz'}, {'image': './imagesTr/MSWAL_0127_0000.nii.gz', 'label': './labelsTr/MSWAL_0127.nii.gz'}, {'image': './imagesTr/MSWAL_0128_0000.nii.gz', 'label': './labelsTr/MSWAL_0128.nii.gz'}, {'image': './imagesTr/MSWAL_0129_0000.nii.gz', 'label': './labelsTr/MSWAL_0129.nii.gz'}, {'image': './imagesTr/MSWAL_0130_0000.nii.gz', 'label': './labelsTr/MSWAL_0130.nii.gz'}, {'image': './imagesTr/MSWAL_0132_0000.nii.gz', 'label': './labelsTr/MSWAL_0132.nii.gz'}, {'image': './imagesTr/MSWAL_0133_0000.nii.gz', 'label': './labelsTr/MSWAL_0133.nii.gz'}, {'image': './imagesTr/MSWAL_0134_0000.nii.gz', 'label': './labelsTr/MSWAL_0134.nii.gz'}, {'image': './imagesTr/MSWAL_0136_0000.nii.gz', 'label': './labelsTr/MSWAL_0136.nii.gz'}, {'image': './imagesTr/MSWAL_0138_0000.nii.gz', 'label': './labelsTr/MSWAL_0138.nii.gz'}, {'image': './imagesTr/MSWAL_0139_0000.nii.gz', 'label': './labelsTr/MSWAL_0139.nii.gz'}, {'image': './imagesTr/MSWAL_0140_0000.nii.gz', 'label': './labelsTr/MSWAL_0140.nii.gz'}, {'image': './imagesTr/MSWAL_0141_0000.nii.gz', 'label': './labelsTr/MSWAL_0141.nii.gz'}, {'image': './imagesTr/MSWAL_0142_0000.nii.gz', 'label': './labelsTr/MSWAL_0142.nii.gz'}, {'image': './imagesTr/MSWAL_0143_0000.nii.gz', 'label': './labelsTr/MSWAL_0143.nii.gz'}, {'image': './imagesTr/MSWAL_0145_0000.nii.gz', 'label': './labelsTr/MSWAL_0145.nii.gz'}, {'image': './imagesTr/MSWAL_0147_0000.nii.gz', 'label': './labelsTr/MSWAL_0147.nii.gz'}, {'image': './imagesTr/MSWAL_0148_0000.nii.gz', 'label': './labelsTr/MSWAL_0148.nii.gz'}, {'image': './imagesTr/MSWAL_0149_0000.nii.gz', 'label': './labelsTr/MSWAL_0149.nii.gz'}, {'image': './imagesTr/MSWAL_0150_0000.nii.gz', 
'label': './labelsTr/MSWAL_0150.nii.gz'}, {'image': './imagesTr/MSWAL_0151_0000.nii.gz', 'label': './labelsTr/MSWAL_0151.nii.gz'}, {'image': './imagesTr/MSWAL_0152_0000.nii.gz', 'label': './labelsTr/MSWAL_0152.nii.gz'}, {'image': './imagesTr/MSWAL_0157_0000.nii.gz', 'label': './labelsTr/MSWAL_0157.nii.gz'}, {'image': './imagesTr/MSWAL_0159_0000.nii.gz', 'label': './labelsTr/MSWAL_0159.nii.gz'}, {'image': './imagesTr/MSWAL_0162_0000.nii.gz', 'label': './labelsTr/MSWAL_0162.nii.gz'}, {'image': './imagesTr/MSWAL_0163_0000.nii.gz', 'label': './labelsTr/MSWAL_0163.nii.gz'}, {'image': './imagesTr/MSWAL_0165_0000.nii.gz', 'label': './labelsTr/MSWAL_0165.nii.gz'}, {'image': './imagesTr/MSWAL_0166_0000.nii.gz', 'label': './labelsTr/MSWAL_0166.nii.gz'}, {'image': './imagesTr/MSWAL_0167_0000.nii.gz', 'label': './labelsTr/MSWAL_0167.nii.gz'}, {'image': './imagesTr/MSWAL_0168_0000.nii.gz', 'label': './labelsTr/MSWAL_0168.nii.gz'}, {'image': './imagesTr/MSWAL_0169_0000.nii.gz', 'label': './labelsTr/MSWAL_0169.nii.gz'}, {'image': './imagesTr/MSWAL_0170_0000.nii.gz', 'label': './labelsTr/MSWAL_0170.nii.gz'}, {'image': './imagesTr/MSWAL_0171_0000.nii.gz', 'label': './labelsTr/MSWAL_0171.nii.gz'}, {'image': './imagesTr/MSWAL_0172_0000.nii.gz', 'label': './labelsTr/MSWAL_0172.nii.gz'}, {'image': './imagesTr/MSWAL_0173_0000.nii.gz', 'label': './labelsTr/MSWAL_0173.nii.gz'}, {'image': './imagesTr/MSWAL_0174_0000.nii.gz', 'label': './labelsTr/MSWAL_0174.nii.gz'}, {'image': './imagesTr/MSWAL_0175_0000.nii.gz', 'label': './labelsTr/MSWAL_0175.nii.gz'}, {'image': './imagesTr/MSWAL_0176_0000.nii.gz', 'label': './labelsTr/MSWAL_0176.nii.gz'}, {'image': './imagesTr/MSWAL_0177_0000.nii.gz', 'label': './labelsTr/MSWAL_0177.nii.gz'}, {'image': './imagesTr/MSWAL_0178_0000.nii.gz', 'label': './labelsTr/MSWAL_0178.nii.gz'}, {'image': './imagesTr/MSWAL_0179_0000.nii.gz', 'label': './labelsTr/MSWAL_0179.nii.gz'}, {'image': './imagesTr/MSWAL_0180_0000.nii.gz', 'label': './labelsTr/MSWAL_0180.nii.gz'}, 
{'image': './imagesTr/MSWAL_0182_0000.nii.gz', 'label': './labelsTr/MSWAL_0182.nii.gz'}, {'image': './imagesTr/MSWAL_0183_0000.nii.gz', 'label': './labelsTr/MSWAL_0183.nii.gz'}, {'image': './imagesTr/MSWAL_0184_0000.nii.gz', 'label': './labelsTr/MSWAL_0184.nii.gz'}, {'image': './imagesTr/MSWAL_0185_0000.nii.gz', 'label': './labelsTr/MSWAL_0185.nii.gz'}, {'image': './imagesTr/MSWAL_0186_0000.nii.gz', 'label': './labelsTr/MSWAL_0186.nii.gz'}, {'image': './imagesTr/MSWAL_0187_0000.nii.gz', 'label': './labelsTr/MSWAL_0187.nii.gz'}, {'image': './imagesTr/MSWAL_0188_0000.nii.gz', 'label': './labelsTr/MSWAL_0188.nii.gz'}, {'image': './imagesTr/MSWAL_0189_0000.nii.gz', 'label': './labelsTr/MSWAL_0189.nii.gz'}, {'image': './imagesTr/MSWAL_0193_0000.nii.gz', 'label': './labelsTr/MSWAL_0193.nii.gz'}, {'image': './imagesTr/MSWAL_0194_0000.nii.gz', 'label': './labelsTr/MSWAL_0194.nii.gz'}, {'image': './imagesTr/MSWAL_0195_0000.nii.gz', 'label': './labelsTr/MSWAL_0195.nii.gz'}, {'image': './imagesTr/MSWAL_0199_0000.nii.gz', 'label': './labelsTr/MSWAL_0199.nii.gz'}, {'image': './imagesTr/MSWAL_0201_0000.nii.gz', 'label': './labelsTr/MSWAL_0201.nii.gz'}, {'image': './imagesTr/MSWAL_0202_0000.nii.gz', 'label': './labelsTr/MSWAL_0202.nii.gz'}, {'image': './imagesTr/MSWAL_0203_0000.nii.gz', 'label': './labelsTr/MSWAL_0203.nii.gz'}, {'image': './imagesTr/MSWAL_0204_0000.nii.gz', 'label': './labelsTr/MSWAL_0204.nii.gz'}, {'image': './imagesTr/MSWAL_0207_0000.nii.gz', 'label': './labelsTr/MSWAL_0207.nii.gz'}, {'image': './imagesTr/MSWAL_0208_0000.nii.gz', 'label': './labelsTr/MSWAL_0208.nii.gz'}, {'image': './imagesTr/MSWAL_0209_0000.nii.gz', 'label': './labelsTr/MSWAL_0209.nii.gz'}, {'image': './imagesTr/MSWAL_0214_0000.nii.gz', 'label': './labelsTr/MSWAL_0214.nii.gz'}, {'image': './imagesTr/MSWAL_0217_0000.nii.gz', 'label': './labelsTr/MSWAL_0217.nii.gz'}, {'image': './imagesTr/MSWAL_0218_0000.nii.gz', 'label': './labelsTr/MSWAL_0218.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0219_0000.nii.gz', 'label': './labelsTr/MSWAL_0219.nii.gz'}, {'image': './imagesTr/MSWAL_0220_0000.nii.gz', 'label': './labelsTr/MSWAL_0220.nii.gz'}, {'image': './imagesTr/MSWAL_0221_0000.nii.gz', 'label': './labelsTr/MSWAL_0221.nii.gz'}, {'image': './imagesTr/MSWAL_0222_0000.nii.gz', 'label': './labelsTr/MSWAL_0222.nii.gz'}, {'image': './imagesTr/MSWAL_0223_0000.nii.gz', 'label': './labelsTr/MSWAL_0223.nii.gz'}, {'image': './imagesTr/MSWAL_0224_0000.nii.gz', 'label': './labelsTr/MSWAL_0224.nii.gz'}, {'image': './imagesTr/MSWAL_0225_0000.nii.gz', 'label': './labelsTr/MSWAL_0225.nii.gz'}, {'image': './imagesTr/MSWAL_0226_0000.nii.gz', 'label': './labelsTr/MSWAL_0226.nii.gz'}, {'image': './imagesTr/MSWAL_0227_0000.nii.gz', 'label': './labelsTr/MSWAL_0227.nii.gz'}, {'image': './imagesTr/MSWAL_0228_0000.nii.gz', 'label': './labelsTr/MSWAL_0228.nii.gz'}, {'image': './imagesTr/MSWAL_0229_0000.nii.gz', 'label': './labelsTr/MSWAL_0229.nii.gz'}, {'image': './imagesTr/MSWAL_0230_0000.nii.gz', 'label': './labelsTr/MSWAL_0230.nii.gz'}, {'image': './imagesTr/MSWAL_0233_0000.nii.gz', 'label': './labelsTr/MSWAL_0233.nii.gz'}, {'image': './imagesTr/MSWAL_0234_0000.nii.gz', 'label': './labelsTr/MSWAL_0234.nii.gz'}, {'image': './imagesTr/MSWAL_0238_0000.nii.gz', 'label': './labelsTr/MSWAL_0238.nii.gz'}, {'image': './imagesTr/MSWAL_0241_0000.nii.gz', 'label': './labelsTr/MSWAL_0241.nii.gz'}, {'image': './imagesTr/MSWAL_0242_0000.nii.gz', 'label': './labelsTr/MSWAL_0242.nii.gz'}, {'image': './imagesTr/MSWAL_0243_0000.nii.gz', 'label': './labelsTr/MSWAL_0243.nii.gz'}, {'image': './imagesTr/MSWAL_0245_0000.nii.gz', 'label': './labelsTr/MSWAL_0245.nii.gz'}, {'image': './imagesTr/MSWAL_0246_0000.nii.gz', 'label': './labelsTr/MSWAL_0246.nii.gz'}, {'image': './imagesTr/MSWAL_0247_0000.nii.gz', 'label': './labelsTr/MSWAL_0247.nii.gz'}, {'image': './imagesTr/MSWAL_0248_0000.nii.gz', 'label': './labelsTr/MSWAL_0248.nii.gz'}, {'image': './imagesTr/MSWAL_0251_0000.nii.gz', 
'label': './labelsTr/MSWAL_0251.nii.gz'}, {'image': './imagesTr/MSWAL_0252_0000.nii.gz', 'label': './labelsTr/MSWAL_0252.nii.gz'}, {'image': './imagesTr/MSWAL_0253_0000.nii.gz', 'label': './labelsTr/MSWAL_0253.nii.gz'}, {'image': './imagesTr/MSWAL_0254_0000.nii.gz', 'label': './labelsTr/MSWAL_0254.nii.gz'}, {'image': './imagesTr/MSWAL_0255_0000.nii.gz', 'label': './labelsTr/MSWAL_0255.nii.gz'}, {'image': './imagesTr/MSWAL_0256_0000.nii.gz', 'label': './labelsTr/MSWAL_0256.nii.gz'}, {'image': './imagesTr/MSWAL_0257_0000.nii.gz', 'label': './labelsTr/MSWAL_0257.nii.gz'}, {'image': './imagesTr/MSWAL_0258_0000.nii.gz', 'label': './labelsTr/MSWAL_0258.nii.gz'}, {'image': './imagesTr/MSWAL_0259_0000.nii.gz', 'label': './labelsTr/MSWAL_0259.nii.gz'}, {'image': './imagesTr/MSWAL_0260_0000.nii.gz', 'label': './labelsTr/MSWAL_0260.nii.gz'}, {'image': './imagesTr/MSWAL_0261_0000.nii.gz', 'label': './labelsTr/MSWAL_0261.nii.gz'}, {'image': './imagesTr/MSWAL_0262_0000.nii.gz', 'label': './labelsTr/MSWAL_0262.nii.gz'}, {'image': './imagesTr/MSWAL_0263_0000.nii.gz', 'label': './labelsTr/MSWAL_0263.nii.gz'}, {'image': './imagesTr/MSWAL_0264_0000.nii.gz', 'label': './labelsTr/MSWAL_0264.nii.gz'}, {'image': './imagesTr/MSWAL_0265_0000.nii.gz', 'label': './labelsTr/MSWAL_0265.nii.gz'}, {'image': './imagesTr/MSWAL_0267_0000.nii.gz', 'label': './labelsTr/MSWAL_0267.nii.gz'}, {'image': './imagesTr/MSWAL_0270_0000.nii.gz', 'label': './labelsTr/MSWAL_0270.nii.gz'}, {'image': './imagesTr/MSWAL_0271_0000.nii.gz', 'label': './labelsTr/MSWAL_0271.nii.gz'}, {'image': './imagesTr/MSWAL_0272_0000.nii.gz', 'label': './labelsTr/MSWAL_0272.nii.gz'}, {'image': './imagesTr/MSWAL_0273_0000.nii.gz', 'label': './labelsTr/MSWAL_0273.nii.gz'}, {'image': './imagesTr/MSWAL_0274_0000.nii.gz', 'label': './labelsTr/MSWAL_0274.nii.gz'}, {'image': './imagesTr/MSWAL_0275_0000.nii.gz', 'label': './labelsTr/MSWAL_0275.nii.gz'}, {'image': './imagesTr/MSWAL_0276_0000.nii.gz', 'label': './labelsTr/MSWAL_0276.nii.gz'}, 
{'image': './imagesTr/MSWAL_0277_0000.nii.gz', 'label': './labelsTr/MSWAL_0277.nii.gz'}, {'image': './imagesTr/MSWAL_0278_0000.nii.gz', 'label': './labelsTr/MSWAL_0278.nii.gz'}, {'image': './imagesTr/MSWAL_0279_0000.nii.gz', 'label': './labelsTr/MSWAL_0279.nii.gz'}, {'image': './imagesTr/MSWAL_0281_0000.nii.gz', 'label': './labelsTr/MSWAL_0281.nii.gz'}, {'image': './imagesTr/MSWAL_0282_0000.nii.gz', 'label': './labelsTr/MSWAL_0282.nii.gz'}, {'image': './imagesTr/MSWAL_0283_0000.nii.gz', 'label': './labelsTr/MSWAL_0283.nii.gz'}, {'image': './imagesTr/MSWAL_0284_0000.nii.gz', 'label': './labelsTr/MSWAL_0284.nii.gz'}, {'image': './imagesTr/MSWAL_0285_0000.nii.gz', 'label': './labelsTr/MSWAL_0285.nii.gz'}, {'image': './imagesTr/MSWAL_0288_0000.nii.gz', 'label': './labelsTr/MSWAL_0288.nii.gz'}, {'image': './imagesTr/MSWAL_0289_0000.nii.gz', 'label': './labelsTr/MSWAL_0289.nii.gz'}, {'image': './imagesTr/MSWAL_0290_0000.nii.gz', 'label': './labelsTr/MSWAL_0290.nii.gz'}, {'image': './imagesTr/MSWAL_0293_0000.nii.gz', 'label': './labelsTr/MSWAL_0293.nii.gz'}, {'image': './imagesTr/MSWAL_0296_0000.nii.gz', 'label': './labelsTr/MSWAL_0296.nii.gz'}, {'image': './imagesTr/MSWAL_0297_0000.nii.gz', 'label': './labelsTr/MSWAL_0297.nii.gz'}, {'image': './imagesTr/MSWAL_0301_0000.nii.gz', 'label': './labelsTr/MSWAL_0301.nii.gz'}, {'image': './imagesTr/MSWAL_0302_0000.nii.gz', 'label': './labelsTr/MSWAL_0302.nii.gz'}, {'image': './imagesTr/MSWAL_0303_0000.nii.gz', 'label': './labelsTr/MSWAL_0303.nii.gz'}, {'image': './imagesTr/MSWAL_0306_0000.nii.gz', 'label': './labelsTr/MSWAL_0306.nii.gz'}, {'image': './imagesTr/MSWAL_0307_0000.nii.gz', 'label': './labelsTr/MSWAL_0307.nii.gz'}, {'image': './imagesTr/MSWAL_0308_0000.nii.gz', 'label': './labelsTr/MSWAL_0308.nii.gz'}, {'image': './imagesTr/MSWAL_0311_0000.nii.gz', 'label': './labelsTr/MSWAL_0311.nii.gz'}, {'image': './imagesTr/MSWAL_0312_0000.nii.gz', 'label': './labelsTr/MSWAL_0312.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0313_0000.nii.gz', 'label': './labelsTr/MSWAL_0313.nii.gz'}, {'image': './imagesTr/MSWAL_0314_0000.nii.gz', 'label': './labelsTr/MSWAL_0314.nii.gz'}, {'image': './imagesTr/MSWAL_0316_0000.nii.gz', 'label': './labelsTr/MSWAL_0316.nii.gz'}, {'image': './imagesTr/MSWAL_0317_0000.nii.gz', 'label': './labelsTr/MSWAL_0317.nii.gz'}, {'image': './imagesTr/MSWAL_0318_0000.nii.gz', 'label': './labelsTr/MSWAL_0318.nii.gz'}, {'image': './imagesTr/MSWAL_0320_0000.nii.gz', 'label': './labelsTr/MSWAL_0320.nii.gz'}, {'image': './imagesTr/MSWAL_0323_0000.nii.gz', 'label': './labelsTr/MSWAL_0323.nii.gz'}, {'image': './imagesTr/MSWAL_0324_0000.nii.gz', 'label': './labelsTr/MSWAL_0324.nii.gz'}, {'image': './imagesTr/MSWAL_0326_0000.nii.gz', 'label': './labelsTr/MSWAL_0326.nii.gz'}, {'image': './imagesTr/MSWAL_0327_0000.nii.gz', 'label': './labelsTr/MSWAL_0327.nii.gz'}, {'image': './imagesTr/MSWAL_0328_0000.nii.gz', 'label': './labelsTr/MSWAL_0328.nii.gz'}, {'image': './imagesTr/MSWAL_0330_0000.nii.gz', 'label': './labelsTr/MSWAL_0330.nii.gz'}, {'image': './imagesTr/MSWAL_0331_0000.nii.gz', 'label': './labelsTr/MSWAL_0331.nii.gz'}, {'image': './imagesTr/MSWAL_0332_0000.nii.gz', 'label': './labelsTr/MSWAL_0332.nii.gz'}, {'image': './imagesTr/MSWAL_0333_0000.nii.gz', 'label': './labelsTr/MSWAL_0333.nii.gz'}, {'image': './imagesTr/MSWAL_0334_0000.nii.gz', 'label': './labelsTr/MSWAL_0334.nii.gz'}, {'image': './imagesTr/MSWAL_0335_0000.nii.gz', 'label': './labelsTr/MSWAL_0335.nii.gz'}, {'image': './imagesTr/MSWAL_0336_0000.nii.gz', 'label': './labelsTr/MSWAL_0336.nii.gz'}, {'image': './imagesTr/MSWAL_0337_0000.nii.gz', 'label': './labelsTr/MSWAL_0337.nii.gz'}, {'image': './imagesTr/MSWAL_0338_0000.nii.gz', 'label': './labelsTr/MSWAL_0338.nii.gz'}, {'image': './imagesTr/MSWAL_0341_0000.nii.gz', 'label': './labelsTr/MSWAL_0341.nii.gz'}, {'image': './imagesTr/MSWAL_0342_0000.nii.gz', 'label': './labelsTr/MSWAL_0342.nii.gz'}, {'image': './imagesTr/MSWAL_0343_0000.nii.gz', 
"label": "./labelsTr/MSWAL_0343.nii.gz"}, {"image": "./imagesTr/MSWAL_0344_0000.nii.gz", "label": "./labelsTr/MSWAL_0344.nii.gz"}, {"image": "./imagesTr/MSWAL_0345_0000.nii.gz", "label": "./labelsTr/MSWAL_0345.nii.gz"}, {"image": "./imagesTr/MSWAL_0346_0000.nii.gz", "label": "./labelsTr/MSWAL_0346.nii.gz"}, {"image": "./imagesTr/MSWAL_0348_0000.nii.gz", "label": "./labelsTr/MSWAL_0348.nii.gz"}, {"image": "./imagesTr/MSWAL_0353_0000.nii.gz", "label": "./labelsTr/MSWAL_0353.nii.gz"}, {"image": "./imagesTr/MSWAL_0354_0000.nii.gz", "label": "./labelsTr/MSWAL_0354.nii.gz"}, {"image": "./imagesTr/MSWAL_0355_0000.nii.gz", "label": "./labelsTr/MSWAL_0355.nii.gz"}, {"image": "./imagesTr/MSWAL_0356_0000.nii.gz", "label": "./labelsTr/MSWAL_0356.nii.gz"}, {"image": "./imagesTr/MSWAL_0357_0000.nii.gz", "label": "./labelsTr/MSWAL_0357.nii.gz"}, {"image": "./imagesTr/MSWAL_0360_0000.nii.gz", "label": "./labelsTr/MSWAL_0360.nii.gz"}, {"image": "./imagesTr/MSWAL_0361_0000.nii.gz", "label": "./labelsTr/MSWAL_0361.nii.gz"}, {"image": "./imagesTr/MSWAL_0362_0000.nii.gz", "label": "./labelsTr/MSWAL_0362.nii.gz"}, {"image": "./imagesTr/MSWAL_0363_0000.nii.gz", "label": "./labelsTr/MSWAL_0363.nii.gz"}, {"image": "./imagesTr/MSWAL_0365_0000.nii.gz", "label": "./labelsTr/MSWAL_0365.nii.gz"}, {"image": "./imagesTr/MSWAL_0366_0000.nii.gz", "label": "./labelsTr/MSWAL_0366.nii.gz"}, {"image": "./imagesTr/MSWAL_0369_0000.nii.gz", "label": "./labelsTr/MSWAL_0369.nii.gz"}, {"image": "./imagesTr/MSWAL_0370_0000.nii.gz", "label": "./labelsTr/MSWAL_0370.nii.gz"}, {"image": "./imagesTr/MSWAL_0373_0000.nii.gz", "label": "./labelsTr/MSWAL_0373.nii.gz"}, {"image": "./imagesTr/MSWAL_0374_0000.nii.gz", "label": "./labelsTr/MSWAL_0374.nii.gz"}, {"image": "./imagesTr/MSWAL_0375_0000.nii.gz", "label": "./labelsTr/MSWAL_0375.nii.gz"}, {"image": "./imagesTr/MSWAL_0376_0000.nii.gz", "label": "./labelsTr/MSWAL_0376.nii.gz"}, {"image": "./imagesTr/MSWAL_0378_0000.nii.gz", "label": "./labelsTr/MSWAL_0378.nii.gz"}, 
{"image": "./imagesTr/MSWAL_0379_0000.nii.gz", "label": "./labelsTr/MSWAL_0379.nii.gz"}, {"image": "./imagesTr/MSWAL_0380_0000.nii.gz", "label": "./labelsTr/MSWAL_0380.nii.gz"}, {"image": "./imagesTr/MSWAL_0381_0000.nii.gz", "label": "./labelsTr/MSWAL_0381.nii.gz"}, {"image": "./imagesTr/MSWAL_0382_0000.nii.gz", "label": "./labelsTr/MSWAL_0382.nii.gz"}, {"image": "./imagesTr/MSWAL_0387_0000.nii.gz", "label": "./labelsTr/MSWAL_0387.nii.gz"}, {"image": "./imagesTr/MSWAL_0388_0000.nii.gz", "label": "./labelsTr/MSWAL_0388.nii.gz"}, {"image": "./imagesTr/MSWAL_0389_0000.nii.gz", "label": "./labelsTr/MSWAL_0389.nii.gz"}, {"image": "./imagesTr/MSWAL_0390_0000.nii.gz", "label": "./labelsTr/MSWAL_0390.nii.gz"}, {"image": "./imagesTr/MSWAL_0391_0000.nii.gz", "label": "./labelsTr/MSWAL_0391.nii.gz"}, {"image": "./imagesTr/MSWAL_0392_0000.nii.gz", "label": "./labelsTr/MSWAL_0392.nii.gz"}, {"image": "./imagesTr/MSWAL_0393_0000.nii.gz", "label": "./labelsTr/MSWAL_0393.nii.gz"}, {"image": "./imagesTr/MSWAL_0397_0000.nii.gz", "label": "./labelsTr/MSWAL_0397.nii.gz"}, {"image": "./imagesTr/MSWAL_0398_0000.nii.gz", "label": "./labelsTr/MSWAL_0398.nii.gz"}, {"image": "./imagesTr/MSWAL_0399_0000.nii.gz", "label": "./labelsTr/MSWAL_0399.nii.gz"}, {"image": "./imagesTr/MSWAL_0400_0000.nii.gz", "label": "./labelsTr/MSWAL_0400.nii.gz"}, {"image": "./imagesTr/MSWAL_0402_0000.nii.gz", "label": "./labelsTr/MSWAL_0402.nii.gz"}, {"image": "./imagesTr/MSWAL_0403_0000.nii.gz", "label": "./labelsTr/MSWAL_0403.nii.gz"}, {"image": "./imagesTr/MSWAL_0407_0000.nii.gz", "label": "./labelsTr/MSWAL_0407.nii.gz"}, {"image": "./imagesTr/MSWAL_0409_0000.nii.gz", "label": "./labelsTr/MSWAL_0409.nii.gz"}, {"image": "./imagesTr/MSWAL_0410_0000.nii.gz", "label": "./labelsTr/MSWAL_0410.nii.gz"}, {"image": "./imagesTr/MSWAL_0411_0000.nii.gz", "label": "./labelsTr/MSWAL_0411.nii.gz"}, {"image": "./imagesTr/MSWAL_0412_0000.nii.gz", "label": "./labelsTr/MSWAL_0412.nii.gz"}, {"image": 
"./imagesTr/MSWAL_0414_0000.nii.gz", "label": "./labelsTr/MSWAL_0414.nii.gz"}, {"image": "./imagesTr/MSWAL_0415_0000.nii.gz", "label": "./labelsTr/MSWAL_0415.nii.gz"}, {"image": "./imagesTr/MSWAL_0416_0000.nii.gz", "label": "./labelsTr/MSWAL_0416.nii.gz"}, {"image": "./imagesTr/MSWAL_0417_0000.nii.gz", "label": "./labelsTr/MSWAL_0417.nii.gz"}, {"image": "./imagesTr/MSWAL_0418_0000.nii.gz", "label": "./labelsTr/MSWAL_0418.nii.gz"}, {"image": "./imagesTr/MSWAL_0419_0000.nii.gz", "label": "./labelsTr/MSWAL_0419.nii.gz"}, {"image": "./imagesTr/MSWAL_0420_0000.nii.gz", "label": "./labelsTr/MSWAL_0420.nii.gz"}, {"image": "./imagesTr/MSWAL_0421_0000.nii.gz", "label": "./labelsTr/MSWAL_0421.nii.gz"}, {"image": "./imagesTr/MSWAL_0422_0000.nii.gz", "label": "./labelsTr/MSWAL_0422.nii.gz"}, {"image": "./imagesTr/MSWAL_0423_0000.nii.gz", "label": "./labelsTr/MSWAL_0423.nii.gz"}, {"image": "./imagesTr/MSWAL_0425_0000.nii.gz", "label": "./labelsTr/MSWAL_0425.nii.gz"}, {"image": "./imagesTr/MSWAL_0426_0000.nii.gz", "label": "./labelsTr/MSWAL_0426.nii.gz"}, {"image": "./imagesTr/MSWAL_0427_0000.nii.gz", "label": "./labelsTr/MSWAL_0427.nii.gz"}, {"image": "./imagesTr/MSWAL_0428_0000.nii.gz", "label": "./labelsTr/MSWAL_0428.nii.gz"}, {"image": "./imagesTr/MSWAL_0429_0000.nii.gz", "label": "./labelsTr/MSWAL_0429.nii.gz"}, {"image": "./imagesTr/MSWAL_0430_0000.nii.gz", "label": "./labelsTr/MSWAL_0430.nii.gz"}, {"image": "./imagesTr/MSWAL_0431_0000.nii.gz", "label": "./labelsTr/MSWAL_0431.nii.gz"}, {"image": "./imagesTr/MSWAL_0432_0000.nii.gz", "label": "./labelsTr/MSWAL_0432.nii.gz"}, {"image": "./imagesTr/MSWAL_0434_0000.nii.gz", "label": "./labelsTr/MSWAL_0434.nii.gz"}, {"image": "./imagesTr/MSWAL_0435_0000.nii.gz", "label": "./labelsTr/MSWAL_0435.nii.gz"}, {"image": "./imagesTr/MSWAL_0436_0000.nii.gz", "label": "./labelsTr/MSWAL_0436.nii.gz"}, {"image": "./imagesTr/MSWAL_0437_0000.nii.gz", "label": "./labelsTr/MSWAL_0437.nii.gz"}, {"image": "./imagesTr/MSWAL_0438_0000.nii.gz", 
"label": "./labelsTr/MSWAL_0438.nii.gz"}, {"image": "./imagesTr/MSWAL_0439_0000.nii.gz", "label": "./labelsTr/MSWAL_0439.nii.gz"}, {"image": "./imagesTr/MSWAL_0440_0000.nii.gz", "label": "./labelsTr/MSWAL_0440.nii.gz"}, {"image": "./imagesTr/MSWAL_0442_0000.nii.gz", "label": "./labelsTr/MSWAL_0442.nii.gz"}, {"image": "./imagesTr/MSWAL_0446_0000.nii.gz", "label": "./labelsTr/MSWAL_0446.nii.gz"}, {"image": "./imagesTr/MSWAL_0447_0000.nii.gz", "label": "./labelsTr/MSWAL_0447.nii.gz"}, {"image": "./imagesTr/MSWAL_0452_0000.nii.gz", "label": "./labelsTr/MSWAL_0452.nii.gz"}, {"image": "./imagesTr/MSWAL_0453_0000.nii.gz", "label": "./labelsTr/MSWAL_0453.nii.gz"}, {"image": "./imagesTr/MSWAL_0455_0000.nii.gz", "label": "./labelsTr/MSWAL_0455.nii.gz"}, {"image": "./imagesTr/MSWAL_0457_0000.nii.gz", "label": "./labelsTr/MSWAL_0457.nii.gz"}, {"image": "./imagesTr/MSWAL_0460_0000.nii.gz", "label": "./labelsTr/MSWAL_0460.nii.gz"}, {"image": "./imagesTr/MSWAL_0461_0000.nii.gz", "label": "./labelsTr/MSWAL_0461.nii.gz"}, {"image": "./imagesTr/MSWAL_0463_0000.nii.gz", "label": "./labelsTr/MSWAL_0463.nii.gz"}, {"image": "./imagesTr/MSWAL_0464_0000.nii.gz", "label": "./labelsTr/MSWAL_0464.nii.gz"}, {"image": "./imagesTr/MSWAL_0465_0000.nii.gz", "label": "./labelsTr/MSWAL_0465.nii.gz"}, {"image": "./imagesTr/MSWAL_0466_0000.nii.gz", "label": "./labelsTr/MSWAL_0466.nii.gz"}, {"image": "./imagesTr/MSWAL_0468_0000.nii.gz", "label": "./labelsTr/MSWAL_0468.nii.gz"}, {"image": "./imagesTr/MSWAL_0470_0000.nii.gz", "label": "./labelsTr/MSWAL_0470.nii.gz"}, {"image": "./imagesTr/MSWAL_0471_0000.nii.gz", "label": "./labelsTr/MSWAL_0471.nii.gz"}, {"image": "./imagesTr/MSWAL_0473_0000.nii.gz", "label": "./labelsTr/MSWAL_0473.nii.gz"}, {"image": "./imagesTr/MSWAL_0474_0000.nii.gz", "label": "./labelsTr/MSWAL_0474.nii.gz"}, {"image": "./imagesTr/MSWAL_0475_0000.nii.gz", "label": "./labelsTr/MSWAL_0475.nii.gz"}, {"image": "./imagesTr/MSWAL_0476_0000.nii.gz", "label": "./labelsTr/MSWAL_0476.nii.gz"}, 
{"image": "./imagesTr/MSWAL_0477_0000.nii.gz", "label": "./labelsTr/MSWAL_0477.nii.gz"}, {"image": "./imagesTr/MSWAL_0479_0000.nii.gz", "label": "./labelsTr/MSWAL_0479.nii.gz"}, {"image": "./imagesTr/MSWAL_0480_0000.nii.gz", "label": "./labelsTr/MSWAL_0480.nii.gz"}, {"image": "./imagesTr/MSWAL_0482_0000.nii.gz", "label": "./labelsTr/MSWAL_0482.nii.gz"}, {"image": "./imagesTr/MSWAL_0483_0000.nii.gz", "label": "./labelsTr/MSWAL_0483.nii.gz"}, {"image": "./imagesTr/MSWAL_0484_0000.nii.gz", "label": "./labelsTr/MSWAL_0484.nii.gz"}, {"image": "./imagesTr/MSWAL_0485_0000.nii.gz", "label": "./labelsTr/MSWAL_0485.nii.gz"}, {"image": "./imagesTr/MSWAL_0486_0000.nii.gz", "label": "./labelsTr/MSWAL_0486.nii.gz"}, {"image": "./imagesTr/MSWAL_0487_0000.nii.gz", "label": "./labelsTr/MSWAL_0487.nii.gz"}, {"image": "./imagesTr/MSWAL_0488_0000.nii.gz", "label": "./labelsTr/MSWAL_0488.nii.gz"}, {"image": "./imagesTr/MSWAL_0489_0000.nii.gz", "label": "./labelsTr/MSWAL_0489.nii.gz"}, {"image": "./imagesTr/MSWAL_0490_0000.nii.gz", "label": "./labelsTr/MSWAL_0490.nii.gz"}, {"image": "./imagesTr/MSWAL_0491_0000.nii.gz", "label": "./labelsTr/MSWAL_0491.nii.gz"}, {"image": "./imagesTr/MSWAL_0492_0000.nii.gz", "label": "./labelsTr/MSWAL_0492.nii.gz"}, {"image": "./imagesTr/MSWAL_0493_0000.nii.gz", "label": "./labelsTr/MSWAL_0493.nii.gz"}, {"image": "./imagesTr/MSWAL_0495_0000.nii.gz", "label": "./labelsTr/MSWAL_0495.nii.gz"}, {"image": "./imagesTr/MSWAL_0497_0000.nii.gz", "label": "./labelsTr/MSWAL_0497.nii.gz"}, {"image": "./imagesTr/MSWAL_0498_0000.nii.gz", "label": "./labelsTr/MSWAL_0498.nii.gz"}, {"image": "./imagesTr/MSWAL_0500_0000.nii.gz", "label": "./labelsTr/MSWAL_0500.nii.gz"}, {"image": "./imagesTr/MSWAL_0501_0000.nii.gz", "label": "./labelsTr/MSWAL_0501.nii.gz"}, {"image": "./imagesTr/MSWAL_0504_0000.nii.gz", "label": "./labelsTr/MSWAL_0504.nii.gz"}, {"image": "./imagesTr/MSWAL_0505_0000.nii.gz", "label": "./labelsTr/MSWAL_0505.nii.gz"}, {"image": 
"./imagesTr/MSWAL_0506_0000.nii.gz", "label": "./labelsTr/MSWAL_0506.nii.gz"}, {"image": "./imagesTr/MSWAL_0507_0000.nii.gz", "label": "./labelsTr/MSWAL_0507.nii.gz"}, {"image": "./imagesTr/MSWAL_0508_0000.nii.gz", "label": "./labelsTr/MSWAL_0508.nii.gz"}, {"image": "./imagesTr/MSWAL_0509_0000.nii.gz", "label": "./labelsTr/MSWAL_0509.nii.gz"}, {"image": "./imagesTr/MSWAL_0510_0000.nii.gz", "label": "./labelsTr/MSWAL_0510.nii.gz"}, {"image": "./imagesTr/MSWAL_0512_0000.nii.gz", "label": "./labelsTr/MSWAL_0512.nii.gz"}, {"image": "./imagesTr/MSWAL_0516_0000.nii.gz", "label": "./labelsTr/MSWAL_0516.nii.gz"}, {"image": "./imagesTr/MSWAL_0518_0000.nii.gz", "label": "./labelsTr/MSWAL_0518.nii.gz"}, {"image": "./imagesTr/MSWAL_0519_0000.nii.gz", "label": "./labelsTr/MSWAL_0519.nii.gz"}, {"image": "./imagesTr/MSWAL_0521_0000.nii.gz", "label": "./labelsTr/MSWAL_0521.nii.gz"}, {"image": "./imagesTr/MSWAL_0522_0000.nii.gz", "label": "./labelsTr/MSWAL_0522.nii.gz"}, {"image": "./imagesTr/MSWAL_0523_0000.nii.gz", "label": "./labelsTr/MSWAL_0523.nii.gz"}, {"image": "./imagesTr/MSWAL_0524_0000.nii.gz", "label": "./labelsTr/MSWAL_0524.nii.gz"}, {"image": "./imagesTr/MSWAL_0526_0000.nii.gz", "label": "./labelsTr/MSWAL_0526.nii.gz"}, {"image": "./imagesTr/MSWAL_0527_0000.nii.gz", "label": "./labelsTr/MSWAL_0527.nii.gz"}, {"image": "./imagesTr/MSWAL_0530_0000.nii.gz", "label": "./labelsTr/MSWAL_0530.nii.gz"}, {"image": "./imagesTr/MSWAL_0531_0000.nii.gz", "label": "./labelsTr/MSWAL_0531.nii.gz"}, {"image": "./imagesTr/MSWAL_0534_0000.nii.gz", "label": "./labelsTr/MSWAL_0534.nii.gz"}, {"image": "./imagesTr/MSWAL_0535_0000.nii.gz", "label": "./labelsTr/MSWAL_0535.nii.gz"}, {"image": "./imagesTr/MSWAL_0536_0000.nii.gz", "label": "./labelsTr/MSWAL_0536.nii.gz"}, {"image": "./imagesTr/MSWAL_0538_0000.nii.gz", "label": "./labelsTr/MSWAL_0538.nii.gz"}, {"image": "./imagesTr/MSWAL_0539_0000.nii.gz", "label": "./labelsTr/MSWAL_0539.nii.gz"}, {"image": "./imagesTr/MSWAL_0540_0000.nii.gz", 
"label": "./labelsTr/MSWAL_0540.nii.gz"}, {"image": "./imagesTr/MSWAL_0542_0000.nii.gz", "label": "./labelsTr/MSWAL_0542.nii.gz"}, {"image": "./imagesTr/MSWAL_0544_0000.nii.gz", "label": "./labelsTr/MSWAL_0544.nii.gz"}, {"image": "./imagesTr/MSWAL_0545_0000.nii.gz", "label": "./labelsTr/MSWAL_0545.nii.gz"}, {"image": "./imagesTr/MSWAL_0546_0000.nii.gz", "label": "./labelsTr/MSWAL_0546.nii.gz"}, {"image": "./imagesTr/MSWAL_0547_0000.nii.gz", "label": "./labelsTr/MSWAL_0547.nii.gz"}, {"image": "./imagesTr/MSWAL_0548_0000.nii.gz", "label": "./labelsTr/MSWAL_0548.nii.gz"}, {"image": "./imagesTr/MSWAL_0549_0000.nii.gz", "label": "./labelsTr/MSWAL_0549.nii.gz"}, {"image": "./imagesTr/MSWAL_0550_0000.nii.gz", "label": "./labelsTr/MSWAL_0550.nii.gz"}, {"image": "./imagesTr/MSWAL_0551_0000.nii.gz", "label": "./labelsTr/MSWAL_0551.nii.gz"}, {"image": "./imagesTr/MSWAL_0552_0000.nii.gz", "label": "./labelsTr/MSWAL_0552.nii.gz"}, {"image": "./imagesTr/MSWAL_0553_0000.nii.gz", "label": "./labelsTr/MSWAL_0553.nii.gz"}, {"image": "./imagesTr/MSWAL_0554_0000.nii.gz", "label": "./labelsTr/MSWAL_0554.nii.gz"}, {"image": "./imagesTr/MSWAL_0555_0000.nii.gz", "label": "./labelsTr/MSWAL_0555.nii.gz"}, {"image": "./imagesTr/MSWAL_0556_0000.nii.gz", "label": "./labelsTr/MSWAL_0556.nii.gz"}, {"image": "./imagesTr/MSWAL_0557_0000.nii.gz", "label": "./labelsTr/MSWAL_0557.nii.gz"}, {"image": "./imagesTr/MSWAL_0558_0000.nii.gz", "label": "./labelsTr/MSWAL_0558.nii.gz"}, {"image": "./imagesTr/MSWAL_0559_0000.nii.gz", "label": "./labelsTr/MSWAL_0559.nii.gz"}, {"image": "./imagesTr/MSWAL_0561_0000.nii.gz", "label": "./labelsTr/MSWAL_0561.nii.gz"}, {"image": "./imagesTr/MSWAL_0562_0000.nii.gz", "label": "./labelsTr/MSWAL_0562.nii.gz"}, {"image": "./imagesTr/MSWAL_0563_0000.nii.gz", "label": "./labelsTr/MSWAL_0563.nii.gz"}, {"image": "./imagesTr/MSWAL_0564_0000.nii.gz", "label": "./labelsTr/MSWAL_0564.nii.gz"}, {"image": "./imagesTr/MSWAL_0566_0000.nii.gz", "label": "./labelsTr/MSWAL_0566.nii.gz"}, 
{"image": "./imagesTr/MSWAL_0567_0000.nii.gz", "label": "./labelsTr/MSWAL_0567.nii.gz"}, {"image": "./imagesTr/MSWAL_0568_0000.nii.gz", "label": "./labelsTr/MSWAL_0568.nii.gz"}, {"image": "./imagesTr/MSWAL_0571_0000.nii.gz", "label": "./labelsTr/MSWAL_0571.nii.gz"}, {"image": "./imagesTr/MSWAL_0573_0000.nii.gz", "label": "./labelsTr/MSWAL_0573.nii.gz"}, {"image": "./imagesTr/MSWAL_0574_0000.nii.gz", "label": "./labelsTr/MSWAL_0574.nii.gz"}, {"image": "./imagesTr/MSWAL_0575_0000.nii.gz", "label": "./labelsTr/MSWAL_0575.nii.gz"}, {"image": "./imagesTr/MSWAL_0577_0000.nii.gz", "label": "./labelsTr/MSWAL_0577.nii.gz"}, {"image": "./imagesTr/MSWAL_0578_0000.nii.gz", "label": "./labelsTr/MSWAL_0578.nii.gz"}, {"image": "./imagesTr/MSWAL_0579_0000.nii.gz", "label": "./labelsTr/MSWAL_0579.nii.gz"}, {"image": "./imagesTr/MSWAL_0580_0000.nii.gz", "label": "./labelsTr/MSWAL_0580.nii.gz"}, {"image": "./imagesTr/MSWAL_0581_0000.nii.gz", "label": "./labelsTr/MSWAL_0581.nii.gz"}, {"image": "./imagesTr/MSWAL_0582_0000.nii.gz", "label": "./labelsTr/MSWAL_0582.nii.gz"}, {"image": "./imagesTr/MSWAL_0583_0000.nii.gz", "label": "./labelsTr/MSWAL_0583.nii.gz"}, {"image": "./imagesTr/MSWAL_0584_0000.nii.gz", "label": "./labelsTr/MSWAL_0584.nii.gz"}, {"image": "./imagesTr/MSWAL_0586_0000.nii.gz", "label": "./labelsTr/MSWAL_0586.nii.gz"}, {"image": "./imagesTr/MSWAL_0590_0000.nii.gz", "label": "./labelsTr/MSWAL_0590.nii.gz"}, {"image": "./imagesTr/MSWAL_0591_0000.nii.gz", "label": "./labelsTr/MSWAL_0591.nii.gz"}, {"image": "./imagesTr/MSWAL_0592_0000.nii.gz", "label": "./labelsTr/MSWAL_0592.nii.gz"}, {"image": "./imagesTr/MSWAL_0593_0000.nii.gz", "label": "./labelsTr/MSWAL_0593.nii.gz"}, {"image": "./imagesTr/MSWAL_0595_0000.nii.gz", "label": "./labelsTr/MSWAL_0595.nii.gz"}, {"image": "./imagesTr/MSWAL_0596_0000.nii.gz", "label": "./labelsTr/MSWAL_0596.nii.gz"}, {"image": "./imagesTr/MSWAL_0597_0000.nii.gz", "label": "./labelsTr/MSWAL_0597.nii.gz"}, {"image": 
"./imagesTr/MSWAL_0598_0000.nii.gz", "label": "./labelsTr/MSWAL_0598.nii.gz"}, {"image": "./imagesTr/MSWAL_0599_0000.nii.gz", "label": "./labelsTr/MSWAL_0599.nii.gz"}, {"image": "./imagesTr/MSWAL_0600_0000.nii.gz", "label": "./labelsTr/MSWAL_0600.nii.gz"}, {"image": "./imagesTr/MSWAL_0601_0000.nii.gz", "label": "./labelsTr/MSWAL_0601.nii.gz"}, {"image": "./imagesTr/MSWAL_0602_0000.nii.gz", "label": "./labelsTr/MSWAL_0602.nii.gz"}, {"image": "./imagesTr/MSWAL_0604_0000.nii.gz", "label": "./labelsTr/MSWAL_0604.nii.gz"}, {"image": "./imagesTr/MSWAL_0605_0000.nii.gz", "label": "./labelsTr/MSWAL_0605.nii.gz"}, {"image": "./imagesTr/MSWAL_0608_0000.nii.gz", "label": "./labelsTr/MSWAL_0608.nii.gz"}, {"image": "./imagesTr/MSWAL_0612_0000.nii.gz", "label": "./labelsTr/MSWAL_0612.nii.gz"}, {"image": "./imagesTr/MSWAL_0614_0000.nii.gz", "label": "./labelsTr/MSWAL_0614.nii.gz"}, {"image": "./imagesTr/MSWAL_0615_0000.nii.gz", "label": "./labelsTr/MSWAL_0615.nii.gz"}, {"image": "./imagesTr/MSWAL_0616_0000.nii.gz", "label": "./labelsTr/MSWAL_0616.nii.gz"}, {"image": "./imagesTr/MSWAL_0617_0000.nii.gz", "label": "./labelsTr/MSWAL_0617.nii.gz"}, {"image": "./imagesTr/MSWAL_0621_0000.nii.gz", "label": "./labelsTr/MSWAL_0621.nii.gz"}, {"image": "./imagesTr/MSWAL_0623_0000.nii.gz", "label": "./labelsTr/MSWAL_0623.nii.gz"}, {"image": "./imagesTr/MSWAL_0625_0000.nii.gz", "label": "./labelsTr/MSWAL_0625.nii.gz"}, {"image": "./imagesTr/MSWAL_0626_0000.nii.gz", "label": "./labelsTr/MSWAL_0626.nii.gz"}, {"image": "./imagesTr/MSWAL_0627_0000.nii.gz", "label": "./labelsTr/MSWAL_0627.nii.gz"}, {"image": "./imagesTr/MSWAL_0628_0000.nii.gz", "label": "./labelsTr/MSWAL_0628.nii.gz"}, {"image": "./imagesTr/MSWAL_0629_0000.nii.gz", "label": "./labelsTr/MSWAL_0629.nii.gz"}, {"image": "./imagesTr/MSWAL_0630_0000.nii.gz", "label": "./labelsTr/MSWAL_0630.nii.gz"}, {"image": "./imagesTr/MSWAL_0632_0000.nii.gz", "label": "./labelsTr/MSWAL_0632.nii.gz"}, {"image": "./imagesTr/MSWAL_0635_0000.nii.gz", 
"label": "./labelsTr/MSWAL_0635.nii.gz"}, {"image": "./imagesTr/MSWAL_0636_0000.nii.gz", "label": "./labelsTr/MSWAL_0636.nii.gz"}, {"image": "./imagesTr/MSWAL_0638_0000.nii.gz", "label": "./labelsTr/MSWAL_0638.nii.gz"}, {"image": "./imagesTr/MSWAL_0640_0000.nii.gz", "label": "./labelsTr/MSWAL_0640.nii.gz"}, {"image": "./imagesTr/MSWAL_0641_0000.nii.gz", "label": "./labelsTr/MSWAL_0641.nii.gz"}, {"image": "./imagesTr/MSWAL_0643_0000.nii.gz", "label": "./labelsTr/MSWAL_0643.nii.gz"}, {"image": "./imagesTr/MSWAL_0644_0000.nii.gz", "label": "./labelsTr/MSWAL_0644.nii.gz"}, {"image": "./imagesTr/MSWAL_0646_0000.nii.gz", "label": "./labelsTr/MSWAL_0646.nii.gz"}, {"image": "./imagesTr/MSWAL_0648_0000.nii.gz", "label": "./labelsTr/MSWAL_0648.nii.gz"}, {"image": "./imagesTr/MSWAL_0649_0000.nii.gz", "label": "./labelsTr/MSWAL_0649.nii.gz"}, {"image": "./imagesTr/MSWAL_0650_0000.nii.gz", "label": "./labelsTr/MSWAL_0650.nii.gz"}, {"image": "./imagesTr/MSWAL_0651_0000.nii.gz", "label": "./labelsTr/MSWAL_0651.nii.gz"}, {"image": "./imagesTr/MSWAL_0653_0000.nii.gz", "label": "./labelsTr/MSWAL_0653.nii.gz"}, {"image": "./imagesTr/MSWAL_0654_0000.nii.gz", "label": "./labelsTr/MSWAL_0654.nii.gz"}, {"image": "./imagesTr/MSWAL_0655_0000.nii.gz", "label": "./labelsTr/MSWAL_0655.nii.gz"}, {"image": "./imagesTr/MSWAL_0656_0000.nii.gz", "label": "./labelsTr/MSWAL_0656.nii.gz"}, {"image": "./imagesTr/MSWAL_0658_0000.nii.gz", "label": "./labelsTr/MSWAL_0658.nii.gz"}, {"image": "./imagesTr/MSWAL_0660_0000.nii.gz", "label": "./labelsTr/MSWAL_0660.nii.gz"}, {"image": "./imagesTr/MSWAL_0661_0000.nii.gz", "label": "./labelsTr/MSWAL_0661.nii.gz"}, {"image": "./imagesTr/MSWAL_0662_0000.nii.gz", "label": "./labelsTr/MSWAL_0662.nii.gz"}, {"image": "./imagesTr/MSWAL_0663_0000.nii.gz", "label": "./labelsTr/MSWAL_0663.nii.gz"}, {"image": "./imagesTr/MSWAL_0666_0000.nii.gz", "label": "./labelsTr/MSWAL_0666.nii.gz"}, {"image": "./imagesTr/MSWAL_0667_0000.nii.gz", "label": "./labelsTr/MSWAL_0667.nii.gz"}, 
{"image": "./imagesTr/MSWAL_0668_0000.nii.gz", "label": "./labelsTr/MSWAL_0668.nii.gz"}, {"image": "./imagesTr/MSWAL_0669_0000.nii.gz", "label": "./labelsTr/MSWAL_0669.nii.gz"}, {"image": "./imagesTr/MSWAL_0670_0000.nii.gz", "label": "./labelsTr/MSWAL_0670.nii.gz"}, {"image": "./imagesTr/MSWAL_0671_0000.nii.gz", "label": "./labelsTr/MSWAL_0671.nii.gz"}, {"image": "./imagesTr/MSWAL_0673_0000.nii.gz", "label": "./labelsTr/MSWAL_0673.nii.gz"}, {"image": "./imagesTr/MSWAL_0674_0000.nii.gz", "label": "./labelsTr/MSWAL_0674.nii.gz"}, {"image": "./imagesTr/MSWAL_0675_0000.nii.gz", "label": "./labelsTr/MSWAL_0675.nii.gz"}, {"image": "./imagesTr/MSWAL_0676_0000.nii.gz", "label": "./labelsTr/MSWAL_0676.nii.gz"}, {"image": "./imagesTr/MSWAL_0677_0000.nii.gz", "label": "./labelsTr/MSWAL_0677.nii.gz"}, {"image": "./imagesTr/MSWAL_0679_0000.nii.gz", "label": "./labelsTr/MSWAL_0679.nii.gz"}, {"image": "./imagesTr/MSWAL_0680_0000.nii.gz", "label": "./labelsTr/MSWAL_0680.nii.gz"}, {"image": "./imagesTr/MSWAL_0681_0000.nii.gz", "label": "./labelsTr/MSWAL_0681.nii.gz"}, {"image": "./imagesTr/MSWAL_0682_0000.nii.gz", "label": "./labelsTr/MSWAL_0682.nii.gz"}, {"image": "./imagesTr/MSWAL_0685_0000.nii.gz", "label": "./labelsTr/MSWAL_0685.nii.gz"}, {"image": "./imagesTr/MSWAL_0686_0000.nii.gz", "label": "./labelsTr/MSWAL_0686.nii.gz"}, {"image": "./imagesTr/MSWAL_0687_0000.nii.gz", "label": "./labelsTr/MSWAL_0687.nii.gz"}, {"image": "./imagesTr/MSWAL_0688_0000.nii.gz", "label": "./labelsTr/MSWAL_0688.nii.gz"}, {"image": "./imagesTr/MSWAL_0690_0000.nii.gz", "label": "./labelsTr/MSWAL_0690.nii.gz"}, {"image": "./imagesTr/MSWAL_0692_0000.nii.gz", "label": "./labelsTr/MSWAL_0692.nii.gz"}, {"image": "./imagesTr/MSWAL_0693_0000.nii.gz", "label": "./labelsTr/MSWAL_0693.nii.gz"}, {"image": "./imagesTr/MSWAL_0694_0000.nii.gz", "label": "./labelsTr/MSWAL_0694.nii.gz"}], "test": [{"image": "./imagesTs/MSWAL_0004_0000.nii.gz", "label": "./labelsTs/MSWAL_0004.nii.gz"}, {"image": 
"./imagesTs/MSWAL_0005_0000.nii.gz", "label": "./labelsTs/MSWAL_0005.nii.gz"}, {"image": "./imagesTs/MSWAL_0006_0000.nii.gz", "label": "./labelsTs/MSWAL_0006.nii.gz"}, {"image": "./imagesTs/MSWAL_0007_0000.nii.gz", "label": "./labelsTs/MSWAL_0007.nii.gz"}, {"image": "./imagesTs/MSWAL_0010_0000.nii.gz", "label": "./labelsTs/MSWAL_0010.nii.gz"}, {"image": "./imagesTs/MSWAL_0012_0000.nii.gz", "label": "./labelsTs/MSWAL_0012.nii.gz"}, {"image": "./imagesTs/MSWAL_0016_0000.nii.gz", "label": "./labelsTs/MSWAL_0016.nii.gz"}, {"image": "./imagesTs/MSWAL_0019_0000.nii.gz", "label": "./labelsTs/MSWAL_0019.nii.gz"}, {"image": "./imagesTs/MSWAL_0023_0000.nii.gz", "label": "./labelsTs/MSWAL_0023.nii.gz"}, {"image": "./imagesTs/MSWAL_0025_0000.nii.gz", "label": "./labelsTs/MSWAL_0025.nii.gz"}, {"image": "./imagesTs/MSWAL_0030_0000.nii.gz", "label": "./labelsTs/MSWAL_0030.nii.gz"}, {"image": "./imagesTs/MSWAL_0036_0000.nii.gz", "label": "./labelsTs/MSWAL_0036.nii.gz"}, {"image": "./imagesTs/MSWAL_0043_0000.nii.gz", "label": "./labelsTs/MSWAL_0043.nii.gz"}, {"image": "./imagesTs/MSWAL_0044_0000.nii.gz", "label": "./labelsTs/MSWAL_0044.nii.gz"}, {"image": "./imagesTs/MSWAL_0047_0000.nii.gz", "label": "./labelsTs/MSWAL_0047.nii.gz"}, {"image": "./imagesTs/MSWAL_0048_0000.nii.gz", "label": "./labelsTs/MSWAL_0048.nii.gz"}, {"image": "./imagesTs/MSWAL_0053_0000.nii.gz", "label": "./labelsTs/MSWAL_0053.nii.gz"}, {"image": "./imagesTs/MSWAL_0058_0000.nii.gz", "label": "./labelsTs/MSWAL_0058.nii.gz"}, {"image": "./imagesTs/MSWAL_0062_0000.nii.gz", "label": "./labelsTs/MSWAL_0062.nii.gz"}, {"image": "./imagesTs/MSWAL_0068_0000.nii.gz", "label": "./labelsTs/MSWAL_0068.nii.gz"}, {"image": "./imagesTs/MSWAL_0070_0000.nii.gz", "label": "./labelsTs/MSWAL_0070.nii.gz"}, {"image": "./imagesTs/MSWAL_0071_0000.nii.gz", "label": "./labelsTs/MSWAL_0071.nii.gz"}, {"image": "./imagesTs/MSWAL_0073_0000.nii.gz", "label": "./labelsTs/MSWAL_0073.nii.gz"}, {"image": "./imagesTs/MSWAL_0074_0000.nii.gz", 
"label": "./labelsTs/MSWAL_0074.nii.gz"}, {"image": "./imagesTs/MSWAL_0076_0000.nii.gz", "label": "./labelsTs/MSWAL_0076.nii.gz"}, {"image": "./imagesTs/MSWAL_0078_0000.nii.gz", "label": "./labelsTs/MSWAL_0078.nii.gz"}, {"image": "./imagesTs/MSWAL_0079_0000.nii.gz", "label": "./labelsTs/MSWAL_0079.nii.gz"}, {"image": "./imagesTs/MSWAL_0081_0000.nii.gz", "label": "./labelsTs/MSWAL_0081.nii.gz"}, {"image": "./imagesTs/MSWAL_0087_0000.nii.gz", "label": "./labelsTs/MSWAL_0087.nii.gz"}, {"image": "./imagesTs/MSWAL_0090_0000.nii.gz", "label": "./labelsTs/MSWAL_0090.nii.gz"}, {"image": "./imagesTs/MSWAL_0091_0000.nii.gz", "label": "./labelsTs/MSWAL_0091.nii.gz"}, {"image": "./imagesTs/MSWAL_0097_0000.nii.gz", "label": "./labelsTs/MSWAL_0097.nii.gz"}, {"image": "./imagesTs/MSWAL_0100_0000.nii.gz", "label": "./labelsTs/MSWAL_0100.nii.gz"}, {"image": "./imagesTs/MSWAL_0107_0000.nii.gz", "label": "./labelsTs/MSWAL_0107.nii.gz"}, {"image": "./imagesTs/MSWAL_0115_0000.nii.gz", "label": "./labelsTs/MSWAL_0115.nii.gz"}, {"image": "./imagesTs/MSWAL_0116_0000.nii.gz", "label": "./labelsTs/MSWAL_0116.nii.gz"}, {"image": "./imagesTs/MSWAL_0118_0000.nii.gz", "label": "./labelsTs/MSWAL_0118.nii.gz"}, {"image": "./imagesTs/MSWAL_0121_0000.nii.gz", "label": "./labelsTs/MSWAL_0121.nii.gz"}, {"image": "./imagesTs/MSWAL_0123_0000.nii.gz", "label": "./labelsTs/MSWAL_0123.nii.gz"}, {"image": "./imagesTs/MSWAL_0131_0000.nii.gz", "label": "./labelsTs/MSWAL_0131.nii.gz"}, {"image": "./imagesTs/MSWAL_0135_0000.nii.gz", "label": "./labelsTs/MSWAL_0135.nii.gz"}, {"image": "./imagesTs/MSWAL_0137_0000.nii.gz", "label": "./labelsTs/MSWAL_0137.nii.gz"}, {"image": "./imagesTs/MSWAL_0144_0000.nii.gz", "label": "./labelsTs/MSWAL_0144.nii.gz"}, {"image": "./imagesTs/MSWAL_0146_0000.nii.gz", "label": "./labelsTs/MSWAL_0146.nii.gz"}, {"image": "./imagesTs/MSWAL_0153_0000.nii.gz", "label": "./labelsTs/MSWAL_0153.nii.gz"}, {"image": "./imagesTs/MSWAL_0154_0000.nii.gz", "label": "./labelsTs/MSWAL_0154.nii.gz"}, 
{"image": "./imagesTs/MSWAL_0155_0000.nii.gz", "label": "./labelsTs/MSWAL_0155.nii.gz"}, {"image": "./imagesTs/MSWAL_0156_0000.nii.gz", "label": "./labelsTs/MSWAL_0156.nii.gz"}, {"image": "./imagesTs/MSWAL_0158_0000.nii.gz", "label": "./labelsTs/MSWAL_0158.nii.gz"}, {"image": "./imagesTs/MSWAL_0160_0000.nii.gz", "label": "./labelsTs/MSWAL_0160.nii.gz"}, {"image": "./imagesTs/MSWAL_0161_0000.nii.gz", "label": "./labelsTs/MSWAL_0161.nii.gz"}, {"image": "./imagesTs/MSWAL_0164_0000.nii.gz", "label": "./labelsTs/MSWAL_0164.nii.gz"}, {"image": "./imagesTs/MSWAL_0181_0000.nii.gz", "label": "./labelsTs/MSWAL_0181.nii.gz"}, {"image": "./imagesTs/MSWAL_0190_0000.nii.gz", "label": "./labelsTs/MSWAL_0190.nii.gz"}, {"image": "./imagesTs/MSWAL_0191_0000.nii.gz", "label": "./labelsTs/MSWAL_0191.nii.gz"}, {"image": "./imagesTs/MSWAL_0192_0000.nii.gz", "label": "./labelsTs/MSWAL_0192.nii.gz"}, {"image": "./imagesTs/MSWAL_0196_0000.nii.gz", "label": "./labelsTs/MSWAL_0196.nii.gz"}, {"image": "./imagesTs/MSWAL_0197_0000.nii.gz", "label": "./labelsTs/MSWAL_0197.nii.gz"}, {"image": "./imagesTs/MSWAL_0198_0000.nii.gz", "label": "./labelsTs/MSWAL_0198.nii.gz"}, {"image": "./imagesTs/MSWAL_0200_0000.nii.gz", "label": "./labelsTs/MSWAL_0200.nii.gz"}, {"image": "./imagesTs/MSWAL_0205_0000.nii.gz", "label": "./labelsTs/MSWAL_0205.nii.gz"}, {"image": "./imagesTs/MSWAL_0206_0000.nii.gz", "label": "./labelsTs/MSWAL_0206.nii.gz"}, {"image": "./imagesTs/MSWAL_0210_0000.nii.gz", "label": "./labelsTs/MSWAL_0210.nii.gz"}, {"image": "./imagesTs/MSWAL_0211_0000.nii.gz", "label": "./labelsTs/MSWAL_0211.nii.gz"}, {"image": "./imagesTs/MSWAL_0212_0000.nii.gz", "label": "./labelsTs/MSWAL_0212.nii.gz"}, {"image": "./imagesTs/MSWAL_0213_0000.nii.gz", "label": "./labelsTs/MSWAL_0213.nii.gz"}, {"image": "./imagesTs/MSWAL_0215_0000.nii.gz", "label": "./labelsTs/MSWAL_0215.nii.gz"}, {"image": "./imagesTs/MSWAL_0216_0000.nii.gz", "label": "./labelsTs/MSWAL_0216.nii.gz"}, {"image": 
"./imagesTs/MSWAL_0231_0000.nii.gz", "label": "./labelsTs/MSWAL_0231.nii.gz"}, {"image": "./imagesTs/MSWAL_0232_0000.nii.gz", "label": "./labelsTs/MSWAL_0232.nii.gz"}, {"image": "./imagesTs/MSWAL_0235_0000.nii.gz", "label": "./labelsTs/MSWAL_0235.nii.gz"}, {"image": "./imagesTs/MSWAL_0236_0000.nii.gz", "label": "./labelsTs/MSWAL_0236.nii.gz"}, {"image": "./imagesTs/MSWAL_0237_0000.nii.gz", "label": "./labelsTs/MSWAL_0237.nii.gz"}, {"image": "./imagesTs/MSWAL_0239_0000.nii.gz", "label": "./labelsTs/MSWAL_0239.nii.gz"}, {"image": "./imagesTs/MSWAL_0240_0000.nii.gz", "label": "./labelsTs/MSWAL_0240.nii.gz"}, {"image": "./imagesTs/MSWAL_0244_0000.nii.gz", "label": "./labelsTs/MSWAL_0244.nii.gz"}, {"image": "./imagesTs/MSWAL_0249_0000.nii.gz", "label": "./labelsTs/MSWAL_0249.nii.gz"}, {"image": "./imagesTs/MSWAL_0250_0000.nii.gz", "label": "./labelsTs/MSWAL_0250.nii.gz"}, {"image": "./imagesTs/MSWAL_0266_0000.nii.gz", "label": "./labelsTs/MSWAL_0266.nii.gz"}, {"image": "./imagesTs/MSWAL_0268_0000.nii.gz", "label": "./labelsTs/MSWAL_0268.nii.gz"}, {"image": "./imagesTs/MSWAL_0269_0000.nii.gz", "label": "./labelsTs/MSWAL_0269.nii.gz"}, {"image": "./imagesTs/MSWAL_0280_0000.nii.gz", "label": "./labelsTs/MSWAL_0280.nii.gz"}, {"image": "./imagesTs/MSWAL_0286_0000.nii.gz", "label": "./labelsTs/MSWAL_0286.nii.gz"}, {"image": "./imagesTs/MSWAL_0287_0000.nii.gz", "label": "./labelsTs/MSWAL_0287.nii.gz"}, {"image": "./imagesTs/MSWAL_0291_0000.nii.gz", "label": "./labelsTs/MSWAL_0291.nii.gz"}, {"image": "./imagesTs/MSWAL_0292_0000.nii.gz", "label": "./labelsTs/MSWAL_0292.nii.gz"}, {"image": "./imagesTs/MSWAL_0294_0000.nii.gz", "label": "./labelsTs/MSWAL_0294.nii.gz"}, {"image": "./imagesTs/MSWAL_0295_0000.nii.gz", "label": "./labelsTs/MSWAL_0295.nii.gz"}, {"image": "./imagesTs/MSWAL_0298_0000.nii.gz", "label": "./labelsTs/MSWAL_0298.nii.gz"}, {"image": "./imagesTs/MSWAL_0299_0000.nii.gz", "label": "./labelsTs/MSWAL_0299.nii.gz"}, {"image": "./imagesTs/MSWAL_0300_0000.nii.gz", 
"label": "./labelsTs/MSWAL_0300.nii.gz"}, {"image": "./imagesTs/MSWAL_0304_0000.nii.gz", "label": "./labelsTs/MSWAL_0304.nii.gz"}, {"image": "./imagesTs/MSWAL_0305_0000.nii.gz", "label": "./labelsTs/MSWAL_0305.nii.gz"}, {"image": "./imagesTs/MSWAL_0309_0000.nii.gz", "label": "./labelsTs/MSWAL_0309.nii.gz"}, {"image": "./imagesTs/MSWAL_0310_0000.nii.gz", "label": "./labelsTs/MSWAL_0310.nii.gz"}, {"image": "./imagesTs/MSWAL_0315_0000.nii.gz", "label": "./labelsTs/MSWAL_0315.nii.gz"}, {"image": "./imagesTs/MSWAL_0319_0000.nii.gz", "label": "./labelsTs/MSWAL_0319.nii.gz"}, {"image": "./imagesTs/MSWAL_0321_0000.nii.gz", "label": "./labelsTs/MSWAL_0321.nii.gz"}, {"image": "./imagesTs/MSWAL_0322_0000.nii.gz", "label": "./labelsTs/MSWAL_0322.nii.gz"}, {"image": "./imagesTs/MSWAL_0325_0000.nii.gz", "label": "./labelsTs/MSWAL_0325.nii.gz"}, {"image": "./imagesTs/MSWAL_0329_0000.nii.gz", "label": "./labelsTs/MSWAL_0329.nii.gz"}, {"image": "./imagesTs/MSWAL_0339_0000.nii.gz", "label": "./labelsTs/MSWAL_0339.nii.gz"}, {"image": "./imagesTs/MSWAL_0340_0000.nii.gz", "label": "./labelsTs/MSWAL_0340.nii.gz"}, {"image": "./imagesTs/MSWAL_0347_0000.nii.gz", "label": "./labelsTs/MSWAL_0347.nii.gz"}, {"image": "./imagesTs/MSWAL_0349_0000.nii.gz", "label": "./labelsTs/MSWAL_0349.nii.gz"}, {"image": "./imagesTs/MSWAL_0350_0000.nii.gz", "label": "./labelsTs/MSWAL_0350.nii.gz"}, {"image": "./imagesTs/MSWAL_0351_0000.nii.gz", "label": "./labelsTs/MSWAL_0351.nii.gz"}, {"image": "./imagesTs/MSWAL_0352_0000.nii.gz", "label": "./labelsTs/MSWAL_0352.nii.gz"}, {"image": "./imagesTs/MSWAL_0358_0000.nii.gz", "label": "./labelsTs/MSWAL_0358.nii.gz"}, {"image": "./imagesTs/MSWAL_0359_0000.nii.gz", "label": "./labelsTs/MSWAL_0359.nii.gz"}, {"image": "./imagesTs/MSWAL_0364_0000.nii.gz", "label": "./labelsTs/MSWAL_0364.nii.gz"}, {"image": "./imagesTs/MSWAL_0367_0000.nii.gz", "label": "./labelsTs/MSWAL_0367.nii.gz"}, {"image": "./imagesTs/MSWAL_0368_0000.nii.gz", "label": "./labelsTs/MSWAL_0368.nii.gz"}, 
{"image": "./imagesTs/MSWAL_0371_0000.nii.gz", "label": "./labelsTs/MSWAL_0371.nii.gz"}, {"image": "./imagesTs/MSWAL_0372_0000.nii.gz", "label": "./labelsTs/MSWAL_0372.nii.gz"}, {"image": "./imagesTs/MSWAL_0377_0000.nii.gz", "label": "./labelsTs/MSWAL_0377.nii.gz"}, {"image": "./imagesTs/MSWAL_0383_0000.nii.gz", "label": "./labelsTs/MSWAL_0383.nii.gz"}, {"image": "./imagesTs/MSWAL_0384_0000.nii.gz", "label": "./labelsTs/MSWAL_0384.nii.gz"}, {"image": "./imagesTs/MSWAL_0385_0000.nii.gz", "label": "./labelsTs/MSWAL_0385.nii.gz"}, {"image": "./imagesTs/MSWAL_0386_0000.nii.gz", "label": "./labelsTs/MSWAL_0386.nii.gz"}, {"image": "./imagesTs/MSWAL_0394_0000.nii.gz", "label": "./labelsTs/MSWAL_0394.nii.gz"}, {"image": "./imagesTs/MSWAL_0395_0000.nii.gz", "label": "./labelsTs/MSWAL_0395.nii.gz"}, {"image": "./imagesTs/MSWAL_0396_0000.nii.gz", "label": "./labelsTs/MSWAL_0396.nii.gz"}, {"image": "./imagesTs/MSWAL_0401_0000.nii.gz", "label": "./labelsTs/MSWAL_0401.nii.gz"}, {"image": "./imagesTs/MSWAL_0404_0000.nii.gz", "label": "./labelsTs/MSWAL_0404.nii.gz"}, {"image": "./imagesTs/MSWAL_0405_0000.nii.gz", "label": "./labelsTs/MSWAL_0405.nii.gz"}, {"image": "./imagesTs/MSWAL_0406_0000.nii.gz", "label": "./labelsTs/MSWAL_0406.nii.gz"}, {"image": "./imagesTs/MSWAL_0408_0000.nii.gz", "label": "./labelsTs/MSWAL_0408.nii.gz"}, {"image": "./imagesTs/MSWAL_0413_0000.nii.gz", "label": "./labelsTs/MSWAL_0413.nii.gz"}, {"image": "./imagesTs/MSWAL_0424_0000.nii.gz", "label": "./labelsTs/MSWAL_0424.nii.gz"}, {"image": "./imagesTs/MSWAL_0433_0000.nii.gz", "label": "./labelsTs/MSWAL_0433.nii.gz"}, {"image": "./imagesTs/MSWAL_0441_0000.nii.gz", "label": "./labelsTs/MSWAL_0441.nii.gz"}, {"image": "./imagesTs/MSWAL_0443_0000.nii.gz", "label": "./labelsTs/MSWAL_0443.nii.gz"}, {"image": "./imagesTs/MSWAL_0444_0000.nii.gz", "label": "./labelsTs/MSWAL_0444.nii.gz"}, {"image": "./imagesTs/MSWAL_0445_0000.nii.gz", "label": "./labelsTs/MSWAL_0445.nii.gz"}, {"image": 
"./imagesTs/MSWAL_0448_0000.nii.gz", "label": "./labelsTs/MSWAL_0448.nii.gz"}, {"image": "./imagesTs/MSWAL_0449_0000.nii.gz", "label": "./labelsTs/MSWAL_0449.nii.gz"}, {"image": "./imagesTs/MSWAL_0450_0000.nii.gz", "label": "./labelsTs/MSWAL_0450.nii.gz"}, {"image": "./imagesTs/MSWAL_0451_0000.nii.gz", "label": "./labelsTs/MSWAL_0451.nii.gz"}, {"image": "./imagesTs/MSWAL_0454_0000.nii.gz", "label": "./labelsTs/MSWAL_0454.nii.gz"}, {"image": "./imagesTs/MSWAL_0456_0000.nii.gz", "label": "./labelsTs/MSWAL_0456.nii.gz"}, {"image": "./imagesTs/MSWAL_0458_0000.nii.gz", "label": "./labelsTs/MSWAL_0458.nii.gz"}, {"image": "./imagesTs/MSWAL_0459_0000.nii.gz", "label": "./labelsTs/MSWAL_0459.nii.gz"}, {"image": "./imagesTs/MSWAL_0462_0000.nii.gz", "label": "./labelsTs/MSWAL_0462.nii.gz"}, {"image": "./imagesTs/MSWAL_0467_0000.nii.gz", "label": "./labelsTs/MSWAL_0467.nii.gz"}, {"image": "./imagesTs/MSWAL_0469_0000.nii.gz", "label": "./labelsTs/MSWAL_0469.nii.gz"}, {"image": "./imagesTs/MSWAL_0472_0000.nii.gz", "label": "./labelsTs/MSWAL_0472.nii.gz"}, {"image": "./imagesTs/MSWAL_0478_0000.nii.gz", "label": "./labelsTs/MSWAL_0478.nii.gz"}, {"image": "./imagesTs/MSWAL_0481_0000.nii.gz", "label": "./labelsTs/MSWAL_0481.nii.gz"}, {"image": "./imagesTs/MSWAL_0494_0000.nii.gz", "label": "./labelsTs/MSWAL_0494.nii.gz"}, {"image": "./imagesTs/MSWAL_0496_0000.nii.gz", "label": "./labelsTs/MSWAL_0496.nii.gz"}, {"image": "./imagesTs/MSWAL_0499_0000.nii.gz", "label": "./labelsTs/MSWAL_0499.nii.gz"}, {"image": "./imagesTs/MSWAL_0502_0000.nii.gz", "label": "./labelsTs/MSWAL_0502.nii.gz"}, {"image": "./imagesTs/MSWAL_0503_0000.nii.gz", "label": "./labelsTs/MSWAL_0503.nii.gz"}, {"image": "./imagesTs/MSWAL_0511_0000.nii.gz", "label": "./labelsTs/MSWAL_0511.nii.gz"}, {"image": "./imagesTs/MSWAL_0513_0000.nii.gz", "label": "./labelsTs/MSWAL_0513.nii.gz"}, {"image": "./imagesTs/MSWAL_0514_0000.nii.gz", "label": "./labelsTs/MSWAL_0514.nii.gz"}, {"image": "./imagesTs/MSWAL_0515_0000.nii.gz", 
"label": "./labelsTs/MSWAL_0515.nii.gz"}, {"image": "./imagesTs/MSWAL_0517_0000.nii.gz", "label": "./labelsTs/MSWAL_0517.nii.gz"}, {"image": "./imagesTs/MSWAL_0520_0000.nii.gz", "label": "./labelsTs/MSWAL_0520.nii.gz"}, {"image": "./imagesTs/MSWAL_0525_0000.nii.gz", "label": "./labelsTs/MSWAL_0525.nii.gz"}, {"image": "./imagesTs/MSWAL_0528_0000.nii.gz", "label": "./labelsTs/MSWAL_0528.nii.gz"}, {"image": "./imagesTs/MSWAL_0529_0000.nii.gz", "label": "./labelsTs/MSWAL_0529.nii.gz"}, {"image": "./imagesTs/MSWAL_0532_0000.nii.gz", "label": "./labelsTs/MSWAL_0532.nii.gz"}, {"image": "./imagesTs/MSWAL_0533_0000.nii.gz", "label": "./labelsTs/MSWAL_0533.nii.gz"}, {"image": "./imagesTs/MSWAL_0537_0000.nii.gz", "label": "./labelsTs/MSWAL_0537.nii.gz"}, {"image": "./imagesTs/MSWAL_0541_0000.nii.gz", "label": "./labelsTs/MSWAL_0541.nii.gz"}, {"image": "./imagesTs/MSWAL_0543_0000.nii.gz", "label": "./labelsTs/MSWAL_0543.nii.gz"}, {"image": "./imagesTs/MSWAL_0560_0000.nii.gz", "label": "./labelsTs/MSWAL_0560.nii.gz"}, {"image": "./imagesTs/MSWAL_0565_0000.nii.gz", "label": "./labelsTs/MSWAL_0565.nii.gz"}, {"image": "./imagesTs/MSWAL_0569_0000.nii.gz", "label": "./labelsTs/MSWAL_0569.nii.gz"}, {"image": "./imagesTs/MSWAL_0570_0000.nii.gz", "label": "./labelsTs/MSWAL_0570.nii.gz"}, {"image": "./imagesTs/MSWAL_0572_0000.nii.gz", "label": "./labelsTs/MSWAL_0572.nii.gz"}, {"image": "./imagesTs/MSWAL_0576_0000.nii.gz", "label": "./labelsTs/MSWAL_0576.nii.gz"}, {"image": "./imagesTs/MSWAL_0585_0000.nii.gz", "label": "./labelsTs/MSWAL_0585.nii.gz"}, {"image": "./imagesTs/MSWAL_0587_0000.nii.gz", "label": "./labelsTs/MSWAL_0587.nii.gz"}, {"image": "./imagesTs/MSWAL_0588_0000.nii.gz", "label": "./labelsTs/MSWAL_0588.nii.gz"}, {"image": "./imagesTs/MSWAL_0589_0000.nii.gz", "label": "./labelsTs/MSWAL_0589.nii.gz"}, {"image": "./imagesTs/MSWAL_0594_0000.nii.gz", "label": "./labelsTs/MSWAL_0594.nii.gz"}, {"image": "./imagesTs/MSWAL_0603_0000.nii.gz", "label": "./labelsTs/MSWAL_0603.nii.gz"}, 
{'image': './imagesTs/MSWAL_0606_0000.nii.gz', 'label': './labelsTs/MSWAL_0606.nii.gz'}, {'image': './imagesTs/MSWAL_0607_0000.nii.gz', 'label': './labelsTs/MSWAL_0607.nii.gz'}, {'image': './imagesTs/MSWAL_0609_0000.nii.gz', 'label': './labelsTs/MSWAL_0609.nii.gz'}, {'image': './imagesTs/MSWAL_0610_0000.nii.gz', 'label': './labelsTs/MSWAL_0610.nii.gz'}, {'image': './imagesTs/MSWAL_0611_0000.nii.gz', 'label': './labelsTs/MSWAL_0611.nii.gz'}, {'image': './imagesTs/MSWAL_0613_0000.nii.gz', 'label': './labelsTs/MSWAL_0613.nii.gz'}, {'image': './imagesTs/MSWAL_0618_0000.nii.gz', 'label': './labelsTs/MSWAL_0618.nii.gz'}, {'image': './imagesTs/MSWAL_0619_0000.nii.gz', 'label': './labelsTs/MSWAL_0619.nii.gz'}, {'image': './imagesTs/MSWAL_0620_0000.nii.gz', 'label': './labelsTs/MSWAL_0620.nii.gz'}, {'image': './imagesTs/MSWAL_0622_0000.nii.gz', 'label': './labelsTs/MSWAL_0622.nii.gz'}, {'image': './imagesTs/MSWAL_0624_0000.nii.gz', 'label': './labelsTs/MSWAL_0624.nii.gz'}, {'image': './imagesTs/MSWAL_0631_0000.nii.gz', 'label': './labelsTs/MSWAL_0631.nii.gz'}, {'image': './imagesTs/MSWAL_0633_0000.nii.gz', 'label': './labelsTs/MSWAL_0633.nii.gz'}, {'image': './imagesTs/MSWAL_0634_0000.nii.gz', 'label': './labelsTs/MSWAL_0634.nii.gz'}, {'image': './imagesTs/MSWAL_0637_0000.nii.gz', 'label': './labelsTs/MSWAL_0637.nii.gz'}, {'image': './imagesTs/MSWAL_0639_0000.nii.gz', 'label': './labelsTs/MSWAL_0639.nii.gz'}, {'image': './imagesTs/MSWAL_0642_0000.nii.gz', 'label': './labelsTs/MSWAL_0642.nii.gz'}, {'image': './imagesTs/MSWAL_0645_0000.nii.gz', 'label': './labelsTs/MSWAL_0645.nii.gz'}, {'image': './imagesTs/MSWAL_0647_0000.nii.gz', 'label': './labelsTs/MSWAL_0647.nii.gz'}, {'image': './imagesTs/MSWAL_0652_0000.nii.gz', 'label': './labelsTs/MSWAL_0652.nii.gz'}, {'image': './imagesTs/MSWAL_0657_0000.nii.gz', 'label': './labelsTs/MSWAL_0657.nii.gz'}, {'image': './imagesTs/MSWAL_0659_0000.nii.gz', 'label': './labelsTs/MSWAL_0659.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0664_0000.nii.gz', 'label': './labelsTs/MSWAL_0664.nii.gz'}, {'image': './imagesTs/MSWAL_0665_0000.nii.gz', 'label': './labelsTs/MSWAL_0665.nii.gz'}, {'image': './imagesTs/MSWAL_0672_0000.nii.gz', 'label': './labelsTs/MSWAL_0672.nii.gz'}, {'image': './imagesTs/MSWAL_0678_0000.nii.gz', 'label': './labelsTs/MSWAL_0678.nii.gz'}, {'image': './imagesTs/MSWAL_0683_0000.nii.gz', 'label': './labelsTs/MSWAL_0683.nii.gz'}, {'image': './imagesTs/MSWAL_0684_0000.nii.gz', 'label': './labelsTs/MSWAL_0684.nii.gz'}, {'image': './imagesTs/MSWAL_0689_0000.nii.gz', 'label': './labelsTs/MSWAL_0689.nii.gz'}, {'image': './imagesTs/MSWAL_0691_0000.nii.gz', 'label': './labelsTs/MSWAL_0691.nii.gz'}]}", + "device": "cuda:0", + "disable_checkpointing": "False", + "enable_deep_supervision": "True", + "fold": "3", + "folder_with_segs_from_previous_stage": "None", + "gpu_name": "NVIDIA A100-SXM4-80GB", + "grad_scaler": "", + "hostname": "cn0079", + "inference_allowed_mirroring_axes": "(0, 1, 2)", + "initial_lr": "0.01", + "is_cascaded": "False", + "is_ddp": "False", + "label_manager": "", + "local_rank": "0", + "log_file": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/training_log_2026_4_8_15_55_58.txt", + "logger": "", + "loss": "DeepSupervisionWrapper(\n (loss): DC_and_CE_loss(\n (ce): RobustCrossEntropyLoss()\n (dc): OptimizedModule(\n (_orig_mod): MemoryEfficientSoftDiceLoss()\n )\n )\n)", + "lr_scheduler": "", + "my_init_kwargs": "{'plans': {'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'configurations': {'2d': {'data_identifier': 'nnUNetPlans_2d', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 35, 'patch_size': [512, 512], 'median_image_size_in_voxels': 
[512.0, 512.0], 'spacing': [0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 8, 'features_per_stage': [32, 64, 128, 256, 512, 512, 512, 512], 'conv_op': 'torch.nn.modules.conv.Conv2d', 'kernel_sizes': [[3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3]], 'strides': [[1, 1], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm2d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_lowres': {'data_identifier': 'nnUNetResEncUNetLPlans_3d_lowres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [190, 381, 381], 'spacing': [1.6798954741801528, 1.0079372845080916, 1.0079372845080916], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': 
None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': False, 'next_stage': '3d_cascade_fullres'}, '3d_fullres': {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': 
{'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_cascade_fullres': {'inherits_from': '3d_fullres', 'previous_stage': '3d_lowres'}}, 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}}, 'configuration': '3d_fullres', 'fold': 3, 'dataset_json': {'name': 'MSWAL', 'description': ' 3D Multi-class Segmentation of Whole Abdominal Lesions Dataset', 'licence': 'CC BY-NC 4.0', 'relase': 'July 8, 2025', 'tensorImageSize': '3D', 'file_ending': '.nii.gz', 'channel_names': {'0': 'CT'}, 'labels': {'background': 0, 'gallstone': 1, 'kidney stone': 2, 'liver tumor': 3, 'kidney tumor': 4, 'pancreatic cancer': 5, 'liver cyst': 6, 'kidney cyst': 7}, 'numTraining': 484, 'numTest': 210, 'training': [{'image': './imagesTr/MSWAL_0001_0000.nii.gz', 'label': './labelsTr/MSWAL_0001.nii.gz'}, {'image': './imagesTr/MSWAL_0002_0000.nii.gz', 'label': './labelsTr/MSWAL_0002.nii.gz'}, {'image': './imagesTr/MSWAL_0003_0000.nii.gz', 'label': './labelsTr/MSWAL_0003.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0008_0000.nii.gz', 'label': './labelsTr/MSWAL_0008.nii.gz'}, {'image': './imagesTr/MSWAL_0009_0000.nii.gz', 'label': './labelsTr/MSWAL_0009.nii.gz'}, {'image': './imagesTr/MSWAL_0011_0000.nii.gz', 'label': './labelsTr/MSWAL_0011.nii.gz'}, {'image': './imagesTr/MSWAL_0013_0000.nii.gz', 'label': './labelsTr/MSWAL_0013.nii.gz'}, {'image': './imagesTr/MSWAL_0014_0000.nii.gz', 'label': './labelsTr/MSWAL_0014.nii.gz'}, {'image': './imagesTr/MSWAL_0015_0000.nii.gz', 'label': './labelsTr/MSWAL_0015.nii.gz'}, {'image': './imagesTr/MSWAL_0017_0000.nii.gz', 'label': './labelsTr/MSWAL_0017.nii.gz'}, {'image': './imagesTr/MSWAL_0018_0000.nii.gz', 'label': './labelsTr/MSWAL_0018.nii.gz'}, {'image': './imagesTr/MSWAL_0020_0000.nii.gz', 'label': './labelsTr/MSWAL_0020.nii.gz'}, {'image': './imagesTr/MSWAL_0021_0000.nii.gz', 'label': './labelsTr/MSWAL_0021.nii.gz'}, {'image': './imagesTr/MSWAL_0022_0000.nii.gz', 'label': './labelsTr/MSWAL_0022.nii.gz'}, {'image': './imagesTr/MSWAL_0024_0000.nii.gz', 'label': './labelsTr/MSWAL_0024.nii.gz'}, {'image': './imagesTr/MSWAL_0026_0000.nii.gz', 'label': './labelsTr/MSWAL_0026.nii.gz'}, {'image': './imagesTr/MSWAL_0027_0000.nii.gz', 'label': './labelsTr/MSWAL_0027.nii.gz'}, {'image': './imagesTr/MSWAL_0028_0000.nii.gz', 'label': './labelsTr/MSWAL_0028.nii.gz'}, {'image': './imagesTr/MSWAL_0029_0000.nii.gz', 'label': './labelsTr/MSWAL_0029.nii.gz'}, {'image': './imagesTr/MSWAL_0031_0000.nii.gz', 'label': './labelsTr/MSWAL_0031.nii.gz'}, {'image': './imagesTr/MSWAL_0032_0000.nii.gz', 'label': './labelsTr/MSWAL_0032.nii.gz'}, {'image': './imagesTr/MSWAL_0033_0000.nii.gz', 'label': './labelsTr/MSWAL_0033.nii.gz'}, {'image': './imagesTr/MSWAL_0034_0000.nii.gz', 'label': './labelsTr/MSWAL_0034.nii.gz'}, {'image': './imagesTr/MSWAL_0035_0000.nii.gz', 'label': './labelsTr/MSWAL_0035.nii.gz'}, {'image': './imagesTr/MSWAL_0037_0000.nii.gz', 'label': './labelsTr/MSWAL_0037.nii.gz'}, {'image': './imagesTr/MSWAL_0038_0000.nii.gz', 
'label': './labelsTr/MSWAL_0038.nii.gz'}, {'image': './imagesTr/MSWAL_0039_0000.nii.gz', 'label': './labelsTr/MSWAL_0039.nii.gz'}, {'image': './imagesTr/MSWAL_0040_0000.nii.gz', 'label': './labelsTr/MSWAL_0040.nii.gz'}, {'image': './imagesTr/MSWAL_0041_0000.nii.gz', 'label': './labelsTr/MSWAL_0041.nii.gz'}, {'image': './imagesTr/MSWAL_0042_0000.nii.gz', 'label': './labelsTr/MSWAL_0042.nii.gz'}, {'image': './imagesTr/MSWAL_0045_0000.nii.gz', 'label': './labelsTr/MSWAL_0045.nii.gz'}, {'image': './imagesTr/MSWAL_0046_0000.nii.gz', 'label': './labelsTr/MSWAL_0046.nii.gz'}, {'image': './imagesTr/MSWAL_0049_0000.nii.gz', 'label': './labelsTr/MSWAL_0049.nii.gz'}, {'image': './imagesTr/MSWAL_0050_0000.nii.gz', 'label': './labelsTr/MSWAL_0050.nii.gz'}, {'image': './imagesTr/MSWAL_0051_0000.nii.gz', 'label': './labelsTr/MSWAL_0051.nii.gz'}, {'image': './imagesTr/MSWAL_0052_0000.nii.gz', 'label': './labelsTr/MSWAL_0052.nii.gz'}, {'image': './imagesTr/MSWAL_0054_0000.nii.gz', 'label': './labelsTr/MSWAL_0054.nii.gz'}, {'image': './imagesTr/MSWAL_0055_0000.nii.gz', 'label': './labelsTr/MSWAL_0055.nii.gz'}, {'image': './imagesTr/MSWAL_0056_0000.nii.gz', 'label': './labelsTr/MSWAL_0056.nii.gz'}, {'image': './imagesTr/MSWAL_0057_0000.nii.gz', 'label': './labelsTr/MSWAL_0057.nii.gz'}, {'image': './imagesTr/MSWAL_0059_0000.nii.gz', 'label': './labelsTr/MSWAL_0059.nii.gz'}, {'image': './imagesTr/MSWAL_0060_0000.nii.gz', 'label': './labelsTr/MSWAL_0060.nii.gz'}, {'image': './imagesTr/MSWAL_0061_0000.nii.gz', 'label': './labelsTr/MSWAL_0061.nii.gz'}, {'image': './imagesTr/MSWAL_0063_0000.nii.gz', 'label': './labelsTr/MSWAL_0063.nii.gz'}, {'image': './imagesTr/MSWAL_0064_0000.nii.gz', 'label': './labelsTr/MSWAL_0064.nii.gz'}, {'image': './imagesTr/MSWAL_0065_0000.nii.gz', 'label': './labelsTr/MSWAL_0065.nii.gz'}, {'image': './imagesTr/MSWAL_0066_0000.nii.gz', 'label': './labelsTr/MSWAL_0066.nii.gz'}, {'image': './imagesTr/MSWAL_0067_0000.nii.gz', 'label': './labelsTr/MSWAL_0067.nii.gz'}, 
{'image': './imagesTr/MSWAL_0069_0000.nii.gz', 'label': './labelsTr/MSWAL_0069.nii.gz'}, {'image': './imagesTr/MSWAL_0072_0000.nii.gz', 'label': './labelsTr/MSWAL_0072.nii.gz'}, {'image': './imagesTr/MSWAL_0075_0000.nii.gz', 'label': './labelsTr/MSWAL_0075.nii.gz'}, {'image': './imagesTr/MSWAL_0077_0000.nii.gz', 'label': './labelsTr/MSWAL_0077.nii.gz'}, {'image': './imagesTr/MSWAL_0080_0000.nii.gz', 'label': './labelsTr/MSWAL_0080.nii.gz'}, {'image': './imagesTr/MSWAL_0082_0000.nii.gz', 'label': './labelsTr/MSWAL_0082.nii.gz'}, {'image': './imagesTr/MSWAL_0083_0000.nii.gz', 'label': './labelsTr/MSWAL_0083.nii.gz'}, {'image': './imagesTr/MSWAL_0084_0000.nii.gz', 'label': './labelsTr/MSWAL_0084.nii.gz'}, {'image': './imagesTr/MSWAL_0085_0000.nii.gz', 'label': './labelsTr/MSWAL_0085.nii.gz'}, {'image': './imagesTr/MSWAL_0086_0000.nii.gz', 'label': './labelsTr/MSWAL_0086.nii.gz'}, {'image': './imagesTr/MSWAL_0088_0000.nii.gz', 'label': './labelsTr/MSWAL_0088.nii.gz'}, {'image': './imagesTr/MSWAL_0089_0000.nii.gz', 'label': './labelsTr/MSWAL_0089.nii.gz'}, {'image': './imagesTr/MSWAL_0092_0000.nii.gz', 'label': './labelsTr/MSWAL_0092.nii.gz'}, {'image': './imagesTr/MSWAL_0093_0000.nii.gz', 'label': './labelsTr/MSWAL_0093.nii.gz'}, {'image': './imagesTr/MSWAL_0094_0000.nii.gz', 'label': './labelsTr/MSWAL_0094.nii.gz'}, {'image': './imagesTr/MSWAL_0095_0000.nii.gz', 'label': './labelsTr/MSWAL_0095.nii.gz'}, {'image': './imagesTr/MSWAL_0096_0000.nii.gz', 'label': './labelsTr/MSWAL_0096.nii.gz'}, {'image': './imagesTr/MSWAL_0098_0000.nii.gz', 'label': './labelsTr/MSWAL_0098.nii.gz'}, {'image': './imagesTr/MSWAL_0099_0000.nii.gz', 'label': './labelsTr/MSWAL_0099.nii.gz'}, {'image': './imagesTr/MSWAL_0101_0000.nii.gz', 'label': './labelsTr/MSWAL_0101.nii.gz'}, {'image': './imagesTr/MSWAL_0102_0000.nii.gz', 'label': './labelsTr/MSWAL_0102.nii.gz'}, {'image': './imagesTr/MSWAL_0103_0000.nii.gz', 'label': './labelsTr/MSWAL_0103.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0104_0000.nii.gz', 'label': './labelsTr/MSWAL_0104.nii.gz'}, {'image': './imagesTr/MSWAL_0105_0000.nii.gz', 'label': './labelsTr/MSWAL_0105.nii.gz'}, {'image': './imagesTr/MSWAL_0106_0000.nii.gz', 'label': './labelsTr/MSWAL_0106.nii.gz'}, {'image': './imagesTr/MSWAL_0108_0000.nii.gz', 'label': './labelsTr/MSWAL_0108.nii.gz'}, {'image': './imagesTr/MSWAL_0109_0000.nii.gz', 'label': './labelsTr/MSWAL_0109.nii.gz'}, {'image': './imagesTr/MSWAL_0110_0000.nii.gz', 'label': './labelsTr/MSWAL_0110.nii.gz'}, {'image': './imagesTr/MSWAL_0111_0000.nii.gz', 'label': './labelsTr/MSWAL_0111.nii.gz'}, {'image': './imagesTr/MSWAL_0112_0000.nii.gz', 'label': './labelsTr/MSWAL_0112.nii.gz'}, {'image': './imagesTr/MSWAL_0113_0000.nii.gz', 'label': './labelsTr/MSWAL_0113.nii.gz'}, {'image': './imagesTr/MSWAL_0114_0000.nii.gz', 'label': './labelsTr/MSWAL_0114.nii.gz'}, {'image': './imagesTr/MSWAL_0117_0000.nii.gz', 'label': './labelsTr/MSWAL_0117.nii.gz'}, {'image': './imagesTr/MSWAL_0119_0000.nii.gz', 'label': './labelsTr/MSWAL_0119.nii.gz'}, {'image': './imagesTr/MSWAL_0120_0000.nii.gz', 'label': './labelsTr/MSWAL_0120.nii.gz'}, {'image': './imagesTr/MSWAL_0122_0000.nii.gz', 'label': './labelsTr/MSWAL_0122.nii.gz'}, {'image': './imagesTr/MSWAL_0124_0000.nii.gz', 'label': './labelsTr/MSWAL_0124.nii.gz'}, {'image': './imagesTr/MSWAL_0125_0000.nii.gz', 'label': './labelsTr/MSWAL_0125.nii.gz'}, {'image': './imagesTr/MSWAL_0126_0000.nii.gz', 'label': './labelsTr/MSWAL_0126.nii.gz'}, {'image': './imagesTr/MSWAL_0127_0000.nii.gz', 'label': './labelsTr/MSWAL_0127.nii.gz'}, {'image': './imagesTr/MSWAL_0128_0000.nii.gz', 'label': './labelsTr/MSWAL_0128.nii.gz'}, {'image': './imagesTr/MSWAL_0129_0000.nii.gz', 'label': './labelsTr/MSWAL_0129.nii.gz'}, {'image': './imagesTr/MSWAL_0130_0000.nii.gz', 'label': './labelsTr/MSWAL_0130.nii.gz'}, {'image': './imagesTr/MSWAL_0132_0000.nii.gz', 'label': './labelsTr/MSWAL_0132.nii.gz'}, {'image': './imagesTr/MSWAL_0133_0000.nii.gz', 
'label': './labelsTr/MSWAL_0133.nii.gz'}, {'image': './imagesTr/MSWAL_0134_0000.nii.gz', 'label': './labelsTr/MSWAL_0134.nii.gz'}, {'image': './imagesTr/MSWAL_0136_0000.nii.gz', 'label': './labelsTr/MSWAL_0136.nii.gz'}, {'image': './imagesTr/MSWAL_0138_0000.nii.gz', 'label': './labelsTr/MSWAL_0138.nii.gz'}, {'image': './imagesTr/MSWAL_0139_0000.nii.gz', 'label': './labelsTr/MSWAL_0139.nii.gz'}, {'image': './imagesTr/MSWAL_0140_0000.nii.gz', 'label': './labelsTr/MSWAL_0140.nii.gz'}, {'image': './imagesTr/MSWAL_0141_0000.nii.gz', 'label': './labelsTr/MSWAL_0141.nii.gz'}, {'image': './imagesTr/MSWAL_0142_0000.nii.gz', 'label': './labelsTr/MSWAL_0142.nii.gz'}, {'image': './imagesTr/MSWAL_0143_0000.nii.gz', 'label': './labelsTr/MSWAL_0143.nii.gz'}, {'image': './imagesTr/MSWAL_0145_0000.nii.gz', 'label': './labelsTr/MSWAL_0145.nii.gz'}, {'image': './imagesTr/MSWAL_0147_0000.nii.gz', 'label': './labelsTr/MSWAL_0147.nii.gz'}, {'image': './imagesTr/MSWAL_0148_0000.nii.gz', 'label': './labelsTr/MSWAL_0148.nii.gz'}, {'image': './imagesTr/MSWAL_0149_0000.nii.gz', 'label': './labelsTr/MSWAL_0149.nii.gz'}, {'image': './imagesTr/MSWAL_0150_0000.nii.gz', 'label': './labelsTr/MSWAL_0150.nii.gz'}, {'image': './imagesTr/MSWAL_0151_0000.nii.gz', 'label': './labelsTr/MSWAL_0151.nii.gz'}, {'image': './imagesTr/MSWAL_0152_0000.nii.gz', 'label': './labelsTr/MSWAL_0152.nii.gz'}, {'image': './imagesTr/MSWAL_0157_0000.nii.gz', 'label': './labelsTr/MSWAL_0157.nii.gz'}, {'image': './imagesTr/MSWAL_0159_0000.nii.gz', 'label': './labelsTr/MSWAL_0159.nii.gz'}, {'image': './imagesTr/MSWAL_0162_0000.nii.gz', 'label': './labelsTr/MSWAL_0162.nii.gz'}, {'image': './imagesTr/MSWAL_0163_0000.nii.gz', 'label': './labelsTr/MSWAL_0163.nii.gz'}, {'image': './imagesTr/MSWAL_0165_0000.nii.gz', 'label': './labelsTr/MSWAL_0165.nii.gz'}, {'image': './imagesTr/MSWAL_0166_0000.nii.gz', 'label': './labelsTr/MSWAL_0166.nii.gz'}, {'image': './imagesTr/MSWAL_0167_0000.nii.gz', 'label': './labelsTr/MSWAL_0167.nii.gz'}, 
{'image': './imagesTr/MSWAL_0168_0000.nii.gz', 'label': './labelsTr/MSWAL_0168.nii.gz'}, {'image': './imagesTr/MSWAL_0169_0000.nii.gz', 'label': './labelsTr/MSWAL_0169.nii.gz'}, {'image': './imagesTr/MSWAL_0170_0000.nii.gz', 'label': './labelsTr/MSWAL_0170.nii.gz'}, {'image': './imagesTr/MSWAL_0171_0000.nii.gz', 'label': './labelsTr/MSWAL_0171.nii.gz'}, {'image': './imagesTr/MSWAL_0172_0000.nii.gz', 'label': './labelsTr/MSWAL_0172.nii.gz'}, {'image': './imagesTr/MSWAL_0173_0000.nii.gz', 'label': './labelsTr/MSWAL_0173.nii.gz'}, {'image': './imagesTr/MSWAL_0174_0000.nii.gz', 'label': './labelsTr/MSWAL_0174.nii.gz'}, {'image': './imagesTr/MSWAL_0175_0000.nii.gz', 'label': './labelsTr/MSWAL_0175.nii.gz'}, {'image': './imagesTr/MSWAL_0176_0000.nii.gz', 'label': './labelsTr/MSWAL_0176.nii.gz'}, {'image': './imagesTr/MSWAL_0177_0000.nii.gz', 'label': './labelsTr/MSWAL_0177.nii.gz'}, {'image': './imagesTr/MSWAL_0178_0000.nii.gz', 'label': './labelsTr/MSWAL_0178.nii.gz'}, {'image': './imagesTr/MSWAL_0179_0000.nii.gz', 'label': './labelsTr/MSWAL_0179.nii.gz'}, {'image': './imagesTr/MSWAL_0180_0000.nii.gz', 'label': './labelsTr/MSWAL_0180.nii.gz'}, {'image': './imagesTr/MSWAL_0182_0000.nii.gz', 'label': './labelsTr/MSWAL_0182.nii.gz'}, {'image': './imagesTr/MSWAL_0183_0000.nii.gz', 'label': './labelsTr/MSWAL_0183.nii.gz'}, {'image': './imagesTr/MSWAL_0184_0000.nii.gz', 'label': './labelsTr/MSWAL_0184.nii.gz'}, {'image': './imagesTr/MSWAL_0185_0000.nii.gz', 'label': './labelsTr/MSWAL_0185.nii.gz'}, {'image': './imagesTr/MSWAL_0186_0000.nii.gz', 'label': './labelsTr/MSWAL_0186.nii.gz'}, {'image': './imagesTr/MSWAL_0187_0000.nii.gz', 'label': './labelsTr/MSWAL_0187.nii.gz'}, {'image': './imagesTr/MSWAL_0188_0000.nii.gz', 'label': './labelsTr/MSWAL_0188.nii.gz'}, {'image': './imagesTr/MSWAL_0189_0000.nii.gz', 'label': './labelsTr/MSWAL_0189.nii.gz'}, {'image': './imagesTr/MSWAL_0193_0000.nii.gz', 'label': './labelsTr/MSWAL_0193.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0194_0000.nii.gz', 'label': './labelsTr/MSWAL_0194.nii.gz'}, {'image': './imagesTr/MSWAL_0195_0000.nii.gz', 'label': './labelsTr/MSWAL_0195.nii.gz'}, {'image': './imagesTr/MSWAL_0199_0000.nii.gz', 'label': './labelsTr/MSWAL_0199.nii.gz'}, {'image': './imagesTr/MSWAL_0201_0000.nii.gz', 'label': './labelsTr/MSWAL_0201.nii.gz'}, {'image': './imagesTr/MSWAL_0202_0000.nii.gz', 'label': './labelsTr/MSWAL_0202.nii.gz'}, {'image': './imagesTr/MSWAL_0203_0000.nii.gz', 'label': './labelsTr/MSWAL_0203.nii.gz'}, {'image': './imagesTr/MSWAL_0204_0000.nii.gz', 'label': './labelsTr/MSWAL_0204.nii.gz'}, {'image': './imagesTr/MSWAL_0207_0000.nii.gz', 'label': './labelsTr/MSWAL_0207.nii.gz'}, {'image': './imagesTr/MSWAL_0208_0000.nii.gz', 'label': './labelsTr/MSWAL_0208.nii.gz'}, {'image': './imagesTr/MSWAL_0209_0000.nii.gz', 'label': './labelsTr/MSWAL_0209.nii.gz'}, {'image': './imagesTr/MSWAL_0214_0000.nii.gz', 'label': './labelsTr/MSWAL_0214.nii.gz'}, {'image': './imagesTr/MSWAL_0217_0000.nii.gz', 'label': './labelsTr/MSWAL_0217.nii.gz'}, {'image': './imagesTr/MSWAL_0218_0000.nii.gz', 'label': './labelsTr/MSWAL_0218.nii.gz'}, {'image': './imagesTr/MSWAL_0219_0000.nii.gz', 'label': './labelsTr/MSWAL_0219.nii.gz'}, {'image': './imagesTr/MSWAL_0220_0000.nii.gz', 'label': './labelsTr/MSWAL_0220.nii.gz'}, {'image': './imagesTr/MSWAL_0221_0000.nii.gz', 'label': './labelsTr/MSWAL_0221.nii.gz'}, {'image': './imagesTr/MSWAL_0222_0000.nii.gz', 'label': './labelsTr/MSWAL_0222.nii.gz'}, {'image': './imagesTr/MSWAL_0223_0000.nii.gz', 'label': './labelsTr/MSWAL_0223.nii.gz'}, {'image': './imagesTr/MSWAL_0224_0000.nii.gz', 'label': './labelsTr/MSWAL_0224.nii.gz'}, {'image': './imagesTr/MSWAL_0225_0000.nii.gz', 'label': './labelsTr/MSWAL_0225.nii.gz'}, {'image': './imagesTr/MSWAL_0226_0000.nii.gz', 'label': './labelsTr/MSWAL_0226.nii.gz'}, {'image': './imagesTr/MSWAL_0227_0000.nii.gz', 'label': './labelsTr/MSWAL_0227.nii.gz'}, {'image': './imagesTr/MSWAL_0228_0000.nii.gz', 
'label': './labelsTr/MSWAL_0228.nii.gz'}, {'image': './imagesTr/MSWAL_0229_0000.nii.gz', 'label': './labelsTr/MSWAL_0229.nii.gz'}, {'image': './imagesTr/MSWAL_0230_0000.nii.gz', 'label': './labelsTr/MSWAL_0230.nii.gz'}, {'image': './imagesTr/MSWAL_0233_0000.nii.gz', 'label': './labelsTr/MSWAL_0233.nii.gz'}, {'image': './imagesTr/MSWAL_0234_0000.nii.gz', 'label': './labelsTr/MSWAL_0234.nii.gz'}, {'image': './imagesTr/MSWAL_0238_0000.nii.gz', 'label': './labelsTr/MSWAL_0238.nii.gz'}, {'image': './imagesTr/MSWAL_0241_0000.nii.gz', 'label': './labelsTr/MSWAL_0241.nii.gz'}, {'image': './imagesTr/MSWAL_0242_0000.nii.gz', 'label': './labelsTr/MSWAL_0242.nii.gz'}, {'image': './imagesTr/MSWAL_0243_0000.nii.gz', 'label': './labelsTr/MSWAL_0243.nii.gz'}, {'image': './imagesTr/MSWAL_0245_0000.nii.gz', 'label': './labelsTr/MSWAL_0245.nii.gz'}, {'image': './imagesTr/MSWAL_0246_0000.nii.gz', 'label': './labelsTr/MSWAL_0246.nii.gz'}, {'image': './imagesTr/MSWAL_0247_0000.nii.gz', 'label': './labelsTr/MSWAL_0247.nii.gz'}, {'image': './imagesTr/MSWAL_0248_0000.nii.gz', 'label': './labelsTr/MSWAL_0248.nii.gz'}, {'image': './imagesTr/MSWAL_0251_0000.nii.gz', 'label': './labelsTr/MSWAL_0251.nii.gz'}, {'image': './imagesTr/MSWAL_0252_0000.nii.gz', 'label': './labelsTr/MSWAL_0252.nii.gz'}, {'image': './imagesTr/MSWAL_0253_0000.nii.gz', 'label': './labelsTr/MSWAL_0253.nii.gz'}, {'image': './imagesTr/MSWAL_0254_0000.nii.gz', 'label': './labelsTr/MSWAL_0254.nii.gz'}, {'image': './imagesTr/MSWAL_0255_0000.nii.gz', 'label': './labelsTr/MSWAL_0255.nii.gz'}, {'image': './imagesTr/MSWAL_0256_0000.nii.gz', 'label': './labelsTr/MSWAL_0256.nii.gz'}, {'image': './imagesTr/MSWAL_0257_0000.nii.gz', 'label': './labelsTr/MSWAL_0257.nii.gz'}, {'image': './imagesTr/MSWAL_0258_0000.nii.gz', 'label': './labelsTr/MSWAL_0258.nii.gz'}, {'image': './imagesTr/MSWAL_0259_0000.nii.gz', 'label': './labelsTr/MSWAL_0259.nii.gz'}, {'image': './imagesTr/MSWAL_0260_0000.nii.gz', 'label': './labelsTr/MSWAL_0260.nii.gz'}, 
{'image': './imagesTr/MSWAL_0261_0000.nii.gz', 'label': './labelsTr/MSWAL_0261.nii.gz'}, {'image': './imagesTr/MSWAL_0262_0000.nii.gz', 'label': './labelsTr/MSWAL_0262.nii.gz'}, {'image': './imagesTr/MSWAL_0263_0000.nii.gz', 'label': './labelsTr/MSWAL_0263.nii.gz'}, {'image': './imagesTr/MSWAL_0264_0000.nii.gz', 'label': './labelsTr/MSWAL_0264.nii.gz'}, {'image': './imagesTr/MSWAL_0265_0000.nii.gz', 'label': './labelsTr/MSWAL_0265.nii.gz'}, {'image': './imagesTr/MSWAL_0267_0000.nii.gz', 'label': './labelsTr/MSWAL_0267.nii.gz'}, {'image': './imagesTr/MSWAL_0270_0000.nii.gz', 'label': './labelsTr/MSWAL_0270.nii.gz'}, {'image': './imagesTr/MSWAL_0271_0000.nii.gz', 'label': './labelsTr/MSWAL_0271.nii.gz'}, {'image': './imagesTr/MSWAL_0272_0000.nii.gz', 'label': './labelsTr/MSWAL_0272.nii.gz'}, {'image': './imagesTr/MSWAL_0273_0000.nii.gz', 'label': './labelsTr/MSWAL_0273.nii.gz'}, {'image': './imagesTr/MSWAL_0274_0000.nii.gz', 'label': './labelsTr/MSWAL_0274.nii.gz'}, {'image': './imagesTr/MSWAL_0275_0000.nii.gz', 'label': './labelsTr/MSWAL_0275.nii.gz'}, {'image': './imagesTr/MSWAL_0276_0000.nii.gz', 'label': './labelsTr/MSWAL_0276.nii.gz'}, {'image': './imagesTr/MSWAL_0277_0000.nii.gz', 'label': './labelsTr/MSWAL_0277.nii.gz'}, {'image': './imagesTr/MSWAL_0278_0000.nii.gz', 'label': './labelsTr/MSWAL_0278.nii.gz'}, {'image': './imagesTr/MSWAL_0279_0000.nii.gz', 'label': './labelsTr/MSWAL_0279.nii.gz'}, {'image': './imagesTr/MSWAL_0281_0000.nii.gz', 'label': './labelsTr/MSWAL_0281.nii.gz'}, {'image': './imagesTr/MSWAL_0282_0000.nii.gz', 'label': './labelsTr/MSWAL_0282.nii.gz'}, {'image': './imagesTr/MSWAL_0283_0000.nii.gz', 'label': './labelsTr/MSWAL_0283.nii.gz'}, {'image': './imagesTr/MSWAL_0284_0000.nii.gz', 'label': './labelsTr/MSWAL_0284.nii.gz'}, {'image': './imagesTr/MSWAL_0285_0000.nii.gz', 'label': './labelsTr/MSWAL_0285.nii.gz'}, {'image': './imagesTr/MSWAL_0288_0000.nii.gz', 'label': './labelsTr/MSWAL_0288.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0289_0000.nii.gz', 'label': './labelsTr/MSWAL_0289.nii.gz'}, {'image': './imagesTr/MSWAL_0290_0000.nii.gz', 'label': './labelsTr/MSWAL_0290.nii.gz'}, {'image': './imagesTr/MSWAL_0293_0000.nii.gz', 'label': './labelsTr/MSWAL_0293.nii.gz'}, {'image': './imagesTr/MSWAL_0296_0000.nii.gz', 'label': './labelsTr/MSWAL_0296.nii.gz'}, {'image': './imagesTr/MSWAL_0297_0000.nii.gz', 'label': './labelsTr/MSWAL_0297.nii.gz'}, {'image': './imagesTr/MSWAL_0301_0000.nii.gz', 'label': './labelsTr/MSWAL_0301.nii.gz'}, {'image': './imagesTr/MSWAL_0302_0000.nii.gz', 'label': './labelsTr/MSWAL_0302.nii.gz'}, {'image': './imagesTr/MSWAL_0303_0000.nii.gz', 'label': './labelsTr/MSWAL_0303.nii.gz'}, {'image': './imagesTr/MSWAL_0306_0000.nii.gz', 'label': './labelsTr/MSWAL_0306.nii.gz'}, {'image': './imagesTr/MSWAL_0307_0000.nii.gz', 'label': './labelsTr/MSWAL_0307.nii.gz'}, {'image': './imagesTr/MSWAL_0308_0000.nii.gz', 'label': './labelsTr/MSWAL_0308.nii.gz'}, {'image': './imagesTr/MSWAL_0311_0000.nii.gz', 'label': './labelsTr/MSWAL_0311.nii.gz'}, {'image': './imagesTr/MSWAL_0312_0000.nii.gz', 'label': './labelsTr/MSWAL_0312.nii.gz'}, {'image': './imagesTr/MSWAL_0313_0000.nii.gz', 'label': './labelsTr/MSWAL_0313.nii.gz'}, {'image': './imagesTr/MSWAL_0314_0000.nii.gz', 'label': './labelsTr/MSWAL_0314.nii.gz'}, {'image': './imagesTr/MSWAL_0316_0000.nii.gz', 'label': './labelsTr/MSWAL_0316.nii.gz'}, {'image': './imagesTr/MSWAL_0317_0000.nii.gz', 'label': './labelsTr/MSWAL_0317.nii.gz'}, {'image': './imagesTr/MSWAL_0318_0000.nii.gz', 'label': './labelsTr/MSWAL_0318.nii.gz'}, {'image': './imagesTr/MSWAL_0320_0000.nii.gz', 'label': './labelsTr/MSWAL_0320.nii.gz'}, {'image': './imagesTr/MSWAL_0323_0000.nii.gz', 'label': './labelsTr/MSWAL_0323.nii.gz'}, {'image': './imagesTr/MSWAL_0324_0000.nii.gz', 'label': './labelsTr/MSWAL_0324.nii.gz'}, {'image': './imagesTr/MSWAL_0326_0000.nii.gz', 'label': './labelsTr/MSWAL_0326.nii.gz'}, {'image': './imagesTr/MSWAL_0327_0000.nii.gz', 
"label": "./labelsTr/MSWAL_0327.nii.gz"},
{"image": "./imagesTr/MSWAL_0328_0000.nii.gz", "label": "./labelsTr/MSWAL_0328.nii.gz"},
{"image": "./imagesTr/MSWAL_0330_0000.nii.gz", "label": "./labelsTr/MSWAL_0330.nii.gz"},
{"image": "./imagesTr/MSWAL_0331_0000.nii.gz", "label": "./labelsTr/MSWAL_0331.nii.gz"},
{"image": "./imagesTr/MSWAL_0332_0000.nii.gz", "label": "./labelsTr/MSWAL_0332.nii.gz"},
{"image": "./imagesTr/MSWAL_0333_0000.nii.gz", "label": "./labelsTr/MSWAL_0333.nii.gz"},
{"image": "./imagesTr/MSWAL_0334_0000.nii.gz", "label": "./labelsTr/MSWAL_0334.nii.gz"},
{"image": "./imagesTr/MSWAL_0335_0000.nii.gz", "label": "./labelsTr/MSWAL_0335.nii.gz"},
{"image": "./imagesTr/MSWAL_0336_0000.nii.gz", "label": "./labelsTr/MSWAL_0336.nii.gz"},
{"image": "./imagesTr/MSWAL_0337_0000.nii.gz", "label": "./labelsTr/MSWAL_0337.nii.gz"},
{"image": "./imagesTr/MSWAL_0338_0000.nii.gz", "label": "./labelsTr/MSWAL_0338.nii.gz"},
{"image": "./imagesTr/MSWAL_0341_0000.nii.gz", "label": "./labelsTr/MSWAL_0341.nii.gz"},
{"image": "./imagesTr/MSWAL_0342_0000.nii.gz", "label": "./labelsTr/MSWAL_0342.nii.gz"},
{"image": "./imagesTr/MSWAL_0343_0000.nii.gz", "label": "./labelsTr/MSWAL_0343.nii.gz"},
{"image": "./imagesTr/MSWAL_0344_0000.nii.gz", "label": "./labelsTr/MSWAL_0344.nii.gz"},
{"image": "./imagesTr/MSWAL_0345_0000.nii.gz", "label": "./labelsTr/MSWAL_0345.nii.gz"},
{"image": "./imagesTr/MSWAL_0346_0000.nii.gz", "label": "./labelsTr/MSWAL_0346.nii.gz"},
{"image": "./imagesTr/MSWAL_0348_0000.nii.gz", "label": "./labelsTr/MSWAL_0348.nii.gz"},
{"image": "./imagesTr/MSWAL_0353_0000.nii.gz", "label": "./labelsTr/MSWAL_0353.nii.gz"},
{"image": "./imagesTr/MSWAL_0354_0000.nii.gz", "label": "./labelsTr/MSWAL_0354.nii.gz"},
{"image": "./imagesTr/MSWAL_0355_0000.nii.gz", "label": "./labelsTr/MSWAL_0355.nii.gz"},
{"image": "./imagesTr/MSWAL_0356_0000.nii.gz", "label": "./labelsTr/MSWAL_0356.nii.gz"},
{"image": "./imagesTr/MSWAL_0357_0000.nii.gz", "label": "./labelsTr/MSWAL_0357.nii.gz"},
{"image": "./imagesTr/MSWAL_0360_0000.nii.gz", "label": "./labelsTr/MSWAL_0360.nii.gz"},
{"image": "./imagesTr/MSWAL_0361_0000.nii.gz", "label": "./labelsTr/MSWAL_0361.nii.gz"},
{"image": "./imagesTr/MSWAL_0362_0000.nii.gz", "label": "./labelsTr/MSWAL_0362.nii.gz"},
{"image": "./imagesTr/MSWAL_0363_0000.nii.gz", "label": "./labelsTr/MSWAL_0363.nii.gz"},
{"image": "./imagesTr/MSWAL_0365_0000.nii.gz", "label": "./labelsTr/MSWAL_0365.nii.gz"},
{"image": "./imagesTr/MSWAL_0366_0000.nii.gz", "label": "./labelsTr/MSWAL_0366.nii.gz"},
{"image": "./imagesTr/MSWAL_0369_0000.nii.gz", "label": "./labelsTr/MSWAL_0369.nii.gz"},
{"image": "./imagesTr/MSWAL_0370_0000.nii.gz", "label": "./labelsTr/MSWAL_0370.nii.gz"},
{"image": "./imagesTr/MSWAL_0373_0000.nii.gz", "label": "./labelsTr/MSWAL_0373.nii.gz"},
{"image": "./imagesTr/MSWAL_0374_0000.nii.gz", "label": "./labelsTr/MSWAL_0374.nii.gz"},
{"image": "./imagesTr/MSWAL_0375_0000.nii.gz", "label": "./labelsTr/MSWAL_0375.nii.gz"},
{"image": "./imagesTr/MSWAL_0376_0000.nii.gz", "label": "./labelsTr/MSWAL_0376.nii.gz"},
{"image": "./imagesTr/MSWAL_0378_0000.nii.gz", "label": "./labelsTr/MSWAL_0378.nii.gz"},
{"image": "./imagesTr/MSWAL_0379_0000.nii.gz", "label": "./labelsTr/MSWAL_0379.nii.gz"},
{"image": "./imagesTr/MSWAL_0380_0000.nii.gz", "label": "./labelsTr/MSWAL_0380.nii.gz"},
{"image": "./imagesTr/MSWAL_0381_0000.nii.gz", "label": "./labelsTr/MSWAL_0381.nii.gz"},
{"image": "./imagesTr/MSWAL_0382_0000.nii.gz", "label": "./labelsTr/MSWAL_0382.nii.gz"},
{"image": "./imagesTr/MSWAL_0387_0000.nii.gz", "label": "./labelsTr/MSWAL_0387.nii.gz"},
{"image": "./imagesTr/MSWAL_0388_0000.nii.gz", "label": "./labelsTr/MSWAL_0388.nii.gz"},
{"image": "./imagesTr/MSWAL_0389_0000.nii.gz", "label": "./labelsTr/MSWAL_0389.nii.gz"},
{"image": "./imagesTr/MSWAL_0390_0000.nii.gz", "label": "./labelsTr/MSWAL_0390.nii.gz"},
{"image": "./imagesTr/MSWAL_0391_0000.nii.gz", "label": "./labelsTr/MSWAL_0391.nii.gz"},
{"image": "./imagesTr/MSWAL_0392_0000.nii.gz", "label": "./labelsTr/MSWAL_0392.nii.gz"},
{"image": "./imagesTr/MSWAL_0393_0000.nii.gz", "label": "./labelsTr/MSWAL_0393.nii.gz"},
{"image": "./imagesTr/MSWAL_0397_0000.nii.gz", "label": "./labelsTr/MSWAL_0397.nii.gz"},
{"image": "./imagesTr/MSWAL_0398_0000.nii.gz", "label": "./labelsTr/MSWAL_0398.nii.gz"},
{"image": "./imagesTr/MSWAL_0399_0000.nii.gz", "label": "./labelsTr/MSWAL_0399.nii.gz"},
{"image": "./imagesTr/MSWAL_0400_0000.nii.gz", "label": "./labelsTr/MSWAL_0400.nii.gz"},
{"image": "./imagesTr/MSWAL_0402_0000.nii.gz", "label": "./labelsTr/MSWAL_0402.nii.gz"},
{"image": "./imagesTr/MSWAL_0403_0000.nii.gz", "label": "./labelsTr/MSWAL_0403.nii.gz"},
{"image": "./imagesTr/MSWAL_0407_0000.nii.gz", "label": "./labelsTr/MSWAL_0407.nii.gz"},
{"image": "./imagesTr/MSWAL_0409_0000.nii.gz", "label": "./labelsTr/MSWAL_0409.nii.gz"},
{"image": "./imagesTr/MSWAL_0410_0000.nii.gz", "label": "./labelsTr/MSWAL_0410.nii.gz"},
{"image": "./imagesTr/MSWAL_0411_0000.nii.gz", "label": "./labelsTr/MSWAL_0411.nii.gz"},
{"image": "./imagesTr/MSWAL_0412_0000.nii.gz", "label": "./labelsTr/MSWAL_0412.nii.gz"},
{"image": "./imagesTr/MSWAL_0414_0000.nii.gz", "label": "./labelsTr/MSWAL_0414.nii.gz"},
{"image": "./imagesTr/MSWAL_0415_0000.nii.gz", "label": "./labelsTr/MSWAL_0415.nii.gz"},
{"image": "./imagesTr/MSWAL_0416_0000.nii.gz", "label": "./labelsTr/MSWAL_0416.nii.gz"},
{"image": "./imagesTr/MSWAL_0417_0000.nii.gz", "label": "./labelsTr/MSWAL_0417.nii.gz"},
{"image": "./imagesTr/MSWAL_0418_0000.nii.gz", "label": "./labelsTr/MSWAL_0418.nii.gz"},
{"image": "./imagesTr/MSWAL_0419_0000.nii.gz", "label": "./labelsTr/MSWAL_0419.nii.gz"},
{"image": "./imagesTr/MSWAL_0420_0000.nii.gz", "label": "./labelsTr/MSWAL_0420.nii.gz"},
{"image": "./imagesTr/MSWAL_0421_0000.nii.gz", "label": "./labelsTr/MSWAL_0421.nii.gz"},
{"image": "./imagesTr/MSWAL_0422_0000.nii.gz", "label": "./labelsTr/MSWAL_0422.nii.gz"},
{"image": "./imagesTr/MSWAL_0423_0000.nii.gz", "label": "./labelsTr/MSWAL_0423.nii.gz"},
{"image": "./imagesTr/MSWAL_0425_0000.nii.gz", "label": "./labelsTr/MSWAL_0425.nii.gz"},
{"image": "./imagesTr/MSWAL_0426_0000.nii.gz", "label": "./labelsTr/MSWAL_0426.nii.gz"},
{"image": "./imagesTr/MSWAL_0427_0000.nii.gz", "label": "./labelsTr/MSWAL_0427.nii.gz"},
{"image": "./imagesTr/MSWAL_0428_0000.nii.gz", "label": "./labelsTr/MSWAL_0428.nii.gz"},
{"image": "./imagesTr/MSWAL_0429_0000.nii.gz", "label": "./labelsTr/MSWAL_0429.nii.gz"},
{"image": "./imagesTr/MSWAL_0430_0000.nii.gz", "label": "./labelsTr/MSWAL_0430.nii.gz"},
{"image": "./imagesTr/MSWAL_0431_0000.nii.gz", "label": "./labelsTr/MSWAL_0431.nii.gz"},
{"image": "./imagesTr/MSWAL_0432_0000.nii.gz", "label": "./labelsTr/MSWAL_0432.nii.gz"},
{"image": "./imagesTr/MSWAL_0434_0000.nii.gz", "label": "./labelsTr/MSWAL_0434.nii.gz"},
{"image": "./imagesTr/MSWAL_0435_0000.nii.gz", "label": "./labelsTr/MSWAL_0435.nii.gz"},
{"image": "./imagesTr/MSWAL_0436_0000.nii.gz", "label": "./labelsTr/MSWAL_0436.nii.gz"},
{"image": "./imagesTr/MSWAL_0437_0000.nii.gz", "label": "./labelsTr/MSWAL_0437.nii.gz"},
{"image": "./imagesTr/MSWAL_0438_0000.nii.gz", "label": "./labelsTr/MSWAL_0438.nii.gz"},
{"image": "./imagesTr/MSWAL_0439_0000.nii.gz", "label": "./labelsTr/MSWAL_0439.nii.gz"},
{"image": "./imagesTr/MSWAL_0440_0000.nii.gz", "label": "./labelsTr/MSWAL_0440.nii.gz"},
{"image": "./imagesTr/MSWAL_0442_0000.nii.gz", "label": "./labelsTr/MSWAL_0442.nii.gz"},
{"image": "./imagesTr/MSWAL_0446_0000.nii.gz", "label": "./labelsTr/MSWAL_0446.nii.gz"},
{"image": "./imagesTr/MSWAL_0447_0000.nii.gz", "label": "./labelsTr/MSWAL_0447.nii.gz"},
{"image": "./imagesTr/MSWAL_0452_0000.nii.gz", "label": "./labelsTr/MSWAL_0452.nii.gz"},
{"image": "./imagesTr/MSWAL_0453_0000.nii.gz", "label": "./labelsTr/MSWAL_0453.nii.gz"},
{"image": "./imagesTr/MSWAL_0455_0000.nii.gz", "label": "./labelsTr/MSWAL_0455.nii.gz"},
{"image": "./imagesTr/MSWAL_0457_0000.nii.gz", "label": "./labelsTr/MSWAL_0457.nii.gz"},
{"image": "./imagesTr/MSWAL_0460_0000.nii.gz", "label": "./labelsTr/MSWAL_0460.nii.gz"},
{"image": "./imagesTr/MSWAL_0461_0000.nii.gz", "label": "./labelsTr/MSWAL_0461.nii.gz"},
{"image": "./imagesTr/MSWAL_0463_0000.nii.gz", "label": "./labelsTr/MSWAL_0463.nii.gz"},
{"image": "./imagesTr/MSWAL_0464_0000.nii.gz", "label": "./labelsTr/MSWAL_0464.nii.gz"},
{"image": "./imagesTr/MSWAL_0465_0000.nii.gz", "label": "./labelsTr/MSWAL_0465.nii.gz"},
{"image": "./imagesTr/MSWAL_0466_0000.nii.gz", "label": "./labelsTr/MSWAL_0466.nii.gz"},
{"image": "./imagesTr/MSWAL_0468_0000.nii.gz", "label": "./labelsTr/MSWAL_0468.nii.gz"},
{"image": "./imagesTr/MSWAL_0470_0000.nii.gz", "label": "./labelsTr/MSWAL_0470.nii.gz"},
{"image": "./imagesTr/MSWAL_0471_0000.nii.gz", "label": "./labelsTr/MSWAL_0471.nii.gz"},
{"image": "./imagesTr/MSWAL_0473_0000.nii.gz", "label": "./labelsTr/MSWAL_0473.nii.gz"},
{"image": "./imagesTr/MSWAL_0474_0000.nii.gz", "label": "./labelsTr/MSWAL_0474.nii.gz"},
{"image": "./imagesTr/MSWAL_0475_0000.nii.gz", "label": "./labelsTr/MSWAL_0475.nii.gz"},
{"image": "./imagesTr/MSWAL_0476_0000.nii.gz", "label": "./labelsTr/MSWAL_0476.nii.gz"},
{"image": "./imagesTr/MSWAL_0477_0000.nii.gz", "label": "./labelsTr/MSWAL_0477.nii.gz"},
{"image": "./imagesTr/MSWAL_0479_0000.nii.gz", "label": "./labelsTr/MSWAL_0479.nii.gz"},
{"image": "./imagesTr/MSWAL_0480_0000.nii.gz", "label": "./labelsTr/MSWAL_0480.nii.gz"},
{"image": "./imagesTr/MSWAL_0482_0000.nii.gz", "label": "./labelsTr/MSWAL_0482.nii.gz"},
{"image": "./imagesTr/MSWAL_0483_0000.nii.gz", "label": "./labelsTr/MSWAL_0483.nii.gz"},
{"image": "./imagesTr/MSWAL_0484_0000.nii.gz", "label": "./labelsTr/MSWAL_0484.nii.gz"},
{"image": "./imagesTr/MSWAL_0485_0000.nii.gz", "label": "./labelsTr/MSWAL_0485.nii.gz"},
{"image": "./imagesTr/MSWAL_0486_0000.nii.gz", "label": "./labelsTr/MSWAL_0486.nii.gz"},
{"image": "./imagesTr/MSWAL_0487_0000.nii.gz", "label": "./labelsTr/MSWAL_0487.nii.gz"},
{"image": "./imagesTr/MSWAL_0488_0000.nii.gz", "label": "./labelsTr/MSWAL_0488.nii.gz"},
{"image": "./imagesTr/MSWAL_0489_0000.nii.gz", "label": "./labelsTr/MSWAL_0489.nii.gz"},
{"image": "./imagesTr/MSWAL_0490_0000.nii.gz", "label": "./labelsTr/MSWAL_0490.nii.gz"},
{"image": "./imagesTr/MSWAL_0491_0000.nii.gz", "label": "./labelsTr/MSWAL_0491.nii.gz"},
{"image": "./imagesTr/MSWAL_0492_0000.nii.gz", "label": "./labelsTr/MSWAL_0492.nii.gz"},
{"image": "./imagesTr/MSWAL_0493_0000.nii.gz", "label": "./labelsTr/MSWAL_0493.nii.gz"},
{"image": "./imagesTr/MSWAL_0495_0000.nii.gz", "label": "./labelsTr/MSWAL_0495.nii.gz"},
{"image": "./imagesTr/MSWAL_0497_0000.nii.gz", "label": "./labelsTr/MSWAL_0497.nii.gz"},
{"image": "./imagesTr/MSWAL_0498_0000.nii.gz", "label": "./labelsTr/MSWAL_0498.nii.gz"},
{"image": "./imagesTr/MSWAL_0500_0000.nii.gz", "label": "./labelsTr/MSWAL_0500.nii.gz"},
{"image": "./imagesTr/MSWAL_0501_0000.nii.gz", "label": "./labelsTr/MSWAL_0501.nii.gz"},
{"image": "./imagesTr/MSWAL_0504_0000.nii.gz", "label": "./labelsTr/MSWAL_0504.nii.gz"},
{"image": "./imagesTr/MSWAL_0505_0000.nii.gz", "label": "./labelsTr/MSWAL_0505.nii.gz"},
{"image": "./imagesTr/MSWAL_0506_0000.nii.gz", "label": "./labelsTr/MSWAL_0506.nii.gz"},
{"image": "./imagesTr/MSWAL_0507_0000.nii.gz", "label": "./labelsTr/MSWAL_0507.nii.gz"},
{"image": "./imagesTr/MSWAL_0508_0000.nii.gz", "label": "./labelsTr/MSWAL_0508.nii.gz"},
{"image": "./imagesTr/MSWAL_0509_0000.nii.gz", "label": "./labelsTr/MSWAL_0509.nii.gz"},
{"image": "./imagesTr/MSWAL_0510_0000.nii.gz", "label": "./labelsTr/MSWAL_0510.nii.gz"},
{"image": "./imagesTr/MSWAL_0512_0000.nii.gz", "label": "./labelsTr/MSWAL_0512.nii.gz"},
{"image": "./imagesTr/MSWAL_0516_0000.nii.gz", "label": "./labelsTr/MSWAL_0516.nii.gz"},
{"image": "./imagesTr/MSWAL_0518_0000.nii.gz", "label": "./labelsTr/MSWAL_0518.nii.gz"},
{"image": "./imagesTr/MSWAL_0519_0000.nii.gz", "label": "./labelsTr/MSWAL_0519.nii.gz"},
{"image": "./imagesTr/MSWAL_0521_0000.nii.gz", "label": "./labelsTr/MSWAL_0521.nii.gz"},
{"image": "./imagesTr/MSWAL_0522_0000.nii.gz", "label": "./labelsTr/MSWAL_0522.nii.gz"},
{"image": "./imagesTr/MSWAL_0523_0000.nii.gz", "label": "./labelsTr/MSWAL_0523.nii.gz"},
{"image": "./imagesTr/MSWAL_0524_0000.nii.gz", "label": "./labelsTr/MSWAL_0524.nii.gz"},
{"image": "./imagesTr/MSWAL_0526_0000.nii.gz", "label": "./labelsTr/MSWAL_0526.nii.gz"},
{"image": "./imagesTr/MSWAL_0527_0000.nii.gz", "label": "./labelsTr/MSWAL_0527.nii.gz"},
{"image": "./imagesTr/MSWAL_0530_0000.nii.gz", "label": "./labelsTr/MSWAL_0530.nii.gz"},
{"image": "./imagesTr/MSWAL_0531_0000.nii.gz", "label": "./labelsTr/MSWAL_0531.nii.gz"},
{"image": "./imagesTr/MSWAL_0534_0000.nii.gz", "label": "./labelsTr/MSWAL_0534.nii.gz"},
{"image": "./imagesTr/MSWAL_0535_0000.nii.gz", "label": "./labelsTr/MSWAL_0535.nii.gz"},
{"image": "./imagesTr/MSWAL_0536_0000.nii.gz", "label": "./labelsTr/MSWAL_0536.nii.gz"},
{"image": "./imagesTr/MSWAL_0538_0000.nii.gz", "label": "./labelsTr/MSWAL_0538.nii.gz"},
{"image": "./imagesTr/MSWAL_0539_0000.nii.gz", "label": "./labelsTr/MSWAL_0539.nii.gz"},
{"image": "./imagesTr/MSWAL_0540_0000.nii.gz", "label": "./labelsTr/MSWAL_0540.nii.gz"},
{"image": "./imagesTr/MSWAL_0542_0000.nii.gz", "label": "./labelsTr/MSWAL_0542.nii.gz"},
{"image": "./imagesTr/MSWAL_0544_0000.nii.gz", "label": "./labelsTr/MSWAL_0544.nii.gz"},
{"image": "./imagesTr/MSWAL_0545_0000.nii.gz", "label": "./labelsTr/MSWAL_0545.nii.gz"},
{"image": "./imagesTr/MSWAL_0546_0000.nii.gz", "label": "./labelsTr/MSWAL_0546.nii.gz"},
{"image": "./imagesTr/MSWAL_0547_0000.nii.gz", "label": "./labelsTr/MSWAL_0547.nii.gz"},
{"image": "./imagesTr/MSWAL_0548_0000.nii.gz", "label": "./labelsTr/MSWAL_0548.nii.gz"},
{"image": "./imagesTr/MSWAL_0549_0000.nii.gz", "label": "./labelsTr/MSWAL_0549.nii.gz"},
{"image": "./imagesTr/MSWAL_0550_0000.nii.gz", "label": "./labelsTr/MSWAL_0550.nii.gz"},
{"image": "./imagesTr/MSWAL_0551_0000.nii.gz", "label": "./labelsTr/MSWAL_0551.nii.gz"},
{"image": "./imagesTr/MSWAL_0552_0000.nii.gz", "label": "./labelsTr/MSWAL_0552.nii.gz"},
{"image": "./imagesTr/MSWAL_0553_0000.nii.gz", "label": "./labelsTr/MSWAL_0553.nii.gz"},
{"image": "./imagesTr/MSWAL_0554_0000.nii.gz", "label": "./labelsTr/MSWAL_0554.nii.gz"},
{"image": "./imagesTr/MSWAL_0555_0000.nii.gz", "label": "./labelsTr/MSWAL_0555.nii.gz"},
{"image": "./imagesTr/MSWAL_0556_0000.nii.gz", "label": "./labelsTr/MSWAL_0556.nii.gz"},
{"image": "./imagesTr/MSWAL_0557_0000.nii.gz", "label": "./labelsTr/MSWAL_0557.nii.gz"},
{"image": "./imagesTr/MSWAL_0558_0000.nii.gz", "label": "./labelsTr/MSWAL_0558.nii.gz"},
{"image": "./imagesTr/MSWAL_0559_0000.nii.gz", "label": "./labelsTr/MSWAL_0559.nii.gz"},
{"image": "./imagesTr/MSWAL_0561_0000.nii.gz", "label": "./labelsTr/MSWAL_0561.nii.gz"},
{"image": "./imagesTr/MSWAL_0562_0000.nii.gz", "label": "./labelsTr/MSWAL_0562.nii.gz"},
{"image": "./imagesTr/MSWAL_0563_0000.nii.gz", "label": "./labelsTr/MSWAL_0563.nii.gz"},
{"image": "./imagesTr/MSWAL_0564_0000.nii.gz", "label": "./labelsTr/MSWAL_0564.nii.gz"},
{"image": "./imagesTr/MSWAL_0566_0000.nii.gz", "label": "./labelsTr/MSWAL_0566.nii.gz"},
{"image": "./imagesTr/MSWAL_0567_0000.nii.gz", "label": "./labelsTr/MSWAL_0567.nii.gz"},
{"image": "./imagesTr/MSWAL_0568_0000.nii.gz", "label": "./labelsTr/MSWAL_0568.nii.gz"},
{"image": "./imagesTr/MSWAL_0571_0000.nii.gz", "label": "./labelsTr/MSWAL_0571.nii.gz"},
{"image": "./imagesTr/MSWAL_0573_0000.nii.gz", "label": "./labelsTr/MSWAL_0573.nii.gz"},
{"image": "./imagesTr/MSWAL_0574_0000.nii.gz", "label": "./labelsTr/MSWAL_0574.nii.gz"},
{"image": "./imagesTr/MSWAL_0575_0000.nii.gz", "label": "./labelsTr/MSWAL_0575.nii.gz"},
{"image": "./imagesTr/MSWAL_0577_0000.nii.gz", "label": "./labelsTr/MSWAL_0577.nii.gz"},
{"image": "./imagesTr/MSWAL_0578_0000.nii.gz", "label": "./labelsTr/MSWAL_0578.nii.gz"},
{"image": "./imagesTr/MSWAL_0579_0000.nii.gz", "label": "./labelsTr/MSWAL_0579.nii.gz"},
{"image": "./imagesTr/MSWAL_0580_0000.nii.gz", "label": "./labelsTr/MSWAL_0580.nii.gz"},
{"image": "./imagesTr/MSWAL_0581_0000.nii.gz", "label": "./labelsTr/MSWAL_0581.nii.gz"},
{"image": "./imagesTr/MSWAL_0582_0000.nii.gz", "label": "./labelsTr/MSWAL_0582.nii.gz"},
{"image": "./imagesTr/MSWAL_0583_0000.nii.gz", "label": "./labelsTr/MSWAL_0583.nii.gz"},
{"image": "./imagesTr/MSWAL_0584_0000.nii.gz", "label": "./labelsTr/MSWAL_0584.nii.gz"},
{"image": "./imagesTr/MSWAL_0586_0000.nii.gz", "label": "./labelsTr/MSWAL_0586.nii.gz"},
{"image": "./imagesTr/MSWAL_0590_0000.nii.gz", "label": "./labelsTr/MSWAL_0590.nii.gz"},
{"image": "./imagesTr/MSWAL_0591_0000.nii.gz", "label": "./labelsTr/MSWAL_0591.nii.gz"},
{"image": "./imagesTr/MSWAL_0592_0000.nii.gz", "label": "./labelsTr/MSWAL_0592.nii.gz"},
{"image": "./imagesTr/MSWAL_0593_0000.nii.gz", "label": "./labelsTr/MSWAL_0593.nii.gz"},
{"image": "./imagesTr/MSWAL_0595_0000.nii.gz", "label": "./labelsTr/MSWAL_0595.nii.gz"},
{"image": "./imagesTr/MSWAL_0596_0000.nii.gz", "label": "./labelsTr/MSWAL_0596.nii.gz"},
{"image": "./imagesTr/MSWAL_0597_0000.nii.gz", "label": "./labelsTr/MSWAL_0597.nii.gz"},
{"image": "./imagesTr/MSWAL_0598_0000.nii.gz", "label": "./labelsTr/MSWAL_0598.nii.gz"},
{"image": "./imagesTr/MSWAL_0599_0000.nii.gz", "label": "./labelsTr/MSWAL_0599.nii.gz"},
{"image": "./imagesTr/MSWAL_0600_0000.nii.gz", "label": "./labelsTr/MSWAL_0600.nii.gz"},
{"image": "./imagesTr/MSWAL_0601_0000.nii.gz", "label": "./labelsTr/MSWAL_0601.nii.gz"},
{"image": "./imagesTr/MSWAL_0602_0000.nii.gz", "label": "./labelsTr/MSWAL_0602.nii.gz"},
{"image": "./imagesTr/MSWAL_0604_0000.nii.gz", "label": "./labelsTr/MSWAL_0604.nii.gz"},
{"image": "./imagesTr/MSWAL_0605_0000.nii.gz", "label": "./labelsTr/MSWAL_0605.nii.gz"},
{"image": "./imagesTr/MSWAL_0608_0000.nii.gz", "label": "./labelsTr/MSWAL_0608.nii.gz"},
{"image": "./imagesTr/MSWAL_0612_0000.nii.gz", "label": "./labelsTr/MSWAL_0612.nii.gz"},
{"image": "./imagesTr/MSWAL_0614_0000.nii.gz", "label": "./labelsTr/MSWAL_0614.nii.gz"},
{"image": "./imagesTr/MSWAL_0615_0000.nii.gz", "label": "./labelsTr/MSWAL_0615.nii.gz"},
{"image": "./imagesTr/MSWAL_0616_0000.nii.gz", "label": "./labelsTr/MSWAL_0616.nii.gz"},
{"image": "./imagesTr/MSWAL_0617_0000.nii.gz", "label": "./labelsTr/MSWAL_0617.nii.gz"},
{"image": "./imagesTr/MSWAL_0621_0000.nii.gz", "label": "./labelsTr/MSWAL_0621.nii.gz"},
{"image": "./imagesTr/MSWAL_0623_0000.nii.gz", "label": "./labelsTr/MSWAL_0623.nii.gz"},
{"image": "./imagesTr/MSWAL_0625_0000.nii.gz", "label": "./labelsTr/MSWAL_0625.nii.gz"},
{"image": "./imagesTr/MSWAL_0626_0000.nii.gz", "label": "./labelsTr/MSWAL_0626.nii.gz"},
{"image": "./imagesTr/MSWAL_0627_0000.nii.gz", "label": "./labelsTr/MSWAL_0627.nii.gz"},
{"image": "./imagesTr/MSWAL_0628_0000.nii.gz", "label": "./labelsTr/MSWAL_0628.nii.gz"},
{"image": "./imagesTr/MSWAL_0629_0000.nii.gz", "label": "./labelsTr/MSWAL_0629.nii.gz"},
{"image": "./imagesTr/MSWAL_0630_0000.nii.gz", "label": "./labelsTr/MSWAL_0630.nii.gz"},
{"image": "./imagesTr/MSWAL_0632_0000.nii.gz", "label": "./labelsTr/MSWAL_0632.nii.gz"},
{"image": "./imagesTr/MSWAL_0635_0000.nii.gz", "label": "./labelsTr/MSWAL_0635.nii.gz"},
{"image": "./imagesTr/MSWAL_0636_0000.nii.gz", "label": "./labelsTr/MSWAL_0636.nii.gz"},
{"image": "./imagesTr/MSWAL_0638_0000.nii.gz", "label": "./labelsTr/MSWAL_0638.nii.gz"},
{"image": "./imagesTr/MSWAL_0640_0000.nii.gz", "label": "./labelsTr/MSWAL_0640.nii.gz"},
{"image": "./imagesTr/MSWAL_0641_0000.nii.gz", "label": "./labelsTr/MSWAL_0641.nii.gz"},
{"image": "./imagesTr/MSWAL_0643_0000.nii.gz", "label": "./labelsTr/MSWAL_0643.nii.gz"},
{"image": "./imagesTr/MSWAL_0644_0000.nii.gz", "label": "./labelsTr/MSWAL_0644.nii.gz"},
{"image": "./imagesTr/MSWAL_0646_0000.nii.gz", "label": "./labelsTr/MSWAL_0646.nii.gz"},
{"image": "./imagesTr/MSWAL_0648_0000.nii.gz", "label": "./labelsTr/MSWAL_0648.nii.gz"},
{"image": "./imagesTr/MSWAL_0649_0000.nii.gz", "label": "./labelsTr/MSWAL_0649.nii.gz"},
{"image": "./imagesTr/MSWAL_0650_0000.nii.gz", "label": "./labelsTr/MSWAL_0650.nii.gz"},
{"image": "./imagesTr/MSWAL_0651_0000.nii.gz", "label": "./labelsTr/MSWAL_0651.nii.gz"},
{"image": "./imagesTr/MSWAL_0653_0000.nii.gz", "label": "./labelsTr/MSWAL_0653.nii.gz"},
{"image": "./imagesTr/MSWAL_0654_0000.nii.gz", "label": "./labelsTr/MSWAL_0654.nii.gz"},
{"image": "./imagesTr/MSWAL_0655_0000.nii.gz", "label": "./labelsTr/MSWAL_0655.nii.gz"},
{"image": "./imagesTr/MSWAL_0656_0000.nii.gz", "label": "./labelsTr/MSWAL_0656.nii.gz"},
{"image": "./imagesTr/MSWAL_0658_0000.nii.gz", "label": "./labelsTr/MSWAL_0658.nii.gz"},
{"image": "./imagesTr/MSWAL_0660_0000.nii.gz", "label": "./labelsTr/MSWAL_0660.nii.gz"},
{"image": "./imagesTr/MSWAL_0661_0000.nii.gz", "label": "./labelsTr/MSWAL_0661.nii.gz"},
{"image": "./imagesTr/MSWAL_0662_0000.nii.gz", "label": "./labelsTr/MSWAL_0662.nii.gz"},
{"image": "./imagesTr/MSWAL_0663_0000.nii.gz", "label": "./labelsTr/MSWAL_0663.nii.gz"},
{"image": "./imagesTr/MSWAL_0666_0000.nii.gz", "label": "./labelsTr/MSWAL_0666.nii.gz"},
{"image": "./imagesTr/MSWAL_0667_0000.nii.gz", "label": "./labelsTr/MSWAL_0667.nii.gz"},
{"image": "./imagesTr/MSWAL_0668_0000.nii.gz", "label": "./labelsTr/MSWAL_0668.nii.gz"},
{"image": "./imagesTr/MSWAL_0669_0000.nii.gz", "label": "./labelsTr/MSWAL_0669.nii.gz"},
{"image": "./imagesTr/MSWAL_0670_0000.nii.gz", "label": "./labelsTr/MSWAL_0670.nii.gz"},
{"image": "./imagesTr/MSWAL_0671_0000.nii.gz", "label": "./labelsTr/MSWAL_0671.nii.gz"},
{"image": "./imagesTr/MSWAL_0673_0000.nii.gz", "label": "./labelsTr/MSWAL_0673.nii.gz"},
{"image": "./imagesTr/MSWAL_0674_0000.nii.gz", "label": "./labelsTr/MSWAL_0674.nii.gz"},
{"image": "./imagesTr/MSWAL_0675_0000.nii.gz", "label": "./labelsTr/MSWAL_0675.nii.gz"},
{"image": "./imagesTr/MSWAL_0676_0000.nii.gz", "label": "./labelsTr/MSWAL_0676.nii.gz"},
{"image": "./imagesTr/MSWAL_0677_0000.nii.gz", "label": "./labelsTr/MSWAL_0677.nii.gz"},
{"image": "./imagesTr/MSWAL_0679_0000.nii.gz", "label": "./labelsTr/MSWAL_0679.nii.gz"},
{"image": "./imagesTr/MSWAL_0680_0000.nii.gz", "label": "./labelsTr/MSWAL_0680.nii.gz"},
{"image": "./imagesTr/MSWAL_0681_0000.nii.gz", "label": "./labelsTr/MSWAL_0681.nii.gz"},
{"image": "./imagesTr/MSWAL_0682_0000.nii.gz", "label": "./labelsTr/MSWAL_0682.nii.gz"},
{"image": "./imagesTr/MSWAL_0685_0000.nii.gz", "label": "./labelsTr/MSWAL_0685.nii.gz"},
{"image": "./imagesTr/MSWAL_0686_0000.nii.gz", "label": "./labelsTr/MSWAL_0686.nii.gz"},
{"image": "./imagesTr/MSWAL_0687_0000.nii.gz", "label": "./labelsTr/MSWAL_0687.nii.gz"},
{"image": "./imagesTr/MSWAL_0688_0000.nii.gz", "label": "./labelsTr/MSWAL_0688.nii.gz"},
{"image": "./imagesTr/MSWAL_0690_0000.nii.gz", "label": "./labelsTr/MSWAL_0690.nii.gz"},
{"image": "./imagesTr/MSWAL_0692_0000.nii.gz", "label": "./labelsTr/MSWAL_0692.nii.gz"},
{"image": "./imagesTr/MSWAL_0693_0000.nii.gz", "label": "./labelsTr/MSWAL_0693.nii.gz"},
{"image": "./imagesTr/MSWAL_0694_0000.nii.gz", "label": "./labelsTr/MSWAL_0694.nii.gz"}],
"test": [{"image": "./imagesTs/MSWAL_0004_0000.nii.gz", "label": "./labelsTs/MSWAL_0004.nii.gz"},
{"image": "./imagesTs/MSWAL_0005_0000.nii.gz", "label": "./labelsTs/MSWAL_0005.nii.gz"},
{"image": "./imagesTs/MSWAL_0006_0000.nii.gz", "label": "./labelsTs/MSWAL_0006.nii.gz"},
{"image": "./imagesTs/MSWAL_0007_0000.nii.gz", "label": "./labelsTs/MSWAL_0007.nii.gz"},
{"image": "./imagesTs/MSWAL_0010_0000.nii.gz", "label": "./labelsTs/MSWAL_0010.nii.gz"},
{"image": "./imagesTs/MSWAL_0012_0000.nii.gz", "label": "./labelsTs/MSWAL_0012.nii.gz"},
{"image": "./imagesTs/MSWAL_0016_0000.nii.gz", "label": "./labelsTs/MSWAL_0016.nii.gz"},
{"image": "./imagesTs/MSWAL_0019_0000.nii.gz", "label": "./labelsTs/MSWAL_0019.nii.gz"},
{"image": "./imagesTs/MSWAL_0023_0000.nii.gz", "label": "./labelsTs/MSWAL_0023.nii.gz"},
{"image": "./imagesTs/MSWAL_0025_0000.nii.gz", "label": "./labelsTs/MSWAL_0025.nii.gz"},
{"image": "./imagesTs/MSWAL_0030_0000.nii.gz", "label": "./labelsTs/MSWAL_0030.nii.gz"},
{"image": "./imagesTs/MSWAL_0036_0000.nii.gz", "label": "./labelsTs/MSWAL_0036.nii.gz"},
{"image": "./imagesTs/MSWAL_0043_0000.nii.gz", "label": "./labelsTs/MSWAL_0043.nii.gz"},
{"image": "./imagesTs/MSWAL_0044_0000.nii.gz", "label": "./labelsTs/MSWAL_0044.nii.gz"},
{"image": "./imagesTs/MSWAL_0047_0000.nii.gz", "label": "./labelsTs/MSWAL_0047.nii.gz"},
{"image": "./imagesTs/MSWAL_0048_0000.nii.gz", "label": "./labelsTs/MSWAL_0048.nii.gz"},
{"image": "./imagesTs/MSWAL_0053_0000.nii.gz", "label": "./labelsTs/MSWAL_0053.nii.gz"},
{"image": "./imagesTs/MSWAL_0058_0000.nii.gz", "label": "./labelsTs/MSWAL_0058.nii.gz"},
{"image": "./imagesTs/MSWAL_0062_0000.nii.gz", "label": "./labelsTs/MSWAL_0062.nii.gz"},
{"image": "./imagesTs/MSWAL_0068_0000.nii.gz", "label": "./labelsTs/MSWAL_0068.nii.gz"},
{"image": "./imagesTs/MSWAL_0070_0000.nii.gz", "label": "./labelsTs/MSWAL_0070.nii.gz"},
{"image": "./imagesTs/MSWAL_0071_0000.nii.gz", "label": "./labelsTs/MSWAL_0071.nii.gz"},
{"image": "./imagesTs/MSWAL_0073_0000.nii.gz", "label": "./labelsTs/MSWAL_0073.nii.gz"},
{"image": "./imagesTs/MSWAL_0074_0000.nii.gz", "label": "./labelsTs/MSWAL_0074.nii.gz"},
{"image": "./imagesTs/MSWAL_0076_0000.nii.gz", "label": "./labelsTs/MSWAL_0076.nii.gz"},
{"image": "./imagesTs/MSWAL_0078_0000.nii.gz", "label": "./labelsTs/MSWAL_0078.nii.gz"},
{"image": "./imagesTs/MSWAL_0079_0000.nii.gz", "label": "./labelsTs/MSWAL_0079.nii.gz"},
{"image": "./imagesTs/MSWAL_0081_0000.nii.gz", "label": "./labelsTs/MSWAL_0081.nii.gz"},
{"image": "./imagesTs/MSWAL_0087_0000.nii.gz", "label": "./labelsTs/MSWAL_0087.nii.gz"},
{"image": "./imagesTs/MSWAL_0090_0000.nii.gz", "label": "./labelsTs/MSWAL_0090.nii.gz"},
{"image": "./imagesTs/MSWAL_0091_0000.nii.gz", "label": "./labelsTs/MSWAL_0091.nii.gz"},
{"image": "./imagesTs/MSWAL_0097_0000.nii.gz", "label": "./labelsTs/MSWAL_0097.nii.gz"},
{"image": "./imagesTs/MSWAL_0100_0000.nii.gz", "label": "./labelsTs/MSWAL_0100.nii.gz"},
{"image": "./imagesTs/MSWAL_0107_0000.nii.gz", "label": "./labelsTs/MSWAL_0107.nii.gz"},
{"image": "./imagesTs/MSWAL_0115_0000.nii.gz", "label": "./labelsTs/MSWAL_0115.nii.gz"},
{"image": "./imagesTs/MSWAL_0116_0000.nii.gz", "label": "./labelsTs/MSWAL_0116.nii.gz"},
{"image": "./imagesTs/MSWAL_0118_0000.nii.gz", "label": "./labelsTs/MSWAL_0118.nii.gz"},
{"image": "./imagesTs/MSWAL_0121_0000.nii.gz", "label": "./labelsTs/MSWAL_0121.nii.gz"},
{"image": "./imagesTs/MSWAL_0123_0000.nii.gz", "label": "./labelsTs/MSWAL_0123.nii.gz"},
{"image": "./imagesTs/MSWAL_0131_0000.nii.gz", "label": "./labelsTs/MSWAL_0131.nii.gz"},
{"image": "./imagesTs/MSWAL_0135_0000.nii.gz", "label": "./labelsTs/MSWAL_0135.nii.gz"},
{"image": "./imagesTs/MSWAL_0137_0000.nii.gz", "label": "./labelsTs/MSWAL_0137.nii.gz"},
{"image": "./imagesTs/MSWAL_0144_0000.nii.gz", "label": "./labelsTs/MSWAL_0144.nii.gz"},
{"image": "./imagesTs/MSWAL_0146_0000.nii.gz", "label": "./labelsTs/MSWAL_0146.nii.gz"},
{"image": "./imagesTs/MSWAL_0153_0000.nii.gz", "label": "./labelsTs/MSWAL_0153.nii.gz"},
{"image": "./imagesTs/MSWAL_0154_0000.nii.gz", "label": "./labelsTs/MSWAL_0154.nii.gz"},
{"image": "./imagesTs/MSWAL_0155_0000.nii.gz", "label": "./labelsTs/MSWAL_0155.nii.gz"},
{"image": "./imagesTs/MSWAL_0156_0000.nii.gz", "label": "./labelsTs/MSWAL_0156.nii.gz"},
{"image": "./imagesTs/MSWAL_0158_0000.nii.gz", "label": "./labelsTs/MSWAL_0158.nii.gz"},
{"image": "./imagesTs/MSWAL_0160_0000.nii.gz", "label": "./labelsTs/MSWAL_0160.nii.gz"},
{"image": "./imagesTs/MSWAL_0161_0000.nii.gz", "label": "./labelsTs/MSWAL_0161.nii.gz"},
{"image": "./imagesTs/MSWAL_0164_0000.nii.gz", "label": "./labelsTs/MSWAL_0164.nii.gz"},
{"image": "./imagesTs/MSWAL_0181_0000.nii.gz", "label": "./labelsTs/MSWAL_0181.nii.gz"},
{"image": "./imagesTs/MSWAL_0190_0000.nii.gz", "label": "./labelsTs/MSWAL_0190.nii.gz"},
{"image": "./imagesTs/MSWAL_0191_0000.nii.gz", "label": "./labelsTs/MSWAL_0191.nii.gz"},
{"image": "./imagesTs/MSWAL_0192_0000.nii.gz", "label": "./labelsTs/MSWAL_0192.nii.gz"},
{"image": "./imagesTs/MSWAL_0196_0000.nii.gz", "label": "./labelsTs/MSWAL_0196.nii.gz"},
{"image": "./imagesTs/MSWAL_0197_0000.nii.gz", "label": "./labelsTs/MSWAL_0197.nii.gz"},
{"image": "./imagesTs/MSWAL_0198_0000.nii.gz", "label": "./labelsTs/MSWAL_0198.nii.gz"},
{"image": "./imagesTs/MSWAL_0200_0000.nii.gz", "label": "./labelsTs/MSWAL_0200.nii.gz"},
{"image": "./imagesTs/MSWAL_0205_0000.nii.gz", "label": "./labelsTs/MSWAL_0205.nii.gz"},
{"image": "./imagesTs/MSWAL_0206_0000.nii.gz", "label": "./labelsTs/MSWAL_0206.nii.gz"},
{"image": "./imagesTs/MSWAL_0210_0000.nii.gz", "label": "./labelsTs/MSWAL_0210.nii.gz"},
{"image": "./imagesTs/MSWAL_0211_0000.nii.gz", "label": "./labelsTs/MSWAL_0211.nii.gz"},
{"image": "./imagesTs/MSWAL_0212_0000.nii.gz", "label": "./labelsTs/MSWAL_0212.nii.gz"},
{"image": "./imagesTs/MSWAL_0213_0000.nii.gz", "label": "./labelsTs/MSWAL_0213.nii.gz"},
{"image": "./imagesTs/MSWAL_0215_0000.nii.gz", "label": "./labelsTs/MSWAL_0215.nii.gz"},
{"image": "./imagesTs/MSWAL_0216_0000.nii.gz", "label": "./labelsTs/MSWAL_0216.nii.gz"},
{"image": "./imagesTs/MSWAL_0231_0000.nii.gz", "label": "./labelsTs/MSWAL_0231.nii.gz"},
{"image": "./imagesTs/MSWAL_0232_0000.nii.gz", "label": "./labelsTs/MSWAL_0232.nii.gz"},
{"image": "./imagesTs/MSWAL_0235_0000.nii.gz", "label": "./labelsTs/MSWAL_0235.nii.gz"},
{"image": "./imagesTs/MSWAL_0236_0000.nii.gz", "label": "./labelsTs/MSWAL_0236.nii.gz"},
{"image": "./imagesTs/MSWAL_0237_0000.nii.gz", "label": "./labelsTs/MSWAL_0237.nii.gz"},
{"image": "./imagesTs/MSWAL_0239_0000.nii.gz", "label": "./labelsTs/MSWAL_0239.nii.gz"},
{"image": "./imagesTs/MSWAL_0240_0000.nii.gz", "label": "./labelsTs/MSWAL_0240.nii.gz"},
{"image": "./imagesTs/MSWAL_0244_0000.nii.gz", "label": "./labelsTs/MSWAL_0244.nii.gz"},
{"image": "./imagesTs/MSWAL_0249_0000.nii.gz", "label": "./labelsTs/MSWAL_0249.nii.gz"},
{"image": "./imagesTs/MSWAL_0250_0000.nii.gz", "label": "./labelsTs/MSWAL_0250.nii.gz"},
{"image": "./imagesTs/MSWAL_0266_0000.nii.gz", "label": "./labelsTs/MSWAL_0266.nii.gz"},
{"image": "./imagesTs/MSWAL_0268_0000.nii.gz", "label": "./labelsTs/MSWAL_0268.nii.gz"},
{"image": "./imagesTs/MSWAL_0269_0000.nii.gz", "label": "./labelsTs/MSWAL_0269.nii.gz"},
{"image": "./imagesTs/MSWAL_0280_0000.nii.gz", "label": "./labelsTs/MSWAL_0280.nii.gz"},
{"image": "./imagesTs/MSWAL_0286_0000.nii.gz", "label": "./labelsTs/MSWAL_0286.nii.gz"},
{"image": "./imagesTs/MSWAL_0287_0000.nii.gz", "label": "./labelsTs/MSWAL_0287.nii.gz"},
{"image": "./imagesTs/MSWAL_0291_0000.nii.gz", "label": "./labelsTs/MSWAL_0291.nii.gz"},
{"image": "./imagesTs/MSWAL_0292_0000.nii.gz", "label": "./labelsTs/MSWAL_0292.nii.gz"},
{"image": "./imagesTs/MSWAL_0294_0000.nii.gz", "label": "./labelsTs/MSWAL_0294.nii.gz"},
{"image": "./imagesTs/MSWAL_0295_0000.nii.gz", "label": "./labelsTs/MSWAL_0295.nii.gz"},
{"image": "./imagesTs/MSWAL_0298_0000.nii.gz", "label": "./labelsTs/MSWAL_0298.nii.gz"},
{"image": "./imagesTs/MSWAL_0299_0000.nii.gz", "label": "./labelsTs/MSWAL_0299.nii.gz"},
{"image": "./imagesTs/MSWAL_0300_0000.nii.gz", "label": "./labelsTs/MSWAL_0300.nii.gz"},
{"image": "./imagesTs/MSWAL_0304_0000.nii.gz", "label": "./labelsTs/MSWAL_0304.nii.gz"},
{"image": "./imagesTs/MSWAL_0305_0000.nii.gz", "label": "./labelsTs/MSWAL_0305.nii.gz"},
{"image": "./imagesTs/MSWAL_0309_0000.nii.gz", "label": "./labelsTs/MSWAL_0309.nii.gz"},
{"image": "./imagesTs/MSWAL_0310_0000.nii.gz", "label": "./labelsTs/MSWAL_0310.nii.gz"},
{"image": "./imagesTs/MSWAL_0315_0000.nii.gz", "label": "./labelsTs/MSWAL_0315.nii.gz"},
{"image": "./imagesTs/MSWAL_0319_0000.nii.gz", "label": "./labelsTs/MSWAL_0319.nii.gz"},
{"image": "./imagesTs/MSWAL_0321_0000.nii.gz", "label": "./labelsTs/MSWAL_0321.nii.gz"},
{"image": "./imagesTs/MSWAL_0322_0000.nii.gz", "label": "./labelsTs/MSWAL_0322.nii.gz"},
{"image": "./imagesTs/MSWAL_0325_0000.nii.gz", "label": "./labelsTs/MSWAL_0325.nii.gz"},
{"image": "./imagesTs/MSWAL_0329_0000.nii.gz", "label": "./labelsTs/MSWAL_0329.nii.gz"},
{"image": "./imagesTs/MSWAL_0339_0000.nii.gz", "label": "./labelsTs/MSWAL_0339.nii.gz"},
{"image": "./imagesTs/MSWAL_0340_0000.nii.gz", "label": "./labelsTs/MSWAL_0340.nii.gz"},
{"image": "./imagesTs/MSWAL_0347_0000.nii.gz", "label": "./labelsTs/MSWAL_0347.nii.gz"},
{"image": "./imagesTs/MSWAL_0349_0000.nii.gz", "label": "./labelsTs/MSWAL_0349.nii.gz"},
{"image": "./imagesTs/MSWAL_0350_0000.nii.gz", "label": "./labelsTs/MSWAL_0350.nii.gz"},
{"image": "./imagesTs/MSWAL_0351_0000.nii.gz", "label": "./labelsTs/MSWAL_0351.nii.gz"},
{"image": "./imagesTs/MSWAL_0352_0000.nii.gz", "label": "./labelsTs/MSWAL_0352.nii.gz"},
{"image": "./imagesTs/MSWAL_0358_0000.nii.gz", "label": "./labelsTs/MSWAL_0358.nii.gz"},
{"image": "./imagesTs/MSWAL_0359_0000.nii.gz", "label": "./labelsTs/MSWAL_0359.nii.gz"},
{"image": "./imagesTs/MSWAL_0364_0000.nii.gz", "label": "./labelsTs/MSWAL_0364.nii.gz"},
{"image": "./imagesTs/MSWAL_0367_0000.nii.gz", "label": "./labelsTs/MSWAL_0367.nii.gz"},
{"image": "./imagesTs/MSWAL_0368_0000.nii.gz", "label": "./labelsTs/MSWAL_0368.nii.gz"},
{"image": "./imagesTs/MSWAL_0371_0000.nii.gz", "label": "./labelsTs/MSWAL_0371.nii.gz"},
{"image": "./imagesTs/MSWAL_0372_0000.nii.gz", "label": "./labelsTs/MSWAL_0372.nii.gz"},
{"image": "./imagesTs/MSWAL_0377_0000.nii.gz", "label": "./labelsTs/MSWAL_0377.nii.gz"},
{"image": "./imagesTs/MSWAL_0383_0000.nii.gz", "label": "./labelsTs/MSWAL_0383.nii.gz"},
{"image": "./imagesTs/MSWAL_0384_0000.nii.gz", "label": "./labelsTs/MSWAL_0384.nii.gz"},
{"image": "./imagesTs/MSWAL_0385_0000.nii.gz", "label": "./labelsTs/MSWAL_0385.nii.gz"},
{"image": "./imagesTs/MSWAL_0386_0000.nii.gz", "label": "./labelsTs/MSWAL_0386.nii.gz"},
{"image": "./imagesTs/MSWAL_0394_0000.nii.gz", "label": "./labelsTs/MSWAL_0394.nii.gz"},
{"image": "./imagesTs/MSWAL_0395_0000.nii.gz", "label": "./labelsTs/MSWAL_0395.nii.gz"},
{"image": "./imagesTs/MSWAL_0396_0000.nii.gz", "label": "./labelsTs/MSWAL_0396.nii.gz"},
{"image": "./imagesTs/MSWAL_0401_0000.nii.gz", "label": "./labelsTs/MSWAL_0401.nii.gz"},
{"image": "./imagesTs/MSWAL_0404_0000.nii.gz", "label": "./labelsTs/MSWAL_0404.nii.gz"},
{"image": "./imagesTs/MSWAL_0405_0000.nii.gz", "label": "./labelsTs/MSWAL_0405.nii.gz"},
{"image": "./imagesTs/MSWAL_0406_0000.nii.gz", "label": "./labelsTs/MSWAL_0406.nii.gz"},
{"image": "./imagesTs/MSWAL_0408_0000.nii.gz", "label": "./labelsTs/MSWAL_0408.nii.gz"},
{"image": "./imagesTs/MSWAL_0413_0000.nii.gz", "label": "./labelsTs/MSWAL_0413.nii.gz"},
{"image": "./imagesTs/MSWAL_0424_0000.nii.gz", "label": "./labelsTs/MSWAL_0424.nii.gz"},
{"image": "./imagesTs/MSWAL_0433_0000.nii.gz", "label": "./labelsTs/MSWAL_0433.nii.gz"},
{"image": "./imagesTs/MSWAL_0441_0000.nii.gz", "label": "./labelsTs/MSWAL_0441.nii.gz"},
{"image": "./imagesTs/MSWAL_0443_0000.nii.gz", "label": "./labelsTs/MSWAL_0443.nii.gz"},
{"image": "./imagesTs/MSWAL_0444_0000.nii.gz", "label": "./labelsTs/MSWAL_0444.nii.gz"},
{"image": "./imagesTs/MSWAL_0445_0000.nii.gz", "label": "./labelsTs/MSWAL_0445.nii.gz"},
{"image": "./imagesTs/MSWAL_0448_0000.nii.gz", "label": "./labelsTs/MSWAL_0448.nii.gz"},
{"image": "./imagesTs/MSWAL_0449_0000.nii.gz", "label": "./labelsTs/MSWAL_0449.nii.gz"},
{"image": "./imagesTs/MSWAL_0450_0000.nii.gz", "label": "./labelsTs/MSWAL_0450.nii.gz"},
{"image": "./imagesTs/MSWAL_0451_0000.nii.gz", "label": "./labelsTs/MSWAL_0451.nii.gz"},
{"image": "./imagesTs/MSWAL_0454_0000.nii.gz", "label": "./labelsTs/MSWAL_0454.nii.gz"},
{"image": "./imagesTs/MSWAL_0456_0000.nii.gz", "label": "./labelsTs/MSWAL_0456.nii.gz"},
{"image": "./imagesTs/MSWAL_0458_0000.nii.gz", "label": "./labelsTs/MSWAL_0458.nii.gz"},
{"image": "./imagesTs/MSWAL_0459_0000.nii.gz", "label": "./labelsTs/MSWAL_0459.nii.gz"},
{"image": "./imagesTs/MSWAL_0462_0000.nii.gz", "label": "./labelsTs/MSWAL_0462.nii.gz"},
{"image": "./imagesTs/MSWAL_0467_0000.nii.gz", "label": "./labelsTs/MSWAL_0467.nii.gz"},
{"image": "./imagesTs/MSWAL_0469_0000.nii.gz", "label": "./labelsTs/MSWAL_0469.nii.gz"},
{"image": "./imagesTs/MSWAL_0472_0000.nii.gz", "label": "./labelsTs/MSWAL_0472.nii.gz"},
{"image": "./imagesTs/MSWAL_0478_0000.nii.gz", "label": "./labelsTs/MSWAL_0478.nii.gz"},
{"image": "./imagesTs/MSWAL_0481_0000.nii.gz", "label": "./labelsTs/MSWAL_0481.nii.gz"},
{"image": "./imagesTs/MSWAL_0494_0000.nii.gz", "label": "./labelsTs/MSWAL_0494.nii.gz"},
{"image": "./imagesTs/MSWAL_0496_0000.nii.gz", "label": "./labelsTs/MSWAL_0496.nii.gz"},
{"image": "./imagesTs/MSWAL_0499_0000.nii.gz", "label": "./labelsTs/MSWAL_0499.nii.gz"},
{"image": "./imagesTs/MSWAL_0502_0000.nii.gz", "label": "./labelsTs/MSWAL_0502.nii.gz"},
{"image": "./imagesTs/MSWAL_0503_0000.nii.gz", "label": "./labelsTs/MSWAL_0503.nii.gz"},
{"image": "./imagesTs/MSWAL_0511_0000.nii.gz", "label": "./labelsTs/MSWAL_0511.nii.gz"},
{"image": "./imagesTs/MSWAL_0513_0000.nii.gz", "label": "./labelsTs/MSWAL_0513.nii.gz"},
{"image": "./imagesTs/MSWAL_0514_0000.nii.gz", "label": "./labelsTs/MSWAL_0514.nii.gz"},
{"image": "./imagesTs/MSWAL_0515_0000.nii.gz", "label": "./labelsTs/MSWAL_0515.nii.gz"},
{"image": "./imagesTs/MSWAL_0517_0000.nii.gz", "label": "./labelsTs/MSWAL_0517.nii.gz"},
{"image": "./imagesTs/MSWAL_0520_0000.nii.gz", "label": "./labelsTs/MSWAL_0520.nii.gz"},
{"image": "./imagesTs/MSWAL_0525_0000.nii.gz", "label": "./labelsTs/MSWAL_0525.nii.gz"},
{"image": "./imagesTs/MSWAL_0528_0000.nii.gz", "label": "./labelsTs/MSWAL_0528.nii.gz"},
{"image": "./imagesTs/MSWAL_0529_0000.nii.gz", "label": "./labelsTs/MSWAL_0529.nii.gz"},
{"image": "./imagesTs/MSWAL_0532_0000.nii.gz", "label": "./labelsTs/MSWAL_0532.nii.gz"},
{"image": "./imagesTs/MSWAL_0533_0000.nii.gz", "label": "./labelsTs/MSWAL_0533.nii.gz"},
{"image": "./imagesTs/MSWAL_0537_0000.nii.gz", "label": "./labelsTs/MSWAL_0537.nii.gz"},
{"image": "./imagesTs/MSWAL_0541_0000.nii.gz", "label": "./labelsTs/MSWAL_0541.nii.gz"},
{"image": "./imagesTs/MSWAL_0543_0000.nii.gz", "label": "./labelsTs/MSWAL_0543.nii.gz"},
{"image": "./imagesTs/MSWAL_0560_0000.nii.gz", "label": "./labelsTs/MSWAL_0560.nii.gz"},
{"image": "./imagesTs/MSWAL_0565_0000.nii.gz", "label": "./labelsTs/MSWAL_0565.nii.gz"},
{"image": "./imagesTs/MSWAL_0569_0000.nii.gz", "label": "./labelsTs/MSWAL_0569.nii.gz"},
{"image": "./imagesTs/MSWAL_0570_0000.nii.gz", "label": "./labelsTs/MSWAL_0570.nii.gz"},
{"image": "./imagesTs/MSWAL_0572_0000.nii.gz", "label": "./labelsTs/MSWAL_0572.nii.gz"},
{"image": "./imagesTs/MSWAL_0576_0000.nii.gz", "label": "./labelsTs/MSWAL_0576.nii.gz"},
{"image": "./imagesTs/MSWAL_0585_0000.nii.gz", "label": "./labelsTs/MSWAL_0585.nii.gz"},
{"image": "./imagesTs/MSWAL_0587_0000.nii.gz", "label": "./labelsTs/MSWAL_0587.nii.gz"},
{"image": "./imagesTs/MSWAL_0588_0000.nii.gz", "label": "./labelsTs/MSWAL_0588.nii.gz"},
{"image": "./imagesTs/MSWAL_0589_0000.nii.gz", "label": "./labelsTs/MSWAL_0589.nii.gz"},
{"image": "./imagesTs/MSWAL_0594_0000.nii.gz", "label": "./labelsTs/MSWAL_0594.nii.gz"},
{"image": "./imagesTs/MSWAL_0603_0000.nii.gz", "label": "./labelsTs/MSWAL_0603.nii.gz"},
{"image": "./imagesTs/MSWAL_0606_0000.nii.gz", "label": "./labelsTs/MSWAL_0606.nii.gz"},
{"image": "./imagesTs/MSWAL_0607_0000.nii.gz", "label": "./labelsTs/MSWAL_0607.nii.gz"},
{"image": "./imagesTs/MSWAL_0609_0000.nii.gz", "label": "./labelsTs/MSWAL_0609.nii.gz"},
{"image": "./imagesTs/MSWAL_0610_0000.nii.gz", "label": "./labelsTs/MSWAL_0610.nii.gz"},
{"image": "./imagesTs/MSWAL_0611_0000.nii.gz", "label": "./labelsTs/MSWAL_0611.nii.gz"},
{"image": "./imagesTs/MSWAL_0613_0000.nii.gz", "label": "./labelsTs/MSWAL_0613.nii.gz"},
{"image": "./imagesTs/MSWAL_0618_0000.nii.gz", "label": "./labelsTs/MSWAL_0618.nii.gz"},
{"image": "./imagesTs/MSWAL_0619_0000.nii.gz", "label": "./labelsTs/MSWAL_0619.nii.gz"},
{"image": "./imagesTs/MSWAL_0620_0000.nii.gz", "label": "./labelsTs/MSWAL_0620.nii.gz"},
{'image': './imagesTs/MSWAL_0622_0000.nii.gz', 'label': './labelsTs/MSWAL_0622.nii.gz'}, {'image': './imagesTs/MSWAL_0624_0000.nii.gz', 'label': './labelsTs/MSWAL_0624.nii.gz'}, {'image': './imagesTs/MSWAL_0631_0000.nii.gz', 'label': './labelsTs/MSWAL_0631.nii.gz'}, {'image': './imagesTs/MSWAL_0633_0000.nii.gz', 'label': './labelsTs/MSWAL_0633.nii.gz'}, {'image': './imagesTs/MSWAL_0634_0000.nii.gz', 'label': './labelsTs/MSWAL_0634.nii.gz'}, {'image': './imagesTs/MSWAL_0637_0000.nii.gz', 'label': './labelsTs/MSWAL_0637.nii.gz'}, {'image': './imagesTs/MSWAL_0639_0000.nii.gz', 'label': './labelsTs/MSWAL_0639.nii.gz'}, {'image': './imagesTs/MSWAL_0642_0000.nii.gz', 'label': './labelsTs/MSWAL_0642.nii.gz'}, {'image': './imagesTs/MSWAL_0645_0000.nii.gz', 'label': './labelsTs/MSWAL_0645.nii.gz'}, {'image': './imagesTs/MSWAL_0647_0000.nii.gz', 'label': './labelsTs/MSWAL_0647.nii.gz'}, {'image': './imagesTs/MSWAL_0652_0000.nii.gz', 'label': './labelsTs/MSWAL_0652.nii.gz'}, {'image': './imagesTs/MSWAL_0657_0000.nii.gz', 'label': './labelsTs/MSWAL_0657.nii.gz'}, {'image': './imagesTs/MSWAL_0659_0000.nii.gz', 'label': './labelsTs/MSWAL_0659.nii.gz'}, {'image': './imagesTs/MSWAL_0664_0000.nii.gz', 'label': './labelsTs/MSWAL_0664.nii.gz'}, {'image': './imagesTs/MSWAL_0665_0000.nii.gz', 'label': './labelsTs/MSWAL_0665.nii.gz'}, {'image': './imagesTs/MSWAL_0672_0000.nii.gz', 'label': './labelsTs/MSWAL_0672.nii.gz'}, {'image': './imagesTs/MSWAL_0678_0000.nii.gz', 'label': './labelsTs/MSWAL_0678.nii.gz'}, {'image': './imagesTs/MSWAL_0683_0000.nii.gz', 'label': './labelsTs/MSWAL_0683.nii.gz'}, {'image': './imagesTs/MSWAL_0684_0000.nii.gz', 'label': './labelsTs/MSWAL_0684.nii.gz'}, {'image': './imagesTs/MSWAL_0689_0000.nii.gz', 'label': './labelsTs/MSWAL_0689.nii.gz'}, {'image': './imagesTs/MSWAL_0691_0000.nii.gz', 'label': './labelsTs/MSWAL_0691.nii.gz'}]}, 'unpack_dataset': True, 'device': device(type='cuda')}", + "network": "OptimizedModule", + "num_epochs": "1000", + 
"num_input_channels": "1", + "num_iterations_per_epoch": "250", + "num_val_iterations_per_epoch": "50", + "optimizer": "SGD (\nParameter Group 0\n dampening: 0\n differentiable: False\n foreach: None\n fused: None\n initial_lr: 0.01\n lr: 0.01\n maximize: False\n momentum: 0.99\n nesterov: True\n weight_decay: 3e-05\n)", + "output_folder": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3", + "output_folder_base": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres", + "oversample_foreground_percent": "0.33", + "plans_manager": "{'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'configurations': {'2d': {'data_identifier': 'nnUNetPlans_2d', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 35, 'patch_size': [512, 512], 'median_image_size_in_voxels': [512.0, 512.0], 'spacing': [0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 8, 'features_per_stage': [32, 64, 128, 256, 512, 512, 512, 512], 'conv_op': 'torch.nn.modules.conv.Conv2d', 'kernel_sizes': [[3, 3], [3, 
3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3]], 'strides': [[1, 1], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm2d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_lowres': {'data_identifier': 'nnUNetResEncUNetLPlans_3d_lowres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [190, 381, 381], 'spacing': [1.6798954741801528, 1.0079372845080916, 1.0079372845080916], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 
'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': False, 'next_stage': '3d_cascade_fullres'}, '3d_fullres': {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, 
'3d_cascade_fullres': {'inherits_from': '3d_fullres', 'previous_stage': '3d_lowres'}}, 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}}", + "preprocessed_dataset_folder": "/data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/nnUNetPlans_3d_fullres", + "preprocessed_dataset_folder_base": "/data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL", + "save_every": "50", + "torch_version": "2.5.0+cu121", + "unpack_dataset": "True", + "was_initialized": "True", + "weight_decay": "3e-05" +} \ No newline at end of file diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/progress.png b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/progress.png new file mode 100644 index 0000000000000000000000000000000000000000..3a121c90faaff73422677ac0e9ecd1ef76b7f619 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/progress.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d1df517bb688a8649654b00fd14470b96ac37a3abe6a35096c045f4d93be41f0 +size 1243368 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/training_log_2026_4_8_15_29_13.txt b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/training_log_2026_4_8_15_29_13.txt new file mode 100644 index 0000000000000000000000000000000000000000..02799c007372d551a631d0c1452279213276204f --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/training_log_2026_4_8_15_29_13.txt @@ -0,0 +1,11 @@ + +####################################################################### +Please cite the following paper when using nnU-Net: +Isensee, F., Jaeger, P. F., Kohl, S. 
A., Petersen, J., & Maier-Hein, K. H. (2021). nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. Nature methods, 18(2), 203-211. +####################################################################### + +2026-04-08 15:29:13.407142: do_dummy_2d_data_aug: False +2026-04-08 15:29:13.412213: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-08 15:29:13.414359: The split file contains 5 splits. +2026-04-08 15:29:13.415981: Desired fold for training: 3 +2026-04-08 15:29:13.417588: This split has 387 training and 97 validation cases. diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/training_log_2026_4_8_15_55_58.txt b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/training_log_2026_4_8_15_55_58.txt new file mode 100644 index 0000000000000000000000000000000000000000..6c32a2b14211396a457c5d677c2dc3a061e43848 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_3/training_log_2026_4_8_15_55_58.txt @@ -0,0 +1,7396 @@ + +####################################################################### +Please cite the following paper when using nnU-Net: +Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. Nature methods, 18(2), 203-211. +####################################################################### + +2026-04-08 15:55:58.247345: do_dummy_2d_data_aug: False +2026-04-08 15:55:58.323944: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-08 15:55:58.327748: The split file contains 5 splits. +2026-04-08 15:55:58.329991: Desired fold for training: 3 +2026-04-08 15:55:58.332405: This split has 387 training and 97 validation cases. 
+2026-04-08 15:56:14.414390: Using torch.compile... + +This is the configuration used by this training: +Configuration name: 3d_fullres + {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True} + +These are the global plan.json settings: + {'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 
512, 512], 'image_reader_writer': 'SimpleITKIO', 'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}} + +2026-04-08 15:56:16.278989: unpacking dataset... +2026-04-08 15:56:26.420583: unpacking done... +2026-04-08 15:56:26.452514: Unable to plot network architecture: nnUNet_compile is enabled! +2026-04-08 15:56:26.513585: +2026-04-08 15:56:26.515410: Epoch 0 +2026-04-08 15:56:26.517446: Current learning rate: 0.01 +2026-04-08 16:00:37.488277: train_loss 0.2036 +2026-04-08 16:00:37.502760: val_loss 0.0857 +2026-04-08 16:00:37.505850: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:00:37.509860: Epoch time: 250.98 s +2026-04-08 16:00:37.514173: Yayy! New best EMA pseudo Dice: 0.0 +2026-04-08 16:00:40.448700: +2026-04-08 16:00:40.454671: Epoch 1 +2026-04-08 16:00:40.459939: Current learning rate: 0.00999 +2026-04-08 16:02:27.141958: train_loss 0.0843 +2026-04-08 16:02:27.150529: val_loss 0.0501 +2026-04-08 16:02:27.153193: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:02:27.156580: Epoch time: 106.7 s +2026-04-08 16:02:28.224591: +2026-04-08 16:02:28.227232: Epoch 2 +2026-04-08 16:02:28.229683: Current learning rate: 0.00998 +2026-04-08 16:04:09.752624: train_loss 0.0633 +2026-04-08 16:04:09.760198: val_loss 0.0457 +2026-04-08 16:04:09.762309: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:04:09.765492: Epoch time: 101.53 s +2026-04-08 16:04:10.838182: +2026-04-08 16:04:10.840652: Epoch 3 +2026-04-08 16:04:10.842571: Current learning rate: 0.00997 +2026-04-08 16:06:04.768734: train_loss 0.0577 +2026-04-08 16:06:04.776798: val_loss 0.0597 +2026-04-08 16:06:04.779323: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] 
+2026-04-08 16:06:04.782196: Epoch time: 113.93 s +2026-04-08 16:06:05.814012: +2026-04-08 16:06:05.816929: Epoch 4 +2026-04-08 16:06:05.819193: Current learning rate: 0.00996 +2026-04-08 16:07:52.684090: train_loss 0.0529 +2026-04-08 16:07:52.693724: val_loss 0.0594 +2026-04-08 16:07:52.696805: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:07:52.700101: Epoch time: 106.87 s +2026-04-08 16:07:53.811359: +2026-04-08 16:07:53.814950: Epoch 5 +2026-04-08 16:07:53.824088: Current learning rate: 0.00995 +2026-04-08 16:09:45.247181: train_loss 0.0552 +2026-04-08 16:09:45.256514: val_loss 0.0767 +2026-04-08 16:09:45.262812: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:09:45.268806: Epoch time: 111.44 s +2026-04-08 16:09:46.294309: +2026-04-08 16:09:46.297411: Epoch 6 +2026-04-08 16:09:46.300208: Current learning rate: 0.00995 +2026-04-08 16:12:05.831686: train_loss 0.052 +2026-04-08 16:12:05.839151: val_loss 0.0612 +2026-04-08 16:12:05.841480: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:12:05.844278: Epoch time: 139.54 s +2026-04-08 16:12:06.905794: +2026-04-08 16:12:06.908075: Epoch 7 +2026-04-08 16:12:06.909633: Current learning rate: 0.00994 +2026-04-08 16:13:56.041128: train_loss 0.052 +2026-04-08 16:13:56.051425: val_loss 0.0465 +2026-04-08 16:13:56.055628: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:13:56.060694: Epoch time: 109.14 s +2026-04-08 16:13:57.196902: +2026-04-08 16:13:57.200680: Epoch 8 +2026-04-08 16:13:57.203938: Current learning rate: 0.00993 +2026-04-08 16:15:50.948514: train_loss 0.0518 +2026-04-08 16:15:50.955741: val_loss 0.0572 +2026-04-08 16:15:50.958914: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:15:50.964927: Epoch time: 113.75 s +2026-04-08 16:15:52.067693: +2026-04-08 16:15:52.070210: Epoch 9 +2026-04-08 16:15:52.073494: Current learning rate: 0.00992 +2026-04-08 16:17:48.768445: train_loss 0.0717 +2026-04-08 16:17:48.785794: val_loss 0.046 
+2026-04-08 16:17:48.793101: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:17:48.800809: Epoch time: 116.7 s +2026-04-08 16:17:49.825967: +2026-04-08 16:17:49.828292: Epoch 10 +2026-04-08 16:17:49.830970: Current learning rate: 0.00991 +2026-04-08 16:19:31.759923: train_loss 0.0529 +2026-04-08 16:19:31.767368: val_loss 0.0525 +2026-04-08 16:19:31.770938: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:19:31.773656: Epoch time: 101.94 s +2026-04-08 16:19:32.802277: +2026-04-08 16:19:32.804233: Epoch 11 +2026-04-08 16:19:32.805908: Current learning rate: 0.0099 +2026-04-08 16:21:36.343250: train_loss 0.0521 +2026-04-08 16:21:36.353979: val_loss 0.0396 +2026-04-08 16:21:36.358792: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:21:36.362843: Epoch time: 123.54 s +2026-04-08 16:21:37.400528: +2026-04-08 16:21:37.403177: Epoch 12 +2026-04-08 16:21:37.405415: Current learning rate: 0.00989 +2026-04-08 16:23:37.894711: train_loss 0.052 +2026-04-08 16:23:37.906720: val_loss 0.0408 +2026-04-08 16:23:37.910232: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:23:37.914105: Epoch time: 120.5 s +2026-04-08 16:23:38.951226: +2026-04-08 16:23:38.954052: Epoch 13 +2026-04-08 16:23:38.956075: Current learning rate: 0.00988 +2026-04-08 16:25:36.461032: train_loss 0.0648 +2026-04-08 16:25:36.470635: val_loss 0.0591 +2026-04-08 16:25:36.472850: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:25:36.476068: Epoch time: 117.51 s +2026-04-08 16:25:37.528713: +2026-04-08 16:25:37.531483: Epoch 14 +2026-04-08 16:25:37.534035: Current learning rate: 0.00987 +2026-04-08 16:27:25.374086: train_loss 0.0587 +2026-04-08 16:27:25.383032: val_loss 0.0488 +2026-04-08 16:27:25.385507: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:27:25.388714: Epoch time: 107.85 s +2026-04-08 16:27:26.473362: +2026-04-08 16:27:26.477295: Epoch 15 +2026-04-08 16:27:26.480155: Current learning rate: 0.00986 +2026-04-08 
16:29:31.220097: train_loss 0.0465 +2026-04-08 16:29:31.233685: val_loss 0.0477 +2026-04-08 16:29:31.236639: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:29:31.239417: Epoch time: 124.75 s +2026-04-08 16:29:32.328160: +2026-04-08 16:29:32.331738: Epoch 16 +2026-04-08 16:29:32.335222: Current learning rate: 0.00986 +2026-04-08 16:31:38.162941: train_loss 0.0504 +2026-04-08 16:31:38.173952: val_loss 0.0468 +2026-04-08 16:31:38.176502: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:31:38.179656: Epoch time: 125.84 s +2026-04-08 16:31:40.501303: +2026-04-08 16:31:40.504042: Epoch 17 +2026-04-08 16:31:40.505929: Current learning rate: 0.00985 +2026-04-08 16:33:24.585217: train_loss 0.0608 +2026-04-08 16:33:24.593429: val_loss 0.0383 +2026-04-08 16:33:24.595944: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:33:24.598666: Epoch time: 104.09 s +2026-04-08 16:33:25.670253: +2026-04-08 16:33:25.673046: Epoch 18 +2026-04-08 16:33:25.675332: Current learning rate: 0.00984 +2026-04-08 16:35:13.414158: train_loss 0.0558 +2026-04-08 16:35:13.469317: val_loss 0.0452 +2026-04-08 16:35:13.474007: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:35:13.478225: Epoch time: 107.75 s +2026-04-08 16:35:14.552841: +2026-04-08 16:35:14.555243: Epoch 19 +2026-04-08 16:35:14.557539: Current learning rate: 0.00983 +2026-04-08 16:37:02.580598: train_loss 0.0397 +2026-04-08 16:37:02.589670: val_loss 0.0633 +2026-04-08 16:37:02.592893: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:37:02.595228: Epoch time: 108.03 s +2026-04-08 16:37:03.694186: +2026-04-08 16:37:03.697706: Epoch 20 +2026-04-08 16:37:03.700020: Current learning rate: 0.00982 +2026-04-08 16:38:47.600898: train_loss 0.0601 +2026-04-08 16:38:47.609892: val_loss 0.0517 +2026-04-08 16:38:47.612712: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:38:47.615934: Epoch time: 103.91 s +2026-04-08 16:38:48.721196: +2026-04-08 16:38:48.723174: 
Epoch 21 +2026-04-08 16:38:48.724869: Current learning rate: 0.00981 +2026-04-08 16:40:38.128935: train_loss 0.0459 +2026-04-08 16:40:38.140960: val_loss 0.0346 +2026-04-08 16:40:38.144832: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:40:38.149057: Epoch time: 109.41 s +2026-04-08 16:40:39.193746: +2026-04-08 16:40:39.197326: Epoch 22 +2026-04-08 16:40:39.202808: Current learning rate: 0.0098 +2026-04-08 16:42:24.910711: train_loss 0.0431 +2026-04-08 16:42:24.922130: val_loss 0.0313 +2026-04-08 16:42:24.924390: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:42:24.927344: Epoch time: 105.72 s +2026-04-08 16:42:25.968154: +2026-04-08 16:42:25.971514: Epoch 23 +2026-04-08 16:42:25.973695: Current learning rate: 0.00979 +2026-04-08 16:44:22.191053: train_loss 0.0491 +2026-04-08 16:44:22.198932: val_loss 0.0491 +2026-04-08 16:44:22.201359: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:44:22.203643: Epoch time: 116.23 s +2026-04-08 16:44:23.197529: +2026-04-08 16:44:23.200408: Epoch 24 +2026-04-08 16:44:23.203175: Current learning rate: 0.00978 +2026-04-08 16:46:13.306782: train_loss 0.0579 +2026-04-08 16:46:13.322071: val_loss 0.0347 +2026-04-08 16:46:13.324954: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:46:13.329425: Epoch time: 110.11 s +2026-04-08 16:46:14.372353: +2026-04-08 16:46:14.375125: Epoch 25 +2026-04-08 16:46:14.378608: Current learning rate: 0.00977 +2026-04-08 16:48:01.203402: train_loss 0.042 +2026-04-08 16:48:01.212227: val_loss 0.0747 +2026-04-08 16:48:01.214898: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:48:01.217724: Epoch time: 106.83 s +2026-04-08 16:48:02.322127: +2026-04-08 16:48:02.326161: Epoch 26 +2026-04-08 16:48:02.328150: Current learning rate: 0.00977 +2026-04-08 16:49:45.430376: train_loss 0.0375 +2026-04-08 16:49:45.442914: val_loss 0.0379 +2026-04-08 16:49:45.446124: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:49:45.448598: 
Epoch time: 103.11 s +2026-04-08 16:49:46.485301: +2026-04-08 16:49:46.487603: Epoch 27 +2026-04-08 16:49:46.489769: Current learning rate: 0.00976 +2026-04-08 16:51:34.287468: train_loss 0.0444 +2026-04-08 16:51:34.336537: val_loss 0.0346 +2026-04-08 16:51:34.340904: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:51:34.345199: Epoch time: 107.81 s +2026-04-08 16:51:35.387311: +2026-04-08 16:51:35.390991: Epoch 28 +2026-04-08 16:51:35.397048: Current learning rate: 0.00975 +2026-04-08 16:53:23.854121: train_loss 0.0479 +2026-04-08 16:53:23.877910: val_loss 0.0343 +2026-04-08 16:53:23.896708: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:53:23.904124: Epoch time: 108.47 s +2026-04-08 16:53:24.982896: +2026-04-08 16:53:24.989522: Epoch 29 +2026-04-08 16:53:24.998205: Current learning rate: 0.00974 +2026-04-08 16:55:14.073076: train_loss 0.0433 +2026-04-08 16:55:14.082637: val_loss 0.0341 +2026-04-08 16:55:14.084661: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:55:14.088043: Epoch time: 109.09 s +2026-04-08 16:55:15.169253: +2026-04-08 16:55:15.172057: Epoch 30 +2026-04-08 16:55:15.175367: Current learning rate: 0.00973 +2026-04-08 16:57:07.234166: train_loss 0.0333 +2026-04-08 16:57:07.241867: val_loss 0.0387 +2026-04-08 16:57:07.247489: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:57:07.250474: Epoch time: 112.07 s +2026-04-08 16:57:08.295779: +2026-04-08 16:57:08.298971: Epoch 31 +2026-04-08 16:57:08.302571: Current learning rate: 0.00972 +2026-04-08 16:58:51.432600: train_loss 0.0402 +2026-04-08 16:58:51.447406: val_loss 0.0284 +2026-04-08 16:58:51.450264: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:58:51.452922: Epoch time: 103.14 s +2026-04-08 16:58:52.496996: +2026-04-08 16:58:52.500028: Epoch 32 +2026-04-08 16:58:52.501964: Current learning rate: 0.00971 +2026-04-08 17:00:38.556531: train_loss 0.047 +2026-04-08 17:00:38.563714: val_loss 0.0503 +2026-04-08 17:00:38.567464: 
Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:00:38.570831: Epoch time: 106.06 s +2026-04-08 17:00:39.599121: +2026-04-08 17:00:39.601324: Epoch 33 +2026-04-08 17:00:39.605113: Current learning rate: 0.0097 +2026-04-08 17:02:26.830563: train_loss 0.0357 +2026-04-08 17:02:26.837053: val_loss 0.0292 +2026-04-08 17:02:26.839226: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:02:26.841453: Epoch time: 107.24 s +2026-04-08 17:02:27.885525: +2026-04-08 17:02:27.887972: Epoch 34 +2026-04-08 17:02:27.891025: Current learning rate: 0.00969 +2026-04-08 17:04:14.034876: train_loss 0.0408 +2026-04-08 17:04:14.042893: val_loss 0.0326 +2026-04-08 17:04:14.064726: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:04:14.067081: Epoch time: 106.15 s +2026-04-08 17:04:15.123796: +2026-04-08 17:04:15.126935: Epoch 35 +2026-04-08 17:04:15.128835: Current learning rate: 0.00968 +2026-04-08 17:06:06.310948: train_loss 0.04 +2026-04-08 17:06:06.321069: val_loss 0.0418 +2026-04-08 17:06:06.329484: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:06:06.332274: Epoch time: 111.19 s +2026-04-08 17:06:07.418210: +2026-04-08 17:06:07.422213: Epoch 36 +2026-04-08 17:06:07.427273: Current learning rate: 0.00968 +2026-04-08 17:07:53.013049: train_loss 0.0428 +2026-04-08 17:07:53.021474: val_loss 0.0453 +2026-04-08 17:07:53.023707: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:07:53.026928: Epoch time: 105.6 s +2026-04-08 17:07:54.095106: +2026-04-08 17:07:54.101847: Epoch 37 +2026-04-08 17:07:54.106549: Current learning rate: 0.00967 +2026-04-08 17:09:37.930691: train_loss 0.0433 +2026-04-08 17:09:37.944581: val_loss 0.0324 +2026-04-08 17:09:37.946850: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:09:37.949875: Epoch time: 103.84 s +2026-04-08 17:09:38.997436: +2026-04-08 17:09:38.999791: Epoch 38 +2026-04-08 17:09:39.001648: Current learning rate: 0.00966 +2026-04-08 17:11:22.740286: train_loss 0.0382 
+2026-04-08 17:11:22.756039: val_loss 0.0351 +2026-04-08 17:11:22.764635: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:11:22.774782: Epoch time: 103.75 s +2026-04-08 17:11:23.855153: +2026-04-08 17:11:23.857105: Epoch 39 +2026-04-08 17:11:23.859137: Current learning rate: 0.00965 +2026-04-08 17:13:08.070768: train_loss 0.0348 +2026-04-08 17:13:08.076825: val_loss 0.0312 +2026-04-08 17:13:08.078637: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:13:08.080777: Epoch time: 104.22 s +2026-04-08 17:13:09.149108: +2026-04-08 17:13:09.151337: Epoch 40 +2026-04-08 17:13:09.153748: Current learning rate: 0.00964 +2026-04-08 17:14:53.226207: train_loss 0.0358 +2026-04-08 17:14:53.235571: val_loss 0.0213 +2026-04-08 17:14:53.240692: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:14:53.250257: Epoch time: 104.08 s +2026-04-08 17:14:54.373048: +2026-04-08 17:14:54.375993: Epoch 41 +2026-04-08 17:14:54.378380: Current learning rate: 0.00963 +2026-04-08 17:16:38.426155: train_loss 0.0384 +2026-04-08 17:16:38.435017: val_loss 0.0329 +2026-04-08 17:16:38.438245: Pseudo dice [0.0, 0.0, 0.0164, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:16:38.440375: Epoch time: 104.06 s +2026-04-08 17:16:38.444300: Yayy! New best EMA pseudo Dice: 0.0002 +2026-04-08 17:16:41.319968: +2026-04-08 17:16:41.322369: Epoch 42 +2026-04-08 17:16:41.324407: Current learning rate: 0.00962 +2026-04-08 17:18:24.545704: train_loss 0.0372 +2026-04-08 17:18:24.554124: val_loss 0.0357 +2026-04-08 17:18:24.563980: Pseudo dice [0.0, 0.0, 0.0058, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:18:24.568969: Epoch time: 103.23 s +2026-04-08 17:18:24.579269: Yayy! 
New best EMA pseudo Dice: 0.0003 +2026-04-08 17:18:27.269286: +2026-04-08 17:18:27.272467: Epoch 43 +2026-04-08 17:18:27.274679: Current learning rate: 0.00961 +2026-04-08 17:20:09.347315: train_loss 0.0244 +2026-04-08 17:20:09.356277: val_loss 0.0292 +2026-04-08 17:20:09.358462: Pseudo dice [0.0, 0.0, 0.0032, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:20:09.360705: Epoch time: 102.08 s +2026-04-08 17:20:09.363165: Yayy! New best EMA pseudo Dice: 0.0003 +2026-04-08 17:20:12.173800: +2026-04-08 17:20:12.176133: Epoch 44 +2026-04-08 17:20:12.178426: Current learning rate: 0.0096 +2026-04-08 17:21:56.485363: train_loss 0.0264 +2026-04-08 17:21:56.492790: val_loss 0.034 +2026-04-08 17:21:56.494687: Pseudo dice [0.0, 0.0, 0.2036, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:21:56.497291: Epoch time: 104.31 s +2026-04-08 17:21:56.499111: Yayy! New best EMA pseudo Dice: 0.0032 +2026-04-08 17:21:59.344454: +2026-04-08 17:21:59.346597: Epoch 45 +2026-04-08 17:21:59.348220: Current learning rate: 0.00959 +2026-04-08 17:23:44.151436: train_loss 0.0251 +2026-04-08 17:23:44.158225: val_loss 0.0276 +2026-04-08 17:23:44.160676: Pseudo dice [0.0, 0.0, 0.0072, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:23:44.163623: Epoch time: 104.81 s +2026-04-08 17:23:45.195694: +2026-04-08 17:23:45.198736: Epoch 46 +2026-04-08 17:23:45.200920: Current learning rate: 0.00959 +2026-04-08 17:25:28.873833: train_loss 0.0317 +2026-04-08 17:25:28.880712: val_loss 0.0195 +2026-04-08 17:25:28.884387: Pseudo dice [0.0, 0.0, 0.0764, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:25:28.888489: Epoch time: 103.68 s +2026-04-08 17:25:28.890353: Yayy! 
New best EMA pseudo Dice: 0.0038 +2026-04-08 17:25:31.581033: +2026-04-08 17:25:31.584355: Epoch 47 +2026-04-08 17:25:31.588744: Current learning rate: 0.00958 +2026-04-08 17:27:14.469495: train_loss 0.0198 +2026-04-08 17:27:14.477807: val_loss 0.0362 +2026-04-08 17:27:14.480322: Pseudo dice [0.0, 0.0, 0.0341, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:27:14.482526: Epoch time: 102.89 s +2026-04-08 17:27:14.484757: Yayy! New best EMA pseudo Dice: 0.0039 +2026-04-08 17:27:17.274223: +2026-04-08 17:27:17.276978: Epoch 48 +2026-04-08 17:27:17.278809: Current learning rate: 0.00957 +2026-04-08 17:29:00.738990: train_loss 0.0181 +2026-04-08 17:29:00.748540: val_loss 0.0141 +2026-04-08 17:29:00.750837: Pseudo dice [0.0, 0.0, 0.3034, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:29:00.753157: Epoch time: 103.47 s +2026-04-08 17:29:00.756234: Yayy! New best EMA pseudo Dice: 0.0078 +2026-04-08 17:29:03.502902: +2026-04-08 17:29:03.504456: Epoch 49 +2026-04-08 17:29:03.506337: Current learning rate: 0.00956 +2026-04-08 17:30:47.292823: train_loss 0.0138 +2026-04-08 17:30:47.299782: val_loss 0.0119 +2026-04-08 17:30:47.302762: Pseudo dice [0.0, 0.0, 0.1153, 0.0, 0.0, 0.0, 0.0917] +2026-04-08 17:30:47.305864: Epoch time: 103.79 s +2026-04-08 17:30:49.069751: Yayy! New best EMA pseudo Dice: 0.01 +2026-04-08 17:30:51.909717: +2026-04-08 17:30:51.913623: Epoch 50 +2026-04-08 17:30:51.916183: Current learning rate: 0.00955 +2026-04-08 17:32:35.653101: train_loss 0.0215 +2026-04-08 17:32:35.659067: val_loss 0.0236 +2026-04-08 17:32:35.661763: Pseudo dice [0.0, 0.0, 0.0267, 0.0, 0.0, 0.0, 0.1251] +2026-04-08 17:32:35.663888: Epoch time: 103.75 s +2026-04-08 17:32:35.666713: Yayy! 
New best EMA pseudo Dice: 0.0112 +2026-04-08 17:32:38.446577: +2026-04-08 17:32:38.449109: Epoch 51 +2026-04-08 17:32:38.451015: Current learning rate: 0.00954 +2026-04-08 17:34:24.321116: train_loss 0.0203 +2026-04-08 17:34:24.330168: val_loss 0.017 +2026-04-08 17:34:24.332237: Pseudo dice [0.0, 0.0, 0.4476, 0.0, 0.0, 0.0962, 0.1863] +2026-04-08 17:34:24.334744: Epoch time: 105.88 s +2026-04-08 17:34:24.337950: Yayy! New best EMA pseudo Dice: 0.0205 +2026-04-08 17:34:26.852812: +2026-04-08 17:34:26.857296: Epoch 52 +2026-04-08 17:34:26.860306: Current learning rate: 0.00953 +2026-04-08 17:36:13.067098: train_loss 0.013 +2026-04-08 17:36:13.079360: val_loss 0.0027 +2026-04-08 17:36:13.083495: Pseudo dice [0.0, 0.0, 0.2983, 0.0, 0.0, 0.1063, 0.3818] +2026-04-08 17:36:13.087824: Epoch time: 106.22 s +2026-04-08 17:36:13.090698: Yayy! New best EMA pseudo Dice: 0.0297 +2026-04-08 17:36:15.867567: +2026-04-08 17:36:15.869800: Epoch 53 +2026-04-08 17:36:15.871596: Current learning rate: 0.00952 +2026-04-08 17:38:03.188877: train_loss 0.0156 +2026-04-08 17:38:03.198597: val_loss 0.0135 +2026-04-08 17:38:03.206562: Pseudo dice [0.0, 0.0, 0.251, 0.0, 0.0, 0.01, 0.334] +2026-04-08 17:38:03.209560: Epoch time: 107.32 s +2026-04-08 17:38:03.216970: Yayy! New best EMA pseudo Dice: 0.0352 +2026-04-08 17:38:06.191055: +2026-04-08 17:38:06.193908: Epoch 54 +2026-04-08 17:38:06.197923: Current learning rate: 0.00951 +2026-04-08 17:39:50.260896: train_loss 0.0066 +2026-04-08 17:39:50.268063: val_loss 0.0084 +2026-04-08 17:39:50.270187: Pseudo dice [0.0, 0.0, 0.1457, 0.0, 0.0, 0.1202, 0.2511] +2026-04-08 17:39:50.272887: Epoch time: 104.07 s +2026-04-08 17:39:50.275131: Yayy! 
New best EMA pseudo Dice: 0.0391 +2026-04-08 17:39:53.053757: +2026-04-08 17:39:53.055628: Epoch 55 +2026-04-08 17:39:53.057704: Current learning rate: 0.0095 +2026-04-08 17:41:46.040891: train_loss 0.009 +2026-04-08 17:41:46.054869: val_loss 0.0057 +2026-04-08 17:41:46.058980: Pseudo dice [0.0, 0.0, 0.2263, 0.0, 0.0, 0.1632, 0.1789] +2026-04-08 17:41:46.062294: Epoch time: 112.99 s +2026-04-08 17:41:46.065197: Yayy! New best EMA pseudo Dice: 0.0433 +2026-04-08 17:41:48.829015: +2026-04-08 17:41:48.831547: Epoch 56 +2026-04-08 17:41:48.833965: Current learning rate: 0.00949 +2026-04-08 17:43:36.901805: train_loss 0.0179 +2026-04-08 17:43:36.910458: val_loss 0.0068 +2026-04-08 17:43:36.913348: Pseudo dice [0.0, 0.0, 0.371, 0.0, 0.0, 0.1596, 0.3299] +2026-04-08 17:43:36.916915: Epoch time: 108.08 s +2026-04-08 17:43:36.921845: Yayy! New best EMA pseudo Dice: 0.0512 +2026-04-08 17:43:39.798277: +2026-04-08 17:43:39.803302: Epoch 57 +2026-04-08 17:43:39.809218: Current learning rate: 0.00949 +2026-04-08 17:45:25.112291: train_loss -0.0003 +2026-04-08 17:45:25.123063: val_loss -0.0044 +2026-04-08 17:45:25.125349: Pseudo dice [0.0, 0.0, 0.052, 0.0, 0.0, 0.054, 0.2535] +2026-04-08 17:45:25.127831: Epoch time: 105.32 s +2026-04-08 17:45:25.129843: Yayy! New best EMA pseudo Dice: 0.0513 +2026-04-08 17:45:27.911934: +2026-04-08 17:45:27.914748: Epoch 58 +2026-04-08 17:45:27.917253: Current learning rate: 0.00948 +2026-04-08 17:47:25.729567: train_loss -0.0036 +2026-04-08 17:47:25.736579: val_loss -0.0105 +2026-04-08 17:47:25.739352: Pseudo dice [0.0, 0.0, 0.4716, 0.0, 0.0, 0.274, 0.3862] +2026-04-08 17:47:25.742000: Epoch time: 117.82 s +2026-04-08 17:47:25.744497: Yayy! 
New best EMA pseudo Dice: 0.0623 +2026-04-08 17:47:28.550382: +2026-04-08 17:47:28.552164: Epoch 59 +2026-04-08 17:47:28.554694: Current learning rate: 0.00947 +2026-04-08 17:49:24.755104: train_loss -0.0076 +2026-04-08 17:49:24.762075: val_loss 0.0037 +2026-04-08 17:49:24.765301: Pseudo dice [0.0, 0.0, 0.1566, 0.0, 0.0, 0.1715, 0.2925] +2026-04-08 17:49:24.768458: Epoch time: 116.21 s +2026-04-08 17:49:24.770363: Yayy! New best EMA pseudo Dice: 0.0649 +2026-04-08 17:49:27.752245: +2026-04-08 17:49:27.754371: Epoch 60 +2026-04-08 17:49:27.756367: Current learning rate: 0.00946 +2026-04-08 17:51:15.143264: train_loss -0.0095 +2026-04-08 17:51:15.154459: val_loss -0.0103 +2026-04-08 17:51:15.158721: Pseudo dice [0.0, 0.0, 0.1599, 0.0, 0.0, 0.2125, 0.1888] +2026-04-08 17:51:15.164824: Epoch time: 107.39 s +2026-04-08 17:51:15.168368: Yayy! New best EMA pseudo Dice: 0.0665 +2026-04-08 17:51:17.972843: +2026-04-08 17:51:17.975051: Epoch 61 +2026-04-08 17:51:17.980392: Current learning rate: 0.00945 +2026-04-08 17:53:08.542302: train_loss -0.0067 +2026-04-08 17:53:08.549560: val_loss -0.0168 +2026-04-08 17:53:08.552666: Pseudo dice [0.0, 0.0, 0.3582, 0.0, 0.0, 0.3805, 0.3877] +2026-04-08 17:53:08.555431: Epoch time: 110.57 s +2026-04-08 17:53:08.558702: Yayy! New best EMA pseudo Dice: 0.0759 +2026-04-08 17:53:11.324603: +2026-04-08 17:53:11.327014: Epoch 62 +2026-04-08 17:53:11.329852: Current learning rate: 0.00944 +2026-04-08 17:54:58.069514: train_loss -0.0041 +2026-04-08 17:54:58.085357: val_loss -0.016 +2026-04-08 17:54:58.089004: Pseudo dice [0.0, 0.0, 0.1838, 0.0, 0.0, 0.2975, 0.2702] +2026-04-08 17:54:58.092795: Epoch time: 106.75 s +2026-04-08 17:54:58.095135: Yayy! 
New best EMA pseudo Dice: 0.079 +2026-04-08 17:55:01.026505: +2026-04-08 17:55:01.031778: Epoch 63 +2026-04-08 17:55:01.037353: Current learning rate: 0.00943 +2026-04-08 17:56:44.187934: train_loss -0.0127 +2026-04-08 17:56:44.196895: val_loss -0.0199 +2026-04-08 17:56:44.199505: Pseudo dice [0.0, 0.0, 0.4859, 0.0, 0.0, 0.326, 0.5314] +2026-04-08 17:56:44.202612: Epoch time: 103.16 s +2026-04-08 17:56:44.205141: Yayy! New best EMA pseudo Dice: 0.0903 +2026-04-08 17:56:47.085091: +2026-04-08 17:56:47.086657: Epoch 64 +2026-04-08 17:56:47.089135: Current learning rate: 0.00942 +2026-04-08 17:58:32.928382: train_loss -0.0175 +2026-04-08 17:58:32.941580: val_loss -0.005 +2026-04-08 17:58:32.945385: Pseudo dice [0.0, 0.0, 0.4232, 0.0, 0.0, 0.2944, 0.2744] +2026-04-08 17:58:32.948465: Epoch time: 105.85 s +2026-04-08 17:58:32.951326: Yayy! New best EMA pseudo Dice: 0.0955 +2026-04-08 17:58:35.886571: +2026-04-08 17:58:35.889477: Epoch 65 +2026-04-08 17:58:35.892471: Current learning rate: 0.00941 +2026-04-08 18:00:19.554149: train_loss -0.0338 +2026-04-08 18:00:19.562120: val_loss -0.0199 +2026-04-08 18:00:19.565153: Pseudo dice [0.0, 0.0, 0.3637, 0.0, 0.0, 0.2806, 0.4468] +2026-04-08 18:00:19.567764: Epoch time: 103.67 s +2026-04-08 18:00:19.570980: Yayy! 
New best EMA pseudo Dice: 0.1015 +2026-04-08 18:00:22.533536: +2026-04-08 18:00:22.535908: Epoch 66 +2026-04-08 18:00:22.537825: Current learning rate: 0.0094 +2026-04-08 18:02:07.237939: train_loss -0.0233 +2026-04-08 18:02:07.245839: val_loss -0.023 +2026-04-08 18:02:07.250692: Pseudo dice [0.0, 0.0, 0.1285, 0.0, 0.0, 0.2507, 0.3198] +2026-04-08 18:02:07.253618: Epoch time: 104.71 s +2026-04-08 18:02:08.303795: +2026-04-08 18:02:08.307972: Epoch 67 +2026-04-08 18:02:08.311241: Current learning rate: 0.00939 +2026-04-08 18:03:54.481896: train_loss -0.0245 +2026-04-08 18:03:54.489208: val_loss -0.0253 +2026-04-08 18:03:54.491925: Pseudo dice [0.0, 0.0, 0.2576, 0.0, 0.0, 0.3196, 0.4008] +2026-04-08 18:03:54.495091: Epoch time: 106.18 s +2026-04-08 18:03:54.497633: Yayy! New best EMA pseudo Dice: 0.1052 +2026-04-08 18:03:57.270571: +2026-04-08 18:03:57.273447: Epoch 68 +2026-04-08 18:03:57.275792: Current learning rate: 0.00939 +2026-04-08 18:05:41.559125: train_loss -0.0216 +2026-04-08 18:05:41.565208: val_loss -0.0163 +2026-04-08 18:05:41.567333: Pseudo dice [0.0, 0.0, 0.5613, 0.0, 0.0, 0.1404, 0.4598] +2026-04-08 18:05:41.569754: Epoch time: 104.29 s +2026-04-08 18:05:41.572616: Yayy! New best EMA pseudo Dice: 0.1113 +2026-04-08 18:05:44.404226: +2026-04-08 18:05:44.407544: Epoch 69 +2026-04-08 18:05:44.410454: Current learning rate: 0.00938 +2026-04-08 18:07:29.289828: train_loss 0.0015 +2026-04-08 18:07:29.303169: val_loss -0.0111 +2026-04-08 18:07:29.310247: Pseudo dice [0.0, 0.0, 0.0979, 0.0, 0.0, 0.402, 0.3269] +2026-04-08 18:07:29.314028: Epoch time: 104.89 s +2026-04-08 18:07:29.317057: Yayy! 
New best EMA pseudo Dice: 0.1119 +2026-04-08 18:07:32.181698: +2026-04-08 18:07:32.184814: Epoch 70 +2026-04-08 18:07:32.187246: Current learning rate: 0.00937 +2026-04-08 18:09:20.759184: train_loss -0.0117 +2026-04-08 18:09:20.767144: val_loss -0.0214 +2026-04-08 18:09:20.771717: Pseudo dice [0.0, 0.0, 0.4971, 0.0, 0.0, 0.3647, 0.4185] +2026-04-08 18:09:20.775270: Epoch time: 108.58 s +2026-04-08 18:09:20.778444: Yayy! New best EMA pseudo Dice: 0.119 +2026-04-08 18:09:23.666632: +2026-04-08 18:09:23.670070: Epoch 71 +2026-04-08 18:09:23.675050: Current learning rate: 0.00936 +2026-04-08 18:11:10.705240: train_loss -0.0154 +2026-04-08 18:11:10.716386: val_loss -0.029 +2026-04-08 18:11:10.719579: Pseudo dice [0.0, 0.0, 0.0107, 0.0, 0.0, 0.3145, 0.4974] +2026-04-08 18:11:10.723206: Epoch time: 107.04 s +2026-04-08 18:11:11.806612: +2026-04-08 18:11:11.808888: Epoch 72 +2026-04-08 18:11:11.810591: Current learning rate: 0.00935 +2026-04-08 18:12:54.509355: train_loss -0.0238 +2026-04-08 18:12:54.515600: val_loss -0.0259 +2026-04-08 18:12:54.518234: Pseudo dice [0.0, 0.0, 0.2089, 0.0, 0.0, 0.2306, 0.5767] +2026-04-08 18:12:54.521255: Epoch time: 102.71 s +2026-04-08 18:12:54.523607: Yayy! New best EMA pseudo Dice: 0.1215 +2026-04-08 18:12:57.137199: +2026-04-08 18:12:57.139202: Epoch 73 +2026-04-08 18:12:57.140712: Current learning rate: 0.00934 +2026-04-08 18:14:40.517491: train_loss -0.0304 +2026-04-08 18:14:40.524968: val_loss -0.0353 +2026-04-08 18:14:40.527231: Pseudo dice [0.0, 0.0, 0.6164, 0.0, 0.0, 0.4293, 0.3516] +2026-04-08 18:14:40.529767: Epoch time: 103.38 s +2026-04-08 18:14:40.532368: Yayy! 
New best EMA pseudo Dice: 0.1293 +2026-04-08 18:14:43.365750: +2026-04-08 18:14:43.370277: Epoch 74 +2026-04-08 18:14:43.374334: Current learning rate: 0.00933 +2026-04-08 18:16:27.498647: train_loss -0.0383 +2026-04-08 18:16:27.533435: val_loss -0.007 +2026-04-08 18:16:27.536845: Pseudo dice [0.0, 0.0, 0.2691, 0.0, 0.0, 0.4698, 0.5751] +2026-04-08 18:16:27.541734: Epoch time: 104.14 s +2026-04-08 18:16:27.544533: Yayy! New best EMA pseudo Dice: 0.1352 +2026-04-08 18:16:30.424214: +2026-04-08 18:16:30.427533: Epoch 75 +2026-04-08 18:16:30.431122: Current learning rate: 0.00932 +2026-04-08 18:18:14.591715: train_loss -0.031 +2026-04-08 18:18:14.598368: val_loss -0.0412 +2026-04-08 18:18:14.600213: Pseudo dice [0.0, 0.0, 0.3733, 0.0, 0.0, 0.2113, 0.4816] +2026-04-08 18:18:14.602578: Epoch time: 104.17 s +2026-04-08 18:18:14.605647: Yayy! New best EMA pseudo Dice: 0.1369 +2026-04-08 18:18:17.161515: +2026-04-08 18:18:17.163536: Epoch 76 +2026-04-08 18:18:17.165987: Current learning rate: 0.00931 +2026-04-08 18:20:01.249179: train_loss -0.0445 +2026-04-08 18:20:01.258998: val_loss -0.0526 +2026-04-08 18:20:01.262762: Pseudo dice [0.0, 0.0, 0.2883, 0.0, 0.0, 0.4549, 0.5459] +2026-04-08 18:20:01.265212: Epoch time: 104.09 s +2026-04-08 18:20:01.268515: Yayy! New best EMA pseudo Dice: 0.1416 +2026-04-08 18:20:04.103781: +2026-04-08 18:20:04.106278: Epoch 77 +2026-04-08 18:20:04.108378: Current learning rate: 0.0093 +2026-04-08 18:21:47.543179: train_loss -0.0402 +2026-04-08 18:21:47.555305: val_loss -0.025 +2026-04-08 18:21:47.561832: Pseudo dice [0.0, 0.0, 0.1708, 0.0, 0.0, 0.4382, 0.4999] +2026-04-08 18:21:47.565926: Epoch time: 103.44 s +2026-04-08 18:21:47.569571: Yayy! 
New best EMA pseudo Dice: 0.1433 +2026-04-08 18:21:50.474001: +2026-04-08 18:21:50.478169: Epoch 78 +2026-04-08 18:21:50.480942: Current learning rate: 0.0093 +2026-04-08 18:23:33.406819: train_loss -0.0339 +2026-04-08 18:23:33.412086: val_loss -0.0173 +2026-04-08 18:23:33.414220: Pseudo dice [0.0, 0.0, 0.3137, 0.0, 0.0, 0.1234, 0.7032] +2026-04-08 18:23:33.416418: Epoch time: 102.94 s +2026-04-08 18:23:33.418467: Yayy! New best EMA pseudo Dice: 0.1452 +2026-04-08 18:23:36.238329: +2026-04-08 18:23:36.240720: Epoch 79 +2026-04-08 18:23:36.242796: Current learning rate: 0.00929 +2026-04-08 18:25:19.559875: train_loss -0.0202 +2026-04-08 18:25:19.567669: val_loss -0.0274 +2026-04-08 18:25:19.569776: Pseudo dice [0.0, 0.0, 0.1604, 0.0, 0.0, 0.4108, 0.2367] +2026-04-08 18:25:19.572990: Epoch time: 103.32 s +2026-04-08 18:25:20.678519: +2026-04-08 18:25:20.680988: Epoch 80 +2026-04-08 18:25:20.683170: Current learning rate: 0.00928 +2026-04-08 18:27:04.708437: train_loss -0.0229 +2026-04-08 18:27:04.715664: val_loss -0.0195 +2026-04-08 18:27:04.719217: Pseudo dice [0.0, 0.0, 0.1, 0.0, 0.0, 0.2392, 0.3091] +2026-04-08 18:27:04.722498: Epoch time: 104.03 s +2026-04-08 18:27:05.805592: +2026-04-08 18:27:05.808410: Epoch 81 +2026-04-08 18:27:05.818013: Current learning rate: 0.00927 +2026-04-08 18:28:48.154880: train_loss -0.0265 +2026-04-08 18:28:48.160909: val_loss -0.0317 +2026-04-08 18:28:48.163517: Pseudo dice [0.0, 0.0, 0.2281, 0.0, 0.0, 0.3463, 0.6452] +2026-04-08 18:28:48.168937: Epoch time: 102.35 s +2026-04-08 18:28:49.279435: +2026-04-08 18:28:49.281120: Epoch 82 +2026-04-08 18:28:49.282541: Current learning rate: 0.00926 +2026-04-08 18:30:34.614448: train_loss -0.0268 +2026-04-08 18:30:34.620818: val_loss -0.0319 +2026-04-08 18:30:34.622811: Pseudo dice [0.0, 0.0, 0.1989, 0.0, 0.0, 0.3375, 0.3229] +2026-04-08 18:30:34.625186: Epoch time: 105.34 s +2026-04-08 18:30:35.693303: +2026-04-08 18:30:35.695140: Epoch 83 +2026-04-08 18:30:35.696984: Current learning 
rate: 0.00925 +2026-04-08 18:32:20.454402: train_loss -0.0256 +2026-04-08 18:32:20.460456: val_loss -0.0493 +2026-04-08 18:32:20.462997: Pseudo dice [0.0, 0.0, 0.4011, 0.0, 0.0, 0.2379, 0.4769] +2026-04-08 18:32:20.466035: Epoch time: 104.76 s +2026-04-08 18:32:21.501307: +2026-04-08 18:32:21.503444: Epoch 84 +2026-04-08 18:32:21.505198: Current learning rate: 0.00924 +2026-04-08 18:34:03.937559: train_loss -0.0469 +2026-04-08 18:34:03.948143: val_loss -0.0306 +2026-04-08 18:34:03.951734: Pseudo dice [0.0, 0.0, 0.3195, 0.0, 0.0, 0.1148, 0.5523] +2026-04-08 18:34:03.955534: Epoch time: 102.44 s +2026-04-08 18:34:04.981331: +2026-04-08 18:34:04.986344: Epoch 85 +2026-04-08 18:34:04.989049: Current learning rate: 0.00923 +2026-04-08 18:35:48.653001: train_loss -0.0313 +2026-04-08 18:35:48.660052: val_loss -0.0168 +2026-04-08 18:35:48.662958: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.6351, 0.5609] +2026-04-08 18:35:48.666212: Epoch time: 103.68 s +2026-04-08 18:35:49.727293: +2026-04-08 18:35:49.733705: Epoch 86 +2026-04-08 18:35:49.737241: Current learning rate: 0.00922 +2026-04-08 18:37:35.972329: train_loss -0.0311 +2026-04-08 18:37:35.979282: val_loss -0.0298 +2026-04-08 18:37:35.982411: Pseudo dice [0.0, 0.0, 0.2832, 0.0, 0.0, 0.2815, 0.3995] +2026-04-08 18:37:35.984715: Epoch time: 106.25 s +2026-04-08 18:37:38.133844: +2026-04-08 18:37:38.135700: Epoch 87 +2026-04-08 18:37:38.139706: Current learning rate: 0.00921 +2026-04-08 18:39:20.484101: train_loss -0.0349 +2026-04-08 18:39:20.490206: val_loss -0.0368 +2026-04-08 18:39:20.492615: Pseudo dice [0.0, 0.0, 0.2279, 0.0, 0.0, 0.2826, 0.4357] +2026-04-08 18:39:20.495059: Epoch time: 102.35 s +2026-04-08 18:39:21.545393: +2026-04-08 18:39:21.548515: Epoch 88 +2026-04-08 18:39:21.555724: Current learning rate: 0.0092 +2026-04-08 18:41:04.454645: train_loss -0.0558 +2026-04-08 18:41:04.460440: val_loss -0.0501 +2026-04-08 18:41:04.462907: Pseudo dice [0.0, 0.0, 0.2397, 0.0, 0.0, 0.3495, 0.4256] +2026-04-08 
18:41:04.465548: Epoch time: 102.91 s +2026-04-08 18:41:05.532144: +2026-04-08 18:41:05.534414: Epoch 89 +2026-04-08 18:41:05.537010: Current learning rate: 0.0092 +2026-04-08 18:42:49.982655: train_loss -0.0486 +2026-04-08 18:42:49.993497: val_loss -0.0616 +2026-04-08 18:42:49.996101: Pseudo dice [0.0, 0.0, 0.5301, 0.0, 0.0, 0.4018, 0.4104] +2026-04-08 18:42:49.998727: Epoch time: 104.45 s +2026-04-08 18:42:50.001225: Yayy! New best EMA pseudo Dice: 0.1478 +2026-04-08 18:42:52.874253: +2026-04-08 18:42:52.877308: Epoch 90 +2026-04-08 18:42:52.880617: Current learning rate: 0.00919 +2026-04-08 18:44:35.967717: train_loss -0.0392 +2026-04-08 18:44:35.975403: val_loss -0.0661 +2026-04-08 18:44:35.977684: Pseudo dice [0.0, 0.0, 0.1381, 0.0, 0.0, 0.5718, 0.5628] +2026-04-08 18:44:35.980170: Epoch time: 103.1 s +2026-04-08 18:44:35.982920: Yayy! New best EMA pseudo Dice: 0.1512 +2026-04-08 18:44:38.772976: +2026-04-08 18:44:38.774915: Epoch 91 +2026-04-08 18:44:38.776815: Current learning rate: 0.00918 +2026-04-08 18:46:22.678196: train_loss -0.0455 +2026-04-08 18:46:22.685067: val_loss -0.0407 +2026-04-08 18:46:22.687841: Pseudo dice [0.0, 0.0, 0.2543, 0.0, 0.0, 0.6038, 0.5905] +2026-04-08 18:46:22.690505: Epoch time: 103.91 s +2026-04-08 18:46:22.693218: Yayy! New best EMA pseudo Dice: 0.1568 +2026-04-08 18:46:25.603671: +2026-04-08 18:46:25.606558: Epoch 92 +2026-04-08 18:46:25.608614: Current learning rate: 0.00917 +2026-04-08 18:48:09.186105: train_loss -0.0487 +2026-04-08 18:48:09.191581: val_loss -0.0659 +2026-04-08 18:48:09.195210: Pseudo dice [0.0, 0.0, 0.6614, 0.0, 0.0, 0.2853, 0.3601] +2026-04-08 18:48:09.198198: Epoch time: 103.59 s +2026-04-08 18:48:09.200011: Yayy! 
New best EMA pseudo Dice: 0.1598 +2026-04-08 18:48:11.990909: +2026-04-08 18:48:11.993156: Epoch 93 +2026-04-08 18:48:11.994736: Current learning rate: 0.00916 +2026-04-08 18:49:54.151809: train_loss -0.0498 +2026-04-08 18:49:54.159419: val_loss -0.0487 +2026-04-08 18:49:54.161955: Pseudo dice [0.0, 0.0, 0.2569, 0.0, 0.0, 0.4253, 0.5366] +2026-04-08 18:49:54.164361: Epoch time: 102.16 s +2026-04-08 18:49:54.166485: Yayy! New best EMA pseudo Dice: 0.1612 +2026-04-08 18:49:56.910424: +2026-04-08 18:49:56.914854: Epoch 94 +2026-04-08 18:49:56.916489: Current learning rate: 0.00915 +2026-04-08 18:51:39.049746: train_loss -0.0549 +2026-04-08 18:51:39.057479: val_loss -0.0461 +2026-04-08 18:51:39.059430: Pseudo dice [0.0, 0.0, 0.4808, 0.0, 0.0, 0.3842, 0.3787] +2026-04-08 18:51:39.061184: Epoch time: 102.14 s +2026-04-08 18:51:39.062911: Yayy! New best EMA pseudo Dice: 0.1628 +2026-04-08 18:51:41.871771: +2026-04-08 18:51:41.873618: Epoch 95 +2026-04-08 18:51:41.875101: Current learning rate: 0.00914 +2026-04-08 18:53:25.175516: train_loss -0.0443 +2026-04-08 18:53:25.183529: val_loss -0.0811 +2026-04-08 18:53:25.185400: Pseudo dice [0.0, 0.0, 0.4332, 0.0, 0.0, 0.5271, 0.4668] +2026-04-08 18:53:25.187790: Epoch time: 103.31 s +2026-04-08 18:53:25.189888: Yayy! New best EMA pseudo Dice: 0.1669 +2026-04-08 18:53:28.083103: +2026-04-08 18:53:28.086635: Epoch 96 +2026-04-08 18:53:28.112556: Current learning rate: 0.00913 +2026-04-08 18:55:10.939559: train_loss -0.058 +2026-04-08 18:55:10.945551: val_loss -0.0596 +2026-04-08 18:55:10.947302: Pseudo dice [0.0, 0.0, 0.5892, 0.0, 0.0, 0.6382, 0.4009] +2026-04-08 18:55:10.949281: Epoch time: 102.86 s +2026-04-08 18:55:10.951197: Yayy! 
New best EMA pseudo Dice: 0.1735 +2026-04-08 18:55:13.608215: +2026-04-08 18:55:13.614005: Epoch 97 +2026-04-08 18:55:13.618568: Current learning rate: 0.00912 +2026-04-08 18:56:56.509068: train_loss -0.0545 +2026-04-08 18:56:56.515991: val_loss -0.0476 +2026-04-08 18:56:56.518310: Pseudo dice [0.0, 0.0, 0.5904, 0.0, 0.0, 0.5004, 0.3907] +2026-04-08 18:56:56.521103: Epoch time: 102.9 s +2026-04-08 18:56:56.523924: Yayy! New best EMA pseudo Dice: 0.1773 +2026-04-08 18:56:59.197316: +2026-04-08 18:56:59.199136: Epoch 98 +2026-04-08 18:56:59.200746: Current learning rate: 0.00911 +2026-04-08 18:58:42.602032: train_loss -0.0582 +2026-04-08 18:58:42.611430: val_loss -0.0601 +2026-04-08 18:58:42.613027: Pseudo dice [0.0, 0.0, 0.6146, 0.0, 0.0, 0.4239, 0.4487] +2026-04-08 18:58:42.615243: Epoch time: 103.41 s +2026-04-08 18:58:42.617387: Yayy! New best EMA pseudo Dice: 0.1808 +2026-04-08 18:58:45.440686: +2026-04-08 18:58:45.443268: Epoch 99 +2026-04-08 18:58:45.445204: Current learning rate: 0.0091 +2026-04-08 19:00:28.395489: train_loss -0.0575 +2026-04-08 19:00:28.406017: val_loss -0.0656 +2026-04-08 19:00:28.407980: Pseudo dice [0.0, 0.0, 0.5429, 0.0, 0.0, 0.3345, 0.5778] +2026-04-08 19:00:28.410748: Epoch time: 102.96 s +2026-04-08 19:00:30.167504: Yayy! New best EMA pseudo Dice: 0.1835 +2026-04-08 19:00:32.827636: +2026-04-08 19:00:32.830136: Epoch 100 +2026-04-08 19:00:32.831760: Current learning rate: 0.0091 +2026-04-08 19:02:15.391572: train_loss -0.0517 +2026-04-08 19:02:15.401093: val_loss -0.0608 +2026-04-08 19:02:15.402984: Pseudo dice [0.0, 0.0, 0.71, 0.0, 0.0, 0.3146, 0.5096] +2026-04-08 19:02:15.405264: Epoch time: 102.57 s +2026-04-08 19:02:15.406960: Yayy! 
New best EMA pseudo Dice: 0.1871 +2026-04-08 19:02:18.179014: +2026-04-08 19:02:18.181811: Epoch 101 +2026-04-08 19:02:18.183993: Current learning rate: 0.00909 +2026-04-08 19:04:00.760040: train_loss -0.0536 +2026-04-08 19:04:00.777424: val_loss -0.0518 +2026-04-08 19:04:00.779998: Pseudo dice [0.0, 0.0, 0.268, 0.0, 0.0, 0.5193, 0.4324] +2026-04-08 19:04:00.782411: Epoch time: 102.58 s +2026-04-08 19:04:01.809704: +2026-04-08 19:04:01.811303: Epoch 102 +2026-04-08 19:04:01.812881: Current learning rate: 0.00908 +2026-04-08 19:05:46.644511: train_loss -0.059 +2026-04-08 19:05:46.651694: val_loss -0.0705 +2026-04-08 19:05:46.653672: Pseudo dice [0.0, 0.0, 0.6342, 0.0, 0.0, 0.3449, 0.3734] +2026-04-08 19:05:46.659516: Epoch time: 104.84 s +2026-04-08 19:05:47.734107: +2026-04-08 19:05:47.736658: Epoch 103 +2026-04-08 19:05:47.738225: Current learning rate: 0.00907 +2026-04-08 19:07:29.878936: train_loss -0.0622 +2026-04-08 19:07:29.891143: val_loss -0.0617 +2026-04-08 19:07:29.893672: Pseudo dice [0.0, 0.0, 0.435, 0.0, 0.0, 0.694, 0.7728] +2026-04-08 19:07:29.895982: Epoch time: 102.15 s +2026-04-08 19:07:29.897692: Yayy! New best EMA pseudo Dice: 0.1951 +2026-04-08 19:07:33.829469: +2026-04-08 19:07:33.832131: Epoch 104 +2026-04-08 19:07:33.836853: Current learning rate: 0.00906 +2026-04-08 19:09:18.541260: train_loss -0.0697 +2026-04-08 19:09:18.546965: val_loss -0.0216 +2026-04-08 19:09:18.548734: Pseudo dice [0.0, 0.0, 0.166, 0.0, 0.0, 0.5744, 0.3168] +2026-04-08 19:09:18.551092: Epoch time: 104.72 s +2026-04-08 19:09:19.606763: +2026-04-08 19:09:19.608720: Epoch 105 +2026-04-08 19:09:19.610225: Current learning rate: 0.00905 +2026-04-08 19:11:01.931594: train_loss -0.0497 +2026-04-08 19:11:01.941653: val_loss -0.0602 +2026-04-08 19:11:01.943774: Pseudo dice [0.0, 0.0, 0.6279, 0.0, 0.0, 0.4591, 0.6365] +2026-04-08 19:11:01.946127: Epoch time: 102.33 s +2026-04-08 19:11:01.947786: Yayy! 
New best EMA pseudo Dice: 0.1962 +2026-04-08 19:11:04.654437: +2026-04-08 19:11:04.657624: Epoch 106 +2026-04-08 19:11:04.660728: Current learning rate: 0.00904 +2026-04-08 19:12:47.257127: train_loss -0.0587 +2026-04-08 19:12:47.270346: val_loss -0.0432 +2026-04-08 19:12:47.272485: Pseudo dice [0.0, 0.0, 0.6301, 0.0, 0.0, 0.3384, 0.4313] +2026-04-08 19:12:47.275124: Epoch time: 102.61 s +2026-04-08 19:12:47.277127: Yayy! New best EMA pseudo Dice: 0.1966 +2026-04-08 19:12:50.041935: +2026-04-08 19:12:50.043765: Epoch 107 +2026-04-08 19:12:50.045680: Current learning rate: 0.00903 +2026-04-08 19:14:33.424219: train_loss -0.0635 +2026-04-08 19:14:33.432138: val_loss -0.0258 +2026-04-08 19:14:33.434654: Pseudo dice [0.0, 0.0, 0.4324, 0.0, 0.0, 0.6101, 0.3508] +2026-04-08 19:14:33.437124: Epoch time: 103.39 s +2026-04-08 19:14:33.439473: Yayy! New best EMA pseudo Dice: 0.1968 +2026-04-08 19:14:36.196255: +2026-04-08 19:14:36.198453: Epoch 108 +2026-04-08 19:14:36.199993: Current learning rate: 0.00902 +2026-04-08 19:16:19.880585: train_loss -0.0616 +2026-04-08 19:16:19.893257: val_loss -0.0343 +2026-04-08 19:16:19.895475: Pseudo dice [0.0, 0.0, 0.4077, 0.0, 0.0, 0.4631, 0.2887] +2026-04-08 19:16:19.898750: Epoch time: 103.69 s +2026-04-08 19:16:20.980176: +2026-04-08 19:16:20.982251: Epoch 109 +2026-04-08 19:16:20.983798: Current learning rate: 0.00901 +2026-04-08 19:18:04.413027: train_loss -0.0579 +2026-04-08 19:18:04.422156: val_loss -0.0588 +2026-04-08 19:18:04.425727: Pseudo dice [0.0, 0.0, 0.7296, 0.0, 0.0, 0.2787, 0.6541] +2026-04-08 19:18:04.428284: Epoch time: 103.44 s +2026-04-08 19:18:04.431154: Yayy! 
New best EMA pseudo Dice: 0.1981 +2026-04-08 19:18:07.192120: +2026-04-08 19:18:07.195056: Epoch 110 +2026-04-08 19:18:07.197753: Current learning rate: 0.009 +2026-04-08 19:19:56.158638: train_loss -0.0489 +2026-04-08 19:19:56.168024: val_loss -0.0604 +2026-04-08 19:19:56.172540: Pseudo dice [0.0, 0.0, 0.754, 0.0, 0.0, 0.1805, 0.6117] +2026-04-08 19:19:56.175567: Epoch time: 108.97 s +2026-04-08 19:19:56.177841: Yayy! New best EMA pseudo Dice: 0.2004 +2026-04-08 19:19:58.936723: +2026-04-08 19:19:58.938718: Epoch 111 +2026-04-08 19:19:58.940368: Current learning rate: 0.009 +2026-04-08 19:21:41.384208: train_loss -0.0644 +2026-04-08 19:21:41.394009: val_loss -0.0586 +2026-04-08 19:21:41.399005: Pseudo dice [0.0, 0.0, 0.682, 0.0, 0.0, 0.3679, 0.3829] +2026-04-08 19:21:41.401634: Epoch time: 102.45 s +2026-04-08 19:21:41.404924: Yayy! New best EMA pseudo Dice: 0.2008 +2026-04-08 19:21:44.128177: +2026-04-08 19:21:44.130543: Epoch 112 +2026-04-08 19:21:44.132234: Current learning rate: 0.00899 +2026-04-08 19:23:27.358881: train_loss -0.0594 +2026-04-08 19:23:27.367668: val_loss -0.0331 +2026-04-08 19:23:27.369882: Pseudo dice [0.0, 0.0, 0.3969, 0.0, 0.0, 0.2059, 0.1816] +2026-04-08 19:23:27.372053: Epoch time: 103.23 s +2026-04-08 19:23:28.414644: +2026-04-08 19:23:28.416567: Epoch 113 +2026-04-08 19:23:28.419167: Current learning rate: 0.00898 +2026-04-08 19:25:11.291060: train_loss -0.0756 +2026-04-08 19:25:11.296955: val_loss -0.0599 +2026-04-08 19:25:11.298837: Pseudo dice [0.0, 0.0, 0.2623, 0.0001, 0.0, 0.4131, 0.6669] +2026-04-08 19:25:11.300966: Epoch time: 102.88 s +2026-04-08 19:25:12.340724: +2026-04-08 19:25:12.343254: Epoch 114 +2026-04-08 19:25:12.345165: Current learning rate: 0.00897 +2026-04-08 19:27:00.781136: train_loss -0.0662 +2026-04-08 19:27:00.792874: val_loss -0.0681 +2026-04-08 19:27:00.794996: Pseudo dice [0.0, 0.0, 0.6223, 0.0, 0.0, 0.5131, 0.5538] +2026-04-08 19:27:00.797574: Epoch time: 108.44 s +2026-04-08 19:27:01.828084: +2026-04-08 
19:27:01.830101: Epoch 115 +2026-04-08 19:27:01.831775: Current learning rate: 0.00896 +2026-04-08 19:28:46.215274: train_loss -0.0755 +2026-04-08 19:28:46.225211: val_loss -0.0419 +2026-04-08 19:28:46.228152: Pseudo dice [0.0, 0.0, 0.584, 0.0, 0.0, 0.4451, 0.6131] +2026-04-08 19:28:46.231127: Epoch time: 104.39 s +2026-04-08 19:28:47.303111: +2026-04-08 19:28:47.305615: Epoch 116 +2026-04-08 19:28:47.307957: Current learning rate: 0.00895 +2026-04-08 19:30:42.829431: train_loss -0.0783 +2026-04-08 19:30:42.842966: val_loss -0.0568 +2026-04-08 19:30:42.847644: Pseudo dice [0.0117, 0.0, 0.4344, 0.0, 0.0, 0.4381, 0.5744] +2026-04-08 19:30:42.853325: Epoch time: 115.53 s +2026-04-08 19:30:42.857091: Yayy! New best EMA pseudo Dice: 0.2014 +2026-04-08 19:30:45.859508: +2026-04-08 19:30:45.861797: Epoch 117 +2026-04-08 19:30:45.863937: Current learning rate: 0.00894 +2026-04-08 19:32:29.286704: train_loss -0.0838 +2026-04-08 19:32:29.296752: val_loss -0.0687 +2026-04-08 19:32:29.301609: Pseudo dice [0.1971, 0.0, 0.5775, 0.0, 0.0, 0.5484, 0.2852] +2026-04-08 19:32:29.304118: Epoch time: 103.43 s +2026-04-08 19:32:29.307473: Yayy! New best EMA pseudo Dice: 0.2042 +2026-04-08 19:32:32.200304: +2026-04-08 19:32:32.207813: Epoch 118 +2026-04-08 19:32:32.212467: Current learning rate: 0.00893 +2026-04-08 19:34:19.531593: train_loss -0.0874 +2026-04-08 19:34:19.538320: val_loss -0.0797 +2026-04-08 19:34:19.540809: Pseudo dice [0.4097, 0.0, 0.5777, 0.0, 0.0, 0.6864, 0.5648] +2026-04-08 19:34:19.543256: Epoch time: 107.33 s +2026-04-08 19:34:19.545564: Yayy! 
New best EMA pseudo Dice: 0.2158 +2026-04-08 19:34:22.470886: +2026-04-08 19:34:22.475303: Epoch 119 +2026-04-08 19:34:22.480619: Current learning rate: 0.00892 +2026-04-08 19:36:06.885329: train_loss -0.0679 +2026-04-08 19:36:06.893276: val_loss -0.0761 +2026-04-08 19:36:06.896359: Pseudo dice [0.2105, 0.0, 0.2302, 0.0, 0.0, 0.3836, 0.6262] +2026-04-08 19:36:06.899107: Epoch time: 104.42 s +2026-04-08 19:36:07.950747: +2026-04-08 19:36:07.952867: Epoch 120 +2026-04-08 19:36:07.955424: Current learning rate: 0.00891 +2026-04-08 19:37:50.855495: train_loss -0.0737 +2026-04-08 19:37:50.862157: val_loss -0.0707 +2026-04-08 19:37:50.864542: Pseudo dice [0.179, 0.0, 0.5077, 0.0075, 0.0, 0.5134, 0.641] +2026-04-08 19:37:50.867105: Epoch time: 102.91 s +2026-04-08 19:37:50.869680: Yayy! New best EMA pseudo Dice: 0.2199 +2026-04-08 19:37:53.728907: +2026-04-08 19:37:53.732380: Epoch 121 +2026-04-08 19:37:53.734592: Current learning rate: 0.0089 +2026-04-08 19:39:38.586788: train_loss -0.072 +2026-04-08 19:39:38.593492: val_loss -0.0723 +2026-04-08 19:39:38.595874: Pseudo dice [0.3079, 0.0, 0.5528, 0.0368, 0.0, 0.5417, 0.4144] +2026-04-08 19:39:38.599259: Epoch time: 104.86 s +2026-04-08 19:39:38.602296: Yayy! 
New best EMA pseudo Dice: 0.2243 +2026-04-08 19:39:41.404144: +2026-04-08 19:39:41.406682: Epoch 122 +2026-04-08 19:39:41.408539: Current learning rate: 0.00889 +2026-04-08 19:41:24.336979: train_loss -0.0756 +2026-04-08 19:41:24.342605: val_loss -0.0659 +2026-04-08 19:41:24.344533: Pseudo dice [0.1257, 0.1716, 0.0153, 0.009, 0.0, 0.66, 0.4028] +2026-04-08 19:41:24.346376: Epoch time: 102.94 s +2026-04-08 19:41:25.398772: +2026-04-08 19:41:25.400913: Epoch 123 +2026-04-08 19:41:25.402787: Current learning rate: 0.00889 +2026-04-08 19:43:08.807799: train_loss -0.0806 +2026-04-08 19:43:08.817624: val_loss -0.0707 +2026-04-08 19:43:08.819754: Pseudo dice [0.4387, 0.0908, 0.6629, 0.0, 0.0, 0.3709, 0.6603] +2026-04-08 19:43:08.822445: Epoch time: 103.41 s +2026-04-08 19:43:08.828146: Yayy! New best EMA pseudo Dice: 0.2313 +2026-04-08 19:43:11.758835: +2026-04-08 19:43:11.760721: Epoch 124 +2026-04-08 19:43:11.763719: Current learning rate: 0.00888 +2026-04-08 19:44:54.994488: train_loss -0.0837 +2026-04-08 19:44:55.005941: val_loss -0.0788 +2026-04-08 19:44:55.009294: Pseudo dice [0.1737, 0.265, 0.3569, 0.0, 0.0, 0.4251, 0.5018] +2026-04-08 19:44:55.012606: Epoch time: 103.24 s +2026-04-08 19:44:55.015471: Yayy! New best EMA pseudo Dice: 0.2328 +2026-04-08 19:44:57.901220: +2026-04-08 19:44:57.902911: Epoch 125 +2026-04-08 19:44:57.904735: Current learning rate: 0.00887 +2026-04-08 19:46:41.391819: train_loss -0.0793 +2026-04-08 19:46:41.399815: val_loss -0.0655 +2026-04-08 19:46:41.401904: Pseudo dice [0.2588, 0.2825, 0.5278, 0.0001, 0.0, 0.3773, 0.3293] +2026-04-08 19:46:41.404088: Epoch time: 103.49 s +2026-04-08 19:46:41.405838: Yayy! 
New best EMA pseudo Dice: 0.2349 +2026-04-08 19:46:44.210554: +2026-04-08 19:46:44.215669: Epoch 126 +2026-04-08 19:46:44.218907: Current learning rate: 0.00886 +2026-04-08 19:48:27.572137: train_loss -0.0854 +2026-04-08 19:48:27.579312: val_loss -0.0705 +2026-04-08 19:48:27.581485: Pseudo dice [0.2649, 0.0332, 0.4777, 0.0, 0.0, 0.566, 0.6893] +2026-04-08 19:48:27.583679: Epoch time: 103.36 s +2026-04-08 19:48:27.586092: Yayy! New best EMA pseudo Dice: 0.2404 +2026-04-08 19:48:30.395177: +2026-04-08 19:48:30.398026: Epoch 127 +2026-04-08 19:48:30.399882: Current learning rate: 0.00885 +2026-04-08 19:50:15.551723: train_loss -0.0958 +2026-04-08 19:50:15.558353: val_loss -0.0685 +2026-04-08 19:50:15.560369: Pseudo dice [0.2987, 0.0656, 0.3685, 0.0, 0.018, 0.6689, 0.5168] +2026-04-08 19:50:15.563016: Epoch time: 105.16 s +2026-04-08 19:50:15.565138: Yayy! New best EMA pseudo Dice: 0.244 +2026-04-08 19:50:18.333245: +2026-04-08 19:50:18.335158: Epoch 128 +2026-04-08 19:50:18.337257: Current learning rate: 0.00884 +2026-04-08 19:52:03.058221: train_loss -0.0828 +2026-04-08 19:52:03.069629: val_loss -0.1017 +2026-04-08 19:52:03.072579: Pseudo dice [0.4846, 0.0065, 0.6559, 0.0, 0.0, 0.4829, 0.683] +2026-04-08 19:52:03.077647: Epoch time: 104.73 s +2026-04-08 19:52:03.080186: Yayy! New best EMA pseudo Dice: 0.2527 +2026-04-08 19:52:05.905577: +2026-04-08 19:52:05.908603: Epoch 129 +2026-04-08 19:52:05.910267: Current learning rate: 0.00883 +2026-04-08 19:53:57.352780: train_loss -0.0896 +2026-04-08 19:53:57.362728: val_loss -0.0745 +2026-04-08 19:53:57.367092: Pseudo dice [0.2397, 0.1164, 0.6491, 0.0008, 0.0187, 0.6338, 0.3012] +2026-04-08 19:53:57.370143: Epoch time: 111.45 s +2026-04-08 19:53:57.372606: Yayy! 
New best EMA pseudo Dice: 0.2554 +2026-04-08 19:54:00.381276: +2026-04-08 19:54:00.383459: Epoch 130 +2026-04-08 19:54:00.386343: Current learning rate: 0.00882 +2026-04-08 19:55:44.332126: train_loss -0.093 +2026-04-08 19:55:44.339080: val_loss -0.0578 +2026-04-08 19:55:44.341033: Pseudo dice [0.2134, 0.1007, 0.5665, 0.0064, 0.0445, 0.3955, 0.5085] +2026-04-08 19:55:44.343711: Epoch time: 103.95 s +2026-04-08 19:55:44.345544: Yayy! New best EMA pseudo Dice: 0.2561 +2026-04-08 19:55:47.216775: +2026-04-08 19:55:47.219261: Epoch 131 +2026-04-08 19:55:47.221471: Current learning rate: 0.00881 +2026-04-08 19:57:31.021216: train_loss -0.0843 +2026-04-08 19:57:31.027635: val_loss -0.088 +2026-04-08 19:57:31.031151: Pseudo dice [0.2784, 0.1274, 0.5323, 0.1525, 0.2625, 0.3654, 0.6243] +2026-04-08 19:57:31.034439: Epoch time: 103.81 s +2026-04-08 19:57:31.036822: Yayy! New best EMA pseudo Dice: 0.2639 +2026-04-08 19:57:33.875047: +2026-04-08 19:57:33.877601: Epoch 132 +2026-04-08 19:57:33.880012: Current learning rate: 0.0088 +2026-04-08 19:59:17.466039: train_loss -0.0997 +2026-04-08 19:59:17.473890: val_loss -0.0824 +2026-04-08 19:59:17.475917: Pseudo dice [0.2678, 0.267, 0.5826, 0.0487, 0.0707, 0.6319, 0.5415] +2026-04-08 19:59:17.478320: Epoch time: 103.59 s +2026-04-08 19:59:17.481694: Yayy! New best EMA pseudo Dice: 0.272 +2026-04-08 19:59:20.482570: +2026-04-08 19:59:20.486805: Epoch 133 +2026-04-08 19:59:20.491027: Current learning rate: 0.00879 +2026-04-08 20:01:03.693040: train_loss -0.0828 +2026-04-08 20:01:03.698759: val_loss -0.1058 +2026-04-08 20:01:03.701150: Pseudo dice [0.5304, 0.2495, 0.6523, 0.0028, 0.0796, 0.6071, 0.5254] +2026-04-08 20:01:03.703614: Epoch time: 103.21 s +2026-04-08 20:01:03.705504: Yayy! 
New best EMA pseudo Dice: 0.2826 +2026-04-08 20:01:06.455318: +2026-04-08 20:01:06.457429: Epoch 134 +2026-04-08 20:01:06.459495: Current learning rate: 0.00879 +2026-04-08 20:02:50.108752: train_loss -0.1075 +2026-04-08 20:02:50.117920: val_loss -0.05 +2026-04-08 20:02:50.120257: Pseudo dice [0.2054, 0.0476, 0.4242, 0.1296, 0.1093, 0.5599, 0.4568] +2026-04-08 20:02:50.123607: Epoch time: 103.66 s +2026-04-08 20:02:51.208713: +2026-04-08 20:02:51.210800: Epoch 135 +2026-04-08 20:02:51.213363: Current learning rate: 0.00878 +2026-04-08 20:04:37.366537: train_loss -0.081 +2026-04-08 20:04:37.373641: val_loss -0.0969 +2026-04-08 20:04:37.375252: Pseudo dice [0.4851, 0.1998, 0.6668, 0.0048, 0.2165, 0.2939, 0.7823] +2026-04-08 20:04:37.377802: Epoch time: 106.16 s +2026-04-08 20:04:37.380800: Yayy! New best EMA pseudo Dice: 0.2916 +2026-04-08 20:04:40.298788: +2026-04-08 20:04:40.300511: Epoch 136 +2026-04-08 20:04:40.302365: Current learning rate: 0.00877 +2026-04-08 20:06:22.973292: train_loss -0.1038 +2026-04-08 20:06:22.980268: val_loss -0.0997 +2026-04-08 20:06:22.982666: Pseudo dice [0.2821, 0.1455, 0.5156, 0.0007, 0.3478, 0.513, 0.714] +2026-04-08 20:06:22.984972: Epoch time: 102.68 s +2026-04-08 20:06:22.986741: Yayy! New best EMA pseudo Dice: 0.2984 +2026-04-08 20:06:25.864044: +2026-04-08 20:06:25.866285: Epoch 137 +2026-04-08 20:06:25.868045: Current learning rate: 0.00876 +2026-04-08 20:08:09.855687: train_loss -0.1208 +2026-04-08 20:08:09.866347: val_loss -0.0821 +2026-04-08 20:08:09.868730: Pseudo dice [0.5883, 0.0061, 0.6713, 0.1039, 0.276, 0.2492, 0.6277] +2026-04-08 20:08:09.871876: Epoch time: 104.0 s +2026-04-08 20:08:09.873958: Yayy! 
New best EMA pseudo Dice: 0.3046 +2026-04-08 20:08:12.601884: +2026-04-08 20:08:12.604860: Epoch 138 +2026-04-08 20:08:12.609604: Current learning rate: 0.00875 +2026-04-08 20:09:56.730650: train_loss -0.1044 +2026-04-08 20:09:56.740278: val_loss -0.0958 +2026-04-08 20:09:56.742505: Pseudo dice [0.2731, 0.4635, 0.4513, 0.0473, 0.1337, 0.2129, 0.5536] +2026-04-08 20:09:56.745933: Epoch time: 104.13 s +2026-04-08 20:09:56.748484: Yayy! New best EMA pseudo Dice: 0.3047 +2026-04-08 20:09:59.698940: +2026-04-08 20:09:59.701083: Epoch 139 +2026-04-08 20:09:59.703146: Current learning rate: 0.00874 +2026-04-08 20:11:45.704773: train_loss -0.1112 +2026-04-08 20:11:45.716625: val_loss -0.0523 +2026-04-08 20:11:45.719555: Pseudo dice [0.4006, 0.1035, 0.3908, 0.2632, 0.3503, 0.3368, 0.1259] +2026-04-08 20:11:45.723293: Epoch time: 106.01 s +2026-04-08 20:11:46.806103: +2026-04-08 20:11:46.808620: Epoch 140 +2026-04-08 20:11:46.810439: Current learning rate: 0.00873 +2026-04-08 20:13:31.106301: train_loss -0.1106 +2026-04-08 20:13:31.114247: val_loss -0.0983 +2026-04-08 20:13:31.117998: Pseudo dice [0.479, 0.1801, 0.6769, 0.1515, 0.2849, 0.349, 0.4503] +2026-04-08 20:13:31.120312: Epoch time: 104.3 s +2026-04-08 20:13:31.122714: Yayy! New best EMA pseudo Dice: 0.3089 +2026-04-08 20:13:33.973683: +2026-04-08 20:13:33.975903: Epoch 141 +2026-04-08 20:13:33.977611: Current learning rate: 0.00872 +2026-04-08 20:15:18.193558: train_loss -0.0932 +2026-04-08 20:15:18.202235: val_loss -0.0996 +2026-04-08 20:15:18.204528: Pseudo dice [0.5694, 0.0325, 0.7375, 0.3018, 0.2155, 0.3208, 0.6058] +2026-04-08 20:15:18.206763: Epoch time: 104.22 s +2026-04-08 20:15:18.208447: Yayy! 
New best EMA pseudo Dice: 0.3177 +2026-04-08 20:15:21.006213: +2026-04-08 20:15:21.008187: Epoch 142 +2026-04-08 20:15:21.009843: Current learning rate: 0.00871 +2026-04-08 20:17:03.957936: train_loss -0.1042 +2026-04-08 20:17:03.965583: val_loss -0.1143 +2026-04-08 20:17:03.967732: Pseudo dice [0.5466, 0.0649, 0.3844, 0.0022, 0.2522, 0.6868, 0.5392] +2026-04-08 20:17:03.970685: Epoch time: 102.95 s +2026-04-08 20:17:03.973434: Yayy! New best EMA pseudo Dice: 0.3213 +2026-04-08 20:17:06.788635: +2026-04-08 20:17:06.792424: Epoch 143 +2026-04-08 20:17:06.795781: Current learning rate: 0.0087 +2026-04-08 20:18:50.424725: train_loss -0.1098 +2026-04-08 20:18:50.430732: val_loss -0.0856 +2026-04-08 20:18:50.432825: Pseudo dice [0.3285, 0.3788, 0.7186, 0.1197, 0.1483, 0.5051, 0.4651] +2026-04-08 20:18:50.435917: Epoch time: 103.64 s +2026-04-08 20:18:50.440283: Yayy! New best EMA pseudo Dice: 0.3273 +2026-04-08 20:18:53.329540: +2026-04-08 20:18:53.331800: Epoch 144 +2026-04-08 20:18:53.333655: Current learning rate: 0.00869 +2026-04-08 20:20:37.562777: train_loss -0.1109 +2026-04-08 20:20:37.574886: val_loss -0.1071 +2026-04-08 20:20:37.582498: Pseudo dice [0.4833, 0.2542, 0.5979, 0.0022, 0.2367, 0.5462, 0.458] +2026-04-08 20:20:37.590279: Epoch time: 104.24 s +2026-04-08 20:20:37.594610: Yayy! New best EMA pseudo Dice: 0.3314 +2026-04-08 20:20:40.539731: +2026-04-08 20:20:40.542333: Epoch 145 +2026-04-08 20:20:40.546351: Current learning rate: 0.00868 +2026-04-08 20:22:23.381543: train_loss -0.1125 +2026-04-08 20:22:23.387242: val_loss -0.0818 +2026-04-08 20:22:23.389979: Pseudo dice [0.1283, 0.2742, 0.5678, 0.3618, 0.2775, 0.4419, 0.6592] +2026-04-08 20:22:23.392062: Epoch time: 102.85 s +2026-04-08 20:22:23.394103: Yayy! 
New best EMA pseudo Dice: 0.337 +2026-04-08 20:22:26.182282: +2026-04-08 20:22:26.184117: Epoch 146 +2026-04-08 20:22:26.186037: Current learning rate: 0.00868 +2026-04-08 20:24:08.951986: train_loss -0.1069 +2026-04-08 20:24:08.960637: val_loss -0.054 +2026-04-08 20:24:08.963294: Pseudo dice [0.0894, 0.2893, 0.4797, 0.2926, 0.0622, 0.2786, 0.2835] +2026-04-08 20:24:08.966570: Epoch time: 102.77 s +2026-04-08 20:24:10.052482: +2026-04-08 20:24:10.055083: Epoch 147 +2026-04-08 20:24:10.057154: Current learning rate: 0.00867 +2026-04-08 20:25:53.718129: train_loss -0.107 +2026-04-08 20:25:53.729346: val_loss -0.095 +2026-04-08 20:25:53.733804: Pseudo dice [0.2202, 0.1429, 0.6315, 0.4707, 0.2502, 0.3292, 0.749] +2026-04-08 20:25:53.739447: Epoch time: 103.67 s +2026-04-08 20:25:54.807710: +2026-04-08 20:25:54.809516: Epoch 148 +2026-04-08 20:25:54.811290: Current learning rate: 0.00866 +2026-04-08 20:27:37.947128: train_loss -0.0881 +2026-04-08 20:27:37.956920: val_loss -0.0885 +2026-04-08 20:27:37.960675: Pseudo dice [0.5776, 0.1609, 0.3492, 0.0329, 0.0986, 0.4112, 0.5917] +2026-04-08 20:27:37.965731: Epoch time: 103.14 s +2026-04-08 20:27:39.030759: +2026-04-08 20:27:39.033257: Epoch 149 +2026-04-08 20:27:39.035531: Current learning rate: 0.00865 +2026-04-08 20:29:21.964679: train_loss -0.0997 +2026-04-08 20:29:21.972288: val_loss -0.0976 +2026-04-08 20:29:21.974360: Pseudo dice [0.3192, 0.0079, 0.6308, 0.2234, 0.2772, 0.3709, 0.6341] +2026-04-08 20:29:21.977496: Epoch time: 102.94 s +2026-04-08 20:29:24.725178: +2026-04-08 20:29:24.726951: Epoch 150 +2026-04-08 20:29:24.728730: Current learning rate: 0.00864 +2026-04-08 20:31:06.928113: train_loss -0.1147 +2026-04-08 20:31:06.934956: val_loss -0.123 +2026-04-08 20:31:06.936918: Pseudo dice [0.5099, 0.0908, 0.3965, 0.292, 0.3101, 0.6497, 0.6214] +2026-04-08 20:31:06.939349: Epoch time: 102.21 s +2026-04-08 20:31:06.941523: Yayy! 
New best EMA pseudo Dice: 0.3431 +2026-04-08 20:31:09.799325: +2026-04-08 20:31:09.801259: Epoch 151 +2026-04-08 20:31:09.802749: Current learning rate: 0.00863 +2026-04-08 20:32:54.914012: train_loss -0.1179 +2026-04-08 20:32:54.921327: val_loss -0.1069 +2026-04-08 20:32:54.924395: Pseudo dice [0.2055, 0.0992, 0.6417, 0.1314, 0.3858, 0.6225, 0.6398] +2026-04-08 20:32:54.926752: Epoch time: 105.12 s +2026-04-08 20:32:54.929316: Yayy! New best EMA pseudo Dice: 0.3477 +2026-04-08 20:32:58.086168: +2026-04-08 20:32:58.088886: Epoch 152 +2026-04-08 20:32:58.090521: Current learning rate: 0.00862 +2026-04-08 20:34:41.051760: train_loss -0.1239 +2026-04-08 20:34:41.060237: val_loss -0.0853 +2026-04-08 20:34:41.061953: Pseudo dice [0.4284, 0.1944, 0.6548, 0.1111, 0.167, 0.4314, 0.5282] +2026-04-08 20:34:41.070520: Epoch time: 102.97 s +2026-04-08 20:34:41.073843: Yayy! New best EMA pseudo Dice: 0.3489 +2026-04-08 20:34:43.919539: +2026-04-08 20:34:43.921831: Epoch 153 +2026-04-08 20:34:43.923859: Current learning rate: 0.00861 +2026-04-08 20:36:31.208575: train_loss -0.1225 +2026-04-08 20:36:31.215478: val_loss -0.1158 +2026-04-08 20:36:31.218176: Pseudo dice [0.5759, 0.2079, 0.4872, 0.7463, 0.3116, 0.5964, 0.6574] +2026-04-08 20:36:31.221326: Epoch time: 107.29 s +2026-04-08 20:36:31.223877: Yayy! 
New best EMA pseudo Dice: 0.3652 +2026-04-08 20:36:34.152976: +2026-04-08 20:36:34.154753: Epoch 154 +2026-04-08 20:36:34.156716: Current learning rate: 0.0086 +2026-04-08 20:38:18.868069: train_loss -0.1084 +2026-04-08 20:38:18.879596: val_loss -0.1042 +2026-04-08 20:38:18.885154: Pseudo dice [0.4764, 0.337, 0.382, 0.2629, 0.1122, 0.2911, 0.6122] +2026-04-08 20:38:18.889335: Epoch time: 104.72 s +2026-04-08 20:38:20.004488: +2026-04-08 20:38:20.007427: Epoch 155 +2026-04-08 20:38:20.010254: Current learning rate: 0.00859 +2026-04-08 20:40:04.174385: train_loss -0.1202 +2026-04-08 20:40:04.182691: val_loss -0.1357 +2026-04-08 20:40:04.184712: Pseudo dice [0.3642, 0.1176, 0.6904, 0.5421, 0.2599, 0.3384, 0.6314] +2026-04-08 20:40:04.187254: Epoch time: 104.17 s +2026-04-08 20:40:04.189277: Yayy! New best EMA pseudo Dice: 0.3697 +2026-04-08 20:40:07.100198: +2026-04-08 20:40:07.102895: Epoch 156 +2026-04-08 20:40:07.104773: Current learning rate: 0.00858 +2026-04-08 20:41:50.333249: train_loss -0.1273 +2026-04-08 20:41:50.339878: val_loss -0.0923 +2026-04-08 20:41:50.341876: Pseudo dice [0.4567, 0.0732, 0.1983, 0.4463, 0.3754, 0.3813, 0.6279] +2026-04-08 20:41:50.344120: Epoch time: 103.24 s +2026-04-08 20:41:51.433681: +2026-04-08 20:41:51.436169: Epoch 157 +2026-04-08 20:41:51.438633: Current learning rate: 0.00858 +2026-04-08 20:43:34.795958: train_loss -0.1122 +2026-04-08 20:43:34.803608: val_loss -0.1089 +2026-04-08 20:43:34.805856: Pseudo dice [0.4295, 0.5462, 0.4744, 0.3466, 0.2304, 0.6912, 0.6717] +2026-04-08 20:43:34.808184: Epoch time: 103.37 s +2026-04-08 20:43:34.810758: Yayy! 
New best EMA pseudo Dice: 0.3808 +2026-04-08 20:43:37.722564: +2026-04-08 20:43:37.724455: Epoch 158 +2026-04-08 20:43:37.726561: Current learning rate: 0.00857 +2026-04-08 20:45:21.309993: train_loss -0.11 +2026-04-08 20:45:21.315910: val_loss -0.0771 +2026-04-08 20:45:21.317909: Pseudo dice [0.5325, 0.2967, 0.5593, 0.6003, 0.0842, 0.4513, 0.2181] +2026-04-08 20:45:21.319870: Epoch time: 103.59 s +2026-04-08 20:45:21.321660: Yayy! New best EMA pseudo Dice: 0.3819 +2026-04-08 20:45:24.145677: +2026-04-08 20:45:24.150039: Epoch 159 +2026-04-08 20:45:24.153049: Current learning rate: 0.00856 +2026-04-08 20:47:10.449735: train_loss -0.1104 +2026-04-08 20:47:10.456861: val_loss -0.0817 +2026-04-08 20:47:10.459364: Pseudo dice [0.0709, 0.1163, 0.4085, 0.3923, 0.2299, 0.4129, 0.7599] +2026-04-08 20:47:10.461525: Epoch time: 106.31 s +2026-04-08 20:47:11.621956: +2026-04-08 20:47:11.623840: Epoch 160 +2026-04-08 20:47:11.625974: Current learning rate: 0.00855 +2026-04-08 20:48:58.942192: train_loss -0.1108 +2026-04-08 20:48:58.950057: val_loss -0.1085 +2026-04-08 20:48:58.952003: Pseudo dice [0.4127, 0.2247, 0.367, 0.607, 0.1143, 0.5148, 0.6303] +2026-04-08 20:48:58.954419: Epoch time: 107.32 s +2026-04-08 20:49:00.056610: +2026-04-08 20:49:00.060806: Epoch 161 +2026-04-08 20:49:00.065207: Current learning rate: 0.00854 +2026-04-08 20:51:17.415978: train_loss -0.1115 +2026-04-08 20:51:17.433659: val_loss -0.1046 +2026-04-08 20:51:17.446068: Pseudo dice [0.3907, 0.4219, 0.294, 0.558, 0.09, 0.532, 0.622] +2026-04-08 20:51:17.451651: Epoch time: 137.36 s +2026-04-08 20:51:17.457647: Yayy! 
New best EMA pseudo Dice: 0.3845 +2026-04-08 20:51:20.500854: +2026-04-08 20:51:20.509115: Epoch 162 +2026-04-08 20:51:20.514885: Current learning rate: 0.00853 +2026-04-08 20:53:36.775778: train_loss -0.1228 +2026-04-08 20:53:36.790091: val_loss -0.0787 +2026-04-08 20:53:36.794481: Pseudo dice [0.411, 0.1985, 0.3372, 0.7221, 0.206, 0.2696, 0.6055] +2026-04-08 20:53:36.800169: Epoch time: 136.28 s +2026-04-08 20:53:36.803699: Yayy! New best EMA pseudo Dice: 0.3853 +2026-04-08 20:53:40.018741: +2026-04-08 20:53:40.022559: Epoch 163 +2026-04-08 20:53:40.026008: Current learning rate: 0.00852 +2026-04-08 20:55:52.990085: train_loss -0.1164 +2026-04-08 20:55:53.003451: val_loss -0.0953 +2026-04-08 20:55:53.011103: Pseudo dice [0.4317, 0.2293, 0.2292, 0.0015, 0.1066, 0.8286, 0.6188] +2026-04-08 20:55:53.014856: Epoch time: 132.97 s +2026-04-08 20:55:54.162262: +2026-04-08 20:55:54.166222: Epoch 164 +2026-04-08 20:55:54.170452: Current learning rate: 0.00851 +2026-04-08 20:58:07.762532: train_loss -0.1249 +2026-04-08 20:58:07.771287: val_loss -0.0941 +2026-04-08 20:58:07.773803: Pseudo dice [0.3975, 0.0197, 0.6391, 0.4271, 0.2221, 0.1797, 0.5747] +2026-04-08 20:58:07.776808: Epoch time: 133.6 s +2026-04-08 20:58:08.855317: +2026-04-08 20:58:08.857478: Epoch 165 +2026-04-08 20:58:08.859426: Current learning rate: 0.0085 +2026-04-08 21:00:08.888547: train_loss -0.1225 +2026-04-08 21:00:08.894540: val_loss -0.1343 +2026-04-08 21:00:08.898206: Pseudo dice [0.4426, 0.1448, 0.7085, 0.1928, 0.3107, 0.6139, 0.7443] +2026-04-08 21:00:08.901122: Epoch time: 120.04 s +2026-04-08 21:00:08.903370: Yayy! 
New best EMA pseudo Dice: 0.3859 +2026-04-08 21:00:11.780667: +2026-04-08 21:00:11.783237: Epoch 166 +2026-04-08 21:00:11.786619: Current learning rate: 0.00849 +2026-04-08 21:01:56.887800: train_loss -0.1275 +2026-04-08 21:01:56.898097: val_loss -0.1025 +2026-04-08 21:01:56.900404: Pseudo dice [0.4175, 0.0884, 0.5583, 0.5727, 0.2449, 0.3068, 0.5111] +2026-04-08 21:01:56.904031: Epoch time: 105.11 s +2026-04-08 21:01:57.984977: +2026-04-08 21:01:57.988460: Epoch 167 +2026-04-08 21:01:57.991263: Current learning rate: 0.00848 +2026-04-08 21:03:43.543881: train_loss -0.1246 +2026-04-08 21:03:43.551357: val_loss -0.1187 +2026-04-08 21:03:43.555116: Pseudo dice [0.4639, 0.1362, 0.7238, 0.4823, 0.2064, 0.7353, 0.7319] +2026-04-08 21:03:43.557846: Epoch time: 105.56 s +2026-04-08 21:03:43.560169: Yayy! New best EMA pseudo Dice: 0.397 +2026-04-08 21:03:46.437505: +2026-04-08 21:03:46.439165: Epoch 168 +2026-04-08 21:03:46.441104: Current learning rate: 0.00847 +2026-04-08 21:05:30.298184: train_loss -0.1299 +2026-04-08 21:05:30.303812: val_loss -0.0923 +2026-04-08 21:05:30.306430: Pseudo dice [0.7595, 0.09, 0.6774, 0.0008, 0.3195, 0.4499, 0.4301] +2026-04-08 21:05:30.309851: Epoch time: 103.86 s +2026-04-08 21:05:31.419643: +2026-04-08 21:05:31.421867: Epoch 169 +2026-04-08 21:05:31.424533: Current learning rate: 0.00847 +2026-04-08 21:07:15.198596: train_loss -0.1139 +2026-04-08 21:07:15.207377: val_loss -0.0951 +2026-04-08 21:07:15.209798: Pseudo dice [0.1973, 0.1975, 0.4761, 0.4328, 0.2171, 0.2084, 0.6786] +2026-04-08 21:07:15.213010: Epoch time: 103.78 s +2026-04-08 21:07:17.380807: +2026-04-08 21:07:17.383795: Epoch 170 +2026-04-08 21:07:17.386754: Current learning rate: 0.00846 +2026-04-08 21:09:01.011827: train_loss -0.1308 +2026-04-08 21:09:01.021875: val_loss -0.1244 +2026-04-08 21:09:01.025058: Pseudo dice [0.3085, 0.0253, 0.5579, 0.3905, 0.3112, 0.6063, 0.761] +2026-04-08 21:09:01.027935: Epoch time: 103.63 s +2026-04-08 21:09:02.131827: +2026-04-08 
21:09:02.134252: Epoch 171 +2026-04-08 21:09:02.136481: Current learning rate: 0.00845 +2026-04-08 21:10:45.538879: train_loss -0.1153 +2026-04-08 21:10:45.555006: val_loss -0.0866 +2026-04-08 21:10:45.560429: Pseudo dice [0.1056, 0.3756, 0.6535, 0.1316, 0.1279, 0.1479, 0.741] +2026-04-08 21:10:45.565677: Epoch time: 103.41 s +2026-04-08 21:10:46.662771: +2026-04-08 21:10:46.666091: Epoch 172 +2026-04-08 21:10:46.668808: Current learning rate: 0.00844 +2026-04-08 21:12:29.516588: train_loss -0.122 +2026-04-08 21:12:29.523276: val_loss -0.0959 +2026-04-08 21:12:29.526230: Pseudo dice [0.2807, 0.4168, 0.708, 0.392, 0.1972, 0.338, 0.6898] +2026-04-08 21:12:29.528866: Epoch time: 102.86 s +2026-04-08 21:12:30.626050: +2026-04-08 21:12:30.629053: Epoch 173 +2026-04-08 21:12:30.631998: Current learning rate: 0.00843 +2026-04-08 21:14:13.840582: train_loss -0.1376 +2026-04-08 21:14:13.848559: val_loss -0.0982 +2026-04-08 21:14:13.850283: Pseudo dice [0.326, 0.3658, 0.5479, 0.4906, 0.2971, 0.3055, 0.7852] +2026-04-08 21:14:13.854598: Epoch time: 103.22 s +2026-04-08 21:14:13.859834: Yayy! 
New best EMA pseudo Dice: 0.3972 +2026-04-08 21:14:16.729877: +2026-04-08 21:14:16.732220: Epoch 174 +2026-04-08 21:14:16.734086: Current learning rate: 0.00842 +2026-04-08 21:15:59.788598: train_loss -0.1332 +2026-04-08 21:15:59.796593: val_loss -0.0881 +2026-04-08 21:15:59.799309: Pseudo dice [0.4761, 0.3536, 0.6288, 0.2058, 0.3637, 0.2209, 0.3555] +2026-04-08 21:15:59.801419: Epoch time: 103.06 s +2026-04-08 21:16:00.911462: +2026-04-08 21:16:00.915111: Epoch 175 +2026-04-08 21:16:00.919962: Current learning rate: 0.00841 +2026-04-08 21:17:44.113493: train_loss -0.1175 +2026-04-08 21:17:44.119663: val_loss -0.0963 +2026-04-08 21:17:44.121763: Pseudo dice [0.3365, 0.0369, 0.6326, 0.3857, 0.2566, 0.5973, 0.4869] +2026-04-08 21:17:44.124185: Epoch time: 103.21 s +2026-04-08 21:17:45.221689: +2026-04-08 21:17:45.223679: Epoch 176 +2026-04-08 21:17:45.225759: Current learning rate: 0.0084 +2026-04-08 21:19:31.265658: train_loss -0.1252 +2026-04-08 21:19:31.283109: val_loss -0.097 +2026-04-08 21:19:31.287559: Pseudo dice [0.1512, 0.1102, 0.5453, 0.6344, 0.1891, 0.5368, 0.7558] +2026-04-08 21:19:31.293759: Epoch time: 106.05 s +2026-04-08 21:19:32.425747: +2026-04-08 21:19:32.431013: Epoch 177 +2026-04-08 21:19:32.438679: Current learning rate: 0.00839 +2026-04-08 21:21:16.265404: train_loss -0.1244 +2026-04-08 21:21:16.272476: val_loss -0.0911 +2026-04-08 21:21:16.274715: Pseudo dice [0.1201, 0.381, 0.3929, 0.6422, 0.2039, 0.5772, 0.5933] +2026-04-08 21:21:16.277485: Epoch time: 103.84 s +2026-04-08 21:21:16.279628: Yayy! New best EMA pseudo Dice: 0.3985 +2026-04-08 21:21:19.168406: +2026-04-08 21:21:19.170291: Epoch 178 +2026-04-08 21:21:19.173304: Current learning rate: 0.00838 +2026-04-08 21:23:02.574317: train_loss -0.1381 +2026-04-08 21:23:02.588203: val_loss -0.1037 +2026-04-08 21:23:02.591215: Pseudo dice [0.2252, 0.0181, 0.7325, 0.5345, 0.2076, 0.4621, 0.7712] +2026-04-08 21:23:02.594188: Epoch time: 103.41 s +2026-04-08 21:23:02.596828: Yayy! 
New best EMA pseudo Dice: 0.4008 +2026-04-08 21:23:05.478584: +2026-04-08 21:23:05.480149: Epoch 179 +2026-04-08 21:23:05.481718: Current learning rate: 0.00837 +2026-04-08 21:24:49.902190: train_loss -0.1184 +2026-04-08 21:24:49.910218: val_loss -0.1139 +2026-04-08 21:24:49.912620: Pseudo dice [0.2349, 0.0609, 0.5011, 0.3505, 0.4808, 0.6074, 0.6128] +2026-04-08 21:24:49.915347: Epoch time: 104.43 s +2026-04-08 21:24:49.918405: Yayy! New best EMA pseudo Dice: 0.4014 +2026-04-08 21:24:52.764148: +2026-04-08 21:24:52.766443: Epoch 180 +2026-04-08 21:24:52.767978: Current learning rate: 0.00836 +2026-04-08 21:26:35.829805: train_loss -0.1292 +2026-04-08 21:26:35.837603: val_loss -0.1282 +2026-04-08 21:26:35.839828: Pseudo dice [0.5153, 0.0967, 0.7928, 0.2928, 0.3379, 0.6667, 0.696] +2026-04-08 21:26:35.842660: Epoch time: 103.07 s +2026-04-08 21:26:35.844688: Yayy! New best EMA pseudo Dice: 0.4098 +2026-04-08 21:26:38.681158: +2026-04-08 21:26:38.684686: Epoch 181 +2026-04-08 21:26:38.686527: Current learning rate: 0.00836 +2026-04-08 21:28:23.049409: train_loss -0.1488 +2026-04-08 21:28:23.059059: val_loss -0.0995 +2026-04-08 21:28:23.061097: Pseudo dice [0.1939, 0.2162, 0.7359, 0.1652, 0.3754, 0.2087, 0.7453] +2026-04-08 21:28:23.064175: Epoch time: 104.37 s +2026-04-08 21:28:24.178790: +2026-04-08 21:28:24.182800: Epoch 182 +2026-04-08 21:28:24.187243: Current learning rate: 0.00835 +2026-04-08 21:30:09.770861: train_loss -0.1322 +2026-04-08 21:30:09.779025: val_loss -0.0955 +2026-04-08 21:30:09.780889: Pseudo dice [0.6006, 0.2717, 0.6614, 0.0824, 0.2204, 0.4479, 0.4857] +2026-04-08 21:30:09.783043: Epoch time: 105.6 s +2026-04-08 21:30:10.876096: +2026-04-08 21:30:10.878180: Epoch 183 +2026-04-08 21:30:10.880869: Current learning rate: 0.00834 +2026-04-08 21:31:55.214263: train_loss -0.1425 +2026-04-08 21:31:55.224669: val_loss -0.1085 +2026-04-08 21:31:55.229738: Pseudo dice [0.5296, 0.5186, 0.4671, 0.7022, 0.2659, 0.472, 0.4697] +2026-04-08 21:31:55.233375: 
Epoch time: 104.34 s +2026-04-08 21:31:55.236519: Yayy! New best EMA pseudo Dice: 0.4139 +2026-04-08 21:31:58.192029: +2026-04-08 21:31:58.194203: Epoch 184 +2026-04-08 21:31:58.196350: Current learning rate: 0.00833 +2026-04-08 21:33:41.346943: train_loss -0.1304 +2026-04-08 21:33:41.352693: val_loss -0.1029 +2026-04-08 21:33:41.355030: Pseudo dice [0.5639, 0.1439, 0.5522, 0.1268, 0.3257, 0.5876, 0.6364] +2026-04-08 21:33:41.357345: Epoch time: 103.16 s +2026-04-08 21:33:41.358801: Yayy! New best EMA pseudo Dice: 0.4144 +2026-04-08 21:33:44.097128: +2026-04-08 21:33:44.099644: Epoch 185 +2026-04-08 21:33:44.101789: Current learning rate: 0.00832 +2026-04-08 21:35:27.703490: train_loss -0.1383 +2026-04-08 21:35:27.709996: val_loss -0.1141 +2026-04-08 21:35:27.712762: Pseudo dice [0.6286, 0.0397, 0.7313, 0.489, 0.4711, 0.6469, 0.7516] +2026-04-08 21:35:27.715683: Epoch time: 103.61 s +2026-04-08 21:35:27.718045: Yayy! New best EMA pseudo Dice: 0.4267 +2026-04-08 21:35:30.593770: +2026-04-08 21:35:30.595345: Epoch 186 +2026-04-08 21:35:30.596840: Current learning rate: 0.00831 +2026-04-08 21:37:13.884588: train_loss -0.1368 +2026-04-08 21:37:13.890775: val_loss -0.1113 +2026-04-08 21:37:13.893420: Pseudo dice [0.4503, 0.169, 0.5921, 0.0133, 0.2241, 0.3502, 0.7649] +2026-04-08 21:37:13.895918: Epoch time: 103.29 s +2026-04-08 21:37:16.083327: +2026-04-08 21:37:16.086329: Epoch 187 +2026-04-08 21:37:16.089669: Current learning rate: 0.0083 +2026-04-08 21:38:58.608464: train_loss -0.1314 +2026-04-08 21:38:58.615600: val_loss -0.1263 +2026-04-08 21:38:58.617607: Pseudo dice [0.5559, 0.1111, 0.7201, 0.2348, 0.2358, 0.5744, 0.6398] +2026-04-08 21:38:58.620085: Epoch time: 102.53 s +2026-04-08 21:38:59.684813: +2026-04-08 21:38:59.687017: Epoch 188 +2026-04-08 21:38:59.689562: Current learning rate: 0.00829 +2026-04-08 21:40:43.741219: train_loss -0.1298 +2026-04-08 21:40:43.748446: val_loss -0.1262 +2026-04-08 21:40:43.750740: Pseudo dice [0.3179, 0.3107, 0.6359, 0.4587, 
0.2285, 0.7622, 0.7919] +2026-04-08 21:40:43.754197: Epoch time: 104.06 s +2026-04-08 21:40:43.756526: Yayy! New best EMA pseudo Dice: 0.4303 +2026-04-08 21:40:46.636263: +2026-04-08 21:40:46.638085: Epoch 189 +2026-04-08 21:40:46.639840: Current learning rate: 0.00828 +2026-04-08 21:42:29.457301: train_loss -0.1333 +2026-04-08 21:42:29.465639: val_loss -0.0885 +2026-04-08 21:42:29.467915: Pseudo dice [0.3545, 0.2133, 0.214, 0.7454, 0.1834, 0.5621, 0.3896] +2026-04-08 21:42:29.471718: Epoch time: 102.82 s +2026-04-08 21:42:30.569143: +2026-04-08 21:42:30.572329: Epoch 190 +2026-04-08 21:42:30.575090: Current learning rate: 0.00827 +2026-04-08 21:44:16.368789: train_loss -0.1352 +2026-04-08 21:44:16.376725: val_loss -0.1282 +2026-04-08 21:44:16.380912: Pseudo dice [0.4173, 0.1487, 0.6854, 0.332, 0.313, 0.2484, 0.8669] +2026-04-08 21:44:16.384283: Epoch time: 105.8 s +2026-04-08 21:44:17.485548: +2026-04-08 21:44:17.493812: Epoch 191 +2026-04-08 21:44:17.503046: Current learning rate: 0.00826 +2026-04-08 21:46:01.343962: train_loss -0.1472 +2026-04-08 21:46:01.351371: val_loss -0.1249 +2026-04-08 21:46:01.354718: Pseudo dice [0.4307, 0.046, 0.5933, 0.6347, 0.3759, 0.4083, 0.3571] +2026-04-08 21:46:01.357359: Epoch time: 103.86 s +2026-04-08 21:46:02.437115: +2026-04-08 21:46:02.438862: Epoch 192 +2026-04-08 21:46:02.440365: Current learning rate: 0.00825 +2026-04-08 21:47:47.354221: train_loss -0.1323 +2026-04-08 21:47:47.361005: val_loss -0.1325 +2026-04-08 21:47:47.362982: Pseudo dice [0.6041, 0.0896, 0.5911, 0.5897, 0.3489, 0.444, 0.7237] +2026-04-08 21:47:47.365119: Epoch time: 104.92 s +2026-04-08 21:47:48.466614: +2026-04-08 21:47:48.469343: Epoch 193 +2026-04-08 21:47:48.471244: Current learning rate: 0.00824 +2026-04-08 21:49:33.596546: train_loss -0.1372 +2026-04-08 21:49:33.605222: val_loss -0.1214 +2026-04-08 21:49:33.608887: Pseudo dice [0.4818, 0.0782, 0.2725, 0.7292, 0.2547, 0.7139, 0.5211] +2026-04-08 21:49:33.613676: Epoch time: 105.13 s +2026-04-08 
21:49:33.617281: Yayy! New best EMA pseudo Dice: 0.4305 +2026-04-08 21:49:36.482590: +2026-04-08 21:49:36.484590: Epoch 194 +2026-04-08 21:49:36.486129: Current learning rate: 0.00824 +2026-04-08 21:51:30.690482: train_loss -0.1477 +2026-04-08 21:51:30.699820: val_loss -0.1266 +2026-04-08 21:51:30.703644: Pseudo dice [0.3356, 0.0793, 0.2709, 0.4788, 0.4138, 0.3767, 0.5925] +2026-04-08 21:51:30.707614: Epoch time: 114.21 s +2026-04-08 21:51:31.802462: +2026-04-08 21:51:31.805052: Epoch 195 +2026-04-08 21:51:31.807950: Current learning rate: 0.00823 +2026-04-08 21:53:17.913944: train_loss -0.1454 +2026-04-08 21:53:17.922690: val_loss -0.114 +2026-04-08 21:53:17.926055: Pseudo dice [0.3087, 0.5189, 0.8021, 0.7458, 0.3508, 0.3377, 0.3736] +2026-04-08 21:53:17.929706: Epoch time: 106.11 s +2026-04-08 21:53:17.932622: Yayy! New best EMA pseudo Dice: 0.4306 +2026-04-08 21:53:20.996290: +2026-04-08 21:53:20.998689: Epoch 196 +2026-04-08 21:53:21.001096: Current learning rate: 0.00822 +2026-04-08 21:55:06.050460: train_loss -0.1433 +2026-04-08 21:55:06.059395: val_loss -0.1312 +2026-04-08 21:55:06.062441: Pseudo dice [0.211, 0.2001, 0.6256, 0.4248, 0.4236, 0.4887, 0.5688] +2026-04-08 21:55:06.067839: Epoch time: 105.06 s +2026-04-08 21:55:07.320255: +2026-04-08 21:55:07.322957: Epoch 197 +2026-04-08 21:55:07.325927: Current learning rate: 0.00821 +2026-04-08 21:57:26.437161: train_loss -0.1353 +2026-04-08 21:57:26.444729: val_loss -0.0793 +2026-04-08 21:57:26.447352: Pseudo dice [0.1614, 0.3198, 0.5824, 0.3966, 0.2788, 0.4244, 0.533] +2026-04-08 21:57:26.450728: Epoch time: 139.12 s +2026-04-08 21:57:27.617270: +2026-04-08 21:57:27.621300: Epoch 198 +2026-04-08 21:57:27.625572: Current learning rate: 0.0082 +2026-04-08 21:59:57.640222: train_loss -0.1347 +2026-04-08 21:59:57.650506: val_loss -0.1467 +2026-04-08 21:59:57.654389: Pseudo dice [0.2852, 0.2489, 0.6348, 0.7739, 0.4346, 0.4112, 0.828] +2026-04-08 21:59:57.658899: Epoch time: 150.03 s +2026-04-08 21:59:57.664121: 
Yayy! New best EMA pseudo Dice: 0.4343 +2026-04-08 22:00:00.924939: +2026-04-08 22:00:00.929848: Epoch 199 +2026-04-08 22:00:00.933785: Current learning rate: 0.00819 +2026-04-08 22:02:12.121650: train_loss -0.1437 +2026-04-08 22:02:12.129673: val_loss -0.1243 +2026-04-08 22:02:12.131270: Pseudo dice [0.6871, 0.144, 0.6324, 0.0035, 0.2726, 0.7882, 0.3321] +2026-04-08 22:02:12.133409: Epoch time: 131.2 s +2026-04-08 22:02:15.047213: +2026-04-08 22:02:15.048999: Epoch 200 +2026-04-08 22:02:15.050796: Current learning rate: 0.00818 +2026-04-08 22:04:45.114087: train_loss -0.1464 +2026-04-08 22:04:45.141318: val_loss -0.1055 +2026-04-08 22:04:45.152762: Pseudo dice [0.4438, 0.1514, 0.4831, 0.1134, 0.09, 0.659, 0.8004] +2026-04-08 22:04:45.156598: Epoch time: 150.07 s +2026-04-08 22:04:46.261148: +2026-04-08 22:04:46.265892: Epoch 201 +2026-04-08 22:04:46.269084: Current learning rate: 0.00817 +2026-04-08 22:07:04.538934: train_loss -0.1282 +2026-04-08 22:07:04.553800: val_loss -0.1323 +2026-04-08 22:07:04.556149: Pseudo dice [0.2812, 0.2097, 0.7081, 0.5627, 0.2778, 0.4847, 0.74] +2026-04-08 22:07:04.560781: Epoch time: 138.28 s +2026-04-08 22:07:05.659646: +2026-04-08 22:07:05.662264: Epoch 202 +2026-04-08 22:07:05.664469: Current learning rate: 0.00816 +2026-04-08 22:09:01.773286: train_loss -0.1414 +2026-04-08 22:09:01.789317: val_loss -0.1162 +2026-04-08 22:09:01.791615: Pseudo dice [0.311, 0.1141, 0.5185, 0.2926, 0.3759, 0.6047, 0.6995] +2026-04-08 22:09:01.794376: Epoch time: 116.12 s +2026-04-08 22:09:02.895007: +2026-04-08 22:09:02.897927: Epoch 203 +2026-04-08 22:09:02.900799: Current learning rate: 0.00815 +2026-04-08 22:10:55.628591: train_loss -0.1334 +2026-04-08 22:10:55.634651: val_loss -0.1366 +2026-04-08 22:10:55.637686: Pseudo dice [0.3839, 0.3456, 0.5959, 0.3931, 0.3789, 0.596, 0.7704] +2026-04-08 22:10:55.640593: Epoch time: 112.74 s +2026-04-08 22:10:55.642901: Yayy! 
New best EMA pseudo Dice: 0.4365 +2026-04-08 22:10:59.631130: +2026-04-08 22:10:59.632955: Epoch 204 +2026-04-08 22:10:59.634923: Current learning rate: 0.00814 +2026-04-08 22:12:47.663077: train_loss -0.1168 +2026-04-08 22:12:47.671256: val_loss -0.1136 +2026-04-08 22:12:47.673618: Pseudo dice [0.1003, 0.3845, 0.6411, 0.4331, 0.3672, 0.5999, 0.4828] +2026-04-08 22:12:47.676227: Epoch time: 108.04 s +2026-04-08 22:12:48.795866: +2026-04-08 22:12:48.800097: Epoch 205 +2026-04-08 22:12:48.803843: Current learning rate: 0.00813 +2026-04-08 22:14:40.326286: train_loss -0.1389 +2026-04-08 22:14:40.343195: val_loss -0.1357 +2026-04-08 22:14:40.349110: Pseudo dice [0.2812, 0.1642, 0.4823, 0.6382, 0.4904, 0.2451, 0.517] +2026-04-08 22:14:40.353898: Epoch time: 111.53 s +2026-04-08 22:14:41.394691: +2026-04-08 22:14:41.399575: Epoch 206 +2026-04-08 22:14:41.404663: Current learning rate: 0.00813 +2026-04-08 22:16:26.398549: train_loss -0.147 +2026-04-08 22:16:26.405756: val_loss -0.0969 +2026-04-08 22:16:26.408722: Pseudo dice [0.1866, 0.1475, 0.4373, 0.78, 0.0989, 0.5011, 0.702] +2026-04-08 22:16:26.413092: Epoch time: 105.01 s +2026-04-08 22:16:27.456017: +2026-04-08 22:16:27.459394: Epoch 207 +2026-04-08 22:16:27.466482: Current learning rate: 0.00812 +2026-04-08 22:18:17.070682: train_loss -0.1328 +2026-04-08 22:18:17.078891: val_loss -0.0837 +2026-04-08 22:18:17.081466: Pseudo dice [0.2086, 0.1918, 0.6123, 0.5037, 0.1931, 0.489, 0.3404] +2026-04-08 22:18:17.086834: Epoch time: 109.62 s +2026-04-08 22:18:18.112315: +2026-04-08 22:18:18.115953: Epoch 208 +2026-04-08 22:18:18.119056: Current learning rate: 0.00811 +2026-04-08 22:20:11.494636: train_loss -0.1311 +2026-04-08 22:20:11.505634: val_loss -0.1089 +2026-04-08 22:20:11.510497: Pseudo dice [0.4608, 0.2295, 0.7674, 0.4962, 0.4057, 0.3812, 0.5878] +2026-04-08 22:20:11.515078: Epoch time: 113.39 s +2026-04-08 22:20:12.546201: +2026-04-08 22:20:12.557318: Epoch 209 +2026-04-08 22:20:12.559987: Current learning rate: 
0.0081 +2026-04-08 22:21:58.510013: train_loss -0.1529 +2026-04-08 22:21:58.523965: val_loss -0.1334 +2026-04-08 22:21:58.527699: Pseudo dice [0.3821, 0.4221, 0.6413, 0.7646, 0.2129, 0.509, 0.6143] +2026-04-08 22:21:58.532267: Epoch time: 105.97 s +2026-04-08 22:21:59.572973: +2026-04-08 22:21:59.575656: Epoch 210 +2026-04-08 22:21:59.577628: Current learning rate: 0.00809 +2026-04-08 22:23:54.493629: train_loss -0.1369 +2026-04-08 22:23:54.504278: val_loss -0.1378 +2026-04-08 22:23:54.508193: Pseudo dice [0.4774, 0.1363, 0.5019, 0.6798, 0.399, 0.4966, 0.6255] +2026-04-08 22:23:54.512506: Epoch time: 114.92 s +2026-04-08 22:23:54.516010: Yayy! New best EMA pseudo Dice: 0.4401 +2026-04-08 22:23:57.666497: +2026-04-08 22:23:57.671591: Epoch 211 +2026-04-08 22:23:57.676321: Current learning rate: 0.00808 +2026-04-08 22:25:47.420566: train_loss -0.1428 +2026-04-08 22:25:47.431852: val_loss -0.1071 +2026-04-08 22:25:47.434647: Pseudo dice [0.2721, 0.2546, 0.7452, 0.7132, 0.18, 0.6539, 0.4475] +2026-04-08 22:25:47.437243: Epoch time: 109.76 s +2026-04-08 22:25:47.439964: Yayy! New best EMA pseudo Dice: 0.4427 +2026-04-08 22:25:50.310115: +2026-04-08 22:25:50.327368: Epoch 212 +2026-04-08 22:25:50.329889: Current learning rate: 0.00807 +2026-04-08 22:27:37.519729: train_loss -0.1483 +2026-04-08 22:27:37.528203: val_loss -0.1123 +2026-04-08 22:27:37.531603: Pseudo dice [0.4201, 0.2595, 0.5739, 0.4479, 0.1627, 0.7543, 0.8038] +2026-04-08 22:27:37.534792: Epoch time: 107.21 s +2026-04-08 22:27:37.538440: Yayy! New best EMA pseudo Dice: 0.4474 +2026-04-08 22:27:40.574811: +2026-04-08 22:27:40.577906: Epoch 213 +2026-04-08 22:27:40.581596: Current learning rate: 0.00806 +2026-04-08 22:29:25.850302: train_loss -0.1448 +2026-04-08 22:29:25.859335: val_loss -0.1391 +2026-04-08 22:29:25.861690: Pseudo dice [0.2999, 0.1523, 0.6075, 0.6258, 0.1716, 0.6405, 0.758] +2026-04-08 22:29:25.863952: Epoch time: 105.28 s +2026-04-08 22:29:25.866122: Yayy! 
New best EMA pseudo Dice: 0.4491 +2026-04-08 22:29:28.791512: +2026-04-08 22:29:28.794169: Epoch 214 +2026-04-08 22:29:28.796788: Current learning rate: 0.00805 +2026-04-08 22:31:16.738587: train_loss -0.149 +2026-04-08 22:31:16.748006: val_loss -0.1377 +2026-04-08 22:31:16.750103: Pseudo dice [0.4722, 0.2417, 0.659, 0.1079, 0.3745, 0.6283, 0.6809] +2026-04-08 22:31:16.753156: Epoch time: 107.95 s +2026-04-08 22:31:16.756237: Yayy! New best EMA pseudo Dice: 0.4494 +2026-04-08 22:31:19.728640: +2026-04-08 22:31:19.732840: Epoch 215 +2026-04-08 22:31:19.737488: Current learning rate: 0.00804 +2026-04-08 22:33:03.746853: train_loss -0.1494 +2026-04-08 22:33:03.755087: val_loss -0.1189 +2026-04-08 22:33:03.757706: Pseudo dice [0.4344, 0.1007, 0.4649, 0.5875, 0.3683, 0.5184, 0.6768] +2026-04-08 22:33:03.760483: Epoch time: 104.02 s +2026-04-08 22:33:03.763103: Yayy! New best EMA pseudo Dice: 0.4495 +2026-04-08 22:33:06.499926: +2026-04-08 22:33:06.501831: Epoch 216 +2026-04-08 22:33:06.503672: Current learning rate: 0.00803 +2026-04-08 22:34:50.238504: train_loss -0.1526 +2026-04-08 22:34:50.244721: val_loss -0.1296 +2026-04-08 22:34:50.247639: Pseudo dice [0.7052, 0.0396, 0.803, 0.7589, 0.3742, 0.7128, 0.3491] +2026-04-08 22:34:50.249963: Epoch time: 103.74 s +2026-04-08 22:34:50.252257: Yayy! New best EMA pseudo Dice: 0.458 +2026-04-08 22:34:53.086332: +2026-04-08 22:34:53.088625: Epoch 217 +2026-04-08 22:34:53.090460: Current learning rate: 0.00802 +2026-04-08 22:36:38.234431: train_loss -0.1612 +2026-04-08 22:36:38.241896: val_loss -0.1416 +2026-04-08 22:36:38.244294: Pseudo dice [0.4268, 0.3803, 0.7875, 0.661, 0.4001, 0.5861, 0.7235] +2026-04-08 22:36:38.248512: Epoch time: 105.15 s +2026-04-08 22:36:38.250862: Yayy! 
New best EMA pseudo Dice: 0.4689 +2026-04-08 22:36:41.035462: +2026-04-08 22:36:41.037375: Epoch 218 +2026-04-08 22:36:41.040985: Current learning rate: 0.00801 +2026-04-08 22:38:24.148694: train_loss -0.1474 +2026-04-08 22:38:24.154264: val_loss -0.1157 +2026-04-08 22:38:24.156662: Pseudo dice [0.2509, 0.0744, 0.6999, 0.5625, 0.313, 0.4256, 0.7831] +2026-04-08 22:38:24.158723: Epoch time: 103.12 s +2026-04-08 22:38:25.194587: +2026-04-08 22:38:25.196845: Epoch 219 +2026-04-08 22:38:25.199733: Current learning rate: 0.00801 +2026-04-08 22:40:12.388211: train_loss -0.1477 +2026-04-08 22:40:12.399610: val_loss -0.1193 +2026-04-08 22:40:12.403451: Pseudo dice [0.2617, 0.1944, 0.8039, 0.5556, 0.3427, 0.2903, 0.7639] +2026-04-08 22:40:12.407645: Epoch time: 107.2 s +2026-04-08 22:40:13.590428: +2026-04-08 22:40:13.595719: Epoch 220 +2026-04-08 22:40:13.598528: Current learning rate: 0.008 +2026-04-08 22:41:58.986411: train_loss -0.1444 +2026-04-08 22:41:59.003418: val_loss -0.1192 +2026-04-08 22:41:59.007142: Pseudo dice [0.4182, 0.3326, 0.775, 0.5025, 0.3458, 0.6694, 0.6582] +2026-04-08 22:41:59.012740: Epoch time: 105.4 s +2026-04-08 22:41:59.018381: Yayy! New best EMA pseudo Dice: 0.472 +2026-04-08 22:42:03.470222: +2026-04-08 22:42:03.473209: Epoch 221 +2026-04-08 22:42:03.476321: Current learning rate: 0.00799 +2026-04-08 22:43:46.971230: train_loss -0.1526 +2026-04-08 22:43:46.977863: val_loss -0.1405 +2026-04-08 22:43:46.980448: Pseudo dice [0.3059, 0.1948, 0.6762, 0.5707, 0.3686, 0.4217, 0.7427] +2026-04-08 22:43:46.983459: Epoch time: 103.5 s +2026-04-08 22:43:48.025909: +2026-04-08 22:43:48.029381: Epoch 222 +2026-04-08 22:43:48.033246: Current learning rate: 0.00798 +2026-04-08 22:45:31.950683: train_loss -0.1534 +2026-04-08 22:45:31.956797: val_loss -0.1127 +2026-04-08 22:45:31.958884: Pseudo dice [0.5966, 0.252, 0.5139, 0.6294, 0.398, 0.4063, 0.6012] +2026-04-08 22:45:31.961484: Epoch time: 103.93 s +2026-04-08 22:45:31.963240: Yayy! 
New best EMA pseudo Dice: 0.473 +2026-04-08 22:45:34.778723: +2026-04-08 22:45:34.781531: Epoch 223 +2026-04-08 22:45:34.783563: Current learning rate: 0.00797 +2026-04-08 22:47:29.686884: train_loss -0.1423 +2026-04-08 22:47:29.701838: val_loss -0.1385 +2026-04-08 22:47:29.708827: Pseudo dice [0.2138, 0.4087, 0.6973, 0.7537, 0.1984, 0.7668, 0.6664] +2026-04-08 22:47:29.714138: Epoch time: 114.91 s +2026-04-08 22:47:29.718013: Yayy! New best EMA pseudo Dice: 0.4786 +2026-04-08 22:47:32.749627: +2026-04-08 22:47:32.758227: Epoch 224 +2026-04-08 22:47:32.764603: Current learning rate: 0.00796 +2026-04-08 22:49:21.692344: train_loss -0.1531 +2026-04-08 22:49:21.707885: val_loss -0.1232 +2026-04-08 22:49:21.713005: Pseudo dice [0.3933, 0.2252, 0.6781, 0.5547, 0.3415, 0.3417, 0.7004] +2026-04-08 22:49:21.716933: Epoch time: 108.95 s +2026-04-08 22:49:22.748730: +2026-04-08 22:49:22.752737: Epoch 225 +2026-04-08 22:49:22.756321: Current learning rate: 0.00795 +2026-04-08 22:51:17.973268: train_loss -0.136 +2026-04-08 22:51:17.984500: val_loss -0.1252 +2026-04-08 22:51:17.994623: Pseudo dice [0.5028, 0.3017, 0.5393, 0.0449, 0.329, 0.5919, 0.6461] +2026-04-08 22:51:17.997399: Epoch time: 115.23 s +2026-04-08 22:51:19.029458: +2026-04-08 22:51:19.032260: Epoch 226 +2026-04-08 22:51:19.034322: Current learning rate: 0.00794 +2026-04-08 22:53:07.130178: train_loss -0.1525 +2026-04-08 22:53:07.136459: val_loss -0.1108 +2026-04-08 22:53:07.140469: Pseudo dice [0.2409, 0.1291, 0.6683, 0.0087, 0.3411, 0.739, 0.8504] +2026-04-08 22:53:07.144521: Epoch time: 108.1 s +2026-04-08 22:53:08.163174: +2026-04-08 22:53:08.166969: Epoch 227 +2026-04-08 22:53:08.170878: Current learning rate: 0.00793 +2026-04-08 22:54:56.555051: train_loss -0.1608 +2026-04-08 22:54:56.562405: val_loss -0.1097 +2026-04-08 22:54:56.565330: Pseudo dice [0.1479, 0.2526, 0.6261, 0.0404, 0.406, 0.7035, 0.6325] +2026-04-08 22:54:56.568292: Epoch time: 108.4 s +2026-04-08 22:54:57.597596: +2026-04-08 
22:54:57.600314: Epoch 228 +2026-04-08 22:54:57.602515: Current learning rate: 0.00792 +2026-04-08 22:56:41.336832: train_loss -0.156 +2026-04-08 22:56:41.345582: val_loss -0.1187 +2026-04-08 22:56:41.348309: Pseudo dice [0.392, 0.1315, 0.7337, 0.6988, 0.2638, 0.6493, 0.6536] +2026-04-08 22:56:41.352445: Epoch time: 103.74 s +2026-04-08 22:56:42.362817: +2026-04-08 22:56:42.367275: Epoch 229 +2026-04-08 22:56:42.370559: Current learning rate: 0.00791 +2026-04-08 22:58:28.102835: train_loss -0.135 +2026-04-08 22:58:28.112721: val_loss -0.0824 +2026-04-08 22:58:28.115799: Pseudo dice [0.2672, 0.0533, 0.7608, 0.2745, 0.2785, 0.1491, 0.4574] +2026-04-08 22:58:28.118464: Epoch time: 105.74 s +2026-04-08 22:58:29.147327: +2026-04-08 22:58:29.149601: Epoch 230 +2026-04-08 22:58:29.153027: Current learning rate: 0.0079 +2026-04-08 23:00:17.378751: train_loss -0.152 +2026-04-08 23:00:17.389884: val_loss -0.1418 +2026-04-08 23:00:17.393044: Pseudo dice [0.475, 0.2046, 0.6244, 0.5434, 0.489, 0.5743, 0.6416] +2026-04-08 23:00:17.397304: Epoch time: 108.23 s +2026-04-08 23:00:18.426930: +2026-04-08 23:00:18.429718: Epoch 231 +2026-04-08 23:00:18.432365: Current learning rate: 0.00789 +2026-04-08 23:02:05.482332: train_loss -0.1536 +2026-04-08 23:02:05.491327: val_loss -0.1206 +2026-04-08 23:02:05.494100: Pseudo dice [0.2772, 0.2228, 0.7246, 0.798, 0.327, 0.455, 0.7438] +2026-04-08 23:02:05.496935: Epoch time: 107.06 s +2026-04-08 23:02:06.513018: +2026-04-08 23:02:06.517021: Epoch 232 +2026-04-08 23:02:06.521423: Current learning rate: 0.00789 +2026-04-08 23:03:50.390701: train_loss -0.1605 +2026-04-08 23:03:50.402539: val_loss -0.1371 +2026-04-08 23:03:50.404898: Pseudo dice [0.4882, 0.5004, 0.7976, 0.5925, 0.2088, 0.4716, 0.8255] +2026-04-08 23:03:50.407531: Epoch time: 103.88 s +2026-04-08 23:03:51.423820: +2026-04-08 23:03:51.426633: Epoch 233 +2026-04-08 23:03:51.430235: Current learning rate: 0.00788 +2026-04-08 23:05:43.787992: train_loss -0.1517 +2026-04-08 
23:05:43.796878: val_loss -0.1158 +2026-04-08 23:05:43.799781: Pseudo dice [0.4227, 0.0132, 0.7643, 0.636, 0.3824, 0.4821, 0.5655] +2026-04-08 23:05:43.803927: Epoch time: 112.37 s +2026-04-08 23:05:44.831143: +2026-04-08 23:05:44.833807: Epoch 234 +2026-04-08 23:05:44.836295: Current learning rate: 0.00787 +2026-04-08 23:07:30.914805: train_loss -0.1438 +2026-04-08 23:07:30.924622: val_loss -0.1379 +2026-04-08 23:07:30.927114: Pseudo dice [0.3899, 0.5215, 0.6587, 0.7057, 0.3203, 0.6468, 0.6567] +2026-04-08 23:07:30.930806: Epoch time: 106.09 s +2026-04-08 23:07:30.933701: Yayy! New best EMA pseudo Dice: 0.4787 +2026-04-08 23:07:33.867582: +2026-04-08 23:07:33.869685: Epoch 235 +2026-04-08 23:07:33.871674: Current learning rate: 0.00786 +2026-04-08 23:09:20.316971: train_loss -0.1458 +2026-04-08 23:09:20.329834: val_loss -0.0937 +2026-04-08 23:09:20.333979: Pseudo dice [0.1454, 0.1355, 0.6366, 0.7868, 0.1437, 0.3938, 0.6658] +2026-04-08 23:09:20.338809: Epoch time: 106.45 s +2026-04-08 23:09:21.380962: +2026-04-08 23:09:21.383464: Epoch 236 +2026-04-08 23:09:21.386812: Current learning rate: 0.00785 +2026-04-08 23:11:18.100962: train_loss -0.1439 +2026-04-08 23:11:18.109480: val_loss -0.1054 +2026-04-08 23:11:18.111762: Pseudo dice [0.2207, 0.537, 0.6047, 0.6779, 0.3475, 0.6356, 0.5473] +2026-04-08 23:11:18.115342: Epoch time: 116.72 s +2026-04-08 23:11:19.153992: +2026-04-08 23:11:19.161379: Epoch 237 +2026-04-08 23:11:19.166233: Current learning rate: 0.00784 +2026-04-08 23:13:07.615417: train_loss -0.1537 +2026-04-08 23:13:07.628297: val_loss -0.1271 +2026-04-08 23:13:07.632779: Pseudo dice [0.4793, 0.1777, 0.7094, 0.6664, 0.2882, 0.7234, 0.7302] +2026-04-08 23:13:07.641450: Epoch time: 108.46 s +2026-04-08 23:13:07.645303: Yayy! 
New best EMA pseudo Dice: 0.4825 +2026-04-08 23:13:10.523011: +2026-04-08 23:13:10.525321: Epoch 238 +2026-04-08 23:13:10.527662: Current learning rate: 0.00783 +2026-04-08 23:15:00.497944: train_loss -0.1556 +2026-04-08 23:15:00.512575: val_loss -0.1273 +2026-04-08 23:15:00.517439: Pseudo dice [0.236, 0.4407, 0.3884, 0.643, 0.2507, 0.7165, 0.7825] +2026-04-08 23:15:00.521728: Epoch time: 109.98 s +2026-04-08 23:15:00.525483: Yayy! New best EMA pseudo Dice: 0.4836 +2026-04-08 23:15:03.797088: +2026-04-08 23:15:03.800800: Epoch 239 +2026-04-08 23:15:03.806175: Current learning rate: 0.00782 +2026-04-08 23:17:08.245504: train_loss -0.1601 +2026-04-08 23:17:08.255507: val_loss -0.1316 +2026-04-08 23:17:08.258275: Pseudo dice [0.4485, 0.0685, 0.664, 0.6987, 0.3703, 0.7333, 0.8264] +2026-04-08 23:17:08.261176: Epoch time: 124.45 s +2026-04-08 23:17:08.263163: Yayy! New best EMA pseudo Dice: 0.4897 +2026-04-08 23:17:11.101964: +2026-04-08 23:17:11.103996: Epoch 240 +2026-04-08 23:17:11.106182: Current learning rate: 0.00781 +2026-04-08 23:19:06.378595: train_loss -0.1702 +2026-04-08 23:19:06.386006: val_loss -0.1474 +2026-04-08 23:19:06.388284: Pseudo dice [0.6383, 0.0576, 0.7803, 0.7112, 0.4138, 0.6671, 0.7625] +2026-04-08 23:19:06.391907: Epoch time: 115.28 s +2026-04-08 23:19:06.394396: Yayy! New best EMA pseudo Dice: 0.4983 +2026-04-08 23:19:09.356057: +2026-04-08 23:19:09.358519: Epoch 241 +2026-04-08 23:19:09.360872: Current learning rate: 0.0078 +2026-04-08 23:20:57.485672: train_loss -0.1517 +2026-04-08 23:20:57.499249: val_loss -0.1309 +2026-04-08 23:20:57.503033: Pseudo dice [0.4025, 0.5412, 0.7033, 0.6426, 0.3998, 0.5354, 0.7889] +2026-04-08 23:20:57.507088: Epoch time: 108.13 s +2026-04-08 23:20:57.510777: Yayy! 
New best EMA pseudo Dice: 0.5058 +2026-04-08 23:21:00.445777: +2026-04-08 23:21:00.448504: Epoch 242 +2026-04-08 23:21:00.451767: Current learning rate: 0.00779 +2026-04-08 23:22:47.490627: train_loss -0.1481 +2026-04-08 23:22:47.499438: val_loss -0.1019 +2026-04-08 23:22:47.501685: Pseudo dice [0.1242, 0.5867, 0.7843, 0.7856, 0.2354, 0.6875, 0.5684] +2026-04-08 23:22:47.504747: Epoch time: 107.05 s +2026-04-08 23:22:47.509036: Yayy! New best EMA pseudo Dice: 0.5091 +2026-04-08 23:22:50.338378: +2026-04-08 23:22:50.346037: Epoch 243 +2026-04-08 23:22:50.347794: Current learning rate: 0.00778 +2026-04-08 23:24:44.781694: train_loss -0.164 +2026-04-08 23:24:44.789679: val_loss -0.1151 +2026-04-08 23:24:44.792016: Pseudo dice [0.5379, 0.0707, 0.7497, 0.7133, 0.3234, 0.4509, 0.4712] +2026-04-08 23:24:44.795155: Epoch time: 114.45 s +2026-04-08 23:24:45.877680: +2026-04-08 23:24:45.880571: Epoch 244 +2026-04-08 23:24:45.883932: Current learning rate: 0.00777 +2026-04-08 23:26:38.073854: train_loss -0.1425 +2026-04-08 23:26:38.088765: val_loss -0.1033 +2026-04-08 23:26:38.092103: Pseudo dice [0.3103, 0.1977, 0.3915, 0.7499, 0.3192, 0.4664, 0.5723] +2026-04-08 23:26:38.098308: Epoch time: 112.2 s +2026-04-08 23:26:39.182235: +2026-04-08 23:26:39.186048: Epoch 245 +2026-04-08 23:26:39.190943: Current learning rate: 0.00777 +2026-04-08 23:28:29.384639: train_loss -0.1413 +2026-04-08 23:28:29.419097: val_loss -0.113 +2026-04-08 23:28:29.422640: Pseudo dice [0.2418, 0.1551, 0.6532, 0.6911, 0.2474, 0.4429, 0.7467] +2026-04-08 23:28:29.425267: Epoch time: 110.21 s +2026-04-08 23:28:30.478276: +2026-04-08 23:28:30.481633: Epoch 246 +2026-04-08 23:28:30.485155: Current learning rate: 0.00776 +2026-04-08 23:30:18.728976: train_loss -0.1575 +2026-04-08 23:30:18.742628: val_loss -0.1031 +2026-04-08 23:30:18.745420: Pseudo dice [0.3644, 0.1628, 0.709, 0.4467, 0.2196, 0.6251, 0.4411] +2026-04-08 23:30:18.750869: Epoch time: 108.25 s +2026-04-08 23:30:19.837901: +2026-04-08 
23:30:19.840447: Epoch 247 +2026-04-08 23:30:19.844050: Current learning rate: 0.00775 +2026-04-08 23:32:03.158558: train_loss -0.1518 +2026-04-08 23:32:03.165462: val_loss -0.0998 +2026-04-08 23:32:03.169106: Pseudo dice [0.337, 0.129, 0.7191, 0.2401, 0.4089, 0.5337, 0.5888] +2026-04-08 23:32:03.171653: Epoch time: 103.32 s +2026-04-08 23:32:04.229156: +2026-04-08 23:32:04.231955: Epoch 248 +2026-04-08 23:32:04.234565: Current learning rate: 0.00774 +2026-04-08 23:33:57.591033: train_loss -0.1505 +2026-04-08 23:33:57.598768: val_loss -0.0905 +2026-04-08 23:33:57.602036: Pseudo dice [0.492, 0.1623, 0.3664, 0.0282, 0.2056, 0.6904, 0.6528] +2026-04-08 23:33:57.606095: Epoch time: 113.37 s +2026-04-08 23:33:58.651617: +2026-04-08 23:33:58.655091: Epoch 249 +2026-04-08 23:33:58.657682: Current learning rate: 0.00773 +2026-04-08 23:35:44.458267: train_loss -0.157 +2026-04-08 23:35:44.465852: val_loss -0.1351 +2026-04-08 23:35:44.469299: Pseudo dice [0.6186, 0.1026, 0.8204, 0.4984, 0.4826, 0.7456, 0.8075] +2026-04-08 23:35:44.471839: Epoch time: 105.81 s +2026-04-08 23:35:47.340083: +2026-04-08 23:35:47.342677: Epoch 250 +2026-04-08 23:35:47.345891: Current learning rate: 0.00772 +2026-04-08 23:37:36.075207: train_loss -0.145 +2026-04-08 23:37:36.082291: val_loss -0.1149 +2026-04-08 23:37:36.084688: Pseudo dice [0.512, 0.113, 0.7326, 0.7694, 0.1449, 0.754, 0.6088] +2026-04-08 23:37:36.087662: Epoch time: 108.74 s +2026-04-08 23:37:37.125109: +2026-04-08 23:37:37.127143: Epoch 251 +2026-04-08 23:37:37.129135: Current learning rate: 0.00771 +2026-04-08 23:39:27.954674: train_loss -0.1546 +2026-04-08 23:39:27.964916: val_loss -0.1139 +2026-04-08 23:39:27.968611: Pseudo dice [0.4008, 0.2335, 0.6096, 0.4573, 0.3434, 0.4855, 0.6923] +2026-04-08 23:39:27.975455: Epoch time: 110.83 s +2026-04-08 23:39:29.031143: +2026-04-08 23:39:29.033688: Epoch 252 +2026-04-08 23:39:29.036636: Current learning rate: 0.0077 +2026-04-08 23:41:19.845268: train_loss -0.1215 +2026-04-08 
23:41:19.854605: val_loss -0.1062 +2026-04-08 23:41:19.858267: Pseudo dice [0.5286, 0.1912, 0.46, 0.7191, 0.174, 0.5085, 0.6137] +2026-04-08 23:41:19.860948: Epoch time: 110.82 s +2026-04-08 23:41:20.906417: +2026-04-08 23:41:20.910001: Epoch 253 +2026-04-08 23:41:20.914678: Current learning rate: 0.00769 +2026-04-08 23:43:06.298787: train_loss -0.1476 +2026-04-08 23:43:06.311603: val_loss -0.1319 +2026-04-08 23:43:06.315515: Pseudo dice [0.2563, 0.5221, 0.5983, 0.7807, 0.4884, 0.5194, 0.7266] +2026-04-08 23:43:06.319178: Epoch time: 105.4 s +2026-04-08 23:43:07.394928: +2026-04-08 23:43:07.398649: Epoch 254 +2026-04-08 23:43:07.402437: Current learning rate: 0.00768 +2026-04-08 23:44:52.072177: train_loss -0.1445 +2026-04-08 23:44:52.081805: val_loss -0.1154 +2026-04-08 23:44:52.085321: Pseudo dice [0.2687, 0.4365, 0.7301, 0.8205, 0.2749, 0.3736, 0.6776] +2026-04-08 23:44:52.089373: Epoch time: 104.68 s +2026-04-08 23:44:53.149979: +2026-04-08 23:44:53.153140: Epoch 255 +2026-04-08 23:44:53.155294: Current learning rate: 0.00767 +2026-04-08 23:46:42.831028: train_loss -0.1487 +2026-04-08 23:46:42.841775: val_loss -0.1188 +2026-04-08 23:46:42.845320: Pseudo dice [0.6312, 0.5677, 0.7076, 0.6967, 0.4569, 0.6832, 0.6122] +2026-04-08 23:46:42.850108: Epoch time: 109.68 s +2026-04-08 23:46:43.962896: +2026-04-08 23:46:43.967291: Epoch 256 +2026-04-08 23:46:43.969259: Current learning rate: 0.00766 +2026-04-08 23:48:30.572402: train_loss -0.1322 +2026-04-08 23:48:30.584818: val_loss -0.132 +2026-04-08 23:48:30.588381: Pseudo dice [0.4878, 0.0668, 0.7866, 0.7939, 0.3688, 0.2067, 0.6826] +2026-04-08 23:48:30.591499: Epoch time: 106.61 s +2026-04-08 23:48:31.624533: +2026-04-08 23:48:31.626984: Epoch 257 +2026-04-08 23:48:31.630183: Current learning rate: 0.00765 +2026-04-08 23:50:17.478055: train_loss -0.1461 +2026-04-08 23:50:17.487932: val_loss -0.1028 +2026-04-08 23:50:17.490746: Pseudo dice [0.263, 0.0828, 0.1181, 0.5102, 0.4096, 0.5221, 0.348] +2026-04-08 
23:50:17.493259: Epoch time: 105.86 s +2026-04-08 23:50:19.653277: +2026-04-08 23:50:19.655139: Epoch 258 +2026-04-08 23:50:19.656908: Current learning rate: 0.00764 +2026-04-08 23:52:05.929730: train_loss -0.1434 +2026-04-08 23:52:05.937335: val_loss -0.1119 +2026-04-08 23:52:05.939543: Pseudo dice [0.437, 0.4593, 0.6602, 0.6516, 0.4542, 0.5744, 0.4535] +2026-04-08 23:52:05.941901: Epoch time: 106.28 s +2026-04-08 23:52:07.012744: +2026-04-08 23:52:07.016463: Epoch 259 +2026-04-08 23:52:07.020160: Current learning rate: 0.00764 +2026-04-08 23:53:50.147424: train_loss -0.1675 +2026-04-08 23:53:50.154474: val_loss -0.1342 +2026-04-08 23:53:50.157356: Pseudo dice [0.2179, 0.0675, 0.7182, 0.4717, 0.3887, 0.7012, 0.7549] +2026-04-08 23:53:50.160251: Epoch time: 103.14 s +2026-04-08 23:53:51.212099: +2026-04-08 23:53:51.213965: Epoch 260 +2026-04-08 23:53:51.217128: Current learning rate: 0.00763 +2026-04-08 23:55:34.432279: train_loss -0.1574 +2026-04-08 23:55:34.439677: val_loss -0.1212 +2026-04-08 23:55:34.441762: Pseudo dice [0.1811, 0.5922, 0.6442, 0.757, 0.3112, 0.6332, 0.6742] +2026-04-08 23:55:34.444702: Epoch time: 103.22 s +2026-04-08 23:55:35.525238: +2026-04-08 23:55:35.527630: Epoch 261 +2026-04-08 23:55:35.530700: Current learning rate: 0.00762 +2026-04-08 23:57:28.716440: train_loss -0.1523 +2026-04-08 23:57:28.727647: val_loss -0.1071 +2026-04-08 23:57:28.731316: Pseudo dice [0.5348, 0.3797, 0.6261, 0.7726, 0.2256, 0.4675, 0.7259] +2026-04-08 23:57:28.736047: Epoch time: 113.19 s +2026-04-08 23:57:29.835123: +2026-04-08 23:57:29.837975: Epoch 262 +2026-04-08 23:57:29.841214: Current learning rate: 0.00761 +2026-04-08 23:59:14.095618: train_loss -0.168 +2026-04-08 23:59:14.102601: val_loss -0.1297 +2026-04-08 23:59:14.105252: Pseudo dice [0.629, 0.365, 0.8337, 0.5393, 0.4242, 0.7787, 0.5565] +2026-04-08 23:59:14.107451: Epoch time: 104.26 s +2026-04-08 23:59:15.153716: +2026-04-08 23:59:15.156938: Epoch 263 +2026-04-08 23:59:15.161777: Current learning 
rate: 0.0076 +2026-04-09 00:01:13.075743: train_loss -0.1589 +2026-04-09 00:01:13.096160: val_loss -0.1048 +2026-04-09 00:01:13.102373: Pseudo dice [0.184, 0.0577, 0.5823, 0.2209, 0.1556, 0.6776, 0.6313] +2026-04-09 00:01:13.110044: Epoch time: 117.93 s +2026-04-09 00:01:14.175801: +2026-04-09 00:01:14.180533: Epoch 264 +2026-04-09 00:01:14.187282: Current learning rate: 0.00759 +2026-04-09 00:03:58.796707: train_loss -0.1642 +2026-04-09 00:03:58.807163: val_loss -0.1167 +2026-04-09 00:03:58.816175: Pseudo dice [0.3498, 0.3807, 0.6027, 0.8432, 0.4814, 0.5322, 0.8336] +2026-04-09 00:03:58.820225: Epoch time: 164.62 s +2026-04-09 00:03:59.926221: +2026-04-09 00:03:59.932673: Epoch 265 +2026-04-09 00:03:59.937488: Current learning rate: 0.00758 +2026-04-09 00:06:31.231725: train_loss -0.1563 +2026-04-09 00:06:31.247309: val_loss -0.1289 +2026-04-09 00:06:31.252614: Pseudo dice [0.2331, 0.109, 0.6716, 0.6254, 0.4226, 0.6538, 0.5643] +2026-04-09 00:06:31.260683: Epoch time: 151.31 s +2026-04-09 00:06:32.340644: +2026-04-09 00:06:32.351377: Epoch 266 +2026-04-09 00:06:32.354934: Current learning rate: 0.00757 +2026-04-09 00:08:47.126059: train_loss -0.147 +2026-04-09 00:08:47.135311: val_loss -0.1392 +2026-04-09 00:08:47.139662: Pseudo dice [0.5159, 0.325, 0.8025, 0.4959, 0.2779, 0.6003, 0.4899] +2026-04-09 00:08:47.143816: Epoch time: 134.79 s +2026-04-09 00:08:48.206673: +2026-04-09 00:08:48.208899: Epoch 267 +2026-04-09 00:08:48.211980: Current learning rate: 0.00756 +2026-04-09 00:10:55.039994: train_loss -0.1509 +2026-04-09 00:10:55.058264: val_loss -0.1318 +2026-04-09 00:10:55.065233: Pseudo dice [0.7077, 0.0996, 0.7039, 0.4189, 0.2059, 0.6262, 0.8065] +2026-04-09 00:10:55.070467: Epoch time: 126.84 s +2026-04-09 00:10:56.137412: +2026-04-09 00:10:56.143916: Epoch 268 +2026-04-09 00:10:56.150681: Current learning rate: 0.00755 +2026-04-09 00:12:43.801369: train_loss -0.1629 +2026-04-09 00:12:43.808829: val_loss -0.1182 +2026-04-09 00:12:43.810817: Pseudo dice 
[0.3569, 0.1742, 0.2976, 0.4583, 0.1339, 0.4257, 0.6489] +2026-04-09 00:12:43.813605: Epoch time: 107.67 s +2026-04-09 00:12:44.867196: +2026-04-09 00:12:44.869741: Epoch 269 +2026-04-09 00:12:44.872549: Current learning rate: 0.00754 +2026-04-09 00:14:30.515266: train_loss -0.1561 +2026-04-09 00:14:30.527655: val_loss -0.1025 +2026-04-09 00:14:30.531106: Pseudo dice [0.4884, 0.0758, 0.6573, 0.5379, 0.31, 0.7283, 0.6501] +2026-04-09 00:14:30.534809: Epoch time: 105.65 s +2026-04-09 00:14:31.636997: +2026-04-09 00:14:31.639734: Epoch 270 +2026-04-09 00:14:31.643671: Current learning rate: 0.00753 +2026-04-09 00:16:18.192240: train_loss -0.1474 +2026-04-09 00:16:18.199851: val_loss -0.1252 +2026-04-09 00:16:18.202326: Pseudo dice [0.6514, 0.1377, 0.6928, 0.0886, 0.4445, 0.5951, 0.6754] +2026-04-09 00:16:18.205167: Epoch time: 106.56 s +2026-04-09 00:16:19.295919: +2026-04-09 00:16:19.298332: Epoch 271 +2026-04-09 00:16:19.303621: Current learning rate: 0.00752 +2026-04-09 00:18:02.486613: train_loss -0.1412 +2026-04-09 00:18:02.493540: val_loss -0.1179 +2026-04-09 00:18:02.495796: Pseudo dice [0.6187, 0.0207, 0.467, 0.6942, 0.2932, 0.4873, 0.4907] +2026-04-09 00:18:02.498300: Epoch time: 103.19 s +2026-04-09 00:18:03.583432: +2026-04-09 00:18:03.586305: Epoch 272 +2026-04-09 00:18:03.589706: Current learning rate: 0.00751 +2026-04-09 00:19:47.169750: train_loss -0.1546 +2026-04-09 00:19:47.177865: val_loss -0.118 +2026-04-09 00:19:47.180398: Pseudo dice [0.4826, 0.3634, 0.717, 0.8857, 0.2979, 0.2256, 0.7923] +2026-04-09 00:19:47.184716: Epoch time: 103.59 s +2026-04-09 00:19:48.247814: +2026-04-09 00:19:48.249855: Epoch 273 +2026-04-09 00:19:48.251893: Current learning rate: 0.00751 +2026-04-09 00:21:31.167698: train_loss -0.1655 +2026-04-09 00:21:31.174709: val_loss -0.1517 +2026-04-09 00:21:31.176532: Pseudo dice [0.562, 0.096, 0.6515, 0.687, 0.4071, 0.6541, 0.7784] +2026-04-09 00:21:31.179321: Epoch time: 102.92 s +2026-04-09 00:21:32.256176: +2026-04-09 
00:21:32.257924: Epoch 274 +2026-04-09 00:21:32.260478: Current learning rate: 0.0075 +2026-04-09 00:23:16.666025: train_loss -0.1616 +2026-04-09 00:23:16.673576: val_loss -0.1152 +2026-04-09 00:23:16.676435: Pseudo dice [0.1989, 0.1494, 0.7535, 0.7264, 0.1922, 0.532, 0.5542] +2026-04-09 00:23:16.680204: Epoch time: 104.41 s +2026-04-09 00:23:17.751046: +2026-04-09 00:23:17.753532: Epoch 275 +2026-04-09 00:23:17.756582: Current learning rate: 0.00749 +2026-04-09 00:25:01.766536: train_loss -0.1656 +2026-04-09 00:25:01.777402: val_loss -0.1313 +2026-04-09 00:25:01.780102: Pseudo dice [0.3396, 0.5071, 0.6993, 0.7743, 0.2983, 0.4401, 0.6948] +2026-04-09 00:25:01.782842: Epoch time: 104.02 s +2026-04-09 00:25:02.846424: +2026-04-09 00:25:02.849089: Epoch 276 +2026-04-09 00:25:02.851032: Current learning rate: 0.00748 +2026-04-09 00:26:45.911689: train_loss -0.1464 +2026-04-09 00:26:45.919581: val_loss -0.1165 +2026-04-09 00:26:45.921797: Pseudo dice [0.5765, 0.2622, 0.7154, 0.7632, 0.2421, 0.4462, 0.7627] +2026-04-09 00:26:45.924555: Epoch time: 103.07 s +2026-04-09 00:26:46.959247: +2026-04-09 00:26:46.961378: Epoch 277 +2026-04-09 00:26:46.963233: Current learning rate: 0.00747 +2026-04-09 00:28:30.793187: train_loss -0.1544 +2026-04-09 00:28:30.799481: val_loss -0.1402 +2026-04-09 00:28:30.801570: Pseudo dice [0.504, 0.0918, 0.5581, 0.7423, 0.4612, 0.2517, 0.8006] +2026-04-09 00:28:30.804247: Epoch time: 103.84 s +2026-04-09 00:28:31.861666: +2026-04-09 00:28:31.863717: Epoch 278 +2026-04-09 00:28:31.865567: Current learning rate: 0.00746 +2026-04-09 00:30:15.789334: train_loss -0.1616 +2026-04-09 00:30:15.796103: val_loss -0.1504 +2026-04-09 00:30:15.798646: Pseudo dice [0.7364, 0.2018, 0.5254, 0.8786, 0.3175, 0.4522, 0.8942] +2026-04-09 00:30:15.801461: Epoch time: 103.93 s +2026-04-09 00:30:16.856318: +2026-04-09 00:30:16.858929: Epoch 279 +2026-04-09 00:30:16.863231: Current learning rate: 0.00745 +2026-04-09 00:32:00.001266: train_loss -0.167 +2026-04-09 
00:32:00.007298: val_loss -0.1269 +2026-04-09 00:32:00.009171: Pseudo dice [0.4302, 0.1118, 0.7031, 0.7927, 0.2982, 0.4359, 0.6611] +2026-04-09 00:32:00.012588: Epoch time: 103.15 s +2026-04-09 00:32:01.050908: +2026-04-09 00:32:01.052846: Epoch 280 +2026-04-09 00:32:01.054499: Current learning rate: 0.00744 +2026-04-09 00:33:43.448229: train_loss -0.1666 +2026-04-09 00:33:43.453555: val_loss -0.1108 +2026-04-09 00:33:43.456328: Pseudo dice [0.3314, 0.4055, 0.7272, 0.8094, 0.4059, 0.6523, 0.2803] +2026-04-09 00:33:43.459035: Epoch time: 102.4 s +2026-04-09 00:33:44.543869: +2026-04-09 00:33:44.546870: Epoch 281 +2026-04-09 00:33:44.551061: Current learning rate: 0.00743 +2026-04-09 00:35:28.779747: train_loss -0.1663 +2026-04-09 00:35:28.784873: val_loss -0.0964 +2026-04-09 00:35:28.787746: Pseudo dice [0.3184, 0.1322, 0.5607, 0.8343, 0.2965, 0.552, 0.4504] +2026-04-09 00:35:28.789827: Epoch time: 104.24 s +2026-04-09 00:35:29.849676: +2026-04-09 00:35:29.851732: Epoch 282 +2026-04-09 00:35:29.853617: Current learning rate: 0.00742 +2026-04-09 00:37:12.799966: train_loss -0.1458 +2026-04-09 00:37:12.805763: val_loss -0.1104 +2026-04-09 00:37:12.807706: Pseudo dice [0.4409, 0.4012, 0.605, 0.4179, 0.185, 0.1693, 0.5713] +2026-04-09 00:37:12.809695: Epoch time: 102.95 s +2026-04-09 00:37:13.884252: +2026-04-09 00:37:13.886580: Epoch 283 +2026-04-09 00:37:13.888918: Current learning rate: 0.00741 +2026-04-09 00:38:56.747899: train_loss -0.1424 +2026-04-09 00:38:56.755254: val_loss -0.0884 +2026-04-09 00:38:56.758481: Pseudo dice [0.5249, 0.2882, 0.4512, 0.1646, 0.1686, 0.6103, 0.5638] +2026-04-09 00:38:56.761747: Epoch time: 102.87 s +2026-04-09 00:38:57.839154: +2026-04-09 00:38:57.842818: Epoch 284 +2026-04-09 00:38:57.844324: Current learning rate: 0.0074 +2026-04-09 00:40:40.453203: train_loss -0.1466 +2026-04-09 00:40:40.463577: val_loss -0.1395 +2026-04-09 00:40:40.465875: Pseudo dice [0.7198, 0.4968, 0.7369, 0.7257, 0.4137, 0.7401, 0.7441] +2026-04-09 
00:40:40.468351: Epoch time: 102.62 s +2026-04-09 00:40:41.519512: +2026-04-09 00:40:41.523996: Epoch 285 +2026-04-09 00:40:41.527135: Current learning rate: 0.00739 +2026-04-09 00:42:25.935687: train_loss -0.1478 +2026-04-09 00:42:25.942935: val_loss -0.0968 +2026-04-09 00:42:25.944765: Pseudo dice [0.3497, 0.2344, 0.8563, 0.6866, 0.1827, 0.2237, 0.5946] +2026-04-09 00:42:25.947177: Epoch time: 104.42 s +2026-04-09 00:42:26.978493: +2026-04-09 00:42:26.980500: Epoch 286 +2026-04-09 00:42:26.982456: Current learning rate: 0.00738 +2026-04-09 00:44:11.016501: train_loss -0.1516 +2026-04-09 00:44:11.023449: val_loss -0.1374 +2026-04-09 00:44:11.025702: Pseudo dice [0.558, 0.127, 0.318, 0.7532, 0.47, 0.6431, 0.4971] +2026-04-09 00:44:11.028040: Epoch time: 104.04 s +2026-04-09 00:44:12.119968: +2026-04-09 00:44:12.121734: Epoch 287 +2026-04-09 00:44:12.123689: Current learning rate: 0.00738 +2026-04-09 00:45:54.673190: train_loss -0.1485 +2026-04-09 00:45:54.680666: val_loss -0.1119 +2026-04-09 00:45:54.683213: Pseudo dice [0.3302, 0.1997, 0.5816, 0.3524, 0.3602, 0.6084, 0.6132] +2026-04-09 00:45:54.686155: Epoch time: 102.56 s +2026-04-09 00:45:55.762215: +2026-04-09 00:45:55.764724: Epoch 288 +2026-04-09 00:45:55.766825: Current learning rate: 0.00737 +2026-04-09 00:47:38.406085: train_loss -0.1646 +2026-04-09 00:47:38.411689: val_loss -0.1073 +2026-04-09 00:47:38.413797: Pseudo dice [0.6533, 0.2311, 0.704, 0.748, 0.0599, 0.748, 0.3453] +2026-04-09 00:47:38.415924: Epoch time: 102.65 s +2026-04-09 00:47:39.485573: +2026-04-09 00:47:39.487607: Epoch 289 +2026-04-09 00:47:39.489154: Current learning rate: 0.00736 +2026-04-09 00:49:22.488311: train_loss -0.1724 +2026-04-09 00:49:22.495471: val_loss -0.1476 +2026-04-09 00:49:22.498245: Pseudo dice [0.5065, 0.4882, 0.7619, 0.3737, 0.4807, 0.5957, 0.6775] +2026-04-09 00:49:22.501858: Epoch time: 103.01 s +2026-04-09 00:49:23.588337: +2026-04-09 00:49:23.590594: Epoch 290 +2026-04-09 00:49:23.593275: Current learning rate: 
0.00735 +2026-04-09 00:51:07.262369: train_loss -0.159 +2026-04-09 00:51:07.270106: val_loss -0.1358 +2026-04-09 00:51:07.272404: Pseudo dice [0.6268, 0.4924, 0.7631, 0.7244, 0.1473, 0.5258, 0.424] +2026-04-09 00:51:07.275183: Epoch time: 103.68 s +2026-04-09 00:51:08.358329: +2026-04-09 00:51:08.360424: Epoch 291 +2026-04-09 00:51:08.362525: Current learning rate: 0.00734 +2026-04-09 00:52:52.923991: train_loss -0.176 +2026-04-09 00:52:52.929712: val_loss -0.1068 +2026-04-09 00:52:52.932516: Pseudo dice [0.5612, 0.2757, 0.3321, 0.7305, 0.189, 0.819, 0.7728] +2026-04-09 00:52:52.934534: Epoch time: 104.57 s +2026-04-09 00:52:54.056563: +2026-04-09 00:52:54.058924: Epoch 292 +2026-04-09 00:52:54.061822: Current learning rate: 0.00733 +2026-04-09 00:54:37.388268: train_loss -0.1748 +2026-04-09 00:54:37.393919: val_loss -0.1078 +2026-04-09 00:54:37.395874: Pseudo dice [0.7362, 0.05, 0.6666, 0.5859, 0.2678, 0.6197, 0.5198] +2026-04-09 00:54:37.397989: Epoch time: 103.33 s +2026-04-09 00:54:38.510926: +2026-04-09 00:54:38.513143: Epoch 293 +2026-04-09 00:54:38.515236: Current learning rate: 0.00732 +2026-04-09 00:56:22.033260: train_loss -0.169 +2026-04-09 00:56:22.049755: val_loss -0.1462 +2026-04-09 00:56:22.053797: Pseudo dice [0.5529, 0.3341, 0.7544, 0.1834, 0.3175, 0.4911, 0.6857] +2026-04-09 00:56:22.058295: Epoch time: 103.53 s +2026-04-09 00:56:23.142633: +2026-04-09 00:56:23.144904: Epoch 294 +2026-04-09 00:56:23.146776: Current learning rate: 0.00731 +2026-04-09 00:58:05.187679: train_loss -0.1675 +2026-04-09 00:58:05.195264: val_loss -0.0961 +2026-04-09 00:58:05.203338: Pseudo dice [0.2915, 0.4909, 0.5729, 0.6711, 0.0885, 0.5135, 0.6261] +2026-04-09 00:58:05.217641: Epoch time: 102.05 s +2026-04-09 00:58:06.310688: +2026-04-09 00:58:06.312236: Epoch 295 +2026-04-09 00:58:06.313698: Current learning rate: 0.0073 +2026-04-09 00:59:49.027641: train_loss -0.1695 +2026-04-09 00:59:49.032688: val_loss -0.1765 +2026-04-09 00:59:49.034517: Pseudo dice [0.7125, 
0.4444, 0.7122, 0.7353, 0.2259, 0.6996, 0.8831] +2026-04-09 00:59:49.036271: Epoch time: 102.72 s +2026-04-09 00:59:50.110334: +2026-04-09 00:59:50.112191: Epoch 296 +2026-04-09 00:59:50.114019: Current learning rate: 0.00729 +2026-04-09 01:01:32.754796: train_loss -0.1666 +2026-04-09 01:01:32.759888: val_loss -0.1262 +2026-04-09 01:01:32.761982: Pseudo dice [0.5366, 0.0661, 0.7215, 0.7935, 0.4768, 0.5079, 0.4555] +2026-04-09 01:01:32.763989: Epoch time: 102.65 s +2026-04-09 01:01:33.895243: +2026-04-09 01:01:33.897982: Epoch 297 +2026-04-09 01:01:33.899811: Current learning rate: 0.00728 +2026-04-09 01:03:18.279028: train_loss -0.1658 +2026-04-09 01:03:18.286837: val_loss -0.1218 +2026-04-09 01:03:18.290452: Pseudo dice [0.232, 0.1435, 0.6339, 0.4789, 0.2098, 0.7005, 0.8465] +2026-04-09 01:03:18.292948: Epoch time: 104.39 s +2026-04-09 01:03:19.385446: +2026-04-09 01:03:19.388542: Epoch 298 +2026-04-09 01:03:19.390939: Current learning rate: 0.00727 +2026-04-09 01:05:03.482181: train_loss -0.1789 +2026-04-09 01:05:03.491166: val_loss -0.133 +2026-04-09 01:05:03.493382: Pseudo dice [0.4183, 0.1699, 0.8199, 0.5295, 0.3304, 0.6233, 0.7443] +2026-04-09 01:05:03.495736: Epoch time: 104.1 s +2026-04-09 01:05:04.580536: +2026-04-09 01:05:04.582480: Epoch 299 +2026-04-09 01:05:04.584523: Current learning rate: 0.00726 +2026-04-09 01:06:47.672209: train_loss -0.1717 +2026-04-09 01:06:47.680807: val_loss -0.1335 +2026-04-09 01:06:47.683278: Pseudo dice [0.6864, 0.1742, 0.5291, 0.7434, 0.2897, 0.5648, 0.6754] +2026-04-09 01:06:47.685561: Epoch time: 103.09 s +2026-04-09 01:06:50.697433: +2026-04-09 01:06:50.699445: Epoch 300 +2026-04-09 01:06:50.701834: Current learning rate: 0.00725 +2026-04-09 01:08:33.344785: train_loss -0.1708 +2026-04-09 01:08:33.358261: val_loss -0.1129 +2026-04-09 01:08:33.362859: Pseudo dice [0.3231, 0.2165, 0.6921, 0.4617, 0.2755, 0.7132, 0.3952] +2026-04-09 01:08:33.367928: Epoch time: 102.65 s +2026-04-09 01:08:34.465010: +2026-04-09 
01:08:34.467701: Epoch 301 +2026-04-09 01:08:34.469956: Current learning rate: 0.00724 +2026-04-09 01:10:18.006610: train_loss -0.1582 +2026-04-09 01:10:18.017297: val_loss -0.1243 +2026-04-09 01:10:18.019855: Pseudo dice [0.5261, 0.4933, 0.7817, 0.8355, 0.2291, 0.3928, 0.6929] +2026-04-09 01:10:18.022186: Epoch time: 103.55 s +2026-04-09 01:10:19.101214: +2026-04-09 01:10:19.103710: Epoch 302 +2026-04-09 01:10:19.106186: Current learning rate: 0.00724 +2026-04-09 01:12:02.479390: train_loss -0.1621 +2026-04-09 01:12:02.486309: val_loss -0.1203 +2026-04-09 01:12:02.488840: Pseudo dice [0.7148, 0.4005, 0.5586, 0.5914, 0.3067, 0.6172, 0.7435] +2026-04-09 01:12:02.491105: Epoch time: 103.38 s +2026-04-09 01:12:02.493016: Yayy! New best EMA pseudo Dice: 0.5116 +2026-04-09 01:12:05.394521: +2026-04-09 01:12:05.396531: Epoch 303 +2026-04-09 01:12:05.398549: Current learning rate: 0.00723 +2026-04-09 01:13:48.270999: train_loss -0.164 +2026-04-09 01:13:48.277227: val_loss -0.1004 +2026-04-09 01:13:48.279780: Pseudo dice [0.3057, 0.4933, 0.7205, 0.717, 0.2276, 0.4558, 0.6713] +2026-04-09 01:13:48.283266: Epoch time: 102.88 s +2026-04-09 01:13:48.285256: Yayy! New best EMA pseudo Dice: 0.5118 +2026-04-09 01:13:51.048055: +2026-04-09 01:13:51.049953: Epoch 304 +2026-04-09 01:13:51.051449: Current learning rate: 0.00722 +2026-04-09 01:15:33.768950: train_loss -0.1605 +2026-04-09 01:15:33.776383: val_loss -0.114 +2026-04-09 01:15:33.778576: Pseudo dice [0.3864, 0.2268, 0.6722, 0.5201, 0.3259, 0.1492, 0.7763] +2026-04-09 01:15:33.780517: Epoch time: 102.72 s +2026-04-09 01:15:34.864458: +2026-04-09 01:15:34.867414: Epoch 305 +2026-04-09 01:15:34.869606: Current learning rate: 0.00721 +2026-04-09 01:17:17.235305: train_loss -0.1535 +2026-04-09 01:17:17.241562: val_loss -0.1393 +2026-04-09 01:17:17.244882: Pseudo dice [0.7193, 0.5806, 0.7306, 0.7414, 0.2135, 0.5413, 0.755] +2026-04-09 01:17:17.247416: Epoch time: 102.37 s +2026-04-09 01:17:17.249377: Yayy! 
New best EMA pseudo Dice: 0.515 +2026-04-09 01:17:20.080919: +2026-04-09 01:17:20.083085: Epoch 306 +2026-04-09 01:17:20.085147: Current learning rate: 0.0072 +2026-04-09 01:19:03.709718: train_loss -0.1568 +2026-04-09 01:19:03.714547: val_loss -0.1377 +2026-04-09 01:19:03.716918: Pseudo dice [0.4639, 0.2366, 0.4614, 0.6199, 0.2544, 0.3766, 0.7265] +2026-04-09 01:19:03.718869: Epoch time: 103.63 s +2026-04-09 01:19:04.789980: +2026-04-09 01:19:04.792066: Epoch 307 +2026-04-09 01:19:04.793874: Current learning rate: 0.00719 +2026-04-09 01:20:47.361921: train_loss -0.1631 +2026-04-09 01:20:47.369745: val_loss -0.1363 +2026-04-09 01:20:47.372822: Pseudo dice [0.469, 0.1707, 0.6042, 0.7377, 0.4139, 0.6508, 0.5463] +2026-04-09 01:20:47.374790: Epoch time: 102.58 s +2026-04-09 01:20:48.467428: +2026-04-09 01:20:48.469831: Epoch 308 +2026-04-09 01:20:48.482128: Current learning rate: 0.00718 +2026-04-09 01:22:32.382133: train_loss -0.1814 +2026-04-09 01:22:32.387627: val_loss -0.145 +2026-04-09 01:22:32.390011: Pseudo dice [0.5744, 0.4229, 0.6657, 0.7008, 0.419, 0.4236, 0.8665] +2026-04-09 01:22:32.392677: Epoch time: 103.92 s +2026-04-09 01:22:32.395418: Yayy! New best EMA pseudo Dice: 0.5161 +2026-04-09 01:22:35.248588: +2026-04-09 01:22:35.251346: Epoch 309 +2026-04-09 01:22:35.254119: Current learning rate: 0.00717 +2026-04-09 01:24:17.943467: train_loss -0.1648 +2026-04-09 01:24:17.949022: val_loss -0.1317 +2026-04-09 01:24:17.950972: Pseudo dice [0.2143, 0.4905, 0.6571, 0.7529, 0.316, 0.5945, 0.6263] +2026-04-09 01:24:17.953216: Epoch time: 102.7 s +2026-04-09 01:24:17.954897: Yayy! 
New best EMA pseudo Dice: 0.5167 +2026-04-09 01:24:20.705684: +2026-04-09 01:24:20.707510: Epoch 310 +2026-04-09 01:24:20.709143: Current learning rate: 0.00716 +2026-04-09 01:26:04.198613: train_loss -0.1626 +2026-04-09 01:26:04.209476: val_loss -0.1277 +2026-04-09 01:26:04.213954: Pseudo dice [0.4007, 0.3259, 0.793, 0.629, 0.4928, 0.3723, 0.8126] +2026-04-09 01:26:04.217071: Epoch time: 103.5 s +2026-04-09 01:26:04.219765: Yayy! New best EMA pseudo Dice: 0.5197 +2026-04-09 01:26:07.007246: +2026-04-09 01:26:07.008984: Epoch 311 +2026-04-09 01:26:07.011034: Current learning rate: 0.00715 +2026-04-09 01:27:49.620410: train_loss -0.1536 +2026-04-09 01:27:49.625577: val_loss -0.1374 +2026-04-09 01:27:49.627385: Pseudo dice [0.6104, 0.303, 0.6788, 0.5001, 0.3524, 0.6155, 0.5172] +2026-04-09 01:27:49.629705: Epoch time: 102.62 s +2026-04-09 01:27:50.734481: +2026-04-09 01:27:50.736514: Epoch 312 +2026-04-09 01:27:50.738557: Current learning rate: 0.00714 +2026-04-09 01:29:34.040979: train_loss -0.165 +2026-04-09 01:29:34.046467: val_loss -0.1463 +2026-04-09 01:29:34.048846: Pseudo dice [0.4486, 0.1226, 0.659, 0.7653, 0.5039, 0.8142, 0.7648] +2026-04-09 01:29:34.051049: Epoch time: 103.31 s +2026-04-09 01:29:34.053281: Yayy! New best EMA pseudo Dice: 0.5252 +2026-04-09 01:29:36.863930: +2026-04-09 01:29:36.866314: Epoch 313 +2026-04-09 01:29:36.868810: Current learning rate: 0.00713 +2026-04-09 01:31:20.138190: train_loss -0.162 +2026-04-09 01:31:20.146317: val_loss -0.1666 +2026-04-09 01:31:20.148946: Pseudo dice [0.4741, 0.3497, 0.6048, 0.8634, 0.2181, 0.6333, 0.8846] +2026-04-09 01:31:20.151736: Epoch time: 103.28 s +2026-04-09 01:31:20.155382: Yayy! 
New best EMA pseudo Dice: 0.5302 +2026-04-09 01:31:22.967750: +2026-04-09 01:31:22.971481: Epoch 314 +2026-04-09 01:31:22.973922: Current learning rate: 0.00712 +2026-04-09 01:33:06.502326: train_loss -0.1706 +2026-04-09 01:33:06.509844: val_loss -0.1213 +2026-04-09 01:33:06.512245: Pseudo dice [0.3565, 0.2993, 0.7022, 0.7126, 0.4336, 0.1795, 0.8792] +2026-04-09 01:33:06.514393: Epoch time: 103.54 s +2026-04-09 01:33:07.610386: +2026-04-09 01:33:07.612299: Epoch 315 +2026-04-09 01:33:07.614041: Current learning rate: 0.00711 +2026-04-09 01:34:52.364512: train_loss -0.1691 +2026-04-09 01:34:52.370279: val_loss -0.1316 +2026-04-09 01:34:52.372553: Pseudo dice [0.5326, 0.3792, 0.2907, 0.4555, 0.3921, 0.1592, 0.6566] +2026-04-09 01:34:52.374760: Epoch time: 104.76 s +2026-04-09 01:34:53.473475: +2026-04-09 01:34:53.475469: Epoch 316 +2026-04-09 01:34:53.477577: Current learning rate: 0.0071 +2026-04-09 01:36:36.367213: train_loss -0.1595 +2026-04-09 01:36:36.376131: val_loss -0.105 +2026-04-09 01:36:36.379792: Pseudo dice [0.1087, 0.0169, 0.7722, 0.7934, 0.4553, 0.7548, 0.576] +2026-04-09 01:36:36.382942: Epoch time: 102.9 s +2026-04-09 01:36:37.495896: +2026-04-09 01:36:37.498823: Epoch 317 +2026-04-09 01:36:37.500762: Current learning rate: 0.0071 +2026-04-09 01:38:20.453986: train_loss -0.1565 +2026-04-09 01:38:20.460160: val_loss -0.1185 +2026-04-09 01:38:20.462708: Pseudo dice [0.376, 0.2612, 0.7201, 0.4224, 0.2477, 0.3703, 0.7668] +2026-04-09 01:38:20.465246: Epoch time: 102.96 s +2026-04-09 01:38:21.562551: +2026-04-09 01:38:21.565217: Epoch 318 +2026-04-09 01:38:21.567512: Current learning rate: 0.00709 +2026-04-09 01:40:05.612444: train_loss -0.1367 +2026-04-09 01:40:05.618798: val_loss -0.1474 +2026-04-09 01:40:05.620625: Pseudo dice [0.2596, 0.3898, 0.8174, 0.8946, 0.532, 0.5982, 0.7615] +2026-04-09 01:40:05.622972: Epoch time: 104.05 s +2026-04-09 01:40:06.710520: +2026-04-09 01:40:06.712593: Epoch 319 +2026-04-09 01:40:06.714409: Current learning rate: 
0.00708 +2026-04-09 01:41:50.236522: train_loss -0.1597 +2026-04-09 01:41:50.243525: val_loss -0.1431 +2026-04-09 01:41:50.245861: Pseudo dice [0.2137, 0.3338, 0.7462, 0.809, 0.2892, 0.7231, 0.4712] +2026-04-09 01:41:50.248324: Epoch time: 103.53 s +2026-04-09 01:41:51.335906: +2026-04-09 01:41:51.339198: Epoch 320 +2026-04-09 01:41:51.343871: Current learning rate: 0.00707 +2026-04-09 01:43:34.520772: train_loss -0.1853 +2026-04-09 01:43:34.529819: val_loss -0.1316 +2026-04-09 01:43:34.533552: Pseudo dice [0.5643, 0.4544, 0.5179, 0.7588, 0.2475, 0.5153, 0.4867] +2026-04-09 01:43:34.536201: Epoch time: 103.19 s +2026-04-09 01:43:35.634650: +2026-04-09 01:43:35.636557: Epoch 321 +2026-04-09 01:43:35.640982: Current learning rate: 0.00706 +2026-04-09 01:45:18.191564: train_loss -0.1728 +2026-04-09 01:45:18.199136: val_loss -0.1265 +2026-04-09 01:45:18.201194: Pseudo dice [0.3132, 0.2619, 0.5786, 0.6756, 0.314, 0.5311, 0.7658] +2026-04-09 01:45:18.203507: Epoch time: 102.56 s +2026-04-09 01:45:19.299779: +2026-04-09 01:45:19.302054: Epoch 322 +2026-04-09 01:45:19.304745: Current learning rate: 0.00705 +2026-04-09 01:47:02.863967: train_loss -0.1593 +2026-04-09 01:47:02.872603: val_loss -0.1352 +2026-04-09 01:47:02.875485: Pseudo dice [0.4256, 0.0522, 0.5042, 0.7321, 0.3658, 0.7124, 0.6208] +2026-04-09 01:47:02.878400: Epoch time: 103.57 s +2026-04-09 01:47:03.981248: +2026-04-09 01:47:03.983324: Epoch 323 +2026-04-09 01:47:03.985686: Current learning rate: 0.00704 +2026-04-09 01:48:47.421274: train_loss -0.1639 +2026-04-09 01:48:47.433085: val_loss -0.1231 +2026-04-09 01:48:47.440267: Pseudo dice [0.4022, 0.3622, 0.7716, 0.5508, 0.3085, 0.6782, 0.7886] +2026-04-09 01:48:47.443764: Epoch time: 103.44 s +2026-04-09 01:48:48.537006: +2026-04-09 01:48:48.539205: Epoch 324 +2026-04-09 01:48:48.541410: Current learning rate: 0.00703 +2026-04-09 01:50:32.029764: train_loss -0.1804 +2026-04-09 01:50:32.048960: val_loss -0.154 +2026-04-09 01:50:32.051631: Pseudo dice [0.5011, 
0.1302, 0.7607, 0.786, 0.3823, 0.7956, 0.8863] +2026-04-09 01:50:32.053324: Epoch time: 103.5 s +2026-04-09 01:50:33.161295: +2026-04-09 01:50:33.165105: Epoch 325 +2026-04-09 01:50:33.167028: Current learning rate: 0.00702 +2026-04-09 01:52:16.125643: train_loss -0.1685 +2026-04-09 01:52:16.133749: val_loss -0.1265 +2026-04-09 01:52:16.136277: Pseudo dice [0.6073, 0.0202, 0.6457, 0.8221, 0.3446, 0.77, 0.5165] +2026-04-09 01:52:16.138935: Epoch time: 102.97 s +2026-04-09 01:52:17.247575: +2026-04-09 01:52:17.249754: Epoch 326 +2026-04-09 01:52:17.251739: Current learning rate: 0.00701 +2026-04-09 01:54:00.589328: train_loss -0.1741 +2026-04-09 01:54:00.598179: val_loss -0.1398 +2026-04-09 01:54:00.600201: Pseudo dice [0.3722, 0.4074, 0.5537, 0.7954, 0.2399, 0.5999, 0.6885] +2026-04-09 01:54:00.602681: Epoch time: 103.35 s +2026-04-09 01:54:01.681708: +2026-04-09 01:54:01.683879: Epoch 327 +2026-04-09 01:54:01.686461: Current learning rate: 0.007 +2026-04-09 01:55:45.050723: train_loss -0.1828 +2026-04-09 01:55:45.056386: val_loss -0.1254 +2026-04-09 01:55:45.058497: Pseudo dice [0.4383, 0.2804, 0.3867, 0.5593, 0.385, 0.723, 0.5707] +2026-04-09 01:55:45.060562: Epoch time: 103.37 s +2026-04-09 01:55:46.144509: +2026-04-09 01:55:46.147153: Epoch 328 +2026-04-09 01:55:46.149188: Current learning rate: 0.00699 +2026-04-09 01:57:29.997963: train_loss -0.1755 +2026-04-09 01:57:30.002944: val_loss -0.1233 +2026-04-09 01:57:30.005419: Pseudo dice [0.5222, 0.5046, 0.2248, 0.7847, 0.4166, 0.3051, 0.8651] +2026-04-09 01:57:30.008128: Epoch time: 103.86 s +2026-04-09 01:57:31.097958: +2026-04-09 01:57:31.099709: Epoch 329 +2026-04-09 01:57:31.101394: Current learning rate: 0.00698 +2026-04-09 01:59:14.180521: train_loss -0.1779 +2026-04-09 01:59:14.190501: val_loss -0.1265 +2026-04-09 01:59:14.192514: Pseudo dice [0.721, 0.126, 0.3824, 0.3253, 0.4865, 0.7865, 0.8621] +2026-04-09 01:59:14.194695: Epoch time: 103.09 s +2026-04-09 01:59:15.282367: +2026-04-09 01:59:15.284210: 
Epoch 330 +2026-04-09 01:59:15.286396: Current learning rate: 0.00697 +2026-04-09 02:00:58.821241: train_loss -0.1626 +2026-04-09 02:00:58.828050: val_loss -0.1317 +2026-04-09 02:00:58.830556: Pseudo dice [0.6235, 0.5831, 0.7425, 0.6839, 0.277, 0.7419, 0.7586] +2026-04-09 02:00:58.832584: Epoch time: 103.54 s +2026-04-09 02:00:58.834319: Yayy! New best EMA pseudo Dice: 0.5316 +2026-04-09 02:01:01.733502: +2026-04-09 02:01:01.736295: Epoch 331 +2026-04-09 02:01:01.738698: Current learning rate: 0.00696 +2026-04-09 02:02:44.705750: train_loss -0.1832 +2026-04-09 02:02:44.713410: val_loss -0.1324 +2026-04-09 02:02:44.715394: Pseudo dice [0.5084, 0.2518, 0.6544, 0.2394, 0.0718, 0.7326, 0.7756] +2026-04-09 02:02:44.718127: Epoch time: 102.98 s +2026-04-09 02:02:45.803514: +2026-04-09 02:02:45.806092: Epoch 332 +2026-04-09 02:02:45.809977: Current learning rate: 0.00696 +2026-04-09 02:04:28.145354: train_loss -0.1715 +2026-04-09 02:04:28.154241: val_loss -0.1459 +2026-04-09 02:04:28.157119: Pseudo dice [0.5221, 0.226, 0.7395, 0.8489, 0.3127, 0.4965, 0.844] +2026-04-09 02:04:28.160016: Epoch time: 102.35 s +2026-04-09 02:04:29.234831: +2026-04-09 02:04:29.236948: Epoch 333 +2026-04-09 02:04:29.239517: Current learning rate: 0.00695 +2026-04-09 02:06:12.316284: train_loss -0.184 +2026-04-09 02:06:12.321943: val_loss -0.1508 +2026-04-09 02:06:12.323980: Pseudo dice [0.7053, 0.3428, 0.6219, 0.6215, 0.5397, 0.5774, 0.7126] +2026-04-09 02:06:12.325699: Epoch time: 103.08 s +2026-04-09 02:06:12.327882: Yayy! New best EMA pseudo Dice: 0.5351 +2026-04-09 02:06:15.111802: +2026-04-09 02:06:15.113483: Epoch 334 +2026-04-09 02:06:15.115030: Current learning rate: 0.00694 +2026-04-09 02:07:59.290027: train_loss -0.1778 +2026-04-09 02:07:59.297373: val_loss -0.1466 +2026-04-09 02:07:59.299541: Pseudo dice [0.615, 0.4842, 0.6968, 0.7114, 0.3455, 0.7763, 0.647] +2026-04-09 02:07:59.301824: Epoch time: 104.18 s +2026-04-09 02:07:59.304136: Yayy! 
New best EMA pseudo Dice: 0.5427 +2026-04-09 02:08:02.187924: +2026-04-09 02:08:02.190094: Epoch 335 +2026-04-09 02:08:02.192129: Current learning rate: 0.00693 +2026-04-09 02:09:46.422966: train_loss -0.1623 +2026-04-09 02:09:46.428579: val_loss -0.1333 +2026-04-09 02:09:46.430715: Pseudo dice [0.6002, 0.6239, 0.6942, 0.3031, 0.203, 0.5316, 0.8111] +2026-04-09 02:09:46.433050: Epoch time: 104.24 s +2026-04-09 02:09:47.553856: +2026-04-09 02:09:47.557461: Epoch 336 +2026-04-09 02:09:47.559715: Current learning rate: 0.00692 +2026-04-09 02:11:31.019330: train_loss -0.1775 +2026-04-09 02:11:31.025948: val_loss -0.1613 +2026-04-09 02:11:31.028356: Pseudo dice [0.7876, 0.1762, 0.6743, 0.8319, 0.4973, 0.6531, 0.6096] +2026-04-09 02:11:31.030397: Epoch time: 103.47 s +2026-04-09 02:11:31.032169: Yayy! New best EMA pseudo Dice: 0.5484 +2026-04-09 02:11:33.957872: +2026-04-09 02:11:33.963508: Epoch 337 +2026-04-09 02:11:33.967199: Current learning rate: 0.00691 +2026-04-09 02:13:18.864182: train_loss -0.1917 +2026-04-09 02:13:18.870421: val_loss -0.15 +2026-04-09 02:13:18.872569: Pseudo dice [0.6452, 0.2322, 0.6812, 0.6064, 0.4222, 0.5481, 0.6721] +2026-04-09 02:13:18.874845: Epoch time: 104.91 s +2026-04-09 02:13:19.967635: +2026-04-09 02:13:19.969593: Epoch 338 +2026-04-09 02:13:19.971437: Current learning rate: 0.0069 +2026-04-09 02:15:03.931217: train_loss -0.1691 +2026-04-09 02:15:03.937141: val_loss -0.0994 +2026-04-09 02:15:03.939404: Pseudo dice [0.2341, 0.4601, 0.5343, 0.7031, 0.157, 0.4284, 0.7305] +2026-04-09 02:15:03.942185: Epoch time: 103.97 s +2026-04-09 02:15:05.033767: +2026-04-09 02:15:05.035618: Epoch 339 +2026-04-09 02:15:05.037437: Current learning rate: 0.00689 +2026-04-09 02:16:48.189281: train_loss -0.1688 +2026-04-09 02:16:48.199629: val_loss -0.144 +2026-04-09 02:16:48.202308: Pseudo dice [0.3989, 0.5447, 0.6294, 0.5991, 0.1206, 0.8058, 0.8884] +2026-04-09 02:16:48.204676: Epoch time: 103.16 s +2026-04-09 02:16:49.317409: +2026-04-09 
02:16:49.320148: Epoch 340 +2026-04-09 02:16:49.322038: Current learning rate: 0.00688 +2026-04-09 02:18:32.614754: train_loss -0.1798 +2026-04-09 02:18:32.621489: val_loss -0.1281 +2026-04-09 02:18:32.625137: Pseudo dice [0.2491, 0.1271, 0.628, 0.6333, 0.4762, 0.729, 0.7343] +2026-04-09 02:18:32.626999: Epoch time: 103.3 s +2026-04-09 02:18:33.726849: +2026-04-09 02:18:33.729259: Epoch 341 +2026-04-09 02:18:33.732404: Current learning rate: 0.00687 +2026-04-09 02:20:17.890524: train_loss -0.1624 +2026-04-09 02:20:17.898872: val_loss -0.1453 +2026-04-09 02:20:17.901606: Pseudo dice [0.6272, 0.2847, 0.5063, 0.7821, 0.2782, 0.7186, 0.739] +2026-04-09 02:20:17.903687: Epoch time: 104.17 s +2026-04-09 02:20:19.028169: +2026-04-09 02:20:19.030005: Epoch 342 +2026-04-09 02:20:19.033498: Current learning rate: 0.00686 +2026-04-09 02:22:02.772105: train_loss -0.1706 +2026-04-09 02:22:02.777406: val_loss -0.1332 +2026-04-09 02:22:02.779861: Pseudo dice [0.1734, 0.0916, 0.6985, 0.6047, 0.4051, 0.6803, 0.8853] +2026-04-09 02:22:02.781501: Epoch time: 103.75 s +2026-04-09 02:22:03.890657: +2026-04-09 02:22:03.893243: Epoch 343 +2026-04-09 02:22:03.895097: Current learning rate: 0.00685 +2026-04-09 02:23:47.054706: train_loss -0.1725 +2026-04-09 02:23:47.059610: val_loss -0.148 +2026-04-09 02:23:47.062387: Pseudo dice [0.4384, 0.165, 0.3009, 0.7209, 0.5281, 0.6121, 0.5274] +2026-04-09 02:23:47.065037: Epoch time: 103.17 s +2026-04-09 02:23:48.146192: +2026-04-09 02:23:48.148389: Epoch 344 +2026-04-09 02:23:48.151200: Current learning rate: 0.00684 +2026-04-09 02:25:31.372253: train_loss -0.1727 +2026-04-09 02:25:31.379286: val_loss -0.1454 +2026-04-09 02:25:31.381277: Pseudo dice [0.3752, 0.1658, 0.7074, 0.4302, 0.5694, 0.783, 0.8984] +2026-04-09 02:25:31.383988: Epoch time: 103.23 s +2026-04-09 02:25:32.509487: +2026-04-09 02:25:32.511915: Epoch 345 +2026-04-09 02:25:32.514697: Current learning rate: 0.00683 +2026-04-09 02:27:15.640390: train_loss -0.174 +2026-04-09 
02:27:15.646896: val_loss -0.1218 +2026-04-09 02:27:15.648932: Pseudo dice [0.3748, 0.2147, 0.4714, 0.2474, 0.3892, 0.4041, 0.7654] +2026-04-09 02:27:15.651364: Epoch time: 103.13 s +2026-04-09 02:27:16.742117: +2026-04-09 02:27:16.744042: Epoch 346 +2026-04-09 02:27:16.745979: Current learning rate: 0.00682 +2026-04-09 02:28:59.600452: train_loss -0.1457 +2026-04-09 02:28:59.609167: val_loss -0.1073 +2026-04-09 02:28:59.611808: Pseudo dice [0.5083, 0.4399, 0.6316, 0.8259, 0.2652, 0.3343, 0.6998] +2026-04-09 02:28:59.614620: Epoch time: 102.86 s +2026-04-09 02:29:00.710897: +2026-04-09 02:29:00.714213: Epoch 347 +2026-04-09 02:29:00.716140: Current learning rate: 0.00681 +2026-04-09 02:30:44.330264: train_loss -0.167 +2026-04-09 02:30:44.335632: val_loss -0.1127 +2026-04-09 02:30:44.338764: Pseudo dice [0.6204, 0.0215, 0.7295, 0.3598, 0.3664, 0.598, 0.5122] +2026-04-09 02:30:44.341840: Epoch time: 103.62 s +2026-04-09 02:30:45.442098: +2026-04-09 02:30:45.444184: Epoch 348 +2026-04-09 02:30:45.445983: Current learning rate: 0.0068 +2026-04-09 02:32:28.984452: train_loss -0.1704 +2026-04-09 02:32:28.990699: val_loss -0.1204 +2026-04-09 02:32:28.993224: Pseudo dice [0.4098, 0.3332, 0.7552, 0.6961, 0.3905, 0.2202, 0.831] +2026-04-09 02:32:28.997392: Epoch time: 103.55 s +2026-04-09 02:32:30.072187: +2026-04-09 02:32:30.074223: Epoch 349 +2026-04-09 02:32:30.087096: Current learning rate: 0.0068 +2026-04-09 02:34:13.748464: train_loss -0.1622 +2026-04-09 02:34:13.754965: val_loss -0.1107 +2026-04-09 02:34:13.756675: Pseudo dice [0.6316, 0.3425, 0.7362, 0.4935, 0.4197, 0.5462, 0.8755] +2026-04-09 02:34:13.758618: Epoch time: 103.68 s +2026-04-09 02:34:16.604212: +2026-04-09 02:34:16.606042: Epoch 350 +2026-04-09 02:34:16.608206: Current learning rate: 0.00679 +2026-04-09 02:35:59.333197: train_loss -0.1615 +2026-04-09 02:35:59.338461: val_loss -0.0803 +2026-04-09 02:35:59.340314: Pseudo dice [0.6688, 0.4347, 0.1993, 0.7803, 0.262, 0.4235, 0.6156] +2026-04-09 
02:35:59.342494: Epoch time: 102.73 s +2026-04-09 02:36:00.457754: +2026-04-09 02:36:00.459824: Epoch 351 +2026-04-09 02:36:00.461421: Current learning rate: 0.00678 +2026-04-09 02:37:44.101374: train_loss -0.1709 +2026-04-09 02:37:44.118055: val_loss -0.1376 +2026-04-09 02:37:44.127770: Pseudo dice [0.5695, 0.3065, 0.6889, 0.8413, 0.4162, 0.5044, 0.8638] +2026-04-09 02:37:44.132100: Epoch time: 103.65 s +2026-04-09 02:37:45.230494: +2026-04-09 02:37:45.233103: Epoch 352 +2026-04-09 02:37:45.236479: Current learning rate: 0.00677 +2026-04-09 02:39:27.801050: train_loss -0.1623 +2026-04-09 02:39:27.806950: val_loss -0.1358 +2026-04-09 02:39:27.809123: Pseudo dice [0.2856, 0.1834, 0.6832, 0.79, 0.203, 0.6313, 0.8098] +2026-04-09 02:39:27.811487: Epoch time: 102.57 s +2026-04-09 02:39:30.008706: +2026-04-09 02:39:30.010406: Epoch 353 +2026-04-09 02:39:30.012067: Current learning rate: 0.00676 +2026-04-09 02:41:14.043889: train_loss -0.1748 +2026-04-09 02:41:14.059190: val_loss -0.1315 +2026-04-09 02:41:14.064732: Pseudo dice [0.3107, 0.2376, 0.7383, 0.6976, 0.2079, 0.5681, 0.8905] +2026-04-09 02:41:14.068995: Epoch time: 104.04 s +2026-04-09 02:41:15.178399: +2026-04-09 02:41:15.181479: Epoch 354 +2026-04-09 02:41:15.183786: Current learning rate: 0.00675 +2026-04-09 02:42:58.172431: train_loss -0.1783 +2026-04-09 02:42:58.179532: val_loss -0.1694 +2026-04-09 02:42:58.181915: Pseudo dice [0.7824, 0.4675, 0.6922, 0.7385, 0.5121, 0.6202, 0.7965] +2026-04-09 02:42:58.184889: Epoch time: 103.0 s +2026-04-09 02:42:59.280907: +2026-04-09 02:42:59.284061: Epoch 355 +2026-04-09 02:42:59.286560: Current learning rate: 0.00674 +2026-04-09 02:44:42.555949: train_loss -0.1702 +2026-04-09 02:44:42.562000: val_loss -0.1517 +2026-04-09 02:44:42.565043: Pseudo dice [0.5865, 0.0733, 0.5423, 0.7538, 0.2428, 0.7267, 0.6779] +2026-04-09 02:44:42.568829: Epoch time: 103.28 s +2026-04-09 02:44:43.696779: +2026-04-09 02:44:43.699399: Epoch 356 +2026-04-09 02:44:43.701459: Current learning 
rate: 0.00673 +2026-04-09 02:46:27.308228: train_loss -0.1731 +2026-04-09 02:46:27.314564: val_loss -0.1745 +2026-04-09 02:46:27.316791: Pseudo dice [0.2474, 0.3939, 0.7313, 0.8584, 0.4566, 0.7757, 0.8728] +2026-04-09 02:46:27.318556: Epoch time: 103.61 s +2026-04-09 02:46:28.442497: +2026-04-09 02:46:28.445156: Epoch 357 +2026-04-09 02:46:28.447132: Current learning rate: 0.00672 +2026-04-09 02:48:11.946462: train_loss -0.179 +2026-04-09 02:48:11.954016: val_loss -0.1166 +2026-04-09 02:48:11.957021: Pseudo dice [0.337, 0.1515, 0.6497, 0.5156, 0.103, 0.3774, 0.5114] +2026-04-09 02:48:11.960478: Epoch time: 103.51 s +2026-04-09 02:48:13.092227: +2026-04-09 02:48:13.097215: Epoch 358 +2026-04-09 02:48:13.099163: Current learning rate: 0.00671 +2026-04-09 02:49:56.668178: train_loss -0.1737 +2026-04-09 02:49:56.673645: val_loss -0.1193 +2026-04-09 02:49:56.675984: Pseudo dice [0.344, 0.5259, 0.5947, 0.8206, 0.3307, 0.5956, 0.8017] +2026-04-09 02:49:56.677768: Epoch time: 103.58 s +2026-04-09 02:49:57.786001: +2026-04-09 02:49:57.788487: Epoch 359 +2026-04-09 02:49:57.790398: Current learning rate: 0.0067 +2026-04-09 02:51:41.495811: train_loss -0.1815 +2026-04-09 02:51:41.501020: val_loss -0.1539 +2026-04-09 02:51:41.502993: Pseudo dice [0.6705, 0.219, 0.7648, 0.6687, 0.1184, 0.7479, 0.8689] +2026-04-09 02:51:41.504772: Epoch time: 103.71 s +2026-04-09 02:51:42.616609: +2026-04-09 02:51:42.619852: Epoch 360 +2026-04-09 02:51:42.622062: Current learning rate: 0.00669 +2026-04-09 02:53:27.647238: train_loss -0.1786 +2026-04-09 02:53:27.652912: val_loss -0.1376 +2026-04-09 02:53:27.655490: Pseudo dice [0.4724, 0.0971, 0.6572, 0.2784, 0.4093, 0.5146, 0.548] +2026-04-09 02:53:27.657918: Epoch time: 105.03 s +2026-04-09 02:53:28.767950: +2026-04-09 02:53:28.770504: Epoch 361 +2026-04-09 02:53:28.773046: Current learning rate: 0.00668 +2026-04-09 02:55:12.348191: train_loss -0.1771 +2026-04-09 02:55:12.354950: val_loss -0.1313 +2026-04-09 02:55:12.357011: Pseudo dice 
[0.2296, 0.5125, 0.6127, 0.7869, 0.2753, 0.642, 0.531] +2026-04-09 02:55:12.364147: Epoch time: 103.58 s +2026-04-09 02:55:13.487321: +2026-04-09 02:55:13.490284: Epoch 362 +2026-04-09 02:55:13.492201: Current learning rate: 0.00667 +2026-04-09 02:56:56.798688: train_loss -0.18 +2026-04-09 02:56:56.804640: val_loss -0.1418 +2026-04-09 02:56:56.807161: Pseudo dice [0.5868, 0.2279, 0.6478, 0.6707, 0.448, 0.246, 0.59] +2026-04-09 02:56:56.809470: Epoch time: 103.31 s +2026-04-09 02:56:57.939325: +2026-04-09 02:56:57.942376: Epoch 363 +2026-04-09 02:56:57.944055: Current learning rate: 0.00666 +2026-04-09 02:58:41.747208: train_loss -0.1658 +2026-04-09 02:58:41.752736: val_loss -0.1525 +2026-04-09 02:58:41.756345: Pseudo dice [0.2774, 0.0801, 0.7644, 0.5534, 0.3174, 0.7408, 0.7315] +2026-04-09 02:58:41.759711: Epoch time: 103.81 s +2026-04-09 02:58:42.880861: +2026-04-09 02:58:42.882738: Epoch 364 +2026-04-09 02:58:42.884686: Current learning rate: 0.00665 +2026-04-09 03:00:26.350595: train_loss -0.1752 +2026-04-09 03:00:26.356397: val_loss -0.1564 +2026-04-09 03:00:26.358173: Pseudo dice [0.6981, 0.4568, 0.6604, 0.535, 0.4434, 0.6211, 0.7027] +2026-04-09 03:00:26.360032: Epoch time: 103.47 s +2026-04-09 03:00:27.483524: +2026-04-09 03:00:27.486072: Epoch 365 +2026-04-09 03:00:27.488630: Current learning rate: 0.00665 +2026-04-09 03:02:10.628547: train_loss -0.1636 +2026-04-09 03:02:10.633245: val_loss -0.1322 +2026-04-09 03:02:10.634974: Pseudo dice [0.5487, 0.623, 0.6439, 0.3749, 0.2883, 0.2582, 0.8019] +2026-04-09 03:02:10.637010: Epoch time: 103.15 s +2026-04-09 03:02:11.743248: +2026-04-09 03:02:11.745805: Epoch 366 +2026-04-09 03:02:11.748052: Current learning rate: 0.00664 +2026-04-09 03:03:57.287240: train_loss -0.1551 +2026-04-09 03:03:57.293921: val_loss -0.12 +2026-04-09 03:03:57.296073: Pseudo dice [0.6351, 0.2403, 0.5672, 0.6304, 0.2838, 0.4374, 0.6143] +2026-04-09 03:03:57.300414: Epoch time: 105.55 s +2026-04-09 03:03:58.418214: +2026-04-09 
03:03:58.419948: Epoch 367 +2026-04-09 03:03:58.421615: Current learning rate: 0.00663 +2026-04-09 03:05:42.534587: train_loss -0.1601 +2026-04-09 03:05:42.543105: val_loss -0.1433 +2026-04-09 03:05:42.545683: Pseudo dice [0.4063, 0.1346, 0.6147, 0.7809, 0.1999, 0.7194, 0.5858] +2026-04-09 03:05:42.548491: Epoch time: 104.12 s +2026-04-09 03:05:43.673599: +2026-04-09 03:05:43.676985: Epoch 368 +2026-04-09 03:05:43.680808: Current learning rate: 0.00662 +2026-04-09 03:07:26.974659: train_loss -0.1809 +2026-04-09 03:07:26.980765: val_loss -0.1458 +2026-04-09 03:07:26.982814: Pseudo dice [0.6824, 0.2305, 0.7489, 0.8307, 0.2249, 0.6617, 0.7701] +2026-04-09 03:07:26.984609: Epoch time: 103.3 s +2026-04-09 03:07:28.111274: +2026-04-09 03:07:28.114637: Epoch 369 +2026-04-09 03:07:28.117797: Current learning rate: 0.00661 +2026-04-09 03:09:12.123999: train_loss -0.1769 +2026-04-09 03:09:12.132281: val_loss -0.1467 +2026-04-09 03:09:12.135113: Pseudo dice [0.3931, 0.4467, 0.6731, 0.5001, 0.3688, 0.7534, 0.8675] +2026-04-09 03:09:12.137573: Epoch time: 104.02 s +2026-04-09 03:09:13.263016: +2026-04-09 03:09:13.266073: Epoch 370 +2026-04-09 03:09:13.269434: Current learning rate: 0.0066 +2026-04-09 03:10:56.566934: train_loss -0.1748 +2026-04-09 03:10:56.572930: val_loss -0.1121 +2026-04-09 03:10:56.576427: Pseudo dice [0.22, 0.3315, 0.476, 0.095, 0.1845, 0.6249, 0.5306] +2026-04-09 03:10:56.578447: Epoch time: 103.31 s +2026-04-09 03:10:57.705254: +2026-04-09 03:10:57.709786: Epoch 371 +2026-04-09 03:10:57.713032: Current learning rate: 0.00659 +2026-04-09 03:12:41.561052: train_loss -0.1779 +2026-04-09 03:12:41.565886: val_loss -0.1375 +2026-04-09 03:12:41.567905: Pseudo dice [0.6679, 0.0983, 0.7605, 0.1031, 0.2752, 0.5156, 0.6809] +2026-04-09 03:12:41.569999: Epoch time: 103.86 s +2026-04-09 03:12:42.691312: +2026-04-09 03:12:42.692961: Epoch 372 +2026-04-09 03:12:42.694705: Current learning rate: 0.00658 +2026-04-09 03:14:28.007119: train_loss -0.1839 +2026-04-09 
03:14:28.014834: val_loss -0.141 +2026-04-09 03:14:28.018543: Pseudo dice [0.4387, 0.2673, 0.7787, 0.5664, 0.3328, 0.5348, 0.772] +2026-04-09 03:14:28.020881: Epoch time: 105.32 s +2026-04-09 03:14:30.306507: +2026-04-09 03:14:30.308280: Epoch 373 +2026-04-09 03:14:30.310812: Current learning rate: 0.00657 +2026-04-09 03:16:13.396568: train_loss -0.1657 +2026-04-09 03:16:13.409405: val_loss -0.1069 +2026-04-09 03:16:13.411730: Pseudo dice [0.32, 0.3777, 0.6497, 0.3072, 0.2996, 0.5571, 0.3252] +2026-04-09 03:16:13.414641: Epoch time: 103.09 s +2026-04-09 03:16:14.532821: +2026-04-09 03:16:14.534857: Epoch 374 +2026-04-09 03:16:14.536551: Current learning rate: 0.00656 +2026-04-09 03:17:57.571813: train_loss -0.1629 +2026-04-09 03:17:57.578146: val_loss -0.112 +2026-04-09 03:17:57.580757: Pseudo dice [0.2378, 0.3914, 0.6931, 0.4504, 0.3467, 0.4385, 0.6267] +2026-04-09 03:17:57.582976: Epoch time: 103.04 s +2026-04-09 03:17:58.764257: +2026-04-09 03:17:58.767880: Epoch 375 +2026-04-09 03:17:58.770157: Current learning rate: 0.00655 +2026-04-09 03:19:42.422547: train_loss -0.1444 +2026-04-09 03:19:42.429961: val_loss -0.1077 +2026-04-09 03:19:42.432641: Pseudo dice [0.6232, 0.4136, 0.4605, 0.1679, 0.2209, 0.1574, 0.5318] +2026-04-09 03:19:42.435148: Epoch time: 103.66 s +2026-04-09 03:19:43.547030: +2026-04-09 03:19:43.548974: Epoch 376 +2026-04-09 03:19:43.551238: Current learning rate: 0.00654 +2026-04-09 03:21:26.616172: train_loss -0.1693 +2026-04-09 03:21:26.626299: val_loss -0.1229 +2026-04-09 03:21:26.629593: Pseudo dice [0.2337, 0.4777, 0.7452, 0.8483, 0.3791, 0.4417, 0.6647] +2026-04-09 03:21:26.631830: Epoch time: 103.07 s +2026-04-09 03:21:27.758228: +2026-04-09 03:21:27.760878: Epoch 377 +2026-04-09 03:21:27.763268: Current learning rate: 0.00653 +2026-04-09 03:23:12.276183: train_loss -0.1851 +2026-04-09 03:23:12.282091: val_loss -0.1293 +2026-04-09 03:23:12.283915: Pseudo dice [0.4064, 0.448, 0.6087, 0.7972, 0.4261, 0.5512, 0.7596] +2026-04-09 
03:23:12.285750: Epoch time: 104.52 s +2026-04-09 03:23:13.413984: +2026-04-09 03:23:13.415852: Epoch 378 +2026-04-09 03:23:13.417650: Current learning rate: 0.00652 +2026-04-09 03:24:56.513676: train_loss -0.173 +2026-04-09 03:24:56.520701: val_loss -0.1547 +2026-04-09 03:24:56.523320: Pseudo dice [0.7606, 0.0708, 0.342, 0.7284, 0.2961, 0.4536, 0.7476] +2026-04-09 03:24:56.525887: Epoch time: 103.1 s +2026-04-09 03:24:57.639981: +2026-04-09 03:24:57.642172: Epoch 379 +2026-04-09 03:24:57.644109: Current learning rate: 0.00651 +2026-04-09 03:26:40.791723: train_loss -0.1742 +2026-04-09 03:26:40.797141: val_loss -0.1339 +2026-04-09 03:26:40.799526: Pseudo dice [0.4633, 0.2227, 0.6311, 0.7338, 0.2967, 0.3316, 0.6745] +2026-04-09 03:26:40.802009: Epoch time: 103.16 s +2026-04-09 03:26:41.914655: +2026-04-09 03:26:41.916748: Epoch 380 +2026-04-09 03:26:41.918664: Current learning rate: 0.0065 +2026-04-09 03:28:24.380695: train_loss -0.1578 +2026-04-09 03:28:24.387791: val_loss -0.1519 +2026-04-09 03:28:24.390129: Pseudo dice [0.5277, 0.5216, 0.8234, 0.7914, 0.4519, 0.4238, 0.8663] +2026-04-09 03:28:24.392228: Epoch time: 102.47 s +2026-04-09 03:28:25.499919: +2026-04-09 03:28:25.502245: Epoch 381 +2026-04-09 03:28:25.504096: Current learning rate: 0.00649 +2026-04-09 03:30:09.162924: train_loss -0.1768 +2026-04-09 03:30:09.168165: val_loss -0.1486 +2026-04-09 03:30:09.170470: Pseudo dice [0.7159, 0.2661, 0.6861, 0.5922, 0.3936, 0.4776, 0.6193] +2026-04-09 03:30:09.173159: Epoch time: 103.67 s +2026-04-09 03:30:10.304968: +2026-04-09 03:30:10.306834: Epoch 382 +2026-04-09 03:30:10.308286: Current learning rate: 0.00648 +2026-04-09 03:31:53.331529: train_loss -0.1785 +2026-04-09 03:31:53.336012: val_loss -0.1426 +2026-04-09 03:31:53.337564: Pseudo dice [0.6347, 0.2169, 0.6647, 0.6799, 0.1306, 0.4975, 0.7108] +2026-04-09 03:31:53.339399: Epoch time: 103.03 s +2026-04-09 03:31:54.473681: +2026-04-09 03:31:54.475632: Epoch 383 +2026-04-09 03:31:54.477260: Current learning 
rate: 0.00648 +2026-04-09 03:33:38.219883: train_loss -0.1516 +2026-04-09 03:33:38.224971: val_loss -0.1582 +2026-04-09 03:33:38.226820: Pseudo dice [0.6744, 0.3546, 0.7644, 0.1788, 0.4862, 0.5873, 0.7606] +2026-04-09 03:33:38.228741: Epoch time: 103.75 s +2026-04-09 03:33:39.363814: +2026-04-09 03:33:39.365666: Epoch 384 +2026-04-09 03:33:39.369262: Current learning rate: 0.00647 +2026-04-09 03:35:22.613214: train_loss -0.1825 +2026-04-09 03:35:22.618763: val_loss -0.1426 +2026-04-09 03:35:22.620705: Pseudo dice [0.4074, 0.392, 0.7891, 0.747, 0.2798, 0.7668, 0.6951] +2026-04-09 03:35:22.622857: Epoch time: 103.25 s +2026-04-09 03:35:23.932600: +2026-04-09 03:35:23.934675: Epoch 385 +2026-04-09 03:35:23.936424: Current learning rate: 0.00646 +2026-04-09 03:37:07.320908: train_loss -0.1851 +2026-04-09 03:37:07.328520: val_loss -0.1278 +2026-04-09 03:37:07.330912: Pseudo dice [0.3454, 0.2174, 0.6197, 0.4889, 0.1914, 0.7257, 0.7102] +2026-04-09 03:37:07.332904: Epoch time: 103.39 s +2026-04-09 03:37:08.484261: +2026-04-09 03:37:08.485939: Epoch 386 +2026-04-09 03:37:08.488082: Current learning rate: 0.00645 +2026-04-09 03:38:53.169643: train_loss -0.1805 +2026-04-09 03:38:53.177681: val_loss -0.15 +2026-04-09 03:38:53.180030: Pseudo dice [0.6834, 0.2676, 0.5274, 0.8172, 0.3768, 0.4406, 0.7636] +2026-04-09 03:38:53.183547: Epoch time: 104.69 s +2026-04-09 03:38:54.323719: +2026-04-09 03:38:54.326195: Epoch 387 +2026-04-09 03:38:54.329347: Current learning rate: 0.00644 +2026-04-09 03:40:38.247929: train_loss -0.1793 +2026-04-09 03:40:38.252879: val_loss -0.1674 +2026-04-09 03:40:38.254846: Pseudo dice [0.3091, 0.1719, 0.8254, 0.6988, 0.2525, 0.7179, 0.7109] +2026-04-09 03:40:38.256984: Epoch time: 103.93 s +2026-04-09 03:40:39.397315: +2026-04-09 03:40:39.399640: Epoch 388 +2026-04-09 03:40:39.401839: Current learning rate: 0.00643 +2026-04-09 03:42:25.067311: train_loss -0.1717 +2026-04-09 03:42:25.084410: val_loss -0.1122 +2026-04-09 03:42:25.088716: Pseudo dice 
[0.4861, 0.2824, 0.5188, 0.807, 0.1398, 0.7023, 0.7023] +2026-04-09 03:42:25.093132: Epoch time: 105.67 s +2026-04-09 03:42:26.231836: +2026-04-09 03:42:26.234883: Epoch 389 +2026-04-09 03:42:26.237323: Current learning rate: 0.00642 +2026-04-09 03:44:10.323552: train_loss -0.1707 +2026-04-09 03:44:10.328903: val_loss -0.1405 +2026-04-09 03:44:10.331060: Pseudo dice [0.2867, 0.2672, 0.6434, 0.539, 0.4908, 0.7469, 0.8776] +2026-04-09 03:44:10.333660: Epoch time: 104.09 s +2026-04-09 03:44:11.458607: +2026-04-09 03:44:11.460834: Epoch 390 +2026-04-09 03:44:11.463359: Current learning rate: 0.00641 +2026-04-09 03:45:54.449564: train_loss -0.1932 +2026-04-09 03:45:54.454838: val_loss -0.116 +2026-04-09 03:45:54.456869: Pseudo dice [0.3362, 0.2897, 0.4261, 0.8507, 0.144, 0.408, 0.7955] +2026-04-09 03:45:54.459051: Epoch time: 102.99 s +2026-04-09 03:45:55.596701: +2026-04-09 03:45:55.599449: Epoch 391 +2026-04-09 03:45:55.602336: Current learning rate: 0.0064 +2026-04-09 03:47:41.470217: train_loss -0.1818 +2026-04-09 03:47:41.478735: val_loss -0.1446 +2026-04-09 03:47:41.481532: Pseudo dice [0.4447, 0.3952, 0.7014, 0.7638, 0.4658, 0.7675, 0.6929] +2026-04-09 03:47:41.484429: Epoch time: 105.88 s +2026-04-09 03:47:42.637283: +2026-04-09 03:47:42.642113: Epoch 392 +2026-04-09 03:47:42.647277: Current learning rate: 0.00639 +2026-04-09 03:49:27.968813: train_loss -0.166 +2026-04-09 03:49:27.975094: val_loss -0.129 +2026-04-09 03:49:27.977772: Pseudo dice [0.277, 0.1991, 0.5699, 0.7885, 0.528, 0.5829, 0.6838] +2026-04-09 03:49:27.980196: Epoch time: 105.33 s +2026-04-09 03:49:29.126795: +2026-04-09 03:49:29.129081: Epoch 393 +2026-04-09 03:49:29.130928: Current learning rate: 0.00638 +2026-04-09 03:51:14.402093: train_loss -0.1856 +2026-04-09 03:51:14.411839: val_loss -0.1426 +2026-04-09 03:51:14.416035: Pseudo dice [0.3132, 0.1143, 0.4125, 0.5655, 0.4997, 0.5275, 0.8448] +2026-04-09 03:51:14.418698: Epoch time: 105.28 s +2026-04-09 03:51:15.570387: +2026-04-09 
03:51:15.572359: Epoch 394 +2026-04-09 03:51:15.573902: Current learning rate: 0.00637 +2026-04-09 03:53:00.815419: train_loss -0.1737 +2026-04-09 03:53:00.820374: val_loss -0.1329 +2026-04-09 03:53:00.822973: Pseudo dice [0.4683, 0.5911, 0.707, 0.8598, 0.3955, 0.459, 0.3404] +2026-04-09 03:53:00.825243: Epoch time: 105.25 s +2026-04-09 03:53:01.973871: +2026-04-09 03:53:01.976192: Epoch 395 +2026-04-09 03:53:01.978940: Current learning rate: 0.00636 +2026-04-09 03:54:46.167279: train_loss -0.1791 +2026-04-09 03:54:46.177094: val_loss -0.1486 +2026-04-09 03:54:46.183111: Pseudo dice [0.6826, 0.1255, 0.7663, 0.4504, 0.3984, 0.6819, 0.7787] +2026-04-09 03:54:46.194689: Epoch time: 104.2 s +2026-04-09 03:54:47.339492: +2026-04-09 03:54:47.341796: Epoch 396 +2026-04-09 03:54:47.343681: Current learning rate: 0.00635 +2026-04-09 03:56:31.192821: train_loss -0.1729 +2026-04-09 03:56:31.199555: val_loss -0.1352 +2026-04-09 03:56:31.201736: Pseudo dice [0.2957, 0.1588, 0.5567, 0.2048, 0.3855, 0.4504, 0.6639] +2026-04-09 03:56:31.203883: Epoch time: 103.86 s +2026-04-09 03:56:32.330368: +2026-04-09 03:56:32.333242: Epoch 397 +2026-04-09 03:56:32.335700: Current learning rate: 0.00634 +2026-04-09 03:58:16.998883: train_loss -0.1823 +2026-04-09 03:58:17.006748: val_loss -0.1194 +2026-04-09 03:58:17.009531: Pseudo dice [0.6437, 0.4153, 0.6501, 0.7874, 0.1131, 0.2473, 0.4642] +2026-04-09 03:58:17.012018: Epoch time: 104.67 s +2026-04-09 03:58:18.133004: +2026-04-09 03:58:18.135711: Epoch 398 +2026-04-09 03:58:18.137832: Current learning rate: 0.00633 +2026-04-09 04:00:03.866014: train_loss -0.1675 +2026-04-09 04:00:03.875182: val_loss -0.1702 +2026-04-09 04:00:03.877964: Pseudo dice [0.6597, 0.0652, 0.5576, 0.7268, 0.5129, 0.8001, 0.8944] +2026-04-09 04:00:03.880742: Epoch time: 105.74 s +2026-04-09 04:00:05.023083: +2026-04-09 04:00:05.025635: Epoch 399 +2026-04-09 04:00:05.027734: Current learning rate: 0.00632 +2026-04-09 04:01:52.018218: train_loss -0.1735 +2026-04-09 
04:01:52.025039: val_loss -0.1304 +2026-04-09 04:01:52.027094: Pseudo dice [0.3246, 0.2214, 0.7179, 0.8014, 0.2441, 0.7306, 0.8217] +2026-04-09 04:01:52.028928: Epoch time: 107.0 s +2026-04-09 04:01:55.048678: +2026-04-09 04:01:55.051200: Epoch 400 +2026-04-09 04:01:55.053264: Current learning rate: 0.00631 +2026-04-09 04:03:43.754889: train_loss -0.1824 +2026-04-09 04:03:43.763798: val_loss -0.1297 +2026-04-09 04:03:43.766380: Pseudo dice [0.3345, 0.2194, 0.8202, 0.2428, 0.4573, 0.698, 0.757] +2026-04-09 04:03:43.769587: Epoch time: 108.71 s +2026-04-09 04:03:44.908226: +2026-04-09 04:03:44.911625: Epoch 401 +2026-04-09 04:03:44.915523: Current learning rate: 0.0063 +2026-04-09 04:05:28.961542: train_loss -0.1726 +2026-04-09 04:05:28.968440: val_loss -0.1439 +2026-04-09 04:05:28.970249: Pseudo dice [0.3834, 0.3675, 0.7741, 0.4885, 0.3631, 0.7277, 0.5119] +2026-04-09 04:05:28.973150: Epoch time: 104.06 s +2026-04-09 04:05:30.123373: +2026-04-09 04:05:30.126229: Epoch 402 +2026-04-09 04:05:30.128801: Current learning rate: 0.0063 +2026-04-09 04:07:15.016857: train_loss -0.1728 +2026-04-09 04:07:15.023697: val_loss -0.1636 +2026-04-09 04:07:15.026184: Pseudo dice [0.5598, 0.0606, 0.7769, 0.7711, 0.287, 0.5869, 0.8808] +2026-04-09 04:07:15.028887: Epoch time: 104.9 s +2026-04-09 04:07:16.176522: +2026-04-09 04:07:16.178799: Epoch 403 +2026-04-09 04:07:16.181440: Current learning rate: 0.00629 +2026-04-09 04:08:59.515100: train_loss -0.1787 +2026-04-09 04:08:59.524437: val_loss -0.1534 +2026-04-09 04:08:59.527547: Pseudo dice [0.4041, 0.3919, 0.5682, 0.7738, 0.1483, 0.5718, 0.8187] +2026-04-09 04:08:59.530411: Epoch time: 103.34 s +2026-04-09 04:09:00.707635: +2026-04-09 04:09:00.710006: Epoch 404 +2026-04-09 04:09:00.712432: Current learning rate: 0.00628 +2026-04-09 04:10:44.662611: train_loss -0.1656 +2026-04-09 04:10:44.672278: val_loss -0.1129 +2026-04-09 04:10:44.674670: Pseudo dice [0.2976, 0.2017, 0.1388, 0.3681, 0.316, 0.4257, 0.6581] +2026-04-09 
04:10:44.676791: Epoch time: 103.96 s +2026-04-09 04:10:45.817047: +2026-04-09 04:10:45.819736: Epoch 405 +2026-04-09 04:10:45.821501: Current learning rate: 0.00627 +2026-04-09 04:12:30.583947: train_loss -0.1725 +2026-04-09 04:12:30.588993: val_loss -0.1371 +2026-04-09 04:12:30.591135: Pseudo dice [0.4044, 0.3711, 0.6623, 0.0764, 0.4087, 0.2612, 0.7822] +2026-04-09 04:12:30.593248: Epoch time: 104.77 s +2026-04-09 04:12:31.734612: +2026-04-09 04:12:31.737819: Epoch 406 +2026-04-09 04:12:31.739957: Current learning rate: 0.00626 +2026-04-09 04:14:16.305176: train_loss -0.1818 +2026-04-09 04:14:16.311618: val_loss -0.1292 +2026-04-09 04:14:16.314609: Pseudo dice [0.3433, 0.1187, 0.6916, 0.7658, 0.4817, 0.5661, 0.4406] +2026-04-09 04:14:16.316610: Epoch time: 104.57 s +2026-04-09 04:14:17.480138: +2026-04-09 04:14:17.482041: Epoch 407 +2026-04-09 04:14:17.484580: Current learning rate: 0.00625 +2026-04-09 04:16:01.721477: train_loss -0.1827 +2026-04-09 04:16:01.728433: val_loss -0.0934 +2026-04-09 04:16:01.730424: Pseudo dice [0.1929, 0.1039, 0.4596, 0.7664, 0.302, 0.5588, 0.613] +2026-04-09 04:16:01.732441: Epoch time: 104.24 s +2026-04-09 04:16:02.907560: +2026-04-09 04:16:02.909480: Epoch 408 +2026-04-09 04:16:02.912141: Current learning rate: 0.00624 +2026-04-09 04:17:47.115423: train_loss -0.1801 +2026-04-09 04:17:47.126435: val_loss -0.1364 +2026-04-09 04:17:47.129737: Pseudo dice [0.4995, 0.0813, 0.3974, 0.6318, 0.3368, 0.7179, 0.7655] +2026-04-09 04:17:47.133931: Epoch time: 104.21 s +2026-04-09 04:17:48.270345: +2026-04-09 04:17:48.273681: Epoch 409 +2026-04-09 04:17:48.276486: Current learning rate: 0.00623 +2026-04-09 04:19:31.894764: train_loss -0.1778 +2026-04-09 04:19:31.900331: val_loss -0.1007 +2026-04-09 04:19:31.902369: Pseudo dice [0.509, 0.2722, 0.4805, 0.3864, 0.2699, 0.7241, 0.7803] +2026-04-09 04:19:31.904582: Epoch time: 103.63 s +2026-04-09 04:19:33.036158: +2026-04-09 04:19:33.039064: Epoch 410 +2026-04-09 04:19:33.043434: Current learning 
rate: 0.00622 +2026-04-09 04:21:16.872750: train_loss -0.1659 +2026-04-09 04:21:16.877968: val_loss -0.1349 +2026-04-09 04:21:16.880131: Pseudo dice [0.3429, 0.1153, 0.7191, 0.461, 0.4276, 0.8437, 0.5828] +2026-04-09 04:21:16.882647: Epoch time: 103.84 s +2026-04-09 04:21:17.964644: +2026-04-09 04:21:17.966619: Epoch 411 +2026-04-09 04:21:17.968311: Current learning rate: 0.00621 +2026-04-09 04:23:02.577409: train_loss -0.1687 +2026-04-09 04:23:02.584712: val_loss -0.1439 +2026-04-09 04:23:02.587609: Pseudo dice [0.4457, 0.5632, 0.7046, 0.2419, 0.5043, 0.4694, 0.7557] +2026-04-09 04:23:02.590415: Epoch time: 104.62 s +2026-04-09 04:23:03.656197: +2026-04-09 04:23:03.657941: Epoch 412 +2026-04-09 04:23:03.660547: Current learning rate: 0.0062 +2026-04-09 04:24:51.248413: train_loss -0.1799 +2026-04-09 04:24:51.257122: val_loss -0.1419 +2026-04-09 04:24:51.259206: Pseudo dice [0.3474, 0.4464, 0.6549, 0.7509, 0.2445, 0.7894, 0.6363] +2026-04-09 04:24:51.263445: Epoch time: 107.6 s +2026-04-09 04:24:52.315364: +2026-04-09 04:24:52.318717: Epoch 413 +2026-04-09 04:24:52.321516: Current learning rate: 0.00619 +2026-04-09 04:26:37.244243: train_loss -0.1791 +2026-04-09 04:26:37.250731: val_loss -0.1035 +2026-04-09 04:26:37.252859: Pseudo dice [0.3747, 0.031, 0.6561, 0.0944, 0.2504, 0.4902, 0.3476] +2026-04-09 04:26:37.254842: Epoch time: 104.93 s +2026-04-09 04:26:38.336654: +2026-04-09 04:26:38.339760: Epoch 414 +2026-04-09 04:26:38.345661: Current learning rate: 0.00618 +2026-04-09 04:28:22.267756: train_loss -0.1774 +2026-04-09 04:28:22.275704: val_loss -0.1559 +2026-04-09 04:28:22.278204: Pseudo dice [0.7791, 0.1199, 0.4816, 0.8108, 0.5134, 0.6171, 0.7613] +2026-04-09 04:28:22.280908: Epoch time: 103.93 s +2026-04-09 04:28:23.384538: +2026-04-09 04:28:23.386749: Epoch 415 +2026-04-09 04:28:23.388641: Current learning rate: 0.00617 +2026-04-09 04:30:07.431479: train_loss -0.1856 +2026-04-09 04:30:07.440103: val_loss -0.1535 +2026-04-09 04:30:07.442885: Pseudo dice 
[0.513, 0.4052, 0.7035, 0.4795, 0.4031, 0.5257, 0.7187] +2026-04-09 04:30:07.445548: Epoch time: 104.05 s +2026-04-09 04:30:08.511999: +2026-04-09 04:30:08.518244: Epoch 416 +2026-04-09 04:30:08.525769: Current learning rate: 0.00616 +2026-04-09 04:31:53.749563: train_loss -0.193 +2026-04-09 04:31:53.757076: val_loss -0.1416 +2026-04-09 04:31:53.758710: Pseudo dice [0.4498, 0.6334, 0.7365, 0.4534, 0.5048, 0.7198, 0.714] +2026-04-09 04:31:53.761291: Epoch time: 105.24 s +2026-04-09 04:31:54.855258: +2026-04-09 04:31:54.859452: Epoch 417 +2026-04-09 04:31:54.862884: Current learning rate: 0.00615 +2026-04-09 04:33:40.695149: train_loss -0.1845 +2026-04-09 04:33:40.705539: val_loss -0.1504 +2026-04-09 04:33:40.708718: Pseudo dice [0.5901, 0.3842, 0.7, 0.8639, 0.3526, 0.4539, 0.7771] +2026-04-09 04:33:40.711016: Epoch time: 105.84 s +2026-04-09 04:33:41.776818: +2026-04-09 04:33:41.779485: Epoch 418 +2026-04-09 04:33:41.781822: Current learning rate: 0.00614 +2026-04-09 04:35:25.321162: train_loss -0.1732 +2026-04-09 04:35:25.333763: val_loss -0.1495 +2026-04-09 04:35:25.337529: Pseudo dice [0.6942, 0.2831, 0.6716, 0.5526, 0.3809, 0.2878, 0.8854] +2026-04-09 04:35:25.341442: Epoch time: 103.55 s +2026-04-09 04:35:26.394674: +2026-04-09 04:35:26.396871: Epoch 419 +2026-04-09 04:35:26.399058: Current learning rate: 0.00613 +2026-04-09 04:37:11.967398: train_loss -0.1644 +2026-04-09 04:37:11.984561: val_loss -0.1152 +2026-04-09 04:37:11.989160: Pseudo dice [0.6629, 0.2251, 0.6922, 0.7139, 0.2137, 0.6381, 0.7105] +2026-04-09 04:37:11.993682: Epoch time: 105.58 s +2026-04-09 04:37:13.060232: +2026-04-09 04:37:13.062371: Epoch 420 +2026-04-09 04:37:13.065525: Current learning rate: 0.00612 +2026-04-09 04:39:00.470628: train_loss -0.1701 +2026-04-09 04:39:00.477475: val_loss -0.137 +2026-04-09 04:39:00.479468: Pseudo dice [0.4551, 0.4961, 0.8049, 0.5242, 0.483, 0.4924, 0.4976] +2026-04-09 04:39:00.481720: Epoch time: 107.41 s +2026-04-09 04:39:01.567184: +2026-04-09 
04:39:01.569602: Epoch 421 +2026-04-09 04:39:01.572135: Current learning rate: 0.00612 +2026-04-09 04:40:45.585414: train_loss -0.1695 +2026-04-09 04:40:45.594225: val_loss -0.1169 +2026-04-09 04:40:45.596264: Pseudo dice [0.7611, 0.1615, 0.8702, 0.7956, 0.2492, 0.7203, 0.5579] +2026-04-09 04:40:45.598413: Epoch time: 104.02 s +2026-04-09 04:40:46.725530: +2026-04-09 04:40:46.727607: Epoch 422 +2026-04-09 04:40:46.729388: Current learning rate: 0.00611 +2026-04-09 04:42:30.066731: train_loss -0.1824 +2026-04-09 04:42:30.073296: val_loss -0.1299 +2026-04-09 04:42:30.075761: Pseudo dice [0.4143, 0.0835, 0.6294, 0.8646, 0.4876, 0.4663, 0.549] +2026-04-09 04:42:30.078143: Epoch time: 103.34 s +2026-04-09 04:42:31.132996: +2026-04-09 04:42:31.135794: Epoch 423 +2026-04-09 04:42:31.137862: Current learning rate: 0.0061 +2026-04-09 04:44:18.867541: train_loss -0.1886 +2026-04-09 04:44:18.880336: val_loss -0.1151 +2026-04-09 04:44:18.887001: Pseudo dice [0.266, 0.1614, 0.6646, 0.736, 0.2089, 0.449, 0.5497] +2026-04-09 04:44:18.890881: Epoch time: 107.74 s +2026-04-09 04:44:19.971135: +2026-04-09 04:44:19.973688: Epoch 424 +2026-04-09 04:44:19.975755: Current learning rate: 0.00609 +2026-04-09 04:46:03.869951: train_loss -0.1902 +2026-04-09 04:46:03.876832: val_loss -0.1395 +2026-04-09 04:46:03.879065: Pseudo dice [0.6521, 0.2076, 0.639, 0.7025, 0.3777, 0.6336, 0.7416] +2026-04-09 04:46:03.881086: Epoch time: 103.9 s +2026-04-09 04:46:04.966454: +2026-04-09 04:46:04.968874: Epoch 425 +2026-04-09 04:46:04.970701: Current learning rate: 0.00608 +2026-04-09 04:47:48.432954: train_loss -0.1473 +2026-04-09 04:47:48.438867: val_loss -0.149 +2026-04-09 04:47:48.441067: Pseudo dice [0.3729, 0.0564, 0.4241, 0.8893, 0.3706, 0.6958, 0.7808] +2026-04-09 04:47:48.443053: Epoch time: 103.47 s +2026-04-09 04:47:49.497063: +2026-04-09 04:47:49.498720: Epoch 426 +2026-04-09 04:47:49.500628: Current learning rate: 0.00607 +2026-04-09 04:49:35.150943: train_loss -0.1755 +2026-04-09 
04:49:35.157103: val_loss -0.1324 +2026-04-09 04:49:35.159581: Pseudo dice [0.4527, 0.6353, 0.611, 0.4799, 0.3483, 0.5732, 0.7869] +2026-04-09 04:49:35.161887: Epoch time: 105.66 s +2026-04-09 04:49:36.226382: +2026-04-09 04:49:36.229860: Epoch 427 +2026-04-09 04:49:36.232511: Current learning rate: 0.00606 +2026-04-09 04:51:18.824715: train_loss -0.179 +2026-04-09 04:51:18.829902: val_loss -0.1221 +2026-04-09 04:51:18.832011: Pseudo dice [0.4169, 0.6056, 0.52, 0.6993, 0.2469, 0.7711, 0.5846] +2026-04-09 04:51:18.834532: Epoch time: 102.6 s +2026-04-09 04:51:19.943168: +2026-04-09 04:51:19.945634: Epoch 428 +2026-04-09 04:51:19.947373: Current learning rate: 0.00605 +2026-04-09 04:53:04.190274: train_loss -0.1689 +2026-04-09 04:53:04.195310: val_loss -0.1502 +2026-04-09 04:53:04.197434: Pseudo dice [0.1159, 0.6455, 0.473, 0.6988, 0.2229, 0.8118, 0.7367] +2026-04-09 04:53:04.199525: Epoch time: 104.25 s +2026-04-09 04:53:05.300482: +2026-04-09 04:53:05.302321: Epoch 429 +2026-04-09 04:53:05.304159: Current learning rate: 0.00604 +2026-04-09 04:54:48.410294: train_loss -0.1844 +2026-04-09 04:54:48.416881: val_loss -0.1475 +2026-04-09 04:54:48.418872: Pseudo dice [0.6702, 0.0968, 0.5946, 0.655, 0.5045, 0.6902, 0.3584] +2026-04-09 04:54:48.421808: Epoch time: 103.11 s +2026-04-09 04:54:49.512730: +2026-04-09 04:54:49.515184: Epoch 430 +2026-04-09 04:54:49.517765: Current learning rate: 0.00603 +2026-04-09 04:56:33.173503: train_loss -0.1848 +2026-04-09 04:56:33.180143: val_loss -0.1458 +2026-04-09 04:56:33.182434: Pseudo dice [0.7319, 0.211, 0.7491, 0.3791, 0.3152, 0.6658, 0.8503] +2026-04-09 04:56:33.185142: Epoch time: 103.66 s +2026-04-09 04:56:34.251636: +2026-04-09 04:56:34.254333: Epoch 431 +2026-04-09 04:56:34.256634: Current learning rate: 0.00602 +2026-04-09 04:58:20.797552: train_loss -0.183 +2026-04-09 04:58:20.803651: val_loss -0.1327 +2026-04-09 04:58:20.805738: Pseudo dice [0.6051, 0.235, 0.6733, 0.4764, 0.2683, 0.6169, 0.7519] +2026-04-09 
04:58:20.807905: Epoch time: 106.55 s +2026-04-09 04:58:21.850138: +2026-04-09 04:58:21.853587: Epoch 432 +2026-04-09 04:58:21.856481: Current learning rate: 0.00601 +2026-04-09 05:00:05.487011: train_loss -0.1894 +2026-04-09 05:00:05.492186: val_loss -0.151 +2026-04-09 05:00:05.494232: Pseudo dice [0.4716, 0.3639, 0.585, 0.6725, 0.4246, 0.7488, 0.5608] +2026-04-09 05:00:05.497092: Epoch time: 103.64 s +2026-04-09 05:00:06.571215: +2026-04-09 05:00:06.573052: Epoch 433 +2026-04-09 05:00:06.575204: Current learning rate: 0.006 +2026-04-09 05:01:50.458485: train_loss -0.1819 +2026-04-09 05:01:50.466764: val_loss -0.1279 +2026-04-09 05:01:50.469195: Pseudo dice [0.6824, 0.1312, 0.5032, 0.8143, 0.342, 0.7665, 0.5816] +2026-04-09 05:01:50.472808: Epoch time: 103.89 s +2026-04-09 05:01:51.553288: +2026-04-09 05:01:51.556743: Epoch 434 +2026-04-09 05:01:51.560175: Current learning rate: 0.00599 +2026-04-09 05:03:35.969494: train_loss -0.1578 +2026-04-09 05:03:35.978336: val_loss -0.1129 +2026-04-09 05:03:35.981826: Pseudo dice [0.5293, 0.1852, 0.635, 0.5611, 0.3424, 0.722, 0.679] +2026-04-09 05:03:35.984394: Epoch time: 104.42 s +2026-04-09 05:03:37.054877: +2026-04-09 05:03:37.056822: Epoch 435 +2026-04-09 05:03:37.059028: Current learning rate: 0.00598 +2026-04-09 05:05:21.585178: train_loss -0.1667 +2026-04-09 05:05:21.590377: val_loss -0.1169 +2026-04-09 05:05:21.592909: Pseudo dice [0.3308, 0.5254, 0.6687, 0.7088, 0.2377, 0.726, 0.7451] +2026-04-09 05:05:21.594667: Epoch time: 104.53 s +2026-04-09 05:05:22.687825: +2026-04-09 05:05:22.690975: Epoch 436 +2026-04-09 05:05:22.693031: Current learning rate: 0.00597 +2026-04-09 05:07:06.598239: train_loss -0.1858 +2026-04-09 05:07:06.605542: val_loss -0.1367 +2026-04-09 05:07:06.608732: Pseudo dice [0.278, 0.4302, 0.7448, 0.4695, 0.2953, 0.641, 0.6428] +2026-04-09 05:07:06.611463: Epoch time: 103.91 s +2026-04-09 05:07:07.677184: +2026-04-09 05:07:07.679649: Epoch 437 +2026-04-09 05:07:07.682286: Current learning rate: 
0.00596 +2026-04-09 05:08:50.981399: train_loss -0.1752 +2026-04-09 05:08:50.986860: val_loss -0.1516 +2026-04-09 05:08:50.988821: Pseudo dice [0.6071, 0.4158, 0.8212, 0.8505, 0.2259, 0.7372, 0.7539] +2026-04-09 05:08:50.990784: Epoch time: 103.31 s +2026-04-09 05:08:52.051525: +2026-04-09 05:08:52.053185: Epoch 438 +2026-04-09 05:08:52.055666: Current learning rate: 0.00595 +2026-04-09 05:10:35.161810: train_loss -0.1809 +2026-04-09 05:10:35.172681: val_loss -0.167 +2026-04-09 05:10:35.175520: Pseudo dice [0.4884, 0.1417, 0.5965, 0.6529, 0.3196, 0.733, 0.8605] +2026-04-09 05:10:35.178758: Epoch time: 103.11 s +2026-04-09 05:10:36.250590: +2026-04-09 05:10:36.253971: Epoch 439 +2026-04-09 05:10:36.256594: Current learning rate: 0.00594 +2026-04-09 05:12:18.704374: train_loss -0.1911 +2026-04-09 05:12:18.716066: val_loss -0.1153 +2026-04-09 05:12:18.723466: Pseudo dice [0.628, 0.1424, 0.7124, 0.7378, 0.2568, 0.2912, 0.803] +2026-04-09 05:12:18.731784: Epoch time: 102.46 s +2026-04-09 05:12:19.818563: +2026-04-09 05:12:19.820385: Epoch 440 +2026-04-09 05:12:19.822387: Current learning rate: 0.00593 +2026-04-09 05:14:02.911406: train_loss -0.1797 +2026-04-09 05:14:02.916724: val_loss -0.1472 +2026-04-09 05:14:02.918790: Pseudo dice [0.4907, 0.2078, 0.706, 0.8433, 0.2561, 0.5332, 0.6702] +2026-04-09 05:14:02.920386: Epoch time: 103.1 s +2026-04-09 05:14:03.986020: +2026-04-09 05:14:03.988180: Epoch 441 +2026-04-09 05:14:03.989911: Current learning rate: 0.00592 +2026-04-09 05:15:49.915267: train_loss -0.1906 +2026-04-09 05:15:49.922486: val_loss -0.1302 +2026-04-09 05:15:49.925100: Pseudo dice [0.511, 0.3444, 0.6406, 0.6456, 0.3928, 0.7037, 0.8828] +2026-04-09 05:15:49.926795: Epoch time: 105.93 s +2026-04-09 05:15:51.013783: +2026-04-09 05:15:51.016280: Epoch 442 +2026-04-09 05:15:51.017927: Current learning rate: 0.00592 +2026-04-09 05:17:34.315763: train_loss -0.1614 +2026-04-09 05:17:34.321414: val_loss -0.1524 +2026-04-09 05:17:34.323304: Pseudo dice [0.5379, 
0.2614, 0.5883, 0.6694, 0.297, 0.7582, 0.7863] +2026-04-09 05:17:34.325280: Epoch time: 103.31 s +2026-04-09 05:17:35.416381: +2026-04-09 05:17:35.418391: Epoch 443 +2026-04-09 05:17:35.421255: Current learning rate: 0.00591 +2026-04-09 05:19:20.586051: train_loss -0.1956 +2026-04-09 05:19:20.593508: val_loss -0.147 +2026-04-09 05:19:20.595679: Pseudo dice [0.4515, 0.0566, 0.7162, 0.5567, 0.6652, 0.6751, 0.805] +2026-04-09 05:19:20.597964: Epoch time: 105.17 s +2026-04-09 05:19:21.656935: +2026-04-09 05:19:21.658651: Epoch 444 +2026-04-09 05:19:21.660371: Current learning rate: 0.0059 +2026-04-09 05:21:04.717252: train_loss -0.1852 +2026-04-09 05:21:04.723617: val_loss -0.1468 +2026-04-09 05:21:04.726805: Pseudo dice [0.7198, 0.154, 0.8232, 0.5775, 0.5336, 0.8261, 0.8733] +2026-04-09 05:21:04.729072: Epoch time: 103.06 s +2026-04-09 05:21:04.731039: Yayy! New best EMA pseudo Dice: 0.5549 +2026-04-09 05:21:07.531362: +2026-04-09 05:21:07.533537: Epoch 445 +2026-04-09 05:21:07.535394: Current learning rate: 0.00589 +2026-04-09 05:22:53.659684: train_loss -0.1719 +2026-04-09 05:22:53.671031: val_loss -0.1446 +2026-04-09 05:22:53.675680: Pseudo dice [0.4077, 0.2209, 0.7535, 0.6787, 0.4421, 0.6655, 0.7272] +2026-04-09 05:22:53.679708: Epoch time: 106.13 s +2026-04-09 05:22:53.683192: Yayy! New best EMA pseudo Dice: 0.555 +2026-04-09 05:22:56.577665: +2026-04-09 05:22:56.580026: Epoch 446 +2026-04-09 05:22:56.581728: Current learning rate: 0.00588 +2026-04-09 05:24:39.696942: train_loss -0.1824 +2026-04-09 05:24:39.703743: val_loss -0.1495 +2026-04-09 05:24:39.706666: Pseudo dice [0.3074, 0.5656, 0.565, 0.8545, 0.4332, 0.6774, 0.8031] +2026-04-09 05:24:39.709394: Epoch time: 103.12 s +2026-04-09 05:24:39.711437: Yayy! 
New best EMA pseudo Dice: 0.5596 +2026-04-09 05:24:42.497840: +2026-04-09 05:24:42.499830: Epoch 447 +2026-04-09 05:24:42.501500: Current learning rate: 0.00587 +2026-04-09 05:26:27.276217: train_loss -0.1842 +2026-04-09 05:26:27.281320: val_loss -0.1512 +2026-04-09 05:26:27.283104: Pseudo dice [0.4968, 0.5493, 0.7589, 0.6614, 0.4457, 0.6531, 0.6935] +2026-04-09 05:26:27.290884: Epoch time: 104.78 s +2026-04-09 05:26:27.293466: Yayy! New best EMA pseudo Dice: 0.5645 +2026-04-09 05:26:30.088376: +2026-04-09 05:26:30.092742: Epoch 448 +2026-04-09 05:26:30.097247: Current learning rate: 0.00586 +2026-04-09 05:28:14.763296: train_loss -0.1771 +2026-04-09 05:28:14.769491: val_loss -0.1593 +2026-04-09 05:28:14.771374: Pseudo dice [0.4637, 0.4256, 0.8074, 0.6238, 0.5634, 0.78, 0.7523] +2026-04-09 05:28:14.773026: Epoch time: 104.68 s +2026-04-09 05:28:14.774801: Yayy! New best EMA pseudo Dice: 0.5711 +2026-04-09 05:28:17.604415: +2026-04-09 05:28:17.607241: Epoch 449 +2026-04-09 05:28:17.609132: Current learning rate: 0.00585 +2026-04-09 05:30:01.515107: train_loss -0.1901 +2026-04-09 05:30:01.528327: val_loss -0.1306 +2026-04-09 05:30:01.531221: Pseudo dice [0.3756, 0.3444, 0.7069, 0.6825, 0.3957, 0.5278, 0.6961] +2026-04-09 05:30:01.533005: Epoch time: 103.91 s +2026-04-09 05:30:05.235448: +2026-04-09 05:30:05.237897: Epoch 450 +2026-04-09 05:30:05.240086: Current learning rate: 0.00584 +2026-04-09 05:31:49.433675: train_loss -0.1793 +2026-04-09 05:31:49.438997: val_loss -0.1443 +2026-04-09 05:31:49.441237: Pseudo dice [0.6949, 0.2819, 0.64, 0.8433, 0.3242, 0.6641, 0.6823] +2026-04-09 05:31:49.443135: Epoch time: 104.2 s +2026-04-09 05:31:50.496289: +2026-04-09 05:31:50.498259: Epoch 451 +2026-04-09 05:31:50.501263: Current learning rate: 0.00583 +2026-04-09 05:33:33.911057: train_loss -0.1943 +2026-04-09 05:33:33.917985: val_loss -0.1347 +2026-04-09 05:33:33.919834: Pseudo dice [0.6305, 0.0528, 0.3606, 0.3644, 0.4714, 0.3355, 0.6517] +2026-04-09 05:33:33.922895: Epoch 
time: 103.42 s +2026-04-09 05:33:35.014645: +2026-04-09 05:33:35.029345: Epoch 452 +2026-04-09 05:33:35.032412: Current learning rate: 0.00582 +2026-04-09 05:35:19.718064: train_loss -0.1778 +2026-04-09 05:35:19.723455: val_loss -0.1338 +2026-04-09 05:35:19.726280: Pseudo dice [0.5635, 0.0939, 0.3376, 0.2048, 0.3191, 0.6085, 0.6799] +2026-04-09 05:35:19.728433: Epoch time: 104.71 s +2026-04-09 05:35:20.777535: +2026-04-09 05:35:20.779504: Epoch 453 +2026-04-09 05:35:20.781348: Current learning rate: 0.00581 +2026-04-09 05:37:05.148989: train_loss -0.1822 +2026-04-09 05:37:05.154981: val_loss -0.1626 +2026-04-09 05:37:05.158241: Pseudo dice [0.5155, 0.3991, 0.731, 0.8549, 0.2785, 0.8461, 0.7866] +2026-04-09 05:37:05.160428: Epoch time: 104.37 s +2026-04-09 05:37:06.237664: +2026-04-09 05:37:06.240968: Epoch 454 +2026-04-09 05:37:06.243424: Current learning rate: 0.0058 +2026-04-09 05:38:53.279132: train_loss -0.1912 +2026-04-09 05:38:53.285475: val_loss -0.1804 +2026-04-09 05:38:53.287541: Pseudo dice [0.5689, 0.4979, 0.6651, 0.6526, 0.6358, 0.5319, 0.813] +2026-04-09 05:38:53.289917: Epoch time: 107.04 s +2026-04-09 05:38:54.350933: +2026-04-09 05:38:54.353081: Epoch 455 +2026-04-09 05:38:54.355775: Current learning rate: 0.00579 +2026-04-09 05:40:39.277515: train_loss -0.178 +2026-04-09 05:40:39.283364: val_loss -0.1495 +2026-04-09 05:40:39.285720: Pseudo dice [0.6074, 0.4034, 0.7721, 0.6116, 0.4525, 0.5573, 0.7901] +2026-04-09 05:40:39.288799: Epoch time: 104.93 s +2026-04-09 05:40:40.344663: +2026-04-09 05:40:40.346679: Epoch 456 +2026-04-09 05:40:40.349098: Current learning rate: 0.00578 +2026-04-09 05:42:23.815413: train_loss -0.1894 +2026-04-09 05:42:23.821822: val_loss -0.1372 +2026-04-09 05:42:23.824940: Pseudo dice [0.6236, 0.3509, 0.6443, 0.534, 0.2813, 0.7159, 0.8814] +2026-04-09 05:42:23.827089: Epoch time: 103.47 s +2026-04-09 05:42:24.913357: +2026-04-09 05:42:24.915352: Epoch 457 +2026-04-09 05:42:24.917181: Current learning rate: 0.00577 +2026-04-09 
05:44:12.889066: train_loss -0.1702 +2026-04-09 05:44:12.896143: val_loss -0.1245 +2026-04-09 05:44:12.899663: Pseudo dice [0.4736, 0.3306, 0.6225, 0.3698, 0.2978, 0.5567, 0.7184] +2026-04-09 05:44:12.902353: Epoch time: 107.98 s +2026-04-09 05:44:13.981765: +2026-04-09 05:44:13.984121: Epoch 458 +2026-04-09 05:44:13.986635: Current learning rate: 0.00576 +2026-04-09 05:45:58.293768: train_loss -0.1822 +2026-04-09 05:45:58.300565: val_loss -0.1208 +2026-04-09 05:45:58.303453: Pseudo dice [0.5831, 0.5096, 0.6683, 0.8189, 0.3755, 0.3143, 0.8455] +2026-04-09 05:45:58.305557: Epoch time: 104.32 s +2026-04-09 05:45:59.375680: +2026-04-09 05:45:59.377821: Epoch 459 +2026-04-09 05:45:59.379903: Current learning rate: 0.00575 +2026-04-09 05:47:43.379532: train_loss -0.171 +2026-04-09 05:47:43.385824: val_loss -0.1164 +2026-04-09 05:47:43.389074: Pseudo dice [0.472, 0.2898, 0.6668, 0.8683, 0.4632, 0.7948, 0.5485] +2026-04-09 05:47:43.391457: Epoch time: 104.01 s +2026-04-09 05:47:44.474355: +2026-04-09 05:47:44.476438: Epoch 460 +2026-04-09 05:47:44.478631: Current learning rate: 0.00574 +2026-04-09 05:49:28.355240: train_loss -0.1745 +2026-04-09 05:49:28.362227: val_loss -0.1209 +2026-04-09 05:49:28.365044: Pseudo dice [0.3847, 0.2519, 0.7769, 0.7815, 0.4851, 0.7206, 0.7116] +2026-04-09 05:49:28.367261: Epoch time: 103.88 s +2026-04-09 05:49:29.425354: +2026-04-09 05:49:29.429413: Epoch 461 +2026-04-09 05:49:29.433892: Current learning rate: 0.00573 +2026-04-09 05:51:12.918866: train_loss -0.1888 +2026-04-09 05:51:12.924728: val_loss -0.1284 +2026-04-09 05:51:12.926668: Pseudo dice [0.6823, 0.1751, 0.5446, 0.3542, 0.2589, 0.7181, 0.7249] +2026-04-09 05:51:12.928724: Epoch time: 103.5 s +2026-04-09 05:51:13.994987: +2026-04-09 05:51:13.996850: Epoch 462 +2026-04-09 05:51:13.998564: Current learning rate: 0.00572 +2026-04-09 05:53:01.268428: train_loss -0.1909 +2026-04-09 05:53:01.275350: val_loss -0.1504 +2026-04-09 05:53:01.277568: Pseudo dice [0.3134, 0.1476, 0.6716, 
0.9075, 0.3174, 0.8763, 0.8295] +2026-04-09 05:53:01.279527: Epoch time: 107.28 s +2026-04-09 05:53:02.360757: +2026-04-09 05:53:02.362762: Epoch 463 +2026-04-09 05:53:02.364529: Current learning rate: 0.00571 +2026-04-09 05:54:47.810535: train_loss -0.1902 +2026-04-09 05:54:47.816529: val_loss -0.1405 +2026-04-09 05:54:47.818602: Pseudo dice [0.5465, 0.4569, 0.6763, 0.5681, 0.468, 0.7448, 0.5301] +2026-04-09 05:54:47.821245: Epoch time: 105.45 s +2026-04-09 05:54:48.889548: +2026-04-09 05:54:48.891455: Epoch 464 +2026-04-09 05:54:48.893116: Current learning rate: 0.0057 +2026-04-09 05:56:32.215068: train_loss -0.1771 +2026-04-09 05:56:32.221324: val_loss -0.1574 +2026-04-09 05:56:32.223648: Pseudo dice [0.5051, 0.3915, 0.6601, 0.7286, 0.3615, 0.7428, 0.7803] +2026-04-09 05:56:32.225667: Epoch time: 103.33 s +2026-04-09 05:56:33.290152: +2026-04-09 05:56:33.292554: Epoch 465 +2026-04-09 05:56:33.296285: Current learning rate: 0.0057 +2026-04-09 05:58:17.293445: train_loss -0.1851 +2026-04-09 05:58:17.313887: val_loss -0.1286 +2026-04-09 05:58:17.320357: Pseudo dice [0.3996, 0.2137, 0.7458, 0.7993, 0.2306, 0.7786, 0.8956] +2026-04-09 05:58:17.333032: Epoch time: 104.01 s +2026-04-09 05:58:18.657784: +2026-04-09 05:58:18.659614: Epoch 466 +2026-04-09 05:58:18.662040: Current learning rate: 0.00569 +2026-04-09 06:00:02.012922: train_loss -0.188 +2026-04-09 06:00:02.019382: val_loss -0.1555 +2026-04-09 06:00:02.021230: Pseudo dice [0.5148, 0.5166, 0.6561, 0.7534, 0.4332, 0.5343, 0.8165] +2026-04-09 06:00:02.024464: Epoch time: 103.36 s +2026-04-09 06:00:03.109000: +2026-04-09 06:00:03.111226: Epoch 467 +2026-04-09 06:00:03.112907: Current learning rate: 0.00568 +2026-04-09 06:01:46.830495: train_loss -0.1818 +2026-04-09 06:01:46.837964: val_loss -0.1329 +2026-04-09 06:01:46.840425: Pseudo dice [0.3715, 0.0682, 0.4423, 0.8411, 0.1964, 0.1421, 0.855] +2026-04-09 06:01:46.842656: Epoch time: 103.72 s +2026-04-09 06:01:47.910254: +2026-04-09 06:01:47.912178: Epoch 468 
+2026-04-09 06:01:47.913924: Current learning rate: 0.00567 +2026-04-09 06:03:32.389842: train_loss -0.1807 +2026-04-09 06:03:32.394364: val_loss -0.1356 +2026-04-09 06:03:32.396697: Pseudo dice [0.5394, 0.0553, 0.7112, 0.3963, 0.5633, 0.7161, 0.5941] +2026-04-09 06:03:32.399030: Epoch time: 104.48 s +2026-04-09 06:03:33.475633: +2026-04-09 06:03:33.478305: Epoch 469 +2026-04-09 06:03:33.480199: Current learning rate: 0.00566 +2026-04-09 06:05:17.873845: train_loss -0.1872 +2026-04-09 06:05:17.882707: val_loss -0.1502 +2026-04-09 06:05:17.884657: Pseudo dice [0.3661, 0.0481, 0.6767, 0.8896, 0.5956, 0.6901, 0.6478] +2026-04-09 06:05:17.886733: Epoch time: 104.4 s +2026-04-09 06:05:18.970805: +2026-04-09 06:05:18.973817: Epoch 470 +2026-04-09 06:05:18.976489: Current learning rate: 0.00565 +2026-04-09 06:07:04.621722: train_loss -0.1891 +2026-04-09 06:07:04.626990: val_loss -0.1526 +2026-04-09 06:07:04.629059: Pseudo dice [0.2465, 0.2795, 0.7443, 0.8147, 0.3569, 0.725, 0.7971] +2026-04-09 06:07:04.631022: Epoch time: 105.65 s +2026-04-09 06:07:05.700001: +2026-04-09 06:07:05.701652: Epoch 471 +2026-04-09 06:07:05.703218: Current learning rate: 0.00564 +2026-04-09 06:08:50.453941: train_loss -0.1879 +2026-04-09 06:08:50.459050: val_loss -0.1188 +2026-04-09 06:08:50.460830: Pseudo dice [0.6644, 0.4588, 0.6334, 0.5399, 0.1746, 0.6484, 0.8305] +2026-04-09 06:08:50.462787: Epoch time: 104.76 s +2026-04-09 06:08:51.520224: +2026-04-09 06:08:51.522476: Epoch 472 +2026-04-09 06:08:51.524643: Current learning rate: 0.00563 +2026-04-09 06:10:35.764127: train_loss -0.1829 +2026-04-09 06:10:35.769378: val_loss -0.1406 +2026-04-09 06:10:35.771502: Pseudo dice [0.5986, 0.2408, 0.1762, 0.8009, 0.3054, 0.6332, 0.7922] +2026-04-09 06:10:35.773240: Epoch time: 104.25 s +2026-04-09 06:10:36.833472: +2026-04-09 06:10:36.835421: Epoch 473 +2026-04-09 06:10:36.837249: Current learning rate: 0.00562 +2026-04-09 06:12:19.629941: train_loss -0.1755 +2026-04-09 06:12:19.637494: val_loss 
-0.1488 +2026-04-09 06:12:19.640635: Pseudo dice [0.4856, 0.2728, 0.7126, 0.4729, 0.5606, 0.6918, 0.8099] +2026-04-09 06:12:19.643704: Epoch time: 102.8 s +2026-04-09 06:12:20.705148: +2026-04-09 06:12:20.707493: Epoch 474 +2026-04-09 06:12:20.709267: Current learning rate: 0.00561 +2026-04-09 06:14:03.971381: train_loss -0.2012 +2026-04-09 06:14:03.976613: val_loss -0.144 +2026-04-09 06:14:03.978544: Pseudo dice [0.5099, 0.3605, 0.7202, 0.817, 0.284, 0.7424, 0.5683] +2026-04-09 06:14:03.980821: Epoch time: 103.27 s +2026-04-09 06:14:05.047904: +2026-04-09 06:14:05.051962: Epoch 475 +2026-04-09 06:14:05.055047: Current learning rate: 0.0056 +2026-04-09 06:15:47.825815: train_loss -0.187 +2026-04-09 06:15:47.833114: val_loss -0.134 +2026-04-09 06:15:47.835807: Pseudo dice [0.6627, 0.2072, 0.5217, 0.8327, 0.3967, 0.6509, 0.8383] +2026-04-09 06:15:47.839918: Epoch time: 102.78 s +2026-04-09 06:15:48.903532: +2026-04-09 06:15:48.905598: Epoch 476 +2026-04-09 06:15:48.907257: Current learning rate: 0.00559 +2026-04-09 06:17:33.257678: train_loss -0.1751 +2026-04-09 06:17:33.263060: val_loss -0.125 +2026-04-09 06:17:33.268648: Pseudo dice [0.4627, 0.3683, 0.7863, 0.2507, 0.2915, 0.2964, 0.6433] +2026-04-09 06:17:33.271540: Epoch time: 104.36 s +2026-04-09 06:17:34.366512: +2026-04-09 06:17:34.372079: Epoch 477 +2026-04-09 06:17:34.385846: Current learning rate: 0.00558 +2026-04-09 06:19:17.728106: train_loss -0.1882 +2026-04-09 06:19:17.734187: val_loss -0.1692 +2026-04-09 06:19:17.736828: Pseudo dice [0.68, 0.1392, 0.8082, 0.7691, 0.5822, 0.8232, 0.868] +2026-04-09 06:19:17.738933: Epoch time: 103.36 s +2026-04-09 06:19:18.842570: +2026-04-09 06:19:18.845418: Epoch 478 +2026-04-09 06:19:18.847625: Current learning rate: 0.00557 +2026-04-09 06:21:02.479406: train_loss -0.1753 +2026-04-09 06:21:02.484231: val_loss -0.1254 +2026-04-09 06:21:02.486128: Pseudo dice [0.1772, 0.4492, 0.6447, 0.765, 0.3037, 0.7449, 0.704] +2026-04-09 06:21:02.488008: Epoch time: 103.64 s 
+2026-04-09 06:21:03.572168: +2026-04-09 06:21:03.574276: Epoch 479 +2026-04-09 06:21:03.575911: Current learning rate: 0.00556 +2026-04-09 06:22:46.947241: train_loss -0.1794 +2026-04-09 06:22:46.959063: val_loss -0.1473 +2026-04-09 06:22:46.963355: Pseudo dice [0.437, 0.1917, 0.5674, 0.8388, 0.2852, 0.5047, 0.8194] +2026-04-09 06:22:46.967260: Epoch time: 103.38 s +2026-04-09 06:22:48.076729: +2026-04-09 06:22:48.079082: Epoch 480 +2026-04-09 06:22:48.081686: Current learning rate: 0.00555 +2026-04-09 06:24:31.323450: train_loss -0.1786 +2026-04-09 06:24:31.332086: val_loss -0.1475 +2026-04-09 06:24:31.334314: Pseudo dice [0.6777, 0.3037, 0.7268, 0.813, 0.3717, 0.5542, 0.8423] +2026-04-09 06:24:31.336084: Epoch time: 103.25 s +2026-04-09 06:24:32.446389: +2026-04-09 06:24:32.448654: Epoch 481 +2026-04-09 06:24:32.451586: Current learning rate: 0.00554 +2026-04-09 06:26:15.626650: train_loss -0.1888 +2026-04-09 06:26:15.631323: val_loss -0.1615 +2026-04-09 06:26:15.632996: Pseudo dice [0.6092, 0.377, 0.7489, 0.7471, 0.176, 0.7573, 0.8153] +2026-04-09 06:26:15.634812: Epoch time: 103.18 s +2026-04-09 06:26:16.713373: +2026-04-09 06:26:16.715904: Epoch 482 +2026-04-09 06:26:16.718431: Current learning rate: 0.00553 +2026-04-09 06:27:59.501087: train_loss -0.1877 +2026-04-09 06:27:59.507255: val_loss -0.1679 +2026-04-09 06:27:59.509271: Pseudo dice [0.4144, 0.3339, 0.6541, 0.8454, 0.3807, 0.6854, 0.6449] +2026-04-09 06:27:59.511431: Epoch time: 102.79 s +2026-04-09 06:28:00.611118: +2026-04-09 06:28:00.612908: Epoch 483 +2026-04-09 06:28:00.615849: Current learning rate: 0.00552 +2026-04-09 06:29:44.410972: train_loss -0.2072 +2026-04-09 06:29:44.416813: val_loss -0.1475 +2026-04-09 06:29:44.418856: Pseudo dice [0.3602, 0.4766, 0.6271, 0.382, 0.2542, 0.5132, 0.6151] +2026-04-09 06:29:44.420740: Epoch time: 103.8 s +2026-04-09 06:29:45.479243: +2026-04-09 06:29:45.481925: Epoch 484 +2026-04-09 06:29:45.484026: Current learning rate: 0.00551 +2026-04-09 
06:31:29.966573: train_loss -0.1927 +2026-04-09 06:31:29.971937: val_loss -0.1852 +2026-04-09 06:31:29.974513: Pseudo dice [0.3767, 0.4026, 0.6885, 0.8659, 0.2154, 0.647, 0.8389] +2026-04-09 06:31:29.977171: Epoch time: 104.49 s +2026-04-09 06:31:31.071697: +2026-04-09 06:31:31.073586: Epoch 485 +2026-04-09 06:31:31.075373: Current learning rate: 0.0055 +2026-04-09 06:33:14.305880: train_loss -0.2411 +2026-04-09 06:33:14.316142: val_loss -0.2262 +2026-04-09 06:33:14.318165: Pseudo dice [0.2653, 0.3228, 0.7263, 0.7227, 0.505, 0.5604, 0.8273] +2026-04-09 06:33:14.320971: Epoch time: 103.24 s +2026-04-09 06:33:15.400334: +2026-04-09 06:33:15.402420: Epoch 486 +2026-04-09 06:33:15.405364: Current learning rate: 0.00549 +2026-04-09 06:34:58.536891: train_loss -0.2881 +2026-04-09 06:34:58.544932: val_loss -0.2243 +2026-04-09 06:34:58.546842: Pseudo dice [0.2346, 0.3464, 0.4655, 0.3606, 0.1608, 0.7787, 0.7837] +2026-04-09 06:34:58.549090: Epoch time: 103.14 s +2026-04-09 06:34:59.643989: +2026-04-09 06:34:59.646208: Epoch 487 +2026-04-09 06:34:59.648012: Current learning rate: 0.00548 +2026-04-09 06:36:42.283891: train_loss -0.2937 +2026-04-09 06:36:42.290658: val_loss -0.2531 +2026-04-09 06:36:42.292424: Pseudo dice [0.4295, 0.5964, 0.8008, 0.8725, 0.2083, 0.5702, 0.7586] +2026-04-09 06:36:42.294625: Epoch time: 102.64 s +2026-04-09 06:36:43.354223: +2026-04-09 06:36:43.356288: Epoch 488 +2026-04-09 06:36:43.358440: Current learning rate: 0.00547 +2026-04-09 06:38:26.507138: train_loss -0.2591 +2026-04-09 06:38:26.514733: val_loss -0.2151 +2026-04-09 06:38:26.516632: Pseudo dice [0.3268, 0.2233, 0.3947, 0.7727, 0.2053, 0.4916, 0.3806] +2026-04-09 06:38:26.518999: Epoch time: 103.16 s +2026-04-09 06:38:27.614873: +2026-04-09 06:38:27.617172: Epoch 489 +2026-04-09 06:38:27.618863: Current learning rate: 0.00546 +2026-04-09 06:40:11.886994: train_loss -0.2586 +2026-04-09 06:40:11.893834: val_loss -0.2359 +2026-04-09 06:40:11.895752: Pseudo dice [0.3908, 0.5859, 0.5608, 
0.3903, 0.2597, 0.5001, 0.4454] +2026-04-09 06:40:11.897975: Epoch time: 104.28 s +2026-04-09 06:40:12.966409: +2026-04-09 06:40:12.968261: Epoch 490 +2026-04-09 06:40:12.970243: Current learning rate: 0.00546 +2026-04-09 06:42:03.881960: train_loss -0.2847 +2026-04-09 06:42:03.896996: val_loss -0.2327 +2026-04-09 06:42:03.901864: Pseudo dice [0.3283, 0.4054, 0.7099, 0.8636, 0.3156, 0.6016, 0.5378] +2026-04-09 06:42:03.905797: Epoch time: 110.92 s +2026-04-09 06:42:06.183832: +2026-04-09 06:42:06.189810: Epoch 491 +2026-04-09 06:42:06.193221: Current learning rate: 0.00545 +2026-04-09 06:43:49.761534: train_loss -0.2775 +2026-04-09 06:43:49.768012: val_loss -0.2447 +2026-04-09 06:43:49.770008: Pseudo dice [0.4175, 0.1001, 0.6687, 0.6757, 0.5504, 0.6951, 0.2759] +2026-04-09 06:43:49.771903: Epoch time: 103.58 s +2026-04-09 06:43:50.842643: +2026-04-09 06:43:50.846460: Epoch 492 +2026-04-09 06:43:50.848402: Current learning rate: 0.00544 +2026-04-09 06:45:33.976125: train_loss -0.2742 +2026-04-09 06:45:33.982761: val_loss -0.2186 +2026-04-09 06:45:33.984700: Pseudo dice [0.3329, 0.1475, 0.7307, 0.5343, 0.2803, 0.6786, 0.7275] +2026-04-09 06:45:33.986715: Epoch time: 103.14 s +2026-04-09 06:45:35.039741: +2026-04-09 06:45:35.042120: Epoch 493 +2026-04-09 06:45:35.045058: Current learning rate: 0.00543 +2026-04-09 06:47:19.424525: train_loss -0.279 +2026-04-09 06:47:19.434594: val_loss -0.2458 +2026-04-09 06:47:19.436822: Pseudo dice [0.3353, 0.3071, 0.7737, 0.7169, 0.5188, 0.7095, 0.8572] +2026-04-09 06:47:19.438777: Epoch time: 104.39 s +2026-04-09 06:47:20.552716: +2026-04-09 06:47:20.556288: Epoch 494 +2026-04-09 06:47:20.560236: Current learning rate: 0.00542 +2026-04-09 06:49:04.163692: train_loss -0.2475 +2026-04-09 06:49:04.170766: val_loss -0.2449 +2026-04-09 06:49:04.173085: Pseudo dice [0.2202, 0.1109, 0.6425, 0.8032, 0.2709, 0.5451, 0.8584] +2026-04-09 06:49:04.175074: Epoch time: 103.61 s +2026-04-09 06:49:05.263751: +2026-04-09 06:49:05.265491: Epoch 495 
+2026-04-09 06:49:05.267319: Current learning rate: 0.00541 +2026-04-09 06:50:48.654078: train_loss -0.2675 +2026-04-09 06:50:48.662372: val_loss -0.2294 +2026-04-09 06:50:48.664318: Pseudo dice [0.5006, 0.4485, 0.3101, 0.8164, 0.4044, 0.2251, 0.777] +2026-04-09 06:50:48.666420: Epoch time: 103.39 s +2026-04-09 06:50:49.716510: +2026-04-09 06:50:49.718370: Epoch 496 +2026-04-09 06:50:49.720311: Current learning rate: 0.0054 +2026-04-09 06:52:33.797995: train_loss -0.2728 +2026-04-09 06:52:33.804326: val_loss -0.2094 +2026-04-09 06:52:33.806727: Pseudo dice [0.2939, 0.0786, 0.5128, 0.8025, 0.4012, 0.5478, 0.8201] +2026-04-09 06:52:33.808904: Epoch time: 104.08 s +2026-04-09 06:52:34.923406: +2026-04-09 06:52:34.926005: Epoch 497 +2026-04-09 06:52:34.928655: Current learning rate: 0.00539 +2026-04-09 06:54:16.932898: train_loss -0.2774 +2026-04-09 06:54:16.943104: val_loss -0.2086 +2026-04-09 06:54:16.945828: Pseudo dice [0.7341, 0.5888, 0.4199, 0.464, 0.0454, 0.4557, 0.1865] +2026-04-09 06:54:16.948998: Epoch time: 102.01 s +2026-04-09 06:54:18.027810: +2026-04-09 06:54:18.029902: Epoch 498 +2026-04-09 06:54:18.032068: Current learning rate: 0.00538 +2026-04-09 06:56:01.731142: train_loss -0.2744 +2026-04-09 06:56:01.741758: val_loss -0.2675 +2026-04-09 06:56:01.743983: Pseudo dice [0.4243, 0.2393, 0.7494, 0.7653, 0.3552, 0.7457, 0.9163] +2026-04-09 06:56:01.746592: Epoch time: 103.71 s +2026-04-09 06:56:02.825125: +2026-04-09 06:56:02.827269: Epoch 499 +2026-04-09 06:56:02.829510: Current learning rate: 0.00537 +2026-04-09 06:57:46.281877: train_loss -0.3013 +2026-04-09 06:57:46.289781: val_loss -0.233 +2026-04-09 06:57:46.291867: Pseudo dice [0.3927, 0.1007, 0.6574, 0.5994, 0.5312, 0.5765, 0.7049] +2026-04-09 06:57:46.294554: Epoch time: 103.46 s +2026-04-09 06:57:49.098929: +2026-04-09 06:57:49.101538: Epoch 500 +2026-04-09 06:57:49.104470: Current learning rate: 0.00536 +2026-04-09 06:59:31.677439: train_loss -0.3014 +2026-04-09 06:59:31.684127: val_loss -0.2761 
+2026-04-09 06:59:31.687060: Pseudo dice [0.5248, 0.0576, 0.6145, 0.7674, 0.348, 0.6272, 0.8257] +2026-04-09 06:59:31.690125: Epoch time: 102.58 s +2026-04-09 06:59:32.788626: +2026-04-09 06:59:32.791207: Epoch 501 +2026-04-09 06:59:32.793616: Current learning rate: 0.00535 +2026-04-09 07:01:17.425504: train_loss -0.2712 +2026-04-09 07:01:17.432825: val_loss -0.2487 +2026-04-09 07:01:17.434871: Pseudo dice [0.2441, 0.3492, 0.5446, 0.7681, 0.3536, 0.7371, 0.6825] +2026-04-09 07:01:17.436767: Epoch time: 104.64 s +2026-04-09 07:01:18.534868: +2026-04-09 07:01:18.537213: Epoch 502 +2026-04-09 07:01:18.539378: Current learning rate: 0.00534 +2026-04-09 07:03:01.396732: train_loss -0.2784 +2026-04-09 07:03:01.407409: val_loss -0.2189 +2026-04-09 07:03:01.409676: Pseudo dice [0.3966, 0.3365, 0.5911, 0.5546, 0.2483, 0.5605, 0.529] +2026-04-09 07:03:01.412055: Epoch time: 102.87 s +2026-04-09 07:03:02.480706: +2026-04-09 07:03:02.484432: Epoch 503 +2026-04-09 07:03:02.488089: Current learning rate: 0.00533 +2026-04-09 07:04:48.164458: train_loss -0.2751 +2026-04-09 07:04:48.174798: val_loss -0.2358 +2026-04-09 07:04:48.176572: Pseudo dice [0.7603, 0.3239, 0.4706, 0.6841, 0.2146, 0.3534, 0.7659] +2026-04-09 07:04:48.178592: Epoch time: 105.69 s +2026-04-09 07:04:49.234064: +2026-04-09 07:04:49.236172: Epoch 504 +2026-04-09 07:04:49.237757: Current learning rate: 0.00532 +2026-04-09 07:06:32.104688: train_loss -0.2808 +2026-04-09 07:06:32.113083: val_loss -0.2432 +2026-04-09 07:06:32.115077: Pseudo dice [0.4417, 0.2281, 0.1571, 0.7718, 0.1968, 0.4788, 0.8219] +2026-04-09 07:06:32.117211: Epoch time: 102.87 s +2026-04-09 07:06:33.165962: +2026-04-09 07:06:33.167593: Epoch 505 +2026-04-09 07:06:33.169220: Current learning rate: 0.00531 +2026-04-09 07:08:15.748095: train_loss -0.2777 +2026-04-09 07:08:15.753486: val_loss -0.2242 +2026-04-09 07:08:15.755129: Pseudo dice [0.2163, 0.2645, 0.7471, 0.5399, 0.1382, 0.7985, 0.7258] +2026-04-09 07:08:15.757076: Epoch time: 102.59 s 
+2026-04-09 07:08:16.832701: +2026-04-09 07:08:16.834465: Epoch 506 +2026-04-09 07:08:16.836386: Current learning rate: 0.0053 +2026-04-09 07:09:59.265450: train_loss -0.2788 +2026-04-09 07:09:59.272933: val_loss -0.2234 +2026-04-09 07:09:59.275299: Pseudo dice [0.11, 0.5562, 0.6617, 0.4329, 0.2557, 0.7074, 0.2927] +2026-04-09 07:09:59.277812: Epoch time: 102.44 s +2026-04-09 07:10:00.471246: +2026-04-09 07:10:00.472891: Epoch 507 +2026-04-09 07:10:00.474543: Current learning rate: 0.00529 +2026-04-09 07:11:43.359541: train_loss -0.2861 +2026-04-09 07:11:43.366679: val_loss -0.2657 +2026-04-09 07:11:43.368657: Pseudo dice [0.4091, 0.5367, 0.6302, 0.358, 0.3522, 0.7524, 0.8986] +2026-04-09 07:11:43.371354: Epoch time: 102.89 s +2026-04-09 07:11:44.470188: +2026-04-09 07:11:44.471991: Epoch 508 +2026-04-09 07:11:44.473635: Current learning rate: 0.00528 +2026-04-09 07:13:27.666389: train_loss -0.2984 +2026-04-09 07:13:27.671835: val_loss -0.2798 +2026-04-09 07:13:27.673853: Pseudo dice [0.6804, 0.2925, 0.5886, 0.5953, 0.1715, 0.702, 0.8564] +2026-04-09 07:13:27.675405: Epoch time: 103.2 s +2026-04-09 07:13:28.727448: +2026-04-09 07:13:28.729414: Epoch 509 +2026-04-09 07:13:28.731428: Current learning rate: 0.00527 +2026-04-09 07:15:11.687803: train_loss -0.2931 +2026-04-09 07:15:11.693308: val_loss -0.2629 +2026-04-09 07:15:11.695829: Pseudo dice [0.5559, 0.2094, 0.8237, 0.5424, 0.2812, 0.7606, 0.6725] +2026-04-09 07:15:11.697820: Epoch time: 102.96 s +2026-04-09 07:15:12.773534: +2026-04-09 07:15:12.775975: Epoch 510 +2026-04-09 07:15:12.778212: Current learning rate: 0.00526 +2026-04-09 07:16:57.692576: train_loss -0.2798 +2026-04-09 07:16:57.699176: val_loss -0.2484 +2026-04-09 07:16:57.701394: Pseudo dice [0.5072, 0.5723, 0.5933, 0.5639, 0.2425, 0.8667, 0.7089] +2026-04-09 07:16:57.703511: Epoch time: 104.92 s +2026-04-09 07:16:58.862926: +2026-04-09 07:16:58.865766: Epoch 511 +2026-04-09 07:16:58.868061: Current learning rate: 0.00525 +2026-04-09 
07:18:44.533603: train_loss -0.2813 +2026-04-09 07:18:44.538039: val_loss -0.2671 +2026-04-09 07:18:44.540321: Pseudo dice [0.4244, 0.099, 0.6776, 0.647, 0.4405, 0.819, 0.7922] +2026-04-09 07:18:44.542193: Epoch time: 105.67 s +2026-04-09 07:18:45.642601: +2026-04-09 07:18:45.644677: Epoch 512 +2026-04-09 07:18:45.647484: Current learning rate: 0.00524 +2026-04-09 07:20:28.933381: train_loss -0.2976 +2026-04-09 07:20:28.939343: val_loss -0.2523 +2026-04-09 07:20:28.941687: Pseudo dice [0.4424, 0.2131, 0.8304, 0.7862, 0.231, 0.6836, 0.8492] +2026-04-09 07:20:28.944750: Epoch time: 103.29 s +2026-04-09 07:20:30.012687: +2026-04-09 07:20:30.014527: Epoch 513 +2026-04-09 07:20:30.016658: Current learning rate: 0.00523 +2026-04-09 07:22:13.097786: train_loss -0.2942 +2026-04-09 07:22:13.103095: val_loss -0.2578 +2026-04-09 07:22:13.105084: Pseudo dice [0.2102, 0.2556, 0.732, 0.7889, 0.1433, 0.5562, 0.8985] +2026-04-09 07:22:13.107957: Epoch time: 103.09 s +2026-04-09 07:22:14.181700: +2026-04-09 07:22:14.187413: Epoch 514 +2026-04-09 07:22:14.191904: Current learning rate: 0.00522 +2026-04-09 07:23:57.453476: train_loss -0.29 +2026-04-09 07:23:57.458741: val_loss -0.2388 +2026-04-09 07:23:57.460466: Pseudo dice [0.195, 0.0761, 0.7129, 0.7683, 0.237, 0.5352, 0.7942] +2026-04-09 07:23:57.462451: Epoch time: 103.27 s +2026-04-09 07:23:58.547781: +2026-04-09 07:23:58.549732: Epoch 515 +2026-04-09 07:23:58.551263: Current learning rate: 0.00521 +2026-04-09 07:25:42.480594: train_loss -0.2905 +2026-04-09 07:25:42.486837: val_loss -0.2589 +2026-04-09 07:25:42.489799: Pseudo dice [0.4024, 0.2977, 0.7214, 0.1295, 0.3527, 0.5017, 0.6405] +2026-04-09 07:25:42.491951: Epoch time: 103.94 s +2026-04-09 07:25:43.581881: +2026-04-09 07:25:43.585380: Epoch 516 +2026-04-09 07:25:43.587532: Current learning rate: 0.0052 +2026-04-09 07:27:26.760197: train_loss -0.3091 +2026-04-09 07:27:26.765780: val_loss -0.2628 +2026-04-09 07:27:26.769617: Pseudo dice [0.3225, 0.2272, 0.6993, 0.666, 
0.3691, 0.6411, 0.6869] +2026-04-09 07:27:26.772718: Epoch time: 103.18 s +2026-04-09 07:27:27.852359: +2026-04-09 07:27:27.854268: Epoch 517 +2026-04-09 07:27:27.855987: Current learning rate: 0.00519 +2026-04-09 07:29:11.039417: train_loss -0.2883 +2026-04-09 07:29:11.045282: val_loss -0.2698 +2026-04-09 07:29:11.047556: Pseudo dice [0.7557, 0.4316, 0.7564, 0.6076, 0.3946, 0.6632, 0.8887] +2026-04-09 07:29:11.049630: Epoch time: 103.19 s +2026-04-09 07:29:12.141056: +2026-04-09 07:29:12.142940: Epoch 518 +2026-04-09 07:29:12.144478: Current learning rate: 0.00518 +2026-04-09 07:30:55.536473: train_loss -0.3149 +2026-04-09 07:30:55.542113: val_loss -0.2471 +2026-04-09 07:30:55.545252: Pseudo dice [0.5881, 0.3852, 0.7028, 0.8366, 0.2908, 0.1539, 0.8954] +2026-04-09 07:30:55.547526: Epoch time: 103.4 s +2026-04-09 07:30:56.623994: +2026-04-09 07:30:56.626147: Epoch 519 +2026-04-09 07:30:56.628302: Current learning rate: 0.00518 +2026-04-09 07:32:40.131854: train_loss -0.2879 +2026-04-09 07:32:40.139413: val_loss -0.1999 +2026-04-09 07:32:40.141683: Pseudo dice [0.2493, 0.1294, 0.3471, 0.4563, 0.2749, 0.4326, 0.4518] +2026-04-09 07:32:40.143703: Epoch time: 103.51 s +2026-04-09 07:32:41.218199: +2026-04-09 07:32:41.219893: Epoch 520 +2026-04-09 07:32:41.221994: Current learning rate: 0.00517 +2026-04-09 07:34:24.563786: train_loss -0.289 +2026-04-09 07:34:24.569299: val_loss -0.2446 +2026-04-09 07:34:24.571212: Pseudo dice [0.1798, 0.1862, 0.4648, 0.6853, 0.5702, 0.3241, 0.6389] +2026-04-09 07:34:24.573179: Epoch time: 103.35 s +2026-04-09 07:34:25.657782: +2026-04-09 07:34:25.659819: Epoch 521 +2026-04-09 07:34:25.661747: Current learning rate: 0.00516 +2026-04-09 07:36:08.598249: train_loss -0.2789 +2026-04-09 07:36:08.602626: val_loss -0.2286 +2026-04-09 07:36:08.604439: Pseudo dice [0.609, 0.5258, 0.7008, 0.7588, 0.2477, 0.1059, 0.6716] +2026-04-09 07:36:08.606111: Epoch time: 102.94 s +2026-04-09 07:36:09.685771: +2026-04-09 07:36:09.687721: Epoch 522 
+2026-04-09 07:36:09.689385: Current learning rate: 0.00515 +2026-04-09 07:37:51.828692: train_loss -0.284 +2026-04-09 07:37:51.834147: val_loss -0.2475 +2026-04-09 07:37:51.835712: Pseudo dice [0.4788, 0.4063, 0.5967, 0.3375, 0.3499, 0.5953, 0.6634] +2026-04-09 07:37:51.837749: Epoch time: 102.15 s +2026-04-09 07:37:52.923774: +2026-04-09 07:37:52.925700: Epoch 523 +2026-04-09 07:37:52.927450: Current learning rate: 0.00514 +2026-04-09 07:39:35.891225: train_loss -0.2953 +2026-04-09 07:39:35.898276: val_loss -0.2411 +2026-04-09 07:39:35.901863: Pseudo dice [0.3724, 0.3925, 0.4918, 0.3347, 0.3089, 0.1166, 0.7825] +2026-04-09 07:39:35.904317: Epoch time: 102.97 s +2026-04-09 07:39:36.980239: +2026-04-09 07:39:36.982404: Epoch 524 +2026-04-09 07:39:36.984503: Current learning rate: 0.00513 +2026-04-09 07:41:20.051165: train_loss -0.3055 +2026-04-09 07:41:20.056407: val_loss -0.2552 +2026-04-09 07:41:20.058018: Pseudo dice [0.4764, 0.4386, 0.7356, 0.2904, 0.4698, 0.827, 0.7296] +2026-04-09 07:41:20.059714: Epoch time: 103.07 s +2026-04-09 07:41:21.143672: +2026-04-09 07:41:21.145595: Epoch 525 +2026-04-09 07:41:21.147277: Current learning rate: 0.00512 +2026-04-09 07:43:04.184632: train_loss -0.3022 +2026-04-09 07:43:04.190610: val_loss -0.2359 +2026-04-09 07:43:04.192760: Pseudo dice [0.4685, 0.4875, 0.6975, 0.2768, 0.0421, 0.6139, 0.8292] +2026-04-09 07:43:04.194717: Epoch time: 103.04 s +2026-04-09 07:43:05.279606: +2026-04-09 07:43:05.281690: Epoch 526 +2026-04-09 07:43:05.283227: Current learning rate: 0.00511 +2026-04-09 07:44:47.820805: train_loss -0.2877 +2026-04-09 07:44:47.825423: val_loss -0.2495 +2026-04-09 07:44:47.827303: Pseudo dice [0.3452, 0.2554, 0.6641, 0.407, 0.187, 0.774, 0.8884] +2026-04-09 07:44:47.829170: Epoch time: 102.54 s +2026-04-09 07:44:48.902840: +2026-04-09 07:44:48.904899: Epoch 527 +2026-04-09 07:44:48.906639: Current learning rate: 0.0051 +2026-04-09 07:46:32.011552: train_loss -0.3067 +2026-04-09 07:46:32.017475: val_loss -0.2538 
+2026-04-09 07:46:32.020024: Pseudo dice [0.5905, 0.281, 0.7412, 0.8436, 0.3955, 0.4487, 0.6542] +2026-04-09 07:46:32.022241: Epoch time: 103.11 s +2026-04-09 07:46:33.079443: +2026-04-09 07:46:33.081254: Epoch 528 +2026-04-09 07:46:33.083025: Current learning rate: 0.00509 +2026-04-09 07:48:16.344799: train_loss -0.287 +2026-04-09 07:48:16.354617: val_loss -0.272 +2026-04-09 07:48:16.357714: Pseudo dice [0.6707, 0.3157, 0.7751, 0.7352, 0.2897, 0.5082, 0.6375] +2026-04-09 07:48:16.360774: Epoch time: 103.27 s +2026-04-09 07:48:17.444660: +2026-04-09 07:48:17.446577: Epoch 529 +2026-04-09 07:48:17.448667: Current learning rate: 0.00508 +2026-04-09 07:49:59.992573: train_loss -0.3012 +2026-04-09 07:49:59.998299: val_loss -0.2682 +2026-04-09 07:50:00.001046: Pseudo dice [0.4955, 0.2595, 0.8178, 0.7872, 0.4332, 0.6175, 0.7736] +2026-04-09 07:50:00.003080: Epoch time: 102.55 s +2026-04-09 07:50:01.067059: +2026-04-09 07:50:01.068996: Epoch 530 +2026-04-09 07:50:01.070764: Current learning rate: 0.00507 +2026-04-09 07:51:44.595698: train_loss -0.2979 +2026-04-09 07:51:44.600419: val_loss -0.2469 +2026-04-09 07:51:44.602397: Pseudo dice [0.3329, 0.0559, 0.8065, 0.3847, 0.4224, 0.6879, 0.8111] +2026-04-09 07:51:44.604445: Epoch time: 103.53 s +2026-04-09 07:51:45.687326: +2026-04-09 07:51:45.690144: Epoch 531 +2026-04-09 07:51:45.691710: Current learning rate: 0.00506 +2026-04-09 07:53:28.544292: train_loss -0.2919 +2026-04-09 07:53:28.549908: val_loss -0.2336 +2026-04-09 07:53:28.552038: Pseudo dice [0.6342, 0.4286, 0.6497, 0.7013, 0.402, 0.5295, 0.7919] +2026-04-09 07:53:28.553982: Epoch time: 102.86 s +2026-04-09 07:53:30.714999: +2026-04-09 07:53:30.716820: Epoch 532 +2026-04-09 07:53:30.718454: Current learning rate: 0.00505 +2026-04-09 07:55:14.139748: train_loss -0.2806 +2026-04-09 07:55:14.144603: val_loss -0.1783 +2026-04-09 07:55:14.146620: Pseudo dice [0.2668, 0.3397, 0.5376, 0.4866, 0.0693, 0.3533, 0.802] +2026-04-09 07:55:14.149197: Epoch time: 103.43 s 
+2026-04-09 07:55:15.247486: +2026-04-09 07:55:15.250011: Epoch 533 +2026-04-09 07:55:15.251705: Current learning rate: 0.00504 +2026-04-09 07:56:58.110793: train_loss -0.286 +2026-04-09 07:56:58.124001: val_loss -0.2394 +2026-04-09 07:56:58.126035: Pseudo dice [0.4154, 0.0748, 0.5654, 0.711, 0.2906, 0.258, 0.6461] +2026-04-09 07:56:58.128895: Epoch time: 102.87 s +2026-04-09 07:56:59.204460: +2026-04-09 07:56:59.206556: Epoch 534 +2026-04-09 07:56:59.209373: Current learning rate: 0.00503 +2026-04-09 07:58:42.103963: train_loss -0.3084 +2026-04-09 07:58:42.111910: val_loss -0.2699 +2026-04-09 07:58:42.114176: Pseudo dice [0.6081, 0.4902, 0.8504, 0.6085, 0.4344, 0.868, 0.6186] +2026-04-09 07:58:42.117304: Epoch time: 102.9 s +2026-04-09 07:58:43.205188: +2026-04-09 07:58:43.207085: Epoch 535 +2026-04-09 07:58:43.208792: Current learning rate: 0.00502 +2026-04-09 08:00:29.529555: train_loss -0.3061 +2026-04-09 08:00:29.537282: val_loss -0.2712 +2026-04-09 08:00:29.539933: Pseudo dice [0.5878, 0.7222, 0.6876, 0.8607, 0.4265, 0.7064, 0.7293] +2026-04-09 08:00:29.542147: Epoch time: 106.33 s +2026-04-09 08:00:30.628047: +2026-04-09 08:00:30.631337: Epoch 536 +2026-04-09 08:00:30.634050: Current learning rate: 0.00501 +2026-04-09 08:02:14.866346: train_loss -0.2947 +2026-04-09 08:02:14.879869: val_loss -0.269 +2026-04-09 08:02:14.884165: Pseudo dice [0.3403, 0.7035, 0.6308, 0.7091, 0.4858, 0.3509, 0.5611] +2026-04-09 08:02:14.887487: Epoch time: 104.24 s +2026-04-09 08:02:15.980917: +2026-04-09 08:02:15.984187: Epoch 537 +2026-04-09 08:02:15.987067: Current learning rate: 0.005 +2026-04-09 08:04:00.117206: train_loss -0.2845 +2026-04-09 08:04:00.131379: val_loss -0.2583 +2026-04-09 08:04:00.137472: Pseudo dice [0.2961, 0.4071, 0.699, 0.1239, 0.3261, 0.5939, 0.8945] +2026-04-09 08:04:00.147391: Epoch time: 104.14 s +2026-04-09 08:04:01.260353: +2026-04-09 08:04:01.263679: Epoch 538 +2026-04-09 08:04:01.267543: Current learning rate: 0.00499 +2026-04-09 08:05:43.715655: 
train_loss -0.2988 +2026-04-09 08:05:43.723292: val_loss -0.2609 +2026-04-09 08:05:43.725613: Pseudo dice [0.6678, 0.5178, 0.6002, 0.5516, 0.5096, 0.5674, 0.859] +2026-04-09 08:05:43.728504: Epoch time: 102.46 s +2026-04-09 08:05:44.790491: +2026-04-09 08:05:44.792438: Epoch 539 +2026-04-09 08:05:44.794470: Current learning rate: 0.00498 +2026-04-09 08:07:27.582259: train_loss -0.3067 +2026-04-09 08:07:27.587542: val_loss -0.2083 +2026-04-09 08:07:27.589553: Pseudo dice [0.1945, 0.5062, 0.4321, 0.7956, 0.1295, 0.7321, 0.797] +2026-04-09 08:07:27.591524: Epoch time: 102.79 s +2026-04-09 08:07:28.676258: +2026-04-09 08:07:28.677938: Epoch 540 +2026-04-09 08:07:28.680649: Current learning rate: 0.00497 +2026-04-09 08:09:13.893485: train_loss -0.2876 +2026-04-09 08:09:13.899157: val_loss -0.2625 +2026-04-09 08:09:13.901496: Pseudo dice [0.6586, 0.1922, 0.7367, 0.787, 0.3833, 0.6483, 0.6967] +2026-04-09 08:09:13.903660: Epoch time: 105.22 s +2026-04-09 08:09:14.978573: +2026-04-09 08:09:14.981353: Epoch 541 +2026-04-09 08:09:14.983307: Current learning rate: 0.00496 +2026-04-09 08:10:58.945247: train_loss -0.2891 +2026-04-09 08:10:58.950991: val_loss -0.2222 +2026-04-09 08:10:58.953665: Pseudo dice [0.5796, 0.0203, 0.752, 0.6169, 0.3981, 0.662, 0.747] +2026-04-09 08:10:58.955754: Epoch time: 103.97 s +2026-04-09 08:11:00.023923: +2026-04-09 08:11:00.026346: Epoch 542 +2026-04-09 08:11:00.028630: Current learning rate: 0.00495 +2026-04-09 08:12:43.863629: train_loss -0.2956 +2026-04-09 08:12:43.869417: val_loss -0.2572 +2026-04-09 08:12:43.871480: Pseudo dice [0.4759, 0.5152, 0.2363, 0.8514, 0.4082, 0.1782, 0.7536] +2026-04-09 08:12:43.873616: Epoch time: 103.84 s +2026-04-09 08:12:44.934544: +2026-04-09 08:12:44.936891: Epoch 543 +2026-04-09 08:12:44.939091: Current learning rate: 0.00494 +2026-04-09 08:14:29.967421: train_loss -0.3109 +2026-04-09 08:14:29.979384: val_loss -0.2884 +2026-04-09 08:14:29.983824: Pseudo dice [0.6571, 0.0796, 0.4797, 0.737, 0.3595, 0.7844, 
0.8078] +2026-04-09 08:14:29.987696: Epoch time: 105.04 s +2026-04-09 08:14:31.062168: +2026-04-09 08:14:31.064954: Epoch 544 +2026-04-09 08:14:31.066943: Current learning rate: 0.00493 +2026-04-09 08:16:14.749097: train_loss -0.298 +2026-04-09 08:16:14.755990: val_loss -0.2935 +2026-04-09 08:16:14.760528: Pseudo dice [0.65, 0.2712, 0.7409, 0.866, 0.4186, 0.8239, 0.8393] +2026-04-09 08:16:14.762459: Epoch time: 103.69 s +2026-04-09 08:16:15.858257: +2026-04-09 08:16:15.861335: Epoch 545 +2026-04-09 08:16:15.862928: Current learning rate: 0.00492 +2026-04-09 08:18:00.754071: train_loss -0.308 +2026-04-09 08:18:00.761586: val_loss -0.2568 +2026-04-09 08:18:00.763722: Pseudo dice [0.7944, 0.6221, 0.5439, 0.6606, 0.416, 0.806, 0.8244] +2026-04-09 08:18:00.765617: Epoch time: 104.9 s +2026-04-09 08:18:01.842503: +2026-04-09 08:18:01.844786: Epoch 546 +2026-04-09 08:18:01.846885: Current learning rate: 0.00491 +2026-04-09 08:19:44.624416: train_loss -0.3225 +2026-04-09 08:19:44.629812: val_loss -0.2239 +2026-04-09 08:19:44.631783: Pseudo dice [0.271, 0.339, 0.6971, 0.4341, 0.0308, 0.6458, 0.6092] +2026-04-09 08:19:44.633448: Epoch time: 102.79 s +2026-04-09 08:19:45.728967: +2026-04-09 08:19:45.731020: Epoch 547 +2026-04-09 08:19:45.732778: Current learning rate: 0.0049 +2026-04-09 08:21:28.805389: train_loss -0.2977 +2026-04-09 08:21:28.810237: val_loss -0.2808 +2026-04-09 08:21:28.812650: Pseudo dice [0.7545, 0.3532, 0.6928, 0.848, 0.3985, 0.5403, 0.8653] +2026-04-09 08:21:28.814903: Epoch time: 103.08 s +2026-04-09 08:21:29.883626: +2026-04-09 08:21:29.885552: Epoch 548 +2026-04-09 08:21:29.887072: Current learning rate: 0.00489 +2026-04-09 08:23:14.534502: train_loss -0.2932 +2026-04-09 08:23:14.539221: val_loss -0.2514 +2026-04-09 08:23:14.541524: Pseudo dice [0.3527, 0.5479, 0.5338, 0.6451, 0.3695, 0.2593, 0.8628] +2026-04-09 08:23:14.543660: Epoch time: 104.65 s +2026-04-09 08:23:15.665689: +2026-04-09 08:23:15.668159: Epoch 549 +2026-04-09 08:23:15.669995: 
Current learning rate: 0.00488 +2026-04-09 08:24:58.763609: train_loss -0.3138 +2026-04-09 08:24:58.770211: val_loss -0.2272 +2026-04-09 08:24:58.773800: Pseudo dice [0.4661, 0.3585, 0.5813, 0.6458, 0.3177, 0.4736, 0.7752] +2026-04-09 08:24:58.775940: Epoch time: 103.1 s +2026-04-09 08:25:01.538248: +2026-04-09 08:25:01.540006: Epoch 550 +2026-04-09 08:25:01.541668: Current learning rate: 0.00487 +2026-04-09 08:26:44.032315: train_loss -0.2822 +2026-04-09 08:26:44.037816: val_loss -0.2462 +2026-04-09 08:26:44.039478: Pseudo dice [0.3945, 0.4073, 0.6446, 0.7758, 0.4573, 0.5732, 0.3659] +2026-04-09 08:26:44.041857: Epoch time: 102.5 s +2026-04-09 08:26:45.137909: +2026-04-09 08:26:45.139749: Epoch 551 +2026-04-09 08:26:45.141207: Current learning rate: 0.00486 +2026-04-09 08:28:27.919016: train_loss -0.2902 +2026-04-09 08:28:27.933960: val_loss -0.2561 +2026-04-09 08:28:27.935845: Pseudo dice [0.5651, 0.2828, 0.5741, 0.898, 0.3999, 0.7159, 0.8363] +2026-04-09 08:28:27.939049: Epoch time: 102.78 s +2026-04-09 08:28:29.015356: +2026-04-09 08:28:29.016870: Epoch 552 +2026-04-09 08:28:29.018556: Current learning rate: 0.00485 +2026-04-09 08:30:13.348986: train_loss -0.3056 +2026-04-09 08:30:13.356053: val_loss -0.2425 +2026-04-09 08:30:13.359599: Pseudo dice [0.4409, 0.5098, 0.6064, 0.6765, 0.374, 0.4635, 0.6467] +2026-04-09 08:30:13.361843: Epoch time: 104.34 s +2026-04-09 08:30:14.456265: +2026-04-09 08:30:14.458571: Epoch 553 +2026-04-09 08:30:14.461771: Current learning rate: 0.00484 +2026-04-09 08:31:58.207881: train_loss -0.3008 +2026-04-09 08:31:58.213526: val_loss -0.2149 +2026-04-09 08:31:58.215213: Pseudo dice [0.5091, 0.5128, 0.6333, 0.4487, 0.1848, 0.5719, 0.5526] +2026-04-09 08:31:58.218277: Epoch time: 103.75 s +2026-04-09 08:31:59.288674: +2026-04-09 08:31:59.291063: Epoch 554 +2026-04-09 08:31:59.292663: Current learning rate: 0.00484 +2026-04-09 08:33:43.376019: train_loss -0.2984 +2026-04-09 08:33:43.382333: val_loss -0.2525 +2026-04-09 08:33:43.384291: 
Pseudo dice [0.4993, 0.4525, 0.6347, 0.8023, 0.4153, 0.7072, 0.8633] +2026-04-09 08:33:43.386696: Epoch time: 104.09 s +2026-04-09 08:33:44.463388: +2026-04-09 08:33:44.465427: Epoch 555 +2026-04-09 08:33:44.467406: Current learning rate: 0.00483 +2026-04-09 08:35:27.201046: train_loss -0.294 +2026-04-09 08:35:27.207329: val_loss -0.2396 +2026-04-09 08:35:27.211707: Pseudo dice [0.1761, 0.3003, 0.6831, 0.2014, 0.3708, 0.6985, 0.4959] +2026-04-09 08:35:27.215250: Epoch time: 102.74 s +2026-04-09 08:35:28.295757: +2026-04-09 08:35:28.297552: Epoch 556 +2026-04-09 08:35:28.299548: Current learning rate: 0.00482 +2026-04-09 08:37:11.133103: train_loss -0.2678 +2026-04-09 08:37:11.140375: val_loss -0.1035 +2026-04-09 08:37:11.142550: Pseudo dice [0.5339, 0.4548, 0.0139, 0.024, 0.1147, 0.2303, 0.6216] +2026-04-09 08:37:11.144859: Epoch time: 102.84 s +2026-04-09 08:37:12.234254: +2026-04-09 08:37:12.235997: Epoch 557 +2026-04-09 08:37:12.237554: Current learning rate: 0.00481 +2026-04-09 08:38:54.902848: train_loss -0.2439 +2026-04-09 08:38:54.908146: val_loss -0.2621 +2026-04-09 08:38:54.910111: Pseudo dice [0.5117, 0.1747, 0.5471, 0.5169, 0.4884, 0.3935, 0.6351] +2026-04-09 08:38:54.912095: Epoch time: 102.67 s +2026-04-09 08:38:55.987441: +2026-04-09 08:38:55.989648: Epoch 558 +2026-04-09 08:38:55.991060: Current learning rate: 0.0048 +2026-04-09 08:40:38.517417: train_loss -0.2935 +2026-04-09 08:40:38.523522: val_loss -0.2969 +2026-04-09 08:40:38.526295: Pseudo dice [0.3009, 0.1237, 0.5974, 0.7939, 0.4925, 0.7449, 0.8326] +2026-04-09 08:40:38.528090: Epoch time: 102.53 s +2026-04-09 08:40:39.610101: +2026-04-09 08:40:39.612220: Epoch 559 +2026-04-09 08:40:39.614121: Current learning rate: 0.00479 +2026-04-09 08:42:23.948393: train_loss -0.2992 +2026-04-09 08:42:23.955311: val_loss -0.25 +2026-04-09 08:42:23.957925: Pseudo dice [0.2266, 0.2076, 0.8328, 0.4485, 0.3501, 0.7512, 0.8985] +2026-04-09 08:42:23.966125: Epoch time: 104.34 s +2026-04-09 08:42:25.032256: 
+2026-04-09 08:42:25.035070: Epoch 560 +2026-04-09 08:42:25.038367: Current learning rate: 0.00478 +2026-04-09 08:44:08.790178: train_loss -0.304 +2026-04-09 08:44:08.795843: val_loss -0.2206 +2026-04-09 08:44:08.798125: Pseudo dice [0.4155, 0.4814, 0.7273, 0.0006, 0.0561, 0.5624, 0.4905] +2026-04-09 08:44:08.800469: Epoch time: 103.76 s +2026-04-09 08:44:09.904865: +2026-04-09 08:44:09.906809: Epoch 561 +2026-04-09 08:44:09.908658: Current learning rate: 0.00477 +2026-04-09 08:45:53.389914: train_loss -0.3122 +2026-04-09 08:45:53.398541: val_loss -0.2898 +2026-04-09 08:45:53.400753: Pseudo dice [0.7263, 0.1323, 0.8291, 0.753, 0.4116, 0.7448, 0.6295] +2026-04-09 08:45:53.402725: Epoch time: 103.49 s +2026-04-09 08:45:54.497857: +2026-04-09 08:45:54.500334: Epoch 562 +2026-04-09 08:45:54.502669: Current learning rate: 0.00476 +2026-04-09 08:47:37.167687: train_loss -0.3037 +2026-04-09 08:47:37.180540: val_loss -0.2406 +2026-04-09 08:47:37.182246: Pseudo dice [0.6946, 0.3716, 0.6169, 0.6286, 0.1262, 0.611, 0.7848] +2026-04-09 08:47:37.189283: Epoch time: 102.67 s +2026-04-09 08:47:38.277890: +2026-04-09 08:47:38.279529: Epoch 563 +2026-04-09 08:47:38.281589: Current learning rate: 0.00475 +2026-04-09 08:49:22.218999: train_loss -0.2953 +2026-04-09 08:49:22.224687: val_loss -0.2352 +2026-04-09 08:49:22.226871: Pseudo dice [0.7198, 0.0722, 0.6483, 0.7499, 0.2787, 0.2282, 0.7116] +2026-04-09 08:49:22.230202: Epoch time: 103.94 s +2026-04-09 08:49:23.296803: +2026-04-09 08:49:23.298607: Epoch 564 +2026-04-09 08:49:23.300649: Current learning rate: 0.00474 +2026-04-09 08:51:06.218821: train_loss -0.3054 +2026-04-09 08:51:06.223466: val_loss -0.2502 +2026-04-09 08:51:06.225711: Pseudo dice [0.6009, 0.323, 0.6952, 0.684, 0.4091, 0.8089, 0.5469] +2026-04-09 08:51:06.228336: Epoch time: 102.93 s +2026-04-09 08:51:07.348125: +2026-04-09 08:51:07.350459: Epoch 565 +2026-04-09 08:51:07.352452: Current learning rate: 0.00473 +2026-04-09 08:52:50.261907: train_loss -0.3145 
+2026-04-09 08:52:50.267782: val_loss -0.2256 +2026-04-09 08:52:50.269862: Pseudo dice [0.7651, 0.33, 0.6209, 0.5655, 0.1493, 0.5718, 0.5385] +2026-04-09 08:52:50.271794: Epoch time: 102.92 s +2026-04-09 08:52:51.367624: +2026-04-09 08:52:51.369336: Epoch 566 +2026-04-09 08:52:51.371469: Current learning rate: 0.00472 +2026-04-09 08:54:35.284166: train_loss -0.3214 +2026-04-09 08:54:35.289307: val_loss -0.2477 +2026-04-09 08:54:35.291422: Pseudo dice [0.6241, 0.3978, 0.7014, 0.5835, 0.3854, 0.4672, 0.869] +2026-04-09 08:54:35.293506: Epoch time: 103.92 s +2026-04-09 08:54:36.376575: +2026-04-09 08:54:36.379159: Epoch 567 +2026-04-09 08:54:36.382807: Current learning rate: 0.00471 +2026-04-09 08:56:19.574864: train_loss -0.3176 +2026-04-09 08:56:19.580921: val_loss -0.2315 +2026-04-09 08:56:19.583560: Pseudo dice [0.3865, 0.4197, 0.737, 0.71, 0.3214, 0.3744, 0.7041] +2026-04-09 08:56:19.585573: Epoch time: 103.2 s +2026-04-09 08:56:20.673894: +2026-04-09 08:56:20.676329: Epoch 568 +2026-04-09 08:56:20.678344: Current learning rate: 0.0047 +2026-04-09 08:58:04.188559: train_loss -0.297 +2026-04-09 08:58:04.194370: val_loss -0.2339 +2026-04-09 08:58:04.198984: Pseudo dice [0.4915, 0.5365, 0.2654, 0.7926, 0.3527, 0.3027, 0.831] +2026-04-09 08:58:04.200901: Epoch time: 103.52 s +2026-04-09 08:58:05.277589: +2026-04-09 08:58:05.279309: Epoch 569 +2026-04-09 08:58:05.280797: Current learning rate: 0.00469 +2026-04-09 08:59:48.801125: train_loss -0.299 +2026-04-09 08:59:48.809689: val_loss -0.2718 +2026-04-09 08:59:48.814434: Pseudo dice [0.4713, 0.3768, 0.6571, 0.4924, 0.3161, 0.7963, 0.777] +2026-04-09 08:59:48.817912: Epoch time: 103.53 s +2026-04-09 08:59:49.899223: +2026-04-09 08:59:49.902728: Epoch 570 +2026-04-09 08:59:49.906722: Current learning rate: 0.00468 +2026-04-09 09:01:32.341993: train_loss -0.3132 +2026-04-09 09:01:32.347259: val_loss -0.2429 +2026-04-09 09:01:32.349295: Pseudo dice [0.6173, 0.1935, 0.5906, 0.3812, 0.1247, 0.763, 0.2872] +2026-04-09 
09:01:32.350912: Epoch time: 102.45 s +2026-04-09 09:01:33.421905: +2026-04-09 09:01:33.423514: Epoch 571 +2026-04-09 09:01:33.425228: Current learning rate: 0.00467 +2026-04-09 09:03:16.851572: train_loss -0.3142 +2026-04-09 09:03:16.857723: val_loss -0.261 +2026-04-09 09:03:16.860088: Pseudo dice [0.1792, 0.2473, 0.8015, 0.8402, 0.4404, 0.3915, 0.8929] +2026-04-09 09:03:16.862565: Epoch time: 103.43 s +2026-04-09 09:03:17.930866: +2026-04-09 09:03:17.932832: Epoch 572 +2026-04-09 09:03:17.934700: Current learning rate: 0.00466 +2026-04-09 09:05:00.753216: train_loss -0.3099 +2026-04-09 09:05:00.757959: val_loss -0.2662 +2026-04-09 09:05:00.759819: Pseudo dice [0.5091, 0.0892, 0.6611, 0.5559, 0.4597, 0.734, 0.8526] +2026-04-09 09:05:00.761767: Epoch time: 102.83 s +2026-04-09 09:05:02.912908: +2026-04-09 09:05:02.914715: Epoch 573 +2026-04-09 09:05:02.916161: Current learning rate: 0.00465 +2026-04-09 09:06:45.683618: train_loss -0.3165 +2026-04-09 09:06:45.691225: val_loss -0.2092 +2026-04-09 09:06:45.693273: Pseudo dice [0.252, 0.357, 0.4943, 0.8487, 0.3423, 0.4093, 0.7927] +2026-04-09 09:06:45.695335: Epoch time: 102.77 s +2026-04-09 09:06:46.827135: +2026-04-09 09:06:46.829994: Epoch 574 +2026-04-09 09:06:46.831991: Current learning rate: 0.00464 +2026-04-09 09:08:30.514017: train_loss -0.2929 +2026-04-09 09:08:30.520126: val_loss -0.2248 +2026-04-09 09:08:30.522280: Pseudo dice [0.6588, 0.4084, 0.7365, 0.0849, 0.2181, 0.6742, 0.6583] +2026-04-09 09:08:30.524263: Epoch time: 103.69 s +2026-04-09 09:08:31.614053: +2026-04-09 09:08:31.616444: Epoch 575 +2026-04-09 09:08:31.618410: Current learning rate: 0.00463 +2026-04-09 09:10:15.521219: train_loss -0.2763 +2026-04-09 09:10:15.526836: val_loss -0.2208 +2026-04-09 09:10:15.529201: Pseudo dice [0.7534, 0.3985, 0.745, 0.6189, 0.3168, 0.407, 0.6237] +2026-04-09 09:10:15.531137: Epoch time: 103.91 s +2026-04-09 09:10:16.618312: +2026-04-09 09:10:16.620283: Epoch 576 +2026-04-09 09:10:16.621681: Current learning 
rate: 0.00462 +2026-04-09 09:11:59.490367: train_loss -0.3019 +2026-04-09 09:11:59.497000: val_loss -0.2656 +2026-04-09 09:11:59.499753: Pseudo dice [0.6094, 0.4912, 0.7372, 0.8381, 0.409, 0.6089, 0.7498] +2026-04-09 09:11:59.502079: Epoch time: 102.88 s +2026-04-09 09:12:00.600873: +2026-04-09 09:12:00.603612: Epoch 577 +2026-04-09 09:12:00.605706: Current learning rate: 0.00461 +2026-04-09 09:13:46.521526: train_loss -0.3113 +2026-04-09 09:13:46.527471: val_loss -0.2497 +2026-04-09 09:13:46.530380: Pseudo dice [0.5445, 0.4113, 0.6874, 0.8831, 0.4675, 0.512, 0.8088] +2026-04-09 09:13:46.532878: Epoch time: 105.92 s +2026-04-09 09:13:47.636595: +2026-04-09 09:13:47.639679: Epoch 578 +2026-04-09 09:13:47.641779: Current learning rate: 0.0046 +2026-04-09 09:15:30.288514: train_loss -0.3085 +2026-04-09 09:15:30.295406: val_loss -0.2596 +2026-04-09 09:15:30.298253: Pseudo dice [0.5458, 0.202, 0.8493, 0.5332, 0.3031, 0.6794, 0.7798] +2026-04-09 09:15:30.301795: Epoch time: 102.66 s +2026-04-09 09:15:31.410708: +2026-04-09 09:15:31.413400: Epoch 579 +2026-04-09 09:15:31.415367: Current learning rate: 0.00459 +2026-04-09 09:17:14.378113: train_loss -0.3152 +2026-04-09 09:17:14.383828: val_loss -0.2216 +2026-04-09 09:17:14.385652: Pseudo dice [0.6993, 0.0886, 0.8304, 0.4452, 0.2819, 0.8537, 0.1434] +2026-04-09 09:17:14.387636: Epoch time: 102.97 s +2026-04-09 09:17:15.502788: +2026-04-09 09:17:15.504994: Epoch 580 +2026-04-09 09:17:15.507564: Current learning rate: 0.00458 +2026-04-09 09:18:58.806402: train_loss -0.2793 +2026-04-09 09:18:58.810781: val_loss -0.1848 +2026-04-09 09:18:58.813047: Pseudo dice [0.2683, 0.3753, 0.6706, 0.5358, 0.3392, 0.1547, 0.4639] +2026-04-09 09:18:58.814923: Epoch time: 103.31 s +2026-04-09 09:18:59.914078: +2026-04-09 09:18:59.915888: Epoch 581 +2026-04-09 09:18:59.917866: Current learning rate: 0.00457 +2026-04-09 09:20:42.120709: train_loss -0.294 +2026-04-09 09:20:42.126789: val_loss -0.2157 +2026-04-09 09:20:42.129052: Pseudo dice 
[0.6363, 0.1664, 0.202, 0.4903, 0.0, 0.7275, 0.5807] +2026-04-09 09:20:42.131511: Epoch time: 102.21 s +2026-04-09 09:20:43.215456: +2026-04-09 09:20:43.217443: Epoch 582 +2026-04-09 09:20:43.219289: Current learning rate: 0.00456 +2026-04-09 09:22:26.479880: train_loss -0.2694 +2026-04-09 09:22:26.484205: val_loss -0.2719 +2026-04-09 09:22:26.486358: Pseudo dice [0.5752, 0.1133, 0.6761, 0.8679, 0.5828, 0.7426, 0.7943] +2026-04-09 09:22:26.488083: Epoch time: 103.27 s +2026-04-09 09:22:27.580486: +2026-04-09 09:22:27.582472: Epoch 583 +2026-04-09 09:22:27.584444: Current learning rate: 0.00455 +2026-04-09 09:24:09.680462: train_loss -0.2747 +2026-04-09 09:24:09.686267: val_loss -0.1998 +2026-04-09 09:24:09.688152: Pseudo dice [0.3868, 0.1558, 0.497, 0.6314, 0.375, 0.5245, 0.4847] +2026-04-09 09:24:09.690065: Epoch time: 102.1 s +2026-04-09 09:24:10.782736: +2026-04-09 09:24:10.784477: Epoch 584 +2026-04-09 09:24:10.786124: Current learning rate: 0.00454 +2026-04-09 09:25:55.522163: train_loss -0.314 +2026-04-09 09:25:55.527597: val_loss -0.2564 +2026-04-09 09:25:55.529427: Pseudo dice [0.4307, 0.2711, 0.4919, 0.7462, 0.4257, 0.2621, 0.8594] +2026-04-09 09:25:55.531466: Epoch time: 104.74 s +2026-04-09 09:25:56.632225: +2026-04-09 09:25:56.634519: Epoch 585 +2026-04-09 09:25:56.636827: Current learning rate: 0.00453 +2026-04-09 09:27:39.776394: train_loss -0.3238 +2026-04-09 09:27:39.781480: val_loss -0.2878 +2026-04-09 09:27:39.783126: Pseudo dice [0.509, 0.4552, 0.6668, 0.7291, 0.5149, 0.6794, 0.7847] +2026-04-09 09:27:39.785217: Epoch time: 103.15 s +2026-04-09 09:27:40.894442: +2026-04-09 09:27:40.896793: Epoch 586 +2026-04-09 09:27:40.898386: Current learning rate: 0.00452 +2026-04-09 09:29:24.090729: train_loss -0.3102 +2026-04-09 09:29:24.094715: val_loss -0.2724 +2026-04-09 09:29:24.096748: Pseudo dice [0.4176, 0.3523, 0.7319, 0.8327, 0.2098, 0.6961, 0.7793] +2026-04-09 09:29:24.098796: Epoch time: 103.2 s +2026-04-09 09:29:25.213613: +2026-04-09 
09:29:25.215728: Epoch 587 +2026-04-09 09:29:25.217862: Current learning rate: 0.00451 +2026-04-09 09:31:07.658620: train_loss -0.3085 +2026-04-09 09:31:07.664427: val_loss -0.247 +2026-04-09 09:31:07.666475: Pseudo dice [0.4491, 0.438, 0.7326, 0.8818, 0.4361, 0.5892, 0.4948] +2026-04-09 09:31:07.668723: Epoch time: 102.45 s +2026-04-09 09:31:08.753129: +2026-04-09 09:31:08.755691: Epoch 588 +2026-04-09 09:31:08.758004: Current learning rate: 0.0045 +2026-04-09 09:32:51.678667: train_loss -0.3081 +2026-04-09 09:32:51.684919: val_loss -0.2533 +2026-04-09 09:32:51.687310: Pseudo dice [0.4124, 0.5488, 0.7471, 0.6628, 0.5007, 0.7578, 0.5577] +2026-04-09 09:32:51.689881: Epoch time: 102.93 s +2026-04-09 09:32:52.810644: +2026-04-09 09:32:52.813294: Epoch 589 +2026-04-09 09:32:52.815493: Current learning rate: 0.00449 +2026-04-09 09:34:36.997509: train_loss -0.3212 +2026-04-09 09:34:37.002861: val_loss -0.2575 +2026-04-09 09:34:37.004969: Pseudo dice [0.2238, 0.3571, 0.6322, 0.8982, 0.1039, 0.7304, 0.6074] +2026-04-09 09:34:37.007197: Epoch time: 104.19 s +2026-04-09 09:34:38.120545: +2026-04-09 09:34:38.122754: Epoch 590 +2026-04-09 09:34:38.124525: Current learning rate: 0.00448 +2026-04-09 09:36:21.282080: train_loss -0.3132 +2026-04-09 09:36:21.287926: val_loss -0.2426 +2026-04-09 09:36:21.289998: Pseudo dice [0.6818, 0.3601, 0.7704, 0.2001, 0.2855, 0.7522, 0.482] +2026-04-09 09:36:21.291673: Epoch time: 103.16 s +2026-04-09 09:36:22.373428: +2026-04-09 09:36:22.374810: Epoch 591 +2026-04-09 09:36:22.376368: Current learning rate: 0.00447 +2026-04-09 09:38:06.777771: train_loss -0.3145 +2026-04-09 09:38:06.782201: val_loss -0.2756 +2026-04-09 09:38:06.784631: Pseudo dice [0.3695, 0.5675, 0.816, 0.7332, 0.3887, 0.7537, 0.6567] +2026-04-09 09:38:06.786248: Epoch time: 104.41 s +2026-04-09 09:38:07.908214: +2026-04-09 09:38:07.910235: Epoch 592 +2026-04-09 09:38:07.911888: Current learning rate: 0.00446 +2026-04-09 09:39:51.541239: train_loss -0.3178 +2026-04-09 
09:39:51.549514: val_loss -0.2563 +2026-04-09 09:39:51.551546: Pseudo dice [0.3697, 0.2122, 0.5826, 0.8362, 0.504, 0.6649, 0.8676] +2026-04-09 09:39:51.554181: Epoch time: 103.64 s +2026-04-09 09:39:52.646003: +2026-04-09 09:39:52.647854: Epoch 593 +2026-04-09 09:39:52.649974: Current learning rate: 0.00445 +2026-04-09 09:41:36.335332: train_loss -0.3312 +2026-04-09 09:41:36.341359: val_loss -0.2632 +2026-04-09 09:41:36.343357: Pseudo dice [0.3511, 0.2043, 0.7407, 0.1757, 0.4344, 0.5485, 0.8419] +2026-04-09 09:41:36.346174: Epoch time: 103.69 s +2026-04-09 09:41:37.432488: +2026-04-09 09:41:37.434402: Epoch 594 +2026-04-09 09:41:37.436532: Current learning rate: 0.00444 +2026-04-09 09:43:19.846836: train_loss -0.3053 +2026-04-09 09:43:19.852651: val_loss -0.237 +2026-04-09 09:43:19.854997: Pseudo dice [0.4505, 0.4134, 0.6609, 0.7897, 0.2491, 0.6318, 0.8958] +2026-04-09 09:43:19.856763: Epoch time: 102.42 s +2026-04-09 09:43:20.927792: +2026-04-09 09:43:20.929537: Epoch 595 +2026-04-09 09:43:20.931445: Current learning rate: 0.00443 +2026-04-09 09:45:04.944913: train_loss -0.3278 +2026-04-09 09:45:04.950772: val_loss -0.244 +2026-04-09 09:45:04.952694: Pseudo dice [0.4862, 0.0545, 0.6186, 0.3476, 0.1628, 0.8357, 0.5582] +2026-04-09 09:45:04.954968: Epoch time: 104.02 s +2026-04-09 09:45:06.079197: +2026-04-09 09:45:06.082467: Epoch 596 +2026-04-09 09:45:06.084948: Current learning rate: 0.00442 +2026-04-09 09:46:49.448641: train_loss -0.3187 +2026-04-09 09:46:49.452848: val_loss -0.2306 +2026-04-09 09:46:49.454673: Pseudo dice [0.7014, 0.3983, 0.3949, 0.7919, 0.1579, 0.4079, 0.8083] +2026-04-09 09:46:49.456478: Epoch time: 103.37 s +2026-04-09 09:46:50.598092: +2026-04-09 09:46:50.600044: Epoch 597 +2026-04-09 09:46:50.601664: Current learning rate: 0.00441 +2026-04-09 09:48:33.398802: train_loss -0.2906 +2026-04-09 09:48:33.403901: val_loss -0.2291 +2026-04-09 09:48:33.406318: Pseudo dice [0.5223, 0.3947, 0.4708, 0.4903, 0.2194, 0.4543, 0.8574] +2026-04-09 
09:48:33.408681: Epoch time: 102.8 s +2026-04-09 09:48:34.502698: +2026-04-09 09:48:34.506174: Epoch 598 +2026-04-09 09:48:34.508880: Current learning rate: 0.0044 +2026-04-09 09:50:17.062709: train_loss -0.3029 +2026-04-09 09:50:17.067934: val_loss -0.288 +2026-04-09 09:50:17.071436: Pseudo dice [0.7584, 0.5699, 0.6977, 0.7642, 0.4354, 0.6901, 0.5827] +2026-04-09 09:50:17.073801: Epoch time: 102.56 s +2026-04-09 09:50:18.162593: +2026-04-09 09:50:18.164400: Epoch 599 +2026-04-09 09:50:18.165993: Current learning rate: 0.00439 +2026-04-09 09:52:08.242745: train_loss -0.3041 +2026-04-09 09:52:08.254024: val_loss -0.2599 +2026-04-09 09:52:08.257566: Pseudo dice [0.5952, 0.5806, 0.7533, 0.671, 0.3511, 0.4399, 0.5966] +2026-04-09 09:52:08.261173: Epoch time: 110.08 s +2026-04-09 09:52:11.249149: +2026-04-09 09:52:11.251617: Epoch 600 +2026-04-09 09:52:11.253507: Current learning rate: 0.00438 +2026-04-09 09:53:58.447747: train_loss -0.3018 +2026-04-09 09:53:58.453910: val_loss -0.2139 +2026-04-09 09:53:58.457196: Pseudo dice [0.1738, 0.0933, 0.7476, 0.719, 0.2019, 0.7175, 0.7764] +2026-04-09 09:53:58.460330: Epoch time: 107.2 s +2026-04-09 09:54:00.057208: +2026-04-09 09:54:00.059597: Epoch 601 +2026-04-09 09:54:00.061844: Current learning rate: 0.00437 +2026-04-09 09:55:43.394097: train_loss -0.3005 +2026-04-09 09:55:43.401447: val_loss -0.2783 +2026-04-09 09:55:43.403773: Pseudo dice [0.4928, 0.5985, 0.5649, 0.7081, 0.5274, 0.7746, 0.7989] +2026-04-09 09:55:43.406105: Epoch time: 103.34 s +2026-04-09 09:55:44.519402: +2026-04-09 09:55:44.521388: Epoch 602 +2026-04-09 09:55:44.523387: Current learning rate: 0.00436 +2026-04-09 09:57:27.652082: train_loss -0.3142 +2026-04-09 09:57:27.657308: val_loss -0.261 +2026-04-09 09:57:27.659683: Pseudo dice [0.3995, 0.178, 0.6783, 0.7137, 0.2179, 0.6258, 0.7546] +2026-04-09 09:57:27.661898: Epoch time: 103.14 s +2026-04-09 09:57:28.766310: +2026-04-09 09:57:28.768311: Epoch 603 +2026-04-09 09:57:28.770111: Current learning rate: 
0.00435 +2026-04-09 09:59:11.018821: train_loss -0.2951 +2026-04-09 09:59:11.024395: val_loss -0.2232 +2026-04-09 09:59:11.027494: Pseudo dice [0.3028, 0.4423, 0.6121, 0.1715, 0.2592, 0.5387, 0.6486] +2026-04-09 09:59:11.029179: Epoch time: 102.26 s +2026-04-09 09:59:12.157497: +2026-04-09 09:59:12.159807: Epoch 604 +2026-04-09 09:59:12.161440: Current learning rate: 0.00434 +2026-04-09 10:00:54.308051: train_loss -0.2981 +2026-04-09 10:00:54.313241: val_loss -0.2732 +2026-04-09 10:00:54.315839: Pseudo dice [0.6493, 0.2448, 0.7513, 0.5515, 0.1579, 0.6831, 0.8278] +2026-04-09 10:00:54.318403: Epoch time: 102.15 s +2026-04-09 10:00:55.435504: +2026-04-09 10:00:55.437196: Epoch 605 +2026-04-09 10:00:55.438770: Current learning rate: 0.00433 +2026-04-09 10:02:38.545285: train_loss -0.2997 +2026-04-09 10:02:38.555564: val_loss -0.2671 +2026-04-09 10:02:38.558032: Pseudo dice [0.4892, 0.3404, 0.631, 0.7631, 0.3496, 0.4367, 0.6537] +2026-04-09 10:02:38.560363: Epoch time: 103.11 s +2026-04-09 10:02:39.669390: +2026-04-09 10:02:39.671008: Epoch 606 +2026-04-09 10:02:39.672661: Current learning rate: 0.00432 +2026-04-09 10:04:23.243235: train_loss -0.3016 +2026-04-09 10:04:23.249445: val_loss -0.2754 +2026-04-09 10:04:23.252155: Pseudo dice [0.6742, 0.5223, 0.3852, 0.8816, 0.467, 0.7905, 0.8869] +2026-04-09 10:04:23.254086: Epoch time: 103.58 s +2026-04-09 10:04:24.364169: +2026-04-09 10:04:24.365810: Epoch 607 +2026-04-09 10:04:24.367612: Current learning rate: 0.00431 +2026-04-09 10:06:07.531690: train_loss -0.3065 +2026-04-09 10:06:07.538055: val_loss -0.2562 +2026-04-09 10:06:07.539726: Pseudo dice [0.4073, 0.5518, 0.6016, 0.7756, 0.3235, 0.6341, 0.8473] +2026-04-09 10:06:07.541571: Epoch time: 103.17 s +2026-04-09 10:06:08.650841: +2026-04-09 10:06:08.653074: Epoch 608 +2026-04-09 10:06:08.654654: Current learning rate: 0.0043 +2026-04-09 10:07:53.063636: train_loss -0.3002 +2026-04-09 10:07:53.070355: val_loss -0.2586 +2026-04-09 10:07:53.072236: Pseudo dice [0.6623, 
0.2566, 0.6378, 0.6484, 0.2569, 0.6755, 0.7738] +2026-04-09 10:07:53.073871: Epoch time: 104.42 s +2026-04-09 10:07:54.186391: +2026-04-09 10:07:54.188035: Epoch 609 +2026-04-09 10:07:54.189670: Current learning rate: 0.00429 +2026-04-09 10:09:43.014782: train_loss -0.3157 +2026-04-09 10:09:43.028039: val_loss -0.2667 +2026-04-09 10:09:43.032737: Pseudo dice [0.4424, 0.464, 0.6338, 0.7584, 0.4821, 0.7894, 0.594] +2026-04-09 10:09:43.037361: Epoch time: 108.83 s +2026-04-09 10:09:44.172106: +2026-04-09 10:09:44.176359: Epoch 610 +2026-04-09 10:09:44.180900: Current learning rate: 0.00429 +2026-04-09 10:11:55.000112: train_loss -0.3108 +2026-04-09 10:11:55.015834: val_loss -0.262 +2026-04-09 10:11:55.019675: Pseudo dice [0.4088, 0.5608, 0.6583, 0.2201, 0.3128, 0.671, 0.654] +2026-04-09 10:11:55.023597: Epoch time: 130.83 s +2026-04-09 10:11:56.149107: +2026-04-09 10:11:56.152840: Epoch 611 +2026-04-09 10:11:56.157821: Current learning rate: 0.00428 +2026-04-09 10:14:01.846731: train_loss -0.3101 +2026-04-09 10:14:01.854126: val_loss -0.2365 +2026-04-09 10:14:01.860598: Pseudo dice [0.5257, 0.3894, 0.5434, 0.5632, 0.3356, 0.7151, 0.8552] +2026-04-09 10:14:01.864325: Epoch time: 125.7 s +2026-04-09 10:14:03.006760: +2026-04-09 10:14:03.017227: Epoch 612 +2026-04-09 10:14:03.023139: Current learning rate: 0.00427 +2026-04-09 10:15:56.487649: train_loss -0.3109 +2026-04-09 10:15:56.496968: val_loss -0.2359 +2026-04-09 10:15:56.500844: Pseudo dice [0.4149, 0.5116, 0.7756, 0.8302, 0.1115, 0.3495, 0.6488] +2026-04-09 10:15:56.505892: Epoch time: 113.48 s +2026-04-09 10:15:58.720650: +2026-04-09 10:15:58.725585: Epoch 613 +2026-04-09 10:15:58.730404: Current learning rate: 0.00426 +2026-04-09 10:17:46.451769: train_loss -0.3049 +2026-04-09 10:17:46.460494: val_loss -0.2374 +2026-04-09 10:17:46.464823: Pseudo dice [0.4786, 0.2606, 0.6639, 0.7159, 0.2585, 0.65, 0.8207] +2026-04-09 10:17:46.467669: Epoch time: 107.73 s +2026-04-09 10:17:47.600539: +2026-04-09 10:17:47.603502: 
Epoch 614 +2026-04-09 10:17:47.607488: Current learning rate: 0.00425 +2026-04-09 10:19:32.937627: train_loss -0.2918 +2026-04-09 10:19:32.943005: val_loss -0.2271 +2026-04-09 10:19:32.944992: Pseudo dice [0.327, 0.4625, 0.6198, 0.7315, 0.3177, 0.7208, 0.7791] +2026-04-09 10:19:32.946553: Epoch time: 105.34 s +2026-04-09 10:19:34.105338: +2026-04-09 10:19:34.107372: Epoch 615 +2026-04-09 10:19:34.109114: Current learning rate: 0.00424 +2026-04-09 10:21:18.278603: train_loss -0.2975 +2026-04-09 10:21:18.285763: val_loss -0.2749 +2026-04-09 10:21:18.288280: Pseudo dice [0.4756, 0.1033, 0.8269, 0.8607, 0.4378, 0.7094, 0.7551] +2026-04-09 10:21:18.290838: Epoch time: 104.18 s +2026-04-09 10:21:19.384665: +2026-04-09 10:21:19.387359: Epoch 616 +2026-04-09 10:21:19.390086: Current learning rate: 0.00423 +2026-04-09 10:23:03.659727: train_loss -0.3001 +2026-04-09 10:23:03.666149: val_loss -0.258 +2026-04-09 10:23:03.669743: Pseudo dice [0.3332, 0.3367, 0.7579, 0.8196, 0.2336, 0.5883, 0.8314] +2026-04-09 10:23:03.671545: Epoch time: 104.28 s +2026-04-09 10:23:04.780087: +2026-04-09 10:23:04.784862: Epoch 617 +2026-04-09 10:23:04.790818: Current learning rate: 0.00422 +2026-04-09 10:24:49.470186: train_loss -0.2747 +2026-04-09 10:24:49.475309: val_loss -0.2476 +2026-04-09 10:24:49.478249: Pseudo dice [0.3214, 0.1289, 0.4167, 0.3145, 0.5012, 0.5761, 0.8291] +2026-04-09 10:24:49.480413: Epoch time: 104.69 s +2026-04-09 10:24:50.581086: +2026-04-09 10:24:50.583700: Epoch 618 +2026-04-09 10:24:50.585797: Current learning rate: 0.00421 +2026-04-09 10:26:33.417643: train_loss -0.295 +2026-04-09 10:26:33.422716: val_loss -0.291 +2026-04-09 10:26:33.424290: Pseudo dice [0.6872, 0.3307, 0.8157, 0.7119, 0.4275, 0.8605, 0.6455] +2026-04-09 10:26:33.426080: Epoch time: 102.84 s +2026-04-09 10:26:34.523755: +2026-04-09 10:26:34.525532: Epoch 619 +2026-04-09 10:26:34.527570: Current learning rate: 0.0042 +2026-04-09 10:28:22.170276: train_loss -0.323 +2026-04-09 10:28:22.174853: val_loss 
-0.2485 +2026-04-09 10:28:22.176434: Pseudo dice [0.563, 0.3499, 0.5754, 0.8186, 0.3025, 0.6719, 0.6062] +2026-04-09 10:28:22.178077: Epoch time: 107.65 s +2026-04-09 10:28:23.307662: +2026-04-09 10:28:23.310712: Epoch 620 +2026-04-09 10:28:23.313546: Current learning rate: 0.00419 +2026-04-09 10:30:06.011684: train_loss -0.3246 +2026-04-09 10:30:06.026534: val_loss -0.2578 +2026-04-09 10:30:06.030705: Pseudo dice [0.6674, 0.272, 0.5544, 0.7558, 0.2183, 0.8006, 0.7349] +2026-04-09 10:30:06.035660: Epoch time: 102.71 s +2026-04-09 10:30:07.173393: +2026-04-09 10:30:07.177644: Epoch 621 +2026-04-09 10:30:07.182471: Current learning rate: 0.00418 +2026-04-09 10:31:51.431057: train_loss -0.3212 +2026-04-09 10:31:51.438308: val_loss -0.2416 +2026-04-09 10:31:51.440294: Pseudo dice [0.1784, 0.196, 0.5287, 0.7824, 0.3802, 0.5605, 0.6809] +2026-04-09 10:31:51.442687: Epoch time: 104.26 s +2026-04-09 10:31:52.549910: +2026-04-09 10:31:52.551643: Epoch 622 +2026-04-09 10:31:52.553101: Current learning rate: 0.00417 +2026-04-09 10:33:36.198507: train_loss -0.3159 +2026-04-09 10:33:36.204519: val_loss -0.2453 +2026-04-09 10:33:36.206470: Pseudo dice [0.3345, 0.2889, 0.5269, 0.5703, 0.2875, 0.7766, 0.7652] +2026-04-09 10:33:36.208577: Epoch time: 103.65 s +2026-04-09 10:33:37.309006: +2026-04-09 10:33:37.310669: Epoch 623 +2026-04-09 10:33:37.312585: Current learning rate: 0.00416 +2026-04-09 10:35:20.051455: train_loss -0.3156 +2026-04-09 10:35:20.056997: val_loss -0.2524 +2026-04-09 10:35:20.058964: Pseudo dice [0.7188, 0.5654, 0.6162, 0.1205, 0.1583, 0.777, 0.9149] +2026-04-09 10:35:20.060887: Epoch time: 102.75 s +2026-04-09 10:35:21.167995: +2026-04-09 10:35:21.170000: Epoch 624 +2026-04-09 10:35:21.171519: Current learning rate: 0.00415 +2026-04-09 10:37:03.230479: train_loss -0.3089 +2026-04-09 10:37:03.234958: val_loss -0.258 +2026-04-09 10:37:03.236461: Pseudo dice [0.4218, 0.2609, 0.3354, 0.882, 0.2635, 0.8146, 0.7302] +2026-04-09 10:37:03.238298: Epoch time: 102.07 s 
+2026-04-09 10:37:04.341094: +2026-04-09 10:37:04.343205: Epoch 625 +2026-04-09 10:37:04.345042: Current learning rate: 0.00414 +2026-04-09 10:38:49.269432: train_loss -0.3045 +2026-04-09 10:38:49.276965: val_loss -0.2552 +2026-04-09 10:38:49.279432: Pseudo dice [0.5339, 0.2755, 0.706, 0.733, 0.2348, 0.3802, 0.8605] +2026-04-09 10:38:49.282322: Epoch time: 104.93 s +2026-04-09 10:38:50.389935: +2026-04-09 10:38:50.392378: Epoch 626 +2026-04-09 10:38:50.395151: Current learning rate: 0.00413 +2026-04-09 10:40:33.212022: train_loss -0.301 +2026-04-09 10:40:33.217420: val_loss -0.2683 +2026-04-09 10:40:33.219181: Pseudo dice [0.3662, 0.4643, 0.4494, 0.8038, 0.4262, 0.6307, 0.721] +2026-04-09 10:40:33.220808: Epoch time: 102.83 s +2026-04-09 10:40:34.342730: +2026-04-09 10:40:34.344155: Epoch 627 +2026-04-09 10:40:34.346020: Current learning rate: 0.00412 +2026-04-09 10:42:19.244542: train_loss -0.3264 +2026-04-09 10:42:19.253025: val_loss -0.2633 +2026-04-09 10:42:19.257149: Pseudo dice [0.267, 0.5064, 0.7351, 0.8195, 0.4567, 0.5803, 0.4972] +2026-04-09 10:42:19.260312: Epoch time: 104.9 s +2026-04-09 10:42:20.371006: +2026-04-09 10:42:20.373031: Epoch 628 +2026-04-09 10:42:20.375326: Current learning rate: 0.00411 +2026-04-09 10:44:06.964142: train_loss -0.3181 +2026-04-09 10:44:06.969696: val_loss -0.2207 +2026-04-09 10:44:06.971292: Pseudo dice [0.5082, 0.3439, 0.4339, 0.3832, 0.2866, 0.1654, 0.8129] +2026-04-09 10:44:06.973259: Epoch time: 106.6 s +2026-04-09 10:44:08.064273: +2026-04-09 10:44:08.066273: Epoch 629 +2026-04-09 10:44:08.067977: Current learning rate: 0.0041 +2026-04-09 10:45:51.388516: train_loss -0.3003 +2026-04-09 10:45:51.393216: val_loss -0.2789 +2026-04-09 10:45:51.395098: Pseudo dice [0.4067, 0.2024, 0.73, 0.643, 0.3749, 0.7313, 0.882] +2026-04-09 10:45:51.396954: Epoch time: 103.33 s +2026-04-09 10:45:52.540002: +2026-04-09 10:45:52.541999: Epoch 630 +2026-04-09 10:45:52.543808: Current learning rate: 0.00409 +2026-04-09 10:47:35.291254: 
train_loss -0.2766 +2026-04-09 10:47:35.297597: val_loss -0.2021 +2026-04-09 10:47:35.299805: Pseudo dice [0.2866, 0.4748, 0.5003, 0.6931, 0.0822, 0.6489, 0.4588] +2026-04-09 10:47:35.301799: Epoch time: 102.75 s +2026-04-09 10:47:36.406384: +2026-04-09 10:47:36.410110: Epoch 631 +2026-04-09 10:47:36.413651: Current learning rate: 0.00408 +2026-04-09 10:49:19.572676: train_loss -0.3126 +2026-04-09 10:49:19.578243: val_loss -0.2444 +2026-04-09 10:49:19.580259: Pseudo dice [0.356, 0.3168, 0.7857, 0.8125, 0.2762, 0.3248, 0.8422] +2026-04-09 10:49:19.582166: Epoch time: 103.17 s +2026-04-09 10:49:20.723344: +2026-04-09 10:49:20.725048: Epoch 632 +2026-04-09 10:49:20.726773: Current learning rate: 0.00407 +2026-04-09 10:51:03.483424: train_loss -0.2962 +2026-04-09 10:51:03.488402: val_loss -0.2476 +2026-04-09 10:51:03.491086: Pseudo dice [0.2978, 0.1128, 0.837, 0.7665, 0.0311, 0.7179, 0.7018] +2026-04-09 10:51:03.492837: Epoch time: 102.76 s +2026-04-09 10:51:04.616336: +2026-04-09 10:51:04.618056: Epoch 633 +2026-04-09 10:51:04.619552: Current learning rate: 0.00406 +2026-04-09 10:52:48.599529: train_loss -0.3083 +2026-04-09 10:52:48.606140: val_loss -0.2423 +2026-04-09 10:52:48.607987: Pseudo dice [0.4634, 0.1663, 0.568, 0.6925, 0.3238, 0.2101, 0.8608] +2026-04-09 10:52:48.610194: Epoch time: 103.99 s +2026-04-09 10:52:49.713786: +2026-04-09 10:52:49.717144: Epoch 634 +2026-04-09 10:52:49.722500: Current learning rate: 0.00405 +2026-04-09 10:54:32.370863: train_loss -0.3172 +2026-04-09 10:54:32.375751: val_loss -0.2793 +2026-04-09 10:54:32.377957: Pseudo dice [0.3675, 0.5294, 0.7784, 0.4623, 0.1436, 0.7981, 0.8073] +2026-04-09 10:54:32.380921: Epoch time: 102.66 s +2026-04-09 10:54:33.485601: +2026-04-09 10:54:33.487406: Epoch 635 +2026-04-09 10:54:33.489170: Current learning rate: 0.00404 +2026-04-09 10:56:15.875659: train_loss -0.3247 +2026-04-09 10:56:15.883297: val_loss -0.2583 +2026-04-09 10:56:15.885133: Pseudo dice [0.6479, 0.509, 0.7222, 0.7004, 0.4998, 
0.7004, 0.556] +2026-04-09 10:56:15.887299: Epoch time: 102.39 s +2026-04-09 10:56:16.974430: +2026-04-09 10:56:16.976053: Epoch 636 +2026-04-09 10:56:16.977502: Current learning rate: 0.00403 +2026-04-09 10:57:59.878147: train_loss -0.3178 +2026-04-09 10:57:59.882994: val_loss -0.2423 +2026-04-09 10:57:59.885659: Pseudo dice [0.3431, 0.2828, 0.6557, 0.6227, 0.391, 0.6534, 0.7838] +2026-04-09 10:57:59.887154: Epoch time: 102.91 s +2026-04-09 10:58:01.024610: +2026-04-09 10:58:01.026462: Epoch 637 +2026-04-09 10:58:01.027989: Current learning rate: 0.00402 +2026-04-09 10:59:43.177855: train_loss -0.3035 +2026-04-09 10:59:43.182716: val_loss -0.2511 +2026-04-09 10:59:43.184514: Pseudo dice [0.3758, 0.5516, 0.549, 0.7736, 0.3319, 0.7302, 0.7759] +2026-04-09 10:59:43.186292: Epoch time: 102.16 s +2026-04-09 10:59:44.294390: +2026-04-09 10:59:44.296750: Epoch 638 +2026-04-09 10:59:44.298713: Current learning rate: 0.00401 +2026-04-09 11:01:27.613556: train_loss -0.3145 +2026-04-09 11:01:27.619829: val_loss -0.2985 +2026-04-09 11:01:27.622483: Pseudo dice [0.3646, 0.6475, 0.7568, 0.8677, 0.631, 0.5545, 0.7587] +2026-04-09 11:01:27.624799: Epoch time: 103.32 s +2026-04-09 11:01:28.726288: +2026-04-09 11:01:28.728586: Epoch 639 +2026-04-09 11:01:28.730204: Current learning rate: 0.004 +2026-04-09 11:03:10.896019: train_loss -0.2999 +2026-04-09 11:03:10.902067: val_loss -0.2326 +2026-04-09 11:03:10.903982: Pseudo dice [0.2991, 0.2679, 0.7124, 0.4817, 0.0654, 0.4352, 0.8129] +2026-04-09 11:03:10.906147: Epoch time: 102.17 s +2026-04-09 11:03:12.047704: +2026-04-09 11:03:12.049448: Epoch 640 +2026-04-09 11:03:12.051168: Current learning rate: 0.00399 +2026-04-09 11:04:55.583122: train_loss -0.284 +2026-04-09 11:04:55.588502: val_loss -0.2555 +2026-04-09 11:04:55.590927: Pseudo dice [0.4717, 0.4941, 0.4408, 0.8396, 0.3969, 0.6639, 0.8966] +2026-04-09 11:04:55.593171: Epoch time: 103.54 s +2026-04-09 11:04:56.706634: +2026-04-09 11:04:56.708343: Epoch 641 +2026-04-09 
11:04:56.710020: Current learning rate: 0.00398 +2026-04-09 11:06:39.414738: train_loss -0.3164 +2026-04-09 11:06:39.419998: val_loss -0.2475 +2026-04-09 11:06:39.422469: Pseudo dice [0.4002, 0.4155, 0.7639, 0.3259, 0.242, 0.6524, 0.5886] +2026-04-09 11:06:39.424838: Epoch time: 102.71 s +2026-04-09 11:06:40.538301: +2026-04-09 11:06:40.541224: Epoch 642 +2026-04-09 11:06:40.543983: Current learning rate: 0.00397 +2026-04-09 11:08:23.136431: train_loss -0.3108 +2026-04-09 11:08:23.141555: val_loss -0.2819 +2026-04-09 11:08:23.143824: Pseudo dice [0.5647, 0.1988, 0.7421, 0.091, 0.425, 0.6817, 0.7294] +2026-04-09 11:08:23.146945: Epoch time: 102.6 s +2026-04-09 11:08:24.270511: +2026-04-09 11:08:24.273867: Epoch 643 +2026-04-09 11:08:24.276890: Current learning rate: 0.00396 +2026-04-09 11:10:08.793647: train_loss -0.3174 +2026-04-09 11:10:08.806539: val_loss -0.28 +2026-04-09 11:10:08.808722: Pseudo dice [0.4358, 0.1235, 0.5695, 0.8789, 0.4986, 0.6893, 0.83] +2026-04-09 11:10:08.810883: Epoch time: 104.53 s +2026-04-09 11:10:09.908567: +2026-04-09 11:10:09.910909: Epoch 644 +2026-04-09 11:10:09.913279: Current learning rate: 0.00395 +2026-04-09 11:11:52.978684: train_loss -0.3218 +2026-04-09 11:11:52.983841: val_loss -0.2634 +2026-04-09 11:11:52.985809: Pseudo dice [0.5342, 0.2804, 0.5666, 0.6038, 0.2594, 0.4723, 0.8697] +2026-04-09 11:11:52.987600: Epoch time: 103.07 s +2026-04-09 11:11:54.146667: +2026-04-09 11:11:54.149737: Epoch 645 +2026-04-09 11:11:54.151830: Current learning rate: 0.00394 +2026-04-09 11:13:37.636094: train_loss -0.3086 +2026-04-09 11:13:37.640545: val_loss -0.2537 +2026-04-09 11:13:37.642585: Pseudo dice [0.4185, 0.1805, 0.6302, 0.6292, 0.2539, 0.6895, 0.859] +2026-04-09 11:13:37.644425: Epoch time: 103.49 s +2026-04-09 11:13:38.737950: +2026-04-09 11:13:38.739982: Epoch 646 +2026-04-09 11:13:38.742605: Current learning rate: 0.00393 +2026-04-09 11:15:21.825566: train_loss -0.3036 +2026-04-09 11:15:21.829759: val_loss -0.2423 +2026-04-09 
11:15:21.832560: Pseudo dice [0.314, 0.2151, 0.599, 0.8775, 0.3238, 0.7659, 0.5761] +2026-04-09 11:15:21.834865: Epoch time: 103.09 s +2026-04-09 11:15:22.944777: +2026-04-09 11:15:22.946690: Epoch 647 +2026-04-09 11:15:22.948347: Current learning rate: 0.00392 +2026-04-09 11:17:05.580512: train_loss -0.3129 +2026-04-09 11:17:05.585748: val_loss -0.2639 +2026-04-09 11:17:05.587800: Pseudo dice [0.3853, 0.2252, 0.5744, 0.7286, 0.4865, 0.7334, 0.6872] +2026-04-09 11:17:05.590460: Epoch time: 102.64 s +2026-04-09 11:17:06.686051: +2026-04-09 11:17:06.687747: Epoch 648 +2026-04-09 11:17:06.689642: Current learning rate: 0.00391 +2026-04-09 11:18:50.063716: train_loss -0.3122 +2026-04-09 11:18:50.070285: val_loss -0.2596 +2026-04-09 11:18:50.072146: Pseudo dice [0.5639, 0.5569, 0.7672, 0.8898, 0.2666, 0.6243, 0.7435] +2026-04-09 11:18:50.074589: Epoch time: 103.38 s +2026-04-09 11:18:51.173747: +2026-04-09 11:18:51.175595: Epoch 649 +2026-04-09 11:18:51.177122: Current learning rate: 0.0039 +2026-04-09 11:20:33.556108: train_loss -0.3107 +2026-04-09 11:20:33.571083: val_loss -0.2578 +2026-04-09 11:20:33.574461: Pseudo dice [0.8136, 0.4336, 0.8012, 0.5292, 0.2435, 0.142, 0.8249] +2026-04-09 11:20:33.576539: Epoch time: 102.39 s +2026-04-09 11:20:36.439323: +2026-04-09 11:20:36.441085: Epoch 650 +2026-04-09 11:20:36.444063: Current learning rate: 0.00389 +2026-04-09 11:22:20.844348: train_loss -0.3257 +2026-04-09 11:22:20.850918: val_loss -0.2737 +2026-04-09 11:22:20.852908: Pseudo dice [0.7097, 0.4618, 0.7544, 0.8714, 0.3088, 0.3887, 0.8602] +2026-04-09 11:22:20.858020: Epoch time: 104.41 s +2026-04-09 11:22:21.982502: +2026-04-09 11:22:21.986398: Epoch 651 +2026-04-09 11:22:21.989742: Current learning rate: 0.00388 +2026-04-09 11:24:13.881154: train_loss -0.319 +2026-04-09 11:24:13.890841: val_loss -0.278 +2026-04-09 11:24:13.892988: Pseudo dice [0.2861, 0.5079, 0.697, 0.8392, 0.557, 0.8336, 0.8259] +2026-04-09 11:24:13.896350: Epoch time: 111.9 s +2026-04-09 
11:24:15.007835: +2026-04-09 11:24:15.011240: Epoch 652 +2026-04-09 11:24:15.013526: Current learning rate: 0.00387 +2026-04-09 11:26:01.601262: train_loss -0.3319 +2026-04-09 11:26:01.610074: val_loss -0.2671 +2026-04-09 11:26:01.612940: Pseudo dice [0.2625, 0.5375, 0.7412, 0.8376, 0.2042, 0.751, 0.9014] +2026-04-09 11:26:01.615854: Epoch time: 106.6 s +2026-04-09 11:26:03.816950: +2026-04-09 11:26:03.819976: Epoch 653 +2026-04-09 11:26:03.823407: Current learning rate: 0.00386 +2026-04-09 11:27:46.571865: train_loss -0.328 +2026-04-09 11:27:46.577312: val_loss -0.2913 +2026-04-09 11:27:46.579293: Pseudo dice [0.5227, 0.557, 0.7048, 0.8083, 0.4329, 0.7348, 0.7045] +2026-04-09 11:27:46.583327: Epoch time: 102.76 s +2026-04-09 11:27:46.585522: Yayy! New best EMA pseudo Dice: 0.5728 +2026-04-09 11:27:49.354877: +2026-04-09 11:27:49.357794: Epoch 654 +2026-04-09 11:27:49.360203: Current learning rate: 0.00385 +2026-04-09 11:29:33.592818: train_loss -0.3339 +2026-04-09 11:29:33.605040: val_loss -0.2719 +2026-04-09 11:29:33.607851: Pseudo dice [0.5168, 0.1183, 0.7834, 0.7762, 0.4776, 0.4035, 0.7004] +2026-04-09 11:29:33.615817: Epoch time: 104.24 s +2026-04-09 11:29:34.754235: +2026-04-09 11:29:34.756748: Epoch 655 +2026-04-09 11:29:34.758559: Current learning rate: 0.00384 +2026-04-09 11:31:17.996139: train_loss -0.3129 +2026-04-09 11:31:18.000779: val_loss -0.2699 +2026-04-09 11:31:18.002765: Pseudo dice [0.5935, 0.4153, 0.7898, 0.8454, 0.4013, 0.7768, 0.5724] +2026-04-09 11:31:18.005381: Epoch time: 103.25 s +2026-04-09 11:31:18.007642: Yayy! 
New best EMA pseudo Dice: 0.5753 +2026-04-09 11:31:20.806597: +2026-04-09 11:31:20.809158: Epoch 656 +2026-04-09 11:31:20.811796: Current learning rate: 0.00383 +2026-04-09 11:33:04.255066: train_loss -0.3288 +2026-04-09 11:33:04.260556: val_loss -0.2402 +2026-04-09 11:33:04.262888: Pseudo dice [0.2288, 0.2346, 0.6978, 0.8267, 0.1646, 0.7757, 0.7585] +2026-04-09 11:33:04.266003: Epoch time: 103.45 s +2026-04-09 11:33:05.374336: +2026-04-09 11:33:05.376055: Epoch 657 +2026-04-09 11:33:05.379515: Current learning rate: 0.00382 +2026-04-09 11:34:50.829816: train_loss -0.3219 +2026-04-09 11:34:50.834769: val_loss -0.2038 +2026-04-09 11:34:50.837310: Pseudo dice [0.3443, 0.1275, 0.6571, 0.826, 0.1649, 0.733, 0.315] +2026-04-09 11:34:50.839531: Epoch time: 105.46 s +2026-04-09 11:34:51.941299: +2026-04-09 11:34:51.957064: Epoch 658 +2026-04-09 11:34:51.959489: Current learning rate: 0.00381 +2026-04-09 11:36:34.451491: train_loss -0.3161 +2026-04-09 11:36:34.457305: val_loss -0.291 +2026-04-09 11:36:34.459276: Pseudo dice [0.7766, 0.5675, 0.8186, 0.8183, 0.1808, 0.818, 0.8487] +2026-04-09 11:36:34.461176: Epoch time: 102.51 s +2026-04-09 11:36:35.577065: +2026-04-09 11:36:35.579042: Epoch 659 +2026-04-09 11:36:35.580989: Current learning rate: 0.0038 +2026-04-09 11:38:20.265612: train_loss -0.3229 +2026-04-09 11:38:20.271607: val_loss -0.2938 +2026-04-09 11:38:20.274333: Pseudo dice [0.8079, 0.1966, 0.5472, 0.5569, 0.2602, 0.5926, 0.7798] +2026-04-09 11:38:20.276510: Epoch time: 104.69 s +2026-04-09 11:38:21.398313: +2026-04-09 11:38:21.404656: Epoch 660 +2026-04-09 11:38:21.410075: Current learning rate: 0.00379 +2026-04-09 11:40:04.163608: train_loss -0.3366 +2026-04-09 11:40:04.168674: val_loss -0.2956 +2026-04-09 11:40:04.170487: Pseudo dice [0.4925, 0.4132, 0.7111, 0.646, 0.4102, 0.7402, 0.8348] +2026-04-09 11:40:04.172469: Epoch time: 102.77 s +2026-04-09 11:40:05.298646: +2026-04-09 11:40:05.302391: Epoch 661 +2026-04-09 11:40:05.304104: Current learning rate: 
0.00378 +2026-04-09 11:41:50.117949: train_loss -0.3339 +2026-04-09 11:41:50.130650: val_loss -0.2889 +2026-04-09 11:41:50.135382: Pseudo dice [0.7273, 0.5504, 0.5989, 0.9234, 0.3433, 0.7928, 0.6947] +2026-04-09 11:41:50.139828: Epoch time: 104.82 s +2026-04-09 11:41:50.144239: Yayy! New best EMA pseudo Dice: 0.5809 +2026-04-09 11:41:52.987633: +2026-04-09 11:41:52.989301: Epoch 662 +2026-04-09 11:41:52.990901: Current learning rate: 0.00377 +2026-04-09 11:43:35.981600: train_loss -0.3263 +2026-04-09 11:43:35.987725: val_loss -0.2762 +2026-04-09 11:43:35.989811: Pseudo dice [0.3376, 0.5519, 0.7234, 0.8729, 0.3862, 0.6961, 0.8582] +2026-04-09 11:43:35.991693: Epoch time: 103.0 s +2026-04-09 11:43:35.994132: Yayy! New best EMA pseudo Dice: 0.586 +2026-04-09 11:43:38.861355: +2026-04-09 11:43:38.863094: Epoch 663 +2026-04-09 11:43:38.865484: Current learning rate: 0.00376 +2026-04-09 11:45:21.223002: train_loss -0.3393 +2026-04-09 11:45:21.232759: val_loss -0.2809 +2026-04-09 11:45:21.235832: Pseudo dice [0.8271, 0.3551, 0.702, 0.8406, 0.311, 0.8258, 0.7908] +2026-04-09 11:45:21.240915: Epoch time: 102.36 s +2026-04-09 11:45:21.245409: Yayy! New best EMA pseudo Dice: 0.5939 +2026-04-09 11:45:24.193946: +2026-04-09 11:45:24.195971: Epoch 664 +2026-04-09 11:45:24.199285: Current learning rate: 0.00375 +2026-04-09 11:47:07.086573: train_loss -0.3236 +2026-04-09 11:47:07.094344: val_loss -0.2851 +2026-04-09 11:47:07.097170: Pseudo dice [0.3847, 0.2228, 0.8065, 0.5357, 0.6425, 0.7667, 0.7025] +2026-04-09 11:47:07.099220: Epoch time: 102.9 s +2026-04-09 11:47:08.199533: +2026-04-09 11:47:08.201957: Epoch 665 +2026-04-09 11:47:08.203975: Current learning rate: 0.00374 +2026-04-09 11:48:51.387011: train_loss -0.3227 +2026-04-09 11:48:51.393473: val_loss -0.2893 +2026-04-09 11:48:51.397454: Pseudo dice [0.6593, 0.2471, 0.6247, 0.8693, 0.5488, 0.5006, 0.8676] +2026-04-09 11:48:51.399675: Epoch time: 103.19 s +2026-04-09 11:48:51.401287: Yayy! 
New best EMA pseudo Dice: 0.5949 +2026-04-09 11:48:54.299238: +2026-04-09 11:48:54.301363: Epoch 666 +2026-04-09 11:48:54.303125: Current learning rate: 0.00373 +2026-04-09 11:50:37.650802: train_loss -0.3198 +2026-04-09 11:50:37.657484: val_loss -0.2728 +2026-04-09 11:50:37.660064: Pseudo dice [0.5156, 0.5562, 0.6636, 0.6625, 0.4794, 0.2734, 0.7075] +2026-04-09 11:50:37.663238: Epoch time: 103.35 s +2026-04-09 11:50:38.789150: +2026-04-09 11:50:38.791644: Epoch 667 +2026-04-09 11:50:38.794465: Current learning rate: 0.00372 +2026-04-09 11:52:33.045825: train_loss -0.325 +2026-04-09 11:52:33.054546: val_loss -0.2796 +2026-04-09 11:52:33.057336: Pseudo dice [0.25, 0.6169, 0.5609, 0.4998, 0.5072, 0.3816, 0.9211] +2026-04-09 11:52:33.059304: Epoch time: 114.26 s +2026-04-09 11:52:34.199393: +2026-04-09 11:52:34.202633: Epoch 668 +2026-04-09 11:52:34.206170: Current learning rate: 0.00371 +2026-04-09 11:54:21.082386: train_loss -0.3426 +2026-04-09 11:54:21.088987: val_loss -0.2784 +2026-04-09 11:54:21.091279: Pseudo dice [0.7247, 0.6292, 0.6067, 0.8809, 0.4859, 0.6159, 0.6718] +2026-04-09 11:54:21.093603: Epoch time: 106.89 s +2026-04-09 11:54:22.241057: +2026-04-09 11:54:22.244955: Epoch 669 +2026-04-09 11:54:22.246799: Current learning rate: 0.0037 +2026-04-09 11:56:06.119120: train_loss -0.3354 +2026-04-09 11:56:06.125482: val_loss -0.2645 +2026-04-09 11:56:06.127885: Pseudo dice [0.3208, 0.5572, 0.721, 0.8087, 0.1262, 0.7899, 0.8901] +2026-04-09 11:56:06.129889: Epoch time: 103.88 s +2026-04-09 11:56:07.278587: +2026-04-09 11:56:07.281803: Epoch 670 +2026-04-09 11:56:07.284190: Current learning rate: 0.00369 +2026-04-09 11:57:52.518024: train_loss -0.3417 +2026-04-09 11:57:52.535399: val_loss -0.2793 +2026-04-09 11:57:52.537723: Pseudo dice [0.5807, 0.3351, 0.7866, 0.8126, 0.5044, 0.7478, 0.7447] +2026-04-09 11:57:52.540655: Epoch time: 105.24 s +2026-04-09 11:57:52.543098: Yayy! 
New best EMA pseudo Dice: 0.5984 +2026-04-09 11:57:55.492005: +2026-04-09 11:57:55.494017: Epoch 671 +2026-04-09 11:57:55.495623: Current learning rate: 0.00368 +2026-04-09 11:59:38.658627: train_loss -0.3254 +2026-04-09 11:59:38.666840: val_loss -0.3013 +2026-04-09 11:59:38.668766: Pseudo dice [0.5104, 0.4342, 0.7547, 0.6606, 0.4456, 0.8524, 0.6762] +2026-04-09 11:59:38.670711: Epoch time: 103.17 s +2026-04-09 11:59:38.672831: Yayy! New best EMA pseudo Dice: 0.6005 +2026-04-09 11:59:41.520039: +2026-04-09 11:59:41.521846: Epoch 672 +2026-04-09 11:59:41.523578: Current learning rate: 0.00367 +2026-04-09 12:01:24.486846: train_loss -0.3238 +2026-04-09 12:01:24.495203: val_loss -0.2857 +2026-04-09 12:01:24.497441: Pseudo dice [0.4704, 0.5842, 0.7555, 0.0913, 0.576, 0.7927, 0.7441] +2026-04-09 12:01:24.500212: Epoch time: 102.97 s +2026-04-09 12:01:25.626694: +2026-04-09 12:01:25.630826: Epoch 673 +2026-04-09 12:01:25.632821: Current learning rate: 0.00366 +2026-04-09 12:03:10.875265: train_loss -0.3256 +2026-04-09 12:03:10.879920: val_loss -0.2762 +2026-04-09 12:03:10.882682: Pseudo dice [0.4391, 0.1723, 0.6333, 0.8569, 0.3962, 0.69, 0.9011] +2026-04-09 12:03:10.884997: Epoch time: 105.25 s +2026-04-09 12:03:12.020258: +2026-04-09 12:03:12.023407: Epoch 674 +2026-04-09 12:03:12.025596: Current learning rate: 0.00365 +2026-04-09 12:04:55.112812: train_loss -0.3294 +2026-04-09 12:04:55.118293: val_loss -0.2953 +2026-04-09 12:04:55.120499: Pseudo dice [0.6956, 0.3904, 0.7207, 0.8826, 0.5106, 0.5991, 0.7906] +2026-04-09 12:04:55.122774: Epoch time: 103.1 s +2026-04-09 12:04:55.125514: Yayy! 
New best EMA pseudo Dice: 0.6023 +2026-04-09 12:04:57.976775: +2026-04-09 12:04:57.978745: Epoch 675 +2026-04-09 12:04:57.980556: Current learning rate: 0.00364 +2026-04-09 12:06:41.826612: train_loss -0.3418 +2026-04-09 12:06:41.833655: val_loss -0.2763 +2026-04-09 12:06:41.836451: Pseudo dice [0.6034, 0.5265, 0.7288, 0.6896, 0.512, 0.6946, 0.787] +2026-04-09 12:06:41.839127: Epoch time: 103.85 s +2026-04-09 12:06:41.841904: Yayy! New best EMA pseudo Dice: 0.607 +2026-04-09 12:06:44.759278: +2026-04-09 12:06:44.761202: Epoch 676 +2026-04-09 12:06:44.762907: Current learning rate: 0.00363 +2026-04-09 12:08:27.248502: train_loss -0.3237 +2026-04-09 12:08:27.253294: val_loss -0.2552 +2026-04-09 12:08:27.255133: Pseudo dice [0.6231, 0.2646, 0.6617, 0.763, 0.1464, 0.6697, 0.8095] +2026-04-09 12:08:27.256935: Epoch time: 102.49 s +2026-04-09 12:08:28.387820: +2026-04-09 12:08:28.389387: Epoch 677 +2026-04-09 12:08:28.391438: Current learning rate: 0.00362 +2026-04-09 12:10:10.776427: train_loss -0.3261 +2026-04-09 12:10:10.781134: val_loss -0.2606 +2026-04-09 12:10:10.783727: Pseudo dice [0.4704, 0.4953, 0.8554, 0.8063, 0.178, 0.848, 0.9254] +2026-04-09 12:10:10.785683: Epoch time: 102.39 s +2026-04-09 12:10:10.787942: Yayy! New best EMA pseudo Dice: 0.6077 +2026-04-09 12:10:13.554914: +2026-04-09 12:10:13.556765: Epoch 678 +2026-04-09 12:10:13.558229: Current learning rate: 0.00361 +2026-04-09 12:11:58.839357: train_loss -0.3308 +2026-04-09 12:11:58.847963: val_loss -0.3108 +2026-04-09 12:11:58.851559: Pseudo dice [0.8138, 0.1126, 0.8473, 0.5321, 0.5174, 0.8303, 0.8974] +2026-04-09 12:11:58.855037: Epoch time: 105.29 s +2026-04-09 12:11:58.857892: Yayy! 
New best EMA pseudo Dice: 0.6119 +2026-04-09 12:12:01.816902: +2026-04-09 12:12:01.820260: Epoch 679 +2026-04-09 12:12:01.823759: Current learning rate: 0.0036 +2026-04-09 12:13:45.168775: train_loss -0.3324 +2026-04-09 12:13:45.178281: val_loss -0.2721 +2026-04-09 12:13:45.181430: Pseudo dice [0.693, 0.4056, 0.7317, 0.7557, 0.4627, 0.6211, 0.8596] +2026-04-09 12:13:45.183991: Epoch time: 103.36 s +2026-04-09 12:13:45.188827: Yayy! New best EMA pseudo Dice: 0.6155 +2026-04-09 12:13:48.100610: +2026-04-09 12:13:48.103325: Epoch 680 +2026-04-09 12:13:48.105795: Current learning rate: 0.00359 +2026-04-09 12:15:30.565863: train_loss -0.3164 +2026-04-09 12:15:30.570413: val_loss -0.2612 +2026-04-09 12:15:30.572511: Pseudo dice [0.3811, 0.2027, 0.818, 0.1251, 0.2982, 0.8006, 0.8172] +2026-04-09 12:15:30.574333: Epoch time: 102.47 s +2026-04-09 12:15:31.696848: +2026-04-09 12:15:31.699026: Epoch 681 +2026-04-09 12:15:31.700680: Current learning rate: 0.00358 +2026-04-09 12:17:14.286790: train_loss -0.3194 +2026-04-09 12:17:14.297032: val_loss -0.2688 +2026-04-09 12:17:14.299335: Pseudo dice [0.5465, 0.3916, 0.5416, 0.8401, 0.3124, 0.8723, 0.8907] +2026-04-09 12:17:14.301300: Epoch time: 102.59 s +2026-04-09 12:17:15.424987: +2026-04-09 12:17:15.427056: Epoch 682 +2026-04-09 12:17:15.429130: Current learning rate: 0.00357 +2026-04-09 12:18:58.398496: train_loss -0.3302 +2026-04-09 12:18:58.408606: val_loss -0.2792 +2026-04-09 12:18:58.413935: Pseudo dice [0.3607, 0.545, 0.6197, 0.7384, 0.6908, 0.6062, 0.5621] +2026-04-09 12:18:58.420788: Epoch time: 102.98 s +2026-04-09 12:18:59.552616: +2026-04-09 12:18:59.554866: Epoch 683 +2026-04-09 12:18:59.556354: Current learning rate: 0.00356 +2026-04-09 12:20:42.430326: train_loss -0.3317 +2026-04-09 12:20:42.435439: val_loss -0.2625 +2026-04-09 12:20:42.437551: Pseudo dice [0.4678, 0.4343, 0.5585, 0.8923, 0.5068, 0.0632, 0.7867] +2026-04-09 12:20:42.439569: Epoch time: 102.88 s +2026-04-09 12:20:43.572177: +2026-04-09 
12:20:43.573997: Epoch 684 +2026-04-09 12:20:43.576113: Current learning rate: 0.00355 +2026-04-09 12:22:26.274432: train_loss -0.3289 +2026-04-09 12:22:26.281300: val_loss -0.2636 +2026-04-09 12:22:26.283746: Pseudo dice [0.4304, 0.0179, 0.7801, 0.8116, 0.5767, 0.494, 0.8484] +2026-04-09 12:22:26.286510: Epoch time: 102.71 s +2026-04-09 12:22:27.409764: +2026-04-09 12:22:27.412197: Epoch 685 +2026-04-09 12:22:27.414255: Current learning rate: 0.00354 +2026-04-09 12:24:10.858807: train_loss -0.3215 +2026-04-09 12:24:10.864335: val_loss -0.2551 +2026-04-09 12:24:10.866402: Pseudo dice [0.5963, 0.519, 0.5592, 0.8906, 0.4153, 0.7418, 0.7111] +2026-04-09 12:24:10.868232: Epoch time: 103.45 s +2026-04-09 12:24:12.032063: +2026-04-09 12:24:12.035831: Epoch 686 +2026-04-09 12:24:12.040548: Current learning rate: 0.00353 +2026-04-09 12:25:56.149620: train_loss -0.3262 +2026-04-09 12:25:56.157337: val_loss -0.2719 +2026-04-09 12:25:56.159567: Pseudo dice [0.2116, 0.0939, 0.5391, 0.8712, 0.3735, 0.7933, 0.8572] +2026-04-09 12:25:56.161740: Epoch time: 104.12 s +2026-04-09 12:25:57.273843: +2026-04-09 12:25:57.276341: Epoch 687 +2026-04-09 12:25:57.278386: Current learning rate: 0.00352 +2026-04-09 12:27:39.866428: train_loss -0.3228 +2026-04-09 12:27:39.870856: val_loss -0.2469 +2026-04-09 12:27:39.872686: Pseudo dice [0.2386, 0.2565, 0.6807, 0.7664, 0.3404, 0.1062, 0.828] +2026-04-09 12:27:39.874645: Epoch time: 102.6 s +2026-04-09 12:27:41.983858: +2026-04-09 12:27:41.985382: Epoch 688 +2026-04-09 12:27:41.987750: Current learning rate: 0.00351 +2026-04-09 12:29:25.514393: train_loss -0.3282 +2026-04-09 12:29:25.520164: val_loss -0.3104 +2026-04-09 12:29:25.522741: Pseudo dice [0.7677, 0.3178, 0.8803, 0.8748, 0.4291, 0.752, 0.8845] +2026-04-09 12:29:25.524896: Epoch time: 103.53 s +2026-04-09 12:29:26.653692: +2026-04-09 12:29:26.655656: Epoch 689 +2026-04-09 12:29:26.657701: Current learning rate: 0.0035 +2026-04-09 12:31:10.677103: train_loss -0.3329 +2026-04-09 
12:31:10.681557: val_loss -0.2749 +2026-04-09 12:31:10.683361: Pseudo dice [0.3856, 0.4152, 0.6859, 0.8141, 0.4799, 0.5504, 0.7451] +2026-04-09 12:31:10.685482: Epoch time: 104.03 s +2026-04-09 12:31:11.794829: +2026-04-09 12:31:11.808021: Epoch 690 +2026-04-09 12:31:11.810120: Current learning rate: 0.00349 +2026-04-09 12:32:56.288988: train_loss -0.3202 +2026-04-09 12:32:56.294548: val_loss -0.252 +2026-04-09 12:32:56.296477: Pseudo dice [0.1768, 0.281, 0.7926, 0.6069, 0.2613, 0.8493, 0.6978] +2026-04-09 12:32:56.298541: Epoch time: 104.5 s +2026-04-09 12:32:57.422280: +2026-04-09 12:32:57.424002: Epoch 691 +2026-04-09 12:32:57.425564: Current learning rate: 0.00348 +2026-04-09 12:34:42.440107: train_loss -0.3106 +2026-04-09 12:34:42.446390: val_loss -0.2614 +2026-04-09 12:34:42.449478: Pseudo dice [0.1475, 0.3627, 0.5271, 0.8389, 0.2249, 0.7033, 0.8909] +2026-04-09 12:34:42.451970: Epoch time: 105.02 s +2026-04-09 12:34:43.590903: +2026-04-09 12:34:43.593063: Epoch 692 +2026-04-09 12:34:43.594703: Current learning rate: 0.00346 +2026-04-09 12:36:27.479654: train_loss -0.3211 +2026-04-09 12:36:27.489197: val_loss -0.2658 +2026-04-09 12:36:27.491257: Pseudo dice [0.181, 0.0529, 0.7926, 0.8521, 0.3874, 0.2422, 0.8537] +2026-04-09 12:36:27.493692: Epoch time: 103.89 s +2026-04-09 12:36:28.630767: +2026-04-09 12:36:28.632525: Epoch 693 +2026-04-09 12:36:28.633911: Current learning rate: 0.00345 +2026-04-09 12:38:12.356666: train_loss -0.3163 +2026-04-09 12:38:12.363662: val_loss -0.2785 +2026-04-09 12:38:12.366936: Pseudo dice [0.3872, 0.4294, 0.8138, 0.6773, 0.2269, 0.8192, 0.3538] +2026-04-09 12:38:12.370468: Epoch time: 103.73 s +2026-04-09 12:38:13.519367: +2026-04-09 12:38:13.521702: Epoch 694 +2026-04-09 12:38:13.523279: Current learning rate: 0.00344 +2026-04-09 12:40:00.362703: train_loss -0.3299 +2026-04-09 12:40:00.368094: val_loss -0.2554 +2026-04-09 12:40:00.370009: Pseudo dice [0.2255, 0.4389, 0.5543, 0.8691, 0.1228, 0.7326, 0.3856] +2026-04-09 
12:40:00.372092: Epoch time: 106.85 s +2026-04-09 12:40:01.502825: +2026-04-09 12:40:01.505064: Epoch 695 +2026-04-09 12:40:01.507013: Current learning rate: 0.00343 +2026-04-09 12:41:45.467331: train_loss -0.3231 +2026-04-09 12:41:45.481421: val_loss -0.2687 +2026-04-09 12:41:45.483870: Pseudo dice [0.3147, 0.3342, 0.6223, 0.4657, 0.4298, 0.4533, 0.7665] +2026-04-09 12:41:45.485610: Epoch time: 103.97 s +2026-04-09 12:41:46.609138: +2026-04-09 12:41:46.611059: Epoch 696 +2026-04-09 12:41:46.612792: Current learning rate: 0.00342 +2026-04-09 12:43:29.540109: train_loss -0.314 +2026-04-09 12:43:29.546331: val_loss -0.2767 +2026-04-09 12:43:29.549017: Pseudo dice [0.7453, 0.574, 0.6861, 0.7625, 0.4033, 0.5233, 0.8352] +2026-04-09 12:43:29.551757: Epoch time: 102.93 s +2026-04-09 12:43:30.701720: +2026-04-09 12:43:30.706069: Epoch 697 +2026-04-09 12:43:30.709167: Current learning rate: 0.00341 +2026-04-09 12:45:14.185844: train_loss -0.3399 +2026-04-09 12:45:14.191601: val_loss -0.2843 +2026-04-09 12:45:14.194044: Pseudo dice [0.7694, 0.4753, 0.6663, 0.4605, 0.5685, 0.6435, 0.7933] +2026-04-09 12:45:14.197521: Epoch time: 103.49 s +2026-04-09 12:45:15.349669: +2026-04-09 12:45:15.351490: Epoch 698 +2026-04-09 12:45:15.353431: Current learning rate: 0.0034 +2026-04-09 12:46:58.208337: train_loss -0.328 +2026-04-09 12:46:58.213812: val_loss -0.2296 +2026-04-09 12:46:58.215806: Pseudo dice [0.5098, 0.366, 0.5007, 0.8348, 0.34, 0.5603, 0.4872] +2026-04-09 12:46:58.218010: Epoch time: 102.86 s +2026-04-09 12:46:59.380107: +2026-04-09 12:46:59.382110: Epoch 699 +2026-04-09 12:46:59.384462: Current learning rate: 0.00339 +2026-04-09 12:48:44.858417: train_loss -0.319 +2026-04-09 12:48:44.864857: val_loss -0.2732 +2026-04-09 12:48:44.867094: Pseudo dice [0.5209, 0.1836, 0.6558, 0.8756, 0.535, 0.715, 0.7196] +2026-04-09 12:48:44.869950: Epoch time: 105.48 s +2026-04-09 12:48:47.914148: +2026-04-09 12:48:47.916347: Epoch 700 +2026-04-09 12:48:47.919845: Current learning rate: 
0.00338 +2026-04-09 12:50:38.428735: train_loss -0.3056 +2026-04-09 12:50:38.439546: val_loss -0.2711 +2026-04-09 12:50:38.451423: Pseudo dice [0.253, 0.3726, 0.7349, 0.6096, 0.4132, 0.5538, 0.7583] +2026-04-09 12:50:38.453699: Epoch time: 110.52 s +2026-04-09 12:50:39.611931: +2026-04-09 12:50:39.614004: Epoch 701 +2026-04-09 12:50:39.616638: Current learning rate: 0.00337 +2026-04-09 12:52:23.302307: train_loss -0.3171 +2026-04-09 12:52:23.311016: val_loss -0.2733 +2026-04-09 12:52:23.314551: Pseudo dice [0.2218, 0.3152, 0.7418, 0.5742, 0.4669, 0.722, 0.8455] +2026-04-09 12:52:23.318053: Epoch time: 103.69 s +2026-04-09 12:52:24.465083: +2026-04-09 12:52:24.470290: Epoch 702 +2026-04-09 12:52:24.473411: Current learning rate: 0.00336 +2026-04-09 12:54:08.265352: train_loss -0.3193 +2026-04-09 12:54:08.273239: val_loss -0.2294 +2026-04-09 12:54:08.276565: Pseudo dice [0.588, 0.3683, 0.7278, 0.2474, 0.0801, 0.6686, 0.7579] +2026-04-09 12:54:08.278534: Epoch time: 103.8 s +2026-04-09 12:54:09.414437: +2026-04-09 12:54:09.416678: Epoch 703 +2026-04-09 12:54:09.419197: Current learning rate: 0.00335 +2026-04-09 12:55:52.690430: train_loss -0.3233 +2026-04-09 12:55:52.695542: val_loss -0.2731 +2026-04-09 12:55:52.697596: Pseudo dice [0.7955, 0.2885, 0.7946, 0.6225, 0.3086, 0.8101, 0.867] +2026-04-09 12:55:52.700007: Epoch time: 103.28 s +2026-04-09 12:55:53.835243: +2026-04-09 12:55:53.839171: Epoch 704 +2026-04-09 12:55:53.842915: Current learning rate: 0.00334 +2026-04-09 12:57:42.148286: train_loss -0.3246 +2026-04-09 12:57:42.153925: val_loss -0.2705 +2026-04-09 12:57:42.156328: Pseudo dice [0.6277, 0.5437, 0.6585, 0.477, 0.4253, 0.7766, 0.943] +2026-04-09 12:57:42.161348: Epoch time: 108.32 s +2026-04-09 12:57:43.299534: +2026-04-09 12:57:43.302205: Epoch 705 +2026-04-09 12:57:43.304885: Current learning rate: 0.00333 +2026-04-09 12:59:26.514420: train_loss -0.3277 +2026-04-09 12:59:26.519960: val_loss -0.2775 +2026-04-09 12:59:26.521900: Pseudo dice [0.4879, 
0.4662, 0.7142, 0.8502, 0.2771, 0.6206, 0.6719] +2026-04-09 12:59:26.523660: Epoch time: 103.22 s +2026-04-09 12:59:27.701554: +2026-04-09 12:59:27.705801: Epoch 706 +2026-04-09 12:59:27.707663: Current learning rate: 0.00332 +2026-04-09 13:01:13.873033: train_loss -0.3453 +2026-04-09 13:01:13.882348: val_loss -0.292 +2026-04-09 13:01:13.885949: Pseudo dice [0.2955, 0.4219, 0.7885, 0.8585, 0.429, 0.7452, 0.7988] +2026-04-09 13:01:13.888145: Epoch time: 106.17 s +2026-04-09 13:01:15.035517: +2026-04-09 13:01:15.037632: Epoch 707 +2026-04-09 13:01:15.039103: Current learning rate: 0.00331 +2026-04-09 13:02:59.623029: train_loss -0.3394 +2026-04-09 13:02:59.628959: val_loss -0.2489 +2026-04-09 13:02:59.631744: Pseudo dice [0.3319, 0.428, 0.6948, 0.7846, 0.0623, 0.5889, 0.9046] +2026-04-09 13:02:59.634458: Epoch time: 104.59 s +2026-04-09 13:03:00.778188: +2026-04-09 13:03:00.780398: Epoch 708 +2026-04-09 13:03:00.782433: Current learning rate: 0.0033 +2026-04-09 13:04:45.596977: train_loss -0.3178 +2026-04-09 13:04:45.603308: val_loss -0.2313 +2026-04-09 13:04:45.605956: Pseudo dice [0.4808, 0.4252, 0.6245, 0.2193, 0.1626, 0.2149, 0.6761] +2026-04-09 13:04:45.608161: Epoch time: 104.82 s +2026-04-09 13:04:46.731589: +2026-04-09 13:04:46.735293: Epoch 709 +2026-04-09 13:04:46.737571: Current learning rate: 0.00329 +2026-04-09 13:06:30.052815: train_loss -0.3127 +2026-04-09 13:06:30.058172: val_loss -0.2304 +2026-04-09 13:06:30.060617: Pseudo dice [0.6299, 0.5474, 0.7232, 0.6445, 0.0166, 0.6829, 0.7764] +2026-04-09 13:06:30.062540: Epoch time: 103.32 s +2026-04-09 13:06:31.208298: +2026-04-09 13:06:31.210890: Epoch 710 +2026-04-09 13:06:31.212900: Current learning rate: 0.00328 +2026-04-09 13:08:15.648441: train_loss -0.2946 +2026-04-09 13:08:15.654832: val_loss -0.2863 +2026-04-09 13:08:15.657739: Pseudo dice [0.4772, 0.1312, 0.8034, 0.053, 0.4244, 0.7665, 0.7278] +2026-04-09 13:08:15.661771: Epoch time: 104.44 s +2026-04-09 13:08:16.824843: +2026-04-09 
13:08:16.826778: Epoch 711 +2026-04-09 13:08:16.828394: Current learning rate: 0.00327 +2026-04-09 13:10:03.582299: train_loss -0.3197 +2026-04-09 13:10:03.594083: val_loss -0.2823 +2026-04-09 13:10:03.605318: Pseudo dice [0.6421, 0.6047, 0.5913, 0.8149, 0.2862, 0.6934, 0.832] +2026-04-09 13:10:03.609132: Epoch time: 106.76 s +2026-04-09 13:10:04.753236: +2026-04-09 13:10:04.762695: Epoch 712 +2026-04-09 13:10:04.766671: Current learning rate: 0.00326 +2026-04-09 13:11:50.208449: train_loss -0.3409 +2026-04-09 13:11:50.215571: val_loss -0.2883 +2026-04-09 13:11:50.219386: Pseudo dice [0.384, 0.4273, 0.7337, 0.7313, 0.3513, 0.6592, 0.6691] +2026-04-09 13:11:50.223374: Epoch time: 105.46 s +2026-04-09 13:11:51.363330: +2026-04-09 13:11:51.365517: Epoch 713 +2026-04-09 13:11:51.367950: Current learning rate: 0.00325 +2026-04-09 13:13:36.253808: train_loss -0.335 +2026-04-09 13:13:36.270467: val_loss -0.2994 +2026-04-09 13:13:36.274008: Pseudo dice [0.6546, 0.6636, 0.8016, 0.7837, 0.626, 0.7439, 0.8717] +2026-04-09 13:13:36.284588: Epoch time: 104.89 s +2026-04-09 13:13:37.463056: +2026-04-09 13:13:37.465050: Epoch 714 +2026-04-09 13:13:37.466902: Current learning rate: 0.00324 +2026-04-09 13:15:20.395514: train_loss -0.3404 +2026-04-09 13:15:20.401107: val_loss -0.2474 +2026-04-09 13:15:20.402763: Pseudo dice [0.2552, 0.4858, 0.689, 0.6818, 0.1832, 0.7257, 0.7528] +2026-04-09 13:15:20.404734: Epoch time: 102.94 s +2026-04-09 13:15:21.537148: +2026-04-09 13:15:21.540090: Epoch 715 +2026-04-09 13:15:21.541876: Current learning rate: 0.00323 +2026-04-09 13:17:05.577722: train_loss -0.3205 +2026-04-09 13:17:05.582591: val_loss -0.2596 +2026-04-09 13:17:05.584958: Pseudo dice [0.7469, 0.5894, 0.4631, 0.4393, 0.4052, 0.6604, 0.8039] +2026-04-09 13:17:05.587492: Epoch time: 104.04 s +2026-04-09 13:17:06.721394: +2026-04-09 13:17:06.725058: Epoch 716 +2026-04-09 13:17:06.728419: Current learning rate: 0.00322 +2026-04-09 13:18:50.414760: train_loss -0.3077 +2026-04-09 
13:18:50.420849: val_loss -0.247 +2026-04-09 13:18:50.423159: Pseudo dice [0.4992, 0.3025, 0.6417, 0.5769, 0.2611, 0.4695, 0.8052] +2026-04-09 13:18:50.425537: Epoch time: 103.7 s +2026-04-09 13:18:51.590688: +2026-04-09 13:18:51.595118: Epoch 717 +2026-04-09 13:18:51.597536: Current learning rate: 0.00321 +2026-04-09 13:20:37.951303: train_loss -0.3161 +2026-04-09 13:20:37.969814: val_loss -0.2785 +2026-04-09 13:20:37.973422: Pseudo dice [0.5845, 0.2066, 0.8007, 0.8073, 0.3388, 0.2638, 0.9183] +2026-04-09 13:20:37.976980: Epoch time: 106.36 s +2026-04-09 13:20:39.109261: +2026-04-09 13:20:39.111501: Epoch 718 +2026-04-09 13:20:39.113919: Current learning rate: 0.0032 +2026-04-09 13:22:21.965845: train_loss -0.3154 +2026-04-09 13:22:21.971599: val_loss -0.2516 +2026-04-09 13:22:21.973914: Pseudo dice [0.2901, 0.6205, 0.6737, 0.8177, 0.5964, 0.6815, 0.9236] +2026-04-09 13:22:21.976049: Epoch time: 102.86 s +2026-04-09 13:22:23.084858: +2026-04-09 13:22:23.088165: Epoch 719 +2026-04-09 13:22:23.090446: Current learning rate: 0.00319 +2026-04-09 13:24:06.590071: train_loss -0.3017 +2026-04-09 13:24:06.605859: val_loss -0.2299 +2026-04-09 13:24:06.610430: Pseudo dice [0.7752, 0.268, 0.7608, 0.8142, 0.1677, 0.6173, 0.5894] +2026-04-09 13:24:06.615193: Epoch time: 103.51 s +2026-04-09 13:24:07.751440: +2026-04-09 13:24:07.755031: Epoch 720 +2026-04-09 13:24:07.757063: Current learning rate: 0.00318 +2026-04-09 13:25:51.884036: train_loss -0.3185 +2026-04-09 13:25:51.888708: val_loss -0.2705 +2026-04-09 13:25:51.890863: Pseudo dice [0.7486, 0.2344, 0.5424, 0.5986, 0.1694, 0.7143, 0.7754] +2026-04-09 13:25:51.892738: Epoch time: 104.14 s +2026-04-09 13:25:53.027362: +2026-04-09 13:25:53.029492: Epoch 721 +2026-04-09 13:25:53.031356: Current learning rate: 0.00317 +2026-04-09 13:27:36.124810: train_loss -0.3119 +2026-04-09 13:27:36.132147: val_loss -0.273 +2026-04-09 13:27:36.135170: Pseudo dice [0.6301, 0.5075, 0.3044, 0.7733, 0.464, 0.6703, 0.9555] +2026-04-09 
13:27:36.137936: Epoch time: 103.1 s +2026-04-09 13:27:37.271455: +2026-04-09 13:27:37.274316: Epoch 722 +2026-04-09 13:27:37.276809: Current learning rate: 0.00316 +2026-04-09 13:29:24.716243: train_loss -0.3321 +2026-04-09 13:29:24.721255: val_loss -0.279 +2026-04-09 13:29:24.724163: Pseudo dice [0.5687, 0.5366, 0.8658, 0.6347, 0.4437, 0.8156, 0.6975] +2026-04-09 13:29:24.726479: Epoch time: 107.45 s +2026-04-09 13:29:25.866962: +2026-04-09 13:29:25.870963: Epoch 723 +2026-04-09 13:29:25.874860: Current learning rate: 0.00315 +2026-04-09 13:31:11.399482: train_loss -0.3263 +2026-04-09 13:31:11.404306: val_loss -0.2952 +2026-04-09 13:31:11.406240: Pseudo dice [0.6112, 0.4905, 0.5851, 0.6382, 0.5925, 0.8259, 0.8605] +2026-04-09 13:31:11.408246: Epoch time: 105.54 s +2026-04-09 13:31:12.541538: +2026-04-09 13:31:12.543221: Epoch 724 +2026-04-09 13:31:12.545117: Current learning rate: 0.00314 +2026-04-09 13:32:56.014221: train_loss -0.3192 +2026-04-09 13:32:56.021481: val_loss -0.2898 +2026-04-09 13:32:56.023458: Pseudo dice [0.3298, 0.5173, 0.6303, 0.8362, 0.3749, 0.8034, 0.925] +2026-04-09 13:32:56.025387: Epoch time: 103.48 s +2026-04-09 13:32:57.145052: +2026-04-09 13:32:57.146827: Epoch 725 +2026-04-09 13:32:57.154315: Current learning rate: 0.00313 +2026-04-09 13:34:41.771837: train_loss -0.3289 +2026-04-09 13:34:41.780035: val_loss -0.294 +2026-04-09 13:34:41.784008: Pseudo dice [0.4644, 0.5466, 0.8542, 0.6261, 0.6433, 0.8058, 0.9231] +2026-04-09 13:34:41.786135: Epoch time: 104.63 s +2026-04-09 13:34:42.921860: +2026-04-09 13:34:42.923749: Epoch 726 +2026-04-09 13:34:42.925465: Current learning rate: 0.00312 +2026-04-09 13:36:26.468309: train_loss -0.3297 +2026-04-09 13:36:26.476460: val_loss -0.2644 +2026-04-09 13:36:26.481351: Pseudo dice [0.3404, 0.2113, 0.8682, 0.1708, 0.1512, 0.8508, 0.786] +2026-04-09 13:36:26.483922: Epoch time: 103.55 s +2026-04-09 13:36:28.832224: +2026-04-09 13:36:28.834111: Epoch 727 +2026-04-09 13:36:28.836067: Current learning 
rate: 0.00311 +2026-04-09 13:38:11.517473: train_loss -0.3313 +2026-04-09 13:38:11.522586: val_loss -0.2669 +2026-04-09 13:38:11.524322: Pseudo dice [0.2451, 0.4217, 0.6283, 0.871, 0.3392, 0.7773, 0.6942] +2026-04-09 13:38:11.526108: Epoch time: 102.69 s +2026-04-09 13:38:12.656064: +2026-04-09 13:38:12.658314: Epoch 728 +2026-04-09 13:38:12.660022: Current learning rate: 0.0031 +2026-04-09 13:39:57.087825: train_loss -0.3347 +2026-04-09 13:39:57.095351: val_loss -0.2889 +2026-04-09 13:39:57.097753: Pseudo dice [0.5986, 0.5472, 0.6795, 0.8008, 0.3575, 0.688, 0.6924] +2026-04-09 13:39:57.099955: Epoch time: 104.43 s +2026-04-09 13:39:58.237839: +2026-04-09 13:39:58.240670: Epoch 729 +2026-04-09 13:39:58.243582: Current learning rate: 0.00309 +2026-04-09 13:41:43.764396: train_loss -0.3304 +2026-04-09 13:41:43.771964: val_loss -0.2879 +2026-04-09 13:41:43.774481: Pseudo dice [0.7016, 0.2241, 0.7405, 0.8045, 0.2935, 0.6212, 0.8327] +2026-04-09 13:41:43.780838: Epoch time: 105.53 s +2026-04-09 13:41:44.988317: +2026-04-09 13:41:44.990163: Epoch 730 +2026-04-09 13:41:44.991807: Current learning rate: 0.00308 +2026-04-09 13:43:27.612811: train_loss -0.3134 +2026-04-09 13:43:27.619113: val_loss -0.2749 +2026-04-09 13:43:27.621737: Pseudo dice [0.7279, 0.5419, 0.6355, 0.7622, 0.2144, 0.7434, 0.8113] +2026-04-09 13:43:27.624136: Epoch time: 102.63 s +2026-04-09 13:43:28.748943: +2026-04-09 13:43:28.755052: Epoch 731 +2026-04-09 13:43:28.757519: Current learning rate: 0.00307 +2026-04-09 13:45:11.623188: train_loss -0.328 +2026-04-09 13:45:11.627901: val_loss -0.2832 +2026-04-09 13:45:11.629702: Pseudo dice [0.6297, 0.1713, 0.6567, 0.4141, 0.4102, 0.7529, 0.7137] +2026-04-09 13:45:11.631577: Epoch time: 102.88 s +2026-04-09 13:45:12.774762: +2026-04-09 13:45:12.776911: Epoch 732 +2026-04-09 13:45:12.778431: Current learning rate: 0.00306 +2026-04-09 13:46:56.975737: train_loss -0.3311 +2026-04-09 13:46:56.987560: val_loss -0.2856 +2026-04-09 13:46:56.989419: Pseudo dice 
[0.6789, 0.3526, 0.6549, 0.8691, 0.5003, 0.885, 0.8227] +2026-04-09 13:46:56.992294: Epoch time: 104.2 s +2026-04-09 13:46:58.131389: +2026-04-09 13:46:58.134059: Epoch 733 +2026-04-09 13:46:58.135785: Current learning rate: 0.00305 +2026-04-09 13:48:42.335796: train_loss -0.3452 +2026-04-09 13:48:42.340619: val_loss -0.2978 +2026-04-09 13:48:42.343237: Pseudo dice [0.7166, 0.5674, 0.7283, 0.8947, 0.4949, 0.7138, 0.8574] +2026-04-09 13:48:42.345150: Epoch time: 104.21 s +2026-04-09 13:48:43.501170: +2026-04-09 13:48:43.503829: Epoch 734 +2026-04-09 13:48:43.506238: Current learning rate: 0.00304 +2026-04-09 13:50:26.906861: train_loss -0.3305 +2026-04-09 13:50:26.911879: val_loss -0.2604 +2026-04-09 13:50:26.913457: Pseudo dice [0.7793, 0.2428, 0.6291, 0.2446, 0.2908, 0.6122, 0.8682] +2026-04-09 13:50:26.915699: Epoch time: 103.41 s +2026-04-09 13:50:28.035069: +2026-04-09 13:50:28.037250: Epoch 735 +2026-04-09 13:50:28.038867: Current learning rate: 0.00303 +2026-04-09 13:52:10.985548: train_loss -0.3455 +2026-04-09 13:52:10.990720: val_loss -0.258 +2026-04-09 13:52:10.992516: Pseudo dice [0.5285, 0.6728, 0.6069, 0.4977, 0.3567, 0.729, 0.614] +2026-04-09 13:52:10.994493: Epoch time: 102.95 s +2026-04-09 13:52:12.110289: +2026-04-09 13:52:12.111824: Epoch 736 +2026-04-09 13:52:12.114315: Current learning rate: 0.00302 +2026-04-09 13:53:56.002017: train_loss -0.3202 +2026-04-09 13:53:56.007300: val_loss -0.2829 +2026-04-09 13:53:56.009387: Pseudo dice [0.5173, 0.1914, 0.725, 0.1302, 0.6599, 0.8561, 0.9109] +2026-04-09 13:53:56.011826: Epoch time: 103.89 s +2026-04-09 13:53:57.121150: +2026-04-09 13:53:57.125618: Epoch 737 +2026-04-09 13:53:57.128214: Current learning rate: 0.00301 +2026-04-09 13:55:40.002304: train_loss -0.3362 +2026-04-09 13:55:40.010353: val_loss -0.2734 +2026-04-09 13:55:40.016143: Pseudo dice [0.4554, 0.2357, 0.7687, 0.2116, 0.2216, 0.815, 0.6936] +2026-04-09 13:55:40.018931: Epoch time: 102.88 s +2026-04-09 13:55:41.176925: +2026-04-09 
13:55:41.179105: Epoch 738 +2026-04-09 13:55:41.181070: Current learning rate: 0.003 +2026-04-09 13:57:24.314090: train_loss -0.3345 +2026-04-09 13:57:24.320812: val_loss -0.2863 +2026-04-09 13:57:24.323723: Pseudo dice [0.7481, 0.5699, 0.7918, 0.0477, 0.5179, 0.7813, 0.7671] +2026-04-09 13:57:24.327025: Epoch time: 103.14 s +2026-04-09 13:57:25.462602: +2026-04-09 13:57:25.464154: Epoch 739 +2026-04-09 13:57:25.465767: Current learning rate: 0.00299 +2026-04-09 13:59:08.745768: train_loss -0.3285 +2026-04-09 13:59:08.752048: val_loss -0.248 +2026-04-09 13:59:08.754396: Pseudo dice [0.2572, 0.3466, 0.6854, 0.7689, 0.2618, 0.8377, 0.2946] +2026-04-09 13:59:08.757558: Epoch time: 103.29 s +2026-04-09 13:59:09.884075: +2026-04-09 13:59:09.885985: Epoch 740 +2026-04-09 13:59:09.888024: Current learning rate: 0.00297 +2026-04-09 14:00:52.826466: train_loss -0.3414 +2026-04-09 14:00:52.832205: val_loss -0.3019 +2026-04-09 14:00:52.834320: Pseudo dice [0.682, 0.4515, 0.6256, 0.8998, 0.6569, 0.7268, 0.7719] +2026-04-09 14:00:52.836338: Epoch time: 102.95 s +2026-04-09 14:00:53.942870: +2026-04-09 14:00:53.945105: Epoch 741 +2026-04-09 14:00:53.946851: Current learning rate: 0.00296 +2026-04-09 14:02:36.988628: train_loss -0.3474 +2026-04-09 14:02:36.992873: val_loss -0.2678 +2026-04-09 14:02:36.994743: Pseudo dice [0.4434, 0.1044, 0.751, 0.8013, 0.3505, 0.6856, 0.814] +2026-04-09 14:02:36.996632: Epoch time: 103.05 s +2026-04-09 14:02:38.134084: +2026-04-09 14:02:38.136063: Epoch 742 +2026-04-09 14:02:38.138544: Current learning rate: 0.00295 +2026-04-09 14:04:21.084093: train_loss -0.3262 +2026-04-09 14:04:21.090380: val_loss -0.267 +2026-04-09 14:04:21.093404: Pseudo dice [0.4258, 0.2392, 0.6659, 0.8877, 0.3325, 0.5784, 0.8267] +2026-04-09 14:04:21.095606: Epoch time: 102.95 s +2026-04-09 14:04:22.220356: +2026-04-09 14:04:22.224848: Epoch 743 +2026-04-09 14:04:22.231023: Current learning rate: 0.00294 +2026-04-09 14:06:05.890001: train_loss -0.3202 +2026-04-09 
14:06:05.895951: val_loss -0.2573 +2026-04-09 14:06:05.898956: Pseudo dice [0.7419, 0.3273, 0.716, 0.4685, 0.2653, 0.8227, 0.5648] +2026-04-09 14:06:05.900840: Epoch time: 103.67 s +2026-04-09 14:06:07.086410: +2026-04-09 14:06:07.088431: Epoch 744 +2026-04-09 14:06:07.090055: Current learning rate: 0.00293 +2026-04-09 14:07:50.105796: train_loss -0.3445 +2026-04-09 14:07:50.114913: val_loss -0.2622 +2026-04-09 14:07:50.117196: Pseudo dice [0.527, 0.5773, 0.7044, 0.4952, 0.3358, 0.7017, 0.8029] +2026-04-09 14:07:50.119639: Epoch time: 103.02 s +2026-04-09 14:07:51.264769: +2026-04-09 14:07:51.266287: Epoch 745 +2026-04-09 14:07:51.268079: Current learning rate: 0.00292 +2026-04-09 14:09:34.089601: train_loss -0.3347 +2026-04-09 14:09:34.094413: val_loss -0.258 +2026-04-09 14:09:34.096444: Pseudo dice [0.6808, 0.3224, 0.5949, 0.8506, 0.2967, 0.8074, 0.5454] +2026-04-09 14:09:34.098036: Epoch time: 102.83 s +2026-04-09 14:09:35.236491: +2026-04-09 14:09:35.238180: Epoch 746 +2026-04-09 14:09:35.240128: Current learning rate: 0.00291 +2026-04-09 14:11:18.074614: train_loss -0.3245 +2026-04-09 14:11:18.082533: val_loss -0.3218 +2026-04-09 14:11:18.085845: Pseudo dice [0.8471, 0.4868, 0.8356, 0.8787, 0.7412, 0.8567, 0.8017] +2026-04-09 14:11:18.089041: Epoch time: 102.84 s +2026-04-09 14:11:20.301770: +2026-04-09 14:11:20.303610: Epoch 747 +2026-04-09 14:11:20.305136: Current learning rate: 0.0029 +2026-04-09 14:13:03.180203: train_loss -0.3442 +2026-04-09 14:13:03.185500: val_loss -0.3144 +2026-04-09 14:13:03.187752: Pseudo dice [0.3201, 0.04, 0.7001, 0.5448, 0.6245, 0.857, 0.8481] +2026-04-09 14:13:03.190370: Epoch time: 102.88 s +2026-04-09 14:13:04.313379: +2026-04-09 14:13:04.314969: Epoch 748 +2026-04-09 14:13:04.316476: Current learning rate: 0.00289 +2026-04-09 14:14:47.251362: train_loss -0.336 +2026-04-09 14:14:47.257362: val_loss -0.2483 +2026-04-09 14:14:47.259162: Pseudo dice [0.5799, 0.4835, 0.7632, 0.7397, 0.2557, 0.5815, 0.317] +2026-04-09 
14:14:47.261077: Epoch time: 102.94 s +2026-04-09 14:14:48.506786: +2026-04-09 14:14:48.509103: Epoch 749 +2026-04-09 14:14:48.511629: Current learning rate: 0.00288 +2026-04-09 14:16:32.288241: train_loss -0.3282 +2026-04-09 14:16:32.302112: val_loss -0.2608 +2026-04-09 14:16:32.315270: Pseudo dice [0.4518, 0.2295, 0.6469, 0.8918, 0.4366, 0.7529, 0.576] +2026-04-09 14:16:32.317198: Epoch time: 103.78 s +2026-04-09 14:16:35.160334: +2026-04-09 14:16:35.163079: Epoch 750 +2026-04-09 14:16:35.165369: Current learning rate: 0.00287 +2026-04-09 14:18:17.896520: train_loss -0.3439 +2026-04-09 14:18:17.903468: val_loss -0.2791 +2026-04-09 14:18:17.905251: Pseudo dice [0.4434, 0.2402, 0.8448, 0.8564, 0.4787, 0.7916, 0.8207] +2026-04-09 14:18:17.907520: Epoch time: 102.74 s +2026-04-09 14:18:19.031019: +2026-04-09 14:18:19.033270: Epoch 751 +2026-04-09 14:18:19.035140: Current learning rate: 0.00286 +2026-04-09 14:20:05.058413: train_loss -0.3345 +2026-04-09 14:20:05.065027: val_loss -0.268 +2026-04-09 14:20:05.067394: Pseudo dice [0.348, 0.5217, 0.7091, 0.4278, 0.4957, 0.6233, 0.722] +2026-04-09 14:20:05.070302: Epoch time: 106.03 s +2026-04-09 14:20:06.224331: +2026-04-09 14:20:06.225947: Epoch 752 +2026-04-09 14:20:06.227309: Current learning rate: 0.00285 +2026-04-09 14:21:49.123352: train_loss -0.3321 +2026-04-09 14:21:49.129477: val_loss -0.246 +2026-04-09 14:21:49.131341: Pseudo dice [0.37, 0.1571, 0.5526, 0.7327, 0.3971, 0.6786, 0.8544] +2026-04-09 14:21:49.133832: Epoch time: 102.9 s +2026-04-09 14:21:50.251268: +2026-04-09 14:21:50.253150: Epoch 753 +2026-04-09 14:21:50.254872: Current learning rate: 0.00284 +2026-04-09 14:23:33.370016: train_loss -0.3367 +2026-04-09 14:23:33.374814: val_loss -0.2722 +2026-04-09 14:23:33.376963: Pseudo dice [0.385, 0.2173, 0.752, 0.6404, 0.5373, 0.6847, 0.6128] +2026-04-09 14:23:33.378692: Epoch time: 103.12 s +2026-04-09 14:23:34.522370: +2026-04-09 14:23:34.524390: Epoch 754 +2026-04-09 14:23:34.526018: Current learning rate: 
0.00283 +2026-04-09 14:25:17.351059: train_loss -0.3203 +2026-04-09 14:25:17.355187: val_loss -0.2627 +2026-04-09 14:25:17.358308: Pseudo dice [0.3117, 0.4997, 0.7528, 0.8166, 0.4168, 0.5442, 0.5572] +2026-04-09 14:25:17.360289: Epoch time: 102.83 s +2026-04-09 14:25:18.490661: +2026-04-09 14:25:18.492985: Epoch 755 +2026-04-09 14:25:18.494427: Current learning rate: 0.00282 +2026-04-09 14:27:03.179824: train_loss -0.324 +2026-04-09 14:27:03.191845: val_loss -0.3237 +2026-04-09 14:27:03.193712: Pseudo dice [0.8051, 0.2003, 0.8145, 0.8511, 0.4166, 0.8872, 0.9556] +2026-04-09 14:27:03.195397: Epoch time: 104.69 s +2026-04-09 14:27:04.337086: +2026-04-09 14:27:04.339387: Epoch 756 +2026-04-09 14:27:04.342403: Current learning rate: 0.00281 +2026-04-09 14:28:47.013709: train_loss -0.3342 +2026-04-09 14:28:47.019120: val_loss -0.2983 +2026-04-09 14:28:47.020729: Pseudo dice [0.6695, 0.3124, 0.6817, 0.5061, 0.5951, 0.7994, 0.8237] +2026-04-09 14:28:47.022189: Epoch time: 102.68 s +2026-04-09 14:28:48.139179: +2026-04-09 14:28:48.141741: Epoch 757 +2026-04-09 14:28:48.143358: Current learning rate: 0.0028 +2026-04-09 14:30:30.647230: train_loss -0.3342 +2026-04-09 14:30:30.653026: val_loss -0.2592 +2026-04-09 14:30:30.655036: Pseudo dice [0.4056, 0.2756, 0.1953, 0.5337, 0.2364, 0.5248, 0.862] +2026-04-09 14:30:30.657150: Epoch time: 102.51 s +2026-04-09 14:30:31.793677: +2026-04-09 14:30:31.795335: Epoch 758 +2026-04-09 14:30:31.797131: Current learning rate: 0.00279 +2026-04-09 14:32:16.018165: train_loss -0.3319 +2026-04-09 14:32:16.022997: val_loss -0.2659 +2026-04-09 14:32:16.024739: Pseudo dice [0.438, 0.0618, 0.6831, 0.754, 0.3611, 0.7339, 0.8711] +2026-04-09 14:32:16.026556: Epoch time: 104.23 s +2026-04-09 14:32:17.170099: +2026-04-09 14:32:17.174008: Epoch 759 +2026-04-09 14:32:17.176737: Current learning rate: 0.00278 +2026-04-09 14:33:59.395665: train_loss -0.3268 +2026-04-09 14:33:59.401729: val_loss -0.2689 +2026-04-09 14:33:59.405605: Pseudo dice [0.6281, 
0.4382, 0.5309, 0.5022, 0.5571, 0.7461, 0.8346] +2026-04-09 14:33:59.407673: Epoch time: 102.23 s +2026-04-09 14:34:00.558663: +2026-04-09 14:34:00.560666: Epoch 760 +2026-04-09 14:34:00.562506: Current learning rate: 0.00277 +2026-04-09 14:35:44.067598: train_loss -0.3427 +2026-04-09 14:35:44.073230: val_loss -0.2885 +2026-04-09 14:35:44.075991: Pseudo dice [0.5703, 0.4891, 0.8544, 0.7867, 0.425, 0.7365, 0.8428] +2026-04-09 14:35:44.080576: Epoch time: 103.51 s +2026-04-09 14:35:45.262400: +2026-04-09 14:35:45.264220: Epoch 761 +2026-04-09 14:35:45.266740: Current learning rate: 0.00276 +2026-04-09 14:37:27.826576: train_loss -0.3378 +2026-04-09 14:37:27.831466: val_loss -0.2567 +2026-04-09 14:37:27.833384: Pseudo dice [0.4804, 0.4158, 0.794, 0.1002, 0.2414, 0.227, 0.5007] +2026-04-09 14:37:27.835361: Epoch time: 102.57 s +2026-04-09 14:37:28.979455: +2026-04-09 14:37:28.981163: Epoch 762 +2026-04-09 14:37:28.982748: Current learning rate: 0.00275 +2026-04-09 14:39:11.454811: train_loss -0.3209 +2026-04-09 14:39:11.464921: val_loss -0.2159 +2026-04-09 14:39:11.467434: Pseudo dice [0.5239, 0.2892, 0.4505, 0.4846, 0.4209, 0.3423, 0.7723] +2026-04-09 14:39:11.469515: Epoch time: 102.48 s +2026-04-09 14:39:12.647359: +2026-04-09 14:39:12.649523: Epoch 763 +2026-04-09 14:39:12.651190: Current learning rate: 0.00274 +2026-04-09 14:40:56.362328: train_loss -0.34 +2026-04-09 14:40:56.368774: val_loss -0.2658 +2026-04-09 14:40:56.371757: Pseudo dice [0.7623, 0.4076, 0.6844, 0.8742, 0.5369, 0.4961, 0.6523] +2026-04-09 14:40:56.373991: Epoch time: 103.72 s +2026-04-09 14:40:57.510200: +2026-04-09 14:40:57.512439: Epoch 764 +2026-04-09 14:40:57.514299: Current learning rate: 0.00273 +2026-04-09 14:42:39.949512: train_loss -0.3423 +2026-04-09 14:42:39.955950: val_loss -0.3014 +2026-04-09 14:42:39.957882: Pseudo dice [0.2686, 0.6437, 0.6525, 0.4945, 0.5434, 0.862, 0.8084] +2026-04-09 14:42:39.960091: Epoch time: 102.44 s +2026-04-09 14:42:41.103570: +2026-04-09 14:42:41.105994: 
Epoch 765 +2026-04-09 14:42:41.108215: Current learning rate: 0.00272 +2026-04-09 14:44:23.372992: train_loss -0.344 +2026-04-09 14:44:23.378786: val_loss -0.2643 +2026-04-09 14:44:23.380858: Pseudo dice [0.5515, 0.2504, 0.6407, 0.7329, 0.2348, 0.717, 0.8508] +2026-04-09 14:44:23.382732: Epoch time: 102.27 s +2026-04-09 14:44:24.524649: +2026-04-09 14:44:24.526606: Epoch 766 +2026-04-09 14:44:24.528584: Current learning rate: 0.00271 +2026-04-09 14:46:07.953695: train_loss -0.3533 +2026-04-09 14:46:07.959722: val_loss -0.2651 +2026-04-09 14:46:07.964618: Pseudo dice [0.4778, 0.5197, 0.7416, 0.2616, 0.5026, 0.7456, 0.7937] +2026-04-09 14:46:07.967885: Epoch time: 103.43 s +2026-04-09 14:46:09.129261: +2026-04-09 14:46:09.131136: Epoch 767 +2026-04-09 14:46:09.133030: Current learning rate: 0.0027 +2026-04-09 14:47:51.977847: train_loss -0.3447 +2026-04-09 14:47:51.983166: val_loss -0.3018 +2026-04-09 14:47:51.985734: Pseudo dice [0.8284, 0.3349, 0.8156, 0.8726, 0.2725, 0.8398, 0.8734] +2026-04-09 14:47:51.988204: Epoch time: 102.85 s +2026-04-09 14:47:53.123702: +2026-04-09 14:47:53.126498: Epoch 768 +2026-04-09 14:47:53.129461: Current learning rate: 0.00268 +2026-04-09 14:49:36.529334: train_loss -0.3372 +2026-04-09 14:49:36.535765: val_loss -0.278 +2026-04-09 14:49:36.538409: Pseudo dice [0.5296, 0.3712, 0.5724, 0.2505, 0.4148, 0.5849, 0.8849] +2026-04-09 14:49:36.540971: Epoch time: 103.41 s +2026-04-09 14:49:37.683466: +2026-04-09 14:49:37.685390: Epoch 769 +2026-04-09 14:49:37.687088: Current learning rate: 0.00267 +2026-04-09 14:51:19.954257: train_loss -0.3105 +2026-04-09 14:51:19.960315: val_loss -0.283 +2026-04-09 14:51:19.962110: Pseudo dice [0.4226, 0.3317, 0.807, 0.6859, 0.4807, 0.6925, 0.7372] +2026-04-09 14:51:19.964295: Epoch time: 102.27 s +2026-04-09 14:51:21.111896: +2026-04-09 14:51:21.113954: Epoch 770 +2026-04-09 14:51:21.117068: Current learning rate: 0.00266 +2026-04-09 14:53:03.595102: train_loss -0.3372 +2026-04-09 14:53:03.602464: val_loss 
-0.2842 +2026-04-09 14:53:03.604864: Pseudo dice [0.4715, 0.262, 0.5853, 0.2555, 0.3485, 0.5527, 0.7523] +2026-04-09 14:53:03.607590: Epoch time: 102.49 s +2026-04-09 14:53:04.767828: +2026-04-09 14:53:04.770379: Epoch 771 +2026-04-09 14:53:04.772860: Current learning rate: 0.00265 +2026-04-09 14:54:47.964952: train_loss -0.3405 +2026-04-09 14:54:47.970451: val_loss -0.2883 +2026-04-09 14:54:47.972416: Pseudo dice [0.496, 0.2926, 0.6491, 0.5969, 0.5438, 0.5744, 0.8937] +2026-04-09 14:54:47.974464: Epoch time: 103.2 s +2026-04-09 14:54:49.115913: +2026-04-09 14:54:49.117951: Epoch 772 +2026-04-09 14:54:49.120056: Current learning rate: 0.00264 +2026-04-09 14:56:32.302531: train_loss -0.3448 +2026-04-09 14:56:32.310970: val_loss -0.2845 +2026-04-09 14:56:32.314490: Pseudo dice [0.3775, 0.3052, 0.6879, 0.5863, 0.4311, 0.7203, 0.7958] +2026-04-09 14:56:32.317482: Epoch time: 103.19 s +2026-04-09 14:56:33.477530: +2026-04-09 14:56:33.479778: Epoch 773 +2026-04-09 14:56:33.481987: Current learning rate: 0.00263 +2026-04-09 14:58:20.131824: train_loss -0.3344 +2026-04-09 14:58:20.137753: val_loss -0.2639 +2026-04-09 14:58:20.139622: Pseudo dice [0.3637, 0.4038, 0.6634, 0.6819, 0.5002, 0.4919, 0.7092] +2026-04-09 14:58:20.141632: Epoch time: 106.66 s +2026-04-09 14:58:21.377911: +2026-04-09 14:58:21.379915: Epoch 774 +2026-04-09 14:58:21.382002: Current learning rate: 0.00262 +2026-04-09 15:00:04.391577: train_loss -0.334 +2026-04-09 15:00:04.398386: val_loss -0.2738 +2026-04-09 15:00:04.401141: Pseudo dice [0.2004, 0.1212, 0.5753, 0.207, 0.4957, 0.7888, 0.9526] +2026-04-09 15:00:04.403085: Epoch time: 103.02 s +2026-04-09 15:00:05.544220: +2026-04-09 15:00:05.548986: Epoch 775 +2026-04-09 15:00:05.552158: Current learning rate: 0.00261 +2026-04-09 15:01:49.305759: train_loss -0.3406 +2026-04-09 15:01:49.313719: val_loss -0.2916 +2026-04-09 15:01:49.317172: Pseudo dice [0.7048, 0.301, 0.7066, 0.5397, 0.5922, 0.7503, 0.7735] +2026-04-09 15:01:49.320231: Epoch time: 103.76 s 
+2026-04-09 15:01:50.452560: +2026-04-09 15:01:50.454491: Epoch 776 +2026-04-09 15:01:50.456636: Current learning rate: 0.0026 +2026-04-09 15:03:33.227109: train_loss -0.3421 +2026-04-09 15:03:33.232063: val_loss -0.2958 +2026-04-09 15:03:33.234165: Pseudo dice [0.7197, 0.6491, 0.8188, 0.6957, 0.4392, 0.6648, 0.7996] +2026-04-09 15:03:33.236256: Epoch time: 102.78 s +2026-04-09 15:03:34.377395: +2026-04-09 15:03:34.379005: Epoch 777 +2026-04-09 15:03:34.380865: Current learning rate: 0.00259 +2026-04-09 15:05:18.127862: train_loss -0.3345 +2026-04-09 15:05:18.133821: val_loss -0.2907 +2026-04-09 15:05:18.136125: Pseudo dice [0.3055, 0.2426, 0.5933, 0.6991, 0.4739, 0.7898, 0.8703] +2026-04-09 15:05:18.138482: Epoch time: 103.75 s +2026-04-09 15:05:19.293417: +2026-04-09 15:05:19.296006: Epoch 778 +2026-04-09 15:05:19.297564: Current learning rate: 0.00258 +2026-04-09 15:07:02.937811: train_loss -0.3374 +2026-04-09 15:07:02.946999: val_loss -0.246 +2026-04-09 15:07:02.949055: Pseudo dice [0.3541, 0.3456, 0.4859, 0.8806, 0.1623, 0.6623, 0.8341] +2026-04-09 15:07:02.951456: Epoch time: 103.65 s +2026-04-09 15:07:04.109702: +2026-04-09 15:07:04.112162: Epoch 779 +2026-04-09 15:07:04.114288: Current learning rate: 0.00257 +2026-04-09 15:08:47.182599: train_loss -0.3415 +2026-04-09 15:08:47.191523: val_loss -0.2607 +2026-04-09 15:08:47.193667: Pseudo dice [0.4247, 0.3699, 0.652, 0.8348, 0.4645, 0.6266, 0.1843] +2026-04-09 15:08:47.195662: Epoch time: 103.08 s +2026-04-09 15:08:48.360285: +2026-04-09 15:08:48.361931: Epoch 780 +2026-04-09 15:08:48.363497: Current learning rate: 0.00256 +2026-04-09 15:10:33.095100: train_loss -0.3425 +2026-04-09 15:10:33.102501: val_loss -0.3059 +2026-04-09 15:10:33.105616: Pseudo dice [0.7197, 0.6813, 0.7825, 0.8222, 0.4222, 0.8589, 0.6284] +2026-04-09 15:10:33.109017: Epoch time: 104.74 s +2026-04-09 15:10:34.278885: +2026-04-09 15:10:34.280656: Epoch 781 +2026-04-09 15:10:34.282180: Current learning rate: 0.00255 +2026-04-09 
15:12:17.731164: train_loss -0.3426 +2026-04-09 15:12:17.738460: val_loss -0.2756 +2026-04-09 15:12:17.741189: Pseudo dice [0.2705, 0.2078, 0.776, 0.8166, 0.6271, 0.7054, 0.8243] +2026-04-09 15:12:17.744826: Epoch time: 103.46 s +2026-04-09 15:12:18.916571: +2026-04-09 15:12:18.918838: Epoch 782 +2026-04-09 15:12:18.920749: Current learning rate: 0.00254 +2026-04-09 15:14:37.550787: train_loss -0.3418 +2026-04-09 15:14:37.565028: val_loss -0.315 +2026-04-09 15:14:37.568545: Pseudo dice [0.615, 0.3963, 0.8032, 0.7355, 0.2828, 0.7838, 0.782] +2026-04-09 15:14:37.576809: Epoch time: 138.64 s +2026-04-09 15:14:38.719946: +2026-04-09 15:14:38.724050: Epoch 783 +2026-04-09 15:14:38.727974: Current learning rate: 0.00253 +2026-04-09 15:16:30.490328: train_loss -0.3574 +2026-04-09 15:16:30.496433: val_loss -0.2697 +2026-04-09 15:16:30.498696: Pseudo dice [0.1984, 0.4335, 0.7063, 0.0713, 0.4454, 0.747, 0.9256] +2026-04-09 15:16:30.500811: Epoch time: 111.77 s +2026-04-09 15:16:31.668767: +2026-04-09 15:16:31.671288: Epoch 784 +2026-04-09 15:16:31.673238: Current learning rate: 0.00252 +2026-04-09 15:18:14.419231: train_loss -0.3563 +2026-04-09 15:18:14.425758: val_loss -0.3264 +2026-04-09 15:18:14.428083: Pseudo dice [0.7939, 0.1569, 0.5748, 0.6245, 0.4341, 0.7891, 0.8976] +2026-04-09 15:18:14.432138: Epoch time: 102.75 s +2026-04-09 15:18:15.593327: +2026-04-09 15:18:15.595040: Epoch 785 +2026-04-09 15:18:15.596933: Current learning rate: 0.00251 +2026-04-09 15:19:59.221622: train_loss -0.41 +2026-04-09 15:19:59.232177: val_loss -0.2944 +2026-04-09 15:19:59.233825: Pseudo dice [0.3354, 0.3304, 0.7138, 0.8431, 0.3655, 0.7214, 0.7711] +2026-04-09 15:19:59.236987: Epoch time: 103.63 s +2026-04-09 15:20:00.402027: +2026-04-09 15:20:00.404007: Epoch 786 +2026-04-09 15:20:00.405602: Current learning rate: 0.0025 +2026-04-09 15:21:43.288631: train_loss -0.4077 +2026-04-09 15:21:43.293974: val_loss -0.3591 +2026-04-09 15:21:43.296237: Pseudo dice [0.3606, 0.3516, 0.6251, 0.6984, 
0.4434, 0.733, 0.8863] +2026-04-09 15:21:43.298745: Epoch time: 102.89 s +2026-04-09 15:21:44.457111: +2026-04-09 15:21:44.459119: Epoch 787 +2026-04-09 15:21:44.460930: Current learning rate: 0.00249 +2026-04-09 15:23:27.194908: train_loss -0.4013 +2026-04-09 15:23:27.202861: val_loss -0.3306 +2026-04-09 15:23:27.204994: Pseudo dice [0.7174, 0.3872, 0.7882, 0.1693, 0.3321, 0.4326, 0.8495] +2026-04-09 15:23:27.207345: Epoch time: 102.74 s +2026-04-09 15:23:28.452844: +2026-04-09 15:23:28.454474: Epoch 788 +2026-04-09 15:23:28.455902: Current learning rate: 0.00248 +2026-04-09 15:25:11.398955: train_loss -0.3921 +2026-04-09 15:25:11.404234: val_loss -0.3467 +2026-04-09 15:25:11.406574: Pseudo dice [0.6097, 0.1074, 0.7252, 0.1388, 0.5724, 0.6426, 0.6662] +2026-04-09 15:25:11.408980: Epoch time: 102.95 s +2026-04-09 15:25:12.561921: +2026-04-09 15:25:12.563939: Epoch 789 +2026-04-09 15:25:12.565669: Current learning rate: 0.00247 +2026-04-09 15:26:55.147396: train_loss -0.4051 +2026-04-09 15:26:55.153520: val_loss -0.3319 +2026-04-09 15:26:55.155878: Pseudo dice [0.307, 0.3906, 0.7319, 0.8393, 0.4065, 0.7722, 0.878] +2026-04-09 15:26:55.157850: Epoch time: 102.59 s +2026-04-09 15:26:56.315521: +2026-04-09 15:26:56.317370: Epoch 790 +2026-04-09 15:26:56.320024: Current learning rate: 0.00245 +2026-04-09 15:28:39.466878: train_loss -0.4174 +2026-04-09 15:28:39.472183: val_loss -0.3628 +2026-04-09 15:28:39.475074: Pseudo dice [0.6594, 0.3739, 0.7273, 0.81, 0.3225, 0.8633, 0.7848] +2026-04-09 15:28:39.477226: Epoch time: 103.15 s +2026-04-09 15:28:40.652924: +2026-04-09 15:28:40.655045: Epoch 791 +2026-04-09 15:28:40.656842: Current learning rate: 0.00244 +2026-04-09 15:30:23.601821: train_loss -0.406 +2026-04-09 15:30:23.607459: val_loss -0.3734 +2026-04-09 15:30:23.609569: Pseudo dice [0.244, 0.4172, 0.6975, 0.6615, 0.515, 0.7074, 0.9259] +2026-04-09 15:30:23.611742: Epoch time: 102.95 s +2026-04-09 15:30:24.771274: +2026-04-09 15:30:24.773261: Epoch 792 +2026-04-09 
15:30:24.774802: Current learning rate: 0.00243 +2026-04-09 15:32:07.103647: train_loss -0.3955 +2026-04-09 15:32:07.108230: val_loss -0.3417 +2026-04-09 15:32:07.110692: Pseudo dice [0.6221, 0.4253, 0.7293, 0.6211, 0.3585, 0.8461, 0.3415] +2026-04-09 15:32:07.113404: Epoch time: 102.34 s +2026-04-09 15:32:08.297368: +2026-04-09 15:32:08.299557: Epoch 793 +2026-04-09 15:32:08.301453: Current learning rate: 0.00242 +2026-04-09 15:33:51.940907: train_loss -0.3952 +2026-04-09 15:33:51.946533: val_loss -0.3495 +2026-04-09 15:33:51.949643: Pseudo dice [0.4166, 0.5379, 0.8285, 0.8512, 0.4815, 0.7021, 0.8636] +2026-04-09 15:33:51.951918: Epoch time: 103.65 s +2026-04-09 15:33:53.112651: +2026-04-09 15:33:53.116228: Epoch 794 +2026-04-09 15:33:53.120650: Current learning rate: 0.00241 +2026-04-09 15:35:35.625600: train_loss -0.4125 +2026-04-09 15:35:35.629991: val_loss -0.3498 +2026-04-09 15:35:35.631895: Pseudo dice [0.6103, 0.1801, 0.7693, 0.4441, 0.4049, 0.7554, 0.8619] +2026-04-09 15:35:35.633616: Epoch time: 102.52 s +2026-04-09 15:35:36.817375: +2026-04-09 15:35:36.819320: Epoch 795 +2026-04-09 15:35:36.821020: Current learning rate: 0.0024 +2026-04-09 15:37:18.992457: train_loss -0.3969 +2026-04-09 15:37:18.997649: val_loss -0.3507 +2026-04-09 15:37:18.999720: Pseudo dice [0.3582, 0.3641, 0.6818, 0.8139, 0.476, 0.7026, 0.8826] +2026-04-09 15:37:19.002014: Epoch time: 102.18 s +2026-04-09 15:37:20.150590: +2026-04-09 15:37:20.152576: Epoch 796 +2026-04-09 15:37:20.154468: Current learning rate: 0.00239 +2026-04-09 15:39:02.396250: train_loss -0.3985 +2026-04-09 15:39:02.406418: val_loss -0.3245 +2026-04-09 15:39:02.408741: Pseudo dice [0.5386, 0.4316, 0.4902, 0.7653, 0.1442, 0.7562, 0.5276] +2026-04-09 15:39:02.410489: Epoch time: 102.25 s +2026-04-09 15:39:03.564518: +2026-04-09 15:39:03.567033: Epoch 797 +2026-04-09 15:39:03.568767: Current learning rate: 0.00238 +2026-04-09 15:40:46.369799: train_loss -0.4143 +2026-04-09 15:40:46.375412: val_loss -0.3171 
+2026-04-09 15:40:46.377387: Pseudo dice [0.3325, 0.2557, 0.7864, 0.5749, 0.3176, 0.8171, 0.5567] +2026-04-09 15:40:46.379070: Epoch time: 102.81 s +2026-04-09 15:40:47.542258: +2026-04-09 15:40:47.544380: Epoch 798 +2026-04-09 15:40:47.546216: Current learning rate: 0.00237 +2026-04-09 15:42:31.803793: train_loss -0.4254 +2026-04-09 15:42:31.811073: val_loss -0.3452 +2026-04-09 15:42:31.814701: Pseudo dice [0.4867, 0.5275, 0.644, 0.8362, 0.2713, 0.7888, 0.7266] +2026-04-09 15:42:31.817465: Epoch time: 104.26 s +2026-04-09 15:42:32.977788: +2026-04-09 15:42:32.979802: Epoch 799 +2026-04-09 15:42:32.981697: Current learning rate: 0.00236 +2026-04-09 15:44:18.354286: train_loss -0.4246 +2026-04-09 15:44:18.362831: val_loss -0.3254 +2026-04-09 15:44:18.365485: Pseudo dice [0.3558, 0.2647, 0.8045, 0.2953, 0.3235, 0.6493, 0.9341] +2026-04-09 15:44:18.367952: Epoch time: 105.38 s +2026-04-09 15:44:21.406413: +2026-04-09 15:44:21.411764: Epoch 800 +2026-04-09 15:44:21.416855: Current learning rate: 0.00235 +2026-04-09 15:46:13.650412: train_loss -0.3994 +2026-04-09 15:46:13.656551: val_loss -0.3179 +2026-04-09 15:46:13.658538: Pseudo dice [0.296, 0.0954, 0.6029, 0.244, 0.3518, 0.7369, 0.6817] +2026-04-09 15:46:13.660731: Epoch time: 112.25 s +2026-04-09 15:46:14.828162: +2026-04-09 15:46:14.832226: Epoch 801 +2026-04-09 15:46:14.835654: Current learning rate: 0.00234 +2026-04-09 15:47:56.969770: train_loss -0.4145 +2026-04-09 15:47:56.976578: val_loss -0.3667 +2026-04-09 15:47:56.979013: Pseudo dice [0.6206, 0.4259, 0.8086, 0.6354, 0.3965, 0.6932, 0.6052] +2026-04-09 15:47:56.983139: Epoch time: 102.14 s +2026-04-09 15:47:58.152446: +2026-04-09 15:47:58.154081: Epoch 802 +2026-04-09 15:47:58.155759: Current learning rate: 0.00233 +2026-04-09 15:49:42.942010: train_loss -0.4209 +2026-04-09 15:49:42.965943: val_loss -0.3361 +2026-04-09 15:49:42.970418: Pseudo dice [0.3893, 0.4992, 0.7719, 0.8596, 0.3495, 0.8487, 0.6147] +2026-04-09 15:49:42.974225: Epoch time: 104.79 s 
+2026-04-09 15:49:44.120505: +2026-04-09 15:49:44.126147: Epoch 803 +2026-04-09 15:49:44.131472: Current learning rate: 0.00232 +2026-04-09 15:51:27.078047: train_loss -0.4193 +2026-04-09 15:51:27.083154: val_loss -0.3385 +2026-04-09 15:51:27.085168: Pseudo dice [0.7558, 0.624, 0.5619, 0.0915, 0.4285, 0.7808, 0.6453] +2026-04-09 15:51:27.087620: Epoch time: 102.96 s +2026-04-09 15:51:29.281277: +2026-04-09 15:51:29.287175: Epoch 804 +2026-04-09 15:51:29.290511: Current learning rate: 0.00231 +2026-04-09 15:53:11.456071: train_loss -0.4245 +2026-04-09 15:53:11.461383: val_loss -0.3172 +2026-04-09 15:53:11.464065: Pseudo dice [0.6618, 0.3367, 0.6931, 0.669, 0.1735, 0.7697, 0.6489] +2026-04-09 15:53:11.465696: Epoch time: 102.18 s +2026-04-09 15:53:12.646708: +2026-04-09 15:53:12.649316: Epoch 805 +2026-04-09 15:53:12.651083: Current learning rate: 0.0023 +2026-04-09 15:54:55.832327: train_loss -0.4292 +2026-04-09 15:54:55.838681: val_loss -0.3653 +2026-04-09 15:54:55.841886: Pseudo dice [0.6109, 0.3275, 0.7076, 0.7547, 0.2685, 0.8131, 0.9484] +2026-04-09 15:54:55.847445: Epoch time: 103.19 s +2026-04-09 15:54:57.020378: +2026-04-09 15:54:57.023696: Epoch 806 +2026-04-09 15:54:57.026093: Current learning rate: 0.00229 +2026-04-09 15:56:39.820866: train_loss -0.4165 +2026-04-09 15:56:39.825795: val_loss -0.3386 +2026-04-09 15:56:39.828152: Pseudo dice [0.7619, 0.4788, 0.8441, 0.8469, 0.2899, 0.7179, 0.8762] +2026-04-09 15:56:39.829995: Epoch time: 102.8 s +2026-04-09 15:56:40.984720: +2026-04-09 15:56:40.986995: Epoch 807 +2026-04-09 15:56:40.990290: Current learning rate: 0.00228 +2026-04-09 15:58:24.591532: train_loss -0.4339 +2026-04-09 15:58:24.597304: val_loss -0.2424 +2026-04-09 15:58:24.599912: Pseudo dice [0.3566, 0.3061, 0.4912, 0.2979, 0.1623, 0.6029, 0.3309] +2026-04-09 15:58:24.602905: Epoch time: 103.61 s +2026-04-09 15:58:25.772619: +2026-04-09 15:58:25.774640: Epoch 808 +2026-04-09 15:58:25.776472: Current learning rate: 0.00226 +2026-04-09 
16:00:07.688665: train_loss -0.4124 +2026-04-09 16:00:07.693465: val_loss -0.3486 +2026-04-09 16:00:07.695259: Pseudo dice [0.4231, 0.2839, 0.713, 0.8056, 0.5768, 0.7916, 0.6734] +2026-04-09 16:00:07.697573: Epoch time: 101.92 s +2026-04-09 16:00:08.849958: +2026-04-09 16:00:08.851529: Epoch 809 +2026-04-09 16:00:08.859799: Current learning rate: 0.00225 +2026-04-09 16:01:50.962360: train_loss -0.4259 +2026-04-09 16:01:50.969528: val_loss -0.3577 +2026-04-09 16:01:50.971709: Pseudo dice [0.3694, 0.5055, 0.6893, 0.7213, 0.469, 0.7479, 0.7443] +2026-04-09 16:01:50.974236: Epoch time: 102.12 s +2026-04-09 16:01:52.132199: +2026-04-09 16:01:52.135140: Epoch 810 +2026-04-09 16:01:52.139151: Current learning rate: 0.00224 +2026-04-09 16:03:34.489915: train_loss -0.4246 +2026-04-09 16:03:34.495279: val_loss -0.3614 +2026-04-09 16:03:34.497253: Pseudo dice [0.4636, 0.3581, 0.5572, 0.833, 0.5905, 0.7171, 0.9042] +2026-04-09 16:03:34.499244: Epoch time: 102.36 s +2026-04-09 16:03:35.717967: +2026-04-09 16:03:35.719998: Epoch 811 +2026-04-09 16:03:35.721820: Current learning rate: 0.00223 +2026-04-09 16:05:19.471028: train_loss -0.4173 +2026-04-09 16:05:19.478587: val_loss -0.3369 +2026-04-09 16:05:19.480689: Pseudo dice [0.3215, 0.4429, 0.8313, 0.8903, 0.3872, 0.8401, 0.8843] +2026-04-09 16:05:19.482657: Epoch time: 103.76 s +2026-04-09 16:05:20.649026: +2026-04-09 16:05:20.652054: Epoch 812 +2026-04-09 16:05:20.656867: Current learning rate: 0.00222 +2026-04-09 16:07:04.366195: train_loss -0.4164 +2026-04-09 16:07:04.376633: val_loss -0.3485 +2026-04-09 16:07:04.379105: Pseudo dice [0.3532, 0.3739, 0.7144, 0.8614, 0.4181, 0.6474, 0.8349] +2026-04-09 16:07:04.381826: Epoch time: 103.72 s +2026-04-09 16:07:05.552541: +2026-04-09 16:07:05.554469: Epoch 813 +2026-04-09 16:07:05.556026: Current learning rate: 0.00221 +2026-04-09 16:08:47.796408: train_loss -0.4255 +2026-04-09 16:08:47.800835: val_loss -0.3806 +2026-04-09 16:08:47.803067: Pseudo dice [0.6897, 0.3742, 0.7699, 
0.8884, 0.5774, 0.8367, 0.8572] +2026-04-09 16:08:47.805045: Epoch time: 102.25 s +2026-04-09 16:08:48.949372: +2026-04-09 16:08:48.950932: Epoch 814 +2026-04-09 16:08:48.952378: Current learning rate: 0.0022 +2026-04-09 16:10:32.263038: train_loss -0.4184 +2026-04-09 16:10:32.269225: val_loss -0.3515 +2026-04-09 16:10:32.271665: Pseudo dice [0.4705, 0.3791, 0.7836, 0.2774, 0.2215, 0.8501, 0.7787] +2026-04-09 16:10:32.273881: Epoch time: 103.32 s +2026-04-09 16:10:33.444841: +2026-04-09 16:10:33.447641: Epoch 815 +2026-04-09 16:10:33.449642: Current learning rate: 0.00219 +2026-04-09 16:12:15.671159: train_loss -0.423 +2026-04-09 16:12:15.678089: val_loss -0.3782 +2026-04-09 16:12:15.680587: Pseudo dice [0.5258, 0.2412, 0.6455, 0.544, 0.6495, 0.5596, 0.7771] +2026-04-09 16:12:15.682651: Epoch time: 102.23 s +2026-04-09 16:12:16.840539: +2026-04-09 16:12:16.842228: Epoch 816 +2026-04-09 16:12:16.844096: Current learning rate: 0.00218 +2026-04-09 16:13:59.503589: train_loss -0.4216 +2026-04-09 16:13:59.508960: val_loss -0.331 +2026-04-09 16:13:59.510780: Pseudo dice [0.4388, 0.3077, 0.7282, 0.7128, 0.2602, 0.7231, 0.8045] +2026-04-09 16:13:59.513323: Epoch time: 102.67 s +2026-04-09 16:14:00.679802: +2026-04-09 16:14:00.681426: Epoch 817 +2026-04-09 16:14:00.683087: Current learning rate: 0.00217 +2026-04-09 16:15:44.457380: train_loss -0.4218 +2026-04-09 16:15:44.470716: val_loss -0.3639 +2026-04-09 16:15:44.477274: Pseudo dice [0.4701, 0.3345, 0.784, 0.7961, 0.4776, 0.738, 0.8603] +2026-04-09 16:15:44.480682: Epoch time: 103.78 s +2026-04-09 16:15:45.642059: +2026-04-09 16:15:45.644983: Epoch 818 +2026-04-09 16:15:45.646897: Current learning rate: 0.00216 +2026-04-09 16:17:29.563310: train_loss -0.4272 +2026-04-09 16:17:29.568991: val_loss -0.3435 +2026-04-09 16:17:29.571662: Pseudo dice [0.2381, 0.6365, 0.641, 0.728, 0.4993, 0.8184, 0.7706] +2026-04-09 16:17:29.573922: Epoch time: 103.92 s +2026-04-09 16:17:30.740526: +2026-04-09 16:17:30.742931: Epoch 819 
+2026-04-09 16:17:30.745443: Current learning rate: 0.00215 +2026-04-09 16:19:13.613180: train_loss -0.3905 +2026-04-09 16:19:13.619848: val_loss -0.3385 +2026-04-09 16:19:13.622607: Pseudo dice [0.7532, 0.7093, 0.7576, 0.4566, 0.2945, 0.7019, 0.7356] +2026-04-09 16:19:13.624988: Epoch time: 102.88 s +2026-04-09 16:19:14.731866: +2026-04-09 16:19:14.733805: Epoch 820 +2026-04-09 16:19:14.736060: Current learning rate: 0.00214 +2026-04-09 16:20:59.015306: train_loss -0.4091 +2026-04-09 16:20:59.021116: val_loss -0.3299 +2026-04-09 16:20:59.023787: Pseudo dice [0.7186, 0.5324, 0.6476, 0.4623, 0.4496, 0.6538, 0.6378] +2026-04-09 16:20:59.025816: Epoch time: 104.29 s +2026-04-09 16:21:00.123026: +2026-04-09 16:21:00.125988: Epoch 821 +2026-04-09 16:21:00.128774: Current learning rate: 0.00213 +2026-04-09 16:22:42.774150: train_loss -0.4127 +2026-04-09 16:22:42.780123: val_loss -0.325 +2026-04-09 16:22:42.781876: Pseudo dice [0.2885, 0.2913, 0.8104, 0.849, 0.2954, 0.842, 0.8013] +2026-04-09 16:22:42.783674: Epoch time: 102.65 s +2026-04-09 16:22:43.872965: +2026-04-09 16:22:43.874788: Epoch 822 +2026-04-09 16:22:43.877083: Current learning rate: 0.00212 +2026-04-09 16:24:26.327169: train_loss -0.4213 +2026-04-09 16:24:26.337764: val_loss -0.3545 +2026-04-09 16:24:26.340375: Pseudo dice [0.7884, 0.0387, 0.8251, 0.5292, 0.4219, 0.7745, 0.8459] +2026-04-09 16:24:26.342768: Epoch time: 102.46 s +2026-04-09 16:24:27.420556: +2026-04-09 16:24:27.422740: Epoch 823 +2026-04-09 16:24:27.425385: Current learning rate: 0.0021 +2026-04-09 16:26:09.759527: train_loss -0.422 +2026-04-09 16:26:09.764703: val_loss -0.3644 +2026-04-09 16:26:09.766555: Pseudo dice [0.7655, 0.5944, 0.7221, 0.8984, 0.1248, 0.7353, 0.8783] +2026-04-09 16:26:09.768251: Epoch time: 102.34 s +2026-04-09 16:26:11.902904: +2026-04-09 16:26:11.904640: Epoch 824 +2026-04-09 16:26:11.906629: Current learning rate: 0.00209 +2026-04-09 16:27:55.625087: train_loss -0.4092 +2026-04-09 16:27:55.630987: val_loss -0.3301 
+2026-04-09 16:27:55.633846: Pseudo dice [0.2532, 0.499, 0.6473, 0.8322, 0.4019, 0.6795, 0.8261] +2026-04-09 16:27:55.637056: Epoch time: 103.73 s +2026-04-09 16:27:56.725567: +2026-04-09 16:27:56.728457: Epoch 825 +2026-04-09 16:27:56.730935: Current learning rate: 0.00208 +2026-04-09 16:29:40.019610: train_loss -0.4276 +2026-04-09 16:29:40.025629: val_loss -0.3743 +2026-04-09 16:29:40.027950: Pseudo dice [0.6144, 0.3272, 0.8055, 0.9153, 0.5277, 0.7945, 0.872] +2026-04-09 16:29:40.031357: Epoch time: 103.3 s +2026-04-09 16:29:41.132755: +2026-04-09 16:29:41.135072: Epoch 826 +2026-04-09 16:29:41.138098: Current learning rate: 0.00207 +2026-04-09 16:31:24.908581: train_loss -0.4219 +2026-04-09 16:31:24.913795: val_loss -0.3536 +2026-04-09 16:31:24.916031: Pseudo dice [0.2545, 0.4803, 0.7918, 0.8483, 0.393, 0.7417, 0.9313] +2026-04-09 16:31:24.918086: Epoch time: 103.78 s +2026-04-09 16:31:26.000712: +2026-04-09 16:31:26.002609: Epoch 827 +2026-04-09 16:31:26.004288: Current learning rate: 0.00206 +2026-04-09 16:33:08.447953: train_loss -0.4247 +2026-04-09 16:33:08.461334: val_loss -0.3604 +2026-04-09 16:33:08.465253: Pseudo dice [0.1999, 0.4672, 0.7752, 0.8411, 0.3561, 0.6168, 0.7664] +2026-04-09 16:33:08.467806: Epoch time: 102.45 s +2026-04-09 16:33:09.590638: +2026-04-09 16:33:09.592371: Epoch 828 +2026-04-09 16:33:09.593807: Current learning rate: 0.00205 +2026-04-09 16:34:52.534677: train_loss -0.4252 +2026-04-09 16:34:52.541721: val_loss -0.3796 +2026-04-09 16:34:52.543781: Pseudo dice [0.4778, 0.5889, 0.5087, 0.9216, 0.5593, 0.6552, 0.9444] +2026-04-09 16:34:52.545852: Epoch time: 102.95 s +2026-04-09 16:34:52.548268: Yayy! 
New best EMA pseudo Dice: 0.6167 +2026-04-09 16:34:55.425691: +2026-04-09 16:34:55.428834: Epoch 829 +2026-04-09 16:34:55.430921: Current learning rate: 0.00204 +2026-04-09 16:36:37.659838: train_loss -0.4133 +2026-04-09 16:36:37.665160: val_loss -0.3525 +2026-04-09 16:36:37.667183: Pseudo dice [0.4852, 0.5673, 0.4639, 0.6125, 0.3265, 0.8692, 0.9488] +2026-04-09 16:36:37.668852: Epoch time: 102.24 s +2026-04-09 16:36:38.736121: +2026-04-09 16:36:38.738814: Epoch 830 +2026-04-09 16:36:38.741365: Current learning rate: 0.00203 +2026-04-09 16:38:21.895509: train_loss -0.3956 +2026-04-09 16:38:21.902021: val_loss -0.3294 +2026-04-09 16:38:21.905587: Pseudo dice [0.6127, 0.6396, 0.6304, 0.5829, 0.3034, 0.7883, 0.6823] +2026-04-09 16:38:21.908776: Epoch time: 103.16 s +2026-04-09 16:38:22.985579: +2026-04-09 16:38:22.987474: Epoch 831 +2026-04-09 16:38:22.989846: Current learning rate: 0.00202 +2026-04-09 16:40:05.714859: train_loss -0.4027 +2026-04-09 16:40:05.719764: val_loss -0.3623 +2026-04-09 16:40:05.721585: Pseudo dice [0.604, 0.5559, 0.4487, 0.4576, 0.3243, 0.785, 0.9255] +2026-04-09 16:40:05.723444: Epoch time: 102.73 s +2026-04-09 16:40:06.818289: +2026-04-09 16:40:06.819987: Epoch 832 +2026-04-09 16:40:06.821630: Current learning rate: 0.00201 +2026-04-09 16:41:49.950383: train_loss -0.4322 +2026-04-09 16:41:49.960538: val_loss -0.3615 +2026-04-09 16:41:49.963383: Pseudo dice [0.4088, 0.2278, 0.7727, 0.8234, 0.3368, 0.7536, 0.8414] +2026-04-09 16:41:49.966988: Epoch time: 103.14 s +2026-04-09 16:41:51.041977: +2026-04-09 16:41:51.043905: Epoch 833 +2026-04-09 16:41:51.046579: Current learning rate: 0.002 +2026-04-09 16:43:33.505018: train_loss -0.431 +2026-04-09 16:43:33.511301: val_loss -0.3408 +2026-04-09 16:43:33.513588: Pseudo dice [0.2068, 0.4774, 0.8236, 0.6431, 0.3434, 0.8129, 0.8545] +2026-04-09 16:43:33.516417: Epoch time: 102.47 s +2026-04-09 16:43:34.606706: +2026-04-09 16:43:34.608370: Epoch 834 +2026-04-09 16:43:34.610758: Current learning rate: 
0.00199 +2026-04-09 16:45:17.525979: train_loss -0.4246 +2026-04-09 16:45:17.536106: val_loss -0.3033 +2026-04-09 16:45:17.538116: Pseudo dice [0.502, 0.4698, 0.6529, 0.4298, 0.2913, 0.738, 0.7656] +2026-04-09 16:45:17.540120: Epoch time: 102.92 s +2026-04-09 16:45:18.603425: +2026-04-09 16:45:18.605347: Epoch 835 +2026-04-09 16:45:18.607425: Current learning rate: 0.00198 +2026-04-09 16:47:01.087966: train_loss -0.4397 +2026-04-09 16:47:01.094645: val_loss -0.3314 +2026-04-09 16:47:01.096870: Pseudo dice [0.8185, 0.5004, 0.6203, 0.6239, 0.2469, 0.8582, 0.8489] +2026-04-09 16:47:01.099635: Epoch time: 102.49 s +2026-04-09 16:47:02.227503: +2026-04-09 16:47:02.229375: Epoch 836 +2026-04-09 16:47:02.230940: Current learning rate: 0.00196 +2026-04-09 16:48:44.881497: train_loss -0.4343 +2026-04-09 16:48:44.889354: val_loss -0.3636 +2026-04-09 16:48:44.891986: Pseudo dice [0.4713, 0.4927, 0.7664, 0.7601, 0.408, 0.8593, 0.8737] +2026-04-09 16:48:44.894074: Epoch time: 102.66 s +2026-04-09 16:48:45.986525: +2026-04-09 16:48:45.988212: Epoch 837 +2026-04-09 16:48:45.990236: Current learning rate: 0.00195 +2026-04-09 16:50:29.508000: train_loss -0.4225 +2026-04-09 16:50:29.514654: val_loss -0.3363 +2026-04-09 16:50:29.517028: Pseudo dice [0.4321, 0.1739, 0.6031, 0.6944, 0.5106, 0.788, 0.9137] +2026-04-09 16:50:29.519851: Epoch time: 103.52 s +2026-04-09 16:50:30.616587: +2026-04-09 16:50:30.618239: Epoch 838 +2026-04-09 16:50:30.619749: Current learning rate: 0.00194 +2026-04-09 16:52:13.358032: train_loss -0.4256 +2026-04-09 16:52:13.367155: val_loss -0.308 +2026-04-09 16:52:13.369616: Pseudo dice [0.4068, 0.3122, 0.7314, 0.7012, 0.2711, 0.7867, 0.5452] +2026-04-09 16:52:13.373544: Epoch time: 102.74 s +2026-04-09 16:52:14.450246: +2026-04-09 16:52:14.451926: Epoch 839 +2026-04-09 16:52:14.454555: Current learning rate: 0.00193 +2026-04-09 16:54:03.718035: train_loss -0.4424 +2026-04-09 16:54:03.724903: val_loss -0.3729 +2026-04-09 16:54:03.727054: Pseudo dice [0.7438, 
0.4367, 0.7836, 0.7796, 0.4746, 0.4605, 0.8942] +2026-04-09 16:54:03.729472: Epoch time: 109.27 s +2026-04-09 16:54:04.848066: +2026-04-09 16:54:04.850389: Epoch 840 +2026-04-09 16:54:04.853346: Current learning rate: 0.00192 +2026-04-09 16:55:47.747280: train_loss -0.4347 +2026-04-09 16:55:47.765178: val_loss -0.3514 +2026-04-09 16:55:47.767384: Pseudo dice [0.3958, 0.4586, 0.6651, 0.7824, 0.3189, 0.6691, 0.8903] +2026-04-09 16:55:47.769247: Epoch time: 102.9 s +2026-04-09 16:55:48.865014: +2026-04-09 16:55:48.867788: Epoch 841 +2026-04-09 16:55:48.870302: Current learning rate: 0.00191 +2026-04-09 16:57:31.373799: train_loss -0.4265 +2026-04-09 16:57:31.385061: val_loss -0.3127 +2026-04-09 16:57:31.386871: Pseudo dice [0.3238, 0.2138, 0.5724, 0.0992, 0.2636, 0.8274, 0.5802] +2026-04-09 16:57:31.388966: Epoch time: 102.51 s +2026-04-09 16:57:32.483290: +2026-04-09 16:57:32.485365: Epoch 842 +2026-04-09 16:57:32.488051: Current learning rate: 0.0019 +2026-04-09 16:59:16.790787: train_loss -0.4283 +2026-04-09 16:59:16.796229: val_loss -0.3075 +2026-04-09 16:59:16.798963: Pseudo dice [0.757, 0.2359, 0.4483, 0.8005, 0.2505, 0.6584, 0.8949] +2026-04-09 16:59:16.801004: Epoch time: 104.31 s +2026-04-09 16:59:17.904128: +2026-04-09 16:59:17.905770: Epoch 843 +2026-04-09 16:59:17.907805: Current learning rate: 0.00189 +2026-04-09 17:01:02.179572: train_loss -0.4391 +2026-04-09 17:01:02.186128: val_loss -0.2872 +2026-04-09 17:01:02.189675: Pseudo dice [0.2989, 0.5253, 0.6906, 0.7678, 0.1538, 0.7307, 0.5881] +2026-04-09 17:01:02.191894: Epoch time: 104.28 s +2026-04-09 17:01:03.281391: +2026-04-09 17:01:03.283055: Epoch 844 +2026-04-09 17:01:03.285008: Current learning rate: 0.00188 +2026-04-09 17:02:47.811647: train_loss -0.4294 +2026-04-09 17:02:47.818012: val_loss -0.372 +2026-04-09 17:02:47.820041: Pseudo dice [0.8564, 0.3476, 0.866, 0.9253, 0.458, 0.7543, 0.874] +2026-04-09 17:02:47.825449: Epoch time: 104.53 s +2026-04-09 17:02:48.947560: +2026-04-09 17:02:48.952849: 
Epoch 845 +2026-04-09 17:02:48.958140: Current learning rate: 0.00187 +2026-04-09 17:04:31.175446: train_loss -0.4358 +2026-04-09 17:04:31.181951: val_loss -0.3269 +2026-04-09 17:04:31.183730: Pseudo dice [0.3607, 0.1993, 0.8194, 0.847, 0.2755, 0.7738, 0.8487] +2026-04-09 17:04:31.186650: Epoch time: 102.23 s +2026-04-09 17:04:32.271891: +2026-04-09 17:04:32.273524: Epoch 846 +2026-04-09 17:04:32.275361: Current learning rate: 0.00186 +2026-04-09 17:06:15.339986: train_loss -0.4326 +2026-04-09 17:06:15.347358: val_loss -0.3378 +2026-04-09 17:06:15.349672: Pseudo dice [0.3871, 0.4847, 0.8069, 0.7718, 0.3886, 0.8115, 0.692] +2026-04-09 17:06:15.353509: Epoch time: 103.07 s +2026-04-09 17:06:16.430795: +2026-04-09 17:06:16.433293: Epoch 847 +2026-04-09 17:06:16.435147: Current learning rate: 0.00185 +2026-04-09 17:07:59.443062: train_loss -0.4257 +2026-04-09 17:07:59.447747: val_loss -0.3596 +2026-04-09 17:07:59.449605: Pseudo dice [0.7474, 0.4162, 0.7809, 0.5139, 0.2932, 0.6971, 0.8515] +2026-04-09 17:07:59.451743: Epoch time: 103.02 s +2026-04-09 17:08:00.537241: +2026-04-09 17:08:00.541003: Epoch 848 +2026-04-09 17:08:00.542776: Current learning rate: 0.00184 +2026-04-09 17:09:43.761336: train_loss -0.435 +2026-04-09 17:09:43.765984: val_loss -0.3529 +2026-04-09 17:09:43.767770: Pseudo dice [0.2641, 0.432, 0.7689, 0.7001, 0.2696, 0.6123, 0.8788] +2026-04-09 17:09:43.769429: Epoch time: 103.23 s +2026-04-09 17:09:44.888143: +2026-04-09 17:09:44.891590: Epoch 849 +2026-04-09 17:09:44.893769: Current learning rate: 0.00182 +2026-04-09 17:11:28.514866: train_loss -0.4233 +2026-04-09 17:11:28.528748: val_loss -0.3604 +2026-04-09 17:11:28.530719: Pseudo dice [0.4477, 0.4766, 0.7706, 0.6527, 0.4734, 0.8665, 0.9218] +2026-04-09 17:11:28.532911: Epoch time: 103.63 s +2026-04-09 17:11:31.685105: +2026-04-09 17:11:31.690403: Epoch 850 +2026-04-09 17:11:31.696153: Current learning rate: 0.00181 +2026-04-09 17:13:14.635198: train_loss -0.4326 +2026-04-09 17:13:14.640344: 
val_loss -0.3589 +2026-04-09 17:13:14.642230: Pseudo dice [0.5193, 0.4925, 0.7515, 0.757, 0.4846, 0.6315, 0.6768] +2026-04-09 17:13:14.646628: Epoch time: 102.95 s +2026-04-09 17:13:15.716159: +2026-04-09 17:13:15.719148: Epoch 851 +2026-04-09 17:13:15.721482: Current learning rate: 0.0018 +2026-04-09 17:14:58.756101: train_loss -0.4273 +2026-04-09 17:14:58.762255: val_loss -0.3729 +2026-04-09 17:14:58.764287: Pseudo dice [0.4227, 0.1503, 0.6621, 0.8843, 0.4331, 0.7646, 0.8453] +2026-04-09 17:14:58.766689: Epoch time: 103.04 s +2026-04-09 17:14:59.872706: +2026-04-09 17:14:59.874805: Epoch 852 +2026-04-09 17:14:59.877277: Current learning rate: 0.00179 +2026-04-09 17:16:42.835038: train_loss -0.4349 +2026-04-09 17:16:42.842962: val_loss -0.3679 +2026-04-09 17:16:42.845404: Pseudo dice [0.2356, 0.552, 0.7783, 0.6545, 0.4221, 0.7109, 0.8727] +2026-04-09 17:16:42.847620: Epoch time: 102.97 s +2026-04-09 17:16:43.927805: +2026-04-09 17:16:43.929947: Epoch 853 +2026-04-09 17:16:43.932257: Current learning rate: 0.00178 +2026-04-09 17:18:26.653704: train_loss -0.429 +2026-04-09 17:18:26.658283: val_loss -0.3666 +2026-04-09 17:18:26.660214: Pseudo dice [0.5106, 0.3116, 0.6851, 0.8197, 0.4317, 0.7986, 0.605] +2026-04-09 17:18:26.662450: Epoch time: 102.73 s +2026-04-09 17:18:27.740160: +2026-04-09 17:18:27.741986: Epoch 854 +2026-04-09 17:18:27.743557: Current learning rate: 0.00177 +2026-04-09 17:20:10.396410: train_loss -0.4332 +2026-04-09 17:20:10.402985: val_loss -0.3655 +2026-04-09 17:20:10.405139: Pseudo dice [0.7139, 0.2001, 0.5689, 0.8845, 0.6117, 0.724, 0.6972] +2026-04-09 17:20:10.408534: Epoch time: 102.66 s +2026-04-09 17:20:11.480499: +2026-04-09 17:20:11.483186: Epoch 855 +2026-04-09 17:20:11.485055: Current learning rate: 0.00176 +2026-04-09 17:21:55.319657: train_loss -0.4086 +2026-04-09 17:21:55.325123: val_loss -0.3295 +2026-04-09 17:21:55.327285: Pseudo dice [0.2797, 0.237, 0.4648, 0.6705, 0.1242, 0.7611, 0.828] +2026-04-09 17:21:55.330070: Epoch time: 
103.84 s +2026-04-09 17:21:56.424633: +2026-04-09 17:21:56.427495: Epoch 856 +2026-04-09 17:21:56.430309: Current learning rate: 0.00175 +2026-04-09 17:23:41.035969: train_loss -0.4097 +2026-04-09 17:23:41.041670: val_loss -0.3651 +2026-04-09 17:23:41.043911: Pseudo dice [0.7218, 0.4448, 0.6846, 0.5101, 0.4073, 0.7262, 0.7917] +2026-04-09 17:23:41.045861: Epoch time: 104.62 s +2026-04-09 17:23:42.144294: +2026-04-09 17:23:42.155864: Epoch 857 +2026-04-09 17:23:42.157717: Current learning rate: 0.00174 +2026-04-09 17:25:24.286685: train_loss -0.4192 +2026-04-09 17:25:24.292091: val_loss -0.3636 +2026-04-09 17:25:24.295310: Pseudo dice [0.4577, 0.4867, 0.7322, 0.6171, 0.4365, 0.6957, 0.9105] +2026-04-09 17:25:24.297683: Epoch time: 102.15 s +2026-04-09 17:25:25.399967: +2026-04-09 17:25:25.403958: Epoch 858 +2026-04-09 17:25:25.408462: Current learning rate: 0.00173 +2026-04-09 17:27:10.350047: train_loss -0.4381 +2026-04-09 17:27:10.358464: val_loss -0.3984 +2026-04-09 17:27:10.361084: Pseudo dice [0.6722, 0.4864, 0.78, 0.7678, 0.6196, 0.7774, 0.8862] +2026-04-09 17:27:10.363448: Epoch time: 104.95 s +2026-04-09 17:27:11.464979: +2026-04-09 17:27:11.466706: Epoch 859 +2026-04-09 17:27:11.468734: Current learning rate: 0.00172 +2026-04-09 17:28:54.045668: train_loss -0.4405 +2026-04-09 17:28:54.051670: val_loss -0.3623 +2026-04-09 17:28:54.055390: Pseudo dice [0.5605, 0.3735, 0.61, 0.5752, 0.5503, 0.731, 0.9062] +2026-04-09 17:28:54.057961: Epoch time: 102.58 s +2026-04-09 17:28:55.166287: +2026-04-09 17:28:55.170275: Epoch 860 +2026-04-09 17:28:55.172585: Current learning rate: 0.0017 +2026-04-09 17:30:39.655225: train_loss -0.4217 +2026-04-09 17:30:39.661287: val_loss -0.3661 +2026-04-09 17:30:39.663892: Pseudo dice [0.4227, 0.4734, 0.8637, 0.8088, 0.4761, 0.8144, 0.7968] +2026-04-09 17:30:39.666097: Epoch time: 104.49 s +2026-04-09 17:30:40.763818: +2026-04-09 17:30:40.765970: Epoch 861 +2026-04-09 17:30:40.767945: Current learning rate: 0.00169 +2026-04-09 
17:32:22.966690: train_loss -0.4308 +2026-04-09 17:32:22.971814: val_loss -0.3362 +2026-04-09 17:32:22.975011: Pseudo dice [0.6904, 0.4313, 0.5502, 0.6944, 0.1987, 0.8014, 0.6455] +2026-04-09 17:32:22.976838: Epoch time: 102.21 s +2026-04-09 17:32:24.051427: +2026-04-09 17:32:24.053848: Epoch 862 +2026-04-09 17:32:24.056842: Current learning rate: 0.00168 +2026-04-09 17:34:05.995011: train_loss -0.4372 +2026-04-09 17:34:06.000459: val_loss -0.3812 +2026-04-09 17:34:06.002346: Pseudo dice [0.6859, 0.0365, 0.8475, 0.911, 0.502, 0.7509, 0.8104] +2026-04-09 17:34:06.004621: Epoch time: 101.95 s +2026-04-09 17:34:07.082164: +2026-04-09 17:34:07.083998: Epoch 863 +2026-04-09 17:34:07.086317: Current learning rate: 0.00167 +2026-04-09 17:35:53.682639: train_loss -0.4339 +2026-04-09 17:35:53.689317: val_loss -0.3521 +2026-04-09 17:35:53.693225: Pseudo dice [0.357, 0.5266, 0.801, 0.8005, 0.5076, 0.6617, 0.8344] +2026-04-09 17:35:53.695569: Epoch time: 106.6 s +2026-04-09 17:35:53.698226: Yayy! New best EMA pseudo Dice: 0.617 +2026-04-09 17:35:56.730971: +2026-04-09 17:35:56.733298: Epoch 864 +2026-04-09 17:35:56.739685: Current learning rate: 0.00166 +2026-04-09 17:37:39.691067: train_loss -0.4242 +2026-04-09 17:37:39.698774: val_loss -0.3737 +2026-04-09 17:37:39.702233: Pseudo dice [0.4607, 0.436, 0.6189, 0.9164, 0.4676, 0.7592, 0.7989] +2026-04-09 17:37:39.704581: Epoch time: 102.96 s +2026-04-09 17:37:39.706309: Yayy! New best EMA pseudo Dice: 0.619 +2026-04-09 17:37:42.502008: +2026-04-09 17:37:42.504717: Epoch 865 +2026-04-09 17:37:42.506235: Current learning rate: 0.00165 +2026-04-09 17:39:25.594973: train_loss -0.4421 +2026-04-09 17:39:25.601741: val_loss -0.3946 +2026-04-09 17:39:25.604207: Pseudo dice [0.8073, 0.5401, 0.6846, 0.7388, 0.5975, 0.7226, 0.8955] +2026-04-09 17:39:25.606774: Epoch time: 103.1 s +2026-04-09 17:39:25.609279: Yayy! 
New best EMA pseudo Dice: 0.6283 +2026-04-09 17:39:28.503543: +2026-04-09 17:39:28.505953: Epoch 866 +2026-04-09 17:39:28.507620: Current learning rate: 0.00164 +2026-04-09 17:41:13.705726: train_loss -0.4367 +2026-04-09 17:41:13.716241: val_loss -0.3907 +2026-04-09 17:41:13.722286: Pseudo dice [0.6734, 0.4761, 0.8154, 0.8533, 0.3712, 0.8889, 0.9375] +2026-04-09 17:41:13.726035: Epoch time: 105.21 s +2026-04-09 17:41:13.730491: Yayy! New best EMA pseudo Dice: 0.6371 +2026-04-09 17:41:16.807967: +2026-04-09 17:41:16.812160: Epoch 867 +2026-04-09 17:41:16.814897: Current learning rate: 0.00163 +2026-04-09 17:43:05.081874: train_loss -0.4369 +2026-04-09 17:43:05.088983: val_loss -0.3594 +2026-04-09 17:43:05.091096: Pseudo dice [0.3606, 0.5388, 0.7303, 0.9296, 0.4222, 0.7456, 0.9265] +2026-04-09 17:43:05.093272: Epoch time: 108.28 s +2026-04-09 17:43:05.095906: Yayy! New best EMA pseudo Dice: 0.6399 +2026-04-09 17:43:07.766121: +2026-04-09 17:43:07.768558: Epoch 868 +2026-04-09 17:43:07.774438: Current learning rate: 0.00162 +2026-04-09 17:45:00.475664: train_loss -0.4396 +2026-04-09 17:45:00.485427: val_loss -0.3487 +2026-04-09 17:45:00.488804: Pseudo dice [0.334, 0.0882, 0.7769, 0.782, 0.552, 0.7967, 0.8866] +2026-04-09 17:45:00.492105: Epoch time: 112.71 s +2026-04-09 17:45:01.628960: +2026-04-09 17:45:01.635956: Epoch 869 +2026-04-09 17:45:01.641311: Current learning rate: 0.00161 +2026-04-09 17:46:52.296235: train_loss -0.4325 +2026-04-09 17:46:52.303004: val_loss -0.3878 +2026-04-09 17:46:52.306099: Pseudo dice [0.4305, 0.7318, 0.8606, 0.8629, 0.4698, 0.8472, 0.8473] +2026-04-09 17:46:52.308619: Epoch time: 110.67 s +2026-04-09 17:46:52.312212: Yayy! 
New best EMA pseudo Dice: 0.6447 +2026-04-09 17:46:55.295505: +2026-04-09 17:46:55.297496: Epoch 870 +2026-04-09 17:46:55.299570: Current learning rate: 0.00159 +2026-04-09 17:48:53.327963: train_loss -0.4332 +2026-04-09 17:48:53.336955: val_loss -0.3858 +2026-04-09 17:48:53.340258: Pseudo dice [0.7571, 0.6204, 0.6879, 0.7464, 0.3816, 0.8394, 0.8432] +2026-04-09 17:48:53.342899: Epoch time: 118.04 s +2026-04-09 17:48:53.345765: Yayy! New best EMA pseudo Dice: 0.6499 +2026-04-09 17:48:56.294449: +2026-04-09 17:48:56.297020: Epoch 871 +2026-04-09 17:48:56.299400: Current learning rate: 0.00158 +2026-04-09 17:50:47.144247: train_loss -0.4282 +2026-04-09 17:50:47.152201: val_loss -0.3732 +2026-04-09 17:50:47.156262: Pseudo dice [0.3205, 0.348, 0.8335, 0.8082, 0.5088, 0.8121, 0.8991] +2026-04-09 17:50:47.160614: Epoch time: 110.85 s +2026-04-09 17:50:48.260207: +2026-04-09 17:50:48.263667: Epoch 872 +2026-04-09 17:50:48.266846: Current learning rate: 0.00157 +2026-04-09 17:52:33.544140: train_loss -0.4283 +2026-04-09 17:52:33.551928: val_loss -0.3633 +2026-04-09 17:52:33.555468: Pseudo dice [0.6184, 0.6479, 0.7334, 0.4762, 0.4601, 0.5839, 0.9319] +2026-04-09 17:52:33.558115: Epoch time: 105.29 s +2026-04-09 17:52:34.684016: +2026-04-09 17:52:34.693116: Epoch 873 +2026-04-09 17:52:34.697613: Current learning rate: 0.00156 +2026-04-09 17:54:20.555892: train_loss -0.4297 +2026-04-09 17:54:20.562211: val_loss -0.3635 +2026-04-09 17:54:20.564984: Pseudo dice [0.7599, 0.4898, 0.7011, 0.8672, 0.4622, 0.7909, 0.9118] +2026-04-09 17:54:20.567493: Epoch time: 105.88 s +2026-04-09 17:54:20.570005: Yayy! 
New best EMA pseudo Dice: 0.6546 +2026-04-09 17:54:23.829901: +2026-04-09 17:54:23.835804: Epoch 874 +2026-04-09 17:54:23.840984: Current learning rate: 0.00155 +2026-04-09 17:56:08.909017: train_loss -0.4389 +2026-04-09 17:56:08.915398: val_loss -0.3797 +2026-04-09 17:56:08.918186: Pseudo dice [0.5266, 0.5232, 0.8064, 0.4725, 0.5726, 0.7939, 0.8659] +2026-04-09 17:56:08.922465: Epoch time: 105.08 s +2026-04-09 17:56:10.014408: +2026-04-09 17:56:10.017496: Epoch 875 +2026-04-09 17:56:10.021923: Current learning rate: 0.00154 +2026-04-09 17:57:57.558164: train_loss -0.445 +2026-04-09 17:57:57.564952: val_loss -0.3588 +2026-04-09 17:57:57.567824: Pseudo dice [0.6378, 0.54, 0.8426, 0.6608, 0.3039, 0.7982, 0.9509] +2026-04-09 17:57:57.570395: Epoch time: 107.55 s +2026-04-09 17:57:57.573824: Yayy! New best EMA pseudo Dice: 0.6565 +2026-04-09 17:58:00.759367: +2026-04-09 17:58:00.761582: Epoch 876 +2026-04-09 17:58:00.767447: Current learning rate: 0.00153 +2026-04-09 17:59:47.793328: train_loss -0.4323 +2026-04-09 17:59:47.804502: val_loss -0.3537 +2026-04-09 17:59:47.806896: Pseudo dice [0.5489, 0.6608, 0.7973, 0.5428, 0.4028, 0.7835, 0.7798] +2026-04-09 17:59:47.809648: Epoch time: 107.04 s +2026-04-09 17:59:48.956087: +2026-04-09 17:59:48.959460: Epoch 877 +2026-04-09 17:59:48.962649: Current learning rate: 0.00152 +2026-04-09 18:01:44.016996: train_loss -0.4433 +2026-04-09 18:01:44.027128: val_loss -0.3549 +2026-04-09 18:01:44.030542: Pseudo dice [0.3052, 0.3218, 0.7309, 0.8823, 0.289, 0.8954, 0.8661] +2026-04-09 18:01:44.035167: Epoch time: 115.06 s +2026-04-09 18:01:45.168542: +2026-04-09 18:01:45.170557: Epoch 878 +2026-04-09 18:01:45.172980: Current learning rate: 0.00151 +2026-04-09 18:03:31.539744: train_loss -0.4381 +2026-04-09 18:03:31.546307: val_loss -0.3834 +2026-04-09 18:03:31.548542: Pseudo dice [0.4195, 0.1164, 0.8347, 0.6291, 0.5686, 0.7207, 0.8761] +2026-04-09 18:03:31.551029: Epoch time: 106.37 s +2026-04-09 18:03:32.670977: +2026-04-09 
18:03:32.675229: Epoch 879 +2026-04-09 18:03:32.679505: Current learning rate: 0.00149 +2026-04-09 18:05:18.052723: train_loss -0.4266 +2026-04-09 18:05:18.060317: val_loss -0.3613 +2026-04-09 18:05:18.063739: Pseudo dice [0.6777, 0.3909, 0.5515, 0.5585, 0.3039, 0.7584, 0.9091] +2026-04-09 18:05:18.067731: Epoch time: 105.38 s +2026-04-09 18:05:20.319308: +2026-04-09 18:05:20.321687: Epoch 880 +2026-04-09 18:05:20.324227: Current learning rate: 0.00148 +2026-04-09 18:07:04.195757: train_loss -0.4268 +2026-04-09 18:07:04.202370: val_loss -0.3626 +2026-04-09 18:07:04.205727: Pseudo dice [0.3203, 0.5627, 0.6349, 0.8727, 0.5533, 0.7034, 0.8003] +2026-04-09 18:07:04.208706: Epoch time: 103.88 s +2026-04-09 18:07:05.316695: +2026-04-09 18:07:05.320796: Epoch 881 +2026-04-09 18:07:05.323346: Current learning rate: 0.00147 +2026-04-09 18:08:49.823226: train_loss -0.432 +2026-04-09 18:08:49.831189: val_loss -0.3613 +2026-04-09 18:08:49.834757: Pseudo dice [0.3724, 0.6078, 0.8286, 0.8793, 0.4276, 0.8096, 0.8269] +2026-04-09 18:08:49.838461: Epoch time: 104.51 s +2026-04-09 18:08:50.928188: +2026-04-09 18:08:50.932784: Epoch 882 +2026-04-09 18:08:50.936028: Current learning rate: 0.00146 +2026-04-09 18:11:32.098877: train_loss -0.4278 +2026-04-09 18:11:32.117992: val_loss -0.3306 +2026-04-09 18:11:32.124310: Pseudo dice [0.3009, 0.5213, 0.6926, 0.6797, 0.3615, 0.8247, 0.9003] +2026-04-09 18:11:32.130351: Epoch time: 161.17 s +2026-04-09 18:11:33.239469: +2026-04-09 18:11:33.251138: Epoch 883 +2026-04-09 18:11:33.256945: Current learning rate: 0.00145 +2026-04-09 18:14:51.411987: train_loss -0.4445 +2026-04-09 18:14:51.431201: val_loss -0.37 +2026-04-09 18:14:51.437402: Pseudo dice [0.6697, 0.478, 0.803, 0.3677, 0.4774, 0.7978, 0.9115] +2026-04-09 18:14:51.444426: Epoch time: 198.18 s +2026-04-09 18:14:52.598338: +2026-04-09 18:14:52.606028: Epoch 884 +2026-04-09 18:14:52.615971: Current learning rate: 0.00144 +2026-04-09 18:18:21.235609: train_loss -0.4416 +2026-04-09 
18:18:21.255852: val_loss -0.3653 +2026-04-09 18:18:21.265785: Pseudo dice [0.7537, 0.562, 0.8237, 0.8509, 0.5236, 0.7612, 0.5112] +2026-04-09 18:18:21.274090: Epoch time: 208.64 s +2026-04-09 18:18:22.390116: +2026-04-09 18:18:22.396285: Epoch 885 +2026-04-09 18:18:22.402023: Current learning rate: 0.00143 +2026-04-09 18:21:49.146496: train_loss -0.4388 +2026-04-09 18:21:49.172958: val_loss -0.3834 +2026-04-09 18:21:49.183144: Pseudo dice [0.3568, 0.565, 0.5837, 0.2011, 0.6105, 0.7087, 0.893] +2026-04-09 18:21:49.195083: Epoch time: 206.76 s +2026-04-09 18:21:50.347283: +2026-04-09 18:21:50.357399: Epoch 886 +2026-04-09 18:21:50.364205: Current learning rate: 0.00142 +2026-04-09 18:25:14.896088: train_loss -0.4506 +2026-04-09 18:25:14.913618: val_loss -0.3662 +2026-04-09 18:25:14.920640: Pseudo dice [0.3079, 0.4458, 0.753, 0.7624, 0.5219, 0.7839, 0.854] +2026-04-09 18:25:14.925771: Epoch time: 204.55 s +2026-04-09 18:25:16.028995: +2026-04-09 18:25:16.034656: Epoch 887 +2026-04-09 18:25:16.040263: Current learning rate: 0.00141 +2026-04-09 18:28:35.487629: train_loss -0.455 +2026-04-09 18:28:35.507730: val_loss -0.3838 +2026-04-09 18:28:35.514874: Pseudo dice [0.6165, 0.4944, 0.8435, 0.8437, 0.5356, 0.8129, 0.7372] +2026-04-09 18:28:35.521042: Epoch time: 199.46 s +2026-04-09 18:28:36.663152: +2026-04-09 18:28:36.671616: Epoch 888 +2026-04-09 18:28:36.678105: Current learning rate: 0.00139 +2026-04-09 18:31:58.965136: train_loss -0.4403 +2026-04-09 18:31:58.985300: val_loss -0.3331 +2026-04-09 18:31:58.990994: Pseudo dice [0.51, 0.4266, 0.7391, 0.84, 0.419, 0.4845, 0.7996] +2026-04-09 18:31:58.999216: Epoch time: 202.31 s +2026-04-09 18:32:00.147562: +2026-04-09 18:32:00.153754: Epoch 889 +2026-04-09 18:32:00.160312: Current learning rate: 0.00138 +2026-04-09 18:35:09.076628: train_loss -0.446 +2026-04-09 18:35:09.099413: val_loss -0.3903 +2026-04-09 18:35:09.105226: Pseudo dice [0.7125, 0.5118, 0.842, 0.8049, 0.5218, 0.8223, 0.4437] +2026-04-09 18:35:09.112848: 
Epoch time: 188.93 s +2026-04-09 18:35:10.231290: +2026-04-09 18:35:10.237656: Epoch 890 +2026-04-09 18:35:10.242873: Current learning rate: 0.00137 +2026-04-09 18:38:26.153666: train_loss -0.4316 +2026-04-09 18:38:26.176327: val_loss -0.3587 +2026-04-09 18:38:26.182774: Pseudo dice [0.1866, 0.6033, 0.7386, 0.5722, 0.4565, 0.7842, 0.8345] +2026-04-09 18:38:26.189795: Epoch time: 195.93 s +2026-04-09 18:38:27.325968: +2026-04-09 18:38:27.333019: Epoch 891 +2026-04-09 18:38:27.340744: Current learning rate: 0.00136 +2026-04-09 18:41:42.303025: train_loss -0.4499 +2026-04-09 18:41:42.322144: val_loss -0.3883 +2026-04-09 18:41:42.328711: Pseudo dice [0.7287, 0.6988, 0.5951, 0.9002, 0.5818, 0.8293, 0.867] +2026-04-09 18:41:42.336035: Epoch time: 194.98 s +2026-04-09 18:41:43.458152: +2026-04-09 18:41:43.463813: Epoch 892 +2026-04-09 18:41:43.470209: Current learning rate: 0.00135 +2026-04-09 18:45:04.946909: train_loss -0.4376 +2026-04-09 18:45:04.968640: val_loss -0.3546 +2026-04-09 18:45:04.974606: Pseudo dice [0.3683, 0.0457, 0.8288, 0.8449, 0.5469, 0.7951, 0.7403] +2026-04-09 18:45:04.980398: Epoch time: 201.49 s +2026-04-09 18:45:06.119701: +2026-04-09 18:45:06.125831: Epoch 893 +2026-04-09 18:45:06.131597: Current learning rate: 0.00134 +2026-04-09 18:48:27.524643: train_loss -0.4437 +2026-04-09 18:48:27.549425: val_loss -0.3653 +2026-04-09 18:48:27.557466: Pseudo dice [0.4409, 0.3955, 0.2497, 0.8867, 0.5148, 0.3069, 0.8968] +2026-04-09 18:48:27.568640: Epoch time: 201.41 s +2026-04-09 18:48:28.925737: +2026-04-09 18:48:28.932533: Epoch 894 +2026-04-09 18:48:28.941969: Current learning rate: 0.00133 +2026-04-09 18:51:44.464589: train_loss -0.4454 +2026-04-09 18:51:44.483551: val_loss -0.3725 +2026-04-09 18:51:44.489330: Pseudo dice [0.7441, 0.3542, 0.7978, 0.9265, 0.4558, 0.8105, 0.8124] +2026-04-09 18:51:44.496434: Epoch time: 195.54 s +2026-04-09 18:51:45.626504: +2026-04-09 18:51:45.635272: Epoch 895 +2026-04-09 18:51:45.644218: Current learning rate: 0.00132 
+2026-04-09 18:54:49.819470: train_loss -0.4438 +2026-04-09 18:54:49.835423: val_loss -0.4007 +2026-04-09 18:54:49.842462: Pseudo dice [0.5749, 0.4054, 0.6802, 0.8902, 0.5016, 0.7654, 0.8422] +2026-04-09 18:54:49.848951: Epoch time: 184.2 s +2026-04-09 18:54:50.978323: +2026-04-09 18:54:50.984790: Epoch 896 +2026-04-09 18:54:50.988727: Current learning rate: 0.0013 +2026-04-09 18:58:05.053178: train_loss -0.4421 +2026-04-09 18:58:05.073931: val_loss -0.3746 +2026-04-09 18:58:05.080504: Pseudo dice [0.793, 0.2677, 0.7175, 0.9259, 0.5994, 0.5495, 0.7294] +2026-04-09 18:58:05.088193: Epoch time: 194.08 s +2026-04-09 18:58:06.241284: +2026-04-09 18:58:06.250544: Epoch 897 +2026-04-09 18:58:06.257441: Current learning rate: 0.00129 +2026-04-09 19:01:23.067442: train_loss -0.4335 +2026-04-09 19:01:23.091275: val_loss -0.3845 +2026-04-09 19:01:23.102325: Pseudo dice [0.6394, 0.6202, 0.7484, 0.8008, 0.6286, 0.7972, 0.8929] +2026-04-09 19:01:23.110530: Epoch time: 196.83 s +2026-04-09 19:01:24.249611: +2026-04-09 19:01:24.256306: Epoch 898 +2026-04-09 19:01:24.263805: Current learning rate: 0.00128 +2026-04-09 19:05:01.831672: train_loss -0.4436 +2026-04-09 19:05:01.853258: val_loss -0.3731 +2026-04-09 19:05:01.860953: Pseudo dice [0.4748, 0.6072, 0.6981, 0.9198, 0.5286, 0.8036, 0.8142] +2026-04-09 19:05:01.867791: Epoch time: 217.59 s +2026-04-09 19:05:03.013434: +2026-04-09 19:05:03.019965: Epoch 899 +2026-04-09 19:05:03.027184: Current learning rate: 0.00127 +2026-04-09 19:08:35.166235: train_loss -0.4504 +2026-04-09 19:08:35.185360: val_loss -0.372 +2026-04-09 19:08:35.191934: Pseudo dice [0.7756, 0.6156, 0.7758, 0.8107, 0.5774, 0.7214, 0.7365] +2026-04-09 19:08:35.199137: Epoch time: 212.16 s +2026-04-09 19:08:37.798671: Yayy! 
New best EMA pseudo Dice: 0.6611 +2026-04-09 19:08:42.506203: +2026-04-09 19:08:42.509934: Epoch 900 +2026-04-09 19:08:42.514732: Current learning rate: 0.00126 +2026-04-09 19:12:04.600414: train_loss -0.451 +2026-04-09 19:12:04.619076: val_loss -0.3432 +2026-04-09 19:12:04.629069: Pseudo dice [0.4651, 0.5924, 0.7387, 0.8209, 0.4527, 0.772, 0.5587] +2026-04-09 19:12:04.636131: Epoch time: 202.1 s +2026-04-09 19:12:05.743513: +2026-04-09 19:12:05.749418: Epoch 901 +2026-04-09 19:12:05.755038: Current learning rate: 0.00125 +2026-04-09 19:15:42.987965: train_loss -0.441 +2026-04-09 19:15:43.007528: val_loss -0.3569 +2026-04-09 19:15:43.015319: Pseudo dice [0.631, 0.606, 0.7461, 0.8545, 0.59, 0.7201, 0.5455] +2026-04-09 19:15:43.022565: Epoch time: 217.25 s +2026-04-09 19:15:44.136293: +2026-04-09 19:15:44.141786: Epoch 902 +2026-04-09 19:15:44.148223: Current learning rate: 0.00124 +2026-04-09 19:19:44.048697: train_loss -0.432 +2026-04-09 19:19:44.066601: val_loss -0.3516 +2026-04-09 19:19:44.071698: Pseudo dice [0.5877, 0.3381, 0.7855, 0.7159, 0.3799, 0.7757, 0.7856] +2026-04-09 19:19:44.078687: Epoch time: 239.92 s +2026-04-09 19:19:45.198446: +2026-04-09 19:19:45.204252: Epoch 903 +2026-04-09 19:19:45.208919: Current learning rate: 0.00122 +2026-04-09 19:23:45.058138: train_loss -0.4434 +2026-04-09 19:23:45.078594: val_loss -0.3675 +2026-04-09 19:23:45.085596: Pseudo dice [0.3804, 0.4058, 0.63, 0.7645, 0.5841, 0.6283, 0.864] +2026-04-09 19:23:45.091732: Epoch time: 239.86 s +2026-04-09 19:23:46.202215: +2026-04-09 19:23:46.209316: Epoch 904 +2026-04-09 19:23:46.219576: Current learning rate: 0.00121 +2026-04-09 19:27:39.070674: train_loss -0.4372 +2026-04-09 19:27:39.092169: val_loss -0.3645 +2026-04-09 19:27:39.097831: Pseudo dice [0.3152, 0.1883, 0.7777, 0.654, 0.3719, 0.7178, 0.843] +2026-04-09 19:27:39.104722: Epoch time: 232.87 s +2026-04-09 19:27:40.219501: +2026-04-09 19:27:40.225888: Epoch 905 +2026-04-09 19:27:40.233533: Current learning rate: 0.0012 
+2026-04-09 19:31:31.641946: train_loss -0.4418 +2026-04-09 19:31:31.666178: val_loss -0.3427 +2026-04-09 19:31:31.672287: Pseudo dice [0.4236, 0.247, 0.6875, 0.4424, 0.3815, 0.7353, 0.6803] +2026-04-09 19:31:31.678310: Epoch time: 231.43 s +2026-04-09 19:31:32.773551: +2026-04-09 19:31:32.781563: Epoch 906 +2026-04-09 19:31:32.788643: Current learning rate: 0.00119 +2026-04-09 19:35:11.960667: train_loss -0.4427 +2026-04-09 19:35:11.979079: val_loss -0.3848 +2026-04-09 19:35:11.984969: Pseudo dice [0.8224, 0.1117, 0.5784, 0.7564, 0.5181, 0.7895, 0.9038] +2026-04-09 19:35:11.992097: Epoch time: 219.19 s +2026-04-09 19:35:13.117929: +2026-04-09 19:35:13.132762: Epoch 907 +2026-04-09 19:35:13.139732: Current learning rate: 0.00118 +2026-04-09 19:38:50.255846: train_loss -0.4364 +2026-04-09 19:38:50.273778: val_loss -0.3513 +2026-04-09 19:38:50.279621: Pseudo dice [0.2373, 0.4406, 0.7371, 0.7069, 0.4739, 0.8339, 0.7939] +2026-04-09 19:38:50.284040: Epoch time: 217.14 s +2026-04-09 19:38:51.377736: +2026-04-09 19:38:51.385174: Epoch 908 +2026-04-09 19:38:51.390254: Current learning rate: 0.00117 +2026-04-09 19:42:27.846515: train_loss -0.4423 +2026-04-09 19:42:27.863504: val_loss -0.3471 +2026-04-09 19:42:27.869125: Pseudo dice [0.6057, 0.4601, 0.8731, 0.8413, 0.2408, 0.7984, 0.8899] +2026-04-09 19:42:27.875898: Epoch time: 216.47 s +2026-04-09 19:42:28.989226: +2026-04-09 19:42:28.994538: Epoch 909 +2026-04-09 19:42:28.999497: Current learning rate: 0.00116 +2026-04-09 19:45:50.818179: train_loss -0.4371 +2026-04-09 19:45:50.837479: val_loss -0.346 +2026-04-09 19:45:50.842461: Pseudo dice [0.3382, 0.5076, 0.4428, 0.9047, 0.372, 0.7875, 0.8826] +2026-04-09 19:45:50.847976: Epoch time: 201.83 s +2026-04-09 19:45:51.957388: +2026-04-09 19:45:51.962770: Epoch 910 +2026-04-09 19:45:51.967784: Current learning rate: 0.00115 +2026-04-09 19:49:08.707622: train_loss -0.4336 +2026-04-09 19:49:08.729460: val_loss -0.3642 +2026-04-09 19:49:08.735234: Pseudo dice [0.5505, 0.284, 
0.7051, 0.8777, 0.1647, 0.8416, 0.9198] +2026-04-09 19:49:08.740448: Epoch time: 196.75 s +2026-04-09 19:49:09.857499: +2026-04-09 19:49:09.863163: Epoch 911 +2026-04-09 19:49:09.868691: Current learning rate: 0.00113 +2026-04-09 19:52:41.699455: train_loss -0.4407 +2026-04-09 19:52:41.717782: val_loss -0.353 +2026-04-09 19:52:41.725771: Pseudo dice [0.7165, 0.5004, 0.5927, 0.7757, 0.3816, 0.5273, 0.7493] +2026-04-09 19:52:41.732782: Epoch time: 211.85 s +2026-04-09 19:52:42.853802: +2026-04-09 19:52:42.858637: Epoch 912 +2026-04-09 19:52:42.864531: Current learning rate: 0.00112 +2026-04-09 19:56:14.321571: train_loss -0.4362 +2026-04-09 19:56:14.340692: val_loss -0.3754 +2026-04-09 19:56:14.346480: Pseudo dice [0.4363, 0.5145, 0.5943, 0.7295, 0.2157, 0.7784, 0.8638] +2026-04-09 19:56:14.351963: Epoch time: 211.47 s +2026-04-09 19:56:15.492718: +2026-04-09 19:56:15.498172: Epoch 913 +2026-04-09 19:56:15.502884: Current learning rate: 0.00111 +2026-04-09 19:59:29.340413: train_loss -0.4408 +2026-04-09 19:59:29.356071: val_loss -0.3742 +2026-04-09 19:59:29.361045: Pseudo dice [0.6947, 0.4298, 0.7616, 0.5228, 0.3813, 0.2488, 0.8339] +2026-04-09 19:59:29.366693: Epoch time: 193.85 s +2026-04-09 19:59:30.459796: +2026-04-09 19:59:30.465349: Epoch 914 +2026-04-09 19:59:30.470285: Current learning rate: 0.0011 +2026-04-09 20:02:52.242460: train_loss -0.4483 +2026-04-09 20:02:52.261023: val_loss -0.3756 +2026-04-09 20:02:52.266324: Pseudo dice [0.8288, 0.4425, 0.6851, 0.7735, 0.4514, 0.8277, 0.8451] +2026-04-09 20:02:52.271140: Epoch time: 201.79 s +2026-04-09 20:02:53.343978: +2026-04-09 20:02:53.348233: Epoch 915 +2026-04-09 20:02:53.353851: Current learning rate: 0.00109 +2026-04-09 20:06:12.221350: train_loss -0.4389 +2026-04-09 20:06:12.243120: val_loss -0.3848 +2026-04-09 20:06:12.250111: Pseudo dice [0.7235, 0.4272, 0.6889, 0.9027, 0.4541, 0.8477, 0.7758] +2026-04-09 20:06:12.257437: Epoch time: 198.88 s +2026-04-09 20:06:13.381064: +2026-04-09 20:06:13.389239: 
Epoch 916 +2026-04-09 20:06:13.394746: Current learning rate: 0.00108 +2026-04-09 20:09:34.099375: train_loss -0.4587 +2026-04-09 20:09:34.118189: val_loss -0.3728 +2026-04-09 20:09:34.123926: Pseudo dice [0.4463, 0.2527, 0.7145, 0.8225, 0.2819, 0.8222, 0.8838] +2026-04-09 20:09:34.131531: Epoch time: 200.72 s +2026-04-09 20:09:35.256345: +2026-04-09 20:09:35.260889: Epoch 917 +2026-04-09 20:09:35.265971: Current learning rate: 0.00106 +2026-04-09 20:13:02.601579: train_loss -0.4454 +2026-04-09 20:13:02.620044: val_loss -0.3655 +2026-04-09 20:13:02.627758: Pseudo dice [0.4865, 0.4155, 0.8532, 0.8572, 0.5255, 0.7222, 0.7884] +2026-04-09 20:13:02.634795: Epoch time: 207.35 s +2026-04-09 20:13:03.744964: +2026-04-09 20:13:03.750556: Epoch 918 +2026-04-09 20:13:03.756207: Current learning rate: 0.00105 +2026-04-09 20:16:34.987818: train_loss -0.4388 +2026-04-09 20:16:35.010710: val_loss -0.3612 +2026-04-09 20:16:35.018916: Pseudo dice [0.1409, 0.3271, 0.7732, 0.583, 0.6876, 0.7033, 0.8698] +2026-04-09 20:16:35.027750: Epoch time: 211.25 s +2026-04-09 20:16:36.151603: +2026-04-09 20:16:36.157173: Epoch 919 +2026-04-09 20:16:36.163527: Current learning rate: 0.00104 +2026-04-09 20:20:02.974574: train_loss -0.4471 +2026-04-09 20:20:02.999476: val_loss -0.3795 +2026-04-09 20:20:03.008460: Pseudo dice [0.614, 0.5451, 0.7164, 0.4627, 0.6533, 0.7966, 0.9198] +2026-04-09 20:20:03.017419: Epoch time: 206.83 s +2026-04-09 20:20:04.149422: +2026-04-09 20:20:04.156188: Epoch 920 +2026-04-09 20:20:04.162522: Current learning rate: 0.00103 +2026-04-09 20:23:26.633755: train_loss -0.4525 +2026-04-09 20:23:26.650055: val_loss -0.3292 +2026-04-09 20:23:26.655233: Pseudo dice [0.3396, 0.4287, 0.5839, 0.7434, 0.2515, 0.7295, 0.8573] +2026-04-09 20:23:26.662190: Epoch time: 202.49 s +2026-04-09 20:23:27.767620: +2026-04-09 20:23:27.773556: Epoch 921 +2026-04-09 20:23:27.779244: Current learning rate: 0.00102 +2026-04-09 20:26:41.615204: train_loss -0.4497 +2026-04-09 20:26:41.633034: 
val_loss -0.3738 +2026-04-09 20:26:41.644185: Pseudo dice [0.4191, 0.5139, 0.7083, 0.8353, 0.4498, 0.7884, 0.8866] +2026-04-09 20:26:41.655665: Epoch time: 193.85 s +2026-04-09 20:26:42.770983: +2026-04-09 20:26:42.775205: Epoch 922 +2026-04-09 20:26:42.780218: Current learning rate: 0.00101 +2026-04-09 20:30:02.260467: train_loss -0.4565 +2026-04-09 20:30:02.276673: val_loss -0.3564 +2026-04-09 20:30:02.281917: Pseudo dice [0.3185, 0.3082, 0.6564, 0.8765, 0.312, 0.8363, 0.7773] +2026-04-09 20:30:02.286852: Epoch time: 199.49 s +2026-04-09 20:30:03.429457: +2026-04-09 20:30:03.434819: Epoch 923 +2026-04-09 20:30:03.439466: Current learning rate: 0.001 +2026-04-09 20:33:16.525494: train_loss -0.4551 +2026-04-09 20:33:16.542509: val_loss -0.3696 +2026-04-09 20:33:16.548064: Pseudo dice [0.7503, 0.4293, 0.8057, 0.6638, 0.4553, 0.2985, 0.779] +2026-04-09 20:33:16.553056: Epoch time: 193.1 s +2026-04-09 20:33:17.641704: +2026-04-09 20:33:17.647843: Epoch 924 +2026-04-09 20:33:17.652558: Current learning rate: 0.00098 +2026-04-09 20:36:24.587366: train_loss -0.4504 +2026-04-09 20:36:24.600562: val_loss -0.346 +2026-04-09 20:36:24.606336: Pseudo dice [0.5877, 0.4413, 0.8127, 0.591, 0.2134, 0.5957, 0.8283] +2026-04-09 20:36:24.611955: Epoch time: 186.95 s +2026-04-09 20:36:25.722700: +2026-04-09 20:36:25.727737: Epoch 925 +2026-04-09 20:36:25.732681: Current learning rate: 0.00097 +2026-04-09 20:39:35.426759: train_loss -0.4443 +2026-04-09 20:39:35.446165: val_loss -0.366 +2026-04-09 20:39:35.451994: Pseudo dice [0.6798, 0.4441, 0.7886, 0.9103, 0.4804, 0.8178, 0.9129] +2026-04-09 20:39:35.457600: Epoch time: 189.71 s +2026-04-09 20:39:36.599977: +2026-04-09 20:39:36.605445: Epoch 926 +2026-04-09 20:39:36.610684: Current learning rate: 0.00096 +2026-04-09 20:42:50.284414: train_loss -0.4391 +2026-04-09 20:42:50.301532: val_loss -0.3453 +2026-04-09 20:42:50.306617: Pseudo dice [0.6335, 0.1419, 0.6373, 0.8763, 0.3901, 0.6706, 0.7015] +2026-04-09 20:42:50.312546: Epoch time: 
193.69 s +2026-04-09 20:42:51.421790: +2026-04-09 20:42:51.427013: Epoch 927 +2026-04-09 20:42:51.432578: Current learning rate: 0.00095 +2026-04-09 20:45:53.517816: train_loss -0.4509 +2026-04-09 20:45:53.539349: val_loss -0.3989 +2026-04-09 20:45:53.544665: Pseudo dice [0.6668, 0.427, 0.5616, 0.908, 0.6923, 0.7808, 0.9189] +2026-04-09 20:45:53.549602: Epoch time: 182.1 s +2026-04-09 20:45:54.668870: +2026-04-09 20:45:54.674060: Epoch 928 +2026-04-09 20:45:54.682820: Current learning rate: 0.00094 +2026-04-09 20:49:02.695306: train_loss -0.4592 +2026-04-09 20:49:02.713965: val_loss -0.3873 +2026-04-09 20:49:02.719719: Pseudo dice [0.448, 0.3733, 0.8312, 0.586, 0.4878, 0.8047, 0.8983] +2026-04-09 20:49:02.725816: Epoch time: 188.03 s +2026-04-09 20:49:03.836558: +2026-04-09 20:49:03.842442: Epoch 929 +2026-04-09 20:49:03.848335: Current learning rate: 0.00092 +2026-04-09 20:52:13.404870: train_loss -0.4412 +2026-04-09 20:52:13.423258: val_loss -0.3783 +2026-04-09 20:52:13.429121: Pseudo dice [0.6606, 0.4131, 0.7843, 0.7684, 0.5239, 0.7935, 0.915] +2026-04-09 20:52:13.435020: Epoch time: 189.57 s +2026-04-09 20:52:14.526443: +2026-04-09 20:52:14.533759: Epoch 930 +2026-04-09 20:52:14.540751: Current learning rate: 0.00091 +2026-04-09 20:55:20.167426: train_loss -0.4495 +2026-04-09 20:55:20.182913: val_loss -0.4016 +2026-04-09 20:55:20.187681: Pseudo dice [0.5857, 0.5021, 0.7966, 0.8663, 0.6108, 0.822, 0.9422] +2026-04-09 20:55:20.192678: Epoch time: 185.64 s +2026-04-09 20:55:21.315242: +2026-04-09 20:55:21.320630: Epoch 931 +2026-04-09 20:55:21.325118: Current learning rate: 0.0009 +2026-04-09 20:58:25.936983: train_loss -0.4506 +2026-04-09 20:58:25.951966: val_loss -0.3859 +2026-04-09 20:58:25.956565: Pseudo dice [0.6712, 0.3703, 0.8281, 0.849, 0.1633, 0.5576, 0.8952] +2026-04-09 20:58:25.961047: Epoch time: 184.63 s +2026-04-09 20:58:27.093709: +2026-04-09 20:58:27.098839: Epoch 932 +2026-04-09 20:58:27.104912: Current learning rate: 0.00089 +2026-04-09 
21:01:25.983110: train_loss -0.4504 +2026-04-09 21:01:26.004868: val_loss -0.3878 +2026-04-09 21:01:26.012564: Pseudo dice [0.8319, 0.4294, 0.7868, 0.7992, 0.352, 0.4573, 0.9398] +2026-04-09 21:01:26.019674: Epoch time: 178.89 s +2026-04-09 21:01:27.149672: +2026-04-09 21:01:27.155755: Epoch 933 +2026-04-09 21:01:27.161599: Current learning rate: 0.00088 +2026-04-09 21:04:30.247718: train_loss -0.4403 +2026-04-09 21:04:30.265951: val_loss -0.3952 +2026-04-09 21:04:30.272403: Pseudo dice [0.746, 0.6255, 0.654, 0.7937, 0.5339, 0.7875, 0.9392] +2026-04-09 21:04:30.277153: Epoch time: 183.1 s +2026-04-09 21:04:31.388538: +2026-04-09 21:04:31.394650: Epoch 934 +2026-04-09 21:04:31.399208: Current learning rate: 0.00087 +2026-04-09 21:07:39.406258: train_loss -0.4576 +2026-04-09 21:07:39.425482: val_loss -0.3719 +2026-04-09 21:07:39.431448: Pseudo dice [0.8173, 0.4391, 0.7495, 0.394, 0.5815, 0.4181, 0.8195] +2026-04-09 21:07:39.438016: Epoch time: 188.02 s +2026-04-09 21:07:40.540636: +2026-04-09 21:07:40.547999: Epoch 935 +2026-04-09 21:07:40.553921: Current learning rate: 0.00085 +2026-04-09 21:10:42.617181: train_loss -0.4438 +2026-04-09 21:10:42.631183: val_loss -0.3476 +2026-04-09 21:10:42.636865: Pseudo dice [0.5219, 0.5333, 0.7227, 0.5109, 0.452, 0.4612, 0.9561] +2026-04-09 21:10:42.641381: Epoch time: 182.08 s +2026-04-09 21:10:43.747621: +2026-04-09 21:10:43.752810: Epoch 936 +2026-04-09 21:10:43.757494: Current learning rate: 0.00084 +2026-04-09 21:13:40.245255: train_loss -0.4433 +2026-04-09 21:13:40.261604: val_loss -0.3621 +2026-04-09 21:13:40.266146: Pseudo dice [0.3, 0.443, 0.7424, 0.8138, 0.3592, 0.7686, 0.9321] +2026-04-09 21:13:40.272285: Epoch time: 176.5 s +2026-04-09 21:13:41.411436: +2026-04-09 21:13:41.417698: Epoch 937 +2026-04-09 21:13:41.422707: Current learning rate: 0.00083 +2026-04-09 21:16:47.430130: train_loss -0.4544 +2026-04-09 21:16:47.455178: val_loss -0.3689 +2026-04-09 21:16:47.462798: Pseudo dice [0.7915, 0.308, 0.7628, 0.4653, 
0.3708, 0.85, 0.868] +2026-04-09 21:16:47.470284: Epoch time: 186.02 s +2026-04-09 21:16:48.591547: +2026-04-09 21:16:48.597144: Epoch 938 +2026-04-09 21:16:48.604503: Current learning rate: 0.00082 +2026-04-09 21:20:01.886603: train_loss -0.4379 +2026-04-09 21:20:01.898412: val_loss -0.3784 +2026-04-09 21:20:01.912535: Pseudo dice [0.4139, 0.4972, 0.7623, 0.8738, 0.4597, 0.795, 0.8817] +2026-04-09 21:20:01.916881: Epoch time: 193.3 s +2026-04-09 21:20:03.057741: +2026-04-09 21:20:03.062417: Epoch 939 +2026-04-09 21:20:03.067626: Current learning rate: 0.00081 +2026-04-09 21:23:14.312668: train_loss -0.4532 +2026-04-09 21:23:14.327675: val_loss -0.358 +2026-04-09 21:23:14.332717: Pseudo dice [0.4101, 0.4053, 0.4496, 0.2378, 0.1629, 0.6673, 0.6362] +2026-04-09 21:23:14.338580: Epoch time: 191.26 s +2026-04-09 21:23:15.451709: +2026-04-09 21:23:15.458563: Epoch 940 +2026-04-09 21:23:15.463960: Current learning rate: 0.00079 +2026-04-09 21:26:34.336243: train_loss -0.4468 +2026-04-09 21:26:34.356182: val_loss -0.3472 +2026-04-09 21:26:34.366600: Pseudo dice [0.5025, 0.5731, 0.8411, 0.5984, 0.3961, 0.8147, 0.8344] +2026-04-09 21:26:34.374640: Epoch time: 198.89 s +2026-04-09 21:26:35.470701: +2026-04-09 21:26:35.476646: Epoch 941 +2026-04-09 21:26:35.482115: Current learning rate: 0.00078 +2026-04-09 21:29:45.005937: train_loss -0.4545 +2026-04-09 21:29:45.025349: val_loss -0.4056 +2026-04-09 21:29:45.031351: Pseudo dice [0.6468, 0.6075, 0.6503, 0.7805, 0.6549, 0.8408, 0.8947] +2026-04-09 21:29:45.036359: Epoch time: 189.54 s +2026-04-09 21:29:46.149594: +2026-04-09 21:29:46.163108: Epoch 942 +2026-04-09 21:29:46.168441: Current learning rate: 0.00077 +2026-04-09 21:32:51.043835: train_loss -0.4504 +2026-04-09 21:32:51.064086: val_loss -0.3554 +2026-04-09 21:32:51.070377: Pseudo dice [0.298, 0.3345, 0.5322, 0.4123, 0.3968, 0.7742, 0.9254] +2026-04-09 21:32:51.076647: Epoch time: 184.9 s +2026-04-09 21:32:52.187475: +2026-04-09 21:32:52.194072: Epoch 943 +2026-04-09 
21:32:52.198140: Current learning rate: 0.00076 +2026-04-09 21:36:04.370046: train_loss -0.4512 +2026-04-09 21:36:04.389325: val_loss -0.3793 +2026-04-09 21:36:04.396125: Pseudo dice [0.3941, 0.5914, 0.7983, 0.6377, 0.5035, 0.7799, 0.9349] +2026-04-09 21:36:04.401492: Epoch time: 192.19 s +2026-04-09 21:36:05.519583: +2026-04-09 21:36:05.524254: Epoch 944 +2026-04-09 21:36:05.528605: Current learning rate: 0.00075 +2026-04-09 21:38:58.683160: train_loss -0.4542 +2026-04-09 21:38:58.697532: val_loss -0.3037 +2026-04-09 21:38:58.703101: Pseudo dice [0.3065, 0.5638, 0.6753, 0.0999, 0.4013, 0.748, 0.6737] +2026-04-09 21:38:58.707413: Epoch time: 173.17 s +2026-04-09 21:38:59.791707: +2026-04-09 21:38:59.796615: Epoch 945 +2026-04-09 21:38:59.802468: Current learning rate: 0.00074 +2026-04-09 21:42:06.772351: train_loss -0.4593 +2026-04-09 21:42:06.787067: val_loss -0.3801 +2026-04-09 21:42:06.792007: Pseudo dice [0.5475, 0.4149, 0.7894, 0.7546, 0.3721, 0.7134, 0.7524] +2026-04-09 21:42:06.798018: Epoch time: 186.98 s +2026-04-09 21:42:07.947211: +2026-04-09 21:42:07.955168: Epoch 946 +2026-04-09 21:42:07.961388: Current learning rate: 0.00072 +2026-04-09 21:45:12.605137: train_loss -0.4581 +2026-04-09 21:45:12.620290: val_loss -0.3737 +2026-04-09 21:45:12.626076: Pseudo dice [0.3665, 0.5348, 0.768, 0.5258, 0.3502, 0.5817, 0.85] +2026-04-09 21:45:12.631122: Epoch time: 184.66 s +2026-04-09 21:45:13.753861: +2026-04-09 21:45:13.759395: Epoch 947 +2026-04-09 21:45:13.765425: Current learning rate: 0.00071 +2026-04-09 21:48:27.448383: train_loss -0.4564 +2026-04-09 21:48:27.464402: val_loss -0.3701 +2026-04-09 21:48:27.469286: Pseudo dice [0.7498, 0.4659, 0.7567, 0.6196, 0.6056, 0.76, 0.7516] +2026-04-09 21:48:27.474639: Epoch time: 193.7 s +2026-04-09 21:48:28.620760: +2026-04-09 21:48:28.624947: Epoch 948 +2026-04-09 21:48:28.630149: Current learning rate: 0.0007 +2026-04-09 21:51:43.451714: train_loss -0.448 +2026-04-09 21:51:43.468021: val_loss -0.3905 +2026-04-09 
21:51:43.474761: Pseudo dice [0.5373, 0.42, 0.8106, 0.5531, 0.3821, 0.7491, 0.891] +2026-04-09 21:51:43.479258: Epoch time: 194.83 s +2026-04-09 21:51:44.604919: +2026-04-09 21:51:44.615379: Epoch 949 +2026-04-09 21:51:44.620583: Current learning rate: 0.00069 +2026-04-09 21:55:00.174356: train_loss -0.4617 +2026-04-09 21:55:00.189233: val_loss -0.3873 +2026-04-09 21:55:00.193321: Pseudo dice [0.47, 0.3875, 0.837, 0.9193, 0.5948, 0.8101, 0.8816] +2026-04-09 21:55:00.197124: Epoch time: 195.57 s +2026-04-09 21:55:03.677577: +2026-04-09 21:55:03.683212: Epoch 950 +2026-04-09 21:55:03.689490: Current learning rate: 0.00067 +2026-04-09 21:58:08.616835: train_loss -0.4559 +2026-04-09 21:58:08.639534: val_loss -0.3861 +2026-04-09 21:58:08.647505: Pseudo dice [0.56, 0.596, 0.8516, 0.8042, 0.5678, 0.8183, 0.7759] +2026-04-09 21:58:08.653152: Epoch time: 184.94 s +2026-04-09 21:58:09.772689: +2026-04-09 21:58:09.778928: Epoch 951 +2026-04-09 21:58:09.784583: Current learning rate: 0.00066 +2026-04-09 22:01:23.360633: train_loss -0.4456 +2026-04-09 22:01:23.383081: val_loss -0.3533 +2026-04-09 22:01:23.389824: Pseudo dice [0.5538, 0.4699, 0.5449, 0.5934, 0.2597, 0.7356, 0.641] +2026-04-09 22:01:23.397846: Epoch time: 193.59 s +2026-04-09 22:01:24.499799: +2026-04-09 22:01:24.506029: Epoch 952 +2026-04-09 22:01:24.512406: Current learning rate: 0.00065 +2026-04-09 22:04:31.183301: train_loss -0.4594 +2026-04-09 22:04:31.198103: val_loss -0.3337 +2026-04-09 22:04:31.204260: Pseudo dice [0.1888, 0.4084, 0.7771, 0.4244, 0.4831, 0.8104, 0.7756] +2026-04-09 22:04:31.211415: Epoch time: 186.69 s +2026-04-09 22:04:32.312726: +2026-04-09 22:04:32.318558: Epoch 953 +2026-04-09 22:04:32.323587: Current learning rate: 0.00064 +2026-04-09 22:07:30.429766: train_loss -0.4596 +2026-04-09 22:07:30.450749: val_loss -0.3883 +2026-04-09 22:07:30.456865: Pseudo dice [0.6485, 0.6936, 0.8852, 0.9183, 0.5079, 0.666, 0.9288] +2026-04-09 22:07:30.463280: Epoch time: 178.12 s +2026-04-09 
22:07:31.613663: +2026-04-09 22:07:31.619967: Epoch 954 +2026-04-09 22:07:31.627027: Current learning rate: 0.00063 +2026-04-09 22:10:11.741808: train_loss -0.4563 +2026-04-09 22:10:11.759622: val_loss -0.3783 +2026-04-09 22:10:11.779468: Pseudo dice [0.4541, 0.5027, 0.6661, 0.7345, 0.507, 0.7711, 0.7891] +2026-04-09 22:10:11.785160: Epoch time: 160.13 s +2026-04-09 22:10:12.912509: +2026-04-09 22:10:12.917598: Epoch 955 +2026-04-09 22:10:12.923161: Current learning rate: 0.00061 +2026-04-09 22:12:57.587303: train_loss -0.4601 +2026-04-09 22:12:57.594661: val_loss -0.3658 +2026-04-09 22:12:57.597849: Pseudo dice [0.736, 0.4852, 0.6615, 0.3807, 0.6739, 0.8174, 0.8707] +2026-04-09 22:12:57.600364: Epoch time: 164.68 s +2026-04-09 22:12:58.751007: +2026-04-09 22:12:58.760785: Epoch 956 +2026-04-09 22:12:58.769265: Current learning rate: 0.0006 +2026-04-09 22:15:45.800047: train_loss -0.459 +2026-04-09 22:15:45.817611: val_loss -0.3865 +2026-04-09 22:15:45.822300: Pseudo dice [0.4626, 0.6033, 0.758, 0.8948, 0.4773, 0.8658, 0.7837] +2026-04-09 22:15:45.826858: Epoch time: 167.05 s +2026-04-09 22:15:46.939054: +2026-04-09 22:15:46.944872: Epoch 957 +2026-04-09 22:15:46.952487: Current learning rate: 0.00059 +2026-04-09 22:18:49.052705: train_loss -0.455 +2026-04-09 22:18:49.068600: val_loss -0.3882 +2026-04-09 22:18:49.075181: Pseudo dice [0.7937, 0.4998, 0.8219, 0.758, 0.4184, 0.7964, 0.931] +2026-04-09 22:18:49.080239: Epoch time: 182.12 s +2026-04-09 22:18:50.199573: +2026-04-09 22:18:50.205071: Epoch 958 +2026-04-09 22:18:50.210224: Current learning rate: 0.00058 +2026-04-09 22:21:41.518225: train_loss -0.4643 +2026-04-09 22:21:41.534179: val_loss -0.3846 +2026-04-09 22:21:41.541224: Pseudo dice [0.7579, 0.4281, 0.6281, 0.7347, 0.4276, 0.6749, 0.9086] +2026-04-09 22:21:41.546619: Epoch time: 171.32 s +2026-04-09 22:21:42.697647: +2026-04-09 22:21:42.702716: Epoch 959 +2026-04-09 22:21:42.707532: Current learning rate: 0.00056 +2026-04-09 22:24:39.848260: train_loss 
-0.4703 +2026-04-09 22:24:39.860564: val_loss -0.3668 +2026-04-09 22:24:39.865542: Pseudo dice [0.2288, 0.2606, 0.7488, 0.8694, 0.4415, 0.7939, 0.8805] +2026-04-09 22:24:39.869758: Epoch time: 177.15 s +2026-04-09 22:24:41.005456: +2026-04-09 22:24:41.010334: Epoch 960 +2026-04-09 22:24:41.016457: Current learning rate: 0.00055 +2026-04-09 22:27:43.593007: train_loss -0.4553 +2026-04-09 22:27:43.607578: val_loss -0.3563 +2026-04-09 22:27:43.612531: Pseudo dice [0.0671, 0.3963, 0.7823, 0.9045, 0.3134, 0.8851, 0.7823] +2026-04-09 22:27:43.617274: Epoch time: 182.59 s +2026-04-09 22:27:44.768418: +2026-04-09 22:27:44.773743: Epoch 961 +2026-04-09 22:27:44.779397: Current learning rate: 0.00054 +2026-04-09 22:30:47.308066: train_loss -0.4592 +2026-04-09 22:30:47.324273: val_loss -0.3745 +2026-04-09 22:30:47.330203: Pseudo dice [0.7152, 0.4958, 0.7545, 0.8777, 0.4307, 0.7413, 0.8966] +2026-04-09 22:30:47.335525: Epoch time: 182.54 s +2026-04-09 22:30:48.478211: +2026-04-09 22:30:48.484310: Epoch 962 +2026-04-09 22:30:48.491441: Current learning rate: 0.00053 +2026-04-09 22:33:47.313874: train_loss -0.4668 +2026-04-09 22:33:47.338601: val_loss -0.3674 +2026-04-09 22:33:47.347554: Pseudo dice [0.408, 0.2992, 0.6889, 0.8481, 0.5826, 0.875, 0.951] +2026-04-09 22:33:47.356385: Epoch time: 178.84 s +2026-04-09 22:33:48.495247: +2026-04-09 22:33:48.501858: Epoch 963 +2026-04-09 22:33:48.507963: Current learning rate: 0.00051 +2026-04-09 22:36:49.584693: train_loss -0.4533 +2026-04-09 22:36:49.602608: val_loss -0.3828 +2026-04-09 22:36:49.608475: Pseudo dice [0.5279, 0.5784, 0.7591, 0.8955, 0.4889, 0.7204, 0.9502] +2026-04-09 22:36:49.614283: Epoch time: 181.09 s +2026-04-09 22:36:50.786040: +2026-04-09 22:36:50.792294: Epoch 964 +2026-04-09 22:36:50.797926: Current learning rate: 0.0005 +2026-04-09 22:40:00.071823: train_loss -0.4591 +2026-04-09 22:40:00.090683: val_loss -0.3881 +2026-04-09 22:40:00.096615: Pseudo dice [0.6855, 0.4374, 0.7408, 0.8478, 0.2546, 0.7651, 0.9424] 
+2026-04-09 22:40:00.103763: Epoch time: 189.29 s +2026-04-09 22:40:01.233062: +2026-04-09 22:40:01.239199: Epoch 965 +2026-04-09 22:40:01.243619: Current learning rate: 0.00049 +2026-04-09 22:43:12.990848: train_loss -0.4555 +2026-04-09 22:43:13.010869: val_loss -0.3808 +2026-04-09 22:43:13.017945: Pseudo dice [0.608, 0.4807, 0.689, 0.7739, 0.5901, 0.7052, 0.9452] +2026-04-09 22:43:13.024934: Epoch time: 191.76 s +2026-04-09 22:43:14.183646: +2026-04-09 22:43:14.190278: Epoch 966 +2026-04-09 22:43:14.198053: Current learning rate: 0.00048 +2026-04-09 22:46:26.712137: train_loss -0.458 +2026-04-09 22:46:26.732271: val_loss -0.3822 +2026-04-09 22:46:26.739139: Pseudo dice [0.835, 0.6332, 0.7438, 0.6385, 0.602, 0.7917, 0.6563] +2026-04-09 22:46:26.744156: Epoch time: 192.53 s +2026-04-09 22:46:27.885776: +2026-04-09 22:46:27.891817: Epoch 967 +2026-04-09 22:46:27.898343: Current learning rate: 0.00046 +2026-04-09 22:49:32.639497: train_loss -0.4527 +2026-04-09 22:49:32.657696: val_loss -0.3654 +2026-04-09 22:49:32.664074: Pseudo dice [0.8092, 0.4863, 0.6761, 0.7846, 0.3759, 0.7294, 0.8055] +2026-04-09 22:49:32.670705: Epoch time: 184.76 s +2026-04-09 22:49:32.677542: Yayy! 
New best EMA pseudo Dice: 0.6616 +2026-04-09 22:49:36.156648: +2026-04-09 22:49:36.161701: Epoch 968 +2026-04-09 22:49:36.167712: Current learning rate: 0.00045 +2026-04-09 22:52:46.431200: train_loss -0.4638 +2026-04-09 22:52:46.449484: val_loss -0.3433 +2026-04-09 22:52:46.454467: Pseudo dice [0.7076, 0.5568, 0.8081, 0.4521, 0.2405, 0.785, 0.8851] +2026-04-09 22:52:46.459508: Epoch time: 190.28 s +2026-04-09 22:52:47.594582: +2026-04-09 22:52:47.600162: Epoch 969 +2026-04-09 22:52:47.606947: Current learning rate: 0.00044 +2026-04-09 22:55:50.891857: train_loss -0.4543 +2026-04-09 22:55:50.907191: val_loss -0.3744 +2026-04-09 22:55:50.912434: Pseudo dice [0.2381, 0.4618, 0.8008, 0.8226, 0.3896, 0.8891, 0.9648] +2026-04-09 22:55:50.917503: Epoch time: 183.3 s +2026-04-09 22:55:52.065826: +2026-04-09 22:55:52.071068: Epoch 970 +2026-04-09 22:55:52.076045: Current learning rate: 0.00043 +2026-04-09 22:58:52.788834: train_loss -0.4562 +2026-04-09 22:58:52.807789: val_loss -0.3976 +2026-04-09 22:58:52.814742: Pseudo dice [0.4124, 0.617, 0.8249, 0.8616, 0.4986, 0.8394, 0.9065] +2026-04-09 22:58:52.821178: Epoch time: 180.73 s +2026-04-09 22:58:52.828888: Yayy! New best EMA pseudo Dice: 0.6632 +2026-04-09 22:58:56.429547: +2026-04-09 22:58:56.435972: Epoch 971 +2026-04-09 22:58:56.442315: Current learning rate: 0.00041 +2026-04-09 23:02:13.102986: train_loss -0.4645 +2026-04-09 23:02:13.116736: val_loss -0.3979 +2026-04-09 23:02:13.121128: Pseudo dice [0.8123, 0.4195, 0.8288, 0.9168, 0.5188, 0.7651, 0.9379] +2026-04-09 23:02:13.126204: Epoch time: 196.68 s +2026-04-09 23:02:13.131328: Yayy! 
New best EMA pseudo Dice: 0.6712 +2026-04-09 23:02:16.456961: +2026-04-09 23:02:16.461065: Epoch 972 +2026-04-09 23:02:16.465273: Current learning rate: 0.0004 +2026-04-09 23:05:19.661440: train_loss -0.4475 +2026-04-09 23:05:19.682315: val_loss -0.3554 +2026-04-09 23:05:19.689299: Pseudo dice [0.6784, 0.6077, 0.6976, 0.7996, 0.3729, 0.7743, 0.59] +2026-04-09 23:05:19.695938: Epoch time: 183.21 s +2026-04-09 23:05:20.837287: +2026-04-09 23:05:20.846637: Epoch 973 +2026-04-09 23:05:20.855147: Current learning rate: 0.00039 +2026-04-09 23:08:28.841874: train_loss -0.455 +2026-04-09 23:08:28.860876: val_loss -0.3872 +2026-04-09 23:08:28.866680: Pseudo dice [0.4756, 0.1087, 0.75, 0.6167, 0.5944, 0.8616, 0.9056] +2026-04-09 23:08:28.871740: Epoch time: 188.01 s +2026-04-09 23:08:30.000247: +2026-04-09 23:08:30.005650: Epoch 974 +2026-04-09 23:08:30.012200: Current learning rate: 0.00037 +2026-04-09 23:11:33.924193: train_loss -0.4668 +2026-04-09 23:11:33.941807: val_loss -0.355 +2026-04-09 23:11:33.946705: Pseudo dice [0.6567, 0.4537, 0.6215, 0.8461, 0.253, 0.8356, 0.8072] +2026-04-09 23:11:33.952347: Epoch time: 183.93 s +2026-04-09 23:11:35.083777: +2026-04-09 23:11:35.088615: Epoch 975 +2026-04-09 23:11:35.093793: Current learning rate: 0.00036 +2026-04-09 23:14:41.472435: train_loss -0.4642 +2026-04-09 23:14:41.489265: val_loss -0.3904 +2026-04-09 23:14:41.496429: Pseudo dice [0.7045, 0.3826, 0.8792, 0.5309, 0.41, 0.769, 0.9266] +2026-04-09 23:14:41.503374: Epoch time: 186.39 s +2026-04-09 23:14:42.656723: +2026-04-09 23:14:42.661257: Epoch 976 +2026-04-09 23:14:42.666174: Current learning rate: 0.00035 +2026-04-09 23:17:54.918468: train_loss -0.4594 +2026-04-09 23:17:54.934921: val_loss -0.3546 +2026-04-09 23:17:54.941252: Pseudo dice [0.8019, 0.491, 0.5855, 0.724, 0.3685, 0.7531, 0.8064] +2026-04-09 23:17:54.946975: Epoch time: 192.26 s +2026-04-09 23:17:57.423218: +2026-04-09 23:17:57.428645: Epoch 977 +2026-04-09 23:17:57.433298: Current learning rate: 0.00034 
+2026-04-09 23:21:05.892862: train_loss -0.4602 +2026-04-09 23:21:05.909132: val_loss -0.3789 +2026-04-09 23:21:05.913872: Pseudo dice [0.23, 0.4031, 0.6613, 0.923, 0.6532, 0.7814, 0.9289] +2026-04-09 23:21:05.923068: Epoch time: 188.47 s +2026-04-09 23:21:07.068444: +2026-04-09 23:21:07.074881: Epoch 978 +2026-04-09 23:21:07.080130: Current learning rate: 0.00032 +2026-04-09 23:24:20.327855: train_loss -0.4718 +2026-04-09 23:24:20.343384: val_loss -0.4118 +2026-04-09 23:24:20.347942: Pseudo dice [0.34, 0.5508, 0.8543, 0.8923, 0.6719, 0.8676, 0.9206] +2026-04-09 23:24:20.353827: Epoch time: 193.26 s +2026-04-09 23:24:21.510013: +2026-04-09 23:24:21.514047: Epoch 979 +2026-04-09 23:24:21.518295: Current learning rate: 0.00031 +2026-04-09 23:27:30.634693: train_loss -0.4562 +2026-04-09 23:27:30.650680: val_loss -0.3749 +2026-04-09 23:27:30.656802: Pseudo dice [0.8002, 0.455, 0.8383, 0.7469, 0.2849, 0.8246, 0.9573] +2026-04-09 23:27:30.662224: Epoch time: 189.13 s +2026-04-09 23:27:31.784137: +2026-04-09 23:27:31.789243: Epoch 980 +2026-04-09 23:27:31.794684: Current learning rate: 0.0003 +2026-04-09 23:30:44.434438: train_loss -0.4517 +2026-04-09 23:30:44.452104: val_loss -0.3943 +2026-04-09 23:30:44.456668: Pseudo dice [0.4069, 0.5708, 0.6378, 0.8419, 0.5038, 0.88, 0.9418] +2026-04-09 23:30:44.461058: Epoch time: 192.65 s +2026-04-09 23:30:45.627782: +2026-04-09 23:30:45.633435: Epoch 981 +2026-04-09 23:30:45.637242: Current learning rate: 0.00028 +2026-04-09 23:33:50.857374: train_loss -0.4603 +2026-04-09 23:33:50.883312: val_loss -0.3948 +2026-04-09 23:33:50.893469: Pseudo dice [0.758, 0.5267, 0.7305, 0.8541, 0.617, 0.8245, 0.8154] +2026-04-09 23:33:50.901005: Epoch time: 185.23 s +2026-04-09 23:33:50.907312: Yayy! 
New best EMA pseudo Dice: 0.6768 +2026-04-09 23:33:54.324307: +2026-04-09 23:33:54.331760: Epoch 982 +2026-04-09 23:33:54.338172: Current learning rate: 0.00027 +2026-04-09 23:37:08.019372: train_loss -0.4658 +2026-04-09 23:37:08.050463: val_loss -0.3939 +2026-04-09 23:37:08.056311: Pseudo dice [0.1693, 0.6289, 0.8334, 0.87, 0.7267, 0.809, 0.9284] +2026-04-09 23:37:08.074360: Epoch time: 193.7 s +2026-04-09 23:37:08.080737: Yayy! New best EMA pseudo Dice: 0.6801 +2026-04-09 23:37:11.718807: +2026-04-09 23:37:11.724620: Epoch 983 +2026-04-09 23:37:11.731745: Current learning rate: 0.00026 +2026-04-09 23:40:41.141881: train_loss -0.4617 +2026-04-09 23:40:41.158209: val_loss -0.3976 +2026-04-09 23:40:41.163818: Pseudo dice [0.8368, 0.5569, 0.8225, 0.8459, 0.3896, 0.8447, 0.8258] +2026-04-09 23:40:41.170533: Epoch time: 209.43 s +2026-04-09 23:40:41.175550: Yayy! New best EMA pseudo Dice: 0.6852 +2026-04-09 23:40:44.636281: +2026-04-09 23:40:44.642890: Epoch 984 +2026-04-09 23:40:44.647283: Current learning rate: 0.00024 +2026-04-09 23:44:15.560967: train_loss -0.453 +2026-04-09 23:44:15.578852: val_loss -0.3454 +2026-04-09 23:44:15.586002: Pseudo dice [0.797, 0.5981, 0.7151, 0.8261, 0.4788, 0.7866, 0.9185] +2026-04-09 23:44:15.593421: Epoch time: 210.93 s +2026-04-09 23:44:15.599154: Yayy! New best EMA pseudo Dice: 0.6899 +2026-04-09 23:44:18.884049: +2026-04-09 23:44:18.890860: Epoch 985 +2026-04-09 23:44:18.896083: Current learning rate: 0.00023 +2026-04-09 23:47:46.040090: train_loss -0.4554 +2026-04-09 23:47:46.063484: val_loss -0.3891 +2026-04-09 23:47:46.070755: Pseudo dice [0.7399, 0.6045, 0.7834, 0.8808, 0.6219, 0.8057, 0.9003] +2026-04-09 23:47:46.076081: Epoch time: 207.16 s +2026-04-09 23:47:46.082451: Yayy! 
New best EMA pseudo Dice: 0.6971 +2026-04-09 23:47:49.768834: +2026-04-09 23:47:49.774213: Epoch 986 +2026-04-09 23:47:49.779561: Current learning rate: 0.00021 +2026-04-09 23:51:13.364660: train_loss -0.4584 +2026-04-09 23:51:13.384029: val_loss -0.3792 +2026-04-09 23:51:13.391012: Pseudo dice [0.4341, 0.4226, 0.8099, 0.7852, 0.3033, 0.8424, 0.8744] +2026-04-09 23:51:13.395989: Epoch time: 203.6 s +2026-04-09 23:51:14.562328: +2026-04-09 23:51:14.569455: Epoch 987 +2026-04-09 23:51:14.576425: Current learning rate: 0.0002 +2026-04-09 23:54:22.864216: train_loss -0.461 +2026-04-09 23:54:22.881681: val_loss -0.386 +2026-04-09 23:54:22.886507: Pseudo dice [0.5042, 0.419, 0.8232, 0.1934, 0.4195, 0.8548, 0.8245] +2026-04-09 23:54:22.892741: Epoch time: 188.3 s +2026-04-09 23:54:24.030453: +2026-04-09 23:54:24.042241: Epoch 988 +2026-04-09 23:54:24.048799: Current learning rate: 0.00019 +2026-04-09 23:57:38.116203: train_loss -0.458 +2026-04-09 23:57:38.133081: val_loss -0.4098 +2026-04-09 23:57:38.139133: Pseudo dice [0.6329, 0.4906, 0.7775, 0.8451, 0.465, 0.8042, 0.876] +2026-04-09 23:57:38.146556: Epoch time: 194.09 s +2026-04-09 23:57:39.300376: +2026-04-09 23:57:39.309531: Epoch 989 +2026-04-09 23:57:39.315135: Current learning rate: 0.00017 +2026-04-10 00:00:43.279808: train_loss -0.4625 +2026-04-10 00:00:43.296827: val_loss -0.3734 +2026-04-10 00:00:43.300709: Pseudo dice [0.4716, 0.372, 0.7433, 0.5653, 0.3388, 0.8881, 0.8735] +2026-04-10 00:00:43.305386: Epoch time: 183.98 s +2026-04-10 00:00:44.465181: +2026-04-10 00:00:44.471237: Epoch 990 +2026-04-10 00:00:44.476068: Current learning rate: 0.00016 +2026-04-10 00:03:53.225089: train_loss -0.4587 +2026-04-10 00:03:53.241225: val_loss -0.3838 +2026-04-10 00:03:53.246343: Pseudo dice [0.4007, 0.4568, 0.7777, 0.6132, 0.2804, 0.8516, 0.9388] +2026-04-10 00:03:53.252294: Epoch time: 188.76 s +2026-04-10 00:03:54.409461: +2026-04-10 00:03:54.416898: Epoch 991 +2026-04-10 00:03:54.424316: Current learning rate: 
0.00014 +2026-04-10 00:07:14.311272: train_loss -0.4628 +2026-04-10 00:07:14.331741: val_loss -0.3664 +2026-04-10 00:07:14.339299: Pseudo dice [0.29, 0.4406, 0.8203, 0.7357, 0.4329, 0.8113, 0.9217] +2026-04-10 00:07:14.346958: Epoch time: 199.9 s +2026-04-10 00:07:15.496109: +2026-04-10 00:07:15.502470: Epoch 992 +2026-04-10 00:07:15.508065: Current learning rate: 0.00013 +2026-04-10 00:10:28.542449: train_loss -0.4588 +2026-04-10 00:10:28.557621: val_loss -0.3611 +2026-04-10 00:10:28.563966: Pseudo dice [0.7772, 0.4612, 0.6819, 0.1156, 0.5066, 0.8822, 0.8319] +2026-04-10 00:10:28.569073: Epoch time: 193.05 s +2026-04-10 00:10:29.697957: +2026-04-10 00:10:29.703069: Epoch 993 +2026-04-10 00:10:29.709225: Current learning rate: 0.00011 +2026-04-10 00:13:35.963462: train_loss -0.4562 +2026-04-10 00:13:35.983195: val_loss -0.3805 +2026-04-10 00:13:35.988174: Pseudo dice [0.4842, 0.3624, 0.6707, 0.8352, 0.5066, 0.8339, 0.913] +2026-04-10 00:13:35.994906: Epoch time: 186.27 s +2026-04-10 00:13:37.136571: +2026-04-10 00:13:37.143142: Epoch 994 +2026-04-10 00:13:37.148692: Current learning rate: 0.0001 +2026-04-10 00:16:35.649664: train_loss -0.4711 +2026-04-10 00:16:35.670111: val_loss -0.3868 +2026-04-10 00:16:35.678694: Pseudo dice [0.4804, 0.5983, 0.8291, 0.8871, 0.5852, 0.8174, 0.6773] +2026-04-10 00:16:35.685396: Epoch time: 178.52 s +2026-04-10 00:16:36.811382: +2026-04-10 00:16:36.815837: Epoch 995 +2026-04-10 00:16:36.821412: Current learning rate: 8e-05 +2026-04-10 00:19:38.866605: train_loss -0.4578 +2026-04-10 00:19:38.876161: val_loss -0.3619 +2026-04-10 00:19:38.879118: Pseudo dice [0.6846, 0.3759, 0.7278, 0.845, 0.365, 0.8817, 0.6612] +2026-04-10 00:19:38.882782: Epoch time: 182.06 s +2026-04-10 00:19:40.003640: +2026-04-10 00:19:40.008438: Epoch 996 +2026-04-10 00:19:40.013198: Current learning rate: 7e-05 +2026-04-10 00:22:50.368624: train_loss -0.4659 +2026-04-10 00:22:50.389228: val_loss -0.3894 +2026-04-10 00:22:50.394510: Pseudo dice [0.7176, 0.4796, 
0.8459, 0.5623, 0.5288, 0.8469, 0.9056] +2026-04-10 00:22:50.400458: Epoch time: 190.37 s +2026-04-10 00:22:51.546207: +2026-04-10 00:22:51.552615: Epoch 997 +2026-04-10 00:22:51.557735: Current learning rate: 5e-05 +2026-04-10 00:25:52.926022: train_loss -0.4656 +2026-04-10 00:25:52.941236: val_loss -0.3957 +2026-04-10 00:25:52.946729: Pseudo dice [0.6237, 0.6504, 0.8085, 0.6374, 0.6445, 0.637, 0.8178] +2026-04-10 00:25:52.952612: Epoch time: 181.38 s +2026-04-10 00:25:54.071675: +2026-04-10 00:25:54.078788: Epoch 998 +2026-04-10 00:25:54.084560: Current learning rate: 4e-05 +2026-04-10 00:28:58.980733: train_loss -0.4642 +2026-04-10 00:28:58.997737: val_loss -0.4044 +2026-04-10 00:28:59.003396: Pseudo dice [0.6545, 0.5985, 0.7588, 0.7541, 0.483, 0.7681, 0.8275] +2026-04-10 00:28:59.008083: Epoch time: 184.91 s +2026-04-10 00:29:00.144283: +2026-04-10 00:29:00.149173: Epoch 999 +2026-04-10 00:29:00.155451: Current learning rate: 2e-05 +2026-04-10 00:32:16.305368: train_loss -0.4616 +2026-04-10 00:32:16.323550: val_loss -0.4144 +2026-04-10 00:32:16.329062: Pseudo dice [0.7761, 0.5614, 0.8086, 0.8778, 0.5784, 0.8429, 0.8975] +2026-04-10 00:32:16.335181: Epoch time: 196.16 s +2026-04-10 00:32:19.976106: Training done. +2026-04-10 00:32:20.380702: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-10 00:32:20.394436: The split file contains 5 splits. +2026-04-10 00:32:20.401110: Desired fold for training: 3 +2026-04-10 00:32:20.406861: This split has 387 training and 97 validation cases. 
+2026-04-10 00:32:20.411661: predicting MSWAL_0002 +2026-04-10 00:32:20.435430: MSWAL_0002, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-10 00:33:15.851245: predicting MSWAL_0003 +2026-04-10 00:33:15.895756: MSWAL_0003, shape torch.Size([1, 381, 603, 603]), rank 0 +2026-04-10 00:34:05.340234: predicting MSWAL_0013 +2026-04-10 00:34:05.386506: MSWAL_0013, shape torch.Size([1, 165, 520, 520]), rank 0 +2026-04-10 00:34:22.135346: predicting MSWAL_0037 +2026-04-10 00:34:22.171811: MSWAL_0037, shape torch.Size([1, 168, 507, 507]), rank 0 +2026-04-10 00:34:32.723984: predicting MSWAL_0038 +2026-04-10 00:34:32.770470: MSWAL_0038, shape torch.Size([1, 293, 528, 528]), rank 0 +2026-04-10 00:35:10.993757: predicting MSWAL_0049 +2026-04-10 00:35:11.043299: MSWAL_0049, shape torch.Size([1, 185, 507, 507]), rank 0 +2026-04-10 00:35:25.660356: predicting MSWAL_0055 +2026-04-10 00:35:25.707043: MSWAL_0055, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-10 00:35:40.748047: predicting MSWAL_0093 +2026-04-10 00:35:40.807458: MSWAL_0093, shape torch.Size([1, 265, 507, 507]), rank 0 +2026-04-10 00:36:00.707420: predicting MSWAL_0098 +2026-04-10 00:36:00.746258: MSWAL_0098, shape torch.Size([1, 159, 429, 429]), rank 0 +2026-04-10 00:36:09.941186: predicting MSWAL_0103 +2026-04-10 00:36:09.961528: MSWAL_0103, shape torch.Size([1, 172, 472, 472]), rank 0 +2026-04-10 00:36:23.336728: predicting MSWAL_0113 +2026-04-10 00:36:23.375540: MSWAL_0113, shape torch.Size([1, 277, 543, 543]), rank 0 +2026-04-10 00:36:55.941747: predicting MSWAL_0114 +2026-04-10 00:36:55.985266: MSWAL_0114, shape torch.Size([1, 333, 507, 507]), rank 0 +2026-04-10 00:37:20.717825: predicting MSWAL_0126 +2026-04-10 00:37:20.762112: MSWAL_0126, shape torch.Size([1, 169, 507, 507]), rank 0 +2026-04-10 00:37:35.427210: predicting MSWAL_0133 +2026-04-10 00:37:35.459818: MSWAL_0133, shape torch.Size([1, 288, 540, 540]), rank 0 +2026-04-10 00:38:15.806334: predicting MSWAL_0134 +2026-04-10 00:38:15.848610: 
MSWAL_0134, shape torch.Size([1, 166, 480, 480]), rank 0 +2026-04-10 00:38:26.085707: predicting MSWAL_0136 +2026-04-10 00:38:26.122946: MSWAL_0136, shape torch.Size([1, 258, 515, 515]), rank 0 +2026-04-10 00:38:58.683044: predicting MSWAL_0139 +2026-04-10 00:38:58.749216: MSWAL_0139, shape torch.Size([1, 451, 601, 601]), rank 0 +2026-04-10 00:40:04.760041: predicting MSWAL_0141 +2026-04-10 00:40:04.827690: MSWAL_0141, shape torch.Size([1, 414, 545, 545]), rank 0 +2026-04-10 00:41:02.073746: predicting MSWAL_0147 +2026-04-10 00:41:02.124664: MSWAL_0147, shape torch.Size([1, 310, 480, 480]), rank 0 +2026-04-10 00:41:26.039621: predicting MSWAL_0159 +2026-04-10 00:41:26.092570: MSWAL_0159, shape torch.Size([1, 294, 532, 532]), rank 0 +2026-04-10 00:42:06.806563: predicting MSWAL_0163 +2026-04-10 00:42:06.843555: MSWAL_0163, shape torch.Size([1, 310, 480, 480]), rank 0 +2026-04-10 00:42:30.718222: predicting MSWAL_0169 +2026-04-10 00:42:30.791451: MSWAL_0169, shape torch.Size([1, 274, 565, 565]), rank 0 +2026-04-10 00:43:05.161395: predicting MSWAL_0174 +2026-04-10 00:43:05.217432: MSWAL_0174, shape torch.Size([1, 434, 607, 607]), rank 0 +2026-04-10 00:44:05.067323: predicting MSWAL_0178 +2026-04-10 00:44:05.132169: MSWAL_0178, shape torch.Size([1, 310, 573, 573]), rank 0 +2026-04-10 00:44:46.823696: predicting MSWAL_0179 +2026-04-10 00:44:46.880985: MSWAL_0179, shape torch.Size([1, 322, 509, 509]), rank 0 +2026-04-10 00:45:11.984621: predicting MSWAL_0185 +2026-04-10 00:45:12.060095: MSWAL_0185, shape torch.Size([1, 190, 545, 545]), rank 0 +2026-04-10 00:45:36.825849: predicting MSWAL_0199 +2026-04-10 00:45:36.861716: MSWAL_0199, shape torch.Size([1, 182, 504, 504]), rank 0 +2026-04-10 00:45:51.480124: predicting MSWAL_0220 +2026-04-10 00:45:51.508759: MSWAL_0220, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-10 00:46:05.860034: predicting MSWAL_0222 +2026-04-10 00:46:05.889362: MSWAL_0222, shape torch.Size([1, 265, 491, 491]), rank 0 +2026-04-10 
00:46:25.309788: predicting MSWAL_0243 +2026-04-10 00:46:25.382201: MSWAL_0243, shape torch.Size([1, 264, 557, 557]), rank 0 +2026-04-10 00:46:58.682359: predicting MSWAL_0255 +2026-04-10 00:46:58.724051: MSWAL_0255, shape torch.Size([1, 305, 527, 527]), rank 0 +2026-04-10 00:47:39.360402: predicting MSWAL_0260 +2026-04-10 00:47:39.415803: MSWAL_0260, shape torch.Size([1, 142, 525, 525]), rank 0 +2026-04-10 00:47:56.194092: predicting MSWAL_0274 +2026-04-10 00:47:56.234943: MSWAL_0274, shape torch.Size([1, 334, 480, 480]), rank 0 +2026-04-10 00:48:20.481978: predicting MSWAL_0275 +2026-04-10 00:48:20.547378: MSWAL_0275, shape torch.Size([1, 320, 521, 521]), rank 0 +2026-04-10 00:49:01.273835: predicting MSWAL_0276 +2026-04-10 00:49:01.316895: MSWAL_0276, shape torch.Size([1, 284, 516, 516]), rank 0 +2026-04-10 00:49:41.581315: predicting MSWAL_0281 +2026-04-10 00:49:41.645061: MSWAL_0281, shape torch.Size([1, 357, 557, 557]), rank 0 +2026-04-10 00:50:30.204550: predicting MSWAL_0282 +2026-04-10 00:50:30.258325: MSWAL_0282, shape torch.Size([1, 233, 543, 543]), rank 0 +2026-04-10 00:51:03.239982: predicting MSWAL_0283 +2026-04-10 00:51:03.286049: MSWAL_0283, shape torch.Size([1, 217, 507, 507]), rank 0 +2026-04-10 00:51:19.595047: predicting MSWAL_0290 +2026-04-10 00:51:19.636398: MSWAL_0290, shape torch.Size([1, 193, 507, 507]), rank 0 +2026-04-10 00:51:33.755436: predicting MSWAL_0303 +2026-04-10 00:51:33.799277: MSWAL_0303, shape torch.Size([1, 269, 507, 507]), rank 0 +2026-04-10 00:51:53.694325: predicting MSWAL_0306 +2026-04-10 00:51:53.721730: MSWAL_0306, shape torch.Size([1, 205, 507, 507]), rank 0 +2026-04-10 00:52:08.391519: predicting MSWAL_0308 +2026-04-10 00:52:08.476912: MSWAL_0308, shape torch.Size([1, 229, 507, 507]), rank 0 +2026-04-10 00:52:27.935130: predicting MSWAL_0324 +2026-04-10 00:52:28.010293: MSWAL_0324, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-10 00:52:42.880596: predicting MSWAL_0326 +2026-04-10 00:52:42.921145: MSWAL_0326, 
shape torch.Size([1, 197, 507, 507]), rank 0 +2026-04-10 00:52:57.938973: predicting MSWAL_0330 +2026-04-10 00:52:57.967843: MSWAL_0330, shape torch.Size([1, 189, 507, 507]), rank 0 +2026-04-10 00:53:12.271759: predicting MSWAL_0334 +2026-04-10 00:53:12.351180: MSWAL_0334, shape torch.Size([1, 197, 507, 507]), rank 0 +2026-04-10 00:53:27.447868: predicting MSWAL_0335 +2026-04-10 00:53:27.477188: MSWAL_0335, shape torch.Size([1, 361, 591, 591]), rank 0 +2026-04-10 00:54:16.108002: predicting MSWAL_0338 +2026-04-10 00:54:16.191471: MSWAL_0338, shape torch.Size([1, 317, 617, 617]), rank 0 +2026-04-10 00:54:58.464564: predicting MSWAL_0342 +2026-04-10 00:54:58.498855: MSWAL_0342, shape torch.Size([1, 309, 493, 493]), rank 0 +2026-04-10 00:55:22.419871: predicting MSWAL_0346 +2026-04-10 00:55:22.499521: MSWAL_0346, shape torch.Size([1, 157, 507, 507]), rank 0 +2026-04-10 00:55:32.906490: predicting MSWAL_0354 +2026-04-10 00:55:32.944171: MSWAL_0354, shape torch.Size([1, 283, 519, 519]), rank 0 +2026-04-10 00:56:12.540839: predicting MSWAL_0375 +2026-04-10 00:56:12.590569: MSWAL_0375, shape torch.Size([1, 277, 568, 568]), rank 0 +2026-04-10 00:56:46.478223: predicting MSWAL_0376 +2026-04-10 00:56:46.553125: MSWAL_0376, shape torch.Size([1, 305, 537, 537]), rank 0 +2026-04-10 00:57:27.315308: predicting MSWAL_0381 +2026-04-10 00:57:27.361475: MSWAL_0381, shape torch.Size([1, 325, 541, 541]), rank 0 +2026-04-10 00:58:08.322717: predicting MSWAL_0389 +2026-04-10 00:58:08.361670: MSWAL_0389, shape torch.Size([1, 217, 667, 667]), rank 0 +2026-04-10 00:58:47.157876: predicting MSWAL_0390 +2026-04-10 00:58:47.192534: MSWAL_0390, shape torch.Size([1, 273, 560, 560]), rank 0 +2026-04-10 00:59:20.624512: predicting MSWAL_0402 +2026-04-10 00:59:20.664865: MSWAL_0402, shape torch.Size([1, 149, 536, 536]), rank 0 +2026-04-10 00:59:37.696348: predicting MSWAL_0409 +2026-04-10 00:59:37.733482: MSWAL_0409, shape torch.Size([1, 415, 559, 559]), rank 0 +2026-04-10 01:00:34.966079: 
predicting MSWAL_0414 +2026-04-10 01:00:35.047668: MSWAL_0414, shape torch.Size([1, 305, 521, 521]), rank 0 +2026-04-10 01:01:15.879940: predicting MSWAL_0423 +2026-04-10 01:01:15.918565: MSWAL_0423, shape torch.Size([1, 337, 529, 529]), rank 0 +2026-04-10 01:02:03.831634: predicting MSWAL_0432 +2026-04-10 01:02:03.897740: MSWAL_0432, shape torch.Size([1, 357, 507, 507]), rank 0 +2026-04-10 01:02:33.692140: predicting MSWAL_0439 +2026-04-10 01:02:33.723042: MSWAL_0439, shape torch.Size([1, 337, 559, 559]), rank 0 +2026-04-10 01:03:21.495075: predicting MSWAL_0460 +2026-04-10 01:03:21.536760: MSWAL_0460, shape torch.Size([1, 197, 595, 595]), rank 0 +2026-04-10 01:03:46.939801: predicting MSWAL_0463 +2026-04-10 01:03:46.990057: MSWAL_0463, shape torch.Size([1, 197, 507, 507]), rank 0 +2026-04-10 01:04:01.588598: predicting MSWAL_0466 +2026-04-10 01:04:01.622842: MSWAL_0466, shape torch.Size([1, 317, 584, 584]), rank 0 +2026-04-10 01:04:42.951284: predicting MSWAL_0468 +2026-04-10 01:04:42.993408: MSWAL_0468, shape torch.Size([1, 377, 615, 615]), rank 0 +2026-04-10 01:05:32.422191: predicting MSWAL_0473 +2026-04-10 01:05:32.487066: MSWAL_0473, shape torch.Size([1, 177, 579, 579]), rank 0 +2026-04-10 01:05:57.005162: predicting MSWAL_0483 +2026-04-10 01:05:57.066287: MSWAL_0483, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-10 01:06:11.863505: predicting MSWAL_0486 +2026-04-10 01:06:11.942129: MSWAL_0486, shape torch.Size([1, 201, 507, 507]), rank 0 +2026-04-10 01:06:27.249147: predicting MSWAL_0497 +2026-04-10 01:06:27.285921: MSWAL_0497, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-10 01:06:41.292810: predicting MSWAL_0498 +2026-04-10 01:06:41.345044: MSWAL_0498, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-10 01:06:56.883847: predicting MSWAL_0521 +2026-04-10 01:06:56.915644: MSWAL_0521, shape torch.Size([1, 261, 517, 517]), rank 0 +2026-04-10 01:07:29.267884: predicting MSWAL_0526 +2026-04-10 01:07:29.324955: MSWAL_0526, shape 
torch.Size([1, 397, 507, 507]), rank 0 +2026-04-10 01:08:02.945060: predicting MSWAL_0531 +2026-04-10 01:08:02.986068: MSWAL_0531, shape torch.Size([1, 145, 531, 531]), rank 0 +2026-04-10 01:08:19.285958: predicting MSWAL_0539 +2026-04-10 01:08:19.325326: MSWAL_0539, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-10 01:08:33.775886: predicting MSWAL_0549 +2026-04-10 01:08:33.845203: MSWAL_0549, shape torch.Size([1, 321, 507, 507]), rank 0 +2026-04-10 01:08:58.288928: predicting MSWAL_0550 +2026-04-10 01:08:58.326188: MSWAL_0550, shape torch.Size([1, 269, 652, 652]), rank 0 +2026-04-10 01:09:49.025151: predicting MSWAL_0553 +2026-04-10 01:09:49.112838: MSWAL_0553, shape torch.Size([1, 201, 507, 507]), rank 0 +2026-04-10 01:10:04.112839: predicting MSWAL_0557 +2026-04-10 01:10:04.155647: MSWAL_0557, shape torch.Size([1, 412, 553, 553]), rank 0 +2026-04-10 01:11:00.937008: predicting MSWAL_0561 +2026-04-10 01:11:01.000147: MSWAL_0561, shape torch.Size([1, 289, 507, 507]), rank 0 +2026-04-10 01:11:25.248770: predicting MSWAL_0574 +2026-04-10 01:11:25.289463: MSWAL_0574, shape torch.Size([1, 166, 480, 480]), rank 0 +2026-04-10 01:11:34.874099: predicting MSWAL_0579 +2026-04-10 01:11:34.931607: MSWAL_0579, shape torch.Size([1, 302, 480, 480]), rank 0 +2026-04-10 01:11:59.043169: predicting MSWAL_0581 +2026-04-10 01:11:59.077523: MSWAL_0581, shape torch.Size([1, 205, 572, 572]), rank 0 +2026-04-10 01:12:24.143895: predicting MSWAL_0591 +2026-04-10 01:12:24.198655: MSWAL_0591, shape torch.Size([1, 166, 544, 544]), rank 0 +2026-04-10 01:12:41.318770: predicting MSWAL_0593 +2026-04-10 01:12:41.351047: MSWAL_0593, shape torch.Size([1, 158, 480, 480]), rank 0 +2026-04-10 01:12:51.379376: predicting MSWAL_0598 +2026-04-10 01:12:51.425987: MSWAL_0598, shape torch.Size([1, 174, 480, 480]), rank 0 +2026-04-10 01:13:05.449836: predicting MSWAL_0601 +2026-04-10 01:13:05.521889: MSWAL_0601, shape torch.Size([1, 154, 480, 480]), rank 0 +2026-04-10 01:13:15.428762: predicting 
MSWAL_0602 +2026-04-10 01:13:15.493734: MSWAL_0602, shape torch.Size([1, 338, 568, 568]), rank 0 +2026-04-10 01:14:04.435418: predicting MSWAL_0604 +2026-04-10 01:14:04.506539: MSWAL_0604, shape torch.Size([1, 346, 480, 480]), rank 0 +2026-04-10 01:14:33.014955: predicting MSWAL_0605 +2026-04-10 01:14:33.102526: MSWAL_0605, shape torch.Size([1, 314, 537, 537]), rank 0 +2026-04-10 01:15:13.706265: predicting MSWAL_0615 +2026-04-10 01:15:13.751312: MSWAL_0615, shape torch.Size([1, 366, 597, 597]), rank 0 +2026-04-10 01:16:03.731879: predicting MSWAL_0630 +2026-04-10 01:16:03.784252: MSWAL_0630, shape torch.Size([1, 266, 475, 475]), rank 0 +2026-04-10 01:16:23.320096: predicting MSWAL_0656 +2026-04-10 01:16:23.385443: MSWAL_0656, shape torch.Size([1, 137, 420, 420]), rank 0 +2026-04-10 01:16:32.846045: predicting MSWAL_0661 +2026-04-10 01:16:32.872907: MSWAL_0661, shape torch.Size([1, 157, 469, 469]), rank 0 +2026-04-10 01:16:42.726257: predicting MSWAL_0669 +2026-04-10 01:16:42.773265: MSWAL_0669, shape torch.Size([1, 321, 507, 507]), rank 0 +2026-04-10 01:17:08.005114: predicting MSWAL_0671 +2026-04-10 01:17:08.072452: MSWAL_0671, shape torch.Size([1, 317, 507, 507]), rank 0 +2026-04-10 01:17:32.904294: predicting MSWAL_0680 +2026-04-10 01:17:32.943060: MSWAL_0680, shape torch.Size([1, 328, 585, 585]), rank 0 +2026-04-10 01:19:56.350061: Validation complete +2026-04-10 01:19:56.358537: Mean Validation Dice: 0.4287475912537947 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/checkpoint_best.pth b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/checkpoint_best.pth new file mode 100644 index 0000000000000000000000000000000000000000..1338ca5ec0e3370a46fc7f89c39bee4a9a8924f0 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/checkpoint_best.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:29dfb790c62eb1eb49cf51236ea5acafe218a01badf2a9ecaf67aa5adfaf70b9 +size 1129843858 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/checkpoint_final.pth b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/checkpoint_final.pth new file mode 100644 index 0000000000000000000000000000000000000000..14edb78515efb6d8eca540341547422c47ba059f --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/checkpoint_final.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:04967b88b891c459c693825f1b70ea2d473d4139ba3ce48dd3e6f8a86ee3ed82 +size 1129849750 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/debug.json b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/debug.json new file mode 100644 index 0000000000000000000000000000000000000000..de96374ed2aec5b4944f1eaac06fb734e3518395 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/debug.json @@ -0,0 +1,53 @@ +{ + "_best_ema": "None", + "batch_size": "2", + "configuration_manager": "{'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 
'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}", + "configuration_name": "3d_fullres", + "cudnn_version": 90100, + "current_epoch": "0", + "dataloader_train": "", + "dataloader_train.generator": "", + "dataloader_train.num_processes": "12", + "dataloader_train.transform": "None", + "dataloader_val": "", + "dataloader_val.generator": "", + "dataloader_val.num_processes": "6", + "dataloader_val.transform": "None", + "dataset_json": "{'name': 'MSWAL', 'description': ' 3D Multi-class Segmentation of Whole Abdominal Lesions Dataset', 'licence': 'CC BY-NC 4.0', 'relase': 'July 8, 2025', 'tensorImageSize': '3D', 'file_ending': '.nii.gz', 'channel_names': {'0': 'CT'}, 'labels': {'background': 0, 'gallstone': 1, 'kidney stone': 2, 'liver tumor': 3, 'kidney tumor': 4, 'pancreatic cancer': 5, 'liver cyst': 6, 'kidney cyst': 7}, 'numTraining': 484, 'numTest': 210, 'training': [{'image': './imagesTr/MSWAL_0001_0000.nii.gz', 'label': './labelsTr/MSWAL_0001.nii.gz'}, {'image': './imagesTr/MSWAL_0002_0000.nii.gz', 'label': './labelsTr/MSWAL_0002.nii.gz'}, {'image': './imagesTr/MSWAL_0003_0000.nii.gz', 'label': './labelsTr/MSWAL_0003.nii.gz'}, {'image': './imagesTr/MSWAL_0008_0000.nii.gz', 'label': 
'./labelsTr/MSWAL_0008.nii.gz'}, {'image': './imagesTr/MSWAL_0009_0000.nii.gz', 'label': './labelsTr/MSWAL_0009.nii.gz'}, {'image': './imagesTr/MSWAL_0011_0000.nii.gz', 'label': './labelsTr/MSWAL_0011.nii.gz'}, {'image': './imagesTr/MSWAL_0013_0000.nii.gz', 'label': './labelsTr/MSWAL_0013.nii.gz'}, {'image': './imagesTr/MSWAL_0014_0000.nii.gz', 'label': './labelsTr/MSWAL_0014.nii.gz'}, {'image': './imagesTr/MSWAL_0015_0000.nii.gz', 'label': './labelsTr/MSWAL_0015.nii.gz'}, {'image': './imagesTr/MSWAL_0017_0000.nii.gz', 'label': './labelsTr/MSWAL_0017.nii.gz'}, {'image': './imagesTr/MSWAL_0018_0000.nii.gz', 'label': './labelsTr/MSWAL_0018.nii.gz'}, {'image': './imagesTr/MSWAL_0020_0000.nii.gz', 'label': './labelsTr/MSWAL_0020.nii.gz'}, {'image': './imagesTr/MSWAL_0021_0000.nii.gz', 'label': './labelsTr/MSWAL_0021.nii.gz'}, {'image': './imagesTr/MSWAL_0022_0000.nii.gz', 'label': './labelsTr/MSWAL_0022.nii.gz'}, {'image': './imagesTr/MSWAL_0024_0000.nii.gz', 'label': './labelsTr/MSWAL_0024.nii.gz'}, {'image': './imagesTr/MSWAL_0026_0000.nii.gz', 'label': './labelsTr/MSWAL_0026.nii.gz'}, {'image': './imagesTr/MSWAL_0027_0000.nii.gz', 'label': './labelsTr/MSWAL_0027.nii.gz'}, {'image': './imagesTr/MSWAL_0028_0000.nii.gz', 'label': './labelsTr/MSWAL_0028.nii.gz'}, {'image': './imagesTr/MSWAL_0029_0000.nii.gz', 'label': './labelsTr/MSWAL_0029.nii.gz'}, {'image': './imagesTr/MSWAL_0031_0000.nii.gz', 'label': './labelsTr/MSWAL_0031.nii.gz'}, {'image': './imagesTr/MSWAL_0032_0000.nii.gz', 'label': './labelsTr/MSWAL_0032.nii.gz'}, {'image': './imagesTr/MSWAL_0033_0000.nii.gz', 'label': './labelsTr/MSWAL_0033.nii.gz'}, {'image': './imagesTr/MSWAL_0034_0000.nii.gz', 'label': './labelsTr/MSWAL_0034.nii.gz'}, {'image': './imagesTr/MSWAL_0035_0000.nii.gz', 'label': './labelsTr/MSWAL_0035.nii.gz'}, {'image': './imagesTr/MSWAL_0037_0000.nii.gz', 'label': './labelsTr/MSWAL_0037.nii.gz'}, {'image': './imagesTr/MSWAL_0038_0000.nii.gz', 'label': './labelsTr/MSWAL_0038.nii.gz'}, 
{'image': './imagesTr/MSWAL_0039_0000.nii.gz', 'label': './labelsTr/MSWAL_0039.nii.gz'}, {'image': './imagesTr/MSWAL_0040_0000.nii.gz', 'label': './labelsTr/MSWAL_0040.nii.gz'}, {'image': './imagesTr/MSWAL_0041_0000.nii.gz', 'label': './labelsTr/MSWAL_0041.nii.gz'}, {'image': './imagesTr/MSWAL_0042_0000.nii.gz', 'label': './labelsTr/MSWAL_0042.nii.gz'}, {'image': './imagesTr/MSWAL_0045_0000.nii.gz', 'label': './labelsTr/MSWAL_0045.nii.gz'}, {'image': './imagesTr/MSWAL_0046_0000.nii.gz', 'label': './labelsTr/MSWAL_0046.nii.gz'}, {'image': './imagesTr/MSWAL_0049_0000.nii.gz', 'label': './labelsTr/MSWAL_0049.nii.gz'}, {'image': './imagesTr/MSWAL_0050_0000.nii.gz', 'label': './labelsTr/MSWAL_0050.nii.gz'}, {'image': './imagesTr/MSWAL_0051_0000.nii.gz', 'label': './labelsTr/MSWAL_0051.nii.gz'}, {'image': './imagesTr/MSWAL_0052_0000.nii.gz', 'label': './labelsTr/MSWAL_0052.nii.gz'}, {'image': './imagesTr/MSWAL_0054_0000.nii.gz', 'label': './labelsTr/MSWAL_0054.nii.gz'}, {'image': './imagesTr/MSWAL_0055_0000.nii.gz', 'label': './labelsTr/MSWAL_0055.nii.gz'}, {'image': './imagesTr/MSWAL_0056_0000.nii.gz', 'label': './labelsTr/MSWAL_0056.nii.gz'}, {'image': './imagesTr/MSWAL_0057_0000.nii.gz', 'label': './labelsTr/MSWAL_0057.nii.gz'}, {'image': './imagesTr/MSWAL_0059_0000.nii.gz', 'label': './labelsTr/MSWAL_0059.nii.gz'}, {'image': './imagesTr/MSWAL_0060_0000.nii.gz', 'label': './labelsTr/MSWAL_0060.nii.gz'}, {'image': './imagesTr/MSWAL_0061_0000.nii.gz', 'label': './labelsTr/MSWAL_0061.nii.gz'}, {'image': './imagesTr/MSWAL_0063_0000.nii.gz', 'label': './labelsTr/MSWAL_0063.nii.gz'}, {'image': './imagesTr/MSWAL_0064_0000.nii.gz', 'label': './labelsTr/MSWAL_0064.nii.gz'}, {'image': './imagesTr/MSWAL_0065_0000.nii.gz', 'label': './labelsTr/MSWAL_0065.nii.gz'}, {'image': './imagesTr/MSWAL_0066_0000.nii.gz', 'label': './labelsTr/MSWAL_0066.nii.gz'}, {'image': './imagesTr/MSWAL_0067_0000.nii.gz', 'label': './labelsTr/MSWAL_0067.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0069_0000.nii.gz', 'label': './labelsTr/MSWAL_0069.nii.gz'}, {'image': './imagesTr/MSWAL_0072_0000.nii.gz', 'label': './labelsTr/MSWAL_0072.nii.gz'}, {'image': './imagesTr/MSWAL_0075_0000.nii.gz', 'label': './labelsTr/MSWAL_0075.nii.gz'}, {'image': './imagesTr/MSWAL_0077_0000.nii.gz', 'label': './labelsTr/MSWAL_0077.nii.gz'}, {'image': './imagesTr/MSWAL_0080_0000.nii.gz', 'label': './labelsTr/MSWAL_0080.nii.gz'}, {'image': './imagesTr/MSWAL_0082_0000.nii.gz', 'label': './labelsTr/MSWAL_0082.nii.gz'}, {'image': './imagesTr/MSWAL_0083_0000.nii.gz', 'label': './labelsTr/MSWAL_0083.nii.gz'}, {'image': './imagesTr/MSWAL_0084_0000.nii.gz', 'label': './labelsTr/MSWAL_0084.nii.gz'}, {'image': './imagesTr/MSWAL_0085_0000.nii.gz', 'label': './labelsTr/MSWAL_0085.nii.gz'}, {'image': './imagesTr/MSWAL_0086_0000.nii.gz', 'label': './labelsTr/MSWAL_0086.nii.gz'}, {'image': './imagesTr/MSWAL_0088_0000.nii.gz', 'label': './labelsTr/MSWAL_0088.nii.gz'}, {'image': './imagesTr/MSWAL_0089_0000.nii.gz', 'label': './labelsTr/MSWAL_0089.nii.gz'}, {'image': './imagesTr/MSWAL_0092_0000.nii.gz', 'label': './labelsTr/MSWAL_0092.nii.gz'}, {'image': './imagesTr/MSWAL_0093_0000.nii.gz', 'label': './labelsTr/MSWAL_0093.nii.gz'}, {'image': './imagesTr/MSWAL_0094_0000.nii.gz', 'label': './labelsTr/MSWAL_0094.nii.gz'}, {'image': './imagesTr/MSWAL_0095_0000.nii.gz', 'label': './labelsTr/MSWAL_0095.nii.gz'}, {'image': './imagesTr/MSWAL_0096_0000.nii.gz', 'label': './labelsTr/MSWAL_0096.nii.gz'}, {'image': './imagesTr/MSWAL_0098_0000.nii.gz', 'label': './labelsTr/MSWAL_0098.nii.gz'}, {'image': './imagesTr/MSWAL_0099_0000.nii.gz', 'label': './labelsTr/MSWAL_0099.nii.gz'}, {'image': './imagesTr/MSWAL_0101_0000.nii.gz', 'label': './labelsTr/MSWAL_0101.nii.gz'}, {'image': './imagesTr/MSWAL_0102_0000.nii.gz', 'label': './labelsTr/MSWAL_0102.nii.gz'}, {'image': './imagesTr/MSWAL_0103_0000.nii.gz', 'label': './labelsTr/MSWAL_0103.nii.gz'}, {'image': './imagesTr/MSWAL_0104_0000.nii.gz', 
'label': './labelsTr/MSWAL_0104.nii.gz'}, {'image': './imagesTr/MSWAL_0105_0000.nii.gz', 'label': './labelsTr/MSWAL_0105.nii.gz'}, {'image': './imagesTr/MSWAL_0106_0000.nii.gz', 'label': './labelsTr/MSWAL_0106.nii.gz'}, {'image': './imagesTr/MSWAL_0108_0000.nii.gz', 'label': './labelsTr/MSWAL_0108.nii.gz'}, {'image': './imagesTr/MSWAL_0109_0000.nii.gz', 'label': './labelsTr/MSWAL_0109.nii.gz'}, {'image': './imagesTr/MSWAL_0110_0000.nii.gz', 'label': './labelsTr/MSWAL_0110.nii.gz'}, {'image': './imagesTr/MSWAL_0111_0000.nii.gz', 'label': './labelsTr/MSWAL_0111.nii.gz'}, {'image': './imagesTr/MSWAL_0112_0000.nii.gz', 'label': './labelsTr/MSWAL_0112.nii.gz'}, {'image': './imagesTr/MSWAL_0113_0000.nii.gz', 'label': './labelsTr/MSWAL_0113.nii.gz'}, {'image': './imagesTr/MSWAL_0114_0000.nii.gz', 'label': './labelsTr/MSWAL_0114.nii.gz'}, {'image': './imagesTr/MSWAL_0117_0000.nii.gz', 'label': './labelsTr/MSWAL_0117.nii.gz'}, {'image': './imagesTr/MSWAL_0119_0000.nii.gz', 'label': './labelsTr/MSWAL_0119.nii.gz'}, {'image': './imagesTr/MSWAL_0120_0000.nii.gz', 'label': './labelsTr/MSWAL_0120.nii.gz'}, {'image': './imagesTr/MSWAL_0122_0000.nii.gz', 'label': './labelsTr/MSWAL_0122.nii.gz'}, {'image': './imagesTr/MSWAL_0124_0000.nii.gz', 'label': './labelsTr/MSWAL_0124.nii.gz'}, {'image': './imagesTr/MSWAL_0125_0000.nii.gz', 'label': './labelsTr/MSWAL_0125.nii.gz'}, {'image': './imagesTr/MSWAL_0126_0000.nii.gz', 'label': './labelsTr/MSWAL_0126.nii.gz'}, {'image': './imagesTr/MSWAL_0127_0000.nii.gz', 'label': './labelsTr/MSWAL_0127.nii.gz'}, {'image': './imagesTr/MSWAL_0128_0000.nii.gz', 'label': './labelsTr/MSWAL_0128.nii.gz'}, {'image': './imagesTr/MSWAL_0129_0000.nii.gz', 'label': './labelsTr/MSWAL_0129.nii.gz'}, {'image': './imagesTr/MSWAL_0130_0000.nii.gz', 'label': './labelsTr/MSWAL_0130.nii.gz'}, {'image': './imagesTr/MSWAL_0132_0000.nii.gz', 'label': './labelsTr/MSWAL_0132.nii.gz'}, {'image': './imagesTr/MSWAL_0133_0000.nii.gz', 'label': './labelsTr/MSWAL_0133.nii.gz'}, 
{'image': './imagesTr/MSWAL_0134_0000.nii.gz', 'label': './labelsTr/MSWAL_0134.nii.gz'}, {'image': './imagesTr/MSWAL_0136_0000.nii.gz', 'label': './labelsTr/MSWAL_0136.nii.gz'}, {'image': './imagesTr/MSWAL_0138_0000.nii.gz', 'label': './labelsTr/MSWAL_0138.nii.gz'}, {'image': './imagesTr/MSWAL_0139_0000.nii.gz', 'label': './labelsTr/MSWAL_0139.nii.gz'}, {'image': './imagesTr/MSWAL_0140_0000.nii.gz', 'label': './labelsTr/MSWAL_0140.nii.gz'}, {'image': './imagesTr/MSWAL_0141_0000.nii.gz', 'label': './labelsTr/MSWAL_0141.nii.gz'}, {'image': './imagesTr/MSWAL_0142_0000.nii.gz', 'label': './labelsTr/MSWAL_0142.nii.gz'}, {'image': './imagesTr/MSWAL_0143_0000.nii.gz', 'label': './labelsTr/MSWAL_0143.nii.gz'}, {'image': './imagesTr/MSWAL_0145_0000.nii.gz', 'label': './labelsTr/MSWAL_0145.nii.gz'}, {'image': './imagesTr/MSWAL_0147_0000.nii.gz', 'label': './labelsTr/MSWAL_0147.nii.gz'}, {'image': './imagesTr/MSWAL_0148_0000.nii.gz', 'label': './labelsTr/MSWAL_0148.nii.gz'}, {'image': './imagesTr/MSWAL_0149_0000.nii.gz', 'label': './labelsTr/MSWAL_0149.nii.gz'}, {'image': './imagesTr/MSWAL_0150_0000.nii.gz', 'label': './labelsTr/MSWAL_0150.nii.gz'}, {'image': './imagesTr/MSWAL_0151_0000.nii.gz', 'label': './labelsTr/MSWAL_0151.nii.gz'}, {'image': './imagesTr/MSWAL_0152_0000.nii.gz', 'label': './labelsTr/MSWAL_0152.nii.gz'}, {'image': './imagesTr/MSWAL_0157_0000.nii.gz', 'label': './labelsTr/MSWAL_0157.nii.gz'}, {'image': './imagesTr/MSWAL_0159_0000.nii.gz', 'label': './labelsTr/MSWAL_0159.nii.gz'}, {'image': './imagesTr/MSWAL_0162_0000.nii.gz', 'label': './labelsTr/MSWAL_0162.nii.gz'}, {'image': './imagesTr/MSWAL_0163_0000.nii.gz', 'label': './labelsTr/MSWAL_0163.nii.gz'}, {'image': './imagesTr/MSWAL_0165_0000.nii.gz', 'label': './labelsTr/MSWAL_0165.nii.gz'}, {'image': './imagesTr/MSWAL_0166_0000.nii.gz', 'label': './labelsTr/MSWAL_0166.nii.gz'}, {'image': './imagesTr/MSWAL_0167_0000.nii.gz', 'label': './labelsTr/MSWAL_0167.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0168_0000.nii.gz', 'label': './labelsTr/MSWAL_0168.nii.gz'}, {'image': './imagesTr/MSWAL_0169_0000.nii.gz', 'label': './labelsTr/MSWAL_0169.nii.gz'}, {'image': './imagesTr/MSWAL_0170_0000.nii.gz', 'label': './labelsTr/MSWAL_0170.nii.gz'}, {'image': './imagesTr/MSWAL_0171_0000.nii.gz', 'label': './labelsTr/MSWAL_0171.nii.gz'}, {'image': './imagesTr/MSWAL_0172_0000.nii.gz', 'label': './labelsTr/MSWAL_0172.nii.gz'}, {'image': './imagesTr/MSWAL_0173_0000.nii.gz', 'label': './labelsTr/MSWAL_0173.nii.gz'}, {'image': './imagesTr/MSWAL_0174_0000.nii.gz', 'label': './labelsTr/MSWAL_0174.nii.gz'}, {'image': './imagesTr/MSWAL_0175_0000.nii.gz', 'label': './labelsTr/MSWAL_0175.nii.gz'}, {'image': './imagesTr/MSWAL_0176_0000.nii.gz', 'label': './labelsTr/MSWAL_0176.nii.gz'}, {'image': './imagesTr/MSWAL_0177_0000.nii.gz', 'label': './labelsTr/MSWAL_0177.nii.gz'}, {'image': './imagesTr/MSWAL_0178_0000.nii.gz', 'label': './labelsTr/MSWAL_0178.nii.gz'}, {'image': './imagesTr/MSWAL_0179_0000.nii.gz', 'label': './labelsTr/MSWAL_0179.nii.gz'}, {'image': './imagesTr/MSWAL_0180_0000.nii.gz', 'label': './labelsTr/MSWAL_0180.nii.gz'}, {'image': './imagesTr/MSWAL_0182_0000.nii.gz', 'label': './labelsTr/MSWAL_0182.nii.gz'}, {'image': './imagesTr/MSWAL_0183_0000.nii.gz', 'label': './labelsTr/MSWAL_0183.nii.gz'}, {'image': './imagesTr/MSWAL_0184_0000.nii.gz', 'label': './labelsTr/MSWAL_0184.nii.gz'}, {'image': './imagesTr/MSWAL_0185_0000.nii.gz', 'label': './labelsTr/MSWAL_0185.nii.gz'}, {'image': './imagesTr/MSWAL_0186_0000.nii.gz', 'label': './labelsTr/MSWAL_0186.nii.gz'}, {'image': './imagesTr/MSWAL_0187_0000.nii.gz', 'label': './labelsTr/MSWAL_0187.nii.gz'}, {'image': './imagesTr/MSWAL_0188_0000.nii.gz', 'label': './labelsTr/MSWAL_0188.nii.gz'}, {'image': './imagesTr/MSWAL_0189_0000.nii.gz', 'label': './labelsTr/MSWAL_0189.nii.gz'}, {'image': './imagesTr/MSWAL_0193_0000.nii.gz', 'label': './labelsTr/MSWAL_0193.nii.gz'}, {'image': './imagesTr/MSWAL_0194_0000.nii.gz', 
'label': './labelsTr/MSWAL_0194.nii.gz'}, {'image': './imagesTr/MSWAL_0195_0000.nii.gz', 'label': './labelsTr/MSWAL_0195.nii.gz'}, {'image': './imagesTr/MSWAL_0199_0000.nii.gz', 'label': './labelsTr/MSWAL_0199.nii.gz'}, {'image': './imagesTr/MSWAL_0201_0000.nii.gz', 'label': './labelsTr/MSWAL_0201.nii.gz'}, {'image': './imagesTr/MSWAL_0202_0000.nii.gz', 'label': './labelsTr/MSWAL_0202.nii.gz'}, {'image': './imagesTr/MSWAL_0203_0000.nii.gz', 'label': './labelsTr/MSWAL_0203.nii.gz'}, {'image': './imagesTr/MSWAL_0204_0000.nii.gz', 'label': './labelsTr/MSWAL_0204.nii.gz'}, {'image': './imagesTr/MSWAL_0207_0000.nii.gz', 'label': './labelsTr/MSWAL_0207.nii.gz'}, {'image': './imagesTr/MSWAL_0208_0000.nii.gz', 'label': './labelsTr/MSWAL_0208.nii.gz'}, {'image': './imagesTr/MSWAL_0209_0000.nii.gz', 'label': './labelsTr/MSWAL_0209.nii.gz'}, {'image': './imagesTr/MSWAL_0214_0000.nii.gz', 'label': './labelsTr/MSWAL_0214.nii.gz'}, {'image': './imagesTr/MSWAL_0217_0000.nii.gz', 'label': './labelsTr/MSWAL_0217.nii.gz'}, {'image': './imagesTr/MSWAL_0218_0000.nii.gz', 'label': './labelsTr/MSWAL_0218.nii.gz'}, {'image': './imagesTr/MSWAL_0219_0000.nii.gz', 'label': './labelsTr/MSWAL_0219.nii.gz'}, {'image': './imagesTr/MSWAL_0220_0000.nii.gz', 'label': './labelsTr/MSWAL_0220.nii.gz'}, {'image': './imagesTr/MSWAL_0221_0000.nii.gz', 'label': './labelsTr/MSWAL_0221.nii.gz'}, {'image': './imagesTr/MSWAL_0222_0000.nii.gz', 'label': './labelsTr/MSWAL_0222.nii.gz'}, {'image': './imagesTr/MSWAL_0223_0000.nii.gz', 'label': './labelsTr/MSWAL_0223.nii.gz'}, {'image': './imagesTr/MSWAL_0224_0000.nii.gz', 'label': './labelsTr/MSWAL_0224.nii.gz'}, {'image': './imagesTr/MSWAL_0225_0000.nii.gz', 'label': './labelsTr/MSWAL_0225.nii.gz'}, {'image': './imagesTr/MSWAL_0226_0000.nii.gz', 'label': './labelsTr/MSWAL_0226.nii.gz'}, {'image': './imagesTr/MSWAL_0227_0000.nii.gz', 'label': './labelsTr/MSWAL_0227.nii.gz'}, {'image': './imagesTr/MSWAL_0228_0000.nii.gz', 'label': './labelsTr/MSWAL_0228.nii.gz'}, 
{'image': './imagesTr/MSWAL_0229_0000.nii.gz', 'label': './labelsTr/MSWAL_0229.nii.gz'}, {'image': './imagesTr/MSWAL_0230_0000.nii.gz', 'label': './labelsTr/MSWAL_0230.nii.gz'}, {'image': './imagesTr/MSWAL_0233_0000.nii.gz', 'label': './labelsTr/MSWAL_0233.nii.gz'}, {'image': './imagesTr/MSWAL_0234_0000.nii.gz', 'label': './labelsTr/MSWAL_0234.nii.gz'}, {'image': './imagesTr/MSWAL_0238_0000.nii.gz', 'label': './labelsTr/MSWAL_0238.nii.gz'}, {'image': './imagesTr/MSWAL_0241_0000.nii.gz', 'label': './labelsTr/MSWAL_0241.nii.gz'}, {'image': './imagesTr/MSWAL_0242_0000.nii.gz', 'label': './labelsTr/MSWAL_0242.nii.gz'}, {'image': './imagesTr/MSWAL_0243_0000.nii.gz', 'label': './labelsTr/MSWAL_0243.nii.gz'}, {'image': './imagesTr/MSWAL_0245_0000.nii.gz', 'label': './labelsTr/MSWAL_0245.nii.gz'}, {'image': './imagesTr/MSWAL_0246_0000.nii.gz', 'label': './labelsTr/MSWAL_0246.nii.gz'}, {'image': './imagesTr/MSWAL_0247_0000.nii.gz', 'label': './labelsTr/MSWAL_0247.nii.gz'}, {'image': './imagesTr/MSWAL_0248_0000.nii.gz', 'label': './labelsTr/MSWAL_0248.nii.gz'}, {'image': './imagesTr/MSWAL_0251_0000.nii.gz', 'label': './labelsTr/MSWAL_0251.nii.gz'}, {'image': './imagesTr/MSWAL_0252_0000.nii.gz', 'label': './labelsTr/MSWAL_0252.nii.gz'}, {'image': './imagesTr/MSWAL_0253_0000.nii.gz', 'label': './labelsTr/MSWAL_0253.nii.gz'}, {'image': './imagesTr/MSWAL_0254_0000.nii.gz', 'label': './labelsTr/MSWAL_0254.nii.gz'}, {'image': './imagesTr/MSWAL_0255_0000.nii.gz', 'label': './labelsTr/MSWAL_0255.nii.gz'}, {'image': './imagesTr/MSWAL_0256_0000.nii.gz', 'label': './labelsTr/MSWAL_0256.nii.gz'}, {'image': './imagesTr/MSWAL_0257_0000.nii.gz', 'label': './labelsTr/MSWAL_0257.nii.gz'}, {'image': './imagesTr/MSWAL_0258_0000.nii.gz', 'label': './labelsTr/MSWAL_0258.nii.gz'}, {'image': './imagesTr/MSWAL_0259_0000.nii.gz', 'label': './labelsTr/MSWAL_0259.nii.gz'}, {'image': './imagesTr/MSWAL_0260_0000.nii.gz', 'label': './labelsTr/MSWAL_0260.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0261_0000.nii.gz', 'label': './labelsTr/MSWAL_0261.nii.gz'}, {'image': './imagesTr/MSWAL_0262_0000.nii.gz', 'label': './labelsTr/MSWAL_0262.nii.gz'}, {'image': './imagesTr/MSWAL_0263_0000.nii.gz', 'label': './labelsTr/MSWAL_0263.nii.gz'}, {'image': './imagesTr/MSWAL_0264_0000.nii.gz', 'label': './labelsTr/MSWAL_0264.nii.gz'}, {'image': './imagesTr/MSWAL_0265_0000.nii.gz', 'label': './labelsTr/MSWAL_0265.nii.gz'}, {'image': './imagesTr/MSWAL_0267_0000.nii.gz', 'label': './labelsTr/MSWAL_0267.nii.gz'}, {'image': './imagesTr/MSWAL_0270_0000.nii.gz', 'label': './labelsTr/MSWAL_0270.nii.gz'}, {'image': './imagesTr/MSWAL_0271_0000.nii.gz', 'label': './labelsTr/MSWAL_0271.nii.gz'}, {'image': './imagesTr/MSWAL_0272_0000.nii.gz', 'label': './labelsTr/MSWAL_0272.nii.gz'}, {'image': './imagesTr/MSWAL_0273_0000.nii.gz', 'label': './labelsTr/MSWAL_0273.nii.gz'}, {'image': './imagesTr/MSWAL_0274_0000.nii.gz', 'label': './labelsTr/MSWAL_0274.nii.gz'}, {'image': './imagesTr/MSWAL_0275_0000.nii.gz', 'label': './labelsTr/MSWAL_0275.nii.gz'}, {'image': './imagesTr/MSWAL_0276_0000.nii.gz', 'label': './labelsTr/MSWAL_0276.nii.gz'}, {'image': './imagesTr/MSWAL_0277_0000.nii.gz', 'label': './labelsTr/MSWAL_0277.nii.gz'}, {'image': './imagesTr/MSWAL_0278_0000.nii.gz', 'label': './labelsTr/MSWAL_0278.nii.gz'}, {'image': './imagesTr/MSWAL_0279_0000.nii.gz', 'label': './labelsTr/MSWAL_0279.nii.gz'}, {'image': './imagesTr/MSWAL_0281_0000.nii.gz', 'label': './labelsTr/MSWAL_0281.nii.gz'}, {'image': './imagesTr/MSWAL_0282_0000.nii.gz', 'label': './labelsTr/MSWAL_0282.nii.gz'}, {'image': './imagesTr/MSWAL_0283_0000.nii.gz', 'label': './labelsTr/MSWAL_0283.nii.gz'}, {'image': './imagesTr/MSWAL_0284_0000.nii.gz', 'label': './labelsTr/MSWAL_0284.nii.gz'}, {'image': './imagesTr/MSWAL_0285_0000.nii.gz', 'label': './labelsTr/MSWAL_0285.nii.gz'}, {'image': './imagesTr/MSWAL_0288_0000.nii.gz', 'label': './labelsTr/MSWAL_0288.nii.gz'}, {'image': './imagesTr/MSWAL_0289_0000.nii.gz', 
'label': './labelsTr/MSWAL_0289.nii.gz'}, {'image': './imagesTr/MSWAL_0290_0000.nii.gz', 'label': './labelsTr/MSWAL_0290.nii.gz'}, {'image': './imagesTr/MSWAL_0293_0000.nii.gz', 'label': './labelsTr/MSWAL_0293.nii.gz'}, {'image': './imagesTr/MSWAL_0296_0000.nii.gz', 'label': './labelsTr/MSWAL_0296.nii.gz'}, {'image': './imagesTr/MSWAL_0297_0000.nii.gz', 'label': './labelsTr/MSWAL_0297.nii.gz'}, {'image': './imagesTr/MSWAL_0301_0000.nii.gz', 'label': './labelsTr/MSWAL_0301.nii.gz'}, {'image': './imagesTr/MSWAL_0302_0000.nii.gz', 'label': './labelsTr/MSWAL_0302.nii.gz'}, {'image': './imagesTr/MSWAL_0303_0000.nii.gz', 'label': './labelsTr/MSWAL_0303.nii.gz'}, {'image': './imagesTr/MSWAL_0306_0000.nii.gz', 'label': './labelsTr/MSWAL_0306.nii.gz'}, {'image': './imagesTr/MSWAL_0307_0000.nii.gz', 'label': './labelsTr/MSWAL_0307.nii.gz'}, {'image': './imagesTr/MSWAL_0308_0000.nii.gz', 'label': './labelsTr/MSWAL_0308.nii.gz'}, {'image': './imagesTr/MSWAL_0311_0000.nii.gz', 'label': './labelsTr/MSWAL_0311.nii.gz'}, {'image': './imagesTr/MSWAL_0312_0000.nii.gz', 'label': './labelsTr/MSWAL_0312.nii.gz'}, {'image': './imagesTr/MSWAL_0313_0000.nii.gz', 'label': './labelsTr/MSWAL_0313.nii.gz'}, {'image': './imagesTr/MSWAL_0314_0000.nii.gz', 'label': './labelsTr/MSWAL_0314.nii.gz'}, {'image': './imagesTr/MSWAL_0316_0000.nii.gz', 'label': './labelsTr/MSWAL_0316.nii.gz'}, {'image': './imagesTr/MSWAL_0317_0000.nii.gz', 'label': './labelsTr/MSWAL_0317.nii.gz'}, {'image': './imagesTr/MSWAL_0318_0000.nii.gz', 'label': './labelsTr/MSWAL_0318.nii.gz'}, {'image': './imagesTr/MSWAL_0320_0000.nii.gz', 'label': './labelsTr/MSWAL_0320.nii.gz'}, {'image': './imagesTr/MSWAL_0323_0000.nii.gz', 'label': './labelsTr/MSWAL_0323.nii.gz'}, {'image': './imagesTr/MSWAL_0324_0000.nii.gz', 'label': './labelsTr/MSWAL_0324.nii.gz'}, {'image': './imagesTr/MSWAL_0326_0000.nii.gz', 'label': './labelsTr/MSWAL_0326.nii.gz'}, {'image': './imagesTr/MSWAL_0327_0000.nii.gz', 'label': './labelsTr/MSWAL_0327.nii.gz'}, 
{'image': './imagesTr/MSWAL_0328_0000.nii.gz', 'label': './labelsTr/MSWAL_0328.nii.gz'}, {'image': './imagesTr/MSWAL_0330_0000.nii.gz', 'label': './labelsTr/MSWAL_0330.nii.gz'}, {'image': './imagesTr/MSWAL_0331_0000.nii.gz', 'label': './labelsTr/MSWAL_0331.nii.gz'}, {'image': './imagesTr/MSWAL_0332_0000.nii.gz', 'label': './labelsTr/MSWAL_0332.nii.gz'}, {'image': './imagesTr/MSWAL_0333_0000.nii.gz', 'label': './labelsTr/MSWAL_0333.nii.gz'}, {'image': './imagesTr/MSWAL_0334_0000.nii.gz', 'label': './labelsTr/MSWAL_0334.nii.gz'}, {'image': './imagesTr/MSWAL_0335_0000.nii.gz', 'label': './labelsTr/MSWAL_0335.nii.gz'}, {'image': './imagesTr/MSWAL_0336_0000.nii.gz', 'label': './labelsTr/MSWAL_0336.nii.gz'}, {'image': './imagesTr/MSWAL_0337_0000.nii.gz', 'label': './labelsTr/MSWAL_0337.nii.gz'}, {'image': './imagesTr/MSWAL_0338_0000.nii.gz', 'label': './labelsTr/MSWAL_0338.nii.gz'}, {'image': './imagesTr/MSWAL_0341_0000.nii.gz', 'label': './labelsTr/MSWAL_0341.nii.gz'}, {'image': './imagesTr/MSWAL_0342_0000.nii.gz', 'label': './labelsTr/MSWAL_0342.nii.gz'}, {'image': './imagesTr/MSWAL_0343_0000.nii.gz', 'label': './labelsTr/MSWAL_0343.nii.gz'}, {'image': './imagesTr/MSWAL_0344_0000.nii.gz', 'label': './labelsTr/MSWAL_0344.nii.gz'}, {'image': './imagesTr/MSWAL_0345_0000.nii.gz', 'label': './labelsTr/MSWAL_0345.nii.gz'}, {'image': './imagesTr/MSWAL_0346_0000.nii.gz', 'label': './labelsTr/MSWAL_0346.nii.gz'}, {'image': './imagesTr/MSWAL_0348_0000.nii.gz', 'label': './labelsTr/MSWAL_0348.nii.gz'}, {'image': './imagesTr/MSWAL_0353_0000.nii.gz', 'label': './labelsTr/MSWAL_0353.nii.gz'}, {'image': './imagesTr/MSWAL_0354_0000.nii.gz', 'label': './labelsTr/MSWAL_0354.nii.gz'}, {'image': './imagesTr/MSWAL_0355_0000.nii.gz', 'label': './labelsTr/MSWAL_0355.nii.gz'}, {'image': './imagesTr/MSWAL_0356_0000.nii.gz', 'label': './labelsTr/MSWAL_0356.nii.gz'}, {'image': './imagesTr/MSWAL_0357_0000.nii.gz', 'label': './labelsTr/MSWAL_0357.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0360_0000.nii.gz', 'label': './labelsTr/MSWAL_0360.nii.gz'}, {'image': './imagesTr/MSWAL_0361_0000.nii.gz', 'label': './labelsTr/MSWAL_0361.nii.gz'}, {'image': './imagesTr/MSWAL_0362_0000.nii.gz', 'label': './labelsTr/MSWAL_0362.nii.gz'}, {'image': './imagesTr/MSWAL_0363_0000.nii.gz', 'label': './labelsTr/MSWAL_0363.nii.gz'}, {'image': './imagesTr/MSWAL_0365_0000.nii.gz', 'label': './labelsTr/MSWAL_0365.nii.gz'}, {'image': './imagesTr/MSWAL_0366_0000.nii.gz', 'label': './labelsTr/MSWAL_0366.nii.gz'}, {'image': './imagesTr/MSWAL_0369_0000.nii.gz', 'label': './labelsTr/MSWAL_0369.nii.gz'}, {'image': './imagesTr/MSWAL_0370_0000.nii.gz', 'label': './labelsTr/MSWAL_0370.nii.gz'}, {'image': './imagesTr/MSWAL_0373_0000.nii.gz', 'label': './labelsTr/MSWAL_0373.nii.gz'}, {'image': './imagesTr/MSWAL_0374_0000.nii.gz', 'label': './labelsTr/MSWAL_0374.nii.gz'}, {'image': './imagesTr/MSWAL_0375_0000.nii.gz', 'label': './labelsTr/MSWAL_0375.nii.gz'}, {'image': './imagesTr/MSWAL_0376_0000.nii.gz', 'label': './labelsTr/MSWAL_0376.nii.gz'}, {'image': './imagesTr/MSWAL_0378_0000.nii.gz', 'label': './labelsTr/MSWAL_0378.nii.gz'}, {'image': './imagesTr/MSWAL_0379_0000.nii.gz', 'label': './labelsTr/MSWAL_0379.nii.gz'}, {'image': './imagesTr/MSWAL_0380_0000.nii.gz', 'label': './labelsTr/MSWAL_0380.nii.gz'}, {'image': './imagesTr/MSWAL_0381_0000.nii.gz', 'label': './labelsTr/MSWAL_0381.nii.gz'}, {'image': './imagesTr/MSWAL_0382_0000.nii.gz', 'label': './labelsTr/MSWAL_0382.nii.gz'}, {'image': './imagesTr/MSWAL_0387_0000.nii.gz', 'label': './labelsTr/MSWAL_0387.nii.gz'}, {'image': './imagesTr/MSWAL_0388_0000.nii.gz', 'label': './labelsTr/MSWAL_0388.nii.gz'}, {'image': './imagesTr/MSWAL_0389_0000.nii.gz', 'label': './labelsTr/MSWAL_0389.nii.gz'}, {'image': './imagesTr/MSWAL_0390_0000.nii.gz', 'label': './labelsTr/MSWAL_0390.nii.gz'}, {'image': './imagesTr/MSWAL_0391_0000.nii.gz', 'label': './labelsTr/MSWAL_0391.nii.gz'}, {'image': './imagesTr/MSWAL_0392_0000.nii.gz', 
'label': './labelsTr/MSWAL_0392.nii.gz'}, {'image': './imagesTr/MSWAL_0393_0000.nii.gz', 'label': './labelsTr/MSWAL_0393.nii.gz'}, {'image': './imagesTr/MSWAL_0397_0000.nii.gz', 'label': './labelsTr/MSWAL_0397.nii.gz'}, {'image': './imagesTr/MSWAL_0398_0000.nii.gz', 'label': './labelsTr/MSWAL_0398.nii.gz'}, {'image': './imagesTr/MSWAL_0399_0000.nii.gz', 'label': './labelsTr/MSWAL_0399.nii.gz'}, {'image': './imagesTr/MSWAL_0400_0000.nii.gz', 'label': './labelsTr/MSWAL_0400.nii.gz'}, {'image': './imagesTr/MSWAL_0402_0000.nii.gz', 'label': './labelsTr/MSWAL_0402.nii.gz'}, {'image': './imagesTr/MSWAL_0403_0000.nii.gz', 'label': './labelsTr/MSWAL_0403.nii.gz'}, {'image': './imagesTr/MSWAL_0407_0000.nii.gz', 'label': './labelsTr/MSWAL_0407.nii.gz'}, {'image': './imagesTr/MSWAL_0409_0000.nii.gz', 'label': './labelsTr/MSWAL_0409.nii.gz'}, {'image': './imagesTr/MSWAL_0410_0000.nii.gz', 'label': './labelsTr/MSWAL_0410.nii.gz'}, {'image': './imagesTr/MSWAL_0411_0000.nii.gz', 'label': './labelsTr/MSWAL_0411.nii.gz'}, {'image': './imagesTr/MSWAL_0412_0000.nii.gz', 'label': './labelsTr/MSWAL_0412.nii.gz'}, {'image': './imagesTr/MSWAL_0414_0000.nii.gz', 'label': './labelsTr/MSWAL_0414.nii.gz'}, {'image': './imagesTr/MSWAL_0415_0000.nii.gz', 'label': './labelsTr/MSWAL_0415.nii.gz'}, {'image': './imagesTr/MSWAL_0416_0000.nii.gz', 'label': './labelsTr/MSWAL_0416.nii.gz'}, {'image': './imagesTr/MSWAL_0417_0000.nii.gz', 'label': './labelsTr/MSWAL_0417.nii.gz'}, {'image': './imagesTr/MSWAL_0418_0000.nii.gz', 'label': './labelsTr/MSWAL_0418.nii.gz'}, {'image': './imagesTr/MSWAL_0419_0000.nii.gz', 'label': './labelsTr/MSWAL_0419.nii.gz'}, {'image': './imagesTr/MSWAL_0420_0000.nii.gz', 'label': './labelsTr/MSWAL_0420.nii.gz'}, {'image': './imagesTr/MSWAL_0421_0000.nii.gz', 'label': './labelsTr/MSWAL_0421.nii.gz'}, {'image': './imagesTr/MSWAL_0422_0000.nii.gz', 'label': './labelsTr/MSWAL_0422.nii.gz'}, {'image': './imagesTr/MSWAL_0423_0000.nii.gz', 'label': './labelsTr/MSWAL_0423.nii.gz'}, 
{'image': './imagesTr/MSWAL_0425_0000.nii.gz', 'label': './labelsTr/MSWAL_0425.nii.gz'}, {'image': './imagesTr/MSWAL_0426_0000.nii.gz', 'label': './labelsTr/MSWAL_0426.nii.gz'}, {'image': './imagesTr/MSWAL_0427_0000.nii.gz', 'label': './labelsTr/MSWAL_0427.nii.gz'}, {'image': './imagesTr/MSWAL_0428_0000.nii.gz', 'label': './labelsTr/MSWAL_0428.nii.gz'}, {'image': './imagesTr/MSWAL_0429_0000.nii.gz', 'label': './labelsTr/MSWAL_0429.nii.gz'}, {'image': './imagesTr/MSWAL_0430_0000.nii.gz', 'label': './labelsTr/MSWAL_0430.nii.gz'}, {'image': './imagesTr/MSWAL_0431_0000.nii.gz', 'label': './labelsTr/MSWAL_0431.nii.gz'}, {'image': './imagesTr/MSWAL_0432_0000.nii.gz', 'label': './labelsTr/MSWAL_0432.nii.gz'}, {'image': './imagesTr/MSWAL_0434_0000.nii.gz', 'label': './labelsTr/MSWAL_0434.nii.gz'}, {'image': './imagesTr/MSWAL_0435_0000.nii.gz', 'label': './labelsTr/MSWAL_0435.nii.gz'}, {'image': './imagesTr/MSWAL_0436_0000.nii.gz', 'label': './labelsTr/MSWAL_0436.nii.gz'}, {'image': './imagesTr/MSWAL_0437_0000.nii.gz', 'label': './labelsTr/MSWAL_0437.nii.gz'}, {'image': './imagesTr/MSWAL_0438_0000.nii.gz', 'label': './labelsTr/MSWAL_0438.nii.gz'}, {'image': './imagesTr/MSWAL_0439_0000.nii.gz', 'label': './labelsTr/MSWAL_0439.nii.gz'}, {'image': './imagesTr/MSWAL_0440_0000.nii.gz', 'label': './labelsTr/MSWAL_0440.nii.gz'}, {'image': './imagesTr/MSWAL_0442_0000.nii.gz', 'label': './labelsTr/MSWAL_0442.nii.gz'}, {'image': './imagesTr/MSWAL_0446_0000.nii.gz', 'label': './labelsTr/MSWAL_0446.nii.gz'}, {'image': './imagesTr/MSWAL_0447_0000.nii.gz', 'label': './labelsTr/MSWAL_0447.nii.gz'}, {'image': './imagesTr/MSWAL_0452_0000.nii.gz', 'label': './labelsTr/MSWAL_0452.nii.gz'}, {'image': './imagesTr/MSWAL_0453_0000.nii.gz', 'label': './labelsTr/MSWAL_0453.nii.gz'}, {'image': './imagesTr/MSWAL_0455_0000.nii.gz', 'label': './labelsTr/MSWAL_0455.nii.gz'}, {'image': './imagesTr/MSWAL_0457_0000.nii.gz', 'label': './labelsTr/MSWAL_0457.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0460_0000.nii.gz', 'label': './labelsTr/MSWAL_0460.nii.gz'}, {'image': './imagesTr/MSWAL_0461_0000.nii.gz', 'label': './labelsTr/MSWAL_0461.nii.gz'}, {'image': './imagesTr/MSWAL_0463_0000.nii.gz', 'label': './labelsTr/MSWAL_0463.nii.gz'}, {'image': './imagesTr/MSWAL_0464_0000.nii.gz', 'label': './labelsTr/MSWAL_0464.nii.gz'}, {'image': './imagesTr/MSWAL_0465_0000.nii.gz', 'label': './labelsTr/MSWAL_0465.nii.gz'}, {'image': './imagesTr/MSWAL_0466_0000.nii.gz', 'label': './labelsTr/MSWAL_0466.nii.gz'}, {'image': './imagesTr/MSWAL_0468_0000.nii.gz', 'label': './labelsTr/MSWAL_0468.nii.gz'}, {'image': './imagesTr/MSWAL_0470_0000.nii.gz', 'label': './labelsTr/MSWAL_0470.nii.gz'}, {'image': './imagesTr/MSWAL_0471_0000.nii.gz', 'label': './labelsTr/MSWAL_0471.nii.gz'}, {'image': './imagesTr/MSWAL_0473_0000.nii.gz', 'label': './labelsTr/MSWAL_0473.nii.gz'}, {'image': './imagesTr/MSWAL_0474_0000.nii.gz', 'label': './labelsTr/MSWAL_0474.nii.gz'}, {'image': './imagesTr/MSWAL_0475_0000.nii.gz', 'label': './labelsTr/MSWAL_0475.nii.gz'}, {'image': './imagesTr/MSWAL_0476_0000.nii.gz', 'label': './labelsTr/MSWAL_0476.nii.gz'}, {'image': './imagesTr/MSWAL_0477_0000.nii.gz', 'label': './labelsTr/MSWAL_0477.nii.gz'}, {'image': './imagesTr/MSWAL_0479_0000.nii.gz', 'label': './labelsTr/MSWAL_0479.nii.gz'}, {'image': './imagesTr/MSWAL_0480_0000.nii.gz', 'label': './labelsTr/MSWAL_0480.nii.gz'}, {'image': './imagesTr/MSWAL_0482_0000.nii.gz', 'label': './labelsTr/MSWAL_0482.nii.gz'}, {'image': './imagesTr/MSWAL_0483_0000.nii.gz', 'label': './labelsTr/MSWAL_0483.nii.gz'}, {'image': './imagesTr/MSWAL_0484_0000.nii.gz', 'label': './labelsTr/MSWAL_0484.nii.gz'}, {'image': './imagesTr/MSWAL_0485_0000.nii.gz', 'label': './labelsTr/MSWAL_0485.nii.gz'}, {'image': './imagesTr/MSWAL_0486_0000.nii.gz', 'label': './labelsTr/MSWAL_0486.nii.gz'}, {'image': './imagesTr/MSWAL_0487_0000.nii.gz', 'label': './labelsTr/MSWAL_0487.nii.gz'}, {'image': './imagesTr/MSWAL_0488_0000.nii.gz', 
'label': './labelsTr/MSWAL_0488.nii.gz'}, {'image': './imagesTr/MSWAL_0489_0000.nii.gz', 'label': './labelsTr/MSWAL_0489.nii.gz'}, {'image': './imagesTr/MSWAL_0490_0000.nii.gz', 'label': './labelsTr/MSWAL_0490.nii.gz'}, {'image': './imagesTr/MSWAL_0491_0000.nii.gz', 'label': './labelsTr/MSWAL_0491.nii.gz'}, {'image': './imagesTr/MSWAL_0492_0000.nii.gz', 'label': './labelsTr/MSWAL_0492.nii.gz'}, {'image': './imagesTr/MSWAL_0493_0000.nii.gz', 'label': './labelsTr/MSWAL_0493.nii.gz'}, {'image': './imagesTr/MSWAL_0495_0000.nii.gz', 'label': './labelsTr/MSWAL_0495.nii.gz'}, {'image': './imagesTr/MSWAL_0497_0000.nii.gz', 'label': './labelsTr/MSWAL_0497.nii.gz'}, {'image': './imagesTr/MSWAL_0498_0000.nii.gz', 'label': './labelsTr/MSWAL_0498.nii.gz'}, {'image': './imagesTr/MSWAL_0500_0000.nii.gz', 'label': './labelsTr/MSWAL_0500.nii.gz'}, {'image': './imagesTr/MSWAL_0501_0000.nii.gz', 'label': './labelsTr/MSWAL_0501.nii.gz'}, {'image': './imagesTr/MSWAL_0504_0000.nii.gz', 'label': './labelsTr/MSWAL_0504.nii.gz'}, {'image': './imagesTr/MSWAL_0505_0000.nii.gz', 'label': './labelsTr/MSWAL_0505.nii.gz'}, {'image': './imagesTr/MSWAL_0506_0000.nii.gz', 'label': './labelsTr/MSWAL_0506.nii.gz'}, {'image': './imagesTr/MSWAL_0507_0000.nii.gz', 'label': './labelsTr/MSWAL_0507.nii.gz'}, {'image': './imagesTr/MSWAL_0508_0000.nii.gz', 'label': './labelsTr/MSWAL_0508.nii.gz'}, {'image': './imagesTr/MSWAL_0509_0000.nii.gz', 'label': './labelsTr/MSWAL_0509.nii.gz'}, {'image': './imagesTr/MSWAL_0510_0000.nii.gz', 'label': './labelsTr/MSWAL_0510.nii.gz'}, {'image': './imagesTr/MSWAL_0512_0000.nii.gz', 'label': './labelsTr/MSWAL_0512.nii.gz'}, {'image': './imagesTr/MSWAL_0516_0000.nii.gz', 'label': './labelsTr/MSWAL_0516.nii.gz'}, {'image': './imagesTr/MSWAL_0518_0000.nii.gz', 'label': './labelsTr/MSWAL_0518.nii.gz'}, {'image': './imagesTr/MSWAL_0519_0000.nii.gz', 'label': './labelsTr/MSWAL_0519.nii.gz'}, {'image': './imagesTr/MSWAL_0521_0000.nii.gz', 'label': './labelsTr/MSWAL_0521.nii.gz'}, 
{'image': './imagesTr/MSWAL_0522_0000.nii.gz', 'label': './labelsTr/MSWAL_0522.nii.gz'}, {'image': './imagesTr/MSWAL_0523_0000.nii.gz', 'label': './labelsTr/MSWAL_0523.nii.gz'}, {'image': './imagesTr/MSWAL_0524_0000.nii.gz', 'label': './labelsTr/MSWAL_0524.nii.gz'}, {'image': './imagesTr/MSWAL_0526_0000.nii.gz', 'label': './labelsTr/MSWAL_0526.nii.gz'}, {'image': './imagesTr/MSWAL_0527_0000.nii.gz', 'label': './labelsTr/MSWAL_0527.nii.gz'}, {'image': './imagesTr/MSWAL_0530_0000.nii.gz', 'label': './labelsTr/MSWAL_0530.nii.gz'}, {'image': './imagesTr/MSWAL_0531_0000.nii.gz', 'label': './labelsTr/MSWAL_0531.nii.gz'}, {'image': './imagesTr/MSWAL_0534_0000.nii.gz', 'label': './labelsTr/MSWAL_0534.nii.gz'}, {'image': './imagesTr/MSWAL_0535_0000.nii.gz', 'label': './labelsTr/MSWAL_0535.nii.gz'}, {'image': './imagesTr/MSWAL_0536_0000.nii.gz', 'label': './labelsTr/MSWAL_0536.nii.gz'}, {'image': './imagesTr/MSWAL_0538_0000.nii.gz', 'label': './labelsTr/MSWAL_0538.nii.gz'}, {'image': './imagesTr/MSWAL_0539_0000.nii.gz', 'label': './labelsTr/MSWAL_0539.nii.gz'}, {'image': './imagesTr/MSWAL_0540_0000.nii.gz', 'label': './labelsTr/MSWAL_0540.nii.gz'}, {'image': './imagesTr/MSWAL_0542_0000.nii.gz', 'label': './labelsTr/MSWAL_0542.nii.gz'}, {'image': './imagesTr/MSWAL_0544_0000.nii.gz', 'label': './labelsTr/MSWAL_0544.nii.gz'}, {'image': './imagesTr/MSWAL_0545_0000.nii.gz', 'label': './labelsTr/MSWAL_0545.nii.gz'}, {'image': './imagesTr/MSWAL_0546_0000.nii.gz', 'label': './labelsTr/MSWAL_0546.nii.gz'}, {'image': './imagesTr/MSWAL_0547_0000.nii.gz', 'label': './labelsTr/MSWAL_0547.nii.gz'}, {'image': './imagesTr/MSWAL_0548_0000.nii.gz', 'label': './labelsTr/MSWAL_0548.nii.gz'}, {'image': './imagesTr/MSWAL_0549_0000.nii.gz', 'label': './labelsTr/MSWAL_0549.nii.gz'}, {'image': './imagesTr/MSWAL_0550_0000.nii.gz', 'label': './labelsTr/MSWAL_0550.nii.gz'}, {'image': './imagesTr/MSWAL_0551_0000.nii.gz', 'label': './labelsTr/MSWAL_0551.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0552_0000.nii.gz', 'label': './labelsTr/MSWAL_0552.nii.gz'}, {'image': './imagesTr/MSWAL_0553_0000.nii.gz', 'label': './labelsTr/MSWAL_0553.nii.gz'}, {'image': './imagesTr/MSWAL_0554_0000.nii.gz', 'label': './labelsTr/MSWAL_0554.nii.gz'}, {'image': './imagesTr/MSWAL_0555_0000.nii.gz', 'label': './labelsTr/MSWAL_0555.nii.gz'}, {'image': './imagesTr/MSWAL_0556_0000.nii.gz', 'label': './labelsTr/MSWAL_0556.nii.gz'}, {'image': './imagesTr/MSWAL_0557_0000.nii.gz', 'label': './labelsTr/MSWAL_0557.nii.gz'}, {'image': './imagesTr/MSWAL_0558_0000.nii.gz', 'label': './labelsTr/MSWAL_0558.nii.gz'}, {'image': './imagesTr/MSWAL_0559_0000.nii.gz', 'label': './labelsTr/MSWAL_0559.nii.gz'}, {'image': './imagesTr/MSWAL_0561_0000.nii.gz', 'label': './labelsTr/MSWAL_0561.nii.gz'}, {'image': './imagesTr/MSWAL_0562_0000.nii.gz', 'label': './labelsTr/MSWAL_0562.nii.gz'}, {'image': './imagesTr/MSWAL_0563_0000.nii.gz', 'label': './labelsTr/MSWAL_0563.nii.gz'}, {'image': './imagesTr/MSWAL_0564_0000.nii.gz', 'label': './labelsTr/MSWAL_0564.nii.gz'}, {'image': './imagesTr/MSWAL_0566_0000.nii.gz', 'label': './labelsTr/MSWAL_0566.nii.gz'}, {'image': './imagesTr/MSWAL_0567_0000.nii.gz', 'label': './labelsTr/MSWAL_0567.nii.gz'}, {'image': './imagesTr/MSWAL_0568_0000.nii.gz', 'label': './labelsTr/MSWAL_0568.nii.gz'}, {'image': './imagesTr/MSWAL_0571_0000.nii.gz', 'label': './labelsTr/MSWAL_0571.nii.gz'}, {'image': './imagesTr/MSWAL_0573_0000.nii.gz', 'label': './labelsTr/MSWAL_0573.nii.gz'}, {'image': './imagesTr/MSWAL_0574_0000.nii.gz', 'label': './labelsTr/MSWAL_0574.nii.gz'}, {'image': './imagesTr/MSWAL_0575_0000.nii.gz', 'label': './labelsTr/MSWAL_0575.nii.gz'}, {'image': './imagesTr/MSWAL_0577_0000.nii.gz', 'label': './labelsTr/MSWAL_0577.nii.gz'}, {'image': './imagesTr/MSWAL_0578_0000.nii.gz', 'label': './labelsTr/MSWAL_0578.nii.gz'}, {'image': './imagesTr/MSWAL_0579_0000.nii.gz', 'label': './labelsTr/MSWAL_0579.nii.gz'}, {'image': './imagesTr/MSWAL_0580_0000.nii.gz', 
'label': './labelsTr/MSWAL_0580.nii.gz'}, {'image': './imagesTr/MSWAL_0581_0000.nii.gz', 'label': './labelsTr/MSWAL_0581.nii.gz'}, {'image': './imagesTr/MSWAL_0582_0000.nii.gz', 'label': './labelsTr/MSWAL_0582.nii.gz'}, {'image': './imagesTr/MSWAL_0583_0000.nii.gz', 'label': './labelsTr/MSWAL_0583.nii.gz'}, {'image': './imagesTr/MSWAL_0584_0000.nii.gz', 'label': './labelsTr/MSWAL_0584.nii.gz'}, {'image': './imagesTr/MSWAL_0586_0000.nii.gz', 'label': './labelsTr/MSWAL_0586.nii.gz'}, {'image': './imagesTr/MSWAL_0590_0000.nii.gz', 'label': './labelsTr/MSWAL_0590.nii.gz'}, {'image': './imagesTr/MSWAL_0591_0000.nii.gz', 'label': './labelsTr/MSWAL_0591.nii.gz'}, {'image': './imagesTr/MSWAL_0592_0000.nii.gz', 'label': './labelsTr/MSWAL_0592.nii.gz'}, {'image': './imagesTr/MSWAL_0593_0000.nii.gz', 'label': './labelsTr/MSWAL_0593.nii.gz'}, {'image': './imagesTr/MSWAL_0595_0000.nii.gz', 'label': './labelsTr/MSWAL_0595.nii.gz'}, {'image': './imagesTr/MSWAL_0596_0000.nii.gz', 'label': './labelsTr/MSWAL_0596.nii.gz'}, {'image': './imagesTr/MSWAL_0597_0000.nii.gz', 'label': './labelsTr/MSWAL_0597.nii.gz'}, {'image': './imagesTr/MSWAL_0598_0000.nii.gz', 'label': './labelsTr/MSWAL_0598.nii.gz'}, {'image': './imagesTr/MSWAL_0599_0000.nii.gz', 'label': './labelsTr/MSWAL_0599.nii.gz'}, {'image': './imagesTr/MSWAL_0600_0000.nii.gz', 'label': './labelsTr/MSWAL_0600.nii.gz'}, {'image': './imagesTr/MSWAL_0601_0000.nii.gz', 'label': './labelsTr/MSWAL_0601.nii.gz'}, {'image': './imagesTr/MSWAL_0602_0000.nii.gz', 'label': './labelsTr/MSWAL_0602.nii.gz'}, {'image': './imagesTr/MSWAL_0604_0000.nii.gz', 'label': './labelsTr/MSWAL_0604.nii.gz'}, {'image': './imagesTr/MSWAL_0605_0000.nii.gz', 'label': './labelsTr/MSWAL_0605.nii.gz'}, {'image': './imagesTr/MSWAL_0608_0000.nii.gz', 'label': './labelsTr/MSWAL_0608.nii.gz'}, {'image': './imagesTr/MSWAL_0612_0000.nii.gz', 'label': './labelsTr/MSWAL_0612.nii.gz'}, {'image': './imagesTr/MSWAL_0614_0000.nii.gz', 'label': './labelsTr/MSWAL_0614.nii.gz'}, 
{'image': './imagesTr/MSWAL_0615_0000.nii.gz', 'label': './labelsTr/MSWAL_0615.nii.gz'}, {'image': './imagesTr/MSWAL_0616_0000.nii.gz', 'label': './labelsTr/MSWAL_0616.nii.gz'}, {'image': './imagesTr/MSWAL_0617_0000.nii.gz', 'label': './labelsTr/MSWAL_0617.nii.gz'}, {'image': './imagesTr/MSWAL_0621_0000.nii.gz', 'label': './labelsTr/MSWAL_0621.nii.gz'}, {'image': './imagesTr/MSWAL_0623_0000.nii.gz', 'label': './labelsTr/MSWAL_0623.nii.gz'}, {'image': './imagesTr/MSWAL_0625_0000.nii.gz', 'label': './labelsTr/MSWAL_0625.nii.gz'}, {'image': './imagesTr/MSWAL_0626_0000.nii.gz', 'label': './labelsTr/MSWAL_0626.nii.gz'}, {'image': './imagesTr/MSWAL_0627_0000.nii.gz', 'label': './labelsTr/MSWAL_0627.nii.gz'}, {'image': './imagesTr/MSWAL_0628_0000.nii.gz', 'label': './labelsTr/MSWAL_0628.nii.gz'}, {'image': './imagesTr/MSWAL_0629_0000.nii.gz', 'label': './labelsTr/MSWAL_0629.nii.gz'}, {'image': './imagesTr/MSWAL_0630_0000.nii.gz', 'label': './labelsTr/MSWAL_0630.nii.gz'}, {'image': './imagesTr/MSWAL_0632_0000.nii.gz', 'label': './labelsTr/MSWAL_0632.nii.gz'}, {'image': './imagesTr/MSWAL_0635_0000.nii.gz', 'label': './labelsTr/MSWAL_0635.nii.gz'}, {'image': './imagesTr/MSWAL_0636_0000.nii.gz', 'label': './labelsTr/MSWAL_0636.nii.gz'}, {'image': './imagesTr/MSWAL_0638_0000.nii.gz', 'label': './labelsTr/MSWAL_0638.nii.gz'}, {'image': './imagesTr/MSWAL_0640_0000.nii.gz', 'label': './labelsTr/MSWAL_0640.nii.gz'}, {'image': './imagesTr/MSWAL_0641_0000.nii.gz', 'label': './labelsTr/MSWAL_0641.nii.gz'}, {'image': './imagesTr/MSWAL_0643_0000.nii.gz', 'label': './labelsTr/MSWAL_0643.nii.gz'}, {'image': './imagesTr/MSWAL_0644_0000.nii.gz', 'label': './labelsTr/MSWAL_0644.nii.gz'}, {'image': './imagesTr/MSWAL_0646_0000.nii.gz', 'label': './labelsTr/MSWAL_0646.nii.gz'}, {'image': './imagesTr/MSWAL_0648_0000.nii.gz', 'label': './labelsTr/MSWAL_0648.nii.gz'}, {'image': './imagesTr/MSWAL_0649_0000.nii.gz', 'label': './labelsTr/MSWAL_0649.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0650_0000.nii.gz', 'label': './labelsTr/MSWAL_0650.nii.gz'}, {'image': './imagesTr/MSWAL_0651_0000.nii.gz', 'label': './labelsTr/MSWAL_0651.nii.gz'}, {'image': './imagesTr/MSWAL_0653_0000.nii.gz', 'label': './labelsTr/MSWAL_0653.nii.gz'}, {'image': './imagesTr/MSWAL_0654_0000.nii.gz', 'label': './labelsTr/MSWAL_0654.nii.gz'}, {'image': './imagesTr/MSWAL_0655_0000.nii.gz', 'label': './labelsTr/MSWAL_0655.nii.gz'}, {'image': './imagesTr/MSWAL_0656_0000.nii.gz', 'label': './labelsTr/MSWAL_0656.nii.gz'}, {'image': './imagesTr/MSWAL_0658_0000.nii.gz', 'label': './labelsTr/MSWAL_0658.nii.gz'}, {'image': './imagesTr/MSWAL_0660_0000.nii.gz', 'label': './labelsTr/MSWAL_0660.nii.gz'}, {'image': './imagesTr/MSWAL_0661_0000.nii.gz', 'label': './labelsTr/MSWAL_0661.nii.gz'}, {'image': './imagesTr/MSWAL_0662_0000.nii.gz', 'label': './labelsTr/MSWAL_0662.nii.gz'}, {'image': './imagesTr/MSWAL_0663_0000.nii.gz', 'label': './labelsTr/MSWAL_0663.nii.gz'}, {'image': './imagesTr/MSWAL_0666_0000.nii.gz', 'label': './labelsTr/MSWAL_0666.nii.gz'}, {'image': './imagesTr/MSWAL_0667_0000.nii.gz', 'label': './labelsTr/MSWAL_0667.nii.gz'}, {'image': './imagesTr/MSWAL_0668_0000.nii.gz', 'label': './labelsTr/MSWAL_0668.nii.gz'}, {'image': './imagesTr/MSWAL_0669_0000.nii.gz', 'label': './labelsTr/MSWAL_0669.nii.gz'}, {'image': './imagesTr/MSWAL_0670_0000.nii.gz', 'label': './labelsTr/MSWAL_0670.nii.gz'}, {'image': './imagesTr/MSWAL_0671_0000.nii.gz', 'label': './labelsTr/MSWAL_0671.nii.gz'}, {'image': './imagesTr/MSWAL_0673_0000.nii.gz', 'label': './labelsTr/MSWAL_0673.nii.gz'}, {'image': './imagesTr/MSWAL_0674_0000.nii.gz', 'label': './labelsTr/MSWAL_0674.nii.gz'}, {'image': './imagesTr/MSWAL_0675_0000.nii.gz', 'label': './labelsTr/MSWAL_0675.nii.gz'}, {'image': './imagesTr/MSWAL_0676_0000.nii.gz', 'label': './labelsTr/MSWAL_0676.nii.gz'}, {'image': './imagesTr/MSWAL_0677_0000.nii.gz', 'label': './labelsTr/MSWAL_0677.nii.gz'}, {'image': './imagesTr/MSWAL_0679_0000.nii.gz', 
'label': './labelsTr/MSWAL_0679.nii.gz'}, {'image': './imagesTr/MSWAL_0680_0000.nii.gz', 'label': './labelsTr/MSWAL_0680.nii.gz'}, {'image': './imagesTr/MSWAL_0681_0000.nii.gz', 'label': './labelsTr/MSWAL_0681.nii.gz'}, {'image': './imagesTr/MSWAL_0682_0000.nii.gz', 'label': './labelsTr/MSWAL_0682.nii.gz'}, {'image': './imagesTr/MSWAL_0685_0000.nii.gz', 'label': './labelsTr/MSWAL_0685.nii.gz'}, {'image': './imagesTr/MSWAL_0686_0000.nii.gz', 'label': './labelsTr/MSWAL_0686.nii.gz'}, {'image': './imagesTr/MSWAL_0687_0000.nii.gz', 'label': './labelsTr/MSWAL_0687.nii.gz'}, {'image': './imagesTr/MSWAL_0688_0000.nii.gz', 'label': './labelsTr/MSWAL_0688.nii.gz'}, {'image': './imagesTr/MSWAL_0690_0000.nii.gz', 'label': './labelsTr/MSWAL_0690.nii.gz'}, {'image': './imagesTr/MSWAL_0692_0000.nii.gz', 'label': './labelsTr/MSWAL_0692.nii.gz'}, {'image': './imagesTr/MSWAL_0693_0000.nii.gz', 'label': './labelsTr/MSWAL_0693.nii.gz'}, {'image': './imagesTr/MSWAL_0694_0000.nii.gz', 'label': './labelsTr/MSWAL_0694.nii.gz'}], 'test': [{'image': './imagesTs/MSWAL_0004_0000.nii.gz', 'label': './labelsTs/MSWAL_0004.nii.gz'}, {'image': './imagesTs/MSWAL_0005_0000.nii.gz', 'label': './labelsTs/MSWAL_0005.nii.gz'}, {'image': './imagesTs/MSWAL_0006_0000.nii.gz', 'label': './labelsTs/MSWAL_0006.nii.gz'}, {'image': './imagesTs/MSWAL_0007_0000.nii.gz', 'label': './labelsTs/MSWAL_0007.nii.gz'}, {'image': './imagesTs/MSWAL_0010_0000.nii.gz', 'label': './labelsTs/MSWAL_0010.nii.gz'}, {'image': './imagesTs/MSWAL_0012_0000.nii.gz', 'label': './labelsTs/MSWAL_0012.nii.gz'}, {'image': './imagesTs/MSWAL_0016_0000.nii.gz', 'label': './labelsTs/MSWAL_0016.nii.gz'}, {'image': './imagesTs/MSWAL_0019_0000.nii.gz', 'label': './labelsTs/MSWAL_0019.nii.gz'}, {'image': './imagesTs/MSWAL_0023_0000.nii.gz', 'label': './labelsTs/MSWAL_0023.nii.gz'}, {'image': './imagesTs/MSWAL_0025_0000.nii.gz', 'label': './labelsTs/MSWAL_0025.nii.gz'}, {'image': './imagesTs/MSWAL_0030_0000.nii.gz', 'label': 
'./labelsTs/MSWAL_0030.nii.gz'}, {'image': './imagesTs/MSWAL_0036_0000.nii.gz', 'label': './labelsTs/MSWAL_0036.nii.gz'}, {'image': './imagesTs/MSWAL_0043_0000.nii.gz', 'label': './labelsTs/MSWAL_0043.nii.gz'}, {'image': './imagesTs/MSWAL_0044_0000.nii.gz', 'label': './labelsTs/MSWAL_0044.nii.gz'}, {'image': './imagesTs/MSWAL_0047_0000.nii.gz', 'label': './labelsTs/MSWAL_0047.nii.gz'}, {'image': './imagesTs/MSWAL_0048_0000.nii.gz', 'label': './labelsTs/MSWAL_0048.nii.gz'}, {'image': './imagesTs/MSWAL_0053_0000.nii.gz', 'label': './labelsTs/MSWAL_0053.nii.gz'}, {'image': './imagesTs/MSWAL_0058_0000.nii.gz', 'label': './labelsTs/MSWAL_0058.nii.gz'}, {'image': './imagesTs/MSWAL_0062_0000.nii.gz', 'label': './labelsTs/MSWAL_0062.nii.gz'}, {'image': './imagesTs/MSWAL_0068_0000.nii.gz', 'label': './labelsTs/MSWAL_0068.nii.gz'}, {'image': './imagesTs/MSWAL_0070_0000.nii.gz', 'label': './labelsTs/MSWAL_0070.nii.gz'}, {'image': './imagesTs/MSWAL_0071_0000.nii.gz', 'label': './labelsTs/MSWAL_0071.nii.gz'}, {'image': './imagesTs/MSWAL_0073_0000.nii.gz', 'label': './labelsTs/MSWAL_0073.nii.gz'}, {'image': './imagesTs/MSWAL_0074_0000.nii.gz', 'label': './labelsTs/MSWAL_0074.nii.gz'}, {'image': './imagesTs/MSWAL_0076_0000.nii.gz', 'label': './labelsTs/MSWAL_0076.nii.gz'}, {'image': './imagesTs/MSWAL_0078_0000.nii.gz', 'label': './labelsTs/MSWAL_0078.nii.gz'}, {'image': './imagesTs/MSWAL_0079_0000.nii.gz', 'label': './labelsTs/MSWAL_0079.nii.gz'}, {'image': './imagesTs/MSWAL_0081_0000.nii.gz', 'label': './labelsTs/MSWAL_0081.nii.gz'}, {'image': './imagesTs/MSWAL_0087_0000.nii.gz', 'label': './labelsTs/MSWAL_0087.nii.gz'}, {'image': './imagesTs/MSWAL_0090_0000.nii.gz', 'label': './labelsTs/MSWAL_0090.nii.gz'}, {'image': './imagesTs/MSWAL_0091_0000.nii.gz', 'label': './labelsTs/MSWAL_0091.nii.gz'}, {'image': './imagesTs/MSWAL_0097_0000.nii.gz', 'label': './labelsTs/MSWAL_0097.nii.gz'}, {'image': './imagesTs/MSWAL_0100_0000.nii.gz', 'label': './labelsTs/MSWAL_0100.nii.gz'}, 
{'image': './imagesTs/MSWAL_0107_0000.nii.gz', 'label': './labelsTs/MSWAL_0107.nii.gz'}, {'image': './imagesTs/MSWAL_0115_0000.nii.gz', 'label': './labelsTs/MSWAL_0115.nii.gz'}, {'image': './imagesTs/MSWAL_0116_0000.nii.gz', 'label': './labelsTs/MSWAL_0116.nii.gz'}, {'image': './imagesTs/MSWAL_0118_0000.nii.gz', 'label': './labelsTs/MSWAL_0118.nii.gz'}, {'image': './imagesTs/MSWAL_0121_0000.nii.gz', 'label': './labelsTs/MSWAL_0121.nii.gz'}, {'image': './imagesTs/MSWAL_0123_0000.nii.gz', 'label': './labelsTs/MSWAL_0123.nii.gz'}, {'image': './imagesTs/MSWAL_0131_0000.nii.gz', 'label': './labelsTs/MSWAL_0131.nii.gz'}, {'image': './imagesTs/MSWAL_0135_0000.nii.gz', 'label': './labelsTs/MSWAL_0135.nii.gz'}, {'image': './imagesTs/MSWAL_0137_0000.nii.gz', 'label': './labelsTs/MSWAL_0137.nii.gz'}, {'image': './imagesTs/MSWAL_0144_0000.nii.gz', 'label': './labelsTs/MSWAL_0144.nii.gz'}, {'image': './imagesTs/MSWAL_0146_0000.nii.gz', 'label': './labelsTs/MSWAL_0146.nii.gz'}, {'image': './imagesTs/MSWAL_0153_0000.nii.gz', 'label': './labelsTs/MSWAL_0153.nii.gz'}, {'image': './imagesTs/MSWAL_0154_0000.nii.gz', 'label': './labelsTs/MSWAL_0154.nii.gz'}, {'image': './imagesTs/MSWAL_0155_0000.nii.gz', 'label': './labelsTs/MSWAL_0155.nii.gz'}, {'image': './imagesTs/MSWAL_0156_0000.nii.gz', 'label': './labelsTs/MSWAL_0156.nii.gz'}, {'image': './imagesTs/MSWAL_0158_0000.nii.gz', 'label': './labelsTs/MSWAL_0158.nii.gz'}, {'image': './imagesTs/MSWAL_0160_0000.nii.gz', 'label': './labelsTs/MSWAL_0160.nii.gz'}, {'image': './imagesTs/MSWAL_0161_0000.nii.gz', 'label': './labelsTs/MSWAL_0161.nii.gz'}, {'image': './imagesTs/MSWAL_0164_0000.nii.gz', 'label': './labelsTs/MSWAL_0164.nii.gz'}, {'image': './imagesTs/MSWAL_0181_0000.nii.gz', 'label': './labelsTs/MSWAL_0181.nii.gz'}, {'image': './imagesTs/MSWAL_0190_0000.nii.gz', 'label': './labelsTs/MSWAL_0190.nii.gz'}, {'image': './imagesTs/MSWAL_0191_0000.nii.gz', 'label': './labelsTs/MSWAL_0191.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0192_0000.nii.gz', 'label': './labelsTs/MSWAL_0192.nii.gz'}, {'image': './imagesTs/MSWAL_0196_0000.nii.gz', 'label': './labelsTs/MSWAL_0196.nii.gz'}, {'image': './imagesTs/MSWAL_0197_0000.nii.gz', 'label': './labelsTs/MSWAL_0197.nii.gz'}, {'image': './imagesTs/MSWAL_0198_0000.nii.gz', 'label': './labelsTs/MSWAL_0198.nii.gz'}, {'image': './imagesTs/MSWAL_0200_0000.nii.gz', 'label': './labelsTs/MSWAL_0200.nii.gz'}, {'image': './imagesTs/MSWAL_0205_0000.nii.gz', 'label': './labelsTs/MSWAL_0205.nii.gz'}, {'image': './imagesTs/MSWAL_0206_0000.nii.gz', 'label': './labelsTs/MSWAL_0206.nii.gz'}, {'image': './imagesTs/MSWAL_0210_0000.nii.gz', 'label': './labelsTs/MSWAL_0210.nii.gz'}, {'image': './imagesTs/MSWAL_0211_0000.nii.gz', 'label': './labelsTs/MSWAL_0211.nii.gz'}, {'image': './imagesTs/MSWAL_0212_0000.nii.gz', 'label': './labelsTs/MSWAL_0212.nii.gz'}, {'image': './imagesTs/MSWAL_0213_0000.nii.gz', 'label': './labelsTs/MSWAL_0213.nii.gz'}, {'image': './imagesTs/MSWAL_0215_0000.nii.gz', 'label': './labelsTs/MSWAL_0215.nii.gz'}, {'image': './imagesTs/MSWAL_0216_0000.nii.gz', 'label': './labelsTs/MSWAL_0216.nii.gz'}, {'image': './imagesTs/MSWAL_0231_0000.nii.gz', 'label': './labelsTs/MSWAL_0231.nii.gz'}, {'image': './imagesTs/MSWAL_0232_0000.nii.gz', 'label': './labelsTs/MSWAL_0232.nii.gz'}, {'image': './imagesTs/MSWAL_0235_0000.nii.gz', 'label': './labelsTs/MSWAL_0235.nii.gz'}, {'image': './imagesTs/MSWAL_0236_0000.nii.gz', 'label': './labelsTs/MSWAL_0236.nii.gz'}, {'image': './imagesTs/MSWAL_0237_0000.nii.gz', 'label': './labelsTs/MSWAL_0237.nii.gz'}, {'image': './imagesTs/MSWAL_0239_0000.nii.gz', 'label': './labelsTs/MSWAL_0239.nii.gz'}, {'image': './imagesTs/MSWAL_0240_0000.nii.gz', 'label': './labelsTs/MSWAL_0240.nii.gz'}, {'image': './imagesTs/MSWAL_0244_0000.nii.gz', 'label': './labelsTs/MSWAL_0244.nii.gz'}, {'image': './imagesTs/MSWAL_0249_0000.nii.gz', 'label': './labelsTs/MSWAL_0249.nii.gz'}, {'image': './imagesTs/MSWAL_0250_0000.nii.gz', 
'label': './labelsTs/MSWAL_0250.nii.gz'}, {'image': './imagesTs/MSWAL_0266_0000.nii.gz', 'label': './labelsTs/MSWAL_0266.nii.gz'}, {'image': './imagesTs/MSWAL_0268_0000.nii.gz', 'label': './labelsTs/MSWAL_0268.nii.gz'}, {'image': './imagesTs/MSWAL_0269_0000.nii.gz', 'label': './labelsTs/MSWAL_0269.nii.gz'}, {'image': './imagesTs/MSWAL_0280_0000.nii.gz', 'label': './labelsTs/MSWAL_0280.nii.gz'}, {'image': './imagesTs/MSWAL_0286_0000.nii.gz', 'label': './labelsTs/MSWAL_0286.nii.gz'}, {'image': './imagesTs/MSWAL_0287_0000.nii.gz', 'label': './labelsTs/MSWAL_0287.nii.gz'}, {'image': './imagesTs/MSWAL_0291_0000.nii.gz', 'label': './labelsTs/MSWAL_0291.nii.gz'}, {'image': './imagesTs/MSWAL_0292_0000.nii.gz', 'label': './labelsTs/MSWAL_0292.nii.gz'}, {'image': './imagesTs/MSWAL_0294_0000.nii.gz', 'label': './labelsTs/MSWAL_0294.nii.gz'}, {'image': './imagesTs/MSWAL_0295_0000.nii.gz', 'label': './labelsTs/MSWAL_0295.nii.gz'}, {'image': './imagesTs/MSWAL_0298_0000.nii.gz', 'label': './labelsTs/MSWAL_0298.nii.gz'}, {'image': './imagesTs/MSWAL_0299_0000.nii.gz', 'label': './labelsTs/MSWAL_0299.nii.gz'}, {'image': './imagesTs/MSWAL_0300_0000.nii.gz', 'label': './labelsTs/MSWAL_0300.nii.gz'}, {'image': './imagesTs/MSWAL_0304_0000.nii.gz', 'label': './labelsTs/MSWAL_0304.nii.gz'}, {'image': './imagesTs/MSWAL_0305_0000.nii.gz', 'label': './labelsTs/MSWAL_0305.nii.gz'}, {'image': './imagesTs/MSWAL_0309_0000.nii.gz', 'label': './labelsTs/MSWAL_0309.nii.gz'}, {'image': './imagesTs/MSWAL_0310_0000.nii.gz', 'label': './labelsTs/MSWAL_0310.nii.gz'}, {'image': './imagesTs/MSWAL_0315_0000.nii.gz', 'label': './labelsTs/MSWAL_0315.nii.gz'}, {'image': './imagesTs/MSWAL_0319_0000.nii.gz', 'label': './labelsTs/MSWAL_0319.nii.gz'}, {'image': './imagesTs/MSWAL_0321_0000.nii.gz', 'label': './labelsTs/MSWAL_0321.nii.gz'}, {'image': './imagesTs/MSWAL_0322_0000.nii.gz', 'label': './labelsTs/MSWAL_0322.nii.gz'}, {'image': './imagesTs/MSWAL_0325_0000.nii.gz', 'label': './labelsTs/MSWAL_0325.nii.gz'}, 
{'image': './imagesTs/MSWAL_0329_0000.nii.gz', 'label': './labelsTs/MSWAL_0329.nii.gz'}, {'image': './imagesTs/MSWAL_0339_0000.nii.gz', 'label': './labelsTs/MSWAL_0339.nii.gz'}, {'image': './imagesTs/MSWAL_0340_0000.nii.gz', 'label': './labelsTs/MSWAL_0340.nii.gz'}, {'image': './imagesTs/MSWAL_0347_0000.nii.gz', 'label': './labelsTs/MSWAL_0347.nii.gz'}, {'image': './imagesTs/MSWAL_0349_0000.nii.gz', 'label': './labelsTs/MSWAL_0349.nii.gz'}, {'image': './imagesTs/MSWAL_0350_0000.nii.gz', 'label': './labelsTs/MSWAL_0350.nii.gz'}, {'image': './imagesTs/MSWAL_0351_0000.nii.gz', 'label': './labelsTs/MSWAL_0351.nii.gz'}, {'image': './imagesTs/MSWAL_0352_0000.nii.gz', 'label': './labelsTs/MSWAL_0352.nii.gz'}, {'image': './imagesTs/MSWAL_0358_0000.nii.gz', 'label': './labelsTs/MSWAL_0358.nii.gz'}, {'image': './imagesTs/MSWAL_0359_0000.nii.gz', 'label': './labelsTs/MSWAL_0359.nii.gz'}, {'image': './imagesTs/MSWAL_0364_0000.nii.gz', 'label': './labelsTs/MSWAL_0364.nii.gz'}, {'image': './imagesTs/MSWAL_0367_0000.nii.gz', 'label': './labelsTs/MSWAL_0367.nii.gz'}, {'image': './imagesTs/MSWAL_0368_0000.nii.gz', 'label': './labelsTs/MSWAL_0368.nii.gz'}, {'image': './imagesTs/MSWAL_0371_0000.nii.gz', 'label': './labelsTs/MSWAL_0371.nii.gz'}, {'image': './imagesTs/MSWAL_0372_0000.nii.gz', 'label': './labelsTs/MSWAL_0372.nii.gz'}, {'image': './imagesTs/MSWAL_0377_0000.nii.gz', 'label': './labelsTs/MSWAL_0377.nii.gz'}, {'image': './imagesTs/MSWAL_0383_0000.nii.gz', 'label': './labelsTs/MSWAL_0383.nii.gz'}, {'image': './imagesTs/MSWAL_0384_0000.nii.gz', 'label': './labelsTs/MSWAL_0384.nii.gz'}, {'image': './imagesTs/MSWAL_0385_0000.nii.gz', 'label': './labelsTs/MSWAL_0385.nii.gz'}, {'image': './imagesTs/MSWAL_0386_0000.nii.gz', 'label': './labelsTs/MSWAL_0386.nii.gz'}, {'image': './imagesTs/MSWAL_0394_0000.nii.gz', 'label': './labelsTs/MSWAL_0394.nii.gz'}, {'image': './imagesTs/MSWAL_0395_0000.nii.gz', 'label': './labelsTs/MSWAL_0395.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0396_0000.nii.gz', 'label': './labelsTs/MSWAL_0396.nii.gz'}, {'image': './imagesTs/MSWAL_0401_0000.nii.gz', 'label': './labelsTs/MSWAL_0401.nii.gz'}, {'image': './imagesTs/MSWAL_0404_0000.nii.gz', 'label': './labelsTs/MSWAL_0404.nii.gz'}, {'image': './imagesTs/MSWAL_0405_0000.nii.gz', 'label': './labelsTs/MSWAL_0405.nii.gz'}, {'image': './imagesTs/MSWAL_0406_0000.nii.gz', 'label': './labelsTs/MSWAL_0406.nii.gz'}, {'image': './imagesTs/MSWAL_0408_0000.nii.gz', 'label': './labelsTs/MSWAL_0408.nii.gz'}, {'image': './imagesTs/MSWAL_0413_0000.nii.gz', 'label': './labelsTs/MSWAL_0413.nii.gz'}, {'image': './imagesTs/MSWAL_0424_0000.nii.gz', 'label': './labelsTs/MSWAL_0424.nii.gz'}, {'image': './imagesTs/MSWAL_0433_0000.nii.gz', 'label': './labelsTs/MSWAL_0433.nii.gz'}, {'image': './imagesTs/MSWAL_0441_0000.nii.gz', 'label': './labelsTs/MSWAL_0441.nii.gz'}, {'image': './imagesTs/MSWAL_0443_0000.nii.gz', 'label': './labelsTs/MSWAL_0443.nii.gz'}, {'image': './imagesTs/MSWAL_0444_0000.nii.gz', 'label': './labelsTs/MSWAL_0444.nii.gz'}, {'image': './imagesTs/MSWAL_0445_0000.nii.gz', 'label': './labelsTs/MSWAL_0445.nii.gz'}, {'image': './imagesTs/MSWAL_0448_0000.nii.gz', 'label': './labelsTs/MSWAL_0448.nii.gz'}, {'image': './imagesTs/MSWAL_0449_0000.nii.gz', 'label': './labelsTs/MSWAL_0449.nii.gz'}, {'image': './imagesTs/MSWAL_0450_0000.nii.gz', 'label': './labelsTs/MSWAL_0450.nii.gz'}, {'image': './imagesTs/MSWAL_0451_0000.nii.gz', 'label': './labelsTs/MSWAL_0451.nii.gz'}, {'image': './imagesTs/MSWAL_0454_0000.nii.gz', 'label': './labelsTs/MSWAL_0454.nii.gz'}, {'image': './imagesTs/MSWAL_0456_0000.nii.gz', 'label': './labelsTs/MSWAL_0456.nii.gz'}, {'image': './imagesTs/MSWAL_0458_0000.nii.gz', 'label': './labelsTs/MSWAL_0458.nii.gz'}, {'image': './imagesTs/MSWAL_0459_0000.nii.gz', 'label': './labelsTs/MSWAL_0459.nii.gz'}, {'image': './imagesTs/MSWAL_0462_0000.nii.gz', 'label': './labelsTs/MSWAL_0462.nii.gz'}, {'image': './imagesTs/MSWAL_0467_0000.nii.gz', 
'label': './labelsTs/MSWAL_0467.nii.gz'}, {'image': './imagesTs/MSWAL_0469_0000.nii.gz', 'label': './labelsTs/MSWAL_0469.nii.gz'}, {'image': './imagesTs/MSWAL_0472_0000.nii.gz', 'label': './labelsTs/MSWAL_0472.nii.gz'}, {'image': './imagesTs/MSWAL_0478_0000.nii.gz', 'label': './labelsTs/MSWAL_0478.nii.gz'}, {'image': './imagesTs/MSWAL_0481_0000.nii.gz', 'label': './labelsTs/MSWAL_0481.nii.gz'}, {'image': './imagesTs/MSWAL_0494_0000.nii.gz', 'label': './labelsTs/MSWAL_0494.nii.gz'}, {'image': './imagesTs/MSWAL_0496_0000.nii.gz', 'label': './labelsTs/MSWAL_0496.nii.gz'}, {'image': './imagesTs/MSWAL_0499_0000.nii.gz', 'label': './labelsTs/MSWAL_0499.nii.gz'}, {'image': './imagesTs/MSWAL_0502_0000.nii.gz', 'label': './labelsTs/MSWAL_0502.nii.gz'}, {'image': './imagesTs/MSWAL_0503_0000.nii.gz', 'label': './labelsTs/MSWAL_0503.nii.gz'}, {'image': './imagesTs/MSWAL_0511_0000.nii.gz', 'label': './labelsTs/MSWAL_0511.nii.gz'}, {'image': './imagesTs/MSWAL_0513_0000.nii.gz', 'label': './labelsTs/MSWAL_0513.nii.gz'}, {'image': './imagesTs/MSWAL_0514_0000.nii.gz', 'label': './labelsTs/MSWAL_0514.nii.gz'}, {'image': './imagesTs/MSWAL_0515_0000.nii.gz', 'label': './labelsTs/MSWAL_0515.nii.gz'}, {'image': './imagesTs/MSWAL_0517_0000.nii.gz', 'label': './labelsTs/MSWAL_0517.nii.gz'}, {'image': './imagesTs/MSWAL_0520_0000.nii.gz', 'label': './labelsTs/MSWAL_0520.nii.gz'}, {'image': './imagesTs/MSWAL_0525_0000.nii.gz', 'label': './labelsTs/MSWAL_0525.nii.gz'}, {'image': './imagesTs/MSWAL_0528_0000.nii.gz', 'label': './labelsTs/MSWAL_0528.nii.gz'}, {'image': './imagesTs/MSWAL_0529_0000.nii.gz', 'label': './labelsTs/MSWAL_0529.nii.gz'}, {'image': './imagesTs/MSWAL_0532_0000.nii.gz', 'label': './labelsTs/MSWAL_0532.nii.gz'}, {'image': './imagesTs/MSWAL_0533_0000.nii.gz', 'label': './labelsTs/MSWAL_0533.nii.gz'}, {'image': './imagesTs/MSWAL_0537_0000.nii.gz', 'label': './labelsTs/MSWAL_0537.nii.gz'}, {'image': './imagesTs/MSWAL_0541_0000.nii.gz', 'label': './labelsTs/MSWAL_0541.nii.gz'}, 
{'image': './imagesTs/MSWAL_0543_0000.nii.gz', 'label': './labelsTs/MSWAL_0543.nii.gz'}, {'image': './imagesTs/MSWAL_0560_0000.nii.gz', 'label': './labelsTs/MSWAL_0560.nii.gz'}, {'image': './imagesTs/MSWAL_0565_0000.nii.gz', 'label': './labelsTs/MSWAL_0565.nii.gz'}, {'image': './imagesTs/MSWAL_0569_0000.nii.gz', 'label': './labelsTs/MSWAL_0569.nii.gz'}, {'image': './imagesTs/MSWAL_0570_0000.nii.gz', 'label': './labelsTs/MSWAL_0570.nii.gz'}, {'image': './imagesTs/MSWAL_0572_0000.nii.gz', 'label': './labelsTs/MSWAL_0572.nii.gz'}, {'image': './imagesTs/MSWAL_0576_0000.nii.gz', 'label': './labelsTs/MSWAL_0576.nii.gz'}, {'image': './imagesTs/MSWAL_0585_0000.nii.gz', 'label': './labelsTs/MSWAL_0585.nii.gz'}, {'image': './imagesTs/MSWAL_0587_0000.nii.gz', 'label': './labelsTs/MSWAL_0587.nii.gz'}, {'image': './imagesTs/MSWAL_0588_0000.nii.gz', 'label': './labelsTs/MSWAL_0588.nii.gz'}, {'image': './imagesTs/MSWAL_0589_0000.nii.gz', 'label': './labelsTs/MSWAL_0589.nii.gz'}, {'image': './imagesTs/MSWAL_0594_0000.nii.gz', 'label': './labelsTs/MSWAL_0594.nii.gz'}, {'image': './imagesTs/MSWAL_0603_0000.nii.gz', 'label': './labelsTs/MSWAL_0603.nii.gz'}, {'image': './imagesTs/MSWAL_0606_0000.nii.gz', 'label': './labelsTs/MSWAL_0606.nii.gz'}, {'image': './imagesTs/MSWAL_0607_0000.nii.gz', 'label': './labelsTs/MSWAL_0607.nii.gz'}, {'image': './imagesTs/MSWAL_0609_0000.nii.gz', 'label': './labelsTs/MSWAL_0609.nii.gz'}, {'image': './imagesTs/MSWAL_0610_0000.nii.gz', 'label': './labelsTs/MSWAL_0610.nii.gz'}, {'image': './imagesTs/MSWAL_0611_0000.nii.gz', 'label': './labelsTs/MSWAL_0611.nii.gz'}, {'image': './imagesTs/MSWAL_0613_0000.nii.gz', 'label': './labelsTs/MSWAL_0613.nii.gz'}, {'image': './imagesTs/MSWAL_0618_0000.nii.gz', 'label': './labelsTs/MSWAL_0618.nii.gz'}, {'image': './imagesTs/MSWAL_0619_0000.nii.gz', 'label': './labelsTs/MSWAL_0619.nii.gz'}, {'image': './imagesTs/MSWAL_0620_0000.nii.gz', 'label': './labelsTs/MSWAL_0620.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0622_0000.nii.gz', 'label': './labelsTs/MSWAL_0622.nii.gz'}, {'image': './imagesTs/MSWAL_0624_0000.nii.gz', 'label': './labelsTs/MSWAL_0624.nii.gz'}, {'image': './imagesTs/MSWAL_0631_0000.nii.gz', 'label': './labelsTs/MSWAL_0631.nii.gz'}, {'image': './imagesTs/MSWAL_0633_0000.nii.gz', 'label': './labelsTs/MSWAL_0633.nii.gz'}, {'image': './imagesTs/MSWAL_0634_0000.nii.gz', 'label': './labelsTs/MSWAL_0634.nii.gz'}, {'image': './imagesTs/MSWAL_0637_0000.nii.gz', 'label': './labelsTs/MSWAL_0637.nii.gz'}, {'image': './imagesTs/MSWAL_0639_0000.nii.gz', 'label': './labelsTs/MSWAL_0639.nii.gz'}, {'image': './imagesTs/MSWAL_0642_0000.nii.gz', 'label': './labelsTs/MSWAL_0642.nii.gz'}, {'image': './imagesTs/MSWAL_0645_0000.nii.gz', 'label': './labelsTs/MSWAL_0645.nii.gz'}, {'image': './imagesTs/MSWAL_0647_0000.nii.gz', 'label': './labelsTs/MSWAL_0647.nii.gz'}, {'image': './imagesTs/MSWAL_0652_0000.nii.gz', 'label': './labelsTs/MSWAL_0652.nii.gz'}, {'image': './imagesTs/MSWAL_0657_0000.nii.gz', 'label': './labelsTs/MSWAL_0657.nii.gz'}, {'image': './imagesTs/MSWAL_0659_0000.nii.gz', 'label': './labelsTs/MSWAL_0659.nii.gz'}, {'image': './imagesTs/MSWAL_0664_0000.nii.gz', 'label': './labelsTs/MSWAL_0664.nii.gz'}, {'image': './imagesTs/MSWAL_0665_0000.nii.gz', 'label': './labelsTs/MSWAL_0665.nii.gz'}, {'image': './imagesTs/MSWAL_0672_0000.nii.gz', 'label': './labelsTs/MSWAL_0672.nii.gz'}, {'image': './imagesTs/MSWAL_0678_0000.nii.gz', 'label': './labelsTs/MSWAL_0678.nii.gz'}, {'image': './imagesTs/MSWAL_0683_0000.nii.gz', 'label': './labelsTs/MSWAL_0683.nii.gz'}, {'image': './imagesTs/MSWAL_0684_0000.nii.gz', 'label': './labelsTs/MSWAL_0684.nii.gz'}, {'image': './imagesTs/MSWAL_0689_0000.nii.gz', 'label': './labelsTs/MSWAL_0689.nii.gz'}, {'image': './imagesTs/MSWAL_0691_0000.nii.gz', 'label': './labelsTs/MSWAL_0691.nii.gz'}]}", + "device": "cuda:0", + "disable_checkpointing": "False", + "enable_deep_supervision": "True", + "fold": "4", + 
"folder_with_segs_from_previous_stage": "None", + "gpu_name": "NVIDIA A100-SXM4-80GB", + "grad_scaler": "", + "hostname": "cn1097", + "inference_allowed_mirroring_axes": "(0, 1, 2)", + "initial_lr": "0.01", + "is_cascaded": "False", + "is_ddp": "False", + "label_manager": "", + "local_rank": "0", + "log_file": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/training_log_2026_4_8_16_19_41.txt", + "logger": "", + "loss": "DeepSupervisionWrapper(\n (loss): DC_and_CE_loss(\n (ce): RobustCrossEntropyLoss()\n (dc): OptimizedModule(\n (_orig_mod): MemoryEfficientSoftDiceLoss()\n )\n )\n)", + "lr_scheduler": "", + "my_init_kwargs": "{'plans': {'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'configurations': {'2d': {'data_identifier': 'nnUNetPlans_2d', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 35, 'patch_size': [512, 512], 'median_image_size_in_voxels': [512.0, 512.0], 'spacing': [0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 8, 'features_per_stage': [32, 64, 128, 256, 512, 512, 512, 512], 'conv_op': 
'torch.nn.modules.conv.Conv2d', 'kernel_sizes': [[3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3]], 'strides': [[1, 1], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm2d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_lowres': {'data_identifier': 'nnUNetResEncUNetLPlans_3d_lowres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [190, 381, 381], 'spacing': [1.6798954741801528, 1.0079372845080916, 1.0079372845080916], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': 
True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': False, 'next_stage': '3d_cascade_fullres'}, '3d_fullres': {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 
'batch_dice': True}, '3d_cascade_fullres': {'inherits_from': '3d_fullres', 'previous_stage': '3d_lowres'}}, 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}}, 'configuration': '3d_fullres', 'fold': 4, 'dataset_json': {'name': 'MSWAL', 'description': ' 3D Multi-class Segmentation of Whole Abdominal Lesions Dataset', 'licence': 'CC BY-NC 4.0', 'relase': 'July 8, 2025', 'tensorImageSize': '3D', 'file_ending': '.nii.gz', 'channel_names': {'0': 'CT'}, 'labels': {'background': 0, 'gallstone': 1, 'kidney stone': 2, 'liver tumor': 3, 'kidney tumor': 4, 'pancreatic cancer': 5, 'liver cyst': 6, 'kidney cyst': 7}, 'numTraining': 484, 'numTest': 210, 'training': [{'image': './imagesTr/MSWAL_0001_0000.nii.gz', 'label': './labelsTr/MSWAL_0001.nii.gz'}, {'image': './imagesTr/MSWAL_0002_0000.nii.gz', 'label': './labelsTr/MSWAL_0002.nii.gz'}, {'image': './imagesTr/MSWAL_0003_0000.nii.gz', 'label': './labelsTr/MSWAL_0003.nii.gz'}, {'image': './imagesTr/MSWAL_0008_0000.nii.gz', 'label': './labelsTr/MSWAL_0008.nii.gz'}, {'image': './imagesTr/MSWAL_0009_0000.nii.gz', 'label': './labelsTr/MSWAL_0009.nii.gz'}, {'image': './imagesTr/MSWAL_0011_0000.nii.gz', 'label': './labelsTr/MSWAL_0011.nii.gz'}, {'image': './imagesTr/MSWAL_0013_0000.nii.gz', 'label': './labelsTr/MSWAL_0013.nii.gz'}, {'image': './imagesTr/MSWAL_0014_0000.nii.gz', 'label': './labelsTr/MSWAL_0014.nii.gz'}, {'image': './imagesTr/MSWAL_0015_0000.nii.gz', 'label': './labelsTr/MSWAL_0015.nii.gz'}, {'image': './imagesTr/MSWAL_0017_0000.nii.gz', 'label': './labelsTr/MSWAL_0017.nii.gz'}, {'image': './imagesTr/MSWAL_0018_0000.nii.gz', 'label': './labelsTr/MSWAL_0018.nii.gz'}, {'image': './imagesTr/MSWAL_0020_0000.nii.gz', 'label': './labelsTr/MSWAL_0020.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0021_0000.nii.gz', 'label': './labelsTr/MSWAL_0021.nii.gz'}, {'image': './imagesTr/MSWAL_0022_0000.nii.gz', 'label': './labelsTr/MSWAL_0022.nii.gz'}, {'image': './imagesTr/MSWAL_0024_0000.nii.gz', 'label': './labelsTr/MSWAL_0024.nii.gz'}, {'image': './imagesTr/MSWAL_0026_0000.nii.gz', 'label': './labelsTr/MSWAL_0026.nii.gz'}, {'image': './imagesTr/MSWAL_0027_0000.nii.gz', 'label': './labelsTr/MSWAL_0027.nii.gz'}, {'image': './imagesTr/MSWAL_0028_0000.nii.gz', 'label': './labelsTr/MSWAL_0028.nii.gz'}, {'image': './imagesTr/MSWAL_0029_0000.nii.gz', 'label': './labelsTr/MSWAL_0029.nii.gz'}, {'image': './imagesTr/MSWAL_0031_0000.nii.gz', 'label': './labelsTr/MSWAL_0031.nii.gz'}, {'image': './imagesTr/MSWAL_0032_0000.nii.gz', 'label': './labelsTr/MSWAL_0032.nii.gz'}, {'image': './imagesTr/MSWAL_0033_0000.nii.gz', 'label': './labelsTr/MSWAL_0033.nii.gz'}, {'image': './imagesTr/MSWAL_0034_0000.nii.gz', 'label': './labelsTr/MSWAL_0034.nii.gz'}, {'image': './imagesTr/MSWAL_0035_0000.nii.gz', 'label': './labelsTr/MSWAL_0035.nii.gz'}, {'image': './imagesTr/MSWAL_0037_0000.nii.gz', 'label': './labelsTr/MSWAL_0037.nii.gz'}, {'image': './imagesTr/MSWAL_0038_0000.nii.gz', 'label': './labelsTr/MSWAL_0038.nii.gz'}, {'image': './imagesTr/MSWAL_0039_0000.nii.gz', 'label': './labelsTr/MSWAL_0039.nii.gz'}, {'image': './imagesTr/MSWAL_0040_0000.nii.gz', 'label': './labelsTr/MSWAL_0040.nii.gz'}, {'image': './imagesTr/MSWAL_0041_0000.nii.gz', 'label': './labelsTr/MSWAL_0041.nii.gz'}, {'image': './imagesTr/MSWAL_0042_0000.nii.gz', 'label': './labelsTr/MSWAL_0042.nii.gz'}, {'image': './imagesTr/MSWAL_0045_0000.nii.gz', 'label': './labelsTr/MSWAL_0045.nii.gz'}, {'image': './imagesTr/MSWAL_0046_0000.nii.gz', 'label': './labelsTr/MSWAL_0046.nii.gz'}, {'image': './imagesTr/MSWAL_0049_0000.nii.gz', 'label': './labelsTr/MSWAL_0049.nii.gz'}, {'image': './imagesTr/MSWAL_0050_0000.nii.gz', 'label': './labelsTr/MSWAL_0050.nii.gz'}, {'image': './imagesTr/MSWAL_0051_0000.nii.gz', 
'label': './labelsTr/MSWAL_0051.nii.gz'}, {'image': './imagesTr/MSWAL_0052_0000.nii.gz', 'label': './labelsTr/MSWAL_0052.nii.gz'}, {'image': './imagesTr/MSWAL_0054_0000.nii.gz', 'label': './labelsTr/MSWAL_0054.nii.gz'}, {'image': './imagesTr/MSWAL_0055_0000.nii.gz', 'label': './labelsTr/MSWAL_0055.nii.gz'}, {'image': './imagesTr/MSWAL_0056_0000.nii.gz', 'label': './labelsTr/MSWAL_0056.nii.gz'}, {'image': './imagesTr/MSWAL_0057_0000.nii.gz', 'label': './labelsTr/MSWAL_0057.nii.gz'}, {'image': './imagesTr/MSWAL_0059_0000.nii.gz', 'label': './labelsTr/MSWAL_0059.nii.gz'}, {'image': './imagesTr/MSWAL_0060_0000.nii.gz', 'label': './labelsTr/MSWAL_0060.nii.gz'}, {'image': './imagesTr/MSWAL_0061_0000.nii.gz', 'label': './labelsTr/MSWAL_0061.nii.gz'}, {'image': './imagesTr/MSWAL_0063_0000.nii.gz', 'label': './labelsTr/MSWAL_0063.nii.gz'}, {'image': './imagesTr/MSWAL_0064_0000.nii.gz', 'label': './labelsTr/MSWAL_0064.nii.gz'}, {'image': './imagesTr/MSWAL_0065_0000.nii.gz', 'label': './labelsTr/MSWAL_0065.nii.gz'}, {'image': './imagesTr/MSWAL_0066_0000.nii.gz', 'label': './labelsTr/MSWAL_0066.nii.gz'}, {'image': './imagesTr/MSWAL_0067_0000.nii.gz', 'label': './labelsTr/MSWAL_0067.nii.gz'}, {'image': './imagesTr/MSWAL_0069_0000.nii.gz', 'label': './labelsTr/MSWAL_0069.nii.gz'}, {'image': './imagesTr/MSWAL_0072_0000.nii.gz', 'label': './labelsTr/MSWAL_0072.nii.gz'}, {'image': './imagesTr/MSWAL_0075_0000.nii.gz', 'label': './labelsTr/MSWAL_0075.nii.gz'}, {'image': './imagesTr/MSWAL_0077_0000.nii.gz', 'label': './labelsTr/MSWAL_0077.nii.gz'}, {'image': './imagesTr/MSWAL_0080_0000.nii.gz', 'label': './labelsTr/MSWAL_0080.nii.gz'}, {'image': './imagesTr/MSWAL_0082_0000.nii.gz', 'label': './labelsTr/MSWAL_0082.nii.gz'}, {'image': './imagesTr/MSWAL_0083_0000.nii.gz', 'label': './labelsTr/MSWAL_0083.nii.gz'}, {'image': './imagesTr/MSWAL_0084_0000.nii.gz', 'label': './labelsTr/MSWAL_0084.nii.gz'}, {'image': './imagesTr/MSWAL_0085_0000.nii.gz', 'label': './labelsTr/MSWAL_0085.nii.gz'}, 
{'image': './imagesTr/MSWAL_0086_0000.nii.gz', 'label': './labelsTr/MSWAL_0086.nii.gz'}, {'image': './imagesTr/MSWAL_0088_0000.nii.gz', 'label': './labelsTr/MSWAL_0088.nii.gz'}, {'image': './imagesTr/MSWAL_0089_0000.nii.gz', 'label': './labelsTr/MSWAL_0089.nii.gz'}, {'image': './imagesTr/MSWAL_0092_0000.nii.gz', 'label': './labelsTr/MSWAL_0092.nii.gz'}, {'image': './imagesTr/MSWAL_0093_0000.nii.gz', 'label': './labelsTr/MSWAL_0093.nii.gz'}, {'image': './imagesTr/MSWAL_0094_0000.nii.gz', 'label': './labelsTr/MSWAL_0094.nii.gz'}, {'image': './imagesTr/MSWAL_0095_0000.nii.gz', 'label': './labelsTr/MSWAL_0095.nii.gz'}, {'image': './imagesTr/MSWAL_0096_0000.nii.gz', 'label': './labelsTr/MSWAL_0096.nii.gz'}, {'image': './imagesTr/MSWAL_0098_0000.nii.gz', 'label': './labelsTr/MSWAL_0098.nii.gz'}, {'image': './imagesTr/MSWAL_0099_0000.nii.gz', 'label': './labelsTr/MSWAL_0099.nii.gz'}, {'image': './imagesTr/MSWAL_0101_0000.nii.gz', 'label': './labelsTr/MSWAL_0101.nii.gz'}, {'image': './imagesTr/MSWAL_0102_0000.nii.gz', 'label': './labelsTr/MSWAL_0102.nii.gz'}, {'image': './imagesTr/MSWAL_0103_0000.nii.gz', 'label': './labelsTr/MSWAL_0103.nii.gz'}, {'image': './imagesTr/MSWAL_0104_0000.nii.gz', 'label': './labelsTr/MSWAL_0104.nii.gz'}, {'image': './imagesTr/MSWAL_0105_0000.nii.gz', 'label': './labelsTr/MSWAL_0105.nii.gz'}, {'image': './imagesTr/MSWAL_0106_0000.nii.gz', 'label': './labelsTr/MSWAL_0106.nii.gz'}, {'image': './imagesTr/MSWAL_0108_0000.nii.gz', 'label': './labelsTr/MSWAL_0108.nii.gz'}, {'image': './imagesTr/MSWAL_0109_0000.nii.gz', 'label': './labelsTr/MSWAL_0109.nii.gz'}, {'image': './imagesTr/MSWAL_0110_0000.nii.gz', 'label': './labelsTr/MSWAL_0110.nii.gz'}, {'image': './imagesTr/MSWAL_0111_0000.nii.gz', 'label': './labelsTr/MSWAL_0111.nii.gz'}, {'image': './imagesTr/MSWAL_0112_0000.nii.gz', 'label': './labelsTr/MSWAL_0112.nii.gz'}, {'image': './imagesTr/MSWAL_0113_0000.nii.gz', 'label': './labelsTr/MSWAL_0113.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0114_0000.nii.gz', 'label': './labelsTr/MSWAL_0114.nii.gz'}, {'image': './imagesTr/MSWAL_0117_0000.nii.gz', 'label': './labelsTr/MSWAL_0117.nii.gz'}, {'image': './imagesTr/MSWAL_0119_0000.nii.gz', 'label': './labelsTr/MSWAL_0119.nii.gz'}, {'image': './imagesTr/MSWAL_0120_0000.nii.gz', 'label': './labelsTr/MSWAL_0120.nii.gz'}, {'image': './imagesTr/MSWAL_0122_0000.nii.gz', 'label': './labelsTr/MSWAL_0122.nii.gz'}, {'image': './imagesTr/MSWAL_0124_0000.nii.gz', 'label': './labelsTr/MSWAL_0124.nii.gz'}, {'image': './imagesTr/MSWAL_0125_0000.nii.gz', 'label': './labelsTr/MSWAL_0125.nii.gz'}, {'image': './imagesTr/MSWAL_0126_0000.nii.gz', 'label': './labelsTr/MSWAL_0126.nii.gz'}, {'image': './imagesTr/MSWAL_0127_0000.nii.gz', 'label': './labelsTr/MSWAL_0127.nii.gz'}, {'image': './imagesTr/MSWAL_0128_0000.nii.gz', 'label': './labelsTr/MSWAL_0128.nii.gz'}, {'image': './imagesTr/MSWAL_0129_0000.nii.gz', 'label': './labelsTr/MSWAL_0129.nii.gz'}, {'image': './imagesTr/MSWAL_0130_0000.nii.gz', 'label': './labelsTr/MSWAL_0130.nii.gz'}, {'image': './imagesTr/MSWAL_0132_0000.nii.gz', 'label': './labelsTr/MSWAL_0132.nii.gz'}, {'image': './imagesTr/MSWAL_0133_0000.nii.gz', 'label': './labelsTr/MSWAL_0133.nii.gz'}, {'image': './imagesTr/MSWAL_0134_0000.nii.gz', 'label': './labelsTr/MSWAL_0134.nii.gz'}, {'image': './imagesTr/MSWAL_0136_0000.nii.gz', 'label': './labelsTr/MSWAL_0136.nii.gz'}, {'image': './imagesTr/MSWAL_0138_0000.nii.gz', 'label': './labelsTr/MSWAL_0138.nii.gz'}, {'image': './imagesTr/MSWAL_0139_0000.nii.gz', 'label': './labelsTr/MSWAL_0139.nii.gz'}, {'image': './imagesTr/MSWAL_0140_0000.nii.gz', 'label': './labelsTr/MSWAL_0140.nii.gz'}, {'image': './imagesTr/MSWAL_0141_0000.nii.gz', 'label': './labelsTr/MSWAL_0141.nii.gz'}, {'image': './imagesTr/MSWAL_0142_0000.nii.gz', 'label': './labelsTr/MSWAL_0142.nii.gz'}, {'image': './imagesTr/MSWAL_0143_0000.nii.gz', 'label': './labelsTr/MSWAL_0143.nii.gz'}, {'image': './imagesTr/MSWAL_0145_0000.nii.gz', 
'label': './labelsTr/MSWAL_0145.nii.gz'}, {'image': './imagesTr/MSWAL_0147_0000.nii.gz', 'label': './labelsTr/MSWAL_0147.nii.gz'}, {'image': './imagesTr/MSWAL_0148_0000.nii.gz', 'label': './labelsTr/MSWAL_0148.nii.gz'}, {'image': './imagesTr/MSWAL_0149_0000.nii.gz', 'label': './labelsTr/MSWAL_0149.nii.gz'}, {'image': './imagesTr/MSWAL_0150_0000.nii.gz', 'label': './labelsTr/MSWAL_0150.nii.gz'}, {'image': './imagesTr/MSWAL_0151_0000.nii.gz', 'label': './labelsTr/MSWAL_0151.nii.gz'}, {'image': './imagesTr/MSWAL_0152_0000.nii.gz', 'label': './labelsTr/MSWAL_0152.nii.gz'}, {'image': './imagesTr/MSWAL_0157_0000.nii.gz', 'label': './labelsTr/MSWAL_0157.nii.gz'}, {'image': './imagesTr/MSWAL_0159_0000.nii.gz', 'label': './labelsTr/MSWAL_0159.nii.gz'}, {'image': './imagesTr/MSWAL_0162_0000.nii.gz', 'label': './labelsTr/MSWAL_0162.nii.gz'}, {'image': './imagesTr/MSWAL_0163_0000.nii.gz', 'label': './labelsTr/MSWAL_0163.nii.gz'}, {'image': './imagesTr/MSWAL_0165_0000.nii.gz', 'label': './labelsTr/MSWAL_0165.nii.gz'}, {'image': './imagesTr/MSWAL_0166_0000.nii.gz', 'label': './labelsTr/MSWAL_0166.nii.gz'}, {'image': './imagesTr/MSWAL_0167_0000.nii.gz', 'label': './labelsTr/MSWAL_0167.nii.gz'}, {'image': './imagesTr/MSWAL_0168_0000.nii.gz', 'label': './labelsTr/MSWAL_0168.nii.gz'}, {'image': './imagesTr/MSWAL_0169_0000.nii.gz', 'label': './labelsTr/MSWAL_0169.nii.gz'}, {'image': './imagesTr/MSWAL_0170_0000.nii.gz', 'label': './labelsTr/MSWAL_0170.nii.gz'}, {'image': './imagesTr/MSWAL_0171_0000.nii.gz', 'label': './labelsTr/MSWAL_0171.nii.gz'}, {'image': './imagesTr/MSWAL_0172_0000.nii.gz', 'label': './labelsTr/MSWAL_0172.nii.gz'}, {'image': './imagesTr/MSWAL_0173_0000.nii.gz', 'label': './labelsTr/MSWAL_0173.nii.gz'}, {'image': './imagesTr/MSWAL_0174_0000.nii.gz', 'label': './labelsTr/MSWAL_0174.nii.gz'}, {'image': './imagesTr/MSWAL_0175_0000.nii.gz', 'label': './labelsTr/MSWAL_0175.nii.gz'}, {'image': './imagesTr/MSWAL_0176_0000.nii.gz', 'label': './labelsTr/MSWAL_0176.nii.gz'}, 
{'image': './imagesTr/MSWAL_0177_0000.nii.gz', 'label': './labelsTr/MSWAL_0177.nii.gz'}, {'image': './imagesTr/MSWAL_0178_0000.nii.gz', 'label': './labelsTr/MSWAL_0178.nii.gz'}, {'image': './imagesTr/MSWAL_0179_0000.nii.gz', 'label': './labelsTr/MSWAL_0179.nii.gz'}, {'image': './imagesTr/MSWAL_0180_0000.nii.gz', 'label': './labelsTr/MSWAL_0180.nii.gz'}, {'image': './imagesTr/MSWAL_0182_0000.nii.gz', 'label': './labelsTr/MSWAL_0182.nii.gz'}, {'image': './imagesTr/MSWAL_0183_0000.nii.gz', 'label': './labelsTr/MSWAL_0183.nii.gz'}, {'image': './imagesTr/MSWAL_0184_0000.nii.gz', 'label': './labelsTr/MSWAL_0184.nii.gz'}, {'image': './imagesTr/MSWAL_0185_0000.nii.gz', 'label': './labelsTr/MSWAL_0185.nii.gz'}, {'image': './imagesTr/MSWAL_0186_0000.nii.gz', 'label': './labelsTr/MSWAL_0186.nii.gz'}, {'image': './imagesTr/MSWAL_0187_0000.nii.gz', 'label': './labelsTr/MSWAL_0187.nii.gz'}, {'image': './imagesTr/MSWAL_0188_0000.nii.gz', 'label': './labelsTr/MSWAL_0188.nii.gz'}, {'image': './imagesTr/MSWAL_0189_0000.nii.gz', 'label': './labelsTr/MSWAL_0189.nii.gz'}, {'image': './imagesTr/MSWAL_0193_0000.nii.gz', 'label': './labelsTr/MSWAL_0193.nii.gz'}, {'image': './imagesTr/MSWAL_0194_0000.nii.gz', 'label': './labelsTr/MSWAL_0194.nii.gz'}, {'image': './imagesTr/MSWAL_0195_0000.nii.gz', 'label': './labelsTr/MSWAL_0195.nii.gz'}, {'image': './imagesTr/MSWAL_0199_0000.nii.gz', 'label': './labelsTr/MSWAL_0199.nii.gz'}, {'image': './imagesTr/MSWAL_0201_0000.nii.gz', 'label': './labelsTr/MSWAL_0201.nii.gz'}, {'image': './imagesTr/MSWAL_0202_0000.nii.gz', 'label': './labelsTr/MSWAL_0202.nii.gz'}, {'image': './imagesTr/MSWAL_0203_0000.nii.gz', 'label': './labelsTr/MSWAL_0203.nii.gz'}, {'image': './imagesTr/MSWAL_0204_0000.nii.gz', 'label': './labelsTr/MSWAL_0204.nii.gz'}, {'image': './imagesTr/MSWAL_0207_0000.nii.gz', 'label': './labelsTr/MSWAL_0207.nii.gz'}, {'image': './imagesTr/MSWAL_0208_0000.nii.gz', 'label': './labelsTr/MSWAL_0208.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0209_0000.nii.gz', 'label': './labelsTr/MSWAL_0209.nii.gz'}, {'image': './imagesTr/MSWAL_0214_0000.nii.gz', 'label': './labelsTr/MSWAL_0214.nii.gz'}, {'image': './imagesTr/MSWAL_0217_0000.nii.gz', 'label': './labelsTr/MSWAL_0217.nii.gz'}, {'image': './imagesTr/MSWAL_0218_0000.nii.gz', 'label': './labelsTr/MSWAL_0218.nii.gz'}, {'image': './imagesTr/MSWAL_0219_0000.nii.gz', 'label': './labelsTr/MSWAL_0219.nii.gz'}, {'image': './imagesTr/MSWAL_0220_0000.nii.gz', 'label': './labelsTr/MSWAL_0220.nii.gz'}, {'image': './imagesTr/MSWAL_0221_0000.nii.gz', 'label': './labelsTr/MSWAL_0221.nii.gz'}, {'image': './imagesTr/MSWAL_0222_0000.nii.gz', 'label': './labelsTr/MSWAL_0222.nii.gz'}, {'image': './imagesTr/MSWAL_0223_0000.nii.gz', 'label': './labelsTr/MSWAL_0223.nii.gz'}, {'image': './imagesTr/MSWAL_0224_0000.nii.gz', 'label': './labelsTr/MSWAL_0224.nii.gz'}, {'image': './imagesTr/MSWAL_0225_0000.nii.gz', 'label': './labelsTr/MSWAL_0225.nii.gz'}, {'image': './imagesTr/MSWAL_0226_0000.nii.gz', 'label': './labelsTr/MSWAL_0226.nii.gz'}, {'image': './imagesTr/MSWAL_0227_0000.nii.gz', 'label': './labelsTr/MSWAL_0227.nii.gz'}, {'image': './imagesTr/MSWAL_0228_0000.nii.gz', 'label': './labelsTr/MSWAL_0228.nii.gz'}, {'image': './imagesTr/MSWAL_0229_0000.nii.gz', 'label': './labelsTr/MSWAL_0229.nii.gz'}, {'image': './imagesTr/MSWAL_0230_0000.nii.gz', 'label': './labelsTr/MSWAL_0230.nii.gz'}, {'image': './imagesTr/MSWAL_0233_0000.nii.gz', 'label': './labelsTr/MSWAL_0233.nii.gz'}, {'image': './imagesTr/MSWAL_0234_0000.nii.gz', 'label': './labelsTr/MSWAL_0234.nii.gz'}, {'image': './imagesTr/MSWAL_0238_0000.nii.gz', 'label': './labelsTr/MSWAL_0238.nii.gz'}, {'image': './imagesTr/MSWAL_0241_0000.nii.gz', 'label': './labelsTr/MSWAL_0241.nii.gz'}, {'image': './imagesTr/MSWAL_0242_0000.nii.gz', 'label': './labelsTr/MSWAL_0242.nii.gz'}, {'image': './imagesTr/MSWAL_0243_0000.nii.gz', 'label': './labelsTr/MSWAL_0243.nii.gz'}, {'image': './imagesTr/MSWAL_0245_0000.nii.gz', 
'label': './labelsTr/MSWAL_0245.nii.gz'}, {'image': './imagesTr/MSWAL_0246_0000.nii.gz', 'label': './labelsTr/MSWAL_0246.nii.gz'}, {'image': './imagesTr/MSWAL_0247_0000.nii.gz', 'label': './labelsTr/MSWAL_0247.nii.gz'}, {'image': './imagesTr/MSWAL_0248_0000.nii.gz', 'label': './labelsTr/MSWAL_0248.nii.gz'}, {'image': './imagesTr/MSWAL_0251_0000.nii.gz', 'label': './labelsTr/MSWAL_0251.nii.gz'}, {'image': './imagesTr/MSWAL_0252_0000.nii.gz', 'label': './labelsTr/MSWAL_0252.nii.gz'}, {'image': './imagesTr/MSWAL_0253_0000.nii.gz', 'label': './labelsTr/MSWAL_0253.nii.gz'}, {'image': './imagesTr/MSWAL_0254_0000.nii.gz', 'label': './labelsTr/MSWAL_0254.nii.gz'}, {'image': './imagesTr/MSWAL_0255_0000.nii.gz', 'label': './labelsTr/MSWAL_0255.nii.gz'}, {'image': './imagesTr/MSWAL_0256_0000.nii.gz', 'label': './labelsTr/MSWAL_0256.nii.gz'}, {'image': './imagesTr/MSWAL_0257_0000.nii.gz', 'label': './labelsTr/MSWAL_0257.nii.gz'}, {'image': './imagesTr/MSWAL_0258_0000.nii.gz', 'label': './labelsTr/MSWAL_0258.nii.gz'}, {'image': './imagesTr/MSWAL_0259_0000.nii.gz', 'label': './labelsTr/MSWAL_0259.nii.gz'}, {'image': './imagesTr/MSWAL_0260_0000.nii.gz', 'label': './labelsTr/MSWAL_0260.nii.gz'}, {'image': './imagesTr/MSWAL_0261_0000.nii.gz', 'label': './labelsTr/MSWAL_0261.nii.gz'}, {'image': './imagesTr/MSWAL_0262_0000.nii.gz', 'label': './labelsTr/MSWAL_0262.nii.gz'}, {'image': './imagesTr/MSWAL_0263_0000.nii.gz', 'label': './labelsTr/MSWAL_0263.nii.gz'}, {'image': './imagesTr/MSWAL_0264_0000.nii.gz', 'label': './labelsTr/MSWAL_0264.nii.gz'}, {'image': './imagesTr/MSWAL_0265_0000.nii.gz', 'label': './labelsTr/MSWAL_0265.nii.gz'}, {'image': './imagesTr/MSWAL_0267_0000.nii.gz', 'label': './labelsTr/MSWAL_0267.nii.gz'}, {'image': './imagesTr/MSWAL_0270_0000.nii.gz', 'label': './labelsTr/MSWAL_0270.nii.gz'}, {'image': './imagesTr/MSWAL_0271_0000.nii.gz', 'label': './labelsTr/MSWAL_0271.nii.gz'}, {'image': './imagesTr/MSWAL_0272_0000.nii.gz', 'label': './labelsTr/MSWAL_0272.nii.gz'}, 
{'image': './imagesTr/MSWAL_0273_0000.nii.gz', 'label': './labelsTr/MSWAL_0273.nii.gz'}, {'image': './imagesTr/MSWAL_0274_0000.nii.gz', 'label': './labelsTr/MSWAL_0274.nii.gz'}, {'image': './imagesTr/MSWAL_0275_0000.nii.gz', 'label': './labelsTr/MSWAL_0275.nii.gz'}, {'image': './imagesTr/MSWAL_0276_0000.nii.gz', 'label': './labelsTr/MSWAL_0276.nii.gz'}, {'image': './imagesTr/MSWAL_0277_0000.nii.gz', 'label': './labelsTr/MSWAL_0277.nii.gz'}, {'image': './imagesTr/MSWAL_0278_0000.nii.gz', 'label': './labelsTr/MSWAL_0278.nii.gz'}, {'image': './imagesTr/MSWAL_0279_0000.nii.gz', 'label': './labelsTr/MSWAL_0279.nii.gz'}, {'image': './imagesTr/MSWAL_0281_0000.nii.gz', 'label': './labelsTr/MSWAL_0281.nii.gz'}, {'image': './imagesTr/MSWAL_0282_0000.nii.gz', 'label': './labelsTr/MSWAL_0282.nii.gz'}, {'image': './imagesTr/MSWAL_0283_0000.nii.gz', 'label': './labelsTr/MSWAL_0283.nii.gz'}, {'image': './imagesTr/MSWAL_0284_0000.nii.gz', 'label': './labelsTr/MSWAL_0284.nii.gz'}, {'image': './imagesTr/MSWAL_0285_0000.nii.gz', 'label': './labelsTr/MSWAL_0285.nii.gz'}, {'image': './imagesTr/MSWAL_0288_0000.nii.gz', 'label': './labelsTr/MSWAL_0288.nii.gz'}, {'image': './imagesTr/MSWAL_0289_0000.nii.gz', 'label': './labelsTr/MSWAL_0289.nii.gz'}, {'image': './imagesTr/MSWAL_0290_0000.nii.gz', 'label': './labelsTr/MSWAL_0290.nii.gz'}, {'image': './imagesTr/MSWAL_0293_0000.nii.gz', 'label': './labelsTr/MSWAL_0293.nii.gz'}, {'image': './imagesTr/MSWAL_0296_0000.nii.gz', 'label': './labelsTr/MSWAL_0296.nii.gz'}, {'image': './imagesTr/MSWAL_0297_0000.nii.gz', 'label': './labelsTr/MSWAL_0297.nii.gz'}, {'image': './imagesTr/MSWAL_0301_0000.nii.gz', 'label': './labelsTr/MSWAL_0301.nii.gz'}, {'image': './imagesTr/MSWAL_0302_0000.nii.gz', 'label': './labelsTr/MSWAL_0302.nii.gz'}, {'image': './imagesTr/MSWAL_0303_0000.nii.gz', 'label': './labelsTr/MSWAL_0303.nii.gz'}, {'image': './imagesTr/MSWAL_0306_0000.nii.gz', 'label': './labelsTr/MSWAL_0306.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0307_0000.nii.gz', 'label': './labelsTr/MSWAL_0307.nii.gz'}, {'image': './imagesTr/MSWAL_0308_0000.nii.gz', 'label': './labelsTr/MSWAL_0308.nii.gz'}, {'image': './imagesTr/MSWAL_0311_0000.nii.gz', 'label': './labelsTr/MSWAL_0311.nii.gz'}, {'image': './imagesTr/MSWAL_0312_0000.nii.gz', 'label': './labelsTr/MSWAL_0312.nii.gz'}, {'image': './imagesTr/MSWAL_0313_0000.nii.gz', 'label': './labelsTr/MSWAL_0313.nii.gz'}, {'image': './imagesTr/MSWAL_0314_0000.nii.gz', 'label': './labelsTr/MSWAL_0314.nii.gz'}, {'image': './imagesTr/MSWAL_0316_0000.nii.gz', 'label': './labelsTr/MSWAL_0316.nii.gz'}, {'image': './imagesTr/MSWAL_0317_0000.nii.gz', 'label': './labelsTr/MSWAL_0317.nii.gz'}, {'image': './imagesTr/MSWAL_0318_0000.nii.gz', 'label': './labelsTr/MSWAL_0318.nii.gz'}, {'image': './imagesTr/MSWAL_0320_0000.nii.gz', 'label': './labelsTr/MSWAL_0320.nii.gz'}, {'image': './imagesTr/MSWAL_0323_0000.nii.gz', 'label': './labelsTr/MSWAL_0323.nii.gz'}, {'image': './imagesTr/MSWAL_0324_0000.nii.gz', 'label': './labelsTr/MSWAL_0324.nii.gz'}, {'image': './imagesTr/MSWAL_0326_0000.nii.gz', 'label': './labelsTr/MSWAL_0326.nii.gz'}, {'image': './imagesTr/MSWAL_0327_0000.nii.gz', 'label': './labelsTr/MSWAL_0327.nii.gz'}, {'image': './imagesTr/MSWAL_0328_0000.nii.gz', 'label': './labelsTr/MSWAL_0328.nii.gz'}, {'image': './imagesTr/MSWAL_0330_0000.nii.gz', 'label': './labelsTr/MSWAL_0330.nii.gz'}, {'image': './imagesTr/MSWAL_0331_0000.nii.gz', 'label': './labelsTr/MSWAL_0331.nii.gz'}, {'image': './imagesTr/MSWAL_0332_0000.nii.gz', 'label': './labelsTr/MSWAL_0332.nii.gz'}, {'image': './imagesTr/MSWAL_0333_0000.nii.gz', 'label': './labelsTr/MSWAL_0333.nii.gz'}, {'image': './imagesTr/MSWAL_0334_0000.nii.gz', 'label': './labelsTr/MSWAL_0334.nii.gz'}, {'image': './imagesTr/MSWAL_0335_0000.nii.gz', 'label': './labelsTr/MSWAL_0335.nii.gz'}, {'image': './imagesTr/MSWAL_0336_0000.nii.gz', 'label': './labelsTr/MSWAL_0336.nii.gz'}, {'image': './imagesTr/MSWAL_0337_0000.nii.gz', 
'label': './labelsTr/MSWAL_0337.nii.gz'}, {'image': './imagesTr/MSWAL_0338_0000.nii.gz', 'label': './labelsTr/MSWAL_0338.nii.gz'}, {'image': './imagesTr/MSWAL_0341_0000.nii.gz', 'label': './labelsTr/MSWAL_0341.nii.gz'}, {'image': './imagesTr/MSWAL_0342_0000.nii.gz', 'label': './labelsTr/MSWAL_0342.nii.gz'}, {'image': './imagesTr/MSWAL_0343_0000.nii.gz', 'label': './labelsTr/MSWAL_0343.nii.gz'}, {'image': './imagesTr/MSWAL_0344_0000.nii.gz', 'label': './labelsTr/MSWAL_0344.nii.gz'}, {'image': './imagesTr/MSWAL_0345_0000.nii.gz', 'label': './labelsTr/MSWAL_0345.nii.gz'}, {'image': './imagesTr/MSWAL_0346_0000.nii.gz', 'label': './labelsTr/MSWAL_0346.nii.gz'}, {'image': './imagesTr/MSWAL_0348_0000.nii.gz', 'label': './labelsTr/MSWAL_0348.nii.gz'}, {'image': './imagesTr/MSWAL_0353_0000.nii.gz', 'label': './labelsTr/MSWAL_0353.nii.gz'}, {'image': './imagesTr/MSWAL_0354_0000.nii.gz', 'label': './labelsTr/MSWAL_0354.nii.gz'}, {'image': './imagesTr/MSWAL_0355_0000.nii.gz', 'label': './labelsTr/MSWAL_0355.nii.gz'}, {'image': './imagesTr/MSWAL_0356_0000.nii.gz', 'label': './labelsTr/MSWAL_0356.nii.gz'}, {'image': './imagesTr/MSWAL_0357_0000.nii.gz', 'label': './labelsTr/MSWAL_0357.nii.gz'}, {'image': './imagesTr/MSWAL_0360_0000.nii.gz', 'label': './labelsTr/MSWAL_0360.nii.gz'}, {'image': './imagesTr/MSWAL_0361_0000.nii.gz', 'label': './labelsTr/MSWAL_0361.nii.gz'}, {'image': './imagesTr/MSWAL_0362_0000.nii.gz', 'label': './labelsTr/MSWAL_0362.nii.gz'}, {'image': './imagesTr/MSWAL_0363_0000.nii.gz', 'label': './labelsTr/MSWAL_0363.nii.gz'}, {'image': './imagesTr/MSWAL_0365_0000.nii.gz', 'label': './labelsTr/MSWAL_0365.nii.gz'}, {'image': './imagesTr/MSWAL_0366_0000.nii.gz', 'label': './labelsTr/MSWAL_0366.nii.gz'}, {'image': './imagesTr/MSWAL_0369_0000.nii.gz', 'label': './labelsTr/MSWAL_0369.nii.gz'}, {'image': './imagesTr/MSWAL_0370_0000.nii.gz', 'label': './labelsTr/MSWAL_0370.nii.gz'}, {'image': './imagesTr/MSWAL_0373_0000.nii.gz', 'label': './labelsTr/MSWAL_0373.nii.gz'}, 
{'image': './imagesTr/MSWAL_0374_0000.nii.gz', 'label': './labelsTr/MSWAL_0374.nii.gz'}, {'image': './imagesTr/MSWAL_0375_0000.nii.gz', 'label': './labelsTr/MSWAL_0375.nii.gz'}, {'image': './imagesTr/MSWAL_0376_0000.nii.gz', 'label': './labelsTr/MSWAL_0376.nii.gz'}, {'image': './imagesTr/MSWAL_0378_0000.nii.gz', 'label': './labelsTr/MSWAL_0378.nii.gz'}, {'image': './imagesTr/MSWAL_0379_0000.nii.gz', 'label': './labelsTr/MSWAL_0379.nii.gz'}, {'image': './imagesTr/MSWAL_0380_0000.nii.gz', 'label': './labelsTr/MSWAL_0380.nii.gz'}, {'image': './imagesTr/MSWAL_0381_0000.nii.gz', 'label': './labelsTr/MSWAL_0381.nii.gz'}, {'image': './imagesTr/MSWAL_0382_0000.nii.gz', 'label': './labelsTr/MSWAL_0382.nii.gz'}, {'image': './imagesTr/MSWAL_0387_0000.nii.gz', 'label': './labelsTr/MSWAL_0387.nii.gz'}, {'image': './imagesTr/MSWAL_0388_0000.nii.gz', 'label': './labelsTr/MSWAL_0388.nii.gz'}, {'image': './imagesTr/MSWAL_0389_0000.nii.gz', 'label': './labelsTr/MSWAL_0389.nii.gz'}, {'image': './imagesTr/MSWAL_0390_0000.nii.gz', 'label': './labelsTr/MSWAL_0390.nii.gz'}, {'image': './imagesTr/MSWAL_0391_0000.nii.gz', 'label': './labelsTr/MSWAL_0391.nii.gz'}, {'image': './imagesTr/MSWAL_0392_0000.nii.gz', 'label': './labelsTr/MSWAL_0392.nii.gz'}, {'image': './imagesTr/MSWAL_0393_0000.nii.gz', 'label': './labelsTr/MSWAL_0393.nii.gz'}, {'image': './imagesTr/MSWAL_0397_0000.nii.gz', 'label': './labelsTr/MSWAL_0397.nii.gz'}, {'image': './imagesTr/MSWAL_0398_0000.nii.gz', 'label': './labelsTr/MSWAL_0398.nii.gz'}, {'image': './imagesTr/MSWAL_0399_0000.nii.gz', 'label': './labelsTr/MSWAL_0399.nii.gz'}, {'image': './imagesTr/MSWAL_0400_0000.nii.gz', 'label': './labelsTr/MSWAL_0400.nii.gz'}, {'image': './imagesTr/MSWAL_0402_0000.nii.gz', 'label': './labelsTr/MSWAL_0402.nii.gz'}, {'image': './imagesTr/MSWAL_0403_0000.nii.gz', 'label': './labelsTr/MSWAL_0403.nii.gz'}, {'image': './imagesTr/MSWAL_0407_0000.nii.gz', 'label': './labelsTr/MSWAL_0407.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0409_0000.nii.gz', 'label': './labelsTr/MSWAL_0409.nii.gz'}, {'image': './imagesTr/MSWAL_0410_0000.nii.gz', 'label': './labelsTr/MSWAL_0410.nii.gz'}, {'image': './imagesTr/MSWAL_0411_0000.nii.gz', 'label': './labelsTr/MSWAL_0411.nii.gz'}, {'image': './imagesTr/MSWAL_0412_0000.nii.gz', 'label': './labelsTr/MSWAL_0412.nii.gz'}, {'image': './imagesTr/MSWAL_0414_0000.nii.gz', 'label': './labelsTr/MSWAL_0414.nii.gz'}, {'image': './imagesTr/MSWAL_0415_0000.nii.gz', 'label': './labelsTr/MSWAL_0415.nii.gz'}, {'image': './imagesTr/MSWAL_0416_0000.nii.gz', 'label': './labelsTr/MSWAL_0416.nii.gz'}, {'image': './imagesTr/MSWAL_0417_0000.nii.gz', 'label': './labelsTr/MSWAL_0417.nii.gz'}, {'image': './imagesTr/MSWAL_0418_0000.nii.gz', 'label': './labelsTr/MSWAL_0418.nii.gz'}, {'image': './imagesTr/MSWAL_0419_0000.nii.gz', 'label': './labelsTr/MSWAL_0419.nii.gz'}, {'image': './imagesTr/MSWAL_0420_0000.nii.gz', 'label': './labelsTr/MSWAL_0420.nii.gz'}, {'image': './imagesTr/MSWAL_0421_0000.nii.gz', 'label': './labelsTr/MSWAL_0421.nii.gz'}, {'image': './imagesTr/MSWAL_0422_0000.nii.gz', 'label': './labelsTr/MSWAL_0422.nii.gz'}, {'image': './imagesTr/MSWAL_0423_0000.nii.gz', 'label': './labelsTr/MSWAL_0423.nii.gz'}, {'image': './imagesTr/MSWAL_0425_0000.nii.gz', 'label': './labelsTr/MSWAL_0425.nii.gz'}, {'image': './imagesTr/MSWAL_0426_0000.nii.gz', 'label': './labelsTr/MSWAL_0426.nii.gz'}, {'image': './imagesTr/MSWAL_0427_0000.nii.gz', 'label': './labelsTr/MSWAL_0427.nii.gz'}, {'image': './imagesTr/MSWAL_0428_0000.nii.gz', 'label': './labelsTr/MSWAL_0428.nii.gz'}, {'image': './imagesTr/MSWAL_0429_0000.nii.gz', 'label': './labelsTr/MSWAL_0429.nii.gz'}, {'image': './imagesTr/MSWAL_0430_0000.nii.gz', 'label': './labelsTr/MSWAL_0430.nii.gz'}, {'image': './imagesTr/MSWAL_0431_0000.nii.gz', 'label': './labelsTr/MSWAL_0431.nii.gz'}, {'image': './imagesTr/MSWAL_0432_0000.nii.gz', 'label': './labelsTr/MSWAL_0432.nii.gz'}, {'image': './imagesTr/MSWAL_0434_0000.nii.gz', 
'label': './labelsTr/MSWAL_0434.nii.gz'}, {'image': './imagesTr/MSWAL_0435_0000.nii.gz', 'label': './labelsTr/MSWAL_0435.nii.gz'}, {'image': './imagesTr/MSWAL_0436_0000.nii.gz', 'label': './labelsTr/MSWAL_0436.nii.gz'}, {'image': './imagesTr/MSWAL_0437_0000.nii.gz', 'label': './labelsTr/MSWAL_0437.nii.gz'}, {'image': './imagesTr/MSWAL_0438_0000.nii.gz', 'label': './labelsTr/MSWAL_0438.nii.gz'}, {'image': './imagesTr/MSWAL_0439_0000.nii.gz', 'label': './labelsTr/MSWAL_0439.nii.gz'}, {'image': './imagesTr/MSWAL_0440_0000.nii.gz', 'label': './labelsTr/MSWAL_0440.nii.gz'}, {'image': './imagesTr/MSWAL_0442_0000.nii.gz', 'label': './labelsTr/MSWAL_0442.nii.gz'}, {'image': './imagesTr/MSWAL_0446_0000.nii.gz', 'label': './labelsTr/MSWAL_0446.nii.gz'}, {'image': './imagesTr/MSWAL_0447_0000.nii.gz', 'label': './labelsTr/MSWAL_0447.nii.gz'}, {'image': './imagesTr/MSWAL_0452_0000.nii.gz', 'label': './labelsTr/MSWAL_0452.nii.gz'}, {'image': './imagesTr/MSWAL_0453_0000.nii.gz', 'label': './labelsTr/MSWAL_0453.nii.gz'}, {'image': './imagesTr/MSWAL_0455_0000.nii.gz', 'label': './labelsTr/MSWAL_0455.nii.gz'}, {'image': './imagesTr/MSWAL_0457_0000.nii.gz', 'label': './labelsTr/MSWAL_0457.nii.gz'}, {'image': './imagesTr/MSWAL_0460_0000.nii.gz', 'label': './labelsTr/MSWAL_0460.nii.gz'}, {'image': './imagesTr/MSWAL_0461_0000.nii.gz', 'label': './labelsTr/MSWAL_0461.nii.gz'}, {'image': './imagesTr/MSWAL_0463_0000.nii.gz', 'label': './labelsTr/MSWAL_0463.nii.gz'}, {'image': './imagesTr/MSWAL_0464_0000.nii.gz', 'label': './labelsTr/MSWAL_0464.nii.gz'}, {'image': './imagesTr/MSWAL_0465_0000.nii.gz', 'label': './labelsTr/MSWAL_0465.nii.gz'}, {'image': './imagesTr/MSWAL_0466_0000.nii.gz', 'label': './labelsTr/MSWAL_0466.nii.gz'}, {'image': './imagesTr/MSWAL_0468_0000.nii.gz', 'label': './labelsTr/MSWAL_0468.nii.gz'}, {'image': './imagesTr/MSWAL_0470_0000.nii.gz', 'label': './labelsTr/MSWAL_0470.nii.gz'}, {'image': './imagesTr/MSWAL_0471_0000.nii.gz', 'label': './labelsTr/MSWAL_0471.nii.gz'}, 
{'image': './imagesTr/MSWAL_0473_0000.nii.gz', 'label': './labelsTr/MSWAL_0473.nii.gz'}, {'image': './imagesTr/MSWAL_0474_0000.nii.gz', 'label': './labelsTr/MSWAL_0474.nii.gz'}, {'image': './imagesTr/MSWAL_0475_0000.nii.gz', 'label': './labelsTr/MSWAL_0475.nii.gz'}, {'image': './imagesTr/MSWAL_0476_0000.nii.gz', 'label': './labelsTr/MSWAL_0476.nii.gz'}, {'image': './imagesTr/MSWAL_0477_0000.nii.gz', 'label': './labelsTr/MSWAL_0477.nii.gz'}, {'image': './imagesTr/MSWAL_0479_0000.nii.gz', 'label': './labelsTr/MSWAL_0479.nii.gz'}, {'image': './imagesTr/MSWAL_0480_0000.nii.gz', 'label': './labelsTr/MSWAL_0480.nii.gz'}, {'image': './imagesTr/MSWAL_0482_0000.nii.gz', 'label': './labelsTr/MSWAL_0482.nii.gz'}, {'image': './imagesTr/MSWAL_0483_0000.nii.gz', 'label': './labelsTr/MSWAL_0483.nii.gz'}, {'image': './imagesTr/MSWAL_0484_0000.nii.gz', 'label': './labelsTr/MSWAL_0484.nii.gz'}, {'image': './imagesTr/MSWAL_0485_0000.nii.gz', 'label': './labelsTr/MSWAL_0485.nii.gz'}, {'image': './imagesTr/MSWAL_0486_0000.nii.gz', 'label': './labelsTr/MSWAL_0486.nii.gz'}, {'image': './imagesTr/MSWAL_0487_0000.nii.gz', 'label': './labelsTr/MSWAL_0487.nii.gz'}, {'image': './imagesTr/MSWAL_0488_0000.nii.gz', 'label': './labelsTr/MSWAL_0488.nii.gz'}, {'image': './imagesTr/MSWAL_0489_0000.nii.gz', 'label': './labelsTr/MSWAL_0489.nii.gz'}, {'image': './imagesTr/MSWAL_0490_0000.nii.gz', 'label': './labelsTr/MSWAL_0490.nii.gz'}, {'image': './imagesTr/MSWAL_0491_0000.nii.gz', 'label': './labelsTr/MSWAL_0491.nii.gz'}, {'image': './imagesTr/MSWAL_0492_0000.nii.gz', 'label': './labelsTr/MSWAL_0492.nii.gz'}, {'image': './imagesTr/MSWAL_0493_0000.nii.gz', 'label': './labelsTr/MSWAL_0493.nii.gz'}, {'image': './imagesTr/MSWAL_0495_0000.nii.gz', 'label': './labelsTr/MSWAL_0495.nii.gz'}, {'image': './imagesTr/MSWAL_0497_0000.nii.gz', 'label': './labelsTr/MSWAL_0497.nii.gz'}, {'image': './imagesTr/MSWAL_0498_0000.nii.gz', 'label': './labelsTr/MSWAL_0498.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0500_0000.nii.gz', 'label': './labelsTr/MSWAL_0500.nii.gz'}, {'image': './imagesTr/MSWAL_0501_0000.nii.gz', 'label': './labelsTr/MSWAL_0501.nii.gz'}, {'image': './imagesTr/MSWAL_0504_0000.nii.gz', 'label': './labelsTr/MSWAL_0504.nii.gz'}, {'image': './imagesTr/MSWAL_0505_0000.nii.gz', 'label': './labelsTr/MSWAL_0505.nii.gz'}, {'image': './imagesTr/MSWAL_0506_0000.nii.gz', 'label': './labelsTr/MSWAL_0506.nii.gz'}, {'image': './imagesTr/MSWAL_0507_0000.nii.gz', 'label': './labelsTr/MSWAL_0507.nii.gz'}, {'image': './imagesTr/MSWAL_0508_0000.nii.gz', 'label': './labelsTr/MSWAL_0508.nii.gz'}, {'image': './imagesTr/MSWAL_0509_0000.nii.gz', 'label': './labelsTr/MSWAL_0509.nii.gz'}, {'image': './imagesTr/MSWAL_0510_0000.nii.gz', 'label': './labelsTr/MSWAL_0510.nii.gz'}, {'image': './imagesTr/MSWAL_0512_0000.nii.gz', 'label': './labelsTr/MSWAL_0512.nii.gz'}, {'image': './imagesTr/MSWAL_0516_0000.nii.gz', 'label': './labelsTr/MSWAL_0516.nii.gz'}, {'image': './imagesTr/MSWAL_0518_0000.nii.gz', 'label': './labelsTr/MSWAL_0518.nii.gz'}, {'image': './imagesTr/MSWAL_0519_0000.nii.gz', 'label': './labelsTr/MSWAL_0519.nii.gz'}, {'image': './imagesTr/MSWAL_0521_0000.nii.gz', 'label': './labelsTr/MSWAL_0521.nii.gz'}, {'image': './imagesTr/MSWAL_0522_0000.nii.gz', 'label': './labelsTr/MSWAL_0522.nii.gz'}, {'image': './imagesTr/MSWAL_0523_0000.nii.gz', 'label': './labelsTr/MSWAL_0523.nii.gz'}, {'image': './imagesTr/MSWAL_0524_0000.nii.gz', 'label': './labelsTr/MSWAL_0524.nii.gz'}, {'image': './imagesTr/MSWAL_0526_0000.nii.gz', 'label': './labelsTr/MSWAL_0526.nii.gz'}, {'image': './imagesTr/MSWAL_0527_0000.nii.gz', 'label': './labelsTr/MSWAL_0527.nii.gz'}, {'image': './imagesTr/MSWAL_0530_0000.nii.gz', 'label': './labelsTr/MSWAL_0530.nii.gz'}, {'image': './imagesTr/MSWAL_0531_0000.nii.gz', 'label': './labelsTr/MSWAL_0531.nii.gz'}, {'image': './imagesTr/MSWAL_0534_0000.nii.gz', 'label': './labelsTr/MSWAL_0534.nii.gz'}, {'image': './imagesTr/MSWAL_0535_0000.nii.gz', 
'label': './labelsTr/MSWAL_0535.nii.gz'}, {'image': './imagesTr/MSWAL_0536_0000.nii.gz', 'label': './labelsTr/MSWAL_0536.nii.gz'}, {'image': './imagesTr/MSWAL_0538_0000.nii.gz', 'label': './labelsTr/MSWAL_0538.nii.gz'}, {'image': './imagesTr/MSWAL_0539_0000.nii.gz', 'label': './labelsTr/MSWAL_0539.nii.gz'}, {'image': './imagesTr/MSWAL_0540_0000.nii.gz', 'label': './labelsTr/MSWAL_0540.nii.gz'}, {'image': './imagesTr/MSWAL_0542_0000.nii.gz', 'label': './labelsTr/MSWAL_0542.nii.gz'}, {'image': './imagesTr/MSWAL_0544_0000.nii.gz', 'label': './labelsTr/MSWAL_0544.nii.gz'}, {'image': './imagesTr/MSWAL_0545_0000.nii.gz', 'label': './labelsTr/MSWAL_0545.nii.gz'}, {'image': './imagesTr/MSWAL_0546_0000.nii.gz', 'label': './labelsTr/MSWAL_0546.nii.gz'}, {'image': './imagesTr/MSWAL_0547_0000.nii.gz', 'label': './labelsTr/MSWAL_0547.nii.gz'}, {'image': './imagesTr/MSWAL_0548_0000.nii.gz', 'label': './labelsTr/MSWAL_0548.nii.gz'}, {'image': './imagesTr/MSWAL_0549_0000.nii.gz', 'label': './labelsTr/MSWAL_0549.nii.gz'}, {'image': './imagesTr/MSWAL_0550_0000.nii.gz', 'label': './labelsTr/MSWAL_0550.nii.gz'}, {'image': './imagesTr/MSWAL_0551_0000.nii.gz', 'label': './labelsTr/MSWAL_0551.nii.gz'}, {'image': './imagesTr/MSWAL_0552_0000.nii.gz', 'label': './labelsTr/MSWAL_0552.nii.gz'}, {'image': './imagesTr/MSWAL_0553_0000.nii.gz', 'label': './labelsTr/MSWAL_0553.nii.gz'}, {'image': './imagesTr/MSWAL_0554_0000.nii.gz', 'label': './labelsTr/MSWAL_0554.nii.gz'}, {'image': './imagesTr/MSWAL_0555_0000.nii.gz', 'label': './labelsTr/MSWAL_0555.nii.gz'}, {'image': './imagesTr/MSWAL_0556_0000.nii.gz', 'label': './labelsTr/MSWAL_0556.nii.gz'}, {'image': './imagesTr/MSWAL_0557_0000.nii.gz', 'label': './labelsTr/MSWAL_0557.nii.gz'}, {'image': './imagesTr/MSWAL_0558_0000.nii.gz', 'label': './labelsTr/MSWAL_0558.nii.gz'}, {'image': './imagesTr/MSWAL_0559_0000.nii.gz', 'label': './labelsTr/MSWAL_0559.nii.gz'}, {'image': './imagesTr/MSWAL_0561_0000.nii.gz', 'label': './labelsTr/MSWAL_0561.nii.gz'}, 
{'image': './imagesTr/MSWAL_0562_0000.nii.gz', 'label': './labelsTr/MSWAL_0562.nii.gz'}, {'image': './imagesTr/MSWAL_0563_0000.nii.gz', 'label': './labelsTr/MSWAL_0563.nii.gz'}, {'image': './imagesTr/MSWAL_0564_0000.nii.gz', 'label': './labelsTr/MSWAL_0564.nii.gz'}, {'image': './imagesTr/MSWAL_0566_0000.nii.gz', 'label': './labelsTr/MSWAL_0566.nii.gz'}, {'image': './imagesTr/MSWAL_0567_0000.nii.gz', 'label': './labelsTr/MSWAL_0567.nii.gz'}, {'image': './imagesTr/MSWAL_0568_0000.nii.gz', 'label': './labelsTr/MSWAL_0568.nii.gz'}, {'image': './imagesTr/MSWAL_0571_0000.nii.gz', 'label': './labelsTr/MSWAL_0571.nii.gz'}, {'image': './imagesTr/MSWAL_0573_0000.nii.gz', 'label': './labelsTr/MSWAL_0573.nii.gz'}, {'image': './imagesTr/MSWAL_0574_0000.nii.gz', 'label': './labelsTr/MSWAL_0574.nii.gz'}, {'image': './imagesTr/MSWAL_0575_0000.nii.gz', 'label': './labelsTr/MSWAL_0575.nii.gz'}, {'image': './imagesTr/MSWAL_0577_0000.nii.gz', 'label': './labelsTr/MSWAL_0577.nii.gz'}, {'image': './imagesTr/MSWAL_0578_0000.nii.gz', 'label': './labelsTr/MSWAL_0578.nii.gz'}, {'image': './imagesTr/MSWAL_0579_0000.nii.gz', 'label': './labelsTr/MSWAL_0579.nii.gz'}, {'image': './imagesTr/MSWAL_0580_0000.nii.gz', 'label': './labelsTr/MSWAL_0580.nii.gz'}, {'image': './imagesTr/MSWAL_0581_0000.nii.gz', 'label': './labelsTr/MSWAL_0581.nii.gz'}, {'image': './imagesTr/MSWAL_0582_0000.nii.gz', 'label': './labelsTr/MSWAL_0582.nii.gz'}, {'image': './imagesTr/MSWAL_0583_0000.nii.gz', 'label': './labelsTr/MSWAL_0583.nii.gz'}, {'image': './imagesTr/MSWAL_0584_0000.nii.gz', 'label': './labelsTr/MSWAL_0584.nii.gz'}, {'image': './imagesTr/MSWAL_0586_0000.nii.gz', 'label': './labelsTr/MSWAL_0586.nii.gz'}, {'image': './imagesTr/MSWAL_0590_0000.nii.gz', 'label': './labelsTr/MSWAL_0590.nii.gz'}, {'image': './imagesTr/MSWAL_0591_0000.nii.gz', 'label': './labelsTr/MSWAL_0591.nii.gz'}, {'image': './imagesTr/MSWAL_0592_0000.nii.gz', 'label': './labelsTr/MSWAL_0592.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0593_0000.nii.gz', 'label': './labelsTr/MSWAL_0593.nii.gz'}, {'image': './imagesTr/MSWAL_0595_0000.nii.gz', 'label': './labelsTr/MSWAL_0595.nii.gz'}, {'image': './imagesTr/MSWAL_0596_0000.nii.gz', 'label': './labelsTr/MSWAL_0596.nii.gz'}, {'image': './imagesTr/MSWAL_0597_0000.nii.gz', 'label': './labelsTr/MSWAL_0597.nii.gz'}, {'image': './imagesTr/MSWAL_0598_0000.nii.gz', 'label': './labelsTr/MSWAL_0598.nii.gz'}, {'image': './imagesTr/MSWAL_0599_0000.nii.gz', 'label': './labelsTr/MSWAL_0599.nii.gz'}, {'image': './imagesTr/MSWAL_0600_0000.nii.gz', 'label': './labelsTr/MSWAL_0600.nii.gz'}, {'image': './imagesTr/MSWAL_0601_0000.nii.gz', 'label': './labelsTr/MSWAL_0601.nii.gz'}, {'image': './imagesTr/MSWAL_0602_0000.nii.gz', 'label': './labelsTr/MSWAL_0602.nii.gz'}, {'image': './imagesTr/MSWAL_0604_0000.nii.gz', 'label': './labelsTr/MSWAL_0604.nii.gz'}, {'image': './imagesTr/MSWAL_0605_0000.nii.gz', 'label': './labelsTr/MSWAL_0605.nii.gz'}, {'image': './imagesTr/MSWAL_0608_0000.nii.gz', 'label': './labelsTr/MSWAL_0608.nii.gz'}, {'image': './imagesTr/MSWAL_0612_0000.nii.gz', 'label': './labelsTr/MSWAL_0612.nii.gz'}, {'image': './imagesTr/MSWAL_0614_0000.nii.gz', 'label': './labelsTr/MSWAL_0614.nii.gz'}, {'image': './imagesTr/MSWAL_0615_0000.nii.gz', 'label': './labelsTr/MSWAL_0615.nii.gz'}, {'image': './imagesTr/MSWAL_0616_0000.nii.gz', 'label': './labelsTr/MSWAL_0616.nii.gz'}, {'image': './imagesTr/MSWAL_0617_0000.nii.gz', 'label': './labelsTr/MSWAL_0617.nii.gz'}, {'image': './imagesTr/MSWAL_0621_0000.nii.gz', 'label': './labelsTr/MSWAL_0621.nii.gz'}, {'image': './imagesTr/MSWAL_0623_0000.nii.gz', 'label': './labelsTr/MSWAL_0623.nii.gz'}, {'image': './imagesTr/MSWAL_0625_0000.nii.gz', 'label': './labelsTr/MSWAL_0625.nii.gz'}, {'image': './imagesTr/MSWAL_0626_0000.nii.gz', 'label': './labelsTr/MSWAL_0626.nii.gz'}, {'image': './imagesTr/MSWAL_0627_0000.nii.gz', 'label': './labelsTr/MSWAL_0627.nii.gz'}, {'image': './imagesTr/MSWAL_0628_0000.nii.gz', 
'label': './labelsTr/MSWAL_0628.nii.gz'}, {'image': './imagesTr/MSWAL_0629_0000.nii.gz', 'label': './labelsTr/MSWAL_0629.nii.gz'}, {'image': './imagesTr/MSWAL_0630_0000.nii.gz', 'label': './labelsTr/MSWAL_0630.nii.gz'}, {'image': './imagesTr/MSWAL_0632_0000.nii.gz', 'label': './labelsTr/MSWAL_0632.nii.gz'}, {'image': './imagesTr/MSWAL_0635_0000.nii.gz', 'label': './labelsTr/MSWAL_0635.nii.gz'}, {'image': './imagesTr/MSWAL_0636_0000.nii.gz', 'label': './labelsTr/MSWAL_0636.nii.gz'}, {'image': './imagesTr/MSWAL_0638_0000.nii.gz', 'label': './labelsTr/MSWAL_0638.nii.gz'}, {'image': './imagesTr/MSWAL_0640_0000.nii.gz', 'label': './labelsTr/MSWAL_0640.nii.gz'}, {'image': './imagesTr/MSWAL_0641_0000.nii.gz', 'label': './labelsTr/MSWAL_0641.nii.gz'}, {'image': './imagesTr/MSWAL_0643_0000.nii.gz', 'label': './labelsTr/MSWAL_0643.nii.gz'}, {'image': './imagesTr/MSWAL_0644_0000.nii.gz', 'label': './labelsTr/MSWAL_0644.nii.gz'}, {'image': './imagesTr/MSWAL_0646_0000.nii.gz', 'label': './labelsTr/MSWAL_0646.nii.gz'}, {'image': './imagesTr/MSWAL_0648_0000.nii.gz', 'label': './labelsTr/MSWAL_0648.nii.gz'}, {'image': './imagesTr/MSWAL_0649_0000.nii.gz', 'label': './labelsTr/MSWAL_0649.nii.gz'}, {'image': './imagesTr/MSWAL_0650_0000.nii.gz', 'label': './labelsTr/MSWAL_0650.nii.gz'}, {'image': './imagesTr/MSWAL_0651_0000.nii.gz', 'label': './labelsTr/MSWAL_0651.nii.gz'}, {'image': './imagesTr/MSWAL_0653_0000.nii.gz', 'label': './labelsTr/MSWAL_0653.nii.gz'}, {'image': './imagesTr/MSWAL_0654_0000.nii.gz', 'label': './labelsTr/MSWAL_0654.nii.gz'}, {'image': './imagesTr/MSWAL_0655_0000.nii.gz', 'label': './labelsTr/MSWAL_0655.nii.gz'}, {'image': './imagesTr/MSWAL_0656_0000.nii.gz', 'label': './labelsTr/MSWAL_0656.nii.gz'}, {'image': './imagesTr/MSWAL_0658_0000.nii.gz', 'label': './labelsTr/MSWAL_0658.nii.gz'}, {'image': './imagesTr/MSWAL_0660_0000.nii.gz', 'label': './labelsTr/MSWAL_0660.nii.gz'}, {'image': './imagesTr/MSWAL_0661_0000.nii.gz', 'label': './labelsTr/MSWAL_0661.nii.gz'}, 
{'image': './imagesTr/MSWAL_0662_0000.nii.gz', 'label': './labelsTr/MSWAL_0662.nii.gz'}, {'image': './imagesTr/MSWAL_0663_0000.nii.gz', 'label': './labelsTr/MSWAL_0663.nii.gz'}, {'image': './imagesTr/MSWAL_0666_0000.nii.gz', 'label': './labelsTr/MSWAL_0666.nii.gz'}, {'image': './imagesTr/MSWAL_0667_0000.nii.gz', 'label': './labelsTr/MSWAL_0667.nii.gz'}, {'image': './imagesTr/MSWAL_0668_0000.nii.gz', 'label': './labelsTr/MSWAL_0668.nii.gz'}, {'image': './imagesTr/MSWAL_0669_0000.nii.gz', 'label': './labelsTr/MSWAL_0669.nii.gz'}, {'image': './imagesTr/MSWAL_0670_0000.nii.gz', 'label': './labelsTr/MSWAL_0670.nii.gz'}, {'image': './imagesTr/MSWAL_0671_0000.nii.gz', 'label': './labelsTr/MSWAL_0671.nii.gz'}, {'image': './imagesTr/MSWAL_0673_0000.nii.gz', 'label': './labelsTr/MSWAL_0673.nii.gz'}, {'image': './imagesTr/MSWAL_0674_0000.nii.gz', 'label': './labelsTr/MSWAL_0674.nii.gz'}, {'image': './imagesTr/MSWAL_0675_0000.nii.gz', 'label': './labelsTr/MSWAL_0675.nii.gz'}, {'image': './imagesTr/MSWAL_0676_0000.nii.gz', 'label': './labelsTr/MSWAL_0676.nii.gz'}, {'image': './imagesTr/MSWAL_0677_0000.nii.gz', 'label': './labelsTr/MSWAL_0677.nii.gz'}, {'image': './imagesTr/MSWAL_0679_0000.nii.gz', 'label': './labelsTr/MSWAL_0679.nii.gz'}, {'image': './imagesTr/MSWAL_0680_0000.nii.gz', 'label': './labelsTr/MSWAL_0680.nii.gz'}, {'image': './imagesTr/MSWAL_0681_0000.nii.gz', 'label': './labelsTr/MSWAL_0681.nii.gz'}, {'image': './imagesTr/MSWAL_0682_0000.nii.gz', 'label': './labelsTr/MSWAL_0682.nii.gz'}, {'image': './imagesTr/MSWAL_0685_0000.nii.gz', 'label': './labelsTr/MSWAL_0685.nii.gz'}, {'image': './imagesTr/MSWAL_0686_0000.nii.gz', 'label': './labelsTr/MSWAL_0686.nii.gz'}, {'image': './imagesTr/MSWAL_0687_0000.nii.gz', 'label': './labelsTr/MSWAL_0687.nii.gz'}, {'image': './imagesTr/MSWAL_0688_0000.nii.gz', 'label': './labelsTr/MSWAL_0688.nii.gz'}, {'image': './imagesTr/MSWAL_0690_0000.nii.gz', 'label': './labelsTr/MSWAL_0690.nii.gz'}, {'image': 
'./imagesTr/MSWAL_0692_0000.nii.gz', 'label': './labelsTr/MSWAL_0692.nii.gz'}, {'image': './imagesTr/MSWAL_0693_0000.nii.gz', 'label': './labelsTr/MSWAL_0693.nii.gz'}, {'image': './imagesTr/MSWAL_0694_0000.nii.gz', 'label': './labelsTr/MSWAL_0694.nii.gz'}], 'test': [{'image': './imagesTs/MSWAL_0004_0000.nii.gz', 'label': './labelsTs/MSWAL_0004.nii.gz'}, {'image': './imagesTs/MSWAL_0005_0000.nii.gz', 'label': './labelsTs/MSWAL_0005.nii.gz'}, {'image': './imagesTs/MSWAL_0006_0000.nii.gz', 'label': './labelsTs/MSWAL_0006.nii.gz'}, {'image': './imagesTs/MSWAL_0007_0000.nii.gz', 'label': './labelsTs/MSWAL_0007.nii.gz'}, {'image': './imagesTs/MSWAL_0010_0000.nii.gz', 'label': './labelsTs/MSWAL_0010.nii.gz'}, {'image': './imagesTs/MSWAL_0012_0000.nii.gz', 'label': './labelsTs/MSWAL_0012.nii.gz'}, {'image': './imagesTs/MSWAL_0016_0000.nii.gz', 'label': './labelsTs/MSWAL_0016.nii.gz'}, {'image': './imagesTs/MSWAL_0019_0000.nii.gz', 'label': './labelsTs/MSWAL_0019.nii.gz'}, {'image': './imagesTs/MSWAL_0023_0000.nii.gz', 'label': './labelsTs/MSWAL_0023.nii.gz'}, {'image': './imagesTs/MSWAL_0025_0000.nii.gz', 'label': './labelsTs/MSWAL_0025.nii.gz'}, {'image': './imagesTs/MSWAL_0030_0000.nii.gz', 'label': './labelsTs/MSWAL_0030.nii.gz'}, {'image': './imagesTs/MSWAL_0036_0000.nii.gz', 'label': './labelsTs/MSWAL_0036.nii.gz'}, {'image': './imagesTs/MSWAL_0043_0000.nii.gz', 'label': './labelsTs/MSWAL_0043.nii.gz'}, {'image': './imagesTs/MSWAL_0044_0000.nii.gz', 'label': './labelsTs/MSWAL_0044.nii.gz'}, {'image': './imagesTs/MSWAL_0047_0000.nii.gz', 'label': './labelsTs/MSWAL_0047.nii.gz'}, {'image': './imagesTs/MSWAL_0048_0000.nii.gz', 'label': './labelsTs/MSWAL_0048.nii.gz'}, {'image': './imagesTs/MSWAL_0053_0000.nii.gz', 'label': './labelsTs/MSWAL_0053.nii.gz'}, {'image': './imagesTs/MSWAL_0058_0000.nii.gz', 'label': './labelsTs/MSWAL_0058.nii.gz'}, {'image': './imagesTs/MSWAL_0062_0000.nii.gz', 'label': './labelsTs/MSWAL_0062.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0068_0000.nii.gz', 'label': './labelsTs/MSWAL_0068.nii.gz'}, {'image': './imagesTs/MSWAL_0070_0000.nii.gz', 'label': './labelsTs/MSWAL_0070.nii.gz'}, {'image': './imagesTs/MSWAL_0071_0000.nii.gz', 'label': './labelsTs/MSWAL_0071.nii.gz'}, {'image': './imagesTs/MSWAL_0073_0000.nii.gz', 'label': './labelsTs/MSWAL_0073.nii.gz'}, {'image': './imagesTs/MSWAL_0074_0000.nii.gz', 'label': './labelsTs/MSWAL_0074.nii.gz'}, {'image': './imagesTs/MSWAL_0076_0000.nii.gz', 'label': './labelsTs/MSWAL_0076.nii.gz'}, {'image': './imagesTs/MSWAL_0078_0000.nii.gz', 'label': './labelsTs/MSWAL_0078.nii.gz'}, {'image': './imagesTs/MSWAL_0079_0000.nii.gz', 'label': './labelsTs/MSWAL_0079.nii.gz'}, {'image': './imagesTs/MSWAL_0081_0000.nii.gz', 'label': './labelsTs/MSWAL_0081.nii.gz'}, {'image': './imagesTs/MSWAL_0087_0000.nii.gz', 'label': './labelsTs/MSWAL_0087.nii.gz'}, {'image': './imagesTs/MSWAL_0090_0000.nii.gz', 'label': './labelsTs/MSWAL_0090.nii.gz'}, {'image': './imagesTs/MSWAL_0091_0000.nii.gz', 'label': './labelsTs/MSWAL_0091.nii.gz'}, {'image': './imagesTs/MSWAL_0097_0000.nii.gz', 'label': './labelsTs/MSWAL_0097.nii.gz'}, {'image': './imagesTs/MSWAL_0100_0000.nii.gz', 'label': './labelsTs/MSWAL_0100.nii.gz'}, {'image': './imagesTs/MSWAL_0107_0000.nii.gz', 'label': './labelsTs/MSWAL_0107.nii.gz'}, {'image': './imagesTs/MSWAL_0115_0000.nii.gz', 'label': './labelsTs/MSWAL_0115.nii.gz'}, {'image': './imagesTs/MSWAL_0116_0000.nii.gz', 'label': './labelsTs/MSWAL_0116.nii.gz'}, {'image': './imagesTs/MSWAL_0118_0000.nii.gz', 'label': './labelsTs/MSWAL_0118.nii.gz'}, {'image': './imagesTs/MSWAL_0121_0000.nii.gz', 'label': './labelsTs/MSWAL_0121.nii.gz'}, {'image': './imagesTs/MSWAL_0123_0000.nii.gz', 'label': './labelsTs/MSWAL_0123.nii.gz'}, {'image': './imagesTs/MSWAL_0131_0000.nii.gz', 'label': './labelsTs/MSWAL_0131.nii.gz'}, {'image': './imagesTs/MSWAL_0135_0000.nii.gz', 'label': './labelsTs/MSWAL_0135.nii.gz'}, {'image': './imagesTs/MSWAL_0137_0000.nii.gz', 
'label': './labelsTs/MSWAL_0137.nii.gz'}, {'image': './imagesTs/MSWAL_0144_0000.nii.gz', 'label': './labelsTs/MSWAL_0144.nii.gz'}, {'image': './imagesTs/MSWAL_0146_0000.nii.gz', 'label': './labelsTs/MSWAL_0146.nii.gz'}, {'image': './imagesTs/MSWAL_0153_0000.nii.gz', 'label': './labelsTs/MSWAL_0153.nii.gz'}, {'image': './imagesTs/MSWAL_0154_0000.nii.gz', 'label': './labelsTs/MSWAL_0154.nii.gz'}, {'image': './imagesTs/MSWAL_0155_0000.nii.gz', 'label': './labelsTs/MSWAL_0155.nii.gz'}, {'image': './imagesTs/MSWAL_0156_0000.nii.gz', 'label': './labelsTs/MSWAL_0156.nii.gz'}, {'image': './imagesTs/MSWAL_0158_0000.nii.gz', 'label': './labelsTs/MSWAL_0158.nii.gz'}, {'image': './imagesTs/MSWAL_0160_0000.nii.gz', 'label': './labelsTs/MSWAL_0160.nii.gz'}, {'image': './imagesTs/MSWAL_0161_0000.nii.gz', 'label': './labelsTs/MSWAL_0161.nii.gz'}, {'image': './imagesTs/MSWAL_0164_0000.nii.gz', 'label': './labelsTs/MSWAL_0164.nii.gz'}, {'image': './imagesTs/MSWAL_0181_0000.nii.gz', 'label': './labelsTs/MSWAL_0181.nii.gz'}, {'image': './imagesTs/MSWAL_0190_0000.nii.gz', 'label': './labelsTs/MSWAL_0190.nii.gz'}, {'image': './imagesTs/MSWAL_0191_0000.nii.gz', 'label': './labelsTs/MSWAL_0191.nii.gz'}, {'image': './imagesTs/MSWAL_0192_0000.nii.gz', 'label': './labelsTs/MSWAL_0192.nii.gz'}, {'image': './imagesTs/MSWAL_0196_0000.nii.gz', 'label': './labelsTs/MSWAL_0196.nii.gz'}, {'image': './imagesTs/MSWAL_0197_0000.nii.gz', 'label': './labelsTs/MSWAL_0197.nii.gz'}, {'image': './imagesTs/MSWAL_0198_0000.nii.gz', 'label': './labelsTs/MSWAL_0198.nii.gz'}, {'image': './imagesTs/MSWAL_0200_0000.nii.gz', 'label': './labelsTs/MSWAL_0200.nii.gz'}, {'image': './imagesTs/MSWAL_0205_0000.nii.gz', 'label': './labelsTs/MSWAL_0205.nii.gz'}, {'image': './imagesTs/MSWAL_0206_0000.nii.gz', 'label': './labelsTs/MSWAL_0206.nii.gz'}, {'image': './imagesTs/MSWAL_0210_0000.nii.gz', 'label': './labelsTs/MSWAL_0210.nii.gz'}, {'image': './imagesTs/MSWAL_0211_0000.nii.gz', 'label': './labelsTs/MSWAL_0211.nii.gz'}, 
{'image': './imagesTs/MSWAL_0212_0000.nii.gz', 'label': './labelsTs/MSWAL_0212.nii.gz'}, {'image': './imagesTs/MSWAL_0213_0000.nii.gz', 'label': './labelsTs/MSWAL_0213.nii.gz'}, {'image': './imagesTs/MSWAL_0215_0000.nii.gz', 'label': './labelsTs/MSWAL_0215.nii.gz'}, {'image': './imagesTs/MSWAL_0216_0000.nii.gz', 'label': './labelsTs/MSWAL_0216.nii.gz'}, {'image': './imagesTs/MSWAL_0231_0000.nii.gz', 'label': './labelsTs/MSWAL_0231.nii.gz'}, {'image': './imagesTs/MSWAL_0232_0000.nii.gz', 'label': './labelsTs/MSWAL_0232.nii.gz'}, {'image': './imagesTs/MSWAL_0235_0000.nii.gz', 'label': './labelsTs/MSWAL_0235.nii.gz'}, {'image': './imagesTs/MSWAL_0236_0000.nii.gz', 'label': './labelsTs/MSWAL_0236.nii.gz'}, {'image': './imagesTs/MSWAL_0237_0000.nii.gz', 'label': './labelsTs/MSWAL_0237.nii.gz'}, {'image': './imagesTs/MSWAL_0239_0000.nii.gz', 'label': './labelsTs/MSWAL_0239.nii.gz'}, {'image': './imagesTs/MSWAL_0240_0000.nii.gz', 'label': './labelsTs/MSWAL_0240.nii.gz'}, {'image': './imagesTs/MSWAL_0244_0000.nii.gz', 'label': './labelsTs/MSWAL_0244.nii.gz'}, {'image': './imagesTs/MSWAL_0249_0000.nii.gz', 'label': './labelsTs/MSWAL_0249.nii.gz'}, {'image': './imagesTs/MSWAL_0250_0000.nii.gz', 'label': './labelsTs/MSWAL_0250.nii.gz'}, {'image': './imagesTs/MSWAL_0266_0000.nii.gz', 'label': './labelsTs/MSWAL_0266.nii.gz'}, {'image': './imagesTs/MSWAL_0268_0000.nii.gz', 'label': './labelsTs/MSWAL_0268.nii.gz'}, {'image': './imagesTs/MSWAL_0269_0000.nii.gz', 'label': './labelsTs/MSWAL_0269.nii.gz'}, {'image': './imagesTs/MSWAL_0280_0000.nii.gz', 'label': './labelsTs/MSWAL_0280.nii.gz'}, {'image': './imagesTs/MSWAL_0286_0000.nii.gz', 'label': './labelsTs/MSWAL_0286.nii.gz'}, {'image': './imagesTs/MSWAL_0287_0000.nii.gz', 'label': './labelsTs/MSWAL_0287.nii.gz'}, {'image': './imagesTs/MSWAL_0291_0000.nii.gz', 'label': './labelsTs/MSWAL_0291.nii.gz'}, {'image': './imagesTs/MSWAL_0292_0000.nii.gz', 'label': './labelsTs/MSWAL_0292.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0294_0000.nii.gz', 'label': './labelsTs/MSWAL_0294.nii.gz'}, {'image': './imagesTs/MSWAL_0295_0000.nii.gz', 'label': './labelsTs/MSWAL_0295.nii.gz'}, {'image': './imagesTs/MSWAL_0298_0000.nii.gz', 'label': './labelsTs/MSWAL_0298.nii.gz'}, {'image': './imagesTs/MSWAL_0299_0000.nii.gz', 'label': './labelsTs/MSWAL_0299.nii.gz'}, {'image': './imagesTs/MSWAL_0300_0000.nii.gz', 'label': './labelsTs/MSWAL_0300.nii.gz'}, {'image': './imagesTs/MSWAL_0304_0000.nii.gz', 'label': './labelsTs/MSWAL_0304.nii.gz'}, {'image': './imagesTs/MSWAL_0305_0000.nii.gz', 'label': './labelsTs/MSWAL_0305.nii.gz'}, {'image': './imagesTs/MSWAL_0309_0000.nii.gz', 'label': './labelsTs/MSWAL_0309.nii.gz'}, {'image': './imagesTs/MSWAL_0310_0000.nii.gz', 'label': './labelsTs/MSWAL_0310.nii.gz'}, {'image': './imagesTs/MSWAL_0315_0000.nii.gz', 'label': './labelsTs/MSWAL_0315.nii.gz'}, {'image': './imagesTs/MSWAL_0319_0000.nii.gz', 'label': './labelsTs/MSWAL_0319.nii.gz'}, {'image': './imagesTs/MSWAL_0321_0000.nii.gz', 'label': './labelsTs/MSWAL_0321.nii.gz'}, {'image': './imagesTs/MSWAL_0322_0000.nii.gz', 'label': './labelsTs/MSWAL_0322.nii.gz'}, {'image': './imagesTs/MSWAL_0325_0000.nii.gz', 'label': './labelsTs/MSWAL_0325.nii.gz'}, {'image': './imagesTs/MSWAL_0329_0000.nii.gz', 'label': './labelsTs/MSWAL_0329.nii.gz'}, {'image': './imagesTs/MSWAL_0339_0000.nii.gz', 'label': './labelsTs/MSWAL_0339.nii.gz'}, {'image': './imagesTs/MSWAL_0340_0000.nii.gz', 'label': './labelsTs/MSWAL_0340.nii.gz'}, {'image': './imagesTs/MSWAL_0347_0000.nii.gz', 'label': './labelsTs/MSWAL_0347.nii.gz'}, {'image': './imagesTs/MSWAL_0349_0000.nii.gz', 'label': './labelsTs/MSWAL_0349.nii.gz'}, {'image': './imagesTs/MSWAL_0350_0000.nii.gz', 'label': './labelsTs/MSWAL_0350.nii.gz'}, {'image': './imagesTs/MSWAL_0351_0000.nii.gz', 'label': './labelsTs/MSWAL_0351.nii.gz'}, {'image': './imagesTs/MSWAL_0352_0000.nii.gz', 'label': './labelsTs/MSWAL_0352.nii.gz'}, {'image': './imagesTs/MSWAL_0358_0000.nii.gz', 
'label': './labelsTs/MSWAL_0358.nii.gz'}, {'image': './imagesTs/MSWAL_0359_0000.nii.gz', 'label': './labelsTs/MSWAL_0359.nii.gz'}, {'image': './imagesTs/MSWAL_0364_0000.nii.gz', 'label': './labelsTs/MSWAL_0364.nii.gz'}, {'image': './imagesTs/MSWAL_0367_0000.nii.gz', 'label': './labelsTs/MSWAL_0367.nii.gz'}, {'image': './imagesTs/MSWAL_0368_0000.nii.gz', 'label': './labelsTs/MSWAL_0368.nii.gz'}, {'image': './imagesTs/MSWAL_0371_0000.nii.gz', 'label': './labelsTs/MSWAL_0371.nii.gz'}, {'image': './imagesTs/MSWAL_0372_0000.nii.gz', 'label': './labelsTs/MSWAL_0372.nii.gz'}, {'image': './imagesTs/MSWAL_0377_0000.nii.gz', 'label': './labelsTs/MSWAL_0377.nii.gz'}, {'image': './imagesTs/MSWAL_0383_0000.nii.gz', 'label': './labelsTs/MSWAL_0383.nii.gz'}, {'image': './imagesTs/MSWAL_0384_0000.nii.gz', 'label': './labelsTs/MSWAL_0384.nii.gz'}, {'image': './imagesTs/MSWAL_0385_0000.nii.gz', 'label': './labelsTs/MSWAL_0385.nii.gz'}, {'image': './imagesTs/MSWAL_0386_0000.nii.gz', 'label': './labelsTs/MSWAL_0386.nii.gz'}, {'image': './imagesTs/MSWAL_0394_0000.nii.gz', 'label': './labelsTs/MSWAL_0394.nii.gz'}, {'image': './imagesTs/MSWAL_0395_0000.nii.gz', 'label': './labelsTs/MSWAL_0395.nii.gz'}, {'image': './imagesTs/MSWAL_0396_0000.nii.gz', 'label': './labelsTs/MSWAL_0396.nii.gz'}, {'image': './imagesTs/MSWAL_0401_0000.nii.gz', 'label': './labelsTs/MSWAL_0401.nii.gz'}, {'image': './imagesTs/MSWAL_0404_0000.nii.gz', 'label': './labelsTs/MSWAL_0404.nii.gz'}, {'image': './imagesTs/MSWAL_0405_0000.nii.gz', 'label': './labelsTs/MSWAL_0405.nii.gz'}, {'image': './imagesTs/MSWAL_0406_0000.nii.gz', 'label': './labelsTs/MSWAL_0406.nii.gz'}, {'image': './imagesTs/MSWAL_0408_0000.nii.gz', 'label': './labelsTs/MSWAL_0408.nii.gz'}, {'image': './imagesTs/MSWAL_0413_0000.nii.gz', 'label': './labelsTs/MSWAL_0413.nii.gz'}, {'image': './imagesTs/MSWAL_0424_0000.nii.gz', 'label': './labelsTs/MSWAL_0424.nii.gz'}, {'image': './imagesTs/MSWAL_0433_0000.nii.gz', 'label': './labelsTs/MSWAL_0433.nii.gz'}, 
{'image': './imagesTs/MSWAL_0441_0000.nii.gz', 'label': './labelsTs/MSWAL_0441.nii.gz'}, {'image': './imagesTs/MSWAL_0443_0000.nii.gz', 'label': './labelsTs/MSWAL_0443.nii.gz'}, {'image': './imagesTs/MSWAL_0444_0000.nii.gz', 'label': './labelsTs/MSWAL_0444.nii.gz'}, {'image': './imagesTs/MSWAL_0445_0000.nii.gz', 'label': './labelsTs/MSWAL_0445.nii.gz'}, {'image': './imagesTs/MSWAL_0448_0000.nii.gz', 'label': './labelsTs/MSWAL_0448.nii.gz'}, {'image': './imagesTs/MSWAL_0449_0000.nii.gz', 'label': './labelsTs/MSWAL_0449.nii.gz'}, {'image': './imagesTs/MSWAL_0450_0000.nii.gz', 'label': './labelsTs/MSWAL_0450.nii.gz'}, {'image': './imagesTs/MSWAL_0451_0000.nii.gz', 'label': './labelsTs/MSWAL_0451.nii.gz'}, {'image': './imagesTs/MSWAL_0454_0000.nii.gz', 'label': './labelsTs/MSWAL_0454.nii.gz'}, {'image': './imagesTs/MSWAL_0456_0000.nii.gz', 'label': './labelsTs/MSWAL_0456.nii.gz'}, {'image': './imagesTs/MSWAL_0458_0000.nii.gz', 'label': './labelsTs/MSWAL_0458.nii.gz'}, {'image': './imagesTs/MSWAL_0459_0000.nii.gz', 'label': './labelsTs/MSWAL_0459.nii.gz'}, {'image': './imagesTs/MSWAL_0462_0000.nii.gz', 'label': './labelsTs/MSWAL_0462.nii.gz'}, {'image': './imagesTs/MSWAL_0467_0000.nii.gz', 'label': './labelsTs/MSWAL_0467.nii.gz'}, {'image': './imagesTs/MSWAL_0469_0000.nii.gz', 'label': './labelsTs/MSWAL_0469.nii.gz'}, {'image': './imagesTs/MSWAL_0472_0000.nii.gz', 'label': './labelsTs/MSWAL_0472.nii.gz'}, {'image': './imagesTs/MSWAL_0478_0000.nii.gz', 'label': './labelsTs/MSWAL_0478.nii.gz'}, {'image': './imagesTs/MSWAL_0481_0000.nii.gz', 'label': './labelsTs/MSWAL_0481.nii.gz'}, {'image': './imagesTs/MSWAL_0494_0000.nii.gz', 'label': './labelsTs/MSWAL_0494.nii.gz'}, {'image': './imagesTs/MSWAL_0496_0000.nii.gz', 'label': './labelsTs/MSWAL_0496.nii.gz'}, {'image': './imagesTs/MSWAL_0499_0000.nii.gz', 'label': './labelsTs/MSWAL_0499.nii.gz'}, {'image': './imagesTs/MSWAL_0502_0000.nii.gz', 'label': './labelsTs/MSWAL_0502.nii.gz'}, {'image': 
'./imagesTs/MSWAL_0503_0000.nii.gz', 'label': './labelsTs/MSWAL_0503.nii.gz'}, {'image': './imagesTs/MSWAL_0511_0000.nii.gz', 'label': './labelsTs/MSWAL_0511.nii.gz'}, {'image': './imagesTs/MSWAL_0513_0000.nii.gz', 'label': './labelsTs/MSWAL_0513.nii.gz'}, {'image': './imagesTs/MSWAL_0514_0000.nii.gz', 'label': './labelsTs/MSWAL_0514.nii.gz'}, {'image': './imagesTs/MSWAL_0515_0000.nii.gz', 'label': './labelsTs/MSWAL_0515.nii.gz'}, {'image': './imagesTs/MSWAL_0517_0000.nii.gz', 'label': './labelsTs/MSWAL_0517.nii.gz'}, {'image': './imagesTs/MSWAL_0520_0000.nii.gz', 'label': './labelsTs/MSWAL_0520.nii.gz'}, {'image': './imagesTs/MSWAL_0525_0000.nii.gz', 'label': './labelsTs/MSWAL_0525.nii.gz'}, {'image': './imagesTs/MSWAL_0528_0000.nii.gz', 'label': './labelsTs/MSWAL_0528.nii.gz'}, {'image': './imagesTs/MSWAL_0529_0000.nii.gz', 'label': './labelsTs/MSWAL_0529.nii.gz'}, {'image': './imagesTs/MSWAL_0532_0000.nii.gz', 'label': './labelsTs/MSWAL_0532.nii.gz'}, {'image': './imagesTs/MSWAL_0533_0000.nii.gz', 'label': './labelsTs/MSWAL_0533.nii.gz'}, {'image': './imagesTs/MSWAL_0537_0000.nii.gz', 'label': './labelsTs/MSWAL_0537.nii.gz'}, {'image': './imagesTs/MSWAL_0541_0000.nii.gz', 'label': './labelsTs/MSWAL_0541.nii.gz'}, {'image': './imagesTs/MSWAL_0543_0000.nii.gz', 'label': './labelsTs/MSWAL_0543.nii.gz'}, {'image': './imagesTs/MSWAL_0560_0000.nii.gz', 'label': './labelsTs/MSWAL_0560.nii.gz'}, {'image': './imagesTs/MSWAL_0565_0000.nii.gz', 'label': './labelsTs/MSWAL_0565.nii.gz'}, {'image': './imagesTs/MSWAL_0569_0000.nii.gz', 'label': './labelsTs/MSWAL_0569.nii.gz'}, {'image': './imagesTs/MSWAL_0570_0000.nii.gz', 'label': './labelsTs/MSWAL_0570.nii.gz'}, {'image': './imagesTs/MSWAL_0572_0000.nii.gz', 'label': './labelsTs/MSWAL_0572.nii.gz'}, {'image': './imagesTs/MSWAL_0576_0000.nii.gz', 'label': './labelsTs/MSWAL_0576.nii.gz'}, {'image': './imagesTs/MSWAL_0585_0000.nii.gz', 'label': './labelsTs/MSWAL_0585.nii.gz'}, {'image': './imagesTs/MSWAL_0587_0000.nii.gz', 
'label': './labelsTs/MSWAL_0587.nii.gz'}, {'image': './imagesTs/MSWAL_0588_0000.nii.gz', 'label': './labelsTs/MSWAL_0588.nii.gz'}, {'image': './imagesTs/MSWAL_0589_0000.nii.gz', 'label': './labelsTs/MSWAL_0589.nii.gz'}, {'image': './imagesTs/MSWAL_0594_0000.nii.gz', 'label': './labelsTs/MSWAL_0594.nii.gz'}, {'image': './imagesTs/MSWAL_0603_0000.nii.gz', 'label': './labelsTs/MSWAL_0603.nii.gz'}, {'image': './imagesTs/MSWAL_0606_0000.nii.gz', 'label': './labelsTs/MSWAL_0606.nii.gz'}, {'image': './imagesTs/MSWAL_0607_0000.nii.gz', 'label': './labelsTs/MSWAL_0607.nii.gz'}, {'image': './imagesTs/MSWAL_0609_0000.nii.gz', 'label': './labelsTs/MSWAL_0609.nii.gz'}, {'image': './imagesTs/MSWAL_0610_0000.nii.gz', 'label': './labelsTs/MSWAL_0610.nii.gz'}, {'image': './imagesTs/MSWAL_0611_0000.nii.gz', 'label': './labelsTs/MSWAL_0611.nii.gz'}, {'image': './imagesTs/MSWAL_0613_0000.nii.gz', 'label': './labelsTs/MSWAL_0613.nii.gz'}, {'image': './imagesTs/MSWAL_0618_0000.nii.gz', 'label': './labelsTs/MSWAL_0618.nii.gz'}, {'image': './imagesTs/MSWAL_0619_0000.nii.gz', 'label': './labelsTs/MSWAL_0619.nii.gz'}, {'image': './imagesTs/MSWAL_0620_0000.nii.gz', 'label': './labelsTs/MSWAL_0620.nii.gz'}, {'image': './imagesTs/MSWAL_0622_0000.nii.gz', 'label': './labelsTs/MSWAL_0622.nii.gz'}, {'image': './imagesTs/MSWAL_0624_0000.nii.gz', 'label': './labelsTs/MSWAL_0624.nii.gz'}, {'image': './imagesTs/MSWAL_0631_0000.nii.gz', 'label': './labelsTs/MSWAL_0631.nii.gz'}, {'image': './imagesTs/MSWAL_0633_0000.nii.gz', 'label': './labelsTs/MSWAL_0633.nii.gz'}, {'image': './imagesTs/MSWAL_0634_0000.nii.gz', 'label': './labelsTs/MSWAL_0634.nii.gz'}, {'image': './imagesTs/MSWAL_0637_0000.nii.gz', 'label': './labelsTs/MSWAL_0637.nii.gz'}, {'image': './imagesTs/MSWAL_0639_0000.nii.gz', 'label': './labelsTs/MSWAL_0639.nii.gz'}, {'image': './imagesTs/MSWAL_0642_0000.nii.gz', 'label': './labelsTs/MSWAL_0642.nii.gz'}, {'image': './imagesTs/MSWAL_0645_0000.nii.gz', 'label': './labelsTs/MSWAL_0645.nii.gz'}, 
{'image': './imagesTs/MSWAL_0647_0000.nii.gz', 'label': './labelsTs/MSWAL_0647.nii.gz'}, {'image': './imagesTs/MSWAL_0652_0000.nii.gz', 'label': './labelsTs/MSWAL_0652.nii.gz'}, {'image': './imagesTs/MSWAL_0657_0000.nii.gz', 'label': './labelsTs/MSWAL_0657.nii.gz'}, {'image': './imagesTs/MSWAL_0659_0000.nii.gz', 'label': './labelsTs/MSWAL_0659.nii.gz'}, {'image': './imagesTs/MSWAL_0664_0000.nii.gz', 'label': './labelsTs/MSWAL_0664.nii.gz'}, {'image': './imagesTs/MSWAL_0665_0000.nii.gz', 'label': './labelsTs/MSWAL_0665.nii.gz'}, {'image': './imagesTs/MSWAL_0672_0000.nii.gz', 'label': './labelsTs/MSWAL_0672.nii.gz'}, {'image': './imagesTs/MSWAL_0678_0000.nii.gz', 'label': './labelsTs/MSWAL_0678.nii.gz'}, {'image': './imagesTs/MSWAL_0683_0000.nii.gz', 'label': './labelsTs/MSWAL_0683.nii.gz'}, {'image': './imagesTs/MSWAL_0684_0000.nii.gz', 'label': './labelsTs/MSWAL_0684.nii.gz'}, {'image': './imagesTs/MSWAL_0689_0000.nii.gz', 'label': './labelsTs/MSWAL_0689.nii.gz'}, {'image': './imagesTs/MSWAL_0691_0000.nii.gz', 'label': './labelsTs/MSWAL_0691.nii.gz'}]}, 'unpack_dataset': True, 'device': device(type='cuda')}", + "network": "OptimizedModule", + "num_epochs": "1000", + "num_input_channels": "1", + "num_iterations_per_epoch": "250", + "num_val_iterations_per_epoch": "50", + "optimizer": "SGD (\nParameter Group 0\n dampening: 0\n differentiable: False\n foreach: None\n fused: None\n initial_lr: 0.01\n lr: 0.01\n maximize: False\n momentum: 0.99\n nesterov: True\n weight_decay: 3e-05\n)", + "output_folder": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4", + "output_folder_base": "/data/houbb/nnunetv2/nnUNet_results/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres", + "oversample_foreground_percent": "0.33", + "plans_manager": "{'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 
'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'configurations': {'2d': {'data_identifier': 'nnUNetPlans_2d', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 35, 'patch_size': [512, 512], 'median_image_size_in_voxels': [512.0, 512.0], 'spacing': [0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 8, 'features_per_stage': [32, 64, 128, 256, 512, 512, 512, 512], 'conv_op': 'torch.nn.modules.conv.Conv2d', 'kernel_sizes': [[3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3], [3, 3]], 'strides': [[1, 1], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2], [2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm2d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_lowres': {'data_identifier': 'nnUNetResEncUNetLPlans_3d_lowres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [190, 381, 381], 'spacing': 
[1.6798954741801528, 1.0079372845080916, 1.0079372845080916], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': False, 'next_stage': '3d_cascade_fullres'}, '3d_fullres': {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 
'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True}, '3d_cascade_fullres': {'inherits_from': '3d_fullres', 'previous_stage': '3d_lowres'}}, 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}}", + "preprocessed_dataset_folder": "/data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/nnUNetPlans_3d_fullres", + "preprocessed_dataset_folder_base": "/data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL", + "save_every": "50", + "torch_version": "2.5.0+cu121", + "unpack_dataset": "True", + "was_initialized": "True", + "weight_decay": "3e-05" +} \ No newline at end of file diff --git 
a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/progress.png b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/progress.png new file mode 100644 index 0000000000000000000000000000000000000000..3aca458a6991ff6d3ac0ab4e5cc1bd896661f707 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/progress.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:42d7f4f92d8d17770803ba5bc334e90dc6fc66b8bf08520ee9fcaa375fff7afc +size 1205645 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/training_log_2026_4_8_15_31_10.txt b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/training_log_2026_4_8_15_31_10.txt new file mode 100644 index 0000000000000000000000000000000000000000..257b0b2eab7b5e9ccd2f5a1b0f8716cab0d16dec --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/training_log_2026_4_8_15_31_10.txt @@ -0,0 +1,11 @@ + +####################################################################### +Please cite the following paper when using nnU-Net: +Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. Nature methods, 18(2), 203-211. +####################################################################### + +2026-04-08 15:31:10.522146: do_dummy_2d_data_aug: False +2026-04-08 15:31:10.527774: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-08 15:31:10.531223: The split file contains 5 splits. +2026-04-08 15:31:10.533201: Desired fold for training: 4 +2026-04-08 15:31:10.536277: This split has 388 training and 96 validation cases. 
diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/training_log_2026_4_8_16_19_41.txt b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/training_log_2026_4_8_16_19_41.txt new file mode 100644 index 0000000000000000000000000000000000000000..03b899089bc6f24051b2e17e9717fc595b5ac465 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/fold_4/training_log_2026_4_8_16_19_41.txt @@ -0,0 +1,7372 @@ + +####################################################################### +Please cite the following paper when using nnU-Net: +Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. Nature methods, 18(2), 203-211. +####################################################################### + +2026-04-08 16:19:41.337186: do_dummy_2d_data_aug: False +2026-04-08 16:19:41.416261: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-08 16:19:41.420592: The split file contains 5 splits. +2026-04-08 16:19:41.422248: Desired fold for training: 4 +2026-04-08 16:19:41.423715: This split has 388 training and 96 validation cases. +2026-04-08 16:19:49.565491: Using torch.compile... 
+ +This is the configuration used by this training: +Configuration name: 3d_fullres + {'data_identifier': 'nnUNetPlans_3d_fullres', 'preprocessor_name': 'DefaultPreprocessor', 'batch_size': 2, 'patch_size': [112, 256, 256], 'median_image_size_in_voxels': [255.5, 512.0, 512.0], 'spacing': [1.25, 0.75, 0.75], 'normalization_schemes': ['CTNormalization'], 'use_mask_for_norm': [False], 'resampling_fn_data': 'resample_data_or_seg_to_shape', 'resampling_fn_seg': 'resample_data_or_seg_to_shape', 'resampling_fn_data_kwargs': {'is_seg': False, 'order': 3, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_seg_kwargs': {'is_seg': True, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'resampling_fn_probabilities': 'resample_data_or_seg_to_shape', 'resampling_fn_probabilities_kwargs': {'is_seg': False, 'order': 1, 'order_z': 0, 'force_separate_z': None}, 'architecture': {'network_class_name': 'dynamic_network_architectures.architectures.unet.ResidualEncoderUNet', 'arch_kwargs': {'n_stages': 7, 'features_per_stage': [32, 64, 128, 256, 320, 320, 320], 'conv_op': 'torch.nn.modules.conv.Conv3d', 'kernel_sizes': [[3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3], [3, 3, 3]], 'strides': [[1, 1, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2], [1, 2, 2], [1, 2, 2]], 'n_blocks_per_stage': [1, 3, 4, 6, 6, 6, 6], 'n_conv_per_stage_decoder': [1, 1, 1, 1, 1, 1], 'conv_bias': True, 'norm_op': 'torch.nn.modules.instancenorm.InstanceNorm3d', 'norm_op_kwargs': {'eps': 1e-05, 'affine': True}, 'dropout_op': None, 'dropout_op_kwargs': None, 'nonlin': 'torch.nn.LeakyReLU', 'nonlin_kwargs': {'inplace': True}}, '_kw_requires_import': ['conv_op', 'norm_op', 'dropout_op', 'nonlin']}, 'batch_dice': True} + +These are the global plan.json settings: + {'dataset_name': 'Dataset201_MSWAL', 'plans_name': 'nnUNetResEncUNetLPlans', 'original_median_spacing_after_transp': [1.25, 0.75, 0.75], 'original_median_shape_after_transp': [261, 512, 512], 'image_reader_writer': 'SimpleITKIO', 
'transpose_forward': [0, 1, 2], 'transpose_backward': [0, 1, 2], 'experiment_planner_used': 'nnUNetPlannerResEncL', 'label_manager': 'LabelManager', 'foreground_intensity_properties_per_channel': {'0': {'max': 3071.0, 'mean': 71.96339416503906, 'median': 45.0, 'min': -932.0, 'percentile_00_5': -93.0, 'percentile_99_5': 1052.0, 'std': 141.6230926513672}}} + +2026-04-08 16:19:51.409630: unpacking dataset... +2026-04-08 16:19:57.944138: unpacking done... +2026-04-08 16:19:57.996833: Unable to plot network architecture: nnUNet_compile is enabled! +2026-04-08 16:19:58.077473: +2026-04-08 16:19:58.084445: Epoch 0 +2026-04-08 16:19:58.087427: Current learning rate: 0.01 +2026-04-08 16:24:01.216327: train_loss 0.1905 +2026-04-08 16:24:01.225932: val_loss 0.1018 +2026-04-08 16:24:01.229710: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:24:01.233964: Epoch time: 243.14 s +2026-04-08 16:24:01.237629: Yayy! New best EMA pseudo Dice: 0.0 +2026-04-08 16:24:04.086581: +2026-04-08 16:24:04.090970: Epoch 1 +2026-04-08 16:24:04.095585: Current learning rate: 0.00999 +2026-04-08 16:25:46.792297: train_loss 0.0579 +2026-04-08 16:25:46.804198: val_loss 0.0647 +2026-04-08 16:25:46.807553: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:25:46.812632: Epoch time: 102.71 s +2026-04-08 16:25:47.848170: +2026-04-08 16:25:47.850864: Epoch 2 +2026-04-08 16:25:47.852982: Current learning rate: 0.00998 +2026-04-08 16:27:32.409530: train_loss 0.0624 +2026-04-08 16:27:32.419286: val_loss 0.0957 +2026-04-08 16:27:32.422410: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:27:32.428032: Epoch time: 104.56 s +2026-04-08 16:27:33.530088: +2026-04-08 16:27:33.536108: Epoch 3 +2026-04-08 16:27:33.538439: Current learning rate: 0.00997 +2026-04-08 16:29:17.269131: train_loss 0.0529 +2026-04-08 16:29:17.277305: val_loss 0.0875 +2026-04-08 16:29:17.280901: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:29:17.282903: Epoch time: 103.74 s 
+2026-04-08 16:29:18.329252: +2026-04-08 16:29:18.332024: Epoch 4 +2026-04-08 16:29:18.335808: Current learning rate: 0.00996 +2026-04-08 16:31:04.104959: train_loss 0.0563 +2026-04-08 16:31:04.115225: val_loss 0.0772 +2026-04-08 16:31:04.118173: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:31:04.122193: Epoch time: 105.78 s +2026-04-08 16:31:05.200801: +2026-04-08 16:31:05.206198: Epoch 5 +2026-04-08 16:31:05.211394: Current learning rate: 0.00995 +2026-04-08 16:32:49.609509: train_loss 0.0542 +2026-04-08 16:32:49.621618: val_loss 0.0649 +2026-04-08 16:32:49.626171: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:32:49.628404: Epoch time: 104.41 s +2026-04-08 16:32:50.625944: +2026-04-08 16:32:50.628218: Epoch 6 +2026-04-08 16:32:50.633035: Current learning rate: 0.00995 +2026-04-08 16:34:35.614417: train_loss 0.0486 +2026-04-08 16:34:35.627640: val_loss 0.0447 +2026-04-08 16:34:35.630113: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:34:35.632301: Epoch time: 104.99 s +2026-04-08 16:34:36.854379: +2026-04-08 16:34:36.858014: Epoch 7 +2026-04-08 16:34:36.861238: Current learning rate: 0.00994 +2026-04-08 16:36:23.287114: train_loss 0.0439 +2026-04-08 16:36:23.304053: val_loss 0.1004 +2026-04-08 16:36:23.306900: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:36:23.309996: Epoch time: 106.44 s +2026-04-08 16:36:24.372133: +2026-04-08 16:36:24.375858: Epoch 8 +2026-04-08 16:36:24.378529: Current learning rate: 0.00993 +2026-04-08 16:38:10.381026: train_loss 0.0491 +2026-04-08 16:38:10.392145: val_loss 0.0346 +2026-04-08 16:38:10.396606: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:38:10.398698: Epoch time: 106.01 s +2026-04-08 16:38:11.513817: +2026-04-08 16:38:11.516407: Epoch 9 +2026-04-08 16:38:11.524956: Current learning rate: 0.00992 +2026-04-08 16:39:56.659236: train_loss 0.0438 +2026-04-08 16:39:56.670753: val_loss 0.0793 +2026-04-08 16:39:56.673467: Pseudo dice [0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0] +2026-04-08 16:39:56.677758: Epoch time: 105.15 s +2026-04-08 16:39:57.701132: +2026-04-08 16:39:57.704171: Epoch 10 +2026-04-08 16:39:57.706626: Current learning rate: 0.00991 +2026-04-08 16:41:43.365143: train_loss 0.048 +2026-04-08 16:41:43.374134: val_loss 0.0523 +2026-04-08 16:41:43.376485: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:41:43.379495: Epoch time: 105.67 s +2026-04-08 16:41:44.421845: +2026-04-08 16:41:44.424624: Epoch 11 +2026-04-08 16:41:44.427127: Current learning rate: 0.0099 +2026-04-08 16:43:30.408002: train_loss 0.0458 +2026-04-08 16:43:30.418626: val_loss 0.0618 +2026-04-08 16:43:30.422133: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:43:30.427982: Epoch time: 105.99 s +2026-04-08 16:43:31.467227: +2026-04-08 16:43:31.474984: Epoch 12 +2026-04-08 16:43:31.479312: Current learning rate: 0.00989 +2026-04-08 16:45:15.785132: train_loss 0.0574 +2026-04-08 16:45:15.794398: val_loss 0.0872 +2026-04-08 16:45:15.797373: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:45:15.806077: Epoch time: 104.32 s +2026-04-08 16:45:16.877681: +2026-04-08 16:45:16.881752: Epoch 13 +2026-04-08 16:45:16.884722: Current learning rate: 0.00988 +2026-04-08 16:47:03.116546: train_loss 0.0503 +2026-04-08 16:47:03.132753: val_loss 0.0954 +2026-04-08 16:47:03.136244: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:47:03.138841: Epoch time: 106.24 s +2026-04-08 16:47:04.204409: +2026-04-08 16:47:04.208196: Epoch 14 +2026-04-08 16:47:04.211988: Current learning rate: 0.00987 +2026-04-08 16:48:48.173707: train_loss 0.0374 +2026-04-08 16:48:48.181990: val_loss 0.0452 +2026-04-08 16:48:48.184343: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:48:48.187006: Epoch time: 103.97 s +2026-04-08 16:48:49.258601: +2026-04-08 16:48:49.262559: Epoch 15 +2026-04-08 16:48:49.265403: Current learning rate: 0.00986 +2026-04-08 16:50:34.266479: train_loss 0.0496 +2026-04-08 
16:50:34.275782: val_loss 0.0554 +2026-04-08 16:50:34.284010: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:50:34.289560: Epoch time: 105.01 s +2026-04-08 16:50:35.396934: +2026-04-08 16:50:35.401074: Epoch 16 +2026-04-08 16:50:35.405177: Current learning rate: 0.00986 +2026-04-08 16:52:19.822234: train_loss 0.0484 +2026-04-08 16:52:19.831634: val_loss 0.0394 +2026-04-08 16:52:19.837625: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:52:19.840425: Epoch time: 104.43 s +2026-04-08 16:52:20.949651: +2026-04-08 16:52:20.953477: Epoch 17 +2026-04-08 16:52:20.958318: Current learning rate: 0.00985 +2026-04-08 16:54:08.543307: train_loss 0.0415 +2026-04-08 16:54:08.552857: val_loss 0.0538 +2026-04-08 16:54:08.556361: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:54:08.559268: Epoch time: 107.6 s +2026-04-08 16:54:11.113082: +2026-04-08 16:54:11.118551: Epoch 18 +2026-04-08 16:54:11.121265: Current learning rate: 0.00984 +2026-04-08 16:55:58.029473: train_loss 0.0596 +2026-04-08 16:55:58.038060: val_loss 0.0718 +2026-04-08 16:55:58.041474: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:55:58.045278: Epoch time: 106.92 s +2026-04-08 16:55:59.143110: +2026-04-08 16:55:59.148216: Epoch 19 +2026-04-08 16:55:59.151222: Current learning rate: 0.00983 +2026-04-08 16:57:44.077077: train_loss 0.0444 +2026-04-08 16:57:44.088215: val_loss 0.0559 +2026-04-08 16:57:44.090997: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:57:44.093261: Epoch time: 104.94 s +2026-04-08 16:57:45.198573: +2026-04-08 16:57:45.201805: Epoch 20 +2026-04-08 16:57:45.205151: Current learning rate: 0.00982 +2026-04-08 16:59:30.921930: train_loss 0.0457 +2026-04-08 16:59:30.931186: val_loss 0.0455 +2026-04-08 16:59:30.934158: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 16:59:30.936345: Epoch time: 105.73 s +2026-04-08 16:59:32.038933: +2026-04-08 16:59:32.043202: Epoch 21 +2026-04-08 16:59:32.046728: Current 
learning rate: 0.00981 +2026-04-08 17:01:17.388076: train_loss 0.0367 +2026-04-08 17:01:17.399789: val_loss 0.052 +2026-04-08 17:01:17.402870: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:01:17.406043: Epoch time: 105.35 s +2026-04-08 17:01:18.435005: +2026-04-08 17:01:18.440037: Epoch 22 +2026-04-08 17:01:18.442627: Current learning rate: 0.0098 +2026-04-08 17:03:05.214159: train_loss 0.041 +2026-04-08 17:03:05.221167: val_loss 0.0469 +2026-04-08 17:03:05.226281: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:03:05.229543: Epoch time: 106.78 s +2026-04-08 17:03:06.253472: +2026-04-08 17:03:06.256524: Epoch 23 +2026-04-08 17:03:06.259698: Current learning rate: 0.00979 +2026-04-08 17:04:53.000627: train_loss 0.0482 +2026-04-08 17:04:53.009693: val_loss 0.0566 +2026-04-08 17:04:53.013862: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:04:53.019805: Epoch time: 106.75 s +2026-04-08 17:04:54.043036: +2026-04-08 17:04:54.045717: Epoch 24 +2026-04-08 17:04:54.049060: Current learning rate: 0.00978 +2026-04-08 17:06:42.135707: train_loss 0.0336 +2026-04-08 17:06:42.141830: val_loss 0.0868 +2026-04-08 17:06:42.144648: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:06:42.148418: Epoch time: 108.1 s +2026-04-08 17:06:43.180529: +2026-04-08 17:06:43.185782: Epoch 25 +2026-04-08 17:06:43.191187: Current learning rate: 0.00977 +2026-04-08 17:08:28.331803: train_loss 0.0438 +2026-04-08 17:08:28.344148: val_loss 0.0591 +2026-04-08 17:08:28.347601: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:08:28.351043: Epoch time: 105.15 s +2026-04-08 17:08:29.377435: +2026-04-08 17:08:29.379946: Epoch 26 +2026-04-08 17:08:29.383045: Current learning rate: 0.00977 +2026-04-08 17:10:15.170785: train_loss 0.0508 +2026-04-08 17:10:15.178440: val_loss 0.0694 +2026-04-08 17:10:15.180375: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:10:15.182569: Epoch time: 105.8 s +2026-04-08 17:10:16.232424: 
+2026-04-08 17:10:16.234783: Epoch 27 +2026-04-08 17:10:16.236765: Current learning rate: 0.00976 +2026-04-08 17:12:01.349100: train_loss 0.0368 +2026-04-08 17:12:01.359842: val_loss 0.0416 +2026-04-08 17:12:01.372195: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:12:01.376610: Epoch time: 105.12 s +2026-04-08 17:12:02.448645: +2026-04-08 17:12:02.452488: Epoch 28 +2026-04-08 17:12:02.455751: Current learning rate: 0.00975 +2026-04-08 17:13:49.084010: train_loss 0.045 +2026-04-08 17:13:49.094059: val_loss 0.0784 +2026-04-08 17:13:49.096867: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:13:49.100422: Epoch time: 106.64 s +2026-04-08 17:13:50.150802: +2026-04-08 17:13:50.153576: Epoch 29 +2026-04-08 17:13:50.159024: Current learning rate: 0.00974 +2026-04-08 17:15:35.155604: train_loss 0.0465 +2026-04-08 17:15:35.163417: val_loss 0.0384 +2026-04-08 17:15:35.165731: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:15:35.169457: Epoch time: 105.01 s +2026-04-08 17:15:36.235098: +2026-04-08 17:15:36.239396: Epoch 30 +2026-04-08 17:15:36.243550: Current learning rate: 0.00973 +2026-04-08 17:17:21.021475: train_loss 0.0399 +2026-04-08 17:17:21.027960: val_loss 0.0546 +2026-04-08 17:17:21.031641: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:17:21.034295: Epoch time: 104.79 s +2026-04-08 17:17:22.095078: +2026-04-08 17:17:22.096929: Epoch 31 +2026-04-08 17:17:22.100862: Current learning rate: 0.00972 +2026-04-08 17:19:07.340662: train_loss 0.0391 +2026-04-08 17:19:07.348705: val_loss 0.0616 +2026-04-08 17:19:07.350908: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:19:07.354759: Epoch time: 105.25 s +2026-04-08 17:19:08.430062: +2026-04-08 17:19:08.432089: Epoch 32 +2026-04-08 17:19:08.434769: Current learning rate: 0.00971 +2026-04-08 17:20:53.054776: train_loss 0.0392 +2026-04-08 17:20:53.061990: val_loss 0.0373 +2026-04-08 17:20:53.064068: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] 
+2026-04-08 17:20:53.067203: Epoch time: 104.63 s +2026-04-08 17:20:54.145013: +2026-04-08 17:20:54.148666: Epoch 33 +2026-04-08 17:20:54.150877: Current learning rate: 0.0097 +2026-04-08 17:22:38.570246: train_loss 0.0342 +2026-04-08 17:22:38.577907: val_loss 0.0341 +2026-04-08 17:22:38.580246: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:22:38.583568: Epoch time: 104.43 s +2026-04-08 17:22:39.680060: +2026-04-08 17:22:39.686965: Epoch 34 +2026-04-08 17:22:39.689905: Current learning rate: 0.00969 +2026-04-08 17:24:23.592365: train_loss 0.0282 +2026-04-08 17:24:23.599057: val_loss 0.0881 +2026-04-08 17:24:23.603221: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:24:23.605328: Epoch time: 103.92 s +2026-04-08 17:24:24.689297: +2026-04-08 17:24:24.691473: Epoch 35 +2026-04-08 17:24:24.693539: Current learning rate: 0.00968 +2026-04-08 17:26:08.694556: train_loss 0.0314 +2026-04-08 17:26:08.705906: val_loss 0.0576 +2026-04-08 17:26:08.717213: Pseudo dice [0.0, 0.0, 0.0048, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:26:08.719279: Epoch time: 104.01 s +2026-04-08 17:26:08.722140: Yayy! New best EMA pseudo Dice: 0.0001 +2026-04-08 17:26:11.646342: +2026-04-08 17:26:11.648487: Epoch 36 +2026-04-08 17:26:11.652452: Current learning rate: 0.00968 +2026-04-08 17:27:56.001798: train_loss 0.0323 +2026-04-08 17:27:56.009582: val_loss 0.0415 +2026-04-08 17:27:56.012644: Pseudo dice [0.0, 0.0, 0.0007, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:27:56.014882: Epoch time: 104.36 s +2026-04-08 17:27:56.017242: Yayy! New best EMA pseudo Dice: 0.0001 +2026-04-08 17:27:58.904824: +2026-04-08 17:27:58.906955: Epoch 37 +2026-04-08 17:27:58.908444: Current learning rate: 0.00967 +2026-04-08 17:29:44.745012: train_loss 0.0372 +2026-04-08 17:29:44.759461: val_loss 0.049 +2026-04-08 17:29:44.763888: Pseudo dice [0.0, 0.0, 0.0204, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:29:44.766550: Epoch time: 105.84 s +2026-04-08 17:29:44.768452: Yayy! 
New best EMA pseudo Dice: 0.0004 +2026-04-08 17:29:47.728935: +2026-04-08 17:29:47.731191: Epoch 38 +2026-04-08 17:29:47.732997: Current learning rate: 0.00966 +2026-04-08 17:31:31.918911: train_loss 0.0319 +2026-04-08 17:31:31.936751: val_loss 0.0584 +2026-04-08 17:31:31.941036: Pseudo dice [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:31:31.944105: Epoch time: 104.19 s +2026-04-08 17:31:33.042540: +2026-04-08 17:31:33.044750: Epoch 39 +2026-04-08 17:31:33.046298: Current learning rate: 0.00965 +2026-04-08 17:33:18.636818: train_loss 0.0412 +2026-04-08 17:33:18.645601: val_loss 0.0376 +2026-04-08 17:33:18.648501: Pseudo dice [0.0, 0.0, 0.0803, 0.0, 0.0, 0.0, 0.0] +2026-04-08 17:33:18.652150: Epoch time: 105.6 s +2026-04-08 17:33:18.656532: Yayy! New best EMA pseudo Dice: 0.0014 +2026-04-08 17:33:21.601762: +2026-04-08 17:33:21.605753: Epoch 40 +2026-04-08 17:33:21.607936: Current learning rate: 0.00964 +2026-04-08 17:35:07.428794: train_loss 0.0252 +2026-04-08 17:35:07.436435: val_loss 0.0405 +2026-04-08 17:35:07.439233: Pseudo dice [0.0, 0.0, 0.0334, 0.0, 0.0, 0.0, 0.3031] +2026-04-08 17:35:07.441695: Epoch time: 105.83 s +2026-04-08 17:35:07.444723: Yayy! New best EMA pseudo Dice: 0.0061 +2026-04-08 17:35:10.373048: +2026-04-08 17:35:10.374942: Epoch 41 +2026-04-08 17:35:10.380770: Current learning rate: 0.00963 +2026-04-08 17:36:56.209901: train_loss 0.0262 +2026-04-08 17:36:56.219087: val_loss 0.019 +2026-04-08 17:36:56.222028: Pseudo dice [0.0, 0.0, 0.0085, 0.0, 0.0, 0.0, 0.3629] +2026-04-08 17:36:56.226020: Epoch time: 105.84 s +2026-04-08 17:36:56.229628: Yayy! 
New best EMA pseudo Dice: 0.0108 +2026-04-08 17:36:59.307787: +2026-04-08 17:36:59.310286: Epoch 42 +2026-04-08 17:36:59.312189: Current learning rate: 0.00962 +2026-04-08 17:38:44.634748: train_loss 0.0219 +2026-04-08 17:38:44.656807: val_loss 0.041 +2026-04-08 17:38:44.659169: Pseudo dice [0.0, 0.0, 0.1491, 0.0, 0.0, 0.0, 0.4314] +2026-04-08 17:38:44.662623: Epoch time: 105.33 s +2026-04-08 17:38:44.665684: Yayy! New best EMA pseudo Dice: 0.018 +2026-04-08 17:38:47.522721: +2026-04-08 17:38:47.524588: Epoch 43 +2026-04-08 17:38:47.526480: Current learning rate: 0.00961 +2026-04-08 17:40:32.508866: train_loss 0.021 +2026-04-08 17:40:32.516853: val_loss 0.0674 +2026-04-08 17:40:32.519354: Pseudo dice [0.0, 0.0, 0.3314, 0.0, 0.0, 0.0, 0.4131] +2026-04-08 17:40:32.521872: Epoch time: 104.99 s +2026-04-08 17:40:32.524245: Yayy! New best EMA pseudo Dice: 0.0268 +2026-04-08 17:40:35.252852: +2026-04-08 17:40:35.255208: Epoch 44 +2026-04-08 17:40:35.257735: Current learning rate: 0.0096 +2026-04-08 17:42:19.879708: train_loss 0.0247 +2026-04-08 17:42:19.890991: val_loss 0.0311 +2026-04-08 17:42:19.893254: Pseudo dice [0.0, 0.0, 0.2655, 0.0, 0.0, 0.0, 0.1105] +2026-04-08 17:42:19.896844: Epoch time: 104.63 s +2026-04-08 17:42:19.901344: Yayy! New best EMA pseudo Dice: 0.0295 +2026-04-08 17:42:22.660861: +2026-04-08 17:42:22.662630: Epoch 45 +2026-04-08 17:42:22.664201: Current learning rate: 0.00959 +2026-04-08 17:44:07.792666: train_loss 0.0159 +2026-04-08 17:44:07.799892: val_loss 0.0631 +2026-04-08 17:44:07.802637: Pseudo dice [0.0, 0.0, 0.2601, 0.0, 0.0, 0.0, 0.2643] +2026-04-08 17:44:07.804478: Epoch time: 105.13 s +2026-04-08 17:44:07.806896: Yayy! 
New best EMA pseudo Dice: 0.0341 +2026-04-08 17:44:10.656657: +2026-04-08 17:44:10.658639: Epoch 46 +2026-04-08 17:44:10.660765: Current learning rate: 0.00959 +2026-04-08 17:45:54.514176: train_loss 0.0267 +2026-04-08 17:45:54.525928: val_loss 0.0361 +2026-04-08 17:45:54.529995: Pseudo dice [0.0, 0.0, 0.0476, 0.0, 0.0, 0.0, 0.1195] +2026-04-08 17:45:54.532622: Epoch time: 103.86 s +2026-04-08 17:45:55.564917: +2026-04-08 17:45:55.567909: Epoch 47 +2026-04-08 17:45:55.571071: Current learning rate: 0.00958 +2026-04-08 17:47:39.895221: train_loss 0.0204 +2026-04-08 17:47:39.904803: val_loss 0.023 +2026-04-08 17:47:39.907628: Pseudo dice [0.0, 0.0, 0.2577, 0.0, 0.0, 0.0, 0.1472] +2026-04-08 17:47:39.910199: Epoch time: 104.33 s +2026-04-08 17:47:39.912276: Yayy! New best EMA pseudo Dice: 0.0355 +2026-04-08 17:47:42.842884: +2026-04-08 17:47:42.846441: Epoch 48 +2026-04-08 17:47:42.847984: Current learning rate: 0.00957 +2026-04-08 17:49:26.983655: train_loss 0.0203 +2026-04-08 17:49:26.990677: val_loss 0.0252 +2026-04-08 17:49:26.992980: Pseudo dice [0.0, 0.0, 0.2246, 0.0, 0.0, 0.0, 0.3299] +2026-04-08 17:49:26.996532: Epoch time: 104.14 s +2026-04-08 17:49:26.998751: Yayy! New best EMA pseudo Dice: 0.0399 +2026-04-08 17:49:29.843029: +2026-04-08 17:49:29.845004: Epoch 49 +2026-04-08 17:49:29.846421: Current learning rate: 0.00956 +2026-04-08 17:51:14.992001: train_loss 0.0229 +2026-04-08 17:51:15.003601: val_loss 0.042 +2026-04-08 17:51:15.006029: Pseudo dice [0.0, 0.0, 0.0264, 0.0, 0.0, 0.0, 0.2311] +2026-04-08 17:51:15.010279: Epoch time: 105.15 s +2026-04-08 17:51:17.840259: +2026-04-08 17:51:17.842746: Epoch 50 +2026-04-08 17:51:17.844649: Current learning rate: 0.00955 +2026-04-08 17:53:02.912283: train_loss 0.016 +2026-04-08 17:53:02.920339: val_loss 0.0176 +2026-04-08 17:53:02.922599: Pseudo dice [0.0, 0.0, 0.0935, 0.0, 0.0, 0.0, 0.4925] +2026-04-08 17:53:02.926126: Epoch time: 105.08 s +2026-04-08 17:53:02.929458: Yayy! 
New best EMA pseudo Dice: 0.044 +2026-04-08 17:53:05.767294: +2026-04-08 17:53:05.770034: Epoch 51 +2026-04-08 17:53:05.775287: Current learning rate: 0.00954 +2026-04-08 17:54:50.888469: train_loss -0.0021 +2026-04-08 17:54:50.899633: val_loss 0.0243 +2026-04-08 17:54:50.902892: Pseudo dice [0.0, 0.0, 0.301, 0.0, 0.0, 0.0, 0.1573] +2026-04-08 17:54:50.905527: Epoch time: 105.12 s +2026-04-08 17:54:50.907909: Yayy! New best EMA pseudo Dice: 0.0461 +2026-04-08 17:54:54.051945: +2026-04-08 17:54:54.055707: Epoch 52 +2026-04-08 17:54:54.060295: Current learning rate: 0.00953 +2026-04-08 17:56:38.662567: train_loss 0.0174 +2026-04-08 17:56:38.669436: val_loss 0.0236 +2026-04-08 17:56:38.673316: Pseudo dice [0.0, 0.0, 0.2751, 0.0, 0.0, 0.0, 0.3394] +2026-04-08 17:56:38.677770: Epoch time: 104.61 s +2026-04-08 17:56:38.680684: Yayy! New best EMA pseudo Dice: 0.0503 +2026-04-08 17:56:41.525661: +2026-04-08 17:56:41.528692: Epoch 53 +2026-04-08 17:56:41.531204: Current learning rate: 0.00952 +2026-04-08 17:58:26.541782: train_loss 0.008 +2026-04-08 17:58:26.549295: val_loss 0.03 +2026-04-08 17:58:26.551495: Pseudo dice [0.0, 0.0, 0.2302, 0.0, 0.0, 0.0, 0.2337] +2026-04-08 17:58:26.553599: Epoch time: 105.02 s +2026-04-08 17:58:26.556027: Yayy! New best EMA pseudo Dice: 0.0519 +2026-04-08 17:58:30.591439: +2026-04-08 17:58:30.593288: Epoch 54 +2026-04-08 17:58:30.595041: Current learning rate: 0.00951 +2026-04-08 18:00:14.854399: train_loss 0.0085 +2026-04-08 18:00:14.863070: val_loss -0.0121 +2026-04-08 18:00:14.866592: Pseudo dice [0.0, 0.0, 0.4086, 0.0, 0.0, 0.0, 0.534] +2026-04-08 18:00:14.869555: Epoch time: 104.27 s +2026-04-08 18:00:14.871946: Yayy! 
New best EMA pseudo Dice: 0.0602 +2026-04-08 18:00:17.652726: +2026-04-08 18:00:17.654984: Epoch 55 +2026-04-08 18:00:17.657608: Current learning rate: 0.0095 +2026-04-08 18:02:01.353359: train_loss 0.0018 +2026-04-08 18:02:01.362623: val_loss 0.0059 +2026-04-08 18:02:01.384319: Pseudo dice [0.0, 0.0, 0.0916, 0.0, 0.0, 0.0, 0.4194] +2026-04-08 18:02:01.386603: Epoch time: 103.7 s +2026-04-08 18:02:01.389371: Yayy! New best EMA pseudo Dice: 0.0615 +2026-04-08 18:02:04.346459: +2026-04-08 18:02:04.350071: Epoch 56 +2026-04-08 18:02:04.352661: Current learning rate: 0.00949 +2026-04-08 18:03:48.389704: train_loss -0.0041 +2026-04-08 18:03:48.398541: val_loss 0.0181 +2026-04-08 18:03:48.401637: Pseudo dice [0.0, 0.0, 0.2669, 0.0, 0.0, 0.0, 0.4171] +2026-04-08 18:03:48.407519: Epoch time: 104.05 s +2026-04-08 18:03:48.410010: Yayy! New best EMA pseudo Dice: 0.0651 +2026-04-08 18:03:51.240398: +2026-04-08 18:03:51.242106: Epoch 57 +2026-04-08 18:03:51.243495: Current learning rate: 0.00949 +2026-04-08 18:05:36.280577: train_loss -0.0029 +2026-04-08 18:05:36.289038: val_loss -0.0031 +2026-04-08 18:05:36.291755: Pseudo dice [0.0, 0.0, 0.4556, 0.0, 0.0, 0.0, 0.316] +2026-04-08 18:05:36.294220: Epoch time: 105.04 s +2026-04-08 18:05:36.296452: Yayy! New best EMA pseudo Dice: 0.0696 +2026-04-08 18:05:39.131695: +2026-04-08 18:05:39.134966: Epoch 58 +2026-04-08 18:05:39.136893: Current learning rate: 0.00948 +2026-04-08 18:07:23.297224: train_loss -0.0047 +2026-04-08 18:07:23.306679: val_loss 0.0135 +2026-04-08 18:07:23.309862: Pseudo dice [0.0, 0.0, 0.3989, 0.0, 0.0, 0.0, 0.4927] +2026-04-08 18:07:23.313225: Epoch time: 104.17 s +2026-04-08 18:07:23.317219: Yayy! 
New best EMA pseudo Dice: 0.0754 +2026-04-08 18:07:26.305785: +2026-04-08 18:07:26.307786: Epoch 59 +2026-04-08 18:07:26.309465: Current learning rate: 0.00947 +2026-04-08 18:09:11.733170: train_loss 0.0038 +2026-04-08 18:09:11.747428: val_loss -0.0104 +2026-04-08 18:09:11.750636: Pseudo dice [0.0, 0.0, 0.0832, 0.0, 0.0, 0.0, 0.5518] +2026-04-08 18:09:11.752425: Epoch time: 105.43 s +2026-04-08 18:09:11.756871: Yayy! New best EMA pseudo Dice: 0.0769 +2026-04-08 18:09:14.690225: +2026-04-08 18:09:14.693157: Epoch 60 +2026-04-08 18:09:14.697054: Current learning rate: 0.00946 +2026-04-08 18:10:58.993161: train_loss -0.0119 +2026-04-08 18:10:59.003229: val_loss 0.0312 +2026-04-08 18:10:59.006916: Pseudo dice [0.0, 0.0, 0.3549, 0.0, 0.0, 0.0, 0.3106] +2026-04-08 18:10:59.009616: Epoch time: 104.31 s +2026-04-08 18:10:59.012376: Yayy! New best EMA pseudo Dice: 0.0787 +2026-04-08 18:11:02.003003: +2026-04-08 18:11:02.004874: Epoch 61 +2026-04-08 18:11:02.006416: Current learning rate: 0.00945 +2026-04-08 18:12:46.665853: train_loss -0.0124 +2026-04-08 18:12:46.676043: val_loss 0.0012 +2026-04-08 18:12:46.679786: Pseudo dice [0.0, 0.4402, 0.3014, 0.0, 0.0, 0.001, 0.4874] +2026-04-08 18:12:46.683823: Epoch time: 104.67 s +2026-04-08 18:12:46.690118: Yayy! New best EMA pseudo Dice: 0.0884 +2026-04-08 18:12:49.584032: +2026-04-08 18:12:49.586518: Epoch 62 +2026-04-08 18:12:49.589190: Current learning rate: 0.00944 +2026-04-08 18:14:35.022239: train_loss -0.0184 +2026-04-08 18:14:35.034143: val_loss -0.0136 +2026-04-08 18:14:35.036982: Pseudo dice [0.0, 0.4989, 0.4123, 0.0, 0.0, 0.4725, 0.4537] +2026-04-08 18:14:35.039457: Epoch time: 105.44 s +2026-04-08 18:14:35.042485: Yayy! 
New best EMA pseudo Dice: 0.1058 +2026-04-08 18:14:37.832629: +2026-04-08 18:14:37.834246: Epoch 63 +2026-04-08 18:14:37.836005: Current learning rate: 0.00943 +2026-04-08 18:16:23.373513: train_loss -0.0207 +2026-04-08 18:16:23.384053: val_loss -0.0347 +2026-04-08 18:16:23.386927: Pseudo dice [0.0137, 0.2865, 0.2791, 0.0, 0.0, 0.4906, 0.5415] +2026-04-08 18:16:23.390931: Epoch time: 105.54 s +2026-04-08 18:16:23.395331: Yayy! New best EMA pseudo Dice: 0.1183 +2026-04-08 18:16:26.458014: +2026-04-08 18:16:26.460111: Epoch 64 +2026-04-08 18:16:26.462481: Current learning rate: 0.00942 +2026-04-08 18:18:12.509146: train_loss -0.0321 +2026-04-08 18:18:12.516230: val_loss -0.004 +2026-04-08 18:18:12.518333: Pseudo dice [0.0321, 0.1952, 0.3224, 0.0, 0.0, 0.4894, 0.3292] +2026-04-08 18:18:12.520542: Epoch time: 106.05 s +2026-04-08 18:18:12.524709: Yayy! New best EMA pseudo Dice: 0.126 +2026-04-08 18:18:15.481913: +2026-04-08 18:18:15.487204: Epoch 65 +2026-04-08 18:18:15.490041: Current learning rate: 0.00941 +2026-04-08 18:19:59.013504: train_loss -0.0175 +2026-04-08 18:19:59.019500: val_loss -0.0098 +2026-04-08 18:19:59.023483: Pseudo dice [0.0065, 0.405, 0.2452, 0.0, 0.0, 0.5037, 0.5144] +2026-04-08 18:19:59.027320: Epoch time: 103.53 s +2026-04-08 18:19:59.030358: Yayy! New best EMA pseudo Dice: 0.1373 +2026-04-08 18:20:01.904694: +2026-04-08 18:20:01.906756: Epoch 66 +2026-04-08 18:20:01.908799: Current learning rate: 0.0094 +2026-04-08 18:21:45.372018: train_loss -0.0374 +2026-04-08 18:21:45.379335: val_loss -0.0194 +2026-04-08 18:21:45.381750: Pseudo dice [0.1038, 0.4431, 0.2848, 0.0, 0.0, 0.166, 0.4173] +2026-04-08 18:21:45.384863: Epoch time: 103.47 s +2026-04-08 18:21:45.402047: Yayy! 
New best EMA pseudo Dice: 0.1438 +2026-04-08 18:21:48.236549: +2026-04-08 18:21:48.239181: Epoch 67 +2026-04-08 18:21:48.240994: Current learning rate: 0.00939 +2026-04-08 18:23:31.492290: train_loss -0.0334 +2026-04-08 18:23:31.506056: val_loss -0.0157 +2026-04-08 18:23:31.508797: Pseudo dice [0.2194, 0.4089, 0.2906, 0.0, 0.0, 0.2147, 0.3108] +2026-04-08 18:23:31.511327: Epoch time: 103.26 s +2026-04-08 18:23:31.513361: Yayy! New best EMA pseudo Dice: 0.1501 +2026-04-08 18:23:34.303668: +2026-04-08 18:23:34.305346: Epoch 68 +2026-04-08 18:23:34.306850: Current learning rate: 0.00939 +2026-04-08 18:25:17.344716: train_loss -0.0335 +2026-04-08 18:25:17.359430: val_loss -0.0486 +2026-04-08 18:25:17.364075: Pseudo dice [0.1779, 0.2138, 0.5136, 0.0, 0.0, 0.3926, 0.3841] +2026-04-08 18:25:17.366257: Epoch time: 103.04 s +2026-04-08 18:25:17.369081: Yayy! New best EMA pseudo Dice: 0.1591 +2026-04-08 18:25:20.214383: +2026-04-08 18:25:20.216845: Epoch 69 +2026-04-08 18:25:20.221664: Current learning rate: 0.00938 +2026-04-08 18:27:03.128056: train_loss -0.0323 +2026-04-08 18:27:03.134479: val_loss -0.0322 +2026-04-08 18:27:03.136836: Pseudo dice [0.092, 0.569, 0.2999, 0.0, 0.0, 0.2972, 0.661] +2026-04-08 18:27:03.138906: Epoch time: 102.92 s +2026-04-08 18:27:03.141219: Yayy! New best EMA pseudo Dice: 0.1706 +2026-04-08 18:27:07.047391: +2026-04-08 18:27:07.049798: Epoch 70 +2026-04-08 18:27:07.051824: Current learning rate: 0.00937 +2026-04-08 18:28:50.367762: train_loss -0.0371 +2026-04-08 18:28:50.373197: val_loss 0.017 +2026-04-08 18:28:50.375000: Pseudo dice [0.0077, 0.8222, 0.2561, 0.0, 0.0, 0.0848, 0.4943] +2026-04-08 18:28:50.377770: Epoch time: 103.32 s +2026-04-08 18:28:50.380943: Yayy! 
New best EMA pseudo Dice: 0.1773 +2026-04-08 18:28:53.324668: +2026-04-08 18:28:53.326581: Epoch 71 +2026-04-08 18:28:53.328246: Current learning rate: 0.00936 +2026-04-08 18:30:36.712717: train_loss -0.0263 +2026-04-08 18:30:36.718569: val_loss 0.0017 +2026-04-08 18:30:36.720802: Pseudo dice [0.0745, 0.0308, 0.2516, 0.0, 0.0, 0.1308, 0.194] +2026-04-08 18:30:36.722724: Epoch time: 103.39 s +2026-04-08 18:30:37.811672: +2026-04-08 18:30:37.813788: Epoch 72 +2026-04-08 18:30:37.816082: Current learning rate: 0.00935 +2026-04-08 18:32:21.810703: train_loss -0.0345 +2026-04-08 18:32:21.816819: val_loss 0.0123 +2026-04-08 18:32:21.818559: Pseudo dice [0.1341, 0.2839, 0.0458, 0.0, 0.0, 0.3406, 0.2745] +2026-04-08 18:32:21.821257: Epoch time: 104.0 s +2026-04-08 18:32:22.908602: +2026-04-08 18:32:22.910668: Epoch 73 +2026-04-08 18:32:22.912317: Current learning rate: 0.00934 +2026-04-08 18:34:06.430608: train_loss -0.0405 +2026-04-08 18:34:06.437544: val_loss -0.0346 +2026-04-08 18:34:06.440039: Pseudo dice [0.1056, 0.562, 0.2228, 0.0, 0.0, 0.1669, 0.4481] +2026-04-08 18:34:06.443184: Epoch time: 103.53 s +2026-04-08 18:34:07.562202: +2026-04-08 18:34:07.564153: Epoch 74 +2026-04-08 18:34:07.565860: Current learning rate: 0.00933 +2026-04-08 18:35:51.601103: train_loss -0.0302 +2026-04-08 18:35:51.607712: val_loss -0.0155 +2026-04-08 18:35:51.610348: Pseudo dice [0.1142, 0.2922, 0.0936, 0.0, 0.0, 0.1038, 0.5488] +2026-04-08 18:35:51.612787: Epoch time: 104.04 s +2026-04-08 18:35:52.731292: +2026-04-08 18:35:52.733223: Epoch 75 +2026-04-08 18:35:52.735376: Current learning rate: 0.00932 +2026-04-08 18:37:36.860667: train_loss -0.0388 +2026-04-08 18:37:36.879575: val_loss 0.0103 +2026-04-08 18:37:36.887207: Pseudo dice [0.0698, 0.1799, 0.5015, 0.0, 0.0, 0.2472, 0.4765] +2026-04-08 18:37:36.891086: Epoch time: 104.13 s +2026-04-08 18:37:37.995449: +2026-04-08 18:37:38.002533: Epoch 76 +2026-04-08 18:37:38.005112: Current learning rate: 0.00931 +2026-04-08 18:39:21.480313: 
train_loss -0.03 +2026-04-08 18:39:21.508065: val_loss -0.0121 +2026-04-08 18:39:21.510633: Pseudo dice [0.2898, 0.2716, 0.3979, 0.0, 0.0, 0.1667, 0.4075] +2026-04-08 18:39:21.512884: Epoch time: 103.49 s +2026-04-08 18:39:21.514886: Yayy! New best EMA pseudo Dice: 0.18 +2026-04-08 18:39:24.503896: +2026-04-08 18:39:24.505616: Epoch 77 +2026-04-08 18:39:24.507059: Current learning rate: 0.0093 +2026-04-08 18:41:07.610363: train_loss -0.0452 +2026-04-08 18:41:07.615985: val_loss -0.0488 +2026-04-08 18:41:07.618336: Pseudo dice [0.2053, 0.1442, 0.3579, 0.0, 0.0, 0.1564, 0.5716] +2026-04-08 18:41:07.620828: Epoch time: 103.11 s +2026-04-08 18:41:07.623738: Yayy! New best EMA pseudo Dice: 0.1825 +2026-04-08 18:41:10.447207: +2026-04-08 18:41:10.449335: Epoch 78 +2026-04-08 18:41:10.452756: Current learning rate: 0.0093 +2026-04-08 18:42:55.266597: train_loss -0.0499 +2026-04-08 18:42:55.273449: val_loss -0.0372 +2026-04-08 18:42:55.275856: Pseudo dice [0.5024, 0.1431, 0.3639, 0.0, 0.0, 0.073, 0.5222] +2026-04-08 18:42:55.282451: Epoch time: 104.82 s +2026-04-08 18:42:55.285191: Yayy! 
New best EMA pseudo Dice: 0.1872 +2026-04-08 18:42:58.247916: +2026-04-08 18:42:58.249608: Epoch 79 +2026-04-08 18:42:58.252669: Current learning rate: 0.00929 +2026-04-08 18:44:41.856898: train_loss -0.0419 +2026-04-08 18:44:41.866919: val_loss -0.0192 +2026-04-08 18:44:41.869075: Pseudo dice [0.0439, 0.16, 0.3793, 0.0, 0.0, 0.1188, 0.4219] +2026-04-08 18:44:41.871851: Epoch time: 103.61 s +2026-04-08 18:44:42.979714: +2026-04-08 18:44:42.982210: Epoch 80 +2026-04-08 18:44:42.985410: Current learning rate: 0.00928 +2026-04-08 18:46:26.331966: train_loss -0.032 +2026-04-08 18:46:26.342554: val_loss -0.0402 +2026-04-08 18:46:26.344655: Pseudo dice [0.0158, 0.1071, 0.4069, 0.0, 0.0, 0.1309, 0.5061] +2026-04-08 18:46:26.346951: Epoch time: 103.36 s +2026-04-08 18:46:27.444641: +2026-04-08 18:46:27.448240: Epoch 81 +2026-04-08 18:46:27.451059: Current learning rate: 0.00927 +2026-04-08 18:48:10.786726: train_loss -0.0456 +2026-04-08 18:48:10.795848: val_loss -0.0308 +2026-04-08 18:48:10.798798: Pseudo dice [0.2882, 0.1924, 0.3575, 0.0, 0.0, 0.2156, 0.5647] +2026-04-08 18:48:10.800880: Epoch time: 103.35 s +2026-04-08 18:48:10.803072: Yayy! New best EMA pseudo Dice: 0.1876 +2026-04-08 18:48:13.792274: +2026-04-08 18:48:13.793901: Epoch 82 +2026-04-08 18:48:13.795323: Current learning rate: 0.00926 +2026-04-08 18:49:57.303169: train_loss -0.0427 +2026-04-08 18:49:57.309899: val_loss -0.0348 +2026-04-08 18:49:57.312612: Pseudo dice [0.4409, 0.4154, 0.0061, 0.0, 0.0, 0.1095, 0.6065] +2026-04-08 18:49:57.315584: Epoch time: 103.51 s +2026-04-08 18:49:57.318084: Yayy! 
New best EMA pseudo Dice: 0.1914 +2026-04-08 18:50:00.132739: +2026-04-08 18:50:00.134183: Epoch 83 +2026-04-08 18:50:00.135866: Current learning rate: 0.00925 +2026-04-08 18:51:43.156175: train_loss -0.0519 +2026-04-08 18:51:43.162515: val_loss -0.0499 +2026-04-08 18:51:43.169182: Pseudo dice [0.2612, 0.1385, 0.3552, 0.0, 0.0, 0.0642, 0.5538] +2026-04-08 18:51:43.172349: Epoch time: 103.03 s +2026-04-08 18:51:43.174495: Yayy! New best EMA pseudo Dice: 0.1918 +2026-04-08 18:51:46.121092: +2026-04-08 18:51:46.123182: Epoch 84 +2026-04-08 18:51:46.125075: Current learning rate: 0.00924 +2026-04-08 18:53:29.242421: train_loss -0.0511 +2026-04-08 18:53:29.247725: val_loss -0.0098 +2026-04-08 18:53:29.249865: Pseudo dice [0.3624, 0.5898, 0.23, 0.0, 0.0, 0.4619, 0.4748] +2026-04-08 18:53:29.252081: Epoch time: 103.12 s +2026-04-08 18:53:29.254699: Yayy! New best EMA pseudo Dice: 0.2029 +2026-04-08 18:53:32.028716: +2026-04-08 18:53:32.030662: Epoch 85 +2026-04-08 18:53:32.032089: Current learning rate: 0.00923 +2026-04-08 18:55:14.704811: train_loss -0.0567 +2026-04-08 18:55:14.713639: val_loss -0.0284 +2026-04-08 18:55:14.717178: Pseudo dice [0.1746, 0.6488, 0.4329, 0.0, 0.0, 0.488, 0.5164] +2026-04-08 18:55:14.719168: Epoch time: 102.68 s +2026-04-08 18:55:14.722894: Yayy! New best EMA pseudo Dice: 0.2149 +2026-04-08 18:55:17.661771: +2026-04-08 18:55:17.663956: Epoch 86 +2026-04-08 18:55:17.665828: Current learning rate: 0.00922 +2026-04-08 18:57:00.463976: train_loss -0.0582 +2026-04-08 18:57:00.473865: val_loss -0.0264 +2026-04-08 18:57:00.475454: Pseudo dice [0.3892, 0.1665, 0.3166, 0.0, 0.0, 0.1004, 0.6562] +2026-04-08 18:57:00.477671: Epoch time: 102.81 s +2026-04-08 18:57:00.479839: Yayy! 
New best EMA pseudo Dice: 0.2167 +2026-04-08 18:57:03.302386: +2026-04-08 18:57:03.304530: Epoch 87 +2026-04-08 18:57:03.305958: Current learning rate: 0.00921 +2026-04-08 18:58:47.341665: train_loss -0.0559 +2026-04-08 18:58:47.348067: val_loss -0.0566 +2026-04-08 18:58:47.350489: Pseudo dice [0.2875, 0.3132, 0.616, 0.0002, 0.0, 0.1535, 0.4709] +2026-04-08 18:58:47.352383: Epoch time: 104.04 s +2026-04-08 18:58:47.354260: Yayy! New best EMA pseudo Dice: 0.2213 +2026-04-08 18:58:50.060619: +2026-04-08 18:58:50.062399: Epoch 88 +2026-04-08 18:58:50.063771: Current learning rate: 0.0092 +2026-04-08 19:00:32.465128: train_loss -0.0748 +2026-04-08 19:00:32.490704: val_loss -0.0543 +2026-04-08 19:00:32.494708: Pseudo dice [0.3649, 0.2876, 0.2991, 0.0, 0.0, 0.3417, 0.4891] +2026-04-08 19:00:32.507276: Epoch time: 102.41 s +2026-04-08 19:00:32.508863: Yayy! New best EMA pseudo Dice: 0.2247 +2026-04-08 19:00:35.333203: +2026-04-08 19:00:35.336117: Epoch 89 +2026-04-08 19:00:35.338023: Current learning rate: 0.0092 +2026-04-08 19:02:17.480211: train_loss -0.0593 +2026-04-08 19:02:17.486860: val_loss -0.013 +2026-04-08 19:02:17.488812: Pseudo dice [0.129, 0.6192, 0.0004, 0.0, 0.0, 0.0971, 0.5537] +2026-04-08 19:02:17.490542: Epoch time: 102.15 s +2026-04-08 19:02:18.557416: +2026-04-08 19:02:18.559863: Epoch 90 +2026-04-08 19:02:18.561651: Current learning rate: 0.00919 +2026-04-08 19:04:01.660289: train_loss -0.0521 +2026-04-08 19:04:01.666719: val_loss -0.016 +2026-04-08 19:04:01.669903: Pseudo dice [0.2215, 0.5112, 0.4992, 0.0, 0.0, 0.0752, 0.2133] +2026-04-08 19:04:01.671829: Epoch time: 103.11 s +2026-04-08 19:04:02.711328: +2026-04-08 19:04:02.713830: Epoch 91 +2026-04-08 19:04:02.715886: Current learning rate: 0.00918 +2026-04-08 19:05:45.315330: train_loss -0.0539 +2026-04-08 19:05:45.324220: val_loss -0.0506 +2026-04-08 19:05:45.325996: Pseudo dice [0.2865, 0.4251, 0.4064, 0.0004, 0.0, 0.1479, 0.5843] +2026-04-08 19:05:45.328023: Epoch time: 102.61 s +2026-04-08 
19:05:45.329824: Yayy! New best EMA pseudo Dice: 0.226 +2026-04-08 19:05:48.074977: +2026-04-08 19:05:48.077406: Epoch 92 +2026-04-08 19:05:48.079910: Current learning rate: 0.00917 +2026-04-08 19:07:30.483884: train_loss -0.0656 +2026-04-08 19:07:30.492220: val_loss -0.0192 +2026-04-08 19:07:30.494470: Pseudo dice [0.1743, 0.117, 0.4732, 0.0, 0.0, 0.0967, 0.6486] +2026-04-08 19:07:30.496661: Epoch time: 102.41 s +2026-04-08 19:07:31.528584: +2026-04-08 19:07:31.530603: Epoch 93 +2026-04-08 19:07:31.533350: Current learning rate: 0.00916 +2026-04-08 19:09:13.970050: train_loss -0.0604 +2026-04-08 19:09:13.976320: val_loss -0.0233 +2026-04-08 19:09:13.979544: Pseudo dice [0.1019, 0.5675, 0.4823, 0.0001, 0.0, 0.0644, 0.3136] +2026-04-08 19:09:13.982384: Epoch time: 102.44 s +2026-04-08 19:09:15.005555: +2026-04-08 19:09:15.007304: Epoch 94 +2026-04-08 19:09:15.009433: Current learning rate: 0.00915 +2026-04-08 19:10:57.753390: train_loss -0.0736 +2026-04-08 19:10:57.759708: val_loss -0.0636 +2026-04-08 19:10:57.762250: Pseudo dice [0.4492, 0.6092, 0.4094, 0.0, 0.0, 0.1319, 0.5911] +2026-04-08 19:10:57.764330: Epoch time: 102.75 s +2026-04-08 19:10:57.766356: Yayy! New best EMA pseudo Dice: 0.2332 +2026-04-08 19:11:00.514023: +2026-04-08 19:11:00.515753: Epoch 95 +2026-04-08 19:11:00.517183: Current learning rate: 0.00914 +2026-04-08 19:12:42.871808: train_loss -0.0684 +2026-04-08 19:12:42.877022: val_loss -0.001 +2026-04-08 19:12:42.878922: Pseudo dice [0.1837, 0.6091, 0.4234, 0.0006, 0.0, 0.0617, 0.1573] +2026-04-08 19:12:42.881147: Epoch time: 102.36 s +2026-04-08 19:12:43.910006: +2026-04-08 19:12:43.912337: Epoch 96 +2026-04-08 19:12:43.914182: Current learning rate: 0.00913 +2026-04-08 19:14:25.911587: train_loss -0.0511 +2026-04-08 19:14:25.918120: val_loss -0.0475 +2026-04-08 19:14:25.921643: Pseudo dice [0.3176, 0.0852, 0.417, 0.0016, 0.0, 0.4168, 0.5925] +2026-04-08 19:14:25.923876: Epoch time: 102.0 s +2026-04-08 19:14:25.927069: Yayy! 
New best EMA pseudo Dice: 0.2335 +2026-04-08 19:14:28.698889: +2026-04-08 19:14:28.701405: Epoch 97 +2026-04-08 19:14:28.703447: Current learning rate: 0.00912 +2026-04-08 19:16:11.533626: train_loss -0.0703 +2026-04-08 19:16:11.541458: val_loss -0.0497 +2026-04-08 19:16:11.544363: Pseudo dice [0.3704, 0.8219, 0.3027, 0.0, 0.0, 0.191, 0.5276] +2026-04-08 19:16:11.546773: Epoch time: 102.84 s +2026-04-08 19:16:11.548602: Yayy! New best EMA pseudo Dice: 0.2417 +2026-04-08 19:16:14.326859: +2026-04-08 19:16:14.328400: Epoch 98 +2026-04-08 19:16:14.329987: Current learning rate: 0.00911 +2026-04-08 19:17:57.267621: train_loss -0.0589 +2026-04-08 19:17:57.273476: val_loss -0.0094 +2026-04-08 19:17:57.275314: Pseudo dice [0.3221, 0.6103, 0.4316, 0.0348, 0.0, 0.0733, 0.5344] +2026-04-08 19:17:57.277715: Epoch time: 102.94 s +2026-04-08 19:17:57.279726: Yayy! New best EMA pseudo Dice: 0.2462 +2026-04-08 19:17:59.952037: +2026-04-08 19:17:59.953597: Epoch 99 +2026-04-08 19:17:59.955009: Current learning rate: 0.0091 +2026-04-08 19:19:42.317302: train_loss -0.0601 +2026-04-08 19:19:42.325726: val_loss -0.0155 +2026-04-08 19:19:42.327686: Pseudo dice [0.1369, 0.0486, 0.5517, 0.0001, 0.0, 0.2264, 0.3604] +2026-04-08 19:19:42.330029: Epoch time: 102.37 s +2026-04-08 19:19:45.139213: +2026-04-08 19:19:45.141554: Epoch 100 +2026-04-08 19:19:45.143212: Current learning rate: 0.0091 +2026-04-08 19:21:28.263311: train_loss -0.0758 +2026-04-08 19:21:28.270914: val_loss -0.0648 +2026-04-08 19:21:28.274098: Pseudo dice [0.3723, 0.5242, 0.4717, 0.0, 0.0, 0.2425, 0.1983] +2026-04-08 19:21:28.276255: Epoch time: 103.13 s +2026-04-08 19:21:29.309167: +2026-04-08 19:21:29.311334: Epoch 101 +2026-04-08 19:21:29.313185: Current learning rate: 0.00909 +2026-04-08 19:23:12.195045: train_loss -0.0784 +2026-04-08 19:23:12.201985: val_loss -0.0617 +2026-04-08 19:23:12.204110: Pseudo dice [0.1834, 0.6793, 0.4562, 0.0006, 0.0, 0.1411, 0.5529] +2026-04-08 19:23:12.206456: Epoch time: 102.89 s 
+2026-04-08 19:23:12.210309: Yayy! New best EMA pseudo Dice: 0.2469 +2026-04-08 19:23:15.059118: +2026-04-08 19:23:15.061207: Epoch 102 +2026-04-08 19:23:15.062828: Current learning rate: 0.00908 +2026-04-08 19:24:58.069406: train_loss -0.0819 +2026-04-08 19:24:58.077195: val_loss -0.0332 +2026-04-08 19:24:58.082818: Pseudo dice [0.3211, 0.1748, 0.3031, 0.0023, 0.0, 0.1486, 0.3845] +2026-04-08 19:24:58.086459: Epoch time: 103.01 s +2026-04-08 19:24:59.140571: +2026-04-08 19:24:59.143773: Epoch 103 +2026-04-08 19:24:59.146736: Current learning rate: 0.00907 +2026-04-08 19:26:43.835904: train_loss -0.0657 +2026-04-08 19:26:43.846727: val_loss -0.0553 +2026-04-08 19:26:43.851887: Pseudo dice [0.1116, 0.5634, 0.5438, 0.1039, 0.0, 0.1682, 0.597] +2026-04-08 19:26:43.856223: Epoch time: 104.7 s +2026-04-08 19:26:43.859348: Yayy! New best EMA pseudo Dice: 0.2469 +2026-04-08 19:26:46.840173: +2026-04-08 19:26:46.842009: Epoch 104 +2026-04-08 19:26:46.843574: Current learning rate: 0.00906 +2026-04-08 19:28:29.643703: train_loss -0.0636 +2026-04-08 19:28:29.648968: val_loss -0.0447 +2026-04-08 19:28:29.651547: Pseudo dice [0.1295, 0.6373, 0.4426, 0.0088, 0.0, 0.534, 0.741] +2026-04-08 19:28:29.653786: Epoch time: 102.81 s +2026-04-08 19:28:29.655985: Yayy! New best EMA pseudo Dice: 0.2579 +2026-04-08 19:28:33.460122: +2026-04-08 19:28:33.462818: Epoch 105 +2026-04-08 19:28:33.464277: Current learning rate: 0.00905 +2026-04-08 19:30:17.045061: train_loss -0.0717 +2026-04-08 19:30:17.052736: val_loss -0.0265 +2026-04-08 19:30:17.054913: Pseudo dice [0.281, 0.4925, 0.4658, 0.0003, 0.0, 0.237, 0.4502] +2026-04-08 19:30:17.057237: Epoch time: 103.59 s +2026-04-08 19:30:17.059409: Yayy! 
New best EMA pseudo Dice: 0.2596 +2026-04-08 19:30:19.936648: +2026-04-08 19:30:19.940042: Epoch 106 +2026-04-08 19:30:19.942122: Current learning rate: 0.00904 +2026-04-08 19:32:03.208059: train_loss -0.0739 +2026-04-08 19:32:03.216566: val_loss -0.0518 +2026-04-08 19:32:03.218816: Pseudo dice [0.2938, 0.6669, 0.3338, 0.0, 0.0, 0.3629, 0.4117] +2026-04-08 19:32:03.221732: Epoch time: 103.27 s +2026-04-08 19:32:03.224245: Yayy! New best EMA pseudo Dice: 0.2632 +2026-04-08 19:32:06.155030: +2026-04-08 19:32:06.156617: Epoch 107 +2026-04-08 19:32:06.158264: Current learning rate: 0.00903 +2026-04-08 19:33:50.410641: train_loss -0.0849 +2026-04-08 19:33:50.417061: val_loss -0.051 +2026-04-08 19:33:50.419820: Pseudo dice [0.201, 0.1738, 0.2512, 0.1208, 0.0, 0.1831, 0.5915] +2026-04-08 19:33:50.422410: Epoch time: 104.26 s +2026-04-08 19:33:51.485681: +2026-04-08 19:33:51.487583: Epoch 108 +2026-04-08 19:33:51.489432: Current learning rate: 0.00902 +2026-04-08 19:35:34.953885: train_loss -0.0679 +2026-04-08 19:35:34.960434: val_loss -0.053 +2026-04-08 19:35:34.962836: Pseudo dice [0.2844, 0.3575, 0.2836, 0.0846, 0.0, 0.2624, 0.4586] +2026-04-08 19:35:34.965176: Epoch time: 103.47 s +2026-04-08 19:35:36.038200: +2026-04-08 19:35:36.040167: Epoch 109 +2026-04-08 19:35:36.043206: Current learning rate: 0.00901 +2026-04-08 19:37:19.733496: train_loss -0.0796 +2026-04-08 19:37:19.741155: val_loss -0.0797 +2026-04-08 19:37:19.744063: Pseudo dice [0.4508, 0.1566, 0.498, 0.0607, 0.0, 0.4679, 0.603] +2026-04-08 19:37:19.747022: Epoch time: 103.7 s +2026-04-08 19:37:19.758349: Yayy! 
New best EMA pseudo Dice: 0.2637 +2026-04-08 19:37:22.852821: +2026-04-08 19:37:22.858643: Epoch 110 +2026-04-08 19:37:22.861698: Current learning rate: 0.009 +2026-04-08 19:39:06.205381: train_loss -0.0761 +2026-04-08 19:39:06.213492: val_loss -0.0471 +2026-04-08 19:39:06.216100: Pseudo dice [0.2862, 0.7251, 0.5541, 0.0062, 0.0, 0.3592, 0.4392] +2026-04-08 19:39:06.218661: Epoch time: 103.36 s +2026-04-08 19:39:06.221562: Yayy! New best EMA pseudo Dice: 0.2712 +2026-04-08 19:39:09.017522: +2026-04-08 19:39:09.019375: Epoch 111 +2026-04-08 19:39:09.022368: Current learning rate: 0.009 +2026-04-08 19:40:52.279859: train_loss -0.0731 +2026-04-08 19:40:52.284754: val_loss -0.0364 +2026-04-08 19:40:52.286960: Pseudo dice [0.2058, 0.4154, 0.3438, 0.0378, 0.0, 0.2198, 0.5671] +2026-04-08 19:40:52.289038: Epoch time: 103.27 s +2026-04-08 19:40:53.355556: +2026-04-08 19:40:53.357532: Epoch 112 +2026-04-08 19:40:53.359094: Current learning rate: 0.00899 +2026-04-08 19:42:37.136186: train_loss -0.0787 +2026-04-08 19:42:37.147523: val_loss -0.0531 +2026-04-08 19:42:37.152256: Pseudo dice [0.223, 0.6272, 0.4339, 0.0896, 0.0, 0.2814, 0.5205] +2026-04-08 19:42:37.154038: Epoch time: 103.78 s +2026-04-08 19:42:37.156494: Yayy! New best EMA pseudo Dice: 0.2737 +2026-04-08 19:42:39.982960: +2026-04-08 19:42:39.984928: Epoch 113 +2026-04-08 19:42:39.986524: Current learning rate: 0.00898 +2026-04-08 19:44:22.914648: train_loss -0.0607 +2026-04-08 19:44:22.921322: val_loss -0.0681 +2026-04-08 19:44:22.924308: Pseudo dice [0.0367, 0.7711, 0.6115, 0.003, 0.0, 0.4295, 0.197] +2026-04-08 19:44:22.926601: Epoch time: 102.93 s +2026-04-08 19:44:22.929051: Yayy! 
New best EMA pseudo Dice: 0.2756 +2026-04-08 19:44:25.706341: +2026-04-08 19:44:25.707998: Epoch 114 +2026-04-08 19:44:25.709559: Current learning rate: 0.00897 +2026-04-08 19:46:09.402134: train_loss -0.0779 +2026-04-08 19:46:09.416152: val_loss -0.0418 +2026-04-08 19:46:09.420738: Pseudo dice [0.5997, 0.1103, 0.4077, 0.0022, 0.0, 0.415, 0.4886] +2026-04-08 19:46:09.422898: Epoch time: 103.7 s +2026-04-08 19:46:09.425838: Yayy! New best EMA pseudo Dice: 0.277 +2026-04-08 19:46:12.196322: +2026-04-08 19:46:12.197986: Epoch 115 +2026-04-08 19:46:12.199755: Current learning rate: 0.00896 +2026-04-08 19:47:55.664292: train_loss -0.076 +2026-04-08 19:47:55.672860: val_loss -0.0462 +2026-04-08 19:47:55.675379: Pseudo dice [0.3144, 0.0301, 0.5967, 0.1095, 0.0, 0.1582, 0.4197] +2026-04-08 19:47:55.678212: Epoch time: 103.47 s +2026-04-08 19:47:56.761443: +2026-04-08 19:47:56.763674: Epoch 116 +2026-04-08 19:47:56.765611: Current learning rate: 0.00895 +2026-04-08 19:49:39.988054: train_loss -0.0897 +2026-04-08 19:49:39.995455: val_loss -0.0613 +2026-04-08 19:49:39.997783: Pseudo dice [0.4882, 0.4346, 0.54, 0.0016, 0.0, 0.2258, 0.4311] +2026-04-08 19:49:40.000252: Epoch time: 103.23 s +2026-04-08 19:49:41.078854: +2026-04-08 19:49:41.080962: Epoch 117 +2026-04-08 19:49:41.082718: Current learning rate: 0.00894 +2026-04-08 19:51:24.537133: train_loss -0.0812 +2026-04-08 19:51:24.544669: val_loss -0.0376 +2026-04-08 19:51:24.547664: Pseudo dice [0.4621, 0.5421, 0.291, 0.0, 0.0, 0.1318, 0.7587] +2026-04-08 19:51:24.549939: Epoch time: 103.46 s +2026-04-08 19:51:24.551857: Yayy! 
New best EMA pseudo Dice: 0.2793 +2026-04-08 19:51:27.433336: +2026-04-08 19:51:27.435169: Epoch 118 +2026-04-08 19:51:27.437248: Current learning rate: 0.00893 +2026-04-08 19:53:10.303797: train_loss -0.0758 +2026-04-08 19:53:10.313503: val_loss -0.0603 +2026-04-08 19:53:10.316234: Pseudo dice [0.4824, 0.7072, 0.3539, 0.2148, 0.0, 0.12, 0.4616] +2026-04-08 19:53:10.319578: Epoch time: 102.87 s +2026-04-08 19:53:10.322140: Yayy! New best EMA pseudo Dice: 0.2848 +2026-04-08 19:53:13.098868: +2026-04-08 19:53:13.112759: Epoch 119 +2026-04-08 19:53:13.114338: Current learning rate: 0.00892 +2026-04-08 19:54:56.784739: train_loss -0.0865 +2026-04-08 19:54:56.791920: val_loss -0.075 +2026-04-08 19:54:56.796273: Pseudo dice [0.3541, 0.1377, 0.6032, 0.0091, 0.0, 0.1321, 0.6378] +2026-04-08 19:54:56.799317: Epoch time: 103.69 s +2026-04-08 19:54:57.864413: +2026-04-08 19:54:57.878334: Epoch 120 +2026-04-08 19:54:57.884556: Current learning rate: 0.00891 +2026-04-08 19:56:41.605311: train_loss -0.0914 +2026-04-08 19:56:41.640423: val_loss -0.0797 +2026-04-08 19:56:41.642787: Pseudo dice [0.1404, 0.4613, 0.5103, 0.0001, 0.0, 0.3459, 0.6713] +2026-04-08 19:56:41.645209: Epoch time: 103.74 s +2026-04-08 19:56:41.647773: Yayy! 
New best EMA pseudo Dice: 0.2852 +2026-04-08 19:56:44.708659: +2026-04-08 19:56:44.710444: Epoch 121 +2026-04-08 19:56:44.712614: Current learning rate: 0.0089 +2026-04-08 19:58:27.492705: train_loss -0.0798 +2026-04-08 19:58:27.499541: val_loss -0.014 +2026-04-08 19:58:27.501419: Pseudo dice [0.1365, 0.4674, 0.2658, 0.0739, 0.0, 0.3908, 0.4834] +2026-04-08 19:58:27.503946: Epoch time: 102.79 s +2026-04-08 19:58:28.570508: +2026-04-08 19:58:28.572320: Epoch 122 +2026-04-08 19:58:28.573809: Current learning rate: 0.00889 +2026-04-08 20:00:12.449316: train_loss -0.0873 +2026-04-08 20:00:12.455893: val_loss -0.0652 +2026-04-08 20:00:12.457853: Pseudo dice [0.5939, 0.776, 0.5415, 0.0006, 0.0004, 0.1313, 0.7685] +2026-04-08 20:00:12.459908: Epoch time: 103.88 s +2026-04-08 20:00:12.462198: Yayy! New best EMA pseudo Dice: 0.2945 +2026-04-08 20:00:15.247316: +2026-04-08 20:00:15.249229: Epoch 123 +2026-04-08 20:00:15.250796: Current learning rate: 0.00889 +2026-04-08 20:01:57.462975: train_loss -0.1005 +2026-04-08 20:01:57.467808: val_loss -0.0545 +2026-04-08 20:01:57.469658: Pseudo dice [0.5088, 0.1132, 0.3384, 0.0018, 0.0, 0.1059, 0.6597] +2026-04-08 20:01:57.471305: Epoch time: 102.22 s +2026-04-08 20:01:58.543224: +2026-04-08 20:01:58.545155: Epoch 124 +2026-04-08 20:01:58.546674: Current learning rate: 0.00888 +2026-04-08 20:03:41.271335: train_loss -0.0882 +2026-04-08 20:03:41.279011: val_loss -0.0627 +2026-04-08 20:03:41.280945: Pseudo dice [0.651, 0.7803, 0.524, 0.0, 0.2738, 0.3592, 0.7977] +2026-04-08 20:03:41.283116: Epoch time: 102.73 s +2026-04-08 20:03:41.285930: Yayy! 
New best EMA pseudo Dice: 0.3092 +2026-04-08 20:03:44.091449: +2026-04-08 20:03:44.093048: Epoch 125 +2026-04-08 20:03:44.094408: Current learning rate: 0.00887 +2026-04-08 20:05:26.998562: train_loss -0.1079 +2026-04-08 20:05:27.005379: val_loss -0.1132 +2026-04-08 20:05:27.009352: Pseudo dice [0.4261, 0.5676, 0.7082, 0.0529, 0.1363, 0.4706, 0.7712] +2026-04-08 20:05:27.011754: Epoch time: 102.91 s +2026-04-08 20:05:27.013855: Yayy! New best EMA pseudo Dice: 0.323 +2026-04-08 20:05:29.804654: +2026-04-08 20:05:29.806244: Epoch 126 +2026-04-08 20:05:29.807827: Current learning rate: 0.00886 +2026-04-08 20:07:12.962320: train_loss -0.1045 +2026-04-08 20:07:12.968403: val_loss -0.0555 +2026-04-08 20:07:12.976052: Pseudo dice [0.4722, 0.4505, 0.5508, 0.0, 0.3021, 0.4642, 0.4942] +2026-04-08 20:07:12.983212: Epoch time: 103.16 s +2026-04-08 20:07:12.985195: Yayy! New best EMA pseudo Dice: 0.3298 +2026-04-08 20:07:15.399509: +2026-04-08 20:07:15.401512: Epoch 127 +2026-04-08 20:07:15.402989: Current learning rate: 0.00885 +2026-04-08 20:08:58.739833: train_loss -0.1006 +2026-04-08 20:08:58.746160: val_loss -0.0812 +2026-04-08 20:08:58.748536: Pseudo dice [0.2797, 0.5367, 0.5779, 0.0286, 0.3223, 0.2033, 0.5395] +2026-04-08 20:08:58.755095: Epoch time: 103.34 s +2026-04-08 20:08:58.758740: Yayy! 
New best EMA pseudo Dice: 0.3323 +2026-04-08 20:09:01.723373: +2026-04-08 20:09:01.725049: Epoch 128 +2026-04-08 20:09:01.726876: Current learning rate: 0.00884 +2026-04-08 20:10:44.975601: train_loss -0.0897 +2026-04-08 20:10:44.982900: val_loss -0.0487 +2026-04-08 20:10:44.985830: Pseudo dice [0.51, 0.283, 0.3824, 0.1302, 0.3663, 0.2562, 0.3692] +2026-04-08 20:10:44.987945: Epoch time: 103.26 s +2026-04-08 20:10:46.102203: +2026-04-08 20:10:46.107850: Epoch 129 +2026-04-08 20:10:46.111005: Current learning rate: 0.00883 +2026-04-08 20:12:28.594821: train_loss -0.0903 +2026-04-08 20:12:28.602154: val_loss -0.0488 +2026-04-08 20:12:28.606048: Pseudo dice [0.3467, 0.0614, 0.4606, 0.1159, 0.1968, 0.2291, 0.536] +2026-04-08 20:12:28.609020: Epoch time: 102.5 s +2026-04-08 20:12:29.694784: +2026-04-08 20:12:29.697230: Epoch 130 +2026-04-08 20:12:29.699394: Current learning rate: 0.00882 +2026-04-08 20:14:12.511672: train_loss -0.1046 +2026-04-08 20:14:12.517996: val_loss -0.071 +2026-04-08 20:14:12.519930: Pseudo dice [0.4304, 0.2987, 0.5342, 0.0155, 0.1063, 0.4629, 0.7289] +2026-04-08 20:14:12.522217: Epoch time: 102.82 s +2026-04-08 20:14:13.595895: +2026-04-08 20:14:13.597838: Epoch 131 +2026-04-08 20:14:13.599774: Current learning rate: 0.00881 +2026-04-08 20:15:56.637441: train_loss -0.0968 +2026-04-08 20:15:56.646359: val_loss -0.0892 +2026-04-08 20:15:56.649194: Pseudo dice [0.3692, 0.0996, 0.5457, 0.0069, 0.2823, 0.3167, 0.733] +2026-04-08 20:15:56.651834: Epoch time: 103.04 s +2026-04-08 20:15:57.726110: +2026-04-08 20:15:57.727829: Epoch 132 +2026-04-08 20:15:57.729448: Current learning rate: 0.0088 +2026-04-08 20:17:41.044111: train_loss -0.1066 +2026-04-08 20:17:41.049599: val_loss -0.0709 +2026-04-08 20:17:41.052113: Pseudo dice [0.5812, 0.6305, 0.4494, 0.0559, 0.0978, 0.5808, 0.4473] +2026-04-08 20:17:41.054178: Epoch time: 103.32 s +2026-04-08 20:17:41.056623: Yayy! 
New best EMA pseudo Dice: 0.3387 +2026-04-08 20:17:43.833036: +2026-04-08 20:17:43.834761: Epoch 133 +2026-04-08 20:17:43.836204: Current learning rate: 0.00879 +2026-04-08 20:19:26.411855: train_loss -0.089 +2026-04-08 20:19:26.421237: val_loss -0.1029 +2026-04-08 20:19:26.424324: Pseudo dice [0.6008, 0.659, 0.5954, 0.0649, 0.2221, 0.1713, 0.4823] +2026-04-08 20:19:26.427218: Epoch time: 102.58 s +2026-04-08 20:19:26.429587: Yayy! New best EMA pseudo Dice: 0.3448 +2026-04-08 20:19:29.299476: +2026-04-08 20:19:29.301563: Epoch 134 +2026-04-08 20:19:29.303131: Current learning rate: 0.00879 +2026-04-08 20:21:12.593646: train_loss -0.0989 +2026-04-08 20:21:12.599838: val_loss -0.0911 +2026-04-08 20:21:12.602121: Pseudo dice [0.2001, 0.5493, 0.5816, 0.0031, 0.3482, 0.1511, 0.5137] +2026-04-08 20:21:12.605744: Epoch time: 103.3 s +2026-04-08 20:21:13.687663: +2026-04-08 20:21:13.689949: Epoch 135 +2026-04-08 20:21:13.693064: Current learning rate: 0.00878 +2026-04-08 20:22:56.359188: train_loss -0.1002 +2026-04-08 20:22:56.364362: val_loss -0.097 +2026-04-08 20:22:56.366224: Pseudo dice [0.3279, 0.4601, 0.6106, 0.0127, 0.204, 0.1891, 0.6776] +2026-04-08 20:22:56.368210: Epoch time: 102.67 s +2026-04-08 20:22:56.370267: Yayy! New best EMA pseudo Dice: 0.3449 +2026-04-08 20:22:58.870023: +2026-04-08 20:22:58.872290: Epoch 136 +2026-04-08 20:22:58.873899: Current learning rate: 0.00877 +2026-04-08 20:24:41.904196: train_loss -0.094 +2026-04-08 20:24:41.909285: val_loss -0.0679 +2026-04-08 20:24:41.911168: Pseudo dice [0.3265, 0.8029, 0.5708, 0.0, 0.3065, 0.254, 0.6952] +2026-04-08 20:24:41.913898: Epoch time: 103.04 s +2026-04-08 20:24:41.915812: Yayy! 
New best EMA pseudo Dice: 0.3527 +2026-04-08 20:24:44.738714: +2026-04-08 20:24:44.740421: Epoch 137 +2026-04-08 20:24:44.741897: Current learning rate: 0.00876 +2026-04-08 20:26:27.976754: train_loss -0.0937 +2026-04-08 20:26:27.983589: val_loss -0.079 +2026-04-08 20:26:27.985754: Pseudo dice [0.3002, 0.1485, 0.4351, 0.0021, 0.4128, 0.4233, 0.4151] +2026-04-08 20:26:27.987949: Epoch time: 103.24 s +2026-04-08 20:26:29.070260: +2026-04-08 20:26:29.072463: Epoch 138 +2026-04-08 20:26:29.075710: Current learning rate: 0.00875 +2026-04-08 20:28:11.903234: train_loss -0.1066 +2026-04-08 20:28:11.909390: val_loss -0.0666 +2026-04-08 20:28:11.912211: Pseudo dice [0.2343, 0.2545, 0.3186, 0.0, 0.1908, 0.4606, 0.6738] +2026-04-08 20:28:11.914397: Epoch time: 102.84 s +2026-04-08 20:28:13.017543: +2026-04-08 20:28:13.019763: Epoch 139 +2026-04-08 20:28:13.022684: Current learning rate: 0.00874 +2026-04-08 20:29:56.455137: train_loss -0.1173 +2026-04-08 20:29:56.461014: val_loss -0.0878 +2026-04-08 20:29:56.463312: Pseudo dice [0.1103, 0.1784, 0.4883, 0.1756, 0.2745, 0.3954, 0.6872] +2026-04-08 20:29:56.465989: Epoch time: 103.44 s +2026-04-08 20:29:58.649614: +2026-04-08 20:29:58.653464: Epoch 140 +2026-04-08 20:29:58.655161: Current learning rate: 0.00873 +2026-04-08 20:31:41.755455: train_loss -0.0956 +2026-04-08 20:31:41.761777: val_loss -0.035 +2026-04-08 20:31:41.764147: Pseudo dice [0.2634, 0.1733, 0.4667, 0.003, 0.2709, 0.1206, 0.5736] +2026-04-08 20:31:41.766422: Epoch time: 103.11 s +2026-04-08 20:31:42.869381: +2026-04-08 20:31:42.871111: Epoch 141 +2026-04-08 20:31:42.873122: Current learning rate: 0.00872 +2026-04-08 20:33:25.537911: train_loss -0.1079 +2026-04-08 20:33:25.545825: val_loss -0.0639 +2026-04-08 20:33:25.548647: Pseudo dice [0.3537, 0.5191, 0.6063, 0.0894, 0.3538, 0.5412, 0.3108] +2026-04-08 20:33:25.558249: Epoch time: 102.67 s +2026-04-08 20:33:26.659209: +2026-04-08 20:33:26.661269: Epoch 142 +2026-04-08 20:33:26.663589: Current learning rate: 
0.00871 +2026-04-08 20:35:10.132894: train_loss -0.1025 +2026-04-08 20:35:10.144215: val_loss -0.0909 +2026-04-08 20:35:10.148432: Pseudo dice [0.3797, 0.6569, 0.4427, 0.1745, 0.3319, 0.4017, 0.5998] +2026-04-08 20:35:10.152102: Epoch time: 103.48 s +2026-04-08 20:35:11.241375: +2026-04-08 20:35:11.243435: Epoch 143 +2026-04-08 20:35:11.244986: Current learning rate: 0.0087 +2026-04-08 20:36:54.338276: train_loss -0.122 +2026-04-08 20:36:54.344983: val_loss -0.0923 +2026-04-08 20:36:54.347340: Pseudo dice [0.4317, 0.2436, 0.7139, 0.0073, 0.0575, 0.556, 0.6801] +2026-04-08 20:36:54.350628: Epoch time: 103.1 s +2026-04-08 20:36:54.352712: Yayy! New best EMA pseudo Dice: 0.353 +2026-04-08 20:36:57.299943: +2026-04-08 20:36:57.301838: Epoch 144 +2026-04-08 20:36:57.303365: Current learning rate: 0.00869 +2026-04-08 20:38:39.963391: train_loss -0.1093 +2026-04-08 20:38:39.976033: val_loss -0.0903 +2026-04-08 20:38:39.978675: Pseudo dice [0.2138, 0.4686, 0.6948, 0.2218, 0.2887, 0.1386, 0.4332] +2026-04-08 20:38:39.982239: Epoch time: 102.67 s +2026-04-08 20:38:41.089422: +2026-04-08 20:38:41.092342: Epoch 145 +2026-04-08 20:38:41.097398: Current learning rate: 0.00868 +2026-04-08 20:40:23.919445: train_loss -0.1119 +2026-04-08 20:40:23.926207: val_loss -0.0778 +2026-04-08 20:40:23.928161: Pseudo dice [0.3328, 0.0982, 0.3598, 0.0702, 0.2142, 0.2956, 0.5908] +2026-04-08 20:40:23.933703: Epoch time: 102.83 s +2026-04-08 20:40:25.052203: +2026-04-08 20:40:25.054102: Epoch 146 +2026-04-08 20:40:25.056093: Current learning rate: 0.00868 +2026-04-08 20:42:08.309895: train_loss -0.1018 +2026-04-08 20:42:08.317034: val_loss -0.0567 +2026-04-08 20:42:08.319408: Pseudo dice [0.4291, 0.1344, 0.4172, 0.0, 0.0754, 0.4521, 0.7617] +2026-04-08 20:42:08.321651: Epoch time: 103.26 s +2026-04-08 20:42:09.406587: +2026-04-08 20:42:09.408546: Epoch 147 +2026-04-08 20:42:09.409979: Current learning rate: 0.00867 +2026-04-08 20:43:51.715315: train_loss -0.1122 +2026-04-08 20:43:51.727345: 
val_loss -0.0933 +2026-04-08 20:43:51.730387: Pseudo dice [0.4823, 0.5371, 0.6339, 0.2173, 0.4015, 0.4183, 0.4243] +2026-04-08 20:43:51.734629: Epoch time: 102.31 s +2026-04-08 20:43:51.737195: Yayy! New best EMA pseudo Dice: 0.3536 +2026-04-08 20:43:54.646902: +2026-04-08 20:43:54.648885: Epoch 148 +2026-04-08 20:43:54.650493: Current learning rate: 0.00866 +2026-04-08 20:45:38.001443: train_loss -0.1055 +2026-04-08 20:45:38.007348: val_loss -0.0901 +2026-04-08 20:45:38.009898: Pseudo dice [0.1628, 0.2304, 0.6167, 0.0, 0.2997, 0.4219, 0.6653] +2026-04-08 20:45:38.011716: Epoch time: 103.36 s +2026-04-08 20:45:39.108943: +2026-04-08 20:45:39.111643: Epoch 149 +2026-04-08 20:45:39.114406: Current learning rate: 0.00865 +2026-04-08 20:47:22.917763: train_loss -0.1146 +2026-04-08 20:47:22.923707: val_loss -0.0789 +2026-04-08 20:47:22.926463: Pseudo dice [0.4297, 0.8732, 0.6044, 0.0, 0.2631, 0.3674, 0.6082] +2026-04-08 20:47:22.929080: Epoch time: 103.81 s +2026-04-08 20:47:24.652717: Yayy! New best EMA pseudo Dice: 0.3622 +2026-04-08 20:47:27.399374: +2026-04-08 20:47:27.401284: Epoch 150 +2026-04-08 20:47:27.402908: Current learning rate: 0.00864 +2026-04-08 20:49:09.507782: train_loss -0.1252 +2026-04-08 20:49:09.515408: val_loss -0.0735 +2026-04-08 20:49:09.517688: Pseudo dice [0.3621, 0.5662, 0.6123, 0.0624, 0.2421, 0.155, 0.6902] +2026-04-08 20:49:09.519869: Epoch time: 102.11 s +2026-04-08 20:49:09.521588: Yayy! 
New best EMA pseudo Dice: 0.3644 +2026-04-08 20:49:12.331034: +2026-04-08 20:49:12.333147: Epoch 151 +2026-04-08 20:49:12.335773: Current learning rate: 0.00863 +2026-04-08 20:50:55.842639: train_loss -0.1133 +2026-04-08 20:50:55.849684: val_loss -0.0495 +2026-04-08 20:50:55.854054: Pseudo dice [0.487, 0.27, 0.5247, 0.1222, 0.3049, 0.1651, 0.6658] +2026-04-08 20:50:55.857280: Epoch time: 103.51 s +2026-04-08 20:50:56.958204: +2026-04-08 20:50:56.961886: Epoch 152 +2026-04-08 20:50:56.966204: Current learning rate: 0.00862 +2026-04-08 20:52:42.439280: train_loss -0.1205 +2026-04-08 20:52:42.447066: val_loss -0.1025 +2026-04-08 20:52:42.449981: Pseudo dice [0.3132, 0.1084, 0.7266, 0.0064, 0.3332, 0.2292, 0.4892] +2026-04-08 20:52:42.452492: Epoch time: 105.48 s +2026-04-08 20:52:43.562181: +2026-04-08 20:52:43.566932: Epoch 153 +2026-04-08 20:52:43.569966: Current learning rate: 0.00861 +2026-04-08 20:54:28.674963: train_loss -0.1249 +2026-04-08 20:54:28.696159: val_loss -0.0651 +2026-04-08 20:54:28.704430: Pseudo dice [0.5569, 0.8318, 0.4764, 0.0804, 0.0099, 0.104, 0.4053] +2026-04-08 20:54:28.707942: Epoch time: 105.12 s +2026-04-08 20:54:29.842769: +2026-04-08 20:54:29.845734: Epoch 154 +2026-04-08 20:54:29.848516: Current learning rate: 0.0086 +2026-04-08 20:56:13.970667: train_loss -0.1001 +2026-04-08 20:56:13.976167: val_loss -0.0565 +2026-04-08 20:56:13.978591: Pseudo dice [0.4951, 0.8281, 0.6775, 0.1523, 0.3755, 0.4978, 0.2781] +2026-04-08 20:56:13.982054: Epoch time: 104.13 s +2026-04-08 20:56:13.985795: Yayy! New best EMA pseudo Dice: 0.3699 +2026-04-08 20:56:17.032117: +2026-04-08 20:56:17.035477: Epoch 155 +2026-04-08 20:56:17.037069: Current learning rate: 0.00859 +2026-04-08 20:58:01.526963: train_loss -0.1078 +2026-04-08 20:58:01.536383: val_loss -0.09 +2026-04-08 20:58:01.541054: Pseudo dice [0.1187, 0.8811, 0.5407, 0.0507, 0.4271, 0.5482, 0.7071] +2026-04-08 20:58:01.545359: Epoch time: 104.5 s +2026-04-08 20:58:01.548873: Yayy! 
New best EMA pseudo Dice: 0.3797 +2026-04-08 20:58:04.744239: +2026-04-08 20:58:04.747173: Epoch 156 +2026-04-08 20:58:04.750817: Current learning rate: 0.00858 +2026-04-08 20:59:49.141815: train_loss -0.1083 +2026-04-08 20:59:49.149761: val_loss -0.0726 +2026-04-08 20:59:49.152814: Pseudo dice [0.582, 0.8294, 0.5469, 0.0444, 0.2191, 0.3849, 0.3834] +2026-04-08 20:59:49.155568: Epoch time: 104.4 s +2026-04-08 20:59:49.160226: Yayy! New best EMA pseudo Dice: 0.3845 +2026-04-08 20:59:53.131938: +2026-04-08 20:59:53.154938: Epoch 157 +2026-04-08 20:59:53.169799: Current learning rate: 0.00858 +2026-04-08 21:01:36.836100: train_loss -0.1039 +2026-04-08 21:01:36.841828: val_loss -0.085 +2026-04-08 21:01:36.843930: Pseudo dice [0.3422, 0.0955, 0.652, 0.2605, 0.4108, 0.1528, 0.4294] +2026-04-08 21:01:36.846037: Epoch time: 103.71 s +2026-04-08 21:01:37.976157: +2026-04-08 21:01:37.979772: Epoch 158 +2026-04-08 21:01:37.982306: Current learning rate: 0.00857 +2026-04-08 21:03:21.134218: train_loss -0.1154 +2026-04-08 21:03:21.139993: val_loss -0.072 +2026-04-08 21:03:21.142048: Pseudo dice [0.2258, 0.4437, 0.5759, 0.0889, 0.398, 0.2735, 0.6058] +2026-04-08 21:03:21.143973: Epoch time: 103.16 s +2026-04-08 21:03:22.283901: +2026-04-08 21:03:22.286122: Epoch 159 +2026-04-08 21:03:22.289377: Current learning rate: 0.00856 +2026-04-08 21:05:05.359153: train_loss -0.1134 +2026-04-08 21:05:05.365962: val_loss -0.0723 +2026-04-08 21:05:05.367901: Pseudo dice [0.4914, 0.8814, 0.5669, 0.0139, 0.308, 0.2039, 0.4239] +2026-04-08 21:05:05.369560: Epoch time: 103.08 s +2026-04-08 21:05:06.488321: +2026-04-08 21:05:06.490868: Epoch 160 +2026-04-08 21:05:06.492995: Current learning rate: 0.00855 +2026-04-08 21:06:48.775996: train_loss -0.1138 +2026-04-08 21:06:48.781702: val_loss -0.0658 +2026-04-08 21:06:48.783456: Pseudo dice [0.1011, 0.5201, 0.6493, 0.0684, 0.2845, 0.1449, 0.6474] +2026-04-08 21:06:48.785237: Epoch time: 102.29 s +2026-04-08 21:06:49.901132: +2026-04-08 
21:06:49.902699: Epoch 161 +2026-04-08 21:06:49.904195: Current learning rate: 0.00854 +2026-04-08 21:08:33.445189: train_loss -0.1241 +2026-04-08 21:08:33.458629: val_loss -0.1117 +2026-04-08 21:08:33.466537: Pseudo dice [0.4365, 0.8362, 0.6233, 0.3872, 0.2117, 0.1994, 0.5672] +2026-04-08 21:08:33.469662: Epoch time: 103.55 s +2026-04-08 21:08:33.471797: Yayy! New best EMA pseudo Dice: 0.3873 +2026-04-08 21:08:36.369193: +2026-04-08 21:08:36.371228: Epoch 162 +2026-04-08 21:08:36.373292: Current learning rate: 0.00853 +2026-04-08 21:10:19.845267: train_loss -0.1177 +2026-04-08 21:10:19.852258: val_loss -0.058 +2026-04-08 21:10:19.855742: Pseudo dice [0.4481, 0.6606, 0.48, 0.214, 0.2037, 0.1915, 0.556] +2026-04-08 21:10:19.858484: Epoch time: 103.48 s +2026-04-08 21:10:19.860640: Yayy! New best EMA pseudo Dice: 0.3879 +2026-04-08 21:10:22.530818: +2026-04-08 21:10:22.532349: Epoch 163 +2026-04-08 21:10:22.533702: Current learning rate: 0.00852 +2026-04-08 21:12:05.293196: train_loss -0.1256 +2026-04-08 21:12:05.301335: val_loss -0.0677 +2026-04-08 21:12:05.303922: Pseudo dice [0.3003, 0.2127, 0.5918, 0.0191, 0.3345, 0.1465, 0.6898] +2026-04-08 21:12:05.305815: Epoch time: 102.77 s +2026-04-08 21:12:06.418697: +2026-04-08 21:12:06.420278: Epoch 164 +2026-04-08 21:12:06.421865: Current learning rate: 0.00851 +2026-04-08 21:13:49.688964: train_loss -0.1306 +2026-04-08 21:13:49.696684: val_loss -0.1108 +2026-04-08 21:13:49.699224: Pseudo dice [0.4632, 0.6416, 0.5772, 0.1423, 0.5199, 0.3805, 0.6639] +2026-04-08 21:13:49.701717: Epoch time: 103.27 s +2026-04-08 21:13:49.703576: Yayy! 
New best EMA pseudo Dice: 0.3921 +2026-04-08 21:13:52.517328: +2026-04-08 21:13:52.518834: Epoch 165 +2026-04-08 21:13:52.520385: Current learning rate: 0.0085 +2026-04-08 21:15:35.511199: train_loss -0.1175 +2026-04-08 21:15:35.518479: val_loss -0.0881 +2026-04-08 21:15:35.521252: Pseudo dice [0.439, 0.7806, 0.5708, 0.2236, 0.4331, 0.2416, 0.5366] +2026-04-08 21:15:35.524141: Epoch time: 103.0 s +2026-04-08 21:15:35.525978: Yayy! New best EMA pseudo Dice: 0.399 +2026-04-08 21:15:38.388142: +2026-04-08 21:15:38.390038: Epoch 166 +2026-04-08 21:15:38.391742: Current learning rate: 0.00849 +2026-04-08 21:17:20.973529: train_loss -0.132 +2026-04-08 21:17:20.981770: val_loss -0.1024 +2026-04-08 21:17:20.984250: Pseudo dice [0.6413, 0.1097, 0.6193, 0.1519, 0.4356, 0.6268, 0.5147] +2026-04-08 21:17:20.989614: Epoch time: 102.59 s +2026-04-08 21:17:20.992079: Yayy! New best EMA pseudo Dice: 0.4033 +2026-04-08 21:17:23.922256: +2026-04-08 21:17:23.924344: Epoch 167 +2026-04-08 21:17:23.925963: Current learning rate: 0.00848 +2026-04-08 21:19:07.582230: train_loss -0.1311 +2026-04-08 21:19:07.612884: val_loss -0.0591 +2026-04-08 21:19:07.616388: Pseudo dice [0.1238, 0.3727, 0.5734, 0.0, 0.2571, 0.3441, 0.4059] +2026-04-08 21:19:07.621471: Epoch time: 103.66 s +2026-04-08 21:19:08.711367: +2026-04-08 21:19:08.714072: Epoch 168 +2026-04-08 21:19:08.718071: Current learning rate: 0.00847 +2026-04-08 21:20:52.239274: train_loss -0.1239 +2026-04-08 21:20:52.244323: val_loss -0.0884 +2026-04-08 21:20:52.246318: Pseudo dice [0.1716, 0.6796, 0.4557, 0.3559, 0.299, 0.3718, 0.7024] +2026-04-08 21:20:52.249305: Epoch time: 103.53 s +2026-04-08 21:20:53.384450: +2026-04-08 21:20:53.386426: Epoch 169 +2026-04-08 21:20:53.388193: Current learning rate: 0.00847 +2026-04-08 21:22:36.578818: train_loss -0.1123 +2026-04-08 21:22:36.586264: val_loss -0.1348 +2026-04-08 21:22:36.589004: Pseudo dice [0.1027, 0.3281, 0.7773, 0.2472, 0.4901, 0.311, 0.7681] +2026-04-08 21:22:36.591238: Epoch time: 
103.2 s +2026-04-08 21:22:37.685380: +2026-04-08 21:22:37.687663: Epoch 170 +2026-04-08 21:22:37.690103: Current learning rate: 0.00846 +2026-04-08 21:24:20.971269: train_loss -0.1448 +2026-04-08 21:24:20.977302: val_loss -0.109 +2026-04-08 21:24:20.979465: Pseudo dice [0.3944, 0.7847, 0.6103, 0.1232, 0.3436, 0.4928, 0.5838] +2026-04-08 21:24:20.981458: Epoch time: 103.29 s +2026-04-08 21:24:20.984017: Yayy! New best EMA pseudo Dice: 0.4079 +2026-04-08 21:24:23.798471: +2026-04-08 21:24:23.800015: Epoch 171 +2026-04-08 21:24:23.801538: Current learning rate: 0.00845 +2026-04-08 21:26:06.715483: train_loss -0.1162 +2026-04-08 21:26:06.727301: val_loss -0.0885 +2026-04-08 21:26:06.729748: Pseudo dice [0.3465, 0.639, 0.6472, 0.1146, 0.467, 0.2906, 0.7197] +2026-04-08 21:26:06.732862: Epoch time: 102.92 s +2026-04-08 21:26:06.735172: Yayy! New best EMA pseudo Dice: 0.4132 +2026-04-08 21:26:09.339072: +2026-04-08 21:26:09.341380: Epoch 172 +2026-04-08 21:26:09.343022: Current learning rate: 0.00844 +2026-04-08 21:27:52.508141: train_loss -0.1285 +2026-04-08 21:27:52.514401: val_loss -0.1138 +2026-04-08 21:27:52.516215: Pseudo dice [0.4107, 0.4345, 0.683, 0.4319, 0.4176, 0.126, 0.5712] +2026-04-08 21:27:52.518966: Epoch time: 103.17 s +2026-04-08 21:27:52.522590: Yayy! New best EMA pseudo Dice: 0.4158 +2026-04-08 21:27:55.292304: +2026-04-08 21:27:55.293776: Epoch 173 +2026-04-08 21:27:55.295211: Current learning rate: 0.00843 +2026-04-08 21:29:38.798275: train_loss -0.1319 +2026-04-08 21:29:38.805967: val_loss -0.1254 +2026-04-08 21:29:38.808040: Pseudo dice [0.5258, 0.7296, 0.6955, 0.1112, 0.5071, 0.3234, 0.6707] +2026-04-08 21:29:38.813670: Epoch time: 103.51 s +2026-04-08 21:29:38.816484: Yayy! 
New best EMA pseudo Dice: 0.4251 +2026-04-08 21:29:42.479681: +2026-04-08 21:29:42.483033: Epoch 174 +2026-04-08 21:29:42.484690: Current learning rate: 0.00842 +2026-04-08 21:31:25.326210: train_loss -0.1217 +2026-04-08 21:31:25.331979: val_loss -0.0992 +2026-04-08 21:31:25.334127: Pseudo dice [0.341, 0.6319, 0.5478, 0.0815, 0.4138, 0.2094, 0.7286] +2026-04-08 21:31:25.336304: Epoch time: 102.85 s +2026-04-08 21:31:26.447762: +2026-04-08 21:31:26.450361: Epoch 175 +2026-04-08 21:31:26.453510: Current learning rate: 0.00841 +2026-04-08 21:33:09.400751: train_loss -0.1357 +2026-04-08 21:33:09.406555: val_loss -0.0865 +2026-04-08 21:33:09.408888: Pseudo dice [0.8276, 0.6946, 0.7317, 0.1869, 0.3873, 0.1493, 0.3917] +2026-04-08 21:33:09.411823: Epoch time: 102.96 s +2026-04-08 21:33:09.413613: Yayy! New best EMA pseudo Dice: 0.4304 +2026-04-08 21:33:12.166255: +2026-04-08 21:33:12.168518: Epoch 176 +2026-04-08 21:33:12.170105: Current learning rate: 0.0084 +2026-04-08 21:34:54.647601: train_loss -0.1287 +2026-04-08 21:34:54.654038: val_loss -0.0952 +2026-04-08 21:34:54.656636: Pseudo dice [0.4948, 0.6434, 0.7053, 0.3225, 0.1976, 0.251, 0.6349] +2026-04-08 21:34:54.659323: Epoch time: 102.48 s +2026-04-08 21:34:54.661611: Yayy! 
New best EMA pseudo Dice: 0.4338 +2026-04-08 21:34:57.554964: +2026-04-08 21:34:57.556608: Epoch 177 +2026-04-08 21:34:57.559283: Current learning rate: 0.00839 +2026-04-08 21:36:41.201050: train_loss -0.124 +2026-04-08 21:36:41.209227: val_loss -0.095 +2026-04-08 21:36:41.213258: Pseudo dice [0.3064, 0.5044, 0.6161, 0.0991, 0.4202, 0.431, 0.4484] +2026-04-08 21:36:41.217010: Epoch time: 103.65 s +2026-04-08 21:36:42.335326: +2026-04-08 21:36:42.337938: Epoch 178 +2026-04-08 21:36:42.340676: Current learning rate: 0.00838 +2026-04-08 21:38:25.742278: train_loss -0.1177 +2026-04-08 21:38:25.753178: val_loss -0.1006 +2026-04-08 21:38:25.755570: Pseudo dice [0.5125, 0.8152, 0.6336, 0.1845, 0.2707, 0.2385, 0.7312] +2026-04-08 21:38:25.759294: Epoch time: 103.41 s +2026-04-08 21:38:25.761525: Yayy! New best EMA pseudo Dice: 0.4361 +2026-04-08 21:38:28.672385: +2026-04-08 21:38:28.674423: Epoch 179 +2026-04-08 21:38:28.676841: Current learning rate: 0.00837 +2026-04-08 21:40:10.861916: train_loss -0.1281 +2026-04-08 21:40:10.868014: val_loss -0.1058 +2026-04-08 21:40:10.870721: Pseudo dice [0.1428, 0.861, 0.6632, 0.1265, 0.4518, 0.4788, 0.6171] +2026-04-08 21:40:10.872489: Epoch time: 102.19 s +2026-04-08 21:40:10.875336: Yayy! New best EMA pseudo Dice: 0.4402 +2026-04-08 21:40:13.704303: +2026-04-08 21:40:13.705863: Epoch 180 +2026-04-08 21:40:13.707422: Current learning rate: 0.00836 +2026-04-08 21:41:56.855072: train_loss -0.1137 +2026-04-08 21:41:56.861237: val_loss -0.0872 +2026-04-08 21:41:56.863216: Pseudo dice [0.3167, 0.7643, 0.649, 0.2526, 0.3625, 0.2247, 0.719] +2026-04-08 21:41:56.865300: Epoch time: 103.15 s +2026-04-08 21:41:56.867363: Yayy! 
New best EMA pseudo Dice: 0.4432 +2026-04-08 21:41:59.530068: +2026-04-08 21:41:59.532723: Epoch 181 +2026-04-08 21:41:59.534410: Current learning rate: 0.00836 +2026-04-08 21:43:42.667496: train_loss -0.1269 +2026-04-08 21:43:42.674061: val_loss -0.0677 +2026-04-08 21:43:42.676267: Pseudo dice [0.0986, 0.7584, 0.1074, 0.1745, 0.2487, 0.1209, 0.7085] +2026-04-08 21:43:42.679363: Epoch time: 103.14 s +2026-04-08 21:43:43.788581: +2026-04-08 21:43:43.790501: Epoch 182 +2026-04-08 21:43:43.792279: Current learning rate: 0.00835 +2026-04-08 21:45:26.773122: train_loss -0.1262 +2026-04-08 21:45:26.779526: val_loss -0.1162 +2026-04-08 21:45:26.781649: Pseudo dice [0.3753, 0.799, 0.5722, 0.5861, 0.1939, 0.3961, 0.8215] +2026-04-08 21:45:26.785077: Epoch time: 102.99 s +2026-04-08 21:45:27.884475: +2026-04-08 21:45:27.887343: Epoch 183 +2026-04-08 21:45:27.889897: Current learning rate: 0.00834 +2026-04-08 21:47:12.673702: train_loss -0.1317 +2026-04-08 21:47:12.683291: val_loss -0.0775 +2026-04-08 21:47:12.691647: Pseudo dice [0.428, 0.1302, 0.485, 0.0648, 0.3936, 0.4167, 0.6342] +2026-04-08 21:47:12.695201: Epoch time: 104.79 s +2026-04-08 21:47:13.803706: +2026-04-08 21:47:13.806157: Epoch 184 +2026-04-08 21:47:13.809556: Current learning rate: 0.00833 +2026-04-08 21:48:56.908453: train_loss -0.1337 +2026-04-08 21:48:56.916128: val_loss -0.0975 +2026-04-08 21:48:56.918559: Pseudo dice [0.2594, 0.1289, 0.6258, 0.5288, 0.1291, 0.4762, 0.4661] +2026-04-08 21:48:56.921582: Epoch time: 103.11 s +2026-04-08 21:48:58.072740: +2026-04-08 21:48:58.074943: Epoch 185 +2026-04-08 21:48:58.079344: Current learning rate: 0.00832 +2026-04-08 21:50:43.819913: train_loss -0.1191 +2026-04-08 21:50:43.827410: val_loss -0.102 +2026-04-08 21:50:43.829967: Pseudo dice [0.4783, 0.6906, 0.7477, 0.0114, 0.1916, 0.212, 0.8329] +2026-04-08 21:50:43.832516: Epoch time: 105.75 s +2026-04-08 21:50:44.930936: +2026-04-08 21:50:44.932882: Epoch 186 +2026-04-08 21:50:44.936781: Current learning rate: 
0.00831 +2026-04-08 21:52:27.808942: train_loss -0.1137 +2026-04-08 21:52:27.817091: val_loss -0.0957 +2026-04-08 21:52:27.820296: Pseudo dice [0.55, 0.3611, 0.5478, 0.2279, 0.3711, 0.0746, 0.2324] +2026-04-08 21:52:27.824672: Epoch time: 102.88 s +2026-04-08 21:52:28.935493: +2026-04-08 21:52:28.937542: Epoch 187 +2026-04-08 21:52:28.940505: Current learning rate: 0.0083 +2026-04-08 21:54:12.018418: train_loss -0.1257 +2026-04-08 21:54:12.025621: val_loss -0.1139 +2026-04-08 21:54:12.029147: Pseudo dice [0.3413, 0.6631, 0.6628, 0.2338, 0.5543, 0.5429, 0.6626] +2026-04-08 21:54:12.032027: Epoch time: 103.09 s +2026-04-08 21:54:13.115431: +2026-04-08 21:54:13.119732: Epoch 188 +2026-04-08 21:54:13.122148: Current learning rate: 0.00829 +2026-04-08 21:55:56.083364: train_loss -0.1331 +2026-04-08 21:55:56.091483: val_loss -0.0443 +2026-04-08 21:55:56.095172: Pseudo dice [0.2461, 0.903, 0.5245, 0.2755, 0.2398, 0.0763, 0.6742] +2026-04-08 21:55:56.097901: Epoch time: 102.97 s +2026-04-08 21:55:57.217017: +2026-04-08 21:55:57.219151: Epoch 189 +2026-04-08 21:55:57.220605: Current learning rate: 0.00828 +2026-04-08 21:57:40.597369: train_loss -0.1154 +2026-04-08 21:57:40.603950: val_loss -0.0583 +2026-04-08 21:57:40.607186: Pseudo dice [0.3591, 0.4804, 0.6264, 0.0627, 0.251, 0.3098, 0.4183] +2026-04-08 21:57:40.617598: Epoch time: 103.38 s +2026-04-08 21:57:41.742501: +2026-04-08 21:57:41.746227: Epoch 190 +2026-04-08 21:57:41.753032: Current learning rate: 0.00827 +2026-04-08 21:59:24.937155: train_loss -0.1342 +2026-04-08 21:59:24.944354: val_loss -0.0774 +2026-04-08 21:59:24.947932: Pseudo dice [0.1498, 0.8651, 0.5165, 0.2081, 0.3359, 0.2484, 0.5232] +2026-04-08 21:59:24.950581: Epoch time: 103.2 s +2026-04-08 21:59:26.070699: +2026-04-08 21:59:26.073477: Epoch 191 +2026-04-08 21:59:26.075572: Current learning rate: 0.00826 +2026-04-08 22:01:08.810564: train_loss -0.1371 +2026-04-08 22:01:08.817814: val_loss -0.1348 +2026-04-08 22:01:08.820119: Pseudo dice [0.5551, 
0.5051, 0.6854, 0.4604, 0.3769, 0.4155, 0.6533] +2026-04-08 22:01:08.822296: Epoch time: 102.74 s +2026-04-08 22:01:11.163684: +2026-04-08 22:01:11.166058: Epoch 192 +2026-04-08 22:01:11.168489: Current learning rate: 0.00825 +2026-04-08 22:02:54.253021: train_loss -0.1396 +2026-04-08 22:02:54.258959: val_loss -0.1051 +2026-04-08 22:02:54.261314: Pseudo dice [0.2917, 0.5706, 0.6808, 0.4676, 0.2555, 0.55, 0.6279] +2026-04-08 22:02:54.262967: Epoch time: 103.09 s +2026-04-08 22:02:55.396391: +2026-04-08 22:02:55.398407: Epoch 193 +2026-04-08 22:02:55.400126: Current learning rate: 0.00824 +2026-04-08 22:04:38.876571: train_loss -0.137 +2026-04-08 22:04:38.883813: val_loss -0.1082 +2026-04-08 22:04:38.886155: Pseudo dice [0.3387, 0.7859, 0.7641, 0.4705, 0.3666, 0.1429, 0.7395] +2026-04-08 22:04:38.888871: Epoch time: 103.48 s +2026-04-08 22:04:38.891126: Yayy! New best EMA pseudo Dice: 0.445 +2026-04-08 22:04:41.851053: +2026-04-08 22:04:41.854582: Epoch 194 +2026-04-08 22:04:41.856186: Current learning rate: 0.00824 +2026-04-08 22:06:25.245862: train_loss -0.1333 +2026-04-08 22:06:25.254042: val_loss -0.1085 +2026-04-08 22:06:25.257994: Pseudo dice [0.4487, 0.7037, 0.263, 0.0906, 0.4673, 0.4647, 0.7368] +2026-04-08 22:06:25.260482: Epoch time: 103.4 s +2026-04-08 22:06:25.263180: Yayy! 
New best EMA pseudo Dice: 0.4459 +2026-04-08 22:06:28.115019: +2026-04-08 22:06:28.116993: Epoch 195 +2026-04-08 22:06:28.118576: Current learning rate: 0.00823 +2026-04-08 22:08:11.611760: train_loss -0.1352 +2026-04-08 22:08:11.618770: val_loss -0.0776 +2026-04-08 22:08:11.621405: Pseudo dice [0.3945, 0.5272, 0.5169, 0.1259, 0.444, 0.2588, 0.7724] +2026-04-08 22:08:11.623482: Epoch time: 103.5 s +2026-04-08 22:08:12.736525: +2026-04-08 22:08:12.738666: Epoch 196 +2026-04-08 22:08:12.740754: Current learning rate: 0.00822 +2026-04-08 22:09:56.466099: train_loss -0.1479 +2026-04-08 22:09:56.474473: val_loss -0.1188 +2026-04-08 22:09:56.477288: Pseudo dice [0.5168, 0.4537, 0.6078, 0.4521, 0.5703, 0.6048, 0.7674] +2026-04-08 22:09:56.480525: Epoch time: 103.73 s +2026-04-08 22:09:56.483246: Yayy! New best EMA pseudo Dice: 0.457 +2026-04-08 22:09:59.518250: +2026-04-08 22:09:59.521187: Epoch 197 +2026-04-08 22:09:59.524717: Current learning rate: 0.00821 +2026-04-08 22:11:43.646995: train_loss -0.1386 +2026-04-08 22:11:43.656290: val_loss -0.078 +2026-04-08 22:11:43.660292: Pseudo dice [0.5657, 0.3007, 0.5913, 0.2341, 0.2579, 0.364, 0.1504] +2026-04-08 22:11:43.662681: Epoch time: 104.13 s +2026-04-08 22:11:44.792070: +2026-04-08 22:11:44.794664: Epoch 198 +2026-04-08 22:11:44.796577: Current learning rate: 0.0082 +2026-04-08 22:13:28.345307: train_loss -0.1246 +2026-04-08 22:13:28.351776: val_loss -0.0854 +2026-04-08 22:13:28.354733: Pseudo dice [0.4258, 0.6976, 0.5979, 0.0311, 0.3945, 0.4643, 0.5833] +2026-04-08 22:13:28.357985: Epoch time: 103.56 s +2026-04-08 22:13:29.481958: +2026-04-08 22:13:29.484601: Epoch 199 +2026-04-08 22:13:29.487682: Current learning rate: 0.00819 +2026-04-08 22:15:13.630131: train_loss -0.1303 +2026-04-08 22:15:13.637292: val_loss -0.0763 +2026-04-08 22:15:13.639665: Pseudo dice [0.1037, 0.3695, 0.4873, 0.4459, 0.2005, 0.2884, 0.5508] +2026-04-08 22:15:13.642606: Epoch time: 104.15 s +2026-04-08 22:15:16.521169: +2026-04-08 
22:15:16.523338: Epoch 200 +2026-04-08 22:15:16.525759: Current learning rate: 0.00818 +2026-04-08 22:17:00.135528: train_loss -0.1237 +2026-04-08 22:17:00.142195: val_loss -0.1109 +2026-04-08 22:17:00.144272: Pseudo dice [0.4067, 0.2236, 0.6981, 0.1604, 0.385, 0.375, 0.5659] +2026-04-08 22:17:00.146298: Epoch time: 103.62 s +2026-04-08 22:17:01.288302: +2026-04-08 22:17:01.294133: Epoch 201 +2026-04-08 22:17:01.298953: Current learning rate: 0.00817 +2026-04-08 22:18:44.518429: train_loss -0.1391 +2026-04-08 22:18:44.525674: val_loss -0.1036 +2026-04-08 22:18:44.528722: Pseudo dice [0.3938, 0.5226, 0.7039, 0.132, 0.1845, 0.7029, 0.6274] +2026-04-08 22:18:44.530764: Epoch time: 103.23 s +2026-04-08 22:18:45.679446: +2026-04-08 22:18:45.683971: Epoch 202 +2026-04-08 22:18:45.685867: Current learning rate: 0.00816 +2026-04-08 22:20:28.506156: train_loss -0.1472 +2026-04-08 22:20:28.516638: val_loss -0.1222 +2026-04-08 22:20:28.519876: Pseudo dice [0.5631, 0.1132, 0.595, 0.1282, 0.4747, 0.5616, 0.6035] +2026-04-08 22:20:28.522114: Epoch time: 102.83 s +2026-04-08 22:20:29.649810: +2026-04-08 22:20:29.652723: Epoch 203 +2026-04-08 22:20:29.655596: Current learning rate: 0.00815 +2026-04-08 22:22:12.955679: train_loss -0.1357 +2026-04-08 22:22:12.965451: val_loss -0.1215 +2026-04-08 22:22:12.968261: Pseudo dice [0.2196, 0.5776, 0.6136, 0.272, 0.5921, 0.7268, 0.8144] +2026-04-08 22:22:12.971505: Epoch time: 103.31 s +2026-04-08 22:22:14.103605: +2026-04-08 22:22:14.105668: Epoch 204 +2026-04-08 22:22:14.107625: Current learning rate: 0.00814 +2026-04-08 22:23:57.255722: train_loss -0.1342 +2026-04-08 22:23:57.264507: val_loss -0.1005 +2026-04-08 22:23:57.267634: Pseudo dice [0.4742, 0.2672, 0.6402, 0.2669, 0.2243, 0.5576, 0.8182] +2026-04-08 22:23:57.270177: Epoch time: 103.16 s +2026-04-08 22:23:58.466370: +2026-04-08 22:23:58.468471: Epoch 205 +2026-04-08 22:23:58.471702: Current learning rate: 0.00813 +2026-04-08 22:25:42.917941: train_loss -0.1411 +2026-04-08 
22:25:42.925279: val_loss -0.1226 +2026-04-08 22:25:42.928965: Pseudo dice [0.3908, 0.8818, 0.6696, 0.272, 0.3805, 0.3594, 0.7409] +2026-04-08 22:25:42.931753: Epoch time: 104.45 s +2026-04-08 22:25:42.935841: Yayy! New best EMA pseudo Dice: 0.4573 +2026-04-08 22:25:45.860482: +2026-04-08 22:25:45.862746: Epoch 206 +2026-04-08 22:25:45.864704: Current learning rate: 0.00813 +2026-04-08 22:27:30.013126: train_loss -0.142 +2026-04-08 22:27:30.021229: val_loss -0.114 +2026-04-08 22:27:30.024497: Pseudo dice [0.4385, 0.55, 0.6854, 0.2713, 0.384, 0.5465, 0.6309] +2026-04-08 22:27:30.027094: Epoch time: 104.16 s +2026-04-08 22:27:30.029432: Yayy! New best EMA pseudo Dice: 0.4617 +2026-04-08 22:27:32.852372: +2026-04-08 22:27:32.854991: Epoch 207 +2026-04-08 22:27:32.856643: Current learning rate: 0.00812 +2026-04-08 22:29:16.978544: train_loss -0.1332 +2026-04-08 22:29:16.989288: val_loss -0.0817 +2026-04-08 22:29:16.991496: Pseudo dice [0.2738, 0.6595, 0.6122, 0.4211, 0.2377, 0.4659, 0.0573] +2026-04-08 22:29:16.994494: Epoch time: 104.13 s +2026-04-08 22:29:18.036670: +2026-04-08 22:29:18.038901: Epoch 208 +2026-04-08 22:29:18.041093: Current learning rate: 0.00811 +2026-04-08 22:31:02.762980: train_loss -0.1327 +2026-04-08 22:31:02.773981: val_loss -0.1134 +2026-04-08 22:31:02.777566: Pseudo dice [0.285, 0.3714, 0.5988, 0.4562, 0.5634, 0.5655, 0.813] +2026-04-08 22:31:02.784350: Epoch time: 104.73 s +2026-04-08 22:31:03.823872: +2026-04-08 22:31:03.828159: Epoch 209 +2026-04-08 22:31:03.832459: Current learning rate: 0.0081 +2026-04-08 22:32:47.813854: train_loss -0.1352 +2026-04-08 22:32:47.819515: val_loss -0.1031 +2026-04-08 22:32:47.821350: Pseudo dice [0.4849, 0.3308, 0.4869, 0.1818, 0.2793, 0.1128, 0.618] +2026-04-08 22:32:47.823367: Epoch time: 103.99 s +2026-04-08 22:32:49.996260: +2026-04-08 22:32:49.998071: Epoch 210 +2026-04-08 22:32:49.999820: Current learning rate: 0.00809 +2026-04-08 22:34:33.349937: train_loss -0.1345 +2026-04-08 22:34:33.359232: 
val_loss -0.1149 +2026-04-08 22:34:33.361350: Pseudo dice [0.4705, 0.8535, 0.6621, 0.0695, 0.4296, 0.4102, 0.5774] +2026-04-08 22:34:33.363261: Epoch time: 103.36 s +2026-04-08 22:34:34.442724: +2026-04-08 22:34:34.444737: Epoch 211 +2026-04-08 22:34:34.446788: Current learning rate: 0.00808 +2026-04-08 22:36:16.863659: train_loss -0.1427 +2026-04-08 22:36:16.869382: val_loss -0.1254 +2026-04-08 22:36:16.871680: Pseudo dice [0.6131, 0.3816, 0.6943, 0.311, 0.2693, 0.4013, 0.7774] +2026-04-08 22:36:16.873322: Epoch time: 102.43 s +2026-04-08 22:36:17.939445: +2026-04-08 22:36:17.941746: Epoch 212 +2026-04-08 22:36:17.944596: Current learning rate: 0.00807 +2026-04-08 22:38:01.115602: train_loss -0.1482 +2026-04-08 22:38:01.123889: val_loss -0.1125 +2026-04-08 22:38:01.126461: Pseudo dice [0.4147, 0.193, 0.5204, 0.2956, 0.4014, 0.4611, 0.8039] +2026-04-08 22:38:01.128879: Epoch time: 103.18 s +2026-04-08 22:38:02.208017: +2026-04-08 22:38:02.210806: Epoch 213 +2026-04-08 22:38:02.213570: Current learning rate: 0.00806 +2026-04-08 22:39:45.807976: train_loss -0.1433 +2026-04-08 22:39:45.815419: val_loss -0.1221 +2026-04-08 22:39:45.817806: Pseudo dice [0.142, 0.8623, 0.7267, 0.3515, 0.3812, 0.6825, 0.5195] +2026-04-08 22:39:45.820563: Epoch time: 103.6 s +2026-04-08 22:39:45.823077: Yayy! New best EMA pseudo Dice: 0.4639 +2026-04-08 22:39:48.668551: +2026-04-08 22:39:48.670724: Epoch 214 +2026-04-08 22:39:48.672400: Current learning rate: 0.00805 +2026-04-08 22:41:32.222120: train_loss -0.1173 +2026-04-08 22:41:32.227842: val_loss -0.0749 +2026-04-08 22:41:32.230203: Pseudo dice [0.7728, 0.8634, 0.4548, 0.2663, 0.258, 0.1669, 0.5189] +2026-04-08 22:41:32.232706: Epoch time: 103.56 s +2026-04-08 22:41:32.235005: Yayy! 
New best EMA pseudo Dice: 0.4647 +2026-04-08 22:41:35.016656: +2026-04-08 22:41:35.018372: Epoch 215 +2026-04-08 22:41:35.020366: Current learning rate: 0.00804 +2026-04-08 22:43:18.719819: train_loss -0.1466 +2026-04-08 22:43:18.727144: val_loss -0.1191 +2026-04-08 22:43:18.729573: Pseudo dice [0.4019, 0.1971, 0.5948, 0.4346, 0.2451, 0.4449, 0.4806] +2026-04-08 22:43:18.731327: Epoch time: 103.71 s +2026-04-08 22:43:19.787628: +2026-04-08 22:43:19.791799: Epoch 216 +2026-04-08 22:43:19.793577: Current learning rate: 0.00803 +2026-04-08 22:45:03.318103: train_loss -0.1597 +2026-04-08 22:45:03.324332: val_loss -0.0925 +2026-04-08 22:45:03.327377: Pseudo dice [0.5538, 0.8697, 0.6934, 0.5208, 0.3889, 0.1576, 0.5373] +2026-04-08 22:45:03.331579: Epoch time: 103.53 s +2026-04-08 22:45:03.334558: Yayy! New best EMA pseudo Dice: 0.4655 +2026-04-08 22:45:06.284539: +2026-04-08 22:45:06.286171: Epoch 217 +2026-04-08 22:45:06.288135: Current learning rate: 0.00802 +2026-04-08 22:46:49.970544: train_loss -0.1519 +2026-04-08 22:46:49.976164: val_loss -0.114 +2026-04-08 22:46:49.978620: Pseudo dice [0.1656, 0.7861, 0.6053, 0.3265, 0.4928, 0.2822, 0.4107] +2026-04-08 22:46:49.980238: Epoch time: 103.69 s +2026-04-08 22:46:51.054007: +2026-04-08 22:46:51.056739: Epoch 218 +2026-04-08 22:46:51.058599: Current learning rate: 0.00801 +2026-04-08 22:48:34.651766: train_loss -0.137 +2026-04-08 22:48:34.666864: val_loss -0.1269 +2026-04-08 22:48:34.669345: Pseudo dice [0.3587, 0.879, 0.6128, 0.4444, 0.4023, 0.3399, 0.6075] +2026-04-08 22:48:34.672368: Epoch time: 103.6 s +2026-04-08 22:48:34.674305: Yayy! 
New best EMA pseudo Dice: 0.4686 +2026-04-08 22:48:37.635315: +2026-04-08 22:48:37.638338: Epoch 219 +2026-04-08 22:48:37.640040: Current learning rate: 0.00801 +2026-04-08 22:50:21.608428: train_loss -0.1407 +2026-04-08 22:50:21.614056: val_loss -0.1392 +2026-04-08 22:50:21.616610: Pseudo dice [0.3085, 0.5591, 0.5659, 0.1833, 0.2741, 0.4489, 0.7054] +2026-04-08 22:50:21.619032: Epoch time: 103.98 s +2026-04-08 22:50:22.712159: +2026-04-08 22:50:22.714367: Epoch 220 +2026-04-08 22:50:22.717127: Current learning rate: 0.008 +2026-04-08 22:52:06.153767: train_loss -0.1509 +2026-04-08 22:52:06.160236: val_loss -0.1368 +2026-04-08 22:52:06.165585: Pseudo dice [0.4252, 0.4401, 0.73, 0.2136, 0.3195, 0.654, 0.3907] +2026-04-08 22:52:06.168237: Epoch time: 103.44 s +2026-04-08 22:52:07.236415: +2026-04-08 22:52:07.238632: Epoch 221 +2026-04-08 22:52:07.240957: Current learning rate: 0.00799 +2026-04-08 22:53:50.789499: train_loss -0.1432 +2026-04-08 22:53:50.811928: val_loss -0.1016 +2026-04-08 22:53:50.814333: Pseudo dice [0.6247, 0.8742, 0.6556, 0.6281, 0.2734, 0.3413, 0.4365] +2026-04-08 22:53:50.816160: Epoch time: 103.56 s +2026-04-08 22:53:50.818022: Yayy! New best EMA pseudo Dice: 0.4724 +2026-04-08 22:53:53.929509: +2026-04-08 22:53:53.931421: Epoch 222 +2026-04-08 22:53:53.933076: Current learning rate: 0.00798 +2026-04-08 22:55:36.783058: train_loss -0.1371 +2026-04-08 22:55:36.791344: val_loss -0.1323 +2026-04-08 22:55:36.794770: Pseudo dice [0.3812, 0.6126, 0.6937, 0.2578, 0.2961, 0.8013, 0.6849] +2026-04-08 22:55:36.796573: Epoch time: 102.86 s +2026-04-08 22:55:36.798939: Yayy! 
New best EMA pseudo Dice: 0.4784 +2026-04-08 22:55:39.530385: +2026-04-08 22:55:39.531833: Epoch 223 +2026-04-08 22:55:39.533329: Current learning rate: 0.00797 +2026-04-08 22:57:22.397792: train_loss -0.1455 +2026-04-08 22:57:22.405341: val_loss -0.0999 +2026-04-08 22:57:22.407656: Pseudo dice [0.6282, 0.6129, 0.5472, 0.0975, 0.1242, 0.6011, 0.3952] +2026-04-08 22:57:22.410326: Epoch time: 102.87 s +2026-04-08 22:57:23.477317: +2026-04-08 22:57:23.479742: Epoch 224 +2026-04-08 22:57:23.483232: Current learning rate: 0.00796 +2026-04-08 22:59:06.755714: train_loss -0.1421 +2026-04-08 22:59:06.762183: val_loss -0.1158 +2026-04-08 22:59:06.764627: Pseudo dice [0.5854, 0.7405, 0.7046, 0.3032, 0.0746, 0.6951, 0.8468] +2026-04-08 22:59:06.767024: Epoch time: 103.28 s +2026-04-08 22:59:06.769273: Yayy! New best EMA pseudo Dice: 0.4826 +2026-04-08 22:59:09.681584: +2026-04-08 22:59:09.699587: Epoch 225 +2026-04-08 22:59:09.701242: Current learning rate: 0.00795 +2026-04-08 23:00:52.306957: train_loss -0.1525 +2026-04-08 23:00:52.312397: val_loss -0.1113 +2026-04-08 23:00:52.314462: Pseudo dice [0.5085, 0.3586, 0.6475, 0.0982, 0.4493, 0.7158, 0.5658] +2026-04-08 23:00:52.316076: Epoch time: 102.63 s +2026-04-08 23:00:53.354392: +2026-04-08 23:00:53.356442: Epoch 226 +2026-04-08 23:00:53.358613: Current learning rate: 0.00794 +2026-04-08 23:02:36.056975: train_loss -0.149 +2026-04-08 23:02:36.062897: val_loss -0.1311 +2026-04-08 23:02:36.064842: Pseudo dice [0.4037, 0.561, 0.7063, 0.5884, 0.5096, 0.4314, 0.7401] +2026-04-08 23:02:36.066723: Epoch time: 102.71 s +2026-04-08 23:02:36.069093: Yayy! 
New best EMA pseudo Dice: 0.4902 +2026-04-08 23:02:38.841651: +2026-04-08 23:02:38.843707: Epoch 227 +2026-04-08 23:02:38.845616: Current learning rate: 0.00793 +2026-04-08 23:04:21.467271: train_loss -0.1445 +2026-04-08 23:04:21.477352: val_loss -0.1003 +2026-04-08 23:04:21.481878: Pseudo dice [0.5568, 0.7247, 0.7363, 0.5773, 0.4384, 0.4913, 0.3621] +2026-04-08 23:04:21.484575: Epoch time: 102.63 s +2026-04-08 23:04:21.486857: Yayy! New best EMA pseudo Dice: 0.4967 +2026-04-08 23:04:25.438644: +2026-04-08 23:04:25.440992: Epoch 228 +2026-04-08 23:04:25.443485: Current learning rate: 0.00792 +2026-04-08 23:06:08.502050: train_loss -0.1573 +2026-04-08 23:06:08.507774: val_loss -0.1006 +2026-04-08 23:06:08.509906: Pseudo dice [0.6695, 0.623, 0.4984, 0.1953, 0.346, 0.602, 0.6193] +2026-04-08 23:06:08.512383: Epoch time: 103.07 s +2026-04-08 23:06:08.515895: Yayy! New best EMA pseudo Dice: 0.4978 +2026-04-08 23:06:11.274066: +2026-04-08 23:06:11.275991: Epoch 229 +2026-04-08 23:06:11.277494: Current learning rate: 0.00791 +2026-04-08 23:07:55.378970: train_loss -0.1414 +2026-04-08 23:07:55.386062: val_loss -0.1231 +2026-04-08 23:07:55.388075: Pseudo dice [0.4527, 0.4929, 0.7081, 0.1783, 0.4498, 0.6484, 0.3084] +2026-04-08 23:07:55.390191: Epoch time: 104.11 s +2026-04-08 23:07:56.428736: +2026-04-08 23:07:56.431425: Epoch 230 +2026-04-08 23:07:56.433848: Current learning rate: 0.0079 +2026-04-08 23:09:40.688191: train_loss -0.1439 +2026-04-08 23:09:40.694248: val_loss -0.1048 +2026-04-08 23:09:40.697436: Pseudo dice [0.4177, 0.7579, 0.5394, 0.2844, 0.4096, 0.2482, 0.7793] +2026-04-08 23:09:40.700060: Epoch time: 104.26 s +2026-04-08 23:09:41.751708: +2026-04-08 23:09:41.755684: Epoch 231 +2026-04-08 23:09:41.757718: Current learning rate: 0.00789 +2026-04-08 23:11:25.521011: train_loss -0.1441 +2026-04-08 23:11:25.527802: val_loss -0.1212 +2026-04-08 23:11:25.529749: Pseudo dice [0.402, 0.4383, 0.6769, 0.2118, 0.2992, 0.4921, 0.52] +2026-04-08 23:11:25.531543: Epoch 
time: 103.77 s +2026-04-08 23:11:26.577637: +2026-04-08 23:11:26.580790: Epoch 232 +2026-04-08 23:11:26.582986: Current learning rate: 0.00789 +2026-04-08 23:13:09.932542: train_loss -0.1469 +2026-04-08 23:13:09.938673: val_loss -0.1485 +2026-04-08 23:13:09.942746: Pseudo dice [0.5677, 0.4508, 0.7482, 0.0777, 0.3508, 0.7022, 0.7236] +2026-04-08 23:13:09.945552: Epoch time: 103.36 s +2026-04-08 23:13:10.997900: +2026-04-08 23:13:11.000317: Epoch 233 +2026-04-08 23:13:11.002753: Current learning rate: 0.00788 +2026-04-08 23:14:55.391279: train_loss -0.1516 +2026-04-08 23:14:55.405331: val_loss -0.0927 +2026-04-08 23:14:55.409143: Pseudo dice [0.2045, 0.1061, 0.7054, 0.0201, 0.4116, 0.3843, 0.6993] +2026-04-08 23:14:55.411920: Epoch time: 104.4 s +2026-04-08 23:14:56.467061: +2026-04-08 23:14:56.471399: Epoch 234 +2026-04-08 23:14:56.475953: Current learning rate: 0.00787 +2026-04-08 23:16:39.758045: train_loss -0.1358 +2026-04-08 23:16:39.765298: val_loss -0.1193 +2026-04-08 23:16:39.767665: Pseudo dice [0.35, 0.2723, 0.7309, 0.615, 0.23, 0.5536, 0.7204] +2026-04-08 23:16:39.770752: Epoch time: 103.29 s +2026-04-08 23:16:40.823413: +2026-04-08 23:16:40.825596: Epoch 235 +2026-04-08 23:16:40.827661: Current learning rate: 0.00786 +2026-04-08 23:18:25.701143: train_loss -0.1452 +2026-04-08 23:18:25.708528: val_loss -0.1159 +2026-04-08 23:18:25.710957: Pseudo dice [0.1416, 0.1993, 0.6491, 0.4298, 0.4188, 0.7449, 0.5399] +2026-04-08 23:18:25.714123: Epoch time: 104.88 s +2026-04-08 23:18:26.763117: +2026-04-08 23:18:26.769542: Epoch 236 +2026-04-08 23:18:26.777734: Current learning rate: 0.00785 +2026-04-08 23:20:09.595531: train_loss -0.1423 +2026-04-08 23:20:09.601910: val_loss -0.1394 +2026-04-08 23:20:09.604016: Pseudo dice [0.3766, 0.7583, 0.5991, 0.3134, 0.4081, 0.7342, 0.6795] +2026-04-08 23:20:09.606213: Epoch time: 102.84 s +2026-04-08 23:20:10.667497: +2026-04-08 23:20:10.684164: Epoch 237 +2026-04-08 23:20:10.686046: Current learning rate: 0.00784 +2026-04-08 
23:21:53.487300: train_loss -0.1554 +2026-04-08 23:21:53.492626: val_loss -0.1221 +2026-04-08 23:21:53.495373: Pseudo dice [0.7886, 0.8686, 0.6217, 0.5072, 0.468, 0.1866, 0.6276] +2026-04-08 23:21:53.497946: Epoch time: 102.82 s +2026-04-08 23:21:54.548650: +2026-04-08 23:21:54.551050: Epoch 238 +2026-04-08 23:21:54.556227: Current learning rate: 0.00783 +2026-04-08 23:23:38.274046: train_loss -0.1462 +2026-04-08 23:23:38.280430: val_loss -0.1215 +2026-04-08 23:23:38.282826: Pseudo dice [0.225, 0.3119, 0.526, 0.1364, 0.4688, 0.3525, 0.5687] +2026-04-08 23:23:38.285260: Epoch time: 103.73 s +2026-04-08 23:23:39.346632: +2026-04-08 23:23:39.352581: Epoch 239 +2026-04-08 23:23:39.365004: Current learning rate: 0.00782 +2026-04-08 23:25:23.617654: train_loss -0.143 +2026-04-08 23:25:23.625406: val_loss -0.1147 +2026-04-08 23:25:23.628828: Pseudo dice [0.4707, 0.4716, 0.6069, 0.2259, 0.3567, 0.1693, 0.6041] +2026-04-08 23:25:23.631324: Epoch time: 104.27 s +2026-04-08 23:25:24.706988: +2026-04-08 23:25:24.709609: Epoch 240 +2026-04-08 23:25:24.712068: Current learning rate: 0.00781 +2026-04-08 23:27:08.074678: train_loss -0.1359 +2026-04-08 23:27:08.089023: val_loss -0.0831 +2026-04-08 23:27:08.091369: Pseudo dice [0.1461, 0.8345, 0.6478, 0.4545, 0.5032, 0.1007, 0.6458] +2026-04-08 23:27:08.107643: Epoch time: 103.37 s +2026-04-08 23:27:09.170989: +2026-04-08 23:27:09.172975: Epoch 241 +2026-04-08 23:27:09.175099: Current learning rate: 0.0078 +2026-04-08 23:28:52.596725: train_loss -0.1498 +2026-04-08 23:28:52.602082: val_loss -0.115 +2026-04-08 23:28:52.604024: Pseudo dice [0.5294, 0.639, 0.6318, 0.5203, 0.3019, 0.4446, 0.6322] +2026-04-08 23:28:52.607120: Epoch time: 103.43 s +2026-04-08 23:28:53.681661: +2026-04-08 23:28:53.683826: Epoch 242 +2026-04-08 23:28:53.685971: Current learning rate: 0.00779 +2026-04-08 23:30:36.173967: train_loss -0.1474 +2026-04-08 23:30:36.181297: val_loss -0.0929 +2026-04-08 23:30:36.184466: Pseudo dice [0.5178, 0.5487, 0.6875, 0.0623, 
0.4484, 0.1079, 0.6943] +2026-04-08 23:30:36.187820: Epoch time: 102.5 s +2026-04-08 23:30:37.262557: +2026-04-08 23:30:37.264929: Epoch 243 +2026-04-08 23:30:37.267174: Current learning rate: 0.00778 +2026-04-08 23:32:19.831160: train_loss -0.1568 +2026-04-08 23:32:19.843724: val_loss -0.0993 +2026-04-08 23:32:19.858047: Pseudo dice [0.5356, 0.2467, 0.6288, 0.2413, 0.4702, 0.4246, 0.7644] +2026-04-08 23:32:19.860017: Epoch time: 102.57 s +2026-04-08 23:32:20.922239: +2026-04-08 23:32:20.923973: Epoch 244 +2026-04-08 23:32:20.925804: Current learning rate: 0.00777 +2026-04-08 23:34:03.768059: train_loss -0.15 +2026-04-08 23:34:03.773350: val_loss -0.1161 +2026-04-08 23:34:03.775484: Pseudo dice [0.5158, 0.8884, 0.624, 0.3851, 0.6357, 0.3272, 0.7661] +2026-04-08 23:34:03.777685: Epoch time: 102.85 s +2026-04-08 23:34:04.854433: +2026-04-08 23:34:04.856717: Epoch 245 +2026-04-08 23:34:04.858917: Current learning rate: 0.00777 +2026-04-08 23:35:47.365661: train_loss -0.1588 +2026-04-08 23:35:47.371241: val_loss -0.1249 +2026-04-08 23:35:47.373443: Pseudo dice [0.5282, 0.2581, 0.6272, 0.4669, 0.5299, 0.7929, 0.6517] +2026-04-08 23:35:47.375547: Epoch time: 102.51 s +2026-04-08 23:35:48.481361: +2026-04-08 23:35:48.483298: Epoch 246 +2026-04-08 23:35:48.485130: Current learning rate: 0.00776 +2026-04-08 23:37:30.458097: train_loss -0.1362 +2026-04-08 23:37:30.463802: val_loss -0.1371 +2026-04-08 23:37:30.466185: Pseudo dice [0.4354, 0.5208, 0.6183, 0.1314, 0.245, 0.525, 0.6687] +2026-04-08 23:37:30.468414: Epoch time: 101.98 s +2026-04-08 23:37:31.539214: +2026-04-08 23:37:31.543441: Epoch 247 +2026-04-08 23:37:31.545557: Current learning rate: 0.00775 +2026-04-08 23:39:14.788818: train_loss -0.1506 +2026-04-08 23:39:14.795269: val_loss -0.1299 +2026-04-08 23:39:14.797296: Pseudo dice [0.2998, 0.0891, 0.655, 0.1925, 0.4555, 0.5005, 0.675] +2026-04-08 23:39:14.800682: Epoch time: 103.25 s +2026-04-08 23:39:15.891006: +2026-04-08 23:39:15.897815: Epoch 248 +2026-04-08 
23:39:15.900846: Current learning rate: 0.00774 +2026-04-08 23:41:00.671169: train_loss -0.1559 +2026-04-08 23:41:00.678943: val_loss -0.1414 +2026-04-08 23:41:00.681622: Pseudo dice [0.3755, 0.7421, 0.6641, 0.4995, 0.3063, 0.5317, 0.793] +2026-04-08 23:41:00.684486: Epoch time: 104.78 s +2026-04-08 23:41:01.766010: +2026-04-08 23:41:01.774817: Epoch 249 +2026-04-08 23:41:01.779221: Current learning rate: 0.00773 +2026-04-08 23:42:44.581097: train_loss -0.1569 +2026-04-08 23:42:44.588002: val_loss -0.1132 +2026-04-08 23:42:44.590347: Pseudo dice [0.1057, 0.445, 0.6445, 0.4185, 0.3593, 0.6703, 0.6614] +2026-04-08 23:42:44.592679: Epoch time: 102.82 s +2026-04-08 23:42:47.446180: +2026-04-08 23:42:47.447958: Epoch 250 +2026-04-08 23:42:47.449501: Current learning rate: 0.00772 +2026-04-08 23:44:31.209136: train_loss -0.1483 +2026-04-08 23:44:31.215911: val_loss -0.1264 +2026-04-08 23:44:31.218273: Pseudo dice [0.4087, 0.4098, 0.7294, 0.6372, 0.3483, 0.3886, 0.6606] +2026-04-08 23:44:31.220454: Epoch time: 103.77 s +2026-04-08 23:44:32.309801: +2026-04-08 23:44:32.311823: Epoch 251 +2026-04-08 23:44:32.315182: Current learning rate: 0.00771 +2026-04-08 23:46:15.746738: train_loss -0.1479 +2026-04-08 23:46:15.753252: val_loss -0.133 +2026-04-08 23:46:15.755563: Pseudo dice [0.089, 0.2959, 0.7842, 0.1395, 0.5475, 0.6958, 0.7324] +2026-04-08 23:46:15.757552: Epoch time: 103.44 s +2026-04-08 23:46:16.839249: +2026-04-08 23:46:16.840987: Epoch 252 +2026-04-08 23:46:16.842916: Current learning rate: 0.0077 +2026-04-08 23:48:01.294834: train_loss -0.1505 +2026-04-08 23:48:01.301518: val_loss -0.1453 +2026-04-08 23:48:01.305226: Pseudo dice [0.7352, 0.3192, 0.7479, 0.3303, 0.4681, 0.5951, 0.7888] +2026-04-08 23:48:01.307252: Epoch time: 104.46 s +2026-04-08 23:48:02.380263: +2026-04-08 23:48:02.382744: Epoch 253 +2026-04-08 23:48:02.384288: Current learning rate: 0.00769 +2026-04-08 23:49:45.811954: train_loss -0.1507 +2026-04-08 23:49:45.821244: val_loss -0.1219 +2026-04-08 
23:49:45.825075: Pseudo dice [0.2712, 0.0941, 0.6596, 0.0467, 0.3819, 0.5099, 0.6916] +2026-04-08 23:49:45.827458: Epoch time: 103.43 s +2026-04-08 23:49:46.936349: +2026-04-08 23:49:46.939397: Epoch 254 +2026-04-08 23:49:46.941642: Current learning rate: 0.00768 +2026-04-08 23:51:30.313430: train_loss -0.1485 +2026-04-08 23:51:30.322374: val_loss -0.1258 +2026-04-08 23:51:30.325793: Pseudo dice [0.3959, 0.6414, 0.6819, 0.3872, 0.3123, 0.7191, 0.6509] +2026-04-08 23:51:30.327937: Epoch time: 103.38 s +2026-04-08 23:51:31.407857: +2026-04-08 23:51:31.409879: Epoch 255 +2026-04-08 23:51:31.412203: Current learning rate: 0.00767 +2026-04-08 23:53:13.892013: train_loss -0.1583 +2026-04-08 23:53:13.899718: val_loss -0.1081 +2026-04-08 23:53:13.901758: Pseudo dice [0.4359, 0.8294, 0.6177, 0.1509, 0.1625, 0.3843, 0.8149] +2026-04-08 23:53:13.904851: Epoch time: 102.49 s +2026-04-08 23:53:15.012804: +2026-04-08 23:53:15.015054: Epoch 256 +2026-04-08 23:53:15.017006: Current learning rate: 0.00766 +2026-04-08 23:54:57.477657: train_loss -0.1511 +2026-04-08 23:54:57.484020: val_loss -0.0996 +2026-04-08 23:54:57.486968: Pseudo dice [0.2769, 0.271, 0.6877, 0.0844, 0.2641, 0.2711, 0.6329] +2026-04-08 23:54:57.488901: Epoch time: 102.47 s +2026-04-08 23:54:58.558979: +2026-04-08 23:54:58.561399: Epoch 257 +2026-04-08 23:54:58.562753: Current learning rate: 0.00765 +2026-04-08 23:56:41.895171: train_loss -0.1334 +2026-04-08 23:56:41.902606: val_loss -0.0841 +2026-04-08 23:56:41.905849: Pseudo dice [0.2463, 0.7117, 0.287, 0.2943, 0.0244, 0.43, 0.5913] +2026-04-08 23:56:41.908355: Epoch time: 103.34 s +2026-04-08 23:56:42.969819: +2026-04-08 23:56:42.974501: Epoch 258 +2026-04-08 23:56:42.977210: Current learning rate: 0.00764 +2026-04-08 23:58:25.867364: train_loss -0.143 +2026-04-08 23:58:25.875329: val_loss -0.085 +2026-04-08 23:58:25.879550: Pseudo dice [0.3693, 0.1381, 0.5328, 0.1126, 0.5915, 0.3405, 0.6952] +2026-04-08 23:58:25.881722: Epoch time: 102.9 s +2026-04-08 
23:58:26.964142: +2026-04-08 23:58:26.966736: Epoch 259 +2026-04-08 23:58:26.968828: Current learning rate: 0.00764 +2026-04-09 00:00:09.420141: train_loss -0.152 +2026-04-09 00:00:09.427133: val_loss -0.1083 +2026-04-09 00:00:09.429714: Pseudo dice [0.2715, 0.7789, 0.5078, 0.0324, 0.303, 0.6354, 0.5705] +2026-04-09 00:00:09.431611: Epoch time: 102.46 s +2026-04-09 00:00:10.529070: +2026-04-09 00:00:10.530979: Epoch 260 +2026-04-09 00:00:10.532973: Current learning rate: 0.00763 +2026-04-09 00:01:55.911562: train_loss -0.152 +2026-04-09 00:01:55.920409: val_loss -0.1112 +2026-04-09 00:01:55.925667: Pseudo dice [0.4643, 0.2405, 0.7826, 0.4457, 0.5021, 0.778, 0.4638] +2026-04-09 00:01:55.930110: Epoch time: 105.39 s +2026-04-09 00:01:57.032729: +2026-04-09 00:01:57.037642: Epoch 261 +2026-04-09 00:01:57.043272: Current learning rate: 0.00762 +2026-04-09 00:03:42.975015: train_loss -0.1538 +2026-04-09 00:03:42.984095: val_loss -0.0979 +2026-04-09 00:03:42.987125: Pseudo dice [0.4235, 0.7435, 0.7717, 0.2133, 0.3502, 0.2033, 0.3488] +2026-04-09 00:03:42.991145: Epoch time: 105.95 s +2026-04-09 00:03:44.132380: +2026-04-09 00:03:44.135374: Epoch 262 +2026-04-09 00:03:44.137666: Current learning rate: 0.00761 +2026-04-09 00:05:27.492689: train_loss -0.1265 +2026-04-09 00:05:27.501145: val_loss -0.1094 +2026-04-09 00:05:27.503962: Pseudo dice [0.5587, 0.6625, 0.605, 0.1978, 0.5448, 0.6667, 0.6791] +2026-04-09 00:05:27.507106: Epoch time: 103.36 s +2026-04-09 00:05:28.565712: +2026-04-09 00:05:28.567977: Epoch 263 +2026-04-09 00:05:28.570519: Current learning rate: 0.0076 +2026-04-09 00:07:13.120020: train_loss -0.1393 +2026-04-09 00:07:13.131426: val_loss -0.112 +2026-04-09 00:07:13.134025: Pseudo dice [0.178, 0.7792, 0.6422, 0.3782, 0.3742, 0.3012, 0.782] +2026-04-09 00:07:13.136680: Epoch time: 104.56 s +2026-04-09 00:07:14.200405: +2026-04-09 00:07:14.203019: Epoch 264 +2026-04-09 00:07:14.204938: Current learning rate: 0.00759 +2026-04-09 00:08:58.528019: train_loss 
-0.1403 +2026-04-09 00:08:58.536291: val_loss -0.1183 +2026-04-09 00:08:58.541274: Pseudo dice [0.3321, 0.4577, 0.7612, 0.2739, 0.3302, 0.3942, 0.5425] +2026-04-09 00:08:58.547381: Epoch time: 104.33 s +2026-04-09 00:08:59.631631: +2026-04-09 00:08:59.634736: Epoch 265 +2026-04-09 00:08:59.643084: Current learning rate: 0.00758 +2026-04-09 00:10:43.033107: train_loss -0.1463 +2026-04-09 00:10:43.040954: val_loss -0.1604 +2026-04-09 00:10:43.043679: Pseudo dice [0.6054, 0.3819, 0.7516, 0.0924, 0.436, 0.61, 0.6622] +2026-04-09 00:10:43.046009: Epoch time: 103.4 s +2026-04-09 00:10:44.132421: +2026-04-09 00:10:44.134744: Epoch 266 +2026-04-09 00:10:44.137568: Current learning rate: 0.00757 +2026-04-09 00:12:26.606364: train_loss -0.1518 +2026-04-09 00:12:26.612578: val_loss -0.09 +2026-04-09 00:12:26.615147: Pseudo dice [0.4599, 0.5628, 0.4898, 0.2038, 0.4542, 0.5948, 0.6089] +2026-04-09 00:12:26.617608: Epoch time: 102.48 s +2026-04-09 00:12:27.696364: +2026-04-09 00:12:27.698491: Epoch 267 +2026-04-09 00:12:27.700462: Current learning rate: 0.00756 +2026-04-09 00:14:10.282327: train_loss -0.1344 +2026-04-09 00:14:10.288854: val_loss -0.1172 +2026-04-09 00:14:10.291162: Pseudo dice [0.6812, 0.5425, 0.6191, 0.3271, 0.7235, 0.7142, 0.5548] +2026-04-09 00:14:10.293428: Epoch time: 102.59 s +2026-04-09 00:14:12.519976: +2026-04-09 00:14:12.521951: Epoch 268 +2026-04-09 00:14:12.523672: Current learning rate: 0.00755 +2026-04-09 00:15:55.304661: train_loss -0.151 +2026-04-09 00:15:55.312281: val_loss -0.123 +2026-04-09 00:15:55.314623: Pseudo dice [0.3292, 0.5705, 0.7405, 0.4027, 0.418, 0.5807, 0.6824] +2026-04-09 00:15:55.317985: Epoch time: 102.79 s +2026-04-09 00:15:56.394247: +2026-04-09 00:15:56.397693: Epoch 269 +2026-04-09 00:15:56.399905: Current learning rate: 0.00754 +2026-04-09 00:17:39.691736: train_loss -0.1513 +2026-04-09 00:17:39.698546: val_loss -0.1321 +2026-04-09 00:17:39.701111: Pseudo dice [0.4284, 0.8604, 0.5902, 0.2182, 0.2616, 0.1977, 0.7332] 
+2026-04-09 00:17:39.703423: Epoch time: 103.3 s +2026-04-09 00:17:40.780082: +2026-04-09 00:17:40.782542: Epoch 270 +2026-04-09 00:17:40.784420: Current learning rate: 0.00753 +2026-04-09 00:19:24.086743: train_loss -0.1566 +2026-04-09 00:19:24.093890: val_loss -0.12 +2026-04-09 00:19:24.095885: Pseudo dice [0.5355, 0.888, 0.7127, 0.4709, 0.4169, 0.4284, 0.4733] +2026-04-09 00:19:24.098294: Epoch time: 103.31 s +2026-04-09 00:19:25.221097: +2026-04-09 00:19:25.224511: Epoch 271 +2026-04-09 00:19:25.227608: Current learning rate: 0.00752 +2026-04-09 00:21:08.443583: train_loss -0.1501 +2026-04-09 00:21:08.451293: val_loss -0.0716 +2026-04-09 00:21:08.455980: Pseudo dice [0.4688, 0.8419, 0.5078, 0.1044, 0.3469, 0.4148, 0.4736] +2026-04-09 00:21:08.458185: Epoch time: 103.23 s +2026-04-09 00:21:09.536801: +2026-04-09 00:21:09.539208: Epoch 272 +2026-04-09 00:21:09.541028: Current learning rate: 0.00751 +2026-04-09 00:22:52.455536: train_loss -0.1331 +2026-04-09 00:22:52.464502: val_loss -0.1058 +2026-04-09 00:22:52.470422: Pseudo dice [0.5944, 0.7327, 0.5138, 0.2365, 0.3324, 0.3015, 0.6681] +2026-04-09 00:22:52.473906: Epoch time: 102.92 s +2026-04-09 00:22:53.562691: +2026-04-09 00:22:53.566260: Epoch 273 +2026-04-09 00:22:53.570756: Current learning rate: 0.00751 +2026-04-09 00:24:36.544023: train_loss -0.1537 +2026-04-09 00:24:36.552628: val_loss -0.1177 +2026-04-09 00:24:36.555009: Pseudo dice [0.4487, 0.4382, 0.5764, 0.3685, 0.2468, 0.4089, 0.6608] +2026-04-09 00:24:36.557286: Epoch time: 102.98 s +2026-04-09 00:24:37.638564: +2026-04-09 00:24:37.640682: Epoch 274 +2026-04-09 00:24:37.642911: Current learning rate: 0.0075 +2026-04-09 00:26:20.103066: train_loss -0.1347 +2026-04-09 00:26:20.108874: val_loss -0.1029 +2026-04-09 00:26:20.110958: Pseudo dice [0.3097, 0.1729, 0.6867, 0.2379, 0.4939, 0.178, 0.8349] +2026-04-09 00:26:20.113668: Epoch time: 102.47 s +2026-04-09 00:26:21.209822: +2026-04-09 00:26:21.212044: Epoch 275 +2026-04-09 00:26:21.214099: Current 
learning rate: 0.00749 +2026-04-09 00:28:03.526926: train_loss -0.1548 +2026-04-09 00:28:03.533476: val_loss -0.1388 +2026-04-09 00:28:03.535710: Pseudo dice [0.5755, 0.7355, 0.7521, 0.1652, 0.3251, 0.5209, 0.7772] +2026-04-09 00:28:03.537719: Epoch time: 102.32 s +2026-04-09 00:28:04.624358: +2026-04-09 00:28:04.626753: Epoch 276 +2026-04-09 00:28:04.629393: Current learning rate: 0.00748 +2026-04-09 00:29:47.407765: train_loss -0.1634 +2026-04-09 00:29:47.416285: val_loss -0.0945 +2026-04-09 00:29:47.426194: Pseudo dice [0.4814, 0.3312, 0.5217, 0.3677, 0.3277, 0.441, 0.5494] +2026-04-09 00:29:47.430045: Epoch time: 102.79 s +2026-04-09 00:29:48.507714: +2026-04-09 00:29:48.510445: Epoch 277 +2026-04-09 00:29:48.512221: Current learning rate: 0.00747 +2026-04-09 00:31:32.044759: train_loss -0.1584 +2026-04-09 00:31:32.049573: val_loss -0.1437 +2026-04-09 00:31:32.051811: Pseudo dice [0.495, 0.8912, 0.6938, 0.1132, 0.3038, 0.7325, 0.6974] +2026-04-09 00:31:32.054129: Epoch time: 103.54 s +2026-04-09 00:31:33.142854: +2026-04-09 00:31:33.144666: Epoch 278 +2026-04-09 00:31:33.146169: Current learning rate: 0.00746 +2026-04-09 00:33:15.884453: train_loss -0.1617 +2026-04-09 00:33:15.892227: val_loss -0.0912 +2026-04-09 00:33:15.894684: Pseudo dice [0.3561, 0.6567, 0.5102, 0.3834, 0.3736, 0.6012, 0.7669] +2026-04-09 00:33:15.896515: Epoch time: 102.74 s +2026-04-09 00:33:16.997496: +2026-04-09 00:33:17.000551: Epoch 279 +2026-04-09 00:33:17.002693: Current learning rate: 0.00745 +2026-04-09 00:34:59.668212: train_loss -0.1622 +2026-04-09 00:34:59.681091: val_loss -0.1279 +2026-04-09 00:34:59.683720: Pseudo dice [0.427, 0.8178, 0.6799, 0.4747, 0.5303, 0.3752, 0.5499] +2026-04-09 00:34:59.686306: Epoch time: 102.67 s +2026-04-09 00:34:59.688074: Yayy! 
New best EMA pseudo Dice: 0.4981 +2026-04-09 00:35:02.519202: +2026-04-09 00:35:02.520971: Epoch 280 +2026-04-09 00:35:02.522451: Current learning rate: 0.00744 +2026-04-09 00:36:45.711922: train_loss -0.1677 +2026-04-09 00:36:45.718070: val_loss -0.1463 +2026-04-09 00:36:45.720193: Pseudo dice [0.5216, 0.8439, 0.8186, 0.2046, 0.4437, 0.6963, 0.2557] +2026-04-09 00:36:45.722577: Epoch time: 103.2 s +2026-04-09 00:36:45.724761: Yayy! New best EMA pseudo Dice: 0.5024 +2026-04-09 00:36:48.478524: +2026-04-09 00:36:48.480875: Epoch 281 +2026-04-09 00:36:48.483428: Current learning rate: 0.00743 +2026-04-09 00:38:31.488316: train_loss -0.1582 +2026-04-09 00:38:31.497857: val_loss -0.155 +2026-04-09 00:38:31.500449: Pseudo dice [0.5641, 0.4572, 0.6863, 0.6376, 0.4863, 0.5254, 0.8042] +2026-04-09 00:38:31.503819: Epoch time: 103.01 s +2026-04-09 00:38:31.506954: Yayy! New best EMA pseudo Dice: 0.5116 +2026-04-09 00:38:34.444614: +2026-04-09 00:38:34.446411: Epoch 282 +2026-04-09 00:38:34.448169: Current learning rate: 0.00742 +2026-04-09 00:40:17.267866: train_loss -0.182 +2026-04-09 00:40:17.275273: val_loss -0.0964 +2026-04-09 00:40:17.278389: Pseudo dice [0.533, 0.7777, 0.4529, 0.0965, 0.2987, 0.6059, 0.4732] +2026-04-09 00:40:17.280270: Epoch time: 102.83 s +2026-04-09 00:40:18.360560: +2026-04-09 00:40:18.362875: Epoch 283 +2026-04-09 00:40:18.366327: Current learning rate: 0.00741 +2026-04-09 00:42:01.016185: train_loss -0.1566 +2026-04-09 00:42:01.042101: val_loss -0.1357 +2026-04-09 00:42:01.044106: Pseudo dice [0.3278, 0.3375, 0.5315, 0.3233, 0.4049, 0.4677, 0.7514] +2026-04-09 00:42:01.046493: Epoch time: 102.66 s +2026-04-09 00:42:02.139429: +2026-04-09 00:42:02.141818: Epoch 284 +2026-04-09 00:42:02.143878: Current learning rate: 0.0074 +2026-04-09 00:43:45.585378: train_loss -0.1592 +2026-04-09 00:43:45.592812: val_loss -0.1186 +2026-04-09 00:43:45.596475: Pseudo dice [0.4605, 0.8426, 0.6916, 0.5173, 0.4742, 0.4293, 0.6132] +2026-04-09 00:43:45.599124: Epoch 
time: 103.45 s +2026-04-09 00:43:46.659966: +2026-04-09 00:43:46.662183: Epoch 285 +2026-04-09 00:43:46.664125: Current learning rate: 0.00739 +2026-04-09 00:45:29.169939: train_loss -0.1666 +2026-04-09 00:45:29.176139: val_loss -0.118 +2026-04-09 00:45:29.178532: Pseudo dice [0.0807, 0.621, 0.6678, 0.2694, 0.474, 0.6574, 0.1411] +2026-04-09 00:45:29.180568: Epoch time: 102.51 s +2026-04-09 00:45:30.294289: +2026-04-09 00:45:30.296751: Epoch 286 +2026-04-09 00:45:30.299769: Current learning rate: 0.00738 +2026-04-09 00:47:14.509560: train_loss -0.1505 +2026-04-09 00:47:14.516449: val_loss -0.0881 +2026-04-09 00:47:14.519049: Pseudo dice [0.0839, 0.8469, 0.5977, 0.1555, 0.4935, 0.3555, 0.6467] +2026-04-09 00:47:14.521755: Epoch time: 104.22 s +2026-04-09 00:47:15.620430: +2026-04-09 00:47:15.622689: Epoch 287 +2026-04-09 00:47:15.624954: Current learning rate: 0.00738 +2026-04-09 00:48:59.314930: train_loss -0.1517 +2026-04-09 00:48:59.322036: val_loss -0.1396 +2026-04-09 00:48:59.324040: Pseudo dice [0.286, 0.7341, 0.6747, 0.5682, 0.3698, 0.7704, 0.8095] +2026-04-09 00:48:59.326480: Epoch time: 103.7 s +2026-04-09 00:49:00.415364: +2026-04-09 00:49:00.417618: Epoch 288 +2026-04-09 00:49:00.419935: Current learning rate: 0.00737 +2026-04-09 00:50:42.838763: train_loss -0.1592 +2026-04-09 00:50:42.844670: val_loss -0.1275 +2026-04-09 00:50:42.846554: Pseudo dice [0.4394, 0.4796, 0.7914, 0.1194, 0.4162, 0.5775, 0.487] +2026-04-09 00:50:42.848460: Epoch time: 102.43 s +2026-04-09 00:50:43.949734: +2026-04-09 00:50:43.952230: Epoch 289 +2026-04-09 00:50:43.954103: Current learning rate: 0.00736 +2026-04-09 00:52:26.658051: train_loss -0.1537 +2026-04-09 00:52:26.665493: val_loss -0.1502 +2026-04-09 00:52:26.667799: Pseudo dice [0.6199, 0.7207, 0.6858, 0.5434, 0.4335, 0.7653, 0.5068] +2026-04-09 00:52:26.669796: Epoch time: 102.71 s +2026-04-09 00:52:26.672321: Yayy! 
New best EMA pseudo Dice: 0.513 +2026-04-09 00:52:29.296625: +2026-04-09 00:52:29.298617: Epoch 290 +2026-04-09 00:52:29.300163: Current learning rate: 0.00735 +2026-04-09 00:54:12.467210: train_loss -0.1426 +2026-04-09 00:54:12.474168: val_loss -0.1336 +2026-04-09 00:54:12.477183: Pseudo dice [0.373, 0.578, 0.6413, 0.2978, 0.3387, 0.3618, 0.6399] +2026-04-09 00:54:12.479713: Epoch time: 103.17 s +2026-04-09 00:54:13.577541: +2026-04-09 00:54:13.580374: Epoch 291 +2026-04-09 00:54:13.582547: Current learning rate: 0.00734 +2026-04-09 00:55:55.785635: train_loss -0.1495 +2026-04-09 00:55:55.791878: val_loss -0.1173 +2026-04-09 00:55:55.793781: Pseudo dice [0.5517, 0.7827, 0.5376, 0.2824, 0.5618, 0.3001, 0.6477] +2026-04-09 00:55:55.795532: Epoch time: 102.21 s +2026-04-09 00:55:56.910596: +2026-04-09 00:55:56.912730: Epoch 292 +2026-04-09 00:55:56.914548: Current learning rate: 0.00733 +2026-04-09 00:57:40.099307: train_loss -0.1664 +2026-04-09 00:57:40.105236: val_loss -0.1524 +2026-04-09 00:57:40.107268: Pseudo dice [0.5199, 0.4022, 0.7717, 0.5553, 0.4259, 0.7875, 0.6577] +2026-04-09 00:57:40.109441: Epoch time: 103.19 s +2026-04-09 00:57:40.111406: Yayy! New best EMA pseudo Dice: 0.5173 +2026-04-09 00:57:42.925737: +2026-04-09 00:57:42.927389: Epoch 293 +2026-04-09 00:57:42.928907: Current learning rate: 0.00732 +2026-04-09 00:59:25.427602: train_loss -0.1648 +2026-04-09 00:59:25.433472: val_loss -0.0952 +2026-04-09 00:59:25.447972: Pseudo dice [0.6143, 0.7343, 0.7002, 0.1226, 0.3314, 0.6569, 0.7921] +2026-04-09 00:59:25.450736: Epoch time: 102.51 s +2026-04-09 00:59:25.454140: Yayy! 
New best EMA pseudo Dice: 0.522 +2026-04-09 00:59:28.017541: +2026-04-09 00:59:28.019267: Epoch 294 +2026-04-09 00:59:28.020658: Current learning rate: 0.00731 +2026-04-09 01:01:10.669029: train_loss -0.1522 +2026-04-09 01:01:10.675813: val_loss -0.1498 +2026-04-09 01:01:10.678265: Pseudo dice [0.7328, 0.8634, 0.3703, 0.5054, 0.6316, 0.632, 0.4322] +2026-04-09 01:01:10.680284: Epoch time: 102.65 s +2026-04-09 01:01:10.682431: Yayy! New best EMA pseudo Dice: 0.5294 +2026-04-09 01:01:13.519163: +2026-04-09 01:01:13.520693: Epoch 295 +2026-04-09 01:01:13.522613: Current learning rate: 0.0073 +2026-04-09 01:02:55.597372: train_loss -0.1503 +2026-04-09 01:02:55.605501: val_loss -0.1093 +2026-04-09 01:02:55.608943: Pseudo dice [0.5641, 0.4359, 0.6048, 0.3772, 0.2923, 0.428, 0.6747] +2026-04-09 01:02:55.611022: Epoch time: 102.08 s +2026-04-09 01:02:56.709799: +2026-04-09 01:02:56.712736: Epoch 296 +2026-04-09 01:02:56.716056: Current learning rate: 0.00729 +2026-04-09 01:04:39.100660: train_loss -0.1739 +2026-04-09 01:04:39.108909: val_loss -0.1287 +2026-04-09 01:04:39.111829: Pseudo dice [0.5854, 0.359, 0.7098, 0.1415, 0.3058, 0.3197, 0.7342] +2026-04-09 01:04:39.115027: Epoch time: 102.39 s +2026-04-09 01:04:40.208231: +2026-04-09 01:04:40.211790: Epoch 297 +2026-04-09 01:04:40.214130: Current learning rate: 0.00728 +2026-04-09 01:06:22.703087: train_loss -0.169 +2026-04-09 01:06:22.709579: val_loss -0.1231 +2026-04-09 01:06:22.711818: Pseudo dice [0.4683, 0.5308, 0.7596, 0.2819, 0.2843, 0.6185, 0.6193] +2026-04-09 01:06:22.713846: Epoch time: 102.5 s +2026-04-09 01:06:23.819188: +2026-04-09 01:06:23.821648: Epoch 298 +2026-04-09 01:06:23.823941: Current learning rate: 0.00727 +2026-04-09 01:08:06.550251: train_loss -0.1561 +2026-04-09 01:08:06.556537: val_loss -0.1249 +2026-04-09 01:08:06.559113: Pseudo dice [0.4425, 0.7536, 0.5962, 0.2605, 0.5005, 0.4888, 0.8233] +2026-04-09 01:08:06.560894: Epoch time: 102.73 s +2026-04-09 01:08:07.667575: +2026-04-09 
01:08:07.669302: Epoch 299 +2026-04-09 01:08:07.671050: Current learning rate: 0.00726 +2026-04-09 01:09:50.233837: train_loss -0.1678 +2026-04-09 01:09:50.242037: val_loss -0.1212 +2026-04-09 01:09:50.244184: Pseudo dice [0.5739, 0.3382, 0.472, 0.0842, 0.3356, 0.7176, 0.8009] +2026-04-09 01:09:50.246129: Epoch time: 102.57 s +2026-04-09 01:09:52.795584: +2026-04-09 01:09:52.797221: Epoch 300 +2026-04-09 01:09:52.798632: Current learning rate: 0.00725 +2026-04-09 01:11:35.391608: train_loss -0.1689 +2026-04-09 01:11:35.397215: val_loss -0.1177 +2026-04-09 01:11:35.399268: Pseudo dice [0.3904, 0.8806, 0.7419, 0.4381, 0.3307, 0.3033, 0.7225] +2026-04-09 01:11:35.402037: Epoch time: 102.6 s +2026-04-09 01:11:36.498881: +2026-04-09 01:11:36.500997: Epoch 301 +2026-04-09 01:11:36.502599: Current learning rate: 0.00724 +2026-04-09 01:13:19.060113: train_loss -0.1561 +2026-04-09 01:13:19.066724: val_loss -0.125 +2026-04-09 01:13:19.069556: Pseudo dice [0.646, 0.2788, 0.7099, 0.2184, 0.3958, 0.5104, 0.777] +2026-04-09 01:13:19.072536: Epoch time: 102.56 s +2026-04-09 01:13:20.163204: +2026-04-09 01:13:20.165419: Epoch 302 +2026-04-09 01:13:20.166864: Current learning rate: 0.00724 +2026-04-09 01:15:03.114883: train_loss -0.1651 +2026-04-09 01:15:03.121998: val_loss -0.1255 +2026-04-09 01:15:03.124113: Pseudo dice [0.1536, 0.7515, 0.7157, 0.2251, 0.3295, 0.7459, 0.5711] +2026-04-09 01:15:03.127023: Epoch time: 102.95 s +2026-04-09 01:15:04.243759: +2026-04-09 01:15:04.246481: Epoch 303 +2026-04-09 01:15:04.248293: Current learning rate: 0.00723 +2026-04-09 01:16:46.639441: train_loss -0.1611 +2026-04-09 01:16:46.644951: val_loss -0.1254 +2026-04-09 01:16:46.647254: Pseudo dice [0.2471, 0.8497, 0.6888, 0.2482, 0.3931, 0.6814, 0.6565] +2026-04-09 01:16:46.648832: Epoch time: 102.4 s +2026-04-09 01:16:47.746071: +2026-04-09 01:16:47.748256: Epoch 304 +2026-04-09 01:16:47.749604: Current learning rate: 0.00722 +2026-04-09 01:18:30.769583: train_loss -0.1697 +2026-04-09 
01:18:30.776201: val_loss -0.1265 +2026-04-09 01:18:30.778917: Pseudo dice [0.6192, 0.2145, 0.7074, 0.3864, 0.5133, 0.6864, 0.6455] +2026-04-09 01:18:30.781176: Epoch time: 103.03 s +2026-04-09 01:18:31.866293: +2026-04-09 01:18:31.868369: Epoch 305 +2026-04-09 01:18:31.870138: Current learning rate: 0.00721 +2026-04-09 01:20:15.495844: train_loss -0.1668 +2026-04-09 01:20:15.501756: val_loss -0.1283 +2026-04-09 01:20:15.503876: Pseudo dice [0.546, 0.8616, 0.6175, 0.2398, 0.3977, 0.7377, 0.5935] +2026-04-09 01:20:15.505750: Epoch time: 103.63 s +2026-04-09 01:20:16.608387: +2026-04-09 01:20:16.610845: Epoch 306 +2026-04-09 01:20:16.613260: Current learning rate: 0.0072 +2026-04-09 01:21:59.064713: train_loss -0.1618 +2026-04-09 01:21:59.070646: val_loss -0.0945 +2026-04-09 01:21:59.072834: Pseudo dice [0.0603, 0.3194, 0.5705, 0.3933, 0.2782, 0.1939, 0.6897] +2026-04-09 01:21:59.074662: Epoch time: 102.46 s +2026-04-09 01:22:00.175533: +2026-04-09 01:22:00.178007: Epoch 307 +2026-04-09 01:22:00.180154: Current learning rate: 0.00719 +2026-04-09 01:23:42.777250: train_loss -0.1705 +2026-04-09 01:23:42.783314: val_loss -0.1234 +2026-04-09 01:23:42.785699: Pseudo dice [0.6008, 0.8528, 0.6911, 0.1976, 0.3533, 0.7315, 0.5564] +2026-04-09 01:23:42.789380: Epoch time: 102.6 s +2026-04-09 01:23:43.965765: +2026-04-09 01:23:43.967707: Epoch 308 +2026-04-09 01:23:43.970561: Current learning rate: 0.00718 +2026-04-09 01:25:26.491892: train_loss -0.1449 +2026-04-09 01:25:26.496928: val_loss -0.1256 +2026-04-09 01:25:26.498775: Pseudo dice [0.2445, 0.7684, 0.6909, 0.1692, 0.4473, 0.2222, 0.6616] +2026-04-09 01:25:26.500818: Epoch time: 102.53 s +2026-04-09 01:25:27.590531: +2026-04-09 01:25:27.592449: Epoch 309 +2026-04-09 01:25:27.594657: Current learning rate: 0.00717 +2026-04-09 01:27:10.174687: train_loss -0.166 +2026-04-09 01:27:10.181086: val_loss -0.1073 +2026-04-09 01:27:10.184602: Pseudo dice [0.5881, 0.8837, 0.667, 0.0989, 0.4408, 0.2925, 0.6262] +2026-04-09 
01:27:10.186586: Epoch time: 102.59 s +2026-04-09 01:27:11.279763: +2026-04-09 01:27:11.281860: Epoch 310 +2026-04-09 01:27:11.283810: Current learning rate: 0.00716 +2026-04-09 01:28:53.637882: train_loss -0.1619 +2026-04-09 01:28:53.646079: val_loss -0.1358 +2026-04-09 01:28:53.648587: Pseudo dice [0.4012, 0.2006, 0.684, 0.4194, 0.2784, 0.8244, 0.5708] +2026-04-09 01:28:53.651265: Epoch time: 102.36 s +2026-04-09 01:28:54.751328: +2026-04-09 01:28:54.754184: Epoch 311 +2026-04-09 01:28:54.756567: Current learning rate: 0.00715 +2026-04-09 01:30:37.698970: train_loss -0.1663 +2026-04-09 01:30:37.704834: val_loss -0.1462 +2026-04-09 01:30:37.707633: Pseudo dice [0.4795, 0.6948, 0.7144, 0.2656, 0.5914, 0.643, 0.4478] +2026-04-09 01:30:37.710022: Epoch time: 102.95 s +2026-04-09 01:30:38.811660: +2026-04-09 01:30:38.813536: Epoch 312 +2026-04-09 01:30:38.816444: Current learning rate: 0.00714 +2026-04-09 01:32:21.296922: train_loss -0.1719 +2026-04-09 01:32:21.304728: val_loss -0.1333 +2026-04-09 01:32:21.306856: Pseudo dice [0.593, 0.2025, 0.7621, 0.2335, 0.3795, 0.1443, 0.6816] +2026-04-09 01:32:21.309171: Epoch time: 102.49 s +2026-04-09 01:32:22.420461: +2026-04-09 01:32:22.422203: Epoch 313 +2026-04-09 01:32:22.424111: Current learning rate: 0.00713 +2026-04-09 01:34:04.641582: train_loss -0.163 +2026-04-09 01:34:04.646998: val_loss -0.1377 +2026-04-09 01:34:04.648954: Pseudo dice [0.3259, 0.8164, 0.6707, 0.1367, 0.4546, 0.4328, 0.7271] +2026-04-09 01:34:04.650852: Epoch time: 102.22 s +2026-04-09 01:34:05.744989: +2026-04-09 01:34:05.747096: Epoch 314 +2026-04-09 01:34:05.750295: Current learning rate: 0.00712 +2026-04-09 01:35:48.558254: train_loss -0.1571 +2026-04-09 01:35:48.563852: val_loss -0.1258 +2026-04-09 01:35:48.566030: Pseudo dice [0.3964, 0.809, 0.767, 0.393, 0.4728, 0.3, 0.7178] +2026-04-09 01:35:48.568420: Epoch time: 102.82 s +2026-04-09 01:35:49.673658: +2026-04-09 01:35:49.675775: Epoch 315 +2026-04-09 01:35:49.677369: Current learning rate: 
0.00711 +2026-04-09 01:37:31.966826: train_loss -0.1783 +2026-04-09 01:37:31.972754: val_loss -0.1135 +2026-04-09 01:37:31.974710: Pseudo dice [0.6465, 0.1294, 0.684, 0.0011, 0.3494, 0.5087, 0.7703] +2026-04-09 01:37:31.977264: Epoch time: 102.3 s +2026-04-09 01:37:33.090408: +2026-04-09 01:37:33.092591: Epoch 316 +2026-04-09 01:37:33.094149: Current learning rate: 0.0071 +2026-04-09 01:39:15.635055: train_loss -0.1564 +2026-04-09 01:39:15.640615: val_loss -0.0954 +2026-04-09 01:39:15.642462: Pseudo dice [0.384, 0.2956, 0.5801, 0.3125, 0.3614, 0.5325, 0.523] +2026-04-09 01:39:15.645281: Epoch time: 102.55 s +2026-04-09 01:39:16.758756: +2026-04-09 01:39:16.760574: Epoch 317 +2026-04-09 01:39:16.762444: Current learning rate: 0.0071 +2026-04-09 01:40:59.219514: train_loss -0.1505 +2026-04-09 01:40:59.227038: val_loss -0.1693 +2026-04-09 01:40:59.230273: Pseudo dice [0.2238, 0.3519, 0.7231, 0.3659, 0.4344, 0.8241, 0.5678] +2026-04-09 01:40:59.232764: Epoch time: 102.46 s +2026-04-09 01:41:00.328281: +2026-04-09 01:41:00.330333: Epoch 318 +2026-04-09 01:41:00.332077: Current learning rate: 0.00709 +2026-04-09 01:42:42.903855: train_loss -0.1662 +2026-04-09 01:42:42.929794: val_loss -0.1266 +2026-04-09 01:42:42.931860: Pseudo dice [0.3681, 0.755, 0.6128, 0.0111, 0.3588, 0.8032, 0.2024] +2026-04-09 01:42:42.934386: Epoch time: 102.58 s +2026-04-09 01:42:44.045585: +2026-04-09 01:42:44.047690: Epoch 319 +2026-04-09 01:42:44.051656: Current learning rate: 0.00708 +2026-04-09 01:44:26.432981: train_loss -0.1702 +2026-04-09 01:44:26.441120: val_loss -0.1035 +2026-04-09 01:44:26.443048: Pseudo dice [0.4924, 0.4089, 0.6124, 0.1447, 0.4956, 0.427, 0.6934] +2026-04-09 01:44:26.445336: Epoch time: 102.39 s +2026-04-09 01:44:27.551882: +2026-04-09 01:44:27.553587: Epoch 320 +2026-04-09 01:44:27.555149: Current learning rate: 0.00707 +2026-04-09 01:46:10.318632: train_loss -0.1792 +2026-04-09 01:46:10.330423: val_loss -0.1126 +2026-04-09 01:46:10.337829: Pseudo dice [0.5298, 
0.9211, 0.7656, 0.3528, 0.3268, 0.2315, 0.6937] +2026-04-09 01:46:10.340635: Epoch time: 102.77 s +2026-04-09 01:46:11.443360: +2026-04-09 01:46:11.445471: Epoch 321 +2026-04-09 01:46:11.447329: Current learning rate: 0.00706 +2026-04-09 01:47:54.171415: train_loss -0.1746 +2026-04-09 01:47:54.177253: val_loss -0.106 +2026-04-09 01:47:54.179679: Pseudo dice [0.3702, 0.3954, 0.4797, 0.5985, 0.3468, 0.6386, 0.6893] +2026-04-09 01:47:54.182118: Epoch time: 102.73 s +2026-04-09 01:47:55.290257: +2026-04-09 01:47:55.292518: Epoch 322 +2026-04-09 01:47:55.294351: Current learning rate: 0.00705 +2026-04-09 01:49:37.538207: train_loss -0.1687 +2026-04-09 01:49:37.544243: val_loss -0.1176 +2026-04-09 01:49:37.546974: Pseudo dice [0.7187, 0.5116, 0.6296, 0.3867, 0.405, 0.5382, 0.4388] +2026-04-09 01:49:37.549375: Epoch time: 102.25 s +2026-04-09 01:49:38.640964: +2026-04-09 01:49:38.643173: Epoch 323 +2026-04-09 01:49:38.644854: Current learning rate: 0.00704 +2026-04-09 01:51:20.773281: train_loss -0.1711 +2026-04-09 01:51:20.778397: val_loss -0.159 +2026-04-09 01:51:20.781708: Pseudo dice [0.667, 0.5471, 0.7073, 0.3722, 0.4666, 0.6636, 0.8572] +2026-04-09 01:51:20.784424: Epoch time: 102.14 s +2026-04-09 01:51:21.895858: +2026-04-09 01:51:21.897984: Epoch 324 +2026-04-09 01:51:21.899904: Current learning rate: 0.00703 +2026-04-09 01:53:04.248074: train_loss -0.1735 +2026-04-09 01:53:04.256375: val_loss -0.1147 +2026-04-09 01:53:04.258048: Pseudo dice [0.242, 0.7809, 0.5984, 0.5593, 0.5017, 0.4004, 0.7993] +2026-04-09 01:53:04.260112: Epoch time: 102.36 s +2026-04-09 01:53:06.623356: +2026-04-09 01:53:06.625773: Epoch 325 +2026-04-09 01:53:06.627188: Current learning rate: 0.00702 +2026-04-09 01:54:49.184986: train_loss -0.1528 +2026-04-09 01:54:49.192441: val_loss -0.1093 +2026-04-09 01:54:49.194115: Pseudo dice [0.1564, 0.2059, 0.6822, 0.3351, 0.2765, 0.6547, 0.7568] +2026-04-09 01:54:49.196178: Epoch time: 102.56 s +2026-04-09 01:54:50.306521: +2026-04-09 
01:54:50.308096: Epoch 326 +2026-04-09 01:54:50.309542: Current learning rate: 0.00701 +2026-04-09 01:56:33.437661: train_loss -0.1788 +2026-04-09 01:56:33.444045: val_loss -0.1188 +2026-04-09 01:56:33.446277: Pseudo dice [0.6705, 0.2675, 0.6774, 0.3347, 0.3858, 0.7704, 0.7455] +2026-04-09 01:56:33.448244: Epoch time: 103.13 s +2026-04-09 01:56:34.541072: +2026-04-09 01:56:34.543818: Epoch 327 +2026-04-09 01:56:34.546559: Current learning rate: 0.007 +2026-04-09 01:58:17.203777: train_loss -0.1666 +2026-04-09 01:58:17.210102: val_loss -0.1353 +2026-04-09 01:58:17.212289: Pseudo dice [0.3784, 0.1302, 0.5852, 0.3613, 0.4004, 0.7918, 0.7652] +2026-04-09 01:58:17.214530: Epoch time: 102.67 s +2026-04-09 01:58:18.316021: +2026-04-09 01:58:18.320811: Epoch 328 +2026-04-09 01:58:18.322806: Current learning rate: 0.00699 +2026-04-09 02:00:00.967023: train_loss -0.173 +2026-04-09 02:00:00.973286: val_loss -0.1419 +2026-04-09 02:00:00.975378: Pseudo dice [0.6848, 0.3193, 0.7406, 0.2562, 0.4044, 0.7138, 0.7622] +2026-04-09 02:00:00.978384: Epoch time: 102.65 s +2026-04-09 02:00:02.079717: +2026-04-09 02:00:02.081681: Epoch 329 +2026-04-09 02:00:02.083779: Current learning rate: 0.00698 +2026-04-09 02:01:44.938425: train_loss -0.1682 +2026-04-09 02:01:44.945687: val_loss -0.1014 +2026-04-09 02:01:44.947611: Pseudo dice [0.6125, 0.6954, 0.7065, 0.1627, 0.3449, 0.6337, 0.5189] +2026-04-09 02:01:44.949789: Epoch time: 102.86 s +2026-04-09 02:01:46.083540: +2026-04-09 02:01:46.085339: Epoch 330 +2026-04-09 02:01:46.087583: Current learning rate: 0.00697 +2026-04-09 02:03:28.883132: train_loss -0.1689 +2026-04-09 02:03:28.888632: val_loss -0.1096 +2026-04-09 02:03:28.890295: Pseudo dice [0.582, 0.3461, 0.68, 0.1739, 0.1663, 0.406, 0.7777] +2026-04-09 02:03:28.891785: Epoch time: 102.8 s +2026-04-09 02:03:29.998599: +2026-04-09 02:03:30.000598: Epoch 331 +2026-04-09 02:03:30.002321: Current learning rate: 0.00696 +2026-04-09 02:05:12.320678: train_loss -0.1705 +2026-04-09 
02:05:12.326934: val_loss -0.0921 +2026-04-09 02:05:12.329416: Pseudo dice [0.3136, 0.5391, 0.5705, 0.2342, 0.1956, 0.4874, 0.603] +2026-04-09 02:05:12.331393: Epoch time: 102.33 s +2026-04-09 02:05:13.462892: +2026-04-09 02:05:13.464867: Epoch 332 +2026-04-09 02:05:13.466491: Current learning rate: 0.00696 +2026-04-09 02:06:56.934138: train_loss -0.1457 +2026-04-09 02:06:56.941817: val_loss -0.1493 +2026-04-09 02:06:56.944489: Pseudo dice [0.8067, 0.7848, 0.6908, 0.5549, 0.4488, 0.5971, 0.6212] +2026-04-09 02:06:56.947135: Epoch time: 103.47 s +2026-04-09 02:06:58.082193: +2026-04-09 02:06:58.084697: Epoch 333 +2026-04-09 02:06:58.087692: Current learning rate: 0.00695 +2026-04-09 02:08:40.368875: train_loss -0.1543 +2026-04-09 02:08:40.375051: val_loss -0.1428 +2026-04-09 02:08:40.377857: Pseudo dice [0.4852, 0.2303, 0.7058, 0.72, 0.5368, 0.6802, 0.5995] +2026-04-09 02:08:40.380560: Epoch time: 102.29 s +2026-04-09 02:08:41.473930: +2026-04-09 02:08:41.475972: Epoch 334 +2026-04-09 02:08:41.478531: Current learning rate: 0.00694 +2026-04-09 02:10:23.972469: train_loss -0.1606 +2026-04-09 02:10:23.980056: val_loss -0.1583 +2026-04-09 02:10:23.982246: Pseudo dice [0.317, 0.4217, 0.7963, 0.4774, 0.6393, 0.8322, 0.7851] +2026-04-09 02:10:23.984442: Epoch time: 102.5 s +2026-04-09 02:10:25.097697: +2026-04-09 02:10:25.100192: Epoch 335 +2026-04-09 02:10:25.102127: Current learning rate: 0.00693 +2026-04-09 02:12:08.079554: train_loss -0.1802 +2026-04-09 02:12:08.095645: val_loss -0.1604 +2026-04-09 02:12:08.099074: Pseudo dice [0.7241, 0.6724, 0.6116, 0.3824, 0.436, 0.5121, 0.8267] +2026-04-09 02:12:08.101454: Epoch time: 102.99 s +2026-04-09 02:12:08.103106: Yayy! 
New best EMA pseudo Dice: 0.534 +2026-04-09 02:12:11.079380: +2026-04-09 02:12:11.081099: Epoch 336 +2026-04-09 02:12:11.082671: Current learning rate: 0.00692 +2026-04-09 02:13:54.327351: train_loss -0.1668 +2026-04-09 02:13:54.335177: val_loss -0.1191 +2026-04-09 02:13:54.338007: Pseudo dice [0.5244, 0.8793, 0.5851, 0.3448, 0.6705, 0.1302, 0.3274] +2026-04-09 02:13:54.340685: Epoch time: 103.25 s +2026-04-09 02:13:55.460490: +2026-04-09 02:13:55.463414: Epoch 337 +2026-04-09 02:13:55.465041: Current learning rate: 0.00691 +2026-04-09 02:15:38.493479: train_loss -0.1486 +2026-04-09 02:15:38.500593: val_loss -0.1197 +2026-04-09 02:15:38.502971: Pseudo dice [0.5472, 0.7611, 0.7772, 0.4144, 0.2599, 0.4156, 0.7799] +2026-04-09 02:15:38.512411: Epoch time: 103.04 s +2026-04-09 02:15:39.639697: +2026-04-09 02:15:39.641929: Epoch 338 +2026-04-09 02:15:39.643720: Current learning rate: 0.0069 +2026-04-09 02:17:22.007573: train_loss -0.1621 +2026-04-09 02:17:22.013848: val_loss -0.1666 +2026-04-09 02:17:22.015553: Pseudo dice [0.4767, 0.5445, 0.717, 0.6952, 0.447, 0.6825, 0.8017] +2026-04-09 02:17:22.017843: Epoch time: 102.37 s +2026-04-09 02:17:22.019965: Yayy! 
New best EMA pseudo Dice: 0.5425 +2026-04-09 02:17:24.910770: +2026-04-09 02:17:24.912978: Epoch 339 +2026-04-09 02:17:24.914651: Current learning rate: 0.00689 +2026-04-09 02:19:07.969862: train_loss -0.1679 +2026-04-09 02:19:07.976104: val_loss -0.1189 +2026-04-09 02:19:07.978731: Pseudo dice [0.718, 0.5202, 0.6705, 0.3611, 0.3174, 0.3842, 0.6771] +2026-04-09 02:19:07.980531: Epoch time: 103.06 s +2026-04-09 02:19:09.115392: +2026-04-09 02:19:09.117127: Epoch 340 +2026-04-09 02:19:09.118641: Current learning rate: 0.00688 +2026-04-09 02:20:51.296945: train_loss -0.1723 +2026-04-09 02:20:51.309962: val_loss -0.1389 +2026-04-09 02:20:51.314343: Pseudo dice [0.7128, 0.1859, 0.678, 0.4032, 0.4619, 0.6811, 0.7858] +2026-04-09 02:20:51.316270: Epoch time: 102.18 s +2026-04-09 02:20:52.426588: +2026-04-09 02:20:52.429030: Epoch 341 +2026-04-09 02:20:52.431540: Current learning rate: 0.00687 +2026-04-09 02:22:35.237237: train_loss -0.1738 +2026-04-09 02:22:35.245890: val_loss -0.1328 +2026-04-09 02:22:35.249448: Pseudo dice [0.6598, 0.6678, 0.7165, 0.5208, 0.4056, 0.2692, 0.8344] +2026-04-09 02:22:35.252725: Epoch time: 102.81 s +2026-04-09 02:22:35.256603: Yayy! 
New best EMA pseudo Dice: 0.5462 +2026-04-09 02:22:38.117559: +2026-04-09 02:22:38.119414: Epoch 342 +2026-04-09 02:22:38.120884: Current learning rate: 0.00686 +2026-04-09 02:24:20.286428: train_loss -0.1612 +2026-04-09 02:24:20.309365: val_loss -0.147 +2026-04-09 02:24:20.313833: Pseudo dice [0.5123, 0.5393, 0.7257, 0.3279, 0.5516, 0.5984, 0.5647] +2026-04-09 02:24:20.316545: Epoch time: 102.17 s +2026-04-09 02:24:21.476495: +2026-04-09 02:24:21.479870: Epoch 343 +2026-04-09 02:24:21.482785: Current learning rate: 0.00685 +2026-04-09 02:26:04.301280: train_loss -0.1612 +2026-04-09 02:26:04.307950: val_loss -0.141 +2026-04-09 02:26:04.309948: Pseudo dice [0.5188, 0.8955, 0.4136, 0.0561, 0.5533, 0.5437, 0.5964] +2026-04-09 02:26:04.312025: Epoch time: 102.83 s +2026-04-09 02:26:06.603919: +2026-04-09 02:26:06.606625: Epoch 344 +2026-04-09 02:26:06.608524: Current learning rate: 0.00684 +2026-04-09 02:27:49.243571: train_loss -0.169 +2026-04-09 02:27:49.252599: val_loss -0.1333 +2026-04-09 02:27:49.255107: Pseudo dice [0.8088, 0.71, 0.7363, 0.3203, 0.6031, 0.6943, 0.7484] +2026-04-09 02:27:49.257285: Epoch time: 102.64 s +2026-04-09 02:27:49.259490: Yayy! 
New best EMA pseudo Dice: 0.5544 +2026-04-09 02:27:52.159374: +2026-04-09 02:27:52.161834: Epoch 345 +2026-04-09 02:27:52.163430: Current learning rate: 0.00683 +2026-04-09 02:29:34.753922: train_loss -0.1555 +2026-04-09 02:29:34.760615: val_loss -0.1117 +2026-04-09 02:29:34.763103: Pseudo dice [0.5286, 0.433, 0.5396, 0.3645, 0.1458, 0.5323, 0.5152] +2026-04-09 02:29:34.765283: Epoch time: 102.6 s +2026-04-09 02:29:35.925390: +2026-04-09 02:29:35.927346: Epoch 346 +2026-04-09 02:29:35.928963: Current learning rate: 0.00682 +2026-04-09 02:31:18.296349: train_loss -0.1656 +2026-04-09 02:31:18.303319: val_loss -0.143 +2026-04-09 02:31:18.307309: Pseudo dice [0.5192, 0.4107, 0.5594, 0.0219, 0.4013, 0.7256, 0.6725] +2026-04-09 02:31:18.312320: Epoch time: 102.37 s +2026-04-09 02:31:19.434112: +2026-04-09 02:31:19.436036: Epoch 347 +2026-04-09 02:31:19.438225: Current learning rate: 0.00681 +2026-04-09 02:33:02.312184: train_loss -0.1747 +2026-04-09 02:33:02.318204: val_loss -0.1257 +2026-04-09 02:33:02.320484: Pseudo dice [0.4997, 0.8424, 0.6637, 0.4822, 0.4752, 0.4397, 0.7337] +2026-04-09 02:33:02.322701: Epoch time: 102.88 s +2026-04-09 02:33:03.457557: +2026-04-09 02:33:03.463680: Epoch 348 +2026-04-09 02:33:03.465959: Current learning rate: 0.0068 +2026-04-09 02:34:46.609825: train_loss -0.1744 +2026-04-09 02:34:46.616872: val_loss -0.1308 +2026-04-09 02:34:46.619120: Pseudo dice [0.8533, 0.8741, 0.6833, 0.4325, 0.5083, 0.3879, 0.8361] +2026-04-09 02:34:46.622183: Epoch time: 103.16 s +2026-04-09 02:34:47.747689: +2026-04-09 02:34:47.749628: Epoch 349 +2026-04-09 02:34:47.751452: Current learning rate: 0.0068 +2026-04-09 02:36:30.569240: train_loss -0.1718 +2026-04-09 02:36:30.577160: val_loss -0.1452 +2026-04-09 02:36:30.580679: Pseudo dice [0.4382, 0.8974, 0.6879, 0.2323, 0.4885, 0.455, 0.6834] +2026-04-09 02:36:30.583020: Epoch time: 102.82 s +2026-04-09 02:36:33.516975: +2026-04-09 02:36:33.519631: Epoch 350 +2026-04-09 02:36:33.521215: Current learning rate: 
0.00679 +2026-04-09 02:38:16.709327: train_loss -0.1665 +2026-04-09 02:38:16.715549: val_loss -0.1152 +2026-04-09 02:38:16.717669: Pseudo dice [0.34, 0.5149, 0.6957, 0.5197, 0.4875, 0.556, 0.5691] +2026-04-09 02:38:16.719579: Epoch time: 103.2 s +2026-04-09 02:38:17.840967: +2026-04-09 02:38:17.842794: Epoch 351 +2026-04-09 02:38:17.844404: Current learning rate: 0.00678 +2026-04-09 02:40:01.012679: train_loss -0.1685 +2026-04-09 02:40:01.018638: val_loss -0.1524 +2026-04-09 02:40:01.020857: Pseudo dice [0.4953, 0.5301, 0.6788, 0.6886, 0.3691, 0.8049, 0.6447] +2026-04-09 02:40:01.023092: Epoch time: 103.17 s +2026-04-09 02:40:01.025798: Yayy! New best EMA pseudo Dice: 0.5552 +2026-04-09 02:40:03.967496: +2026-04-09 02:40:03.969401: Epoch 352 +2026-04-09 02:40:03.972090: Current learning rate: 0.00677 +2026-04-09 02:41:47.895016: train_loss -0.1612 +2026-04-09 02:41:47.901656: val_loss -0.1406 +2026-04-09 02:41:47.904975: Pseudo dice [0.2297, 0.3424, 0.7584, 0.7904, 0.523, 0.1312, 0.6641] +2026-04-09 02:41:47.908002: Epoch time: 103.93 s +2026-04-09 02:41:49.048683: +2026-04-09 02:41:49.050449: Epoch 353 +2026-04-09 02:41:49.052358: Current learning rate: 0.00676 +2026-04-09 02:43:31.642330: train_loss -0.1655 +2026-04-09 02:43:31.648868: val_loss -0.1306 +2026-04-09 02:43:31.651917: Pseudo dice [0.5501, 0.6349, 0.7435, 0.369, 0.3635, 0.8414, 0.7527] +2026-04-09 02:43:31.654596: Epoch time: 102.6 s +2026-04-09 02:43:32.771283: +2026-04-09 02:43:32.773474: Epoch 354 +2026-04-09 02:43:32.775173: Current learning rate: 0.00675 +2026-04-09 02:45:16.018480: train_loss -0.1758 +2026-04-09 02:45:16.026624: val_loss -0.1434 +2026-04-09 02:45:16.028689: Pseudo dice [0.3396, 0.8832, 0.6401, 0.3211, 0.4582, 0.6316, 0.7222] +2026-04-09 02:45:16.030683: Epoch time: 103.25 s +2026-04-09 02:45:16.033112: Yayy! 
New best EMA pseudo Dice: 0.5563 +2026-04-09 02:45:18.995608: +2026-04-09 02:45:18.997437: Epoch 355 +2026-04-09 02:45:18.999084: Current learning rate: 0.00674 +2026-04-09 02:47:01.787343: train_loss -0.1764 +2026-04-09 02:47:01.793339: val_loss -0.1358 +2026-04-09 02:47:01.795500: Pseudo dice [0.5547, 0.7644, 0.7464, 0.0809, 0.166, 0.3749, 0.7374] +2026-04-09 02:47:01.797453: Epoch time: 102.79 s +2026-04-09 02:47:02.902998: +2026-04-09 02:47:02.906868: Epoch 356 +2026-04-09 02:47:02.918067: Current learning rate: 0.00673 +2026-04-09 02:48:45.282478: train_loss -0.1598 +2026-04-09 02:48:45.304288: val_loss -0.1385 +2026-04-09 02:48:45.309210: Pseudo dice [0.558, 0.1725, 0.7469, 0.3795, 0.2181, 0.0855, 0.7008] +2026-04-09 02:48:45.311871: Epoch time: 102.38 s +2026-04-09 02:48:46.480230: +2026-04-09 02:48:46.484194: Epoch 357 +2026-04-09 02:48:46.487510: Current learning rate: 0.00672 +2026-04-09 02:50:29.432968: train_loss -0.1623 +2026-04-09 02:50:29.441265: val_loss -0.1123 +2026-04-09 02:50:29.444019: Pseudo dice [0.3702, 0.8265, 0.3886, 0.3238, 0.1981, 0.4886, 0.6086] +2026-04-09 02:50:29.447158: Epoch time: 102.96 s +2026-04-09 02:50:30.577975: +2026-04-09 02:50:30.581838: Epoch 358 +2026-04-09 02:50:30.583818: Current learning rate: 0.00671 +2026-04-09 02:52:13.577153: train_loss -0.1569 +2026-04-09 02:52:13.582989: val_loss -0.1263 +2026-04-09 02:52:13.584975: Pseudo dice [0.402, 0.916, 0.7759, 0.2831, 0.3936, 0.3506, 0.7409] +2026-04-09 02:52:13.586982: Epoch time: 103.0 s +2026-04-09 02:52:14.710340: +2026-04-09 02:52:14.712656: Epoch 359 +2026-04-09 02:52:14.714307: Current learning rate: 0.0067 +2026-04-09 02:53:58.607471: train_loss -0.1707 +2026-04-09 02:53:58.623879: val_loss -0.1209 +2026-04-09 02:53:58.627516: Pseudo dice [0.2798, 0.765, 0.6018, 0.2725, 0.4642, 0.4952, 0.7844] +2026-04-09 02:53:58.629750: Epoch time: 103.9 s +2026-04-09 02:53:59.749599: +2026-04-09 02:53:59.751848: Epoch 360 +2026-04-09 02:53:59.753851: Current learning rate: 
0.00669 +2026-04-09 02:55:42.988954: train_loss -0.1797 +2026-04-09 02:55:42.997089: val_loss -0.1547 +2026-04-09 02:55:42.999129: Pseudo dice [0.2019, 0.8131, 0.7734, 0.3498, 0.503, 0.2111, 0.8127] +2026-04-09 02:55:43.001770: Epoch time: 103.24 s +2026-04-09 02:55:44.124030: +2026-04-09 02:55:44.126081: Epoch 361 +2026-04-09 02:55:44.127995: Current learning rate: 0.00668 +2026-04-09 02:57:26.533927: train_loss -0.1785 +2026-04-09 02:57:26.540147: val_loss -0.1288 +2026-04-09 02:57:26.542033: Pseudo dice [0.4361, 0.8231, 0.793, 0.31, 0.3973, 0.4041, 0.6319] +2026-04-09 02:57:26.544682: Epoch time: 102.41 s +2026-04-09 02:57:27.637446: +2026-04-09 02:57:27.640147: Epoch 362 +2026-04-09 02:57:27.642296: Current learning rate: 0.00667 +2026-04-09 02:59:11.342934: train_loss -0.1707 +2026-04-09 02:59:11.350775: val_loss -0.1566 +2026-04-09 02:59:11.353414: Pseudo dice [0.517, 0.3251, 0.7403, 0.6067, 0.4717, 0.5632, 0.6784] +2026-04-09 02:59:11.355395: Epoch time: 103.71 s +2026-04-09 02:59:12.490763: +2026-04-09 02:59:12.493036: Epoch 363 +2026-04-09 02:59:12.494906: Current learning rate: 0.00666 +2026-04-09 03:00:55.008226: train_loss -0.1806 +2026-04-09 03:00:55.016174: val_loss -0.1239 +2026-04-09 03:00:55.018432: Pseudo dice [0.3424, 0.1657, 0.7357, 0.3527, 0.4737, 0.7766, 0.8131] +2026-04-09 03:00:55.020807: Epoch time: 102.52 s +2026-04-09 03:00:56.160675: +2026-04-09 03:00:56.162739: Epoch 364 +2026-04-09 03:00:56.164699: Current learning rate: 0.00665 +2026-04-09 03:02:39.216231: train_loss -0.1714 +2026-04-09 03:02:39.223055: val_loss -0.1183 +2026-04-09 03:02:39.225534: Pseudo dice [0.4622, 0.1443, 0.7311, 0.4382, 0.2578, 0.5167, 0.7269] +2026-04-09 03:02:39.228277: Epoch time: 103.06 s +2026-04-09 03:02:40.343207: +2026-04-09 03:02:40.345727: Epoch 365 +2026-04-09 03:02:40.348078: Current learning rate: 0.00665 +2026-04-09 03:04:23.267531: train_loss -0.1683 +2026-04-09 03:04:23.275769: val_loss -0.1312 +2026-04-09 03:04:23.277680: Pseudo dice [0.4649, 
0.4572, 0.6584, 0.2476, 0.4495, 0.8323, 0.5878] +2026-04-09 03:04:23.280372: Epoch time: 102.93 s +2026-04-09 03:04:24.427276: +2026-04-09 03:04:24.430361: Epoch 366 +2026-04-09 03:04:24.432681: Current learning rate: 0.00664 +2026-04-09 03:06:07.675233: train_loss -0.1727 +2026-04-09 03:06:07.683940: val_loss -0.1294 +2026-04-09 03:06:07.686278: Pseudo dice [0.728, 0.3744, 0.7703, 0.253, 0.4693, 0.6743, 0.6574] +2026-04-09 03:06:07.689006: Epoch time: 103.25 s +2026-04-09 03:06:08.824750: +2026-04-09 03:06:08.826956: Epoch 367 +2026-04-09 03:06:08.830647: Current learning rate: 0.00663 +2026-04-09 03:07:51.947989: train_loss -0.1752 +2026-04-09 03:07:51.953255: val_loss -0.0952 +2026-04-09 03:07:51.955063: Pseudo dice [0.4934, 0.8205, 0.5454, 0.4818, 0.4954, 0.4671, 0.734] +2026-04-09 03:07:51.957699: Epoch time: 103.13 s +2026-04-09 03:07:53.079588: +2026-04-09 03:07:53.081723: Epoch 368 +2026-04-09 03:07:53.084873: Current learning rate: 0.00662 +2026-04-09 03:09:35.600841: train_loss -0.1755 +2026-04-09 03:09:35.607001: val_loss -0.1601 +2026-04-09 03:09:35.609520: Pseudo dice [0.736, 0.7453, 0.6839, 0.722, 0.4418, 0.1375, 0.8421] +2026-04-09 03:09:35.612073: Epoch time: 102.52 s +2026-04-09 03:09:36.724714: +2026-04-09 03:09:36.727228: Epoch 369 +2026-04-09 03:09:36.729238: Current learning rate: 0.00661 +2026-04-09 03:11:19.651711: train_loss -0.1624 +2026-04-09 03:11:19.671910: val_loss -0.146 +2026-04-09 03:11:19.674497: Pseudo dice [0.5446, 0.8615, 0.7542, 0.4476, 0.3229, 0.3448, 0.7986] +2026-04-09 03:11:19.676797: Epoch time: 102.93 s +2026-04-09 03:11:20.836731: +2026-04-09 03:11:20.840928: Epoch 370 +2026-04-09 03:11:20.843128: Current learning rate: 0.0066 +2026-04-09 03:13:03.596898: train_loss -0.1649 +2026-04-09 03:13:03.602392: val_loss -0.1332 +2026-04-09 03:13:03.604672: Pseudo dice [0.5398, 0.7651, 0.5733, 0.3711, 0.4827, 0.6203, 0.7699] +2026-04-09 03:13:03.606727: Epoch time: 102.76 s +2026-04-09 03:13:04.731536: +2026-04-09 03:13:04.733392: 
Epoch 371 +2026-04-09 03:13:04.735224: Current learning rate: 0.00659 +2026-04-09 03:14:47.719035: train_loss -0.1692 +2026-04-09 03:14:47.728133: val_loss -0.1501 +2026-04-09 03:14:47.730758: Pseudo dice [0.6671, 0.8208, 0.6857, 0.4667, 0.5927, 0.6388, 0.5262] +2026-04-09 03:14:47.733995: Epoch time: 102.99 s +2026-04-09 03:14:47.736483: Yayy! New best EMA pseudo Dice: 0.5583 +2026-04-09 03:14:50.311792: +2026-04-09 03:14:50.313972: Epoch 372 +2026-04-09 03:14:50.315362: Current learning rate: 0.00658 +2026-04-09 03:16:33.785590: train_loss -0.1782 +2026-04-09 03:16:33.793149: val_loss -0.1334 +2026-04-09 03:16:33.796301: Pseudo dice [0.757, 0.8765, 0.7049, 0.2867, 0.3766, 0.5754, 0.7595] +2026-04-09 03:16:33.798989: Epoch time: 103.48 s +2026-04-09 03:16:33.801195: Yayy! New best EMA pseudo Dice: 0.5644 +2026-04-09 03:16:36.830144: +2026-04-09 03:16:36.831899: Epoch 373 +2026-04-09 03:16:36.833641: Current learning rate: 0.00657 +2026-04-09 03:18:19.557947: train_loss -0.1633 +2026-04-09 03:18:19.564490: val_loss -0.1083 +2026-04-09 03:18:19.566781: Pseudo dice [0.2382, 0.6855, 0.7326, 0.4638, 0.2992, 0.7432, 0.5141] +2026-04-09 03:18:19.569204: Epoch time: 102.73 s +2026-04-09 03:18:20.794661: +2026-04-09 03:18:20.796306: Epoch 374 +2026-04-09 03:18:20.797687: Current learning rate: 0.00656 +2026-04-09 03:20:02.989173: train_loss -0.167 +2026-04-09 03:20:02.997049: val_loss -0.0718 +2026-04-09 03:20:02.999103: Pseudo dice [0.3895, 0.5252, 0.5523, 0.3284, 0.1191, 0.4262, 0.6741] +2026-04-09 03:20:03.001880: Epoch time: 102.2 s +2026-04-09 03:20:04.129418: +2026-04-09 03:20:04.132917: Epoch 375 +2026-04-09 03:20:04.135412: Current learning rate: 0.00655 +2026-04-09 03:21:47.417001: train_loss -0.1521 +2026-04-09 03:21:47.421653: val_loss -0.1093 +2026-04-09 03:21:47.423434: Pseudo dice [0.1724, 0.7556, 0.3898, 0.3041, 0.4418, 0.8018, 0.7317] +2026-04-09 03:21:47.425218: Epoch time: 103.29 s +2026-04-09 03:21:48.553906: +2026-04-09 03:21:48.555934: Epoch 376 
+2026-04-09 03:21:48.557877: Current learning rate: 0.00654 +2026-04-09 03:23:31.714111: train_loss -0.1726 +2026-04-09 03:23:31.721423: val_loss -0.1089 +2026-04-09 03:23:31.723389: Pseudo dice [0.4031, 0.464, 0.5534, 0.4121, 0.6561, 0.7593, 0.8065] +2026-04-09 03:23:31.725500: Epoch time: 103.16 s +2026-04-09 03:23:32.847484: +2026-04-09 03:23:32.850953: Epoch 377 +2026-04-09 03:23:32.853797: Current learning rate: 0.00653 +2026-04-09 03:25:15.399117: train_loss -0.1662 +2026-04-09 03:25:15.406070: val_loss -0.1236 +2026-04-09 03:25:15.408466: Pseudo dice [0.7172, 0.7352, 0.7559, 0.1948, 0.1933, 0.6447, 0.6477] +2026-04-09 03:25:15.412705: Epoch time: 102.55 s +2026-04-09 03:25:16.542836: +2026-04-09 03:25:16.544714: Epoch 378 +2026-04-09 03:25:16.546635: Current learning rate: 0.00652 +2026-04-09 03:26:58.749782: train_loss -0.1636 +2026-04-09 03:26:58.757598: val_loss -0.1459 +2026-04-09 03:26:58.760171: Pseudo dice [0.6269, 0.9114, 0.7073, 0.1885, 0.5339, 0.6849, 0.7264] +2026-04-09 03:26:58.763473: Epoch time: 102.21 s +2026-04-09 03:26:59.871897: +2026-04-09 03:26:59.876010: Epoch 379 +2026-04-09 03:26:59.877557: Current learning rate: 0.00651 +2026-04-09 03:28:43.000736: train_loss -0.1691 +2026-04-09 03:28:43.005983: val_loss -0.1194 +2026-04-09 03:28:43.008085: Pseudo dice [0.6104, 0.8278, 0.5246, 0.2889, 0.4732, 0.8319, 0.6528] +2026-04-09 03:28:43.010050: Epoch time: 103.13 s +2026-04-09 03:28:44.164529: +2026-04-09 03:28:44.167468: Epoch 380 +2026-04-09 03:28:44.169862: Current learning rate: 0.0065 +2026-04-09 03:30:27.092646: train_loss -0.1628 +2026-04-09 03:30:27.099451: val_loss -0.1364 +2026-04-09 03:30:27.101465: Pseudo dice [0.7878, 0.653, 0.7101, 0.2232, 0.408, 0.4103, 0.5788] +2026-04-09 03:30:27.103319: Epoch time: 102.93 s +2026-04-09 03:30:29.312045: +2026-04-09 03:30:29.314221: Epoch 381 +2026-04-09 03:30:29.315784: Current learning rate: 0.00649 +2026-04-09 03:32:12.454581: train_loss -0.1572 +2026-04-09 03:32:12.461261: val_loss -0.1579 
+2026-04-09 03:32:12.463377: Pseudo dice [0.5767, 0.4813, 0.7827, 0.7741, 0.5674, 0.2699, 0.7655] +2026-04-09 03:32:12.465717: Epoch time: 103.15 s +2026-04-09 03:32:13.605604: +2026-04-09 03:32:13.608215: Epoch 382 +2026-04-09 03:32:13.610621: Current learning rate: 0.00648 +2026-04-09 03:33:56.198122: train_loss -0.1741 +2026-04-09 03:33:56.205504: val_loss -0.1551 +2026-04-09 03:33:56.207726: Pseudo dice [0.5826, 0.8414, 0.6856, 0.7266, 0.5541, 0.2928, 0.8455] +2026-04-09 03:33:56.210095: Epoch time: 102.6 s +2026-04-09 03:33:56.211932: Yayy! New best EMA pseudo Dice: 0.5713 +2026-04-09 03:33:59.041644: +2026-04-09 03:33:59.043976: Epoch 383 +2026-04-09 03:33:59.045318: Current learning rate: 0.00648 +2026-04-09 03:35:41.852943: train_loss -0.173 +2026-04-09 03:35:41.859294: val_loss -0.1443 +2026-04-09 03:35:41.861978: Pseudo dice [0.6564, 0.7151, 0.6326, 0.3869, 0.3018, 0.693, 0.7138] +2026-04-09 03:35:41.863842: Epoch time: 102.81 s +2026-04-09 03:35:41.867121: Yayy! New best EMA pseudo Dice: 0.5727 +2026-04-09 03:35:44.402048: +2026-04-09 03:35:44.404155: Epoch 384 +2026-04-09 03:35:44.405650: Current learning rate: 0.00647 +2026-04-09 03:37:27.215696: train_loss -0.1753 +2026-04-09 03:37:27.222486: val_loss -0.1481 +2026-04-09 03:37:27.225261: Pseudo dice [0.2478, 0.906, 0.7064, 0.4898, 0.5005, 0.6496, 0.6394] +2026-04-09 03:37:27.227761: Epoch time: 102.82 s +2026-04-09 03:37:27.230076: Yayy! New best EMA pseudo Dice: 0.5746 +2026-04-09 03:37:30.084379: +2026-04-09 03:37:30.086130: Epoch 385 +2026-04-09 03:37:30.087630: Current learning rate: 0.00646 +2026-04-09 03:39:12.643020: train_loss -0.173 +2026-04-09 03:39:12.650964: val_loss -0.1549 +2026-04-09 03:39:12.654136: Pseudo dice [0.518, 0.4081, 0.7758, 0.6899, 0.4764, 0.386, 0.8043] +2026-04-09 03:39:12.657470: Epoch time: 102.56 s +2026-04-09 03:39:12.659283: Yayy! 
New best EMA pseudo Dice: 0.5751 +2026-04-09 03:39:15.268803: +2026-04-09 03:39:15.270633: Epoch 386 +2026-04-09 03:39:15.272200: Current learning rate: 0.00645 +2026-04-09 03:40:57.468588: train_loss -0.164 +2026-04-09 03:40:57.478334: val_loss -0.1423 +2026-04-09 03:40:57.480919: Pseudo dice [0.3514, 0.5973, 0.7768, 0.7084, 0.3611, 0.5866, 0.7047] +2026-04-09 03:40:57.483504: Epoch time: 102.2 s +2026-04-09 03:40:57.485779: Yayy! New best EMA pseudo Dice: 0.576 +2026-04-09 03:41:00.310935: +2026-04-09 03:41:00.316078: Epoch 387 +2026-04-09 03:41:00.317802: Current learning rate: 0.00644 +2026-04-09 03:42:43.200897: train_loss -0.1671 +2026-04-09 03:42:43.207062: val_loss -0.1334 +2026-04-09 03:42:43.208748: Pseudo dice [0.1312, 0.681, 0.7651, 0.533, 0.1824, 0.9094, 0.5518] +2026-04-09 03:42:43.210585: Epoch time: 102.89 s +2026-04-09 03:42:44.445037: +2026-04-09 03:42:44.448448: Epoch 388 +2026-04-09 03:42:44.450628: Current learning rate: 0.00643 +2026-04-09 03:44:26.857466: train_loss -0.1714 +2026-04-09 03:44:26.864957: val_loss -0.1337 +2026-04-09 03:44:26.868057: Pseudo dice [0.3908, 0.3501, 0.5596, 0.1418, 0.4706, 0.5875, 0.6388] +2026-04-09 03:44:26.870763: Epoch time: 102.42 s +2026-04-09 03:44:28.025061: +2026-04-09 03:44:28.035523: Epoch 389 +2026-04-09 03:44:28.046957: Current learning rate: 0.00642 +2026-04-09 03:46:10.409244: train_loss -0.1712 +2026-04-09 03:46:10.415622: val_loss -0.1287 +2026-04-09 03:46:10.417660: Pseudo dice [0.5521, 0.5998, 0.6085, 0.3719, 0.5945, 0.2721, 0.7492] +2026-04-09 03:46:10.419704: Epoch time: 102.39 s +2026-04-09 03:46:11.581758: +2026-04-09 03:46:11.583856: Epoch 390 +2026-04-09 03:46:11.585836: Current learning rate: 0.00641 +2026-04-09 03:47:54.349645: train_loss -0.1713 +2026-04-09 03:47:54.356179: val_loss -0.0968 +2026-04-09 03:47:54.359819: Pseudo dice [0.6105, 0.6543, 0.7768, 0.2764, 0.4651, 0.6888, 0.7511] +2026-04-09 03:47:54.362606: Epoch time: 102.77 s +2026-04-09 03:47:55.486653: +2026-04-09 
03:47:55.488715: Epoch 391 +2026-04-09 03:47:55.491369: Current learning rate: 0.0064 +2026-04-09 03:49:37.832567: train_loss -0.174 +2026-04-09 03:49:37.839669: val_loss -0.1195 +2026-04-09 03:49:37.841936: Pseudo dice [0.5507, 0.8703, 0.7595, 0.4617, 0.25, 0.133, 0.8055] +2026-04-09 03:49:37.844025: Epoch time: 102.35 s +2026-04-09 03:49:38.986050: +2026-04-09 03:49:38.988122: Epoch 392 +2026-04-09 03:49:38.989718: Current learning rate: 0.00639 +2026-04-09 03:51:21.437623: train_loss -0.1751 +2026-04-09 03:51:21.442745: val_loss -0.1601 +2026-04-09 03:51:21.446049: Pseudo dice [0.5757, 0.784, 0.6659, 0.4041, 0.4137, 0.8106, 0.734] +2026-04-09 03:51:21.447731: Epoch time: 102.45 s +2026-04-09 03:51:22.602377: +2026-04-09 03:51:22.604213: Epoch 393 +2026-04-09 03:51:22.606102: Current learning rate: 0.00638 +2026-04-09 03:53:05.839401: train_loss -0.1795 +2026-04-09 03:53:05.845537: val_loss -0.1241 +2026-04-09 03:53:05.847930: Pseudo dice [0.5538, 0.4944, 0.5447, 0.405, 0.4048, 0.6141, 0.7505] +2026-04-09 03:53:05.850954: Epoch time: 103.24 s +2026-04-09 03:53:06.988267: +2026-04-09 03:53:06.990031: Epoch 394 +2026-04-09 03:53:06.991905: Current learning rate: 0.00637 +2026-04-09 03:54:49.505030: train_loss -0.1691 +2026-04-09 03:54:49.512829: val_loss -0.1467 +2026-04-09 03:54:49.516461: Pseudo dice [0.5543, 0.646, 0.7989, 0.5946, 0.4153, 0.2446, 0.692] +2026-04-09 03:54:49.524128: Epoch time: 102.52 s +2026-04-09 03:54:50.642746: +2026-04-09 03:54:50.644613: Epoch 395 +2026-04-09 03:54:50.646736: Current learning rate: 0.00636 +2026-04-09 03:56:33.708365: train_loss -0.1781 +2026-04-09 03:56:33.713693: val_loss -0.1368 +2026-04-09 03:56:33.716224: Pseudo dice [0.3897, 0.8294, 0.6751, 0.2096, 0.4201, 0.5903, 0.6253] +2026-04-09 03:56:33.718345: Epoch time: 103.07 s +2026-04-09 03:56:34.861098: +2026-04-09 03:56:34.863182: Epoch 396 +2026-04-09 03:56:34.864789: Current learning rate: 0.00635 +2026-04-09 03:58:16.875818: train_loss -0.1715 +2026-04-09 
03:58:16.884035: val_loss -0.1481 +2026-04-09 03:58:16.886487: Pseudo dice [0.6517, 0.2856, 0.4419, 0.522, 0.5573, 0.4585, 0.8009] +2026-04-09 03:58:16.888561: Epoch time: 102.02 s +2026-04-09 03:58:18.036208: +2026-04-09 03:58:18.038212: Epoch 397 +2026-04-09 03:58:18.040912: Current learning rate: 0.00634 +2026-04-09 03:59:59.997720: train_loss -0.1865 +2026-04-09 04:00:00.004258: val_loss -0.1274 +2026-04-09 04:00:00.007020: Pseudo dice [0.5224, 0.7848, 0.7591, 0.4033, 0.4066, 0.3771, 0.6192] +2026-04-09 04:00:00.009343: Epoch time: 101.96 s +2026-04-09 04:00:01.140004: +2026-04-09 04:00:01.141799: Epoch 398 +2026-04-09 04:00:01.143968: Current learning rate: 0.00633 +2026-04-09 04:01:45.021835: train_loss -0.1723 +2026-04-09 04:01:45.029052: val_loss -0.1584 +2026-04-09 04:01:45.031332: Pseudo dice [0.756, 0.9141, 0.6731, 0.779, 0.45, 0.7719, 0.7531] +2026-04-09 04:01:45.033494: Epoch time: 103.88 s +2026-04-09 04:01:46.187595: +2026-04-09 04:01:46.190588: Epoch 399 +2026-04-09 04:01:46.195860: Current learning rate: 0.00632 +2026-04-09 04:03:29.876691: train_loss -0.1784 +2026-04-09 04:03:29.882677: val_loss -0.1584 +2026-04-09 04:03:29.884963: Pseudo dice [0.6704, 0.26, 0.6742, 0.6261, 0.5629, 0.8056, 0.6504] +2026-04-09 04:03:29.887601: Epoch time: 103.69 s +2026-04-09 04:03:31.701641: Yayy! 
New best EMA pseudo Dice: 0.5779 +2026-04-09 04:03:34.533530: +2026-04-09 04:03:34.536169: Epoch 400 +2026-04-09 04:03:34.537697: Current learning rate: 0.00631 +2026-04-09 04:05:16.372167: train_loss -0.181 +2026-04-09 04:05:16.377590: val_loss -0.108 +2026-04-09 04:05:16.380307: Pseudo dice [0.7245, 0.8385, 0.6082, 0.2759, 0.5424, 0.1138, 0.7977] +2026-04-09 04:05:16.383045: Epoch time: 101.84 s +2026-04-09 04:05:17.520659: +2026-04-09 04:05:17.522507: Epoch 401 +2026-04-09 04:05:17.524386: Current learning rate: 0.0063 +2026-04-09 04:07:00.539545: train_loss -0.1909 +2026-04-09 04:07:00.545331: val_loss -0.1363 +2026-04-09 04:07:00.547768: Pseudo dice [0.4561, 0.8676, 0.7946, 0.2765, 0.4916, 0.2124, 0.4979] +2026-04-09 04:07:00.549486: Epoch time: 103.02 s +2026-04-09 04:07:01.695959: +2026-04-09 04:07:01.697991: Epoch 402 +2026-04-09 04:07:01.699621: Current learning rate: 0.0063 +2026-04-09 04:08:43.972473: train_loss -0.1886 +2026-04-09 04:08:43.978377: val_loss -0.1501 +2026-04-09 04:08:43.981516: Pseudo dice [0.1421, 0.759, 0.8186, 0.4405, 0.3092, 0.4881, 0.6429] +2026-04-09 04:08:43.984281: Epoch time: 102.28 s +2026-04-09 04:08:45.138371: +2026-04-09 04:08:45.140523: Epoch 403 +2026-04-09 04:08:45.142343: Current learning rate: 0.00629 +2026-04-09 04:10:27.374966: train_loss -0.1853 +2026-04-09 04:10:27.381626: val_loss -0.1301 +2026-04-09 04:10:27.384420: Pseudo dice [0.4626, 0.7167, 0.7433, 0.5899, 0.4335, 0.5062, 0.7346] +2026-04-09 04:10:27.387054: Epoch time: 102.24 s +2026-04-09 04:10:28.560848: +2026-04-09 04:10:28.562879: Epoch 404 +2026-04-09 04:10:28.567411: Current learning rate: 0.00628 +2026-04-09 04:12:11.688141: train_loss -0.1709 +2026-04-09 04:12:11.695226: val_loss -0.1312 +2026-04-09 04:12:11.698188: Pseudo dice [0.7389, 0.245, 0.7676, 0.279, 0.4252, 0.6907, 0.7087] +2026-04-09 04:12:11.706922: Epoch time: 103.13 s +2026-04-09 04:12:12.865289: +2026-04-09 04:12:12.869566: Epoch 405 +2026-04-09 04:12:12.871366: Current learning rate: 
0.00627 +2026-04-09 04:13:55.495205: train_loss -0.1762 +2026-04-09 04:13:55.501532: val_loss -0.1467 +2026-04-09 04:13:55.504892: Pseudo dice [0.3647, 0.2692, 0.618, 0.1974, 0.6331, 0.2737, 0.7931] +2026-04-09 04:13:55.508308: Epoch time: 102.63 s +2026-04-09 04:13:56.642172: +2026-04-09 04:13:56.644514: Epoch 406 +2026-04-09 04:13:56.646387: Current learning rate: 0.00626 +2026-04-09 04:15:38.709361: train_loss -0.1756 +2026-04-09 04:15:38.715902: val_loss -0.144 +2026-04-09 04:15:38.718135: Pseudo dice [0.5859, 0.3511, 0.7034, 0.321, 0.5222, 0.6486, 0.6622] +2026-04-09 04:15:38.721652: Epoch time: 102.07 s +2026-04-09 04:15:39.873260: +2026-04-09 04:15:39.874772: Epoch 407 +2026-04-09 04:15:39.876300: Current learning rate: 0.00625 +2026-04-09 04:17:22.259064: train_loss -0.1709 +2026-04-09 04:17:22.267231: val_loss -0.0863 +2026-04-09 04:17:22.272275: Pseudo dice [0.542, 0.4317, 0.3364, 0.2901, 0.606, 0.3981, 0.5143] +2026-04-09 04:17:22.278576: Epoch time: 102.39 s +2026-04-09 04:17:23.437879: +2026-04-09 04:17:23.439660: Epoch 408 +2026-04-09 04:17:23.441389: Current learning rate: 0.00624 +2026-04-09 04:19:05.995507: train_loss -0.174 +2026-04-09 04:19:06.001062: val_loss -0.1254 +2026-04-09 04:19:06.002596: Pseudo dice [0.4609, 0.4677, 0.6901, 0.2317, 0.3672, 0.3701, 0.415] +2026-04-09 04:19:06.005440: Epoch time: 102.56 s +2026-04-09 04:19:07.146436: +2026-04-09 04:19:07.148604: Epoch 409 +2026-04-09 04:19:07.151779: Current learning rate: 0.00623 +2026-04-09 04:20:49.911138: train_loss -0.17 +2026-04-09 04:20:49.919242: val_loss -0.132 +2026-04-09 04:20:49.922031: Pseudo dice [0.1672, 0.1298, 0.7435, 0.4442, 0.5555, 0.3474, 0.4235] +2026-04-09 04:20:49.924362: Epoch time: 102.77 s +2026-04-09 04:20:51.062222: +2026-04-09 04:20:51.064771: Epoch 410 +2026-04-09 04:20:51.066632: Current learning rate: 0.00622 +2026-04-09 04:22:34.066403: train_loss -0.1849 +2026-04-09 04:22:34.072001: val_loss -0.1038 +2026-04-09 04:22:34.074260: Pseudo dice [0.3795, 0.8572, 
0.7415, 0.4299, 0.3463, 0.5104, 0.2086] +2026-04-09 04:22:34.076502: Epoch time: 103.01 s +2026-04-09 04:22:35.139585: +2026-04-09 04:22:35.143476: Epoch 411 +2026-04-09 04:22:35.145439: Current learning rate: 0.00621 +2026-04-09 04:24:18.084733: train_loss -0.1758 +2026-04-09 04:24:18.093147: val_loss -0.1452 +2026-04-09 04:24:18.095647: Pseudo dice [0.6715, 0.3213, 0.6876, 0.4205, 0.2808, 0.4742, 0.7636] +2026-04-09 04:24:18.097970: Epoch time: 102.95 s +2026-04-09 04:24:19.175272: +2026-04-09 04:24:19.178639: Epoch 412 +2026-04-09 04:24:19.180729: Current learning rate: 0.0062 +2026-04-09 04:26:01.883236: train_loss -0.1773 +2026-04-09 04:26:01.889261: val_loss -0.1235 +2026-04-09 04:26:01.891467: Pseudo dice [0.6516, 0.8785, 0.7626, 0.707, 0.5767, 0.5096, 0.7187] +2026-04-09 04:26:01.894246: Epoch time: 102.71 s +2026-04-09 04:26:02.973318: +2026-04-09 04:26:02.975801: Epoch 413 +2026-04-09 04:26:02.977640: Current learning rate: 0.00619 +2026-04-09 04:27:45.273905: train_loss -0.1748 +2026-04-09 04:27:45.282214: val_loss -0.1115 +2026-04-09 04:27:45.284610: Pseudo dice [0.462, 0.8353, 0.6223, 0.3309, 0.4366, 0.2298, 0.7318] +2026-04-09 04:27:45.287602: Epoch time: 102.3 s +2026-04-09 04:27:46.351894: +2026-04-09 04:27:46.354429: Epoch 414 +2026-04-09 04:27:46.356957: Current learning rate: 0.00618 +2026-04-09 04:29:29.027150: train_loss -0.1884 +2026-04-09 04:29:29.034235: val_loss -0.1291 +2026-04-09 04:29:29.037055: Pseudo dice [0.2747, 0.8694, 0.7173, 0.7275, 0.5094, 0.5447, 0.7981] +2026-04-09 04:29:29.040354: Epoch time: 102.68 s +2026-04-09 04:29:30.122930: +2026-04-09 04:29:30.126637: Epoch 415 +2026-04-09 04:29:30.130381: Current learning rate: 0.00617 +2026-04-09 04:31:12.805146: train_loss -0.1792 +2026-04-09 04:31:12.812209: val_loss -0.1288 +2026-04-09 04:31:12.816159: Pseudo dice [0.4196, 0.6906, 0.7628, 0.0408, 0.5633, 0.8141, 0.7324] +2026-04-09 04:31:12.821052: Epoch time: 102.69 s +2026-04-09 04:31:13.904028: +2026-04-09 04:31:13.906505: Epoch 
416 +2026-04-09 04:31:13.909329: Current learning rate: 0.00616 +2026-04-09 04:32:56.765750: train_loss -0.1755 +2026-04-09 04:32:56.772388: val_loss -0.1491 +2026-04-09 04:32:56.774877: Pseudo dice [0.646, 0.1347, 0.574, 0.4439, 0.4883, 0.7864, 0.7331] +2026-04-09 04:32:56.777284: Epoch time: 102.86 s +2026-04-09 04:32:59.068781: +2026-04-09 04:32:59.070567: Epoch 417 +2026-04-09 04:32:59.072066: Current learning rate: 0.00615 +2026-04-09 04:34:41.663790: train_loss -0.1735 +2026-04-09 04:34:41.670285: val_loss -0.179 +2026-04-09 04:34:41.672877: Pseudo dice [0.5822, 0.8487, 0.7422, 0.6988, 0.5306, 0.7849, 0.7552] +2026-04-09 04:34:41.675105: Epoch time: 102.6 s +2026-04-09 04:34:42.751892: +2026-04-09 04:34:42.753936: Epoch 418 +2026-04-09 04:34:42.757616: Current learning rate: 0.00614 +2026-04-09 04:36:25.094235: train_loss -0.1734 +2026-04-09 04:36:25.099741: val_loss -0.1325 +2026-04-09 04:36:25.102844: Pseudo dice [0.6202, 0.3126, 0.7606, 0.3043, 0.5119, 0.4516, 0.7539] +2026-04-09 04:36:25.105215: Epoch time: 102.35 s +2026-04-09 04:36:26.181844: +2026-04-09 04:36:26.184197: Epoch 419 +2026-04-09 04:36:26.186838: Current learning rate: 0.00613 +2026-04-09 04:38:09.023383: train_loss -0.1739 +2026-04-09 04:38:09.030903: val_loss -0.1514 +2026-04-09 04:38:09.033442: Pseudo dice [0.207, 0.8028, 0.6302, 0.4792, 0.3988, 0.2862, 0.8326] +2026-04-09 04:38:09.035501: Epoch time: 102.84 s +2026-04-09 04:38:10.117374: +2026-04-09 04:38:10.119781: Epoch 420 +2026-04-09 04:38:10.121577: Current learning rate: 0.00612 +2026-04-09 04:39:52.758649: train_loss -0.1678 +2026-04-09 04:39:52.764955: val_loss -0.1205 +2026-04-09 04:39:52.766786: Pseudo dice [0.492, 0.727, 0.777, 0.4962, 0.3019, 0.6613, 0.512] +2026-04-09 04:39:52.769091: Epoch time: 102.64 s +2026-04-09 04:39:53.861054: +2026-04-09 04:39:53.863811: Epoch 421 +2026-04-09 04:39:53.865545: Current learning rate: 0.00612 +2026-04-09 04:41:36.423680: train_loss -0.17 +2026-04-09 04:41:36.429919: val_loss -0.1257 
+2026-04-09 04:41:36.431985: Pseudo dice [0.4329, 0.3975, 0.5824, 0.4099, 0.4036, 0.6855, 0.6915] +2026-04-09 04:41:36.434165: Epoch time: 102.57 s +2026-04-09 04:41:37.516526: +2026-04-09 04:41:37.520365: Epoch 422 +2026-04-09 04:41:37.522350: Current learning rate: 0.00611 +2026-04-09 04:43:20.114284: train_loss -0.1891 +2026-04-09 04:43:20.121532: val_loss -0.1339 +2026-04-09 04:43:20.123825: Pseudo dice [0.3628, 0.5186, 0.6229, 0.6993, 0.4816, 0.8583, 0.7168] +2026-04-09 04:43:20.128880: Epoch time: 102.6 s +2026-04-09 04:43:21.209600: +2026-04-09 04:43:21.211576: Epoch 423 +2026-04-09 04:43:21.213415: Current learning rate: 0.0061 +2026-04-09 04:45:03.415662: train_loss -0.187 +2026-04-09 04:45:03.421293: val_loss -0.1048 +2026-04-09 04:45:03.424333: Pseudo dice [0.4567, 0.8351, 0.6553, 0.4854, 0.6438, 0.5352, 0.5897] +2026-04-09 04:45:03.426560: Epoch time: 102.21 s +2026-04-09 04:45:04.501021: +2026-04-09 04:45:04.502712: Epoch 424 +2026-04-09 04:45:04.504151: Current learning rate: 0.00609 +2026-04-09 04:46:47.716477: train_loss -0.1809 +2026-04-09 04:46:47.723631: val_loss -0.1179 +2026-04-09 04:46:47.725781: Pseudo dice [0.5764, 0.7516, 0.7124, 0.5374, 0.3758, 0.502, 0.6968] +2026-04-09 04:46:47.728040: Epoch time: 103.22 s +2026-04-09 04:46:48.819911: +2026-04-09 04:46:48.821718: Epoch 425 +2026-04-09 04:46:48.823594: Current learning rate: 0.00608 +2026-04-09 04:48:31.094889: train_loss -0.1862 +2026-04-09 04:48:31.100151: val_loss -0.1612 +2026-04-09 04:48:31.102808: Pseudo dice [0.5976, 0.2736, 0.7773, 0.5836, 0.3106, 0.6044, 0.486] +2026-04-09 04:48:31.105323: Epoch time: 102.28 s +2026-04-09 04:48:32.178688: +2026-04-09 04:48:32.183391: Epoch 426 +2026-04-09 04:48:32.186032: Current learning rate: 0.00607 +2026-04-09 04:50:15.086769: train_loss -0.1799 +2026-04-09 04:50:15.093351: val_loss -0.1139 +2026-04-09 04:50:15.095432: Pseudo dice [0.6213, 0.6791, 0.6912, 0.2837, 0.3745, 0.5469, 0.1025] +2026-04-09 04:50:15.097798: Epoch time: 102.91 s 
+2026-04-09 04:50:16.172616: +2026-04-09 04:50:16.174824: Epoch 427 +2026-04-09 04:50:16.177689: Current learning rate: 0.00606 +2026-04-09 04:51:58.486394: train_loss -0.1756 +2026-04-09 04:51:58.494444: val_loss -0.145 +2026-04-09 04:51:58.496706: Pseudo dice [0.412, 0.4416, 0.7422, 0.5831, 0.5049, 0.6293, 0.834] +2026-04-09 04:51:58.499512: Epoch time: 102.32 s +2026-04-09 04:51:59.572780: +2026-04-09 04:51:59.574768: Epoch 428 +2026-04-09 04:51:59.577211: Current learning rate: 0.00605 +2026-04-09 04:53:42.073225: train_loss -0.1624 +2026-04-09 04:53:42.080363: val_loss -0.0104 +2026-04-09 04:53:42.082961: Pseudo dice [0.2367, 0.8017, 0.4076, 0.2373, 0.5851, 0.1537, 0.7905] +2026-04-09 04:53:42.085458: Epoch time: 102.5 s +2026-04-09 04:53:43.163594: +2026-04-09 04:53:43.165973: Epoch 429 +2026-04-09 04:53:43.168676: Current learning rate: 0.00604 +2026-04-09 04:55:26.222176: train_loss -0.1766 +2026-04-09 04:55:26.238261: val_loss -0.1478 +2026-04-09 04:55:26.240336: Pseudo dice [0.5614, 0.6538, 0.7463, 0.2777, 0.2948, 0.778, 0.6726] +2026-04-09 04:55:26.242666: Epoch time: 103.06 s +2026-04-09 04:55:27.368043: +2026-04-09 04:55:27.371492: Epoch 430 +2026-04-09 04:55:27.375318: Current learning rate: 0.00603 +2026-04-09 04:57:09.906188: train_loss -0.1796 +2026-04-09 04:57:09.912615: val_loss -0.1416 +2026-04-09 04:57:09.914298: Pseudo dice [0.1328, 0.6913, 0.6812, 0.6543, 0.4852, 0.7285, 0.7288] +2026-04-09 04:57:09.916201: Epoch time: 102.54 s +2026-04-09 04:57:11.007540: +2026-04-09 04:57:11.009815: Epoch 431 +2026-04-09 04:57:11.012008: Current learning rate: 0.00602 +2026-04-09 04:58:54.571360: train_loss -0.1751 +2026-04-09 04:58:54.578835: val_loss -0.1395 +2026-04-09 04:58:54.581380: Pseudo dice [0.516, 0.8652, 0.7374, 0.4305, 0.4191, 0.4744, 0.3915] +2026-04-09 04:58:54.584163: Epoch time: 103.57 s +2026-04-09 04:58:55.670633: +2026-04-09 04:58:55.672900: Epoch 432 +2026-04-09 04:58:55.676196: Current learning rate: 0.00601 +2026-04-09 
05:00:38.317547: train_loss -0.1753 +2026-04-09 05:00:38.324672: val_loss -0.1456 +2026-04-09 05:00:38.327336: Pseudo dice [0.2379, 0.2658, 0.7624, 0.4462, 0.2913, 0.6844, 0.5013] +2026-04-09 05:00:38.329431: Epoch time: 102.65 s +2026-04-09 05:00:39.405168: +2026-04-09 05:00:39.408635: Epoch 433 +2026-04-09 05:00:39.411755: Current learning rate: 0.006 +2026-04-09 05:02:21.902734: train_loss -0.1785 +2026-04-09 05:02:21.909615: val_loss -0.1345 +2026-04-09 05:02:21.912066: Pseudo dice [0.5085, 0.1721, 0.7586, 0.7228, 0.3498, 0.747, 0.8206] +2026-04-09 05:02:21.913844: Epoch time: 102.5 s +2026-04-09 05:02:22.986507: +2026-04-09 05:02:22.988530: Epoch 434 +2026-04-09 05:02:22.990006: Current learning rate: 0.00599 +2026-04-09 05:04:05.703928: train_loss -0.1752 +2026-04-09 05:04:05.709514: val_loss -0.0929 +2026-04-09 05:04:05.711730: Pseudo dice [0.4624, 0.846, 0.6077, 0.1698, 0.5976, 0.2276, 0.5429] +2026-04-09 05:04:05.713691: Epoch time: 102.72 s +2026-04-09 05:04:06.791008: +2026-04-09 05:04:06.793463: Epoch 435 +2026-04-09 05:04:06.795463: Current learning rate: 0.00598 +2026-04-09 05:05:48.946165: train_loss -0.1667 +2026-04-09 05:05:48.951505: val_loss -0.1636 +2026-04-09 05:05:48.954024: Pseudo dice [0.6319, 0.5387, 0.733, 0.4508, 0.3706, 0.6481, 0.6683] +2026-04-09 05:05:48.956013: Epoch time: 102.16 s +2026-04-09 05:05:50.007006: +2026-04-09 05:05:50.009116: Epoch 436 +2026-04-09 05:05:50.011327: Current learning rate: 0.00597 +2026-04-09 05:07:32.311660: train_loss -0.188 +2026-04-09 05:07:32.318348: val_loss -0.1371 +2026-04-09 05:07:32.320608: Pseudo dice [0.712, 0.9035, 0.7264, 0.3463, 0.444, 0.1409, 0.7021] +2026-04-09 05:07:32.323228: Epoch time: 102.31 s +2026-04-09 05:07:33.397817: +2026-04-09 05:07:33.400547: Epoch 437 +2026-04-09 05:07:33.405149: Current learning rate: 0.00596 +2026-04-09 05:09:17.038844: train_loss -0.1757 +2026-04-09 05:09:17.047149: val_loss -0.1211 +2026-04-09 05:09:17.050145: Pseudo dice [0.6518, 0.6562, 0.6201, 0.1825, 
0.2853, 0.3021, 0.7203] +2026-04-09 05:09:17.054289: Epoch time: 103.64 s +2026-04-09 05:09:18.163305: +2026-04-09 05:09:18.165851: Epoch 438 +2026-04-09 05:09:18.168076: Current learning rate: 0.00595 +2026-04-09 05:11:01.059463: train_loss -0.1742 +2026-04-09 05:11:01.067791: val_loss -0.1343 +2026-04-09 05:11:01.070130: Pseudo dice [0.4166, 0.5883, 0.6368, 0.1762, 0.4325, 0.7756, 0.8337] +2026-04-09 05:11:01.071551: Epoch time: 102.9 s +2026-04-09 05:11:02.137795: +2026-04-09 05:11:02.140399: Epoch 439 +2026-04-09 05:11:02.142595: Current learning rate: 0.00594 +2026-04-09 05:12:45.486738: train_loss -0.182 +2026-04-09 05:12:45.492213: val_loss -0.1219 +2026-04-09 05:12:45.494472: Pseudo dice [0.6471, 0.7402, 0.7251, 0.2257, 0.3845, 0.3485, 0.293] +2026-04-09 05:12:45.496497: Epoch time: 103.35 s +2026-04-09 05:12:46.581161: +2026-04-09 05:12:46.583035: Epoch 440 +2026-04-09 05:12:46.584560: Current learning rate: 0.00593 +2026-04-09 05:14:29.464900: train_loss -0.1731 +2026-04-09 05:14:29.470545: val_loss -0.1428 +2026-04-09 05:14:29.472558: Pseudo dice [0.2656, 0.7856, 0.7066, 0.302, 0.345, 0.6143, 0.6698] +2026-04-09 05:14:29.475132: Epoch time: 102.89 s +2026-04-09 05:14:30.545229: +2026-04-09 05:14:30.547282: Epoch 441 +2026-04-09 05:14:30.550392: Current learning rate: 0.00592 +2026-04-09 05:16:13.582990: train_loss -0.1671 +2026-04-09 05:16:13.589712: val_loss -0.1555 +2026-04-09 05:16:13.591818: Pseudo dice [0.258, 0.8164, 0.7629, 0.3373, 0.3885, 0.3492, 0.6761] +2026-04-09 05:16:13.594001: Epoch time: 103.04 s +2026-04-09 05:16:14.694172: +2026-04-09 05:16:14.696373: Epoch 442 +2026-04-09 05:16:14.698309: Current learning rate: 0.00592 +2026-04-09 05:17:57.226225: train_loss -0.1814 +2026-04-09 05:17:57.232522: val_loss -0.139 +2026-04-09 05:17:57.235049: Pseudo dice [0.3997, 0.4486, 0.7755, 0.4561, 0.4294, 0.8134, 0.76] +2026-04-09 05:17:57.237211: Epoch time: 102.54 s +2026-04-09 05:17:58.297337: +2026-04-09 05:17:58.299637: Epoch 443 +2026-04-09 
05:17:58.301534: Current learning rate: 0.00591 +2026-04-09 05:19:40.556670: train_loss -0.1766 +2026-04-09 05:19:40.562593: val_loss -0.1209 +2026-04-09 05:19:40.565008: Pseudo dice [0.1468, 0.1737, 0.6757, 0.3864, 0.6463, 0.4772, 0.6257] +2026-04-09 05:19:40.566947: Epoch time: 102.26 s +2026-04-09 05:19:41.630632: +2026-04-09 05:19:41.633025: Epoch 444 +2026-04-09 05:19:41.635072: Current learning rate: 0.0059 +2026-04-09 05:21:23.837663: train_loss -0.1672 +2026-04-09 05:21:23.842750: val_loss -0.1338 +2026-04-09 05:21:23.844838: Pseudo dice [0.4701, 0.6264, 0.5203, 0.4412, 0.3205, 0.8405, 0.8102] +2026-04-09 05:21:23.846911: Epoch time: 102.21 s +2026-04-09 05:21:24.908359: +2026-04-09 05:21:24.910250: Epoch 445 +2026-04-09 05:21:24.912358: Current learning rate: 0.00589 +2026-04-09 05:23:06.930647: train_loss -0.1801 +2026-04-09 05:23:06.938389: val_loss -0.1562 +2026-04-09 05:23:06.940512: Pseudo dice [0.6866, 0.8027, 0.5888, 0.3379, 0.4871, 0.4472, 0.7711] +2026-04-09 05:23:06.945525: Epoch time: 102.03 s +2026-04-09 05:23:08.010569: +2026-04-09 05:23:08.012950: Epoch 446 +2026-04-09 05:23:08.019064: Current learning rate: 0.00588 +2026-04-09 05:24:50.810136: train_loss -0.1661 +2026-04-09 05:24:50.816672: val_loss -0.1149 +2026-04-09 05:24:50.818489: Pseudo dice [0.2976, 0.8072, 0.6689, 0.6291, 0.4254, 0.5548, 0.7142] +2026-04-09 05:24:50.820206: Epoch time: 102.8 s +2026-04-09 05:24:51.889624: +2026-04-09 05:24:51.893246: Epoch 447 +2026-04-09 05:24:51.898388: Current learning rate: 0.00587 +2026-04-09 05:26:33.763323: train_loss -0.1751 +2026-04-09 05:26:33.769020: val_loss -0.1397 +2026-04-09 05:26:33.770948: Pseudo dice [0.2807, 0.2276, 0.7321, 0.5366, 0.408, 0.869, 0.6859] +2026-04-09 05:26:33.774772: Epoch time: 101.88 s +2026-04-09 05:26:34.851798: +2026-04-09 05:26:34.853456: Epoch 448 +2026-04-09 05:26:34.855333: Current learning rate: 0.00586 +2026-04-09 05:28:17.074524: train_loss -0.1841 +2026-04-09 05:28:17.083056: val_loss -0.1471 +2026-04-09 
05:28:17.085549: Pseudo dice [0.4935, 0.8771, 0.7173, 0.4178, 0.4482, 0.6601, 0.7641] +2026-04-09 05:28:17.090246: Epoch time: 102.23 s +2026-04-09 05:28:18.160202: +2026-04-09 05:28:18.162608: Epoch 449 +2026-04-09 05:28:18.164894: Current learning rate: 0.00585 +2026-04-09 05:30:00.310639: train_loss -0.1779 +2026-04-09 05:30:00.318013: val_loss -0.149 +2026-04-09 05:30:00.320158: Pseudo dice [0.3565, 0.8987, 0.7021, 0.6397, 0.4906, 0.6028, 0.7496] +2026-04-09 05:30:00.324683: Epoch time: 102.15 s +2026-04-09 05:30:02.830786: +2026-04-09 05:30:02.832570: Epoch 450 +2026-04-09 05:30:02.834092: Current learning rate: 0.00584 +2026-04-09 05:31:44.968437: train_loss -0.18 +2026-04-09 05:31:44.974624: val_loss -0.1539 +2026-04-09 05:31:44.976730: Pseudo dice [0.5797, 0.3499, 0.821, 0.5126, 0.1756, 0.5263, 0.7135] +2026-04-09 05:31:44.978918: Epoch time: 102.14 s +2026-04-09 05:31:46.048761: +2026-04-09 05:31:46.051490: Epoch 451 +2026-04-09 05:31:46.055472: Current learning rate: 0.00583 +2026-04-09 05:33:28.338402: train_loss -0.1836 +2026-04-09 05:33:28.347646: val_loss -0.1385 +2026-04-09 05:33:28.350995: Pseudo dice [0.1077, 0.4995, 0.6493, 0.1628, 0.43, 0.5582, 0.6918] +2026-04-09 05:33:28.353087: Epoch time: 102.29 s +2026-04-09 05:33:29.420857: +2026-04-09 05:33:29.422973: Epoch 452 +2026-04-09 05:33:29.424839: Current learning rate: 0.00582 +2026-04-09 05:35:12.307329: train_loss -0.1785 +2026-04-09 05:35:12.315212: val_loss -0.1302 +2026-04-09 05:35:12.317030: Pseudo dice [0.2572, 0.8582, 0.7312, 0.6339, 0.44, 0.6124, 0.784] +2026-04-09 05:35:12.319267: Epoch time: 102.89 s +2026-04-09 05:35:13.392603: +2026-04-09 05:35:13.394701: Epoch 453 +2026-04-09 05:35:13.396451: Current learning rate: 0.00581 +2026-04-09 05:36:55.761598: train_loss -0.1774 +2026-04-09 05:36:55.770624: val_loss -0.1233 +2026-04-09 05:36:55.772542: Pseudo dice [0.6422, 0.3749, 0.6091, 0.2124, 0.3112, 0.3852, 0.48] +2026-04-09 05:36:55.775291: Epoch time: 102.37 s +2026-04-09 
05:36:56.840476: +2026-04-09 05:36:56.842775: Epoch 454 +2026-04-09 05:36:56.844696: Current learning rate: 0.0058 +2026-04-09 05:38:39.590839: train_loss -0.1764 +2026-04-09 05:38:39.600673: val_loss -0.1504 +2026-04-09 05:38:39.603038: Pseudo dice [0.5194, 0.5268, 0.7984, 0.3446, 0.4268, 0.5232, 0.7243] +2026-04-09 05:38:39.606811: Epoch time: 102.75 s +2026-04-09 05:38:40.733436: +2026-04-09 05:38:40.735474: Epoch 455 +2026-04-09 05:38:40.737846: Current learning rate: 0.00579 +2026-04-09 05:40:22.850940: train_loss -0.1708 +2026-04-09 05:40:22.859853: val_loss -0.1182 +2026-04-09 05:40:22.862057: Pseudo dice [0.6536, 0.7186, 0.7327, 0.3194, 0.4953, 0.1469, 0.7265] +2026-04-09 05:40:22.864442: Epoch time: 102.12 s +2026-04-09 05:40:23.934282: +2026-04-09 05:40:23.935915: Epoch 456 +2026-04-09 05:40:23.937467: Current learning rate: 0.00578 +2026-04-09 05:42:06.605157: train_loss -0.1881 +2026-04-09 05:42:06.615191: val_loss -0.1319 +2026-04-09 05:42:06.617543: Pseudo dice [0.5463, 0.7052, 0.5937, 0.2066, 0.4372, 0.1577, 0.7567] +2026-04-09 05:42:06.620796: Epoch time: 102.67 s +2026-04-09 05:42:07.687063: +2026-04-09 05:42:07.688608: Epoch 457 +2026-04-09 05:42:07.690078: Current learning rate: 0.00577 +2026-04-09 05:43:51.908277: train_loss -0.1942 +2026-04-09 05:43:51.916995: val_loss -0.1744 +2026-04-09 05:43:51.919876: Pseudo dice [0.7611, 0.8578, 0.7003, 0.6583, 0.4341, 0.1528, 0.8271] +2026-04-09 05:43:51.922246: Epoch time: 104.22 s +2026-04-09 05:43:52.986444: +2026-04-09 05:43:52.988994: Epoch 458 +2026-04-09 05:43:52.991155: Current learning rate: 0.00576 +2026-04-09 05:45:35.728428: train_loss -0.1842 +2026-04-09 05:45:35.737137: val_loss -0.1533 +2026-04-09 05:45:35.739109: Pseudo dice [0.7482, 0.2391, 0.7431, 0.5819, 0.4186, 0.7865, 0.7657] +2026-04-09 05:45:35.740664: Epoch time: 102.75 s +2026-04-09 05:45:36.804796: +2026-04-09 05:45:36.806389: Epoch 459 +2026-04-09 05:45:36.808048: Current learning rate: 0.00575 +2026-04-09 05:47:19.557929: 
train_loss -0.1708 +2026-04-09 05:47:19.567326: val_loss -0.1618 +2026-04-09 05:47:19.570141: Pseudo dice [0.6495, 0.8836, 0.7519, 0.4836, 0.5373, 0.8279, 0.8367] +2026-04-09 05:47:19.572302: Epoch time: 102.76 s +2026-04-09 05:47:20.651166: +2026-04-09 05:47:20.652857: Epoch 460 +2026-04-09 05:47:20.657035: Current learning rate: 0.00574 +2026-04-09 05:49:03.521124: train_loss -0.1824 +2026-04-09 05:49:03.531677: val_loss -0.1561 +2026-04-09 05:49:03.533721: Pseudo dice [0.5017, 0.4256, 0.6717, 0.4703, 0.2349, 0.905, 0.7933] +2026-04-09 05:49:03.535433: Epoch time: 102.87 s +2026-04-09 05:49:04.635376: +2026-04-09 05:49:04.637019: Epoch 461 +2026-04-09 05:49:04.638846: Current learning rate: 0.00573 +2026-04-09 05:50:47.421648: train_loss -0.1867 +2026-04-09 05:50:47.429324: val_loss -0.1168 +2026-04-09 05:50:47.431264: Pseudo dice [0.4962, 0.7864, 0.6994, 0.0214, 0.3077, 0.5776, 0.7485] +2026-04-09 05:50:47.437262: Epoch time: 102.79 s +2026-04-09 05:50:48.550654: +2026-04-09 05:50:48.552562: Epoch 462 +2026-04-09 05:50:48.556597: Current learning rate: 0.00572 +2026-04-09 05:52:31.450178: train_loss -0.185 +2026-04-09 05:52:31.462946: val_loss -0.1117 +2026-04-09 05:52:31.465465: Pseudo dice [0.6527, 0.7769, 0.4054, 0.0495, 0.1396, 0.1501, 0.4903] +2026-04-09 05:52:31.467398: Epoch time: 102.9 s +2026-04-09 05:52:32.543663: +2026-04-09 05:52:32.545362: Epoch 463 +2026-04-09 05:52:32.547198: Current learning rate: 0.00571 +2026-04-09 05:54:14.966818: train_loss -0.1841 +2026-04-09 05:54:14.975765: val_loss -0.1423 +2026-04-09 05:54:14.978669: Pseudo dice [0.8214, 0.6281, 0.6469, 0.2035, 0.6526, 0.3908, 0.5372] +2026-04-09 05:54:14.981889: Epoch time: 102.43 s +2026-04-09 05:54:16.043612: +2026-04-09 05:54:16.045612: Epoch 464 +2026-04-09 05:54:16.048692: Current learning rate: 0.0057 +2026-04-09 05:55:58.348571: train_loss -0.1758 +2026-04-09 05:55:58.356051: val_loss -0.1468 +2026-04-09 05:55:58.358103: Pseudo dice [0.5926, 0.426, 0.7117, 0.4122, 0.4855, 0.7587, 
0.7818] +2026-04-09 05:55:58.360188: Epoch time: 102.31 s +2026-04-09 05:55:59.440326: +2026-04-09 05:55:59.442529: Epoch 465 +2026-04-09 05:55:59.444983: Current learning rate: 0.0057 +2026-04-09 05:57:42.112803: train_loss -0.1784 +2026-04-09 05:57:42.119832: val_loss -0.1418 +2026-04-09 05:57:42.121893: Pseudo dice [0.485, 0.5685, 0.7585, 0.3564, 0.4082, 0.7692, 0.8302] +2026-04-09 05:57:42.124287: Epoch time: 102.68 s +2026-04-09 05:57:43.188420: +2026-04-09 05:57:43.190827: Epoch 466 +2026-04-09 05:57:43.192643: Current learning rate: 0.00569 +2026-04-09 05:59:25.633672: train_loss -0.167 +2026-04-09 05:59:25.681324: val_loss -0.1437 +2026-04-09 05:59:25.683628: Pseudo dice [0.5568, 0.8043, 0.7734, 0.7241, 0.3549, 0.5432, 0.4747] +2026-04-09 05:59:25.686341: Epoch time: 102.45 s +2026-04-09 05:59:26.758567: +2026-04-09 05:59:26.760226: Epoch 467 +2026-04-09 05:59:26.762050: Current learning rate: 0.00568 +2026-04-09 06:01:09.608739: train_loss -0.1799 +2026-04-09 06:01:09.617573: val_loss -0.1385 +2026-04-09 06:01:09.619490: Pseudo dice [0.1962, 0.5183, 0.7853, 0.2878, 0.422, 0.5737, 0.7812] +2026-04-09 06:01:09.621625: Epoch time: 102.85 s +2026-04-09 06:01:10.693259: +2026-04-09 06:01:10.695133: Epoch 468 +2026-04-09 06:01:10.696889: Current learning rate: 0.00567 +2026-04-09 06:02:52.612217: train_loss -0.1844 +2026-04-09 06:02:52.620201: val_loss -0.1601 +2026-04-09 06:02:52.622625: Pseudo dice [0.5425, 0.5103, 0.5292, 0.7044, 0.4334, 0.6618, 0.8276] +2026-04-09 06:02:52.625242: Epoch time: 101.92 s +2026-04-09 06:02:53.694457: +2026-04-09 06:02:53.698731: Epoch 469 +2026-04-09 06:02:53.702486: Current learning rate: 0.00566 +2026-04-09 06:04:35.997939: train_loss -0.1833 +2026-04-09 06:04:36.008250: val_loss -0.1161 +2026-04-09 06:04:36.010364: Pseudo dice [0.5669, 0.8919, 0.7474, 0.5324, 0.5303, 0.7194, 0.8205] +2026-04-09 06:04:36.013144: Epoch time: 102.31 s +2026-04-09 06:04:37.083681: +2026-04-09 06:04:37.087220: Epoch 470 +2026-04-09 
06:04:37.089279: Current learning rate: 0.00565 +2026-04-09 06:06:20.162248: train_loss -0.1991 +2026-04-09 06:06:20.172063: val_loss -0.1594 +2026-04-09 06:06:20.174345: Pseudo dice [0.7779, 0.8118, 0.6859, 0.262, 0.5127, 0.7011, 0.7417] +2026-04-09 06:06:20.177059: Epoch time: 103.08 s +2026-04-09 06:06:20.179209: Yayy! New best EMA pseudo Dice: 0.5794 +2026-04-09 06:06:23.072236: +2026-04-09 06:06:23.073917: Epoch 471 +2026-04-09 06:06:23.075694: Current learning rate: 0.00564 +2026-04-09 06:08:06.686391: train_loss -0.198 +2026-04-09 06:08:06.694005: val_loss -0.1581 +2026-04-09 06:08:06.696274: Pseudo dice [0.389, 0.3108, 0.6875, 0.6716, 0.4255, 0.7742, 0.7874] +2026-04-09 06:08:06.698827: Epoch time: 103.62 s +2026-04-09 06:08:07.759611: +2026-04-09 06:08:07.764850: Epoch 472 +2026-04-09 06:08:07.767943: Current learning rate: 0.00563 +2026-04-09 06:09:50.570688: train_loss -0.1863 +2026-04-09 06:09:50.579849: val_loss -0.1373 +2026-04-09 06:09:50.581880: Pseudo dice [0.4083, 0.5431, 0.7348, 0.5718, 0.4413, 0.6674, 0.5929] +2026-04-09 06:09:50.586028: Epoch time: 102.81 s +2026-04-09 06:09:51.642187: +2026-04-09 06:09:51.645959: Epoch 473 +2026-04-09 06:09:51.649473: Current learning rate: 0.00562 +2026-04-09 06:11:34.364832: train_loss -0.1784 +2026-04-09 06:11:34.371258: val_loss -0.1378 +2026-04-09 06:11:34.374091: Pseudo dice [0.6683, 0.8055, 0.6314, 0.4079, 0.3346, 0.6992, 0.4915] +2026-04-09 06:11:34.376570: Epoch time: 102.73 s +2026-04-09 06:11:35.439110: +2026-04-09 06:11:35.442019: Epoch 474 +2026-04-09 06:11:35.444630: Current learning rate: 0.00561 +2026-04-09 06:13:18.810563: train_loss -0.1788 +2026-04-09 06:13:18.817686: val_loss -0.13 +2026-04-09 06:13:18.820061: Pseudo dice [0.6544, 0.6176, 0.815, 0.4035, 0.28, 0.5083, 0.5149] +2026-04-09 06:13:18.822451: Epoch time: 103.37 s +2026-04-09 06:13:19.869939: +2026-04-09 06:13:19.871954: Epoch 475 +2026-04-09 06:13:19.873752: Current learning rate: 0.0056 +2026-04-09 06:15:01.902136: train_loss 
-0.1989 +2026-04-09 06:15:01.908007: val_loss -0.1439 +2026-04-09 06:15:01.910597: Pseudo dice [0.5965, 0.9052, 0.7754, 0.3322, 0.2962, 0.2203, 0.6184] +2026-04-09 06:15:01.913901: Epoch time: 102.04 s +2026-04-09 06:15:02.969278: +2026-04-09 06:15:02.974584: Epoch 476 +2026-04-09 06:15:02.980913: Current learning rate: 0.00559 +2026-04-09 06:16:44.986223: train_loss -0.1915 +2026-04-09 06:16:44.993766: val_loss -0.1087 +2026-04-09 06:16:44.995880: Pseudo dice [0.6916, 0.7513, 0.6572, 0.0202, 0.2752, 0.2677, 0.7756] +2026-04-09 06:16:44.998147: Epoch time: 102.02 s +2026-04-09 06:16:46.061049: +2026-04-09 06:16:46.062642: Epoch 477 +2026-04-09 06:16:46.064780: Current learning rate: 0.00558 +2026-04-09 06:18:29.697234: train_loss -0.193 +2026-04-09 06:18:29.702098: val_loss -0.1316 +2026-04-09 06:18:29.703723: Pseudo dice [0.676, 0.9042, 0.7621, 0.4571, 0.4822, 0.4168, 0.8015] +2026-04-09 06:18:29.705212: Epoch time: 103.64 s +2026-04-09 06:18:30.787423: +2026-04-09 06:18:30.789257: Epoch 478 +2026-04-09 06:18:30.791858: Current learning rate: 0.00557 +2026-04-09 06:20:13.825722: train_loss -0.1967 +2026-04-09 06:20:13.831307: val_loss -0.1158 +2026-04-09 06:20:13.833506: Pseudo dice [0.3442, 0.8754, 0.4853, 0.7641, 0.3567, 0.476, 0.7193] +2026-04-09 06:20:13.836273: Epoch time: 103.04 s +2026-04-09 06:20:14.926336: +2026-04-09 06:20:14.927989: Epoch 479 +2026-04-09 06:20:14.930492: Current learning rate: 0.00556 +2026-04-09 06:21:57.551703: train_loss -0.1834 +2026-04-09 06:21:57.558101: val_loss -0.1225 +2026-04-09 06:21:57.559928: Pseudo dice [0.2309, 0.7115, 0.7184, 0.1282, 0.4639, 0.2405, 0.7532] +2026-04-09 06:21:57.562057: Epoch time: 102.63 s +2026-04-09 06:21:58.687495: +2026-04-09 06:21:58.690284: Epoch 480 +2026-04-09 06:21:58.692568: Current learning rate: 0.00555 +2026-04-09 06:23:40.978003: train_loss -0.1739 +2026-04-09 06:23:40.987561: val_loss -0.1232 +2026-04-09 06:23:40.989702: Pseudo dice [0.6334, 0.8155, 0.7027, 0.2623, 0.4129, 0.6495, 0.679] 
+2026-04-09 06:23:40.994128: Epoch time: 102.29 s +2026-04-09 06:23:42.089679: +2026-04-09 06:23:42.091463: Epoch 481 +2026-04-09 06:23:42.094725: Current learning rate: 0.00554 +2026-04-09 06:25:25.379447: train_loss -0.1833 +2026-04-09 06:25:25.385650: val_loss -0.1618 +2026-04-09 06:25:25.388403: Pseudo dice [0.4658, 0.8919, 0.7885, 0.4945, 0.4461, 0.2112, 0.7794] +2026-04-09 06:25:25.394467: Epoch time: 103.29 s +2026-04-09 06:25:26.505655: +2026-04-09 06:25:26.507598: Epoch 482 +2026-04-09 06:25:26.509071: Current learning rate: 0.00553 +2026-04-09 06:27:09.005430: train_loss -0.1737 +2026-04-09 06:27:09.010753: val_loss -0.1399 +2026-04-09 06:27:09.013942: Pseudo dice [0.4445, 0.8277, 0.7634, 0.5998, 0.5348, 0.3606, 0.6784] +2026-04-09 06:27:09.016037: Epoch time: 102.5 s +2026-04-09 06:27:10.106564: +2026-04-09 06:27:10.108117: Epoch 483 +2026-04-09 06:27:10.110066: Current learning rate: 0.00552 +2026-04-09 06:28:52.908025: train_loss -0.1695 +2026-04-09 06:28:52.914274: val_loss -0.1646 +2026-04-09 06:28:52.917017: Pseudo dice [0.7364, 0.4453, 0.8256, 0.6419, 0.4981, 0.8248, 0.8224] +2026-04-09 06:28:52.920001: Epoch time: 102.8 s +2026-04-09 06:28:52.923126: Yayy! 
New best EMA pseudo Dice: 0.5806 +2026-04-09 06:28:55.737660: +2026-04-09 06:28:55.739541: Epoch 484 +2026-04-09 06:28:55.741374: Current learning rate: 0.00551 +2026-04-09 06:30:38.344505: train_loss -0.1846 +2026-04-09 06:30:38.351464: val_loss -0.1516 +2026-04-09 06:30:38.353645: Pseudo dice [0.6485, 0.6968, 0.6142, 0.5429, 0.397, 0.4175, 0.5468] +2026-04-09 06:30:38.357050: Epoch time: 102.61 s +2026-04-09 06:30:39.441726: +2026-04-09 06:30:39.444459: Epoch 485 +2026-04-09 06:30:39.447001: Current learning rate: 0.0055 +2026-04-09 06:32:21.928370: train_loss -0.1685 +2026-04-09 06:32:21.933918: val_loss -0.152 +2026-04-09 06:32:21.936281: Pseudo dice [0.7274, 0.368, 0.7916, 0.4379, 0.4736, 0.3759, 0.8583] +2026-04-09 06:32:21.938374: Epoch time: 102.49 s +2026-04-09 06:32:23.068418: +2026-04-09 06:32:23.070831: Epoch 486 +2026-04-09 06:32:23.072724: Current learning rate: 0.00549 +2026-04-09 06:34:05.886904: train_loss -0.182 +2026-04-09 06:34:05.892838: val_loss -0.1276 +2026-04-09 06:34:05.894946: Pseudo dice [0.6294, 0.6057, 0.5864, 0.2299, 0.3531, 0.686, 0.4641] +2026-04-09 06:34:05.897482: Epoch time: 102.82 s +2026-04-09 06:34:06.999421: +2026-04-09 06:34:07.001126: Epoch 487 +2026-04-09 06:34:07.003725: Current learning rate: 0.00548 +2026-04-09 06:35:49.653455: train_loss -0.1841 +2026-04-09 06:35:49.659425: val_loss -0.1742 +2026-04-09 06:35:49.661414: Pseudo dice [0.6526, 0.3197, 0.7379, 0.5563, 0.4587, 0.8765, 0.8651] +2026-04-09 06:35:49.663637: Epoch time: 102.66 s +2026-04-09 06:35:50.746490: +2026-04-09 06:35:50.748308: Epoch 488 +2026-04-09 06:35:50.750450: Current learning rate: 0.00547 +2026-04-09 06:37:33.190815: train_loss -0.1899 +2026-04-09 06:37:33.196225: val_loss -0.1509 +2026-04-09 06:37:33.198216: Pseudo dice [0.7565, 0.7712, 0.7752, 0.6062, 0.5962, 0.4167, 0.2062] +2026-04-09 06:37:33.199976: Epoch time: 102.45 s +2026-04-09 06:37:34.268860: +2026-04-09 06:37:34.272913: Epoch 489 +2026-04-09 06:37:34.274368: Current learning rate: 
0.00546 +2026-04-09 06:39:16.635706: train_loss -0.1858 +2026-04-09 06:39:16.640687: val_loss -0.1495 +2026-04-09 06:39:16.642702: Pseudo dice [0.7358, 0.4702, 0.7267, 0.7934, 0.6151, 0.4732, 0.8054] +2026-04-09 06:39:16.644609: Epoch time: 102.37 s +2026-04-09 06:39:16.646547: Yayy! New best EMA pseudo Dice: 0.5867 +2026-04-09 06:39:19.185649: +2026-04-09 06:39:19.187430: Epoch 490 +2026-04-09 06:39:19.188921: Current learning rate: 0.00546 +2026-04-09 06:41:01.049041: train_loss -0.1915 +2026-04-09 06:41:01.058656: val_loss -0.1348 +2026-04-09 06:41:01.060836: Pseudo dice [0.6734, 0.7588, 0.7204, 0.128, 0.521, 0.733, 0.6729] +2026-04-09 06:41:01.062982: Epoch time: 101.87 s +2026-04-09 06:41:01.064909: Yayy! New best EMA pseudo Dice: 0.5881 +2026-04-09 06:41:03.837254: +2026-04-09 06:41:03.839218: Epoch 491 +2026-04-09 06:41:03.841092: Current learning rate: 0.00545 +2026-04-09 06:42:46.240333: train_loss -0.1817 +2026-04-09 06:42:46.247389: val_loss -0.1157 +2026-04-09 06:42:46.249557: Pseudo dice [0.2601, 0.3219, 0.5424, 0.5773, 0.1844, 0.4817, 0.7618] +2026-04-09 06:42:46.251350: Epoch time: 102.41 s +2026-04-09 06:42:47.336787: +2026-04-09 06:42:47.338801: Epoch 492 +2026-04-09 06:42:47.340577: Current learning rate: 0.00544 +2026-04-09 06:44:29.512932: train_loss -0.1835 +2026-04-09 06:44:29.519579: val_loss -0.1356 +2026-04-09 06:44:29.522343: Pseudo dice [0.7395, 0.4975, 0.7254, 0.1604, 0.4076, 0.4462, 0.6704] +2026-04-09 06:44:29.525666: Epoch time: 102.18 s +2026-04-09 06:44:30.613510: +2026-04-09 06:44:30.615510: Epoch 493 +2026-04-09 06:44:30.617503: Current learning rate: 0.00543 +2026-04-09 06:46:13.047384: train_loss -0.1853 +2026-04-09 06:46:13.053850: val_loss -0.0823 +2026-04-09 06:46:13.057567: Pseudo dice [0.648, 0.3635, 0.5947, 0.2819, 0.5423, 0.453, 0.7648] +2026-04-09 06:46:13.060817: Epoch time: 102.44 s +2026-04-09 06:46:14.150781: +2026-04-09 06:46:14.153966: Epoch 494 +2026-04-09 06:46:14.157159: Current learning rate: 0.00542 
+2026-04-09 06:47:55.935251: train_loss -0.1825 +2026-04-09 06:47:55.940948: val_loss -0.1339 +2026-04-09 06:47:55.944473: Pseudo dice [0.506, 0.5137, 0.8391, 0.4716, 0.4861, 0.7548, 0.6764] +2026-04-09 06:47:55.946563: Epoch time: 101.79 s +2026-04-09 06:47:57.042878: +2026-04-09 06:47:57.044978: Epoch 495 +2026-04-09 06:47:57.047306: Current learning rate: 0.00541 +2026-04-09 06:49:39.034878: train_loss -0.175 +2026-04-09 06:49:39.040263: val_loss -0.1588 +2026-04-09 06:49:39.042091: Pseudo dice [0.6949, 0.6524, 0.8358, 0.5903, 0.4315, 0.8756, 0.8224] +2026-04-09 06:49:39.043899: Epoch time: 102.0 s +2026-04-09 06:49:40.129002: +2026-04-09 06:49:40.131443: Epoch 496 +2026-04-09 06:49:40.133863: Current learning rate: 0.0054 +2026-04-09 06:51:23.537498: train_loss -0.1935 +2026-04-09 06:51:23.542713: val_loss -0.1345 +2026-04-09 06:51:23.544371: Pseudo dice [0.3097, 0.9166, 0.6567, 0.3656, 0.4753, 0.7104, 0.7774] +2026-04-09 06:51:23.546618: Epoch time: 103.41 s +2026-04-09 06:51:24.644716: +2026-04-09 06:51:24.646761: Epoch 497 +2026-04-09 06:51:24.648981: Current learning rate: 0.00539 +2026-04-09 06:53:06.963921: train_loss -0.1954 +2026-04-09 06:53:06.971997: val_loss -0.1551 +2026-04-09 06:53:06.973865: Pseudo dice [0.6436, 0.8237, 0.8098, 0.3365, 0.3109, 0.4916, 0.6878] +2026-04-09 06:53:06.976372: Epoch time: 102.32 s +2026-04-09 06:53:08.062226: +2026-04-09 06:53:08.064305: Epoch 498 +2026-04-09 06:53:08.066456: Current learning rate: 0.00538 +2026-04-09 06:54:50.685670: train_loss -0.1857 +2026-04-09 06:54:50.692700: val_loss -0.1609 +2026-04-09 06:54:50.695225: Pseudo dice [0.5349, 0.1838, 0.7025, 0.3895, 0.4498, 0.6211, 0.8765] +2026-04-09 06:54:50.697639: Epoch time: 102.63 s +2026-04-09 06:54:51.787333: +2026-04-09 06:54:51.789565: Epoch 499 +2026-04-09 06:54:51.791069: Current learning rate: 0.00537 +2026-04-09 06:56:34.907150: train_loss -0.1915 +2026-04-09 06:56:34.913965: val_loss -0.1275 +2026-04-09 06:56:34.916028: Pseudo dice [0.54, 0.8985, 
0.6961, 0.6567, 0.2779, 0.5986, 0.7898] +2026-04-09 06:56:34.917991: Epoch time: 103.12 s +2026-04-09 06:56:37.639904: +2026-04-09 06:56:37.642000: Epoch 500 +2026-04-09 06:56:37.643365: Current learning rate: 0.00536 +2026-04-09 06:58:19.782639: train_loss -0.1966 +2026-04-09 06:58:19.793353: val_loss -0.1367 +2026-04-09 06:58:19.806242: Pseudo dice [0.3655, 0.6663, 0.7234, 0.5293, 0.4384, 0.7728, 0.7165] +2026-04-09 06:58:19.808241: Epoch time: 102.15 s +2026-04-09 06:58:20.885627: +2026-04-09 06:58:20.887715: Epoch 501 +2026-04-09 06:58:20.889498: Current learning rate: 0.00535 +2026-04-09 07:00:03.663827: train_loss -0.1796 +2026-04-09 07:00:03.669836: val_loss -0.0947 +2026-04-09 07:00:03.671967: Pseudo dice [0.5631, 0.8404, 0.5901, 0.4589, 0.3992, 0.1112, 0.6402] +2026-04-09 07:00:03.674043: Epoch time: 102.78 s +2026-04-09 07:00:04.782283: +2026-04-09 07:00:04.784458: Epoch 502 +2026-04-09 07:00:04.786151: Current learning rate: 0.00534 +2026-04-09 07:01:46.908445: train_loss -0.1738 +2026-04-09 07:01:46.914341: val_loss -0.1216 +2026-04-09 07:01:46.916109: Pseudo dice [0.5837, 0.8901, 0.6694, 0.3382, 0.4107, 0.5147, 0.686] +2026-04-09 07:01:46.918200: Epoch time: 102.13 s +2026-04-09 07:01:47.990716: +2026-04-09 07:01:47.993369: Epoch 503 +2026-04-09 07:01:47.995207: Current learning rate: 0.00533 +2026-04-09 07:03:29.893241: train_loss -0.1824 +2026-04-09 07:03:29.899373: val_loss -0.1517 +2026-04-09 07:03:29.901497: Pseudo dice [0.2054, 0.5122, 0.7356, 0.1959, 0.4974, 0.3892, 0.7859] +2026-04-09 07:03:29.903731: Epoch time: 101.91 s +2026-04-09 07:03:31.000364: +2026-04-09 07:03:31.013124: Epoch 504 +2026-04-09 07:03:31.014760: Current learning rate: 0.00532 +2026-04-09 07:05:13.777484: train_loss -0.1748 +2026-04-09 07:05:13.783192: val_loss -0.1277 +2026-04-09 07:05:13.785393: Pseudo dice [0.1197, 0.8959, 0.6563, 0.4402, 0.3021, 0.4071, 0.6983] +2026-04-09 07:05:13.787233: Epoch time: 102.78 s +2026-04-09 07:05:14.870149: +2026-04-09 07:05:14.872146: 
Epoch 505 +2026-04-09 07:05:14.875548: Current learning rate: 0.00531 +2026-04-09 07:06:57.631783: train_loss -0.1838 +2026-04-09 07:06:57.639065: val_loss -0.1399 +2026-04-09 07:06:57.641181: Pseudo dice [0.6121, 0.6379, 0.6133, 0.7175, 0.4886, 0.238, 0.7594] +2026-04-09 07:06:57.643124: Epoch time: 102.76 s +2026-04-09 07:06:58.743288: +2026-04-09 07:06:58.745787: Epoch 506 +2026-04-09 07:06:58.747961: Current learning rate: 0.0053 +2026-04-09 07:08:40.381554: train_loss -0.1882 +2026-04-09 07:08:40.386979: val_loss -0.1167 +2026-04-09 07:08:40.388711: Pseudo dice [0.3637, 0.902, 0.7466, 0.5475, 0.4356, 0.3903, 0.7963] +2026-04-09 07:08:40.390483: Epoch time: 101.64 s +2026-04-09 07:08:41.488858: +2026-04-09 07:08:41.490337: Epoch 507 +2026-04-09 07:08:41.491902: Current learning rate: 0.00529 +2026-04-09 07:10:23.762156: train_loss -0.1945 +2026-04-09 07:10:23.767973: val_loss -0.1358 +2026-04-09 07:10:23.770408: Pseudo dice [0.4836, 0.8981, 0.6732, 0.4338, 0.2779, 0.2968, 0.5592] +2026-04-09 07:10:23.772020: Epoch time: 102.28 s +2026-04-09 07:10:24.877242: +2026-04-09 07:10:24.879100: Epoch 508 +2026-04-09 07:10:24.881216: Current learning rate: 0.00528 +2026-04-09 07:12:07.115676: train_loss -0.1821 +2026-04-09 07:12:07.120456: val_loss -0.159 +2026-04-09 07:12:07.122584: Pseudo dice [0.3526, 0.3819, 0.7722, 0.2146, 0.454, 0.1819, 0.6137] +2026-04-09 07:12:07.125578: Epoch time: 102.24 s +2026-04-09 07:12:08.218747: +2026-04-09 07:12:08.220449: Epoch 509 +2026-04-09 07:12:08.222518: Current learning rate: 0.00527 +2026-04-09 07:13:50.618955: train_loss -0.1695 +2026-04-09 07:13:50.624192: val_loss -0.1222 +2026-04-09 07:13:50.625905: Pseudo dice [0.1158, 0.7161, 0.7042, 0.1962, 0.3896, 0.3973, 0.3214] +2026-04-09 07:13:50.627372: Epoch time: 102.4 s +2026-04-09 07:13:51.710366: +2026-04-09 07:13:51.712592: Epoch 510 +2026-04-09 07:13:51.714652: Current learning rate: 0.00526 +2026-04-09 07:15:33.797756: train_loss -0.1924 +2026-04-09 07:15:33.804672: val_loss 
-0.1396 +2026-04-09 07:15:33.806591: Pseudo dice [0.6344, 0.5147, 0.7935, 0.4996, 0.3515, 0.3552, 0.6942] +2026-04-09 07:15:33.808697: Epoch time: 102.09 s +2026-04-09 07:15:34.889414: +2026-04-09 07:15:34.891842: Epoch 511 +2026-04-09 07:15:34.894024: Current learning rate: 0.00525 +2026-04-09 07:17:17.169130: train_loss -0.1912 +2026-04-09 07:17:17.176183: val_loss -0.1531 +2026-04-09 07:17:17.178040: Pseudo dice [0.6358, 0.8521, 0.79, 0.1805, 0.4784, 0.4316, 0.8088] +2026-04-09 07:17:17.179985: Epoch time: 102.28 s +2026-04-09 07:17:18.280617: +2026-04-09 07:17:18.282169: Epoch 512 +2026-04-09 07:17:18.283767: Current learning rate: 0.00524 +2026-04-09 07:19:02.098411: train_loss -0.1855 +2026-04-09 07:19:02.104374: val_loss -0.1426 +2026-04-09 07:19:02.106728: Pseudo dice [0.3859, 0.8437, 0.6972, 0.3799, 0.4352, 0.6051, 0.646] +2026-04-09 07:19:02.108491: Epoch time: 103.82 s +2026-04-09 07:19:03.218890: +2026-04-09 07:19:03.221133: Epoch 513 +2026-04-09 07:19:03.223207: Current learning rate: 0.00523 +2026-04-09 07:20:45.470078: train_loss -0.1858 +2026-04-09 07:20:45.475730: val_loss -0.1771 +2026-04-09 07:20:45.478930: Pseudo dice [0.506, 0.9179, 0.7767, 0.7119, 0.5504, 0.2084, 0.2828] +2026-04-09 07:20:45.480587: Epoch time: 102.25 s +2026-04-09 07:20:46.567120: +2026-04-09 07:20:46.568724: Epoch 514 +2026-04-09 07:20:46.571630: Current learning rate: 0.00522 +2026-04-09 07:22:28.954829: train_loss -0.1788 +2026-04-09 07:22:28.962733: val_loss -0.1325 +2026-04-09 07:22:28.965174: Pseudo dice [0.082, 0.7674, 0.7534, 0.4242, 0.234, 0.7541, 0.4846] +2026-04-09 07:22:28.967489: Epoch time: 102.39 s +2026-04-09 07:22:30.057690: +2026-04-09 07:22:30.060120: Epoch 515 +2026-04-09 07:22:30.062363: Current learning rate: 0.00521 +2026-04-09 07:24:12.385973: train_loss -0.1839 +2026-04-09 07:24:12.392985: val_loss -0.0982 +2026-04-09 07:24:12.395128: Pseudo dice [0.7272, 0.6481, 0.737, 0.2882, 0.5428, 0.3849, 0.4174] +2026-04-09 07:24:12.397010: Epoch time: 102.33 s 
+2026-04-09 07:24:14.638336: +2026-04-09 07:24:14.640807: Epoch 516 +2026-04-09 07:24:14.642179: Current learning rate: 0.0052 +2026-04-09 07:25:56.948486: train_loss -0.186 +2026-04-09 07:25:56.953807: val_loss -0.14 +2026-04-09 07:25:56.956394: Pseudo dice [0.5328, 0.8447, 0.7849, 0.464, 0.4752, 0.1599, 0.8024] +2026-04-09 07:25:56.958592: Epoch time: 102.31 s +2026-04-09 07:25:58.066413: +2026-04-09 07:25:58.068400: Epoch 517 +2026-04-09 07:25:58.070175: Current learning rate: 0.00519 +2026-04-09 07:27:40.607913: train_loss -0.1894 +2026-04-09 07:27:40.614783: val_loss -0.1294 +2026-04-09 07:27:40.616980: Pseudo dice [0.4687, 0.8076, 0.7956, 0.4487, 0.3704, 0.4481, 0.7615] +2026-04-09 07:27:40.619671: Epoch time: 102.54 s +2026-04-09 07:27:41.691053: +2026-04-09 07:27:41.692898: Epoch 518 +2026-04-09 07:27:41.694648: Current learning rate: 0.00518 +2026-04-09 07:29:24.591689: train_loss -0.194 +2026-04-09 07:29:24.597434: val_loss -0.1416 +2026-04-09 07:29:24.599860: Pseudo dice [0.1422, 0.8911, 0.7373, 0.504, 0.5644, 0.5049, 0.7343] +2026-04-09 07:29:24.601633: Epoch time: 102.9 s +2026-04-09 07:29:25.686918: +2026-04-09 07:29:25.688656: Epoch 519 +2026-04-09 07:29:25.690376: Current learning rate: 0.00518 +2026-04-09 07:31:08.521885: train_loss -0.1868 +2026-04-09 07:31:08.528627: val_loss -0.1639 +2026-04-09 07:31:08.531245: Pseudo dice [0.6556, 0.8809, 0.8072, 0.5642, 0.6386, 0.1924, 0.6707] +2026-04-09 07:31:08.534647: Epoch time: 102.84 s +2026-04-09 07:31:09.688768: +2026-04-09 07:31:09.691291: Epoch 520 +2026-04-09 07:31:09.694971: Current learning rate: 0.00517 +2026-04-09 07:32:52.757996: train_loss -0.1874 +2026-04-09 07:32:52.766411: val_loss -0.134 +2026-04-09 07:32:52.769852: Pseudo dice [0.4232, 0.8559, 0.7234, 0.5755, 0.3021, 0.3418, 0.7824] +2026-04-09 07:32:52.773094: Epoch time: 103.07 s +2026-04-09 07:32:53.879739: +2026-04-09 07:32:53.882245: Epoch 521 +2026-04-09 07:32:53.885133: Current learning rate: 0.00516 +2026-04-09 07:34:38.321665: 
train_loss -0.1831 +2026-04-09 07:34:38.329986: val_loss -0.1315 +2026-04-09 07:34:38.331836: Pseudo dice [0.5692, 0.2474, 0.6828, 0.5451, 0.5827, 0.6323, 0.5649] +2026-04-09 07:34:38.334015: Epoch time: 104.44 s +2026-04-09 07:34:39.462664: +2026-04-09 07:34:39.465563: Epoch 522 +2026-04-09 07:34:39.467169: Current learning rate: 0.00515 +2026-04-09 07:36:21.565957: train_loss -0.1875 +2026-04-09 07:36:21.572604: val_loss -0.1688 +2026-04-09 07:36:21.575284: Pseudo dice [0.6095, 0.8742, 0.7776, 0.5551, 0.4814, 0.8529, 0.7922] +2026-04-09 07:36:21.577463: Epoch time: 102.11 s +2026-04-09 07:36:22.700547: +2026-04-09 07:36:22.702727: Epoch 523 +2026-04-09 07:36:22.704791: Current learning rate: 0.00514 +2026-04-09 07:38:05.242508: train_loss -0.1871 +2026-04-09 07:38:05.252222: val_loss -0.1181 +2026-04-09 07:38:05.255202: Pseudo dice [0.1653, 0.5893, 0.7374, 0.5301, 0.4561, 0.5155, 0.3563] +2026-04-09 07:38:05.258517: Epoch time: 102.55 s +2026-04-09 07:38:06.349053: +2026-04-09 07:38:06.351762: Epoch 524 +2026-04-09 07:38:06.354372: Current learning rate: 0.00513 +2026-04-09 07:39:48.579333: train_loss -0.1845 +2026-04-09 07:39:48.585635: val_loss -0.1637 +2026-04-09 07:39:48.587465: Pseudo dice [0.2969, 0.3883, 0.7246, 0.5934, 0.5622, 0.5507, 0.7879] +2026-04-09 07:39:48.589297: Epoch time: 102.23 s +2026-04-09 07:39:49.682392: +2026-04-09 07:39:49.684054: Epoch 525 +2026-04-09 07:39:49.685611: Current learning rate: 0.00512 +2026-04-09 07:41:32.524750: train_loss -0.1875 +2026-04-09 07:41:32.530417: val_loss -0.1304 +2026-04-09 07:41:32.532252: Pseudo dice [0.4789, 0.6947, 0.7636, 0.3453, 0.2923, 0.6034, 0.6699] +2026-04-09 07:41:32.534245: Epoch time: 102.85 s +2026-04-09 07:41:33.610784: +2026-04-09 07:41:33.612739: Epoch 526 +2026-04-09 07:41:33.614974: Current learning rate: 0.00511 +2026-04-09 07:43:16.325588: train_loss -0.1936 +2026-04-09 07:43:16.331176: val_loss -0.1388 +2026-04-09 07:43:16.333208: Pseudo dice [0.594, 0.6871, 0.72, 0.1097, 0.5191, 
0.7855, 0.6875] +2026-04-09 07:43:16.335286: Epoch time: 102.72 s +2026-04-09 07:43:17.430242: +2026-04-09 07:43:17.432296: Epoch 527 +2026-04-09 07:43:17.434074: Current learning rate: 0.0051 +2026-04-09 07:44:59.310885: train_loss -0.1911 +2026-04-09 07:44:59.324612: val_loss -0.1576 +2026-04-09 07:44:59.327443: Pseudo dice [0.67, 0.4442, 0.7113, 0.5237, 0.6722, 0.3049, 0.2989] +2026-04-09 07:44:59.329302: Epoch time: 101.88 s +2026-04-09 07:45:00.427251: +2026-04-09 07:45:00.429383: Epoch 528 +2026-04-09 07:45:00.432091: Current learning rate: 0.00509 +2026-04-09 07:46:43.117235: train_loss -0.193 +2026-04-09 07:46:43.123431: val_loss -0.1571 +2026-04-09 07:46:43.125609: Pseudo dice [0.7493, 0.8285, 0.7803, 0.6777, 0.5213, 0.7001, 0.714] +2026-04-09 07:46:43.127470: Epoch time: 102.69 s +2026-04-09 07:46:44.227945: +2026-04-09 07:46:44.229682: Epoch 529 +2026-04-09 07:46:44.231370: Current learning rate: 0.00508 +2026-04-09 07:48:26.703488: train_loss -0.1963 +2026-04-09 07:48:26.708867: val_loss -0.1301 +2026-04-09 07:48:26.710922: Pseudo dice [0.3015, 0.84, 0.6998, 0.6196, 0.4275, 0.5887, 0.6366] +2026-04-09 07:48:26.713413: Epoch time: 102.48 s +2026-04-09 07:48:27.807553: +2026-04-09 07:48:27.809758: Epoch 530 +2026-04-09 07:48:27.811751: Current learning rate: 0.00507 +2026-04-09 07:50:09.796335: train_loss -0.1775 +2026-04-09 07:50:09.801853: val_loss -0.1131 +2026-04-09 07:50:09.803672: Pseudo dice [0.6086, 0.7111, 0.3972, 0.3587, 0.434, 0.2981, 0.2581] +2026-04-09 07:50:09.806659: Epoch time: 101.99 s +2026-04-09 07:50:10.908810: +2026-04-09 07:50:10.910717: Epoch 531 +2026-04-09 07:50:10.912647: Current learning rate: 0.00506 +2026-04-09 07:51:53.895723: train_loss -0.1892 +2026-04-09 07:51:53.902679: val_loss -0.1583 +2026-04-09 07:51:53.905169: Pseudo dice [0.3909, 0.6786, 0.6417, 0.6224, 0.4744, 0.5406, 0.7822] +2026-04-09 07:51:53.907329: Epoch time: 102.99 s +2026-04-09 07:51:54.995493: +2026-04-09 07:51:54.997434: Epoch 532 +2026-04-09 
07:51:54.999191: Current learning rate: 0.00505 +2026-04-09 07:53:37.219975: train_loss -0.1873 +2026-04-09 07:53:37.225302: val_loss -0.1256 +2026-04-09 07:53:37.227067: Pseudo dice [0.7751, 0.0952, 0.7132, 0.271, 0.4464, 0.5228, 0.5333] +2026-04-09 07:53:37.228827: Epoch time: 102.23 s +2026-04-09 07:53:38.303823: +2026-04-09 07:53:38.305365: Epoch 533 +2026-04-09 07:53:38.306978: Current learning rate: 0.00504 +2026-04-09 07:55:20.760888: train_loss -0.1817 +2026-04-09 07:55:20.767334: val_loss -0.1819 +2026-04-09 07:55:20.770726: Pseudo dice [0.5656, 0.8605, 0.8586, 0.6842, 0.6065, 0.8489, 0.7552] +2026-04-09 07:55:20.773311: Epoch time: 102.46 s +2026-04-09 07:55:21.848706: +2026-04-09 07:55:21.852194: Epoch 534 +2026-04-09 07:55:21.854518: Current learning rate: 0.00503 +2026-04-09 07:57:04.305166: train_loss -0.1892 +2026-04-09 07:57:04.312588: val_loss -0.0956 +2026-04-09 07:57:04.316226: Pseudo dice [0.5835, 0.8503, 0.6842, 0.2675, 0.4296, 0.4285, 0.1316] +2026-04-09 07:57:04.319267: Epoch time: 102.46 s +2026-04-09 07:57:05.390856: +2026-04-09 07:57:05.393800: Epoch 535 +2026-04-09 07:57:05.397019: Current learning rate: 0.00502 +2026-04-09 07:58:48.468209: train_loss -0.1876 +2026-04-09 07:58:48.474848: val_loss -0.1605 +2026-04-09 07:58:48.477361: Pseudo dice [0.4897, 0.3729, 0.7166, 0.5439, 0.6077, 0.7033, 0.8054] +2026-04-09 07:58:48.479406: Epoch time: 103.08 s +2026-04-09 07:58:49.553720: +2026-04-09 07:58:49.556303: Epoch 536 +2026-04-09 07:58:49.558483: Current learning rate: 0.00501 +2026-04-09 08:00:32.826874: train_loss -0.1917 +2026-04-09 08:00:32.835297: val_loss -0.1521 +2026-04-09 08:00:32.837202: Pseudo dice [0.3588, 0.8942, 0.7613, 0.4236, 0.4286, 0.8119, 0.3507] +2026-04-09 08:00:32.839605: Epoch time: 103.28 s +2026-04-09 08:00:33.963589: +2026-04-09 08:00:33.965730: Epoch 537 +2026-04-09 08:00:33.967707: Current learning rate: 0.005 +2026-04-09 08:02:17.025012: train_loss -0.185 +2026-04-09 08:02:17.031392: val_loss -0.1435 +2026-04-09 
08:02:17.033455: Pseudo dice [0.4362, 0.8889, 0.6133, 0.3706, 0.3759, 0.5285, 0.6142] +2026-04-09 08:02:17.035278: Epoch time: 103.06 s +2026-04-09 08:02:18.161235: +2026-04-09 08:02:18.163082: Epoch 538 +2026-04-09 08:02:18.164629: Current learning rate: 0.00499 +2026-04-09 08:04:01.146703: train_loss -0.1759 +2026-04-09 08:04:01.152295: val_loss -0.1418 +2026-04-09 08:04:01.157337: Pseudo dice [0.7311, 0.6553, 0.4704, 0.2917, 0.2838, 0.7207, 0.6742] +2026-04-09 08:04:01.160970: Epoch time: 102.99 s +2026-04-09 08:04:02.259295: +2026-04-09 08:04:02.261438: Epoch 539 +2026-04-09 08:04:02.264077: Current learning rate: 0.00498 +2026-04-09 08:05:44.646718: train_loss -0.2029 +2026-04-09 08:05:44.655193: val_loss -0.1902 +2026-04-09 08:05:44.657595: Pseudo dice [0.6526, 0.5237, 0.8251, 0.6446, 0.4931, 0.822, 0.6756] +2026-04-09 08:05:44.660566: Epoch time: 102.39 s +2026-04-09 08:05:45.776105: +2026-04-09 08:05:45.778531: Epoch 540 +2026-04-09 08:05:45.779921: Current learning rate: 0.00497 +2026-04-09 08:07:27.995746: train_loss -0.1818 +2026-04-09 08:07:28.001336: val_loss -0.1295 +2026-04-09 08:07:28.003097: Pseudo dice [0.1989, 0.8832, 0.5531, 0.3577, 0.5038, 0.6316, 0.6859] +2026-04-09 08:07:28.005205: Epoch time: 102.22 s +2026-04-09 08:07:29.081517: +2026-04-09 08:07:29.083251: Epoch 541 +2026-04-09 08:07:29.085313: Current learning rate: 0.00496 +2026-04-09 08:09:12.657223: train_loss -0.187 +2026-04-09 08:09:12.662740: val_loss -0.1653 +2026-04-09 08:09:12.664574: Pseudo dice [0.673, 0.4606, 0.7561, 0.2025, 0.2746, 0.6911, 0.8305] +2026-04-09 08:09:12.666521: Epoch time: 103.58 s +2026-04-09 08:09:13.767092: +2026-04-09 08:09:13.768892: Epoch 542 +2026-04-09 08:09:13.770517: Current learning rate: 0.00495 +2026-04-09 08:10:56.114803: train_loss -0.1926 +2026-04-09 08:10:56.121216: val_loss -0.1167 +2026-04-09 08:10:56.124054: Pseudo dice [0.7343, 0.8604, 0.7041, 0.2315, 0.4567, 0.6441, 0.7338] +2026-04-09 08:10:56.125797: Epoch time: 102.35 s +2026-04-09 
08:10:57.212701: +2026-04-09 08:10:57.214317: Epoch 543 +2026-04-09 08:10:57.217095: Current learning rate: 0.00494 +2026-04-09 08:12:39.563060: train_loss -0.2064 +2026-04-09 08:12:39.570321: val_loss -0.1688 +2026-04-09 08:12:39.573126: Pseudo dice [0.5211, 0.7576, 0.7077, 0.7626, 0.3277, 0.7515, 0.798] +2026-04-09 08:12:39.576012: Epoch time: 102.35 s +2026-04-09 08:12:40.720412: +2026-04-09 08:12:40.722563: Epoch 544 +2026-04-09 08:12:40.724966: Current learning rate: 0.00493 +2026-04-09 08:14:23.274730: train_loss -0.1958 +2026-04-09 08:14:23.284313: val_loss -0.1218 +2026-04-09 08:14:23.288396: Pseudo dice [0.2586, 0.5055, 0.6295, 0.1659, 0.5853, 0.052, 0.6497] +2026-04-09 08:14:23.292219: Epoch time: 102.56 s +2026-04-09 08:14:24.381378: +2026-04-09 08:14:24.383206: Epoch 545 +2026-04-09 08:14:24.384818: Current learning rate: 0.00492 +2026-04-09 08:16:06.990254: train_loss -0.1993 +2026-04-09 08:16:06.996419: val_loss -0.1581 +2026-04-09 08:16:06.999042: Pseudo dice [0.4391, 0.4968, 0.7747, 0.763, 0.6211, 0.8196, 0.7481] +2026-04-09 08:16:07.000749: Epoch time: 102.61 s +2026-04-09 08:16:08.078606: +2026-04-09 08:16:08.082311: Epoch 546 +2026-04-09 08:16:08.085162: Current learning rate: 0.00491 +2026-04-09 08:17:52.051783: train_loss -0.1958 +2026-04-09 08:17:52.060567: val_loss -0.1459 +2026-04-09 08:17:52.063589: Pseudo dice [0.6399, 0.5677, 0.6345, 0.5145, 0.5748, 0.6103, 0.807] +2026-04-09 08:17:52.065852: Epoch time: 103.98 s +2026-04-09 08:17:53.169297: +2026-04-09 08:17:53.172762: Epoch 547 +2026-04-09 08:17:53.175557: Current learning rate: 0.0049 +2026-04-09 08:19:35.821172: train_loss -0.2047 +2026-04-09 08:19:35.827212: val_loss -0.1336 +2026-04-09 08:19:35.829750: Pseudo dice [0.5731, 0.3205, 0.7107, 0.0658, 0.4211, 0.1998, 0.5634] +2026-04-09 08:19:35.831451: Epoch time: 102.65 s +2026-04-09 08:19:36.941569: +2026-04-09 08:19:36.943844: Epoch 548 +2026-04-09 08:19:36.945460: Current learning rate: 0.00489 +2026-04-09 08:21:19.678235: 
train_loss -0.1738 +2026-04-09 08:21:19.685354: val_loss -0.1525 +2026-04-09 08:21:19.687815: Pseudo dice [0.6177, 0.8927, 0.7549, 0.1971, 0.4927, 0.5298, 0.7296] +2026-04-09 08:21:19.690744: Epoch time: 102.74 s +2026-04-09 08:21:20.844834: +2026-04-09 08:21:20.846982: Epoch 549 +2026-04-09 08:21:20.848570: Current learning rate: 0.00488 +2026-04-09 08:23:04.276461: train_loss -0.2108 +2026-04-09 08:23:04.284391: val_loss -0.1372 +2026-04-09 08:23:04.288320: Pseudo dice [0.7287, 0.6015, 0.6254, 0.4909, 0.4256, 0.1899, 0.7121] +2026-04-09 08:23:04.290288: Epoch time: 103.44 s +2026-04-09 08:23:07.139950: +2026-04-09 08:23:07.142143: Epoch 550 +2026-04-09 08:23:07.144250: Current learning rate: 0.00487 +2026-04-09 08:24:49.298879: train_loss -0.175 +2026-04-09 08:24:49.304864: val_loss -0.1662 +2026-04-09 08:24:49.307157: Pseudo dice [0.5509, 0.7122, 0.7799, 0.6238, 0.6267, 0.8297, 0.8011] +2026-04-09 08:24:49.308967: Epoch time: 102.16 s +2026-04-09 08:24:50.382960: +2026-04-09 08:24:50.384858: Epoch 551 +2026-04-09 08:24:50.386900: Current learning rate: 0.00486 +2026-04-09 08:26:33.074757: train_loss -0.1701 +2026-04-09 08:26:33.080793: val_loss -0.1299 +2026-04-09 08:26:33.083153: Pseudo dice [0.5566, 0.9022, 0.6369, 0.4622, 0.4503, 0.3784, 0.7511] +2026-04-09 08:26:33.087197: Epoch time: 102.69 s +2026-04-09 08:26:34.179365: +2026-04-09 08:26:34.182671: Epoch 552 +2026-04-09 08:26:34.184699: Current learning rate: 0.00485 +2026-04-09 08:28:17.062981: train_loss -0.1997 +2026-04-09 08:28:17.071943: val_loss -0.1111 +2026-04-09 08:28:17.074490: Pseudo dice [0.4823, 0.6613, 0.6705, 0.5491, 0.3857, 0.9026, 0.1959] +2026-04-09 08:28:17.078609: Epoch time: 102.89 s +2026-04-09 08:28:18.169890: +2026-04-09 08:28:18.171544: Epoch 553 +2026-04-09 08:28:18.173946: Current learning rate: 0.00484 +2026-04-09 08:30:00.874240: train_loss -0.2037 +2026-04-09 08:30:00.879947: val_loss -0.1495 +2026-04-09 08:30:00.882316: Pseudo dice [0.7526, 0.4431, 0.7198, 0.4362, 0.6373, 
0.4666, 0.5411] +2026-04-09 08:30:00.884629: Epoch time: 102.71 s +2026-04-09 08:30:01.996286: +2026-04-09 08:30:02.010176: Epoch 554 +2026-04-09 08:30:02.012648: Current learning rate: 0.00484 +2026-04-09 08:31:45.209538: train_loss -0.1954 +2026-04-09 08:31:45.215962: val_loss -0.1427 +2026-04-09 08:31:45.218098: Pseudo dice [0.6543, 0.6464, 0.7802, 0.6275, 0.5814, 0.222, 0.825] +2026-04-09 08:31:45.219903: Epoch time: 103.22 s +2026-04-09 08:31:46.315141: +2026-04-09 08:31:46.316905: Epoch 555 +2026-04-09 08:31:46.319413: Current learning rate: 0.00483 +2026-04-09 08:33:28.905906: train_loss -0.1795 +2026-04-09 08:33:28.912601: val_loss -0.167 +2026-04-09 08:33:28.915057: Pseudo dice [0.75, 0.7371, 0.6985, 0.5312, 0.3694, 0.8771, 0.8257] +2026-04-09 08:33:28.918015: Epoch time: 102.59 s +2026-04-09 08:33:28.920950: Yayy! New best EMA pseudo Dice: 0.591 +2026-04-09 08:33:32.870581: +2026-04-09 08:33:32.874427: Epoch 556 +2026-04-09 08:33:32.876229: Current learning rate: 0.00482 +2026-04-09 08:35:15.768505: train_loss -0.1904 +2026-04-09 08:35:15.775654: val_loss -0.1417 +2026-04-09 08:35:15.777699: Pseudo dice [0.772, 0.7254, 0.6896, 0.4854, 0.3654, 0.6092, 0.8627] +2026-04-09 08:35:15.779610: Epoch time: 102.9 s +2026-04-09 08:35:15.781612: Yayy! 
New best EMA pseudo Dice: 0.5963 +2026-04-09 08:35:18.629700: +2026-04-09 08:35:18.631283: Epoch 557 +2026-04-09 08:35:18.632727: Current learning rate: 0.00481 +2026-04-09 08:37:01.178737: train_loss -0.2084 +2026-04-09 08:37:01.184929: val_loss -0.1502 +2026-04-09 08:37:01.187018: Pseudo dice [0.6309, 0.2621, 0.7483, 0.1571, 0.6081, 0.6997, 0.6206] +2026-04-09 08:37:01.189263: Epoch time: 102.55 s +2026-04-09 08:37:02.290616: +2026-04-09 08:37:02.292688: Epoch 558 +2026-04-09 08:37:02.294222: Current learning rate: 0.0048 +2026-04-09 08:38:45.195138: train_loss -0.1927 +2026-04-09 08:38:45.201174: val_loss -0.155 +2026-04-09 08:38:45.203407: Pseudo dice [0.6666, 0.6775, 0.8041, 0.7017, 0.4454, 0.6517, 0.8519] +2026-04-09 08:38:45.205420: Epoch time: 102.91 s +2026-04-09 08:38:45.208208: Yayy! New best EMA pseudo Dice: 0.5995 +2026-04-09 08:38:48.130289: +2026-04-09 08:38:48.131947: Epoch 559 +2026-04-09 08:38:48.133534: Current learning rate: 0.00479 +2026-04-09 08:40:30.708928: train_loss -0.1904 +2026-04-09 08:40:30.714980: val_loss -0.1268 +2026-04-09 08:40:30.717585: Pseudo dice [0.6962, 0.814, 0.5834, 0.2433, 0.5449, 0.0466, 0.5139] +2026-04-09 08:40:30.719510: Epoch time: 102.58 s +2026-04-09 08:40:31.837312: +2026-04-09 08:40:31.839095: Epoch 560 +2026-04-09 08:40:31.840767: Current learning rate: 0.00478 +2026-04-09 08:42:14.857264: train_loss -0.1983 +2026-04-09 08:42:14.864107: val_loss -0.154 +2026-04-09 08:42:14.866060: Pseudo dice [0.226, 0.1758, 0.6946, 0.4162, 0.4823, 0.5503, 0.5772] +2026-04-09 08:42:14.868010: Epoch time: 103.02 s +2026-04-09 08:42:15.968139: +2026-04-09 08:42:15.969806: Epoch 561 +2026-04-09 08:42:15.971355: Current learning rate: 0.00477 +2026-04-09 08:43:58.712089: train_loss -0.1832 +2026-04-09 08:43:58.720091: val_loss -0.1672 +2026-04-09 08:43:58.724006: Pseudo dice [0.7416, 0.6276, 0.6378, 0.696, 0.4768, 0.5491, 0.7888] +2026-04-09 08:43:58.727261: Epoch time: 102.75 s +2026-04-09 08:43:59.827708: +2026-04-09 
08:43:59.829666: Epoch 562 +2026-04-09 08:43:59.831041: Current learning rate: 0.00476 +2026-04-09 08:45:42.475065: train_loss -0.2019 +2026-04-09 08:45:42.480754: val_loss -0.1467 +2026-04-09 08:45:42.482729: Pseudo dice [0.6987, 0.8725, 0.7626, 0.6739, 0.5454, 0.8159, 0.718] +2026-04-09 08:45:42.485093: Epoch time: 102.65 s +2026-04-09 08:45:43.601101: +2026-04-09 08:45:43.603240: Epoch 563 +2026-04-09 08:45:43.604847: Current learning rate: 0.00475 +2026-04-09 08:47:26.418125: train_loss -0.2051 +2026-04-09 08:47:26.425897: val_loss -0.1452 +2026-04-09 08:47:26.427613: Pseudo dice [0.6534, 0.8114, 0.7127, 0.0942, 0.4093, 0.6569, 0.494] +2026-04-09 08:47:26.429596: Epoch time: 102.82 s +2026-04-09 08:47:27.524168: +2026-04-09 08:47:27.526387: Epoch 564 +2026-04-09 08:47:27.528101: Current learning rate: 0.00474 +2026-04-09 08:49:09.633299: train_loss -0.1881 +2026-04-09 08:49:09.640209: val_loss -0.1505 +2026-04-09 08:49:09.642792: Pseudo dice [0.5193, 0.8656, 0.6813, 0.7164, 0.5959, 0.7055, 0.7738] +2026-04-09 08:49:09.647230: Epoch time: 102.11 s +2026-04-09 08:49:09.649584: Yayy! New best EMA pseudo Dice: 0.6015 +2026-04-09 08:49:12.516799: +2026-04-09 08:49:12.519020: Epoch 565 +2026-04-09 08:49:12.520589: Current learning rate: 0.00473 +2026-04-09 08:50:54.877786: train_loss -0.1978 +2026-04-09 08:50:54.885087: val_loss -0.1285 +2026-04-09 08:50:54.887760: Pseudo dice [0.7419, 0.8017, 0.7472, 0.4635, 0.4018, 0.7173, 0.4777] +2026-04-09 08:50:54.889894: Epoch time: 102.36 s +2026-04-09 08:50:54.892008: Yayy! New best EMA pseudo Dice: 0.6035 +2026-04-09 08:50:57.692136: +2026-04-09 08:50:57.694207: Epoch 566 +2026-04-09 08:50:57.695926: Current learning rate: 0.00472 +2026-04-09 08:52:40.281714: train_loss -0.1917 +2026-04-09 08:52:40.287912: val_loss -0.1362 +2026-04-09 08:52:40.289730: Pseudo dice [0.6977, 0.8108, 0.7188, 0.2698, 0.3999, 0.7891, 0.6628] +2026-04-09 08:52:40.292117: Epoch time: 102.59 s +2026-04-09 08:52:40.294212: Yayy! 
New best EMA pseudo Dice: 0.6053 +2026-04-09 08:52:43.147614: +2026-04-09 08:52:43.149254: Epoch 567 +2026-04-09 08:52:43.150937: Current learning rate: 0.00471 +2026-04-09 08:54:25.982009: train_loss -0.1968 +2026-04-09 08:54:25.988361: val_loss -0.1337 +2026-04-09 08:54:25.990631: Pseudo dice [0.4363, 0.6191, 0.819, 0.1906, 0.5633, 0.6378, 0.7397] +2026-04-09 08:54:25.992935: Epoch time: 102.84 s +2026-04-09 08:54:27.074667: +2026-04-09 08:54:27.076704: Epoch 568 +2026-04-09 08:54:27.078584: Current learning rate: 0.0047 +2026-04-09 08:56:10.029424: train_loss -0.1831 +2026-04-09 08:56:10.046742: val_loss -0.1517 +2026-04-09 08:56:10.054974: Pseudo dice [0.4407, 0.1519, 0.7441, 0.6418, 0.4512, 0.8708, 0.7068] +2026-04-09 08:56:10.056831: Epoch time: 102.96 s +2026-04-09 08:56:11.150531: +2026-04-09 08:56:11.153598: Epoch 569 +2026-04-09 08:56:11.155170: Current learning rate: 0.00469 +2026-04-09 08:57:53.719428: train_loss -0.1929 +2026-04-09 08:57:53.725490: val_loss -0.1689 +2026-04-09 08:57:53.727458: Pseudo dice [0.6973, 0.7773, 0.7886, 0.7727, 0.4463, 0.0983, 0.8183] +2026-04-09 08:57:53.729530: Epoch time: 102.57 s +2026-04-09 08:57:54.844650: +2026-04-09 08:57:54.846332: Epoch 570 +2026-04-09 08:57:54.847991: Current learning rate: 0.00468 +2026-04-09 08:59:37.944962: train_loss -0.1837 +2026-04-09 08:59:37.950893: val_loss -0.1464 +2026-04-09 08:59:37.952957: Pseudo dice [0.5233, 0.8773, 0.8002, 0.3154, 0.4208, 0.5548, 0.5468] +2026-04-09 08:59:37.955546: Epoch time: 103.1 s +2026-04-09 08:59:39.038113: +2026-04-09 08:59:39.040098: Epoch 571 +2026-04-09 08:59:39.042746: Current learning rate: 0.00467 +2026-04-09 09:01:21.350141: train_loss -0.1866 +2026-04-09 09:01:21.356464: val_loss -0.1257 +2026-04-09 09:01:21.358185: Pseudo dice [0.7189, 0.7036, 0.4738, 0.1373, 0.3398, 0.294, 0.6153] +2026-04-09 09:01:21.359827: Epoch time: 102.32 s +2026-04-09 09:01:22.518677: +2026-04-09 09:01:22.526816: Epoch 572 +2026-04-09 09:01:22.530336: Current learning rate: 
0.00466 +2026-04-09 09:03:05.955012: train_loss -0.1943 +2026-04-09 09:03:05.962862: val_loss -0.1699 +2026-04-09 09:03:05.966837: Pseudo dice [0.579, 0.6796, 0.8004, 0.5967, 0.5511, 0.8247, 0.8313] +2026-04-09 09:03:05.969904: Epoch time: 103.44 s +2026-04-09 09:03:07.114716: +2026-04-09 09:03:07.116867: Epoch 573 +2026-04-09 09:03:07.118547: Current learning rate: 0.00465 +2026-04-09 09:04:50.082709: train_loss -0.1922 +2026-04-09 09:04:50.088565: val_loss -0.138 +2026-04-09 09:04:50.090652: Pseudo dice [0.7222, 0.2415, 0.5773, 0.6145, 0.5018, 0.2128, 0.6543] +2026-04-09 09:04:50.092706: Epoch time: 102.97 s +2026-04-09 09:04:51.235562: +2026-04-09 09:04:51.238854: Epoch 574 +2026-04-09 09:04:51.241410: Current learning rate: 0.00464 +2026-04-09 09:06:35.718236: train_loss -0.1789 +2026-04-09 09:06:35.723979: val_loss -0.1498 +2026-04-09 09:06:35.726735: Pseudo dice [0.6434, 0.4164, 0.7936, 0.2648, 0.5451, 0.3932, 0.7628] +2026-04-09 09:06:35.728437: Epoch time: 104.49 s +2026-04-09 09:06:36.871064: +2026-04-09 09:06:36.872796: Epoch 575 +2026-04-09 09:06:36.874474: Current learning rate: 0.00463 +2026-04-09 09:08:20.013152: train_loss -0.1809 +2026-04-09 09:08:20.019244: val_loss -0.1534 +2026-04-09 09:08:20.021552: Pseudo dice [0.8527, 0.7784, 0.8231, 0.4843, 0.3531, 0.7372, 0.6531] +2026-04-09 09:08:20.024018: Epoch time: 103.15 s +2026-04-09 09:08:21.207046: +2026-04-09 09:08:21.209136: Epoch 576 +2026-04-09 09:08:21.210756: Current learning rate: 0.00462 +2026-04-09 09:10:03.572099: train_loss -0.1851 +2026-04-09 09:10:03.578524: val_loss -0.1253 +2026-04-09 09:10:03.580737: Pseudo dice [0.6604, 0.7659, 0.7219, 0.5547, 0.2124, 0.6291, 0.6509] +2026-04-09 09:10:03.583170: Epoch time: 102.37 s +2026-04-09 09:10:04.715539: +2026-04-09 09:10:04.718477: Epoch 577 +2026-04-09 09:10:04.720248: Current learning rate: 0.00461 +2026-04-09 09:11:47.445185: train_loss -0.1805 +2026-04-09 09:11:47.452845: val_loss -0.164 +2026-04-09 09:11:47.455534: Pseudo dice [0.4859, 
0.8617, 0.7897, 0.668, 0.4339, 0.8519, 0.7405] +2026-04-09 09:11:47.458701: Epoch time: 102.73 s +2026-04-09 09:11:48.581606: +2026-04-09 09:11:48.585070: Epoch 578 +2026-04-09 09:11:48.587863: Current learning rate: 0.0046 +2026-04-09 09:13:31.760911: train_loss -0.2031 +2026-04-09 09:13:31.770011: val_loss -0.1489 +2026-04-09 09:13:31.772378: Pseudo dice [0.6777, 0.7701, 0.6731, 0.6253, 0.4174, 0.5797, 0.3989] +2026-04-09 09:13:31.774856: Epoch time: 103.18 s +2026-04-09 09:13:32.902289: +2026-04-09 09:13:32.904101: Epoch 579 +2026-04-09 09:13:32.906566: Current learning rate: 0.00459 +2026-04-09 09:15:15.818729: train_loss -0.1848 +2026-04-09 09:15:15.825469: val_loss -0.18 +2026-04-09 09:15:15.827809: Pseudo dice [0.7657, 0.8953, 0.7762, 0.5269, 0.4997, 0.7268, 0.6979] +2026-04-09 09:15:15.830060: Epoch time: 102.92 s +2026-04-09 09:15:15.833301: Yayy! New best EMA pseudo Dice: 0.6112 +2026-04-09 09:15:18.780879: +2026-04-09 09:15:18.782667: Epoch 580 +2026-04-09 09:15:18.784286: Current learning rate: 0.00458 +2026-04-09 09:17:01.423843: train_loss -0.1882 +2026-04-09 09:17:01.428955: val_loss -0.1334 +2026-04-09 09:17:01.431076: Pseudo dice [0.6253, 0.895, 0.63, 0.0838, 0.2503, 0.8742, 0.6304] +2026-04-09 09:17:01.432846: Epoch time: 102.65 s +2026-04-09 09:17:02.578874: +2026-04-09 09:17:02.581418: Epoch 581 +2026-04-09 09:17:02.583667: Current learning rate: 0.00457 +2026-04-09 09:18:45.316934: train_loss -0.1886 +2026-04-09 09:18:45.322217: val_loss -0.1181 +2026-04-09 09:18:45.324733: Pseudo dice [0.6194, 0.847, 0.7497, 0.5823, 0.5804, 0.4341, 0.6345] +2026-04-09 09:18:45.326836: Epoch time: 102.74 s +2026-04-09 09:18:46.494646: +2026-04-09 09:18:46.496475: Epoch 582 +2026-04-09 09:18:46.498069: Current learning rate: 0.00456 +2026-04-09 09:20:29.856497: train_loss -0.1871 +2026-04-09 09:20:29.862278: val_loss -0.1623 +2026-04-09 09:20:29.864837: Pseudo dice [0.625, 0.3315, 0.742, 0.4394, 0.4634, 0.8663, 0.8246] +2026-04-09 09:20:29.866688: Epoch time: 
103.36 s +2026-04-09 09:20:31.002948: +2026-04-09 09:20:31.005032: Epoch 583 +2026-04-09 09:20:31.007368: Current learning rate: 0.00455 +2026-04-09 09:22:13.657673: train_loss -0.1938 +2026-04-09 09:22:13.664289: val_loss -0.1701 +2026-04-09 09:22:13.666618: Pseudo dice [0.5145, 0.7439, 0.7555, 0.5182, 0.5265, 0.7885, 0.3985] +2026-04-09 09:22:13.670097: Epoch time: 102.66 s +2026-04-09 09:22:14.839508: +2026-04-09 09:22:14.842491: Epoch 584 +2026-04-09 09:22:14.845822: Current learning rate: 0.00454 +2026-04-09 09:23:57.399176: train_loss -0.192 +2026-04-09 09:23:57.408280: val_loss -0.1306 +2026-04-09 09:23:57.410743: Pseudo dice [0.7911, 0.7673, 0.6378, 0.221, 0.511, 0.8022, 0.8583] +2026-04-09 09:23:57.412692: Epoch time: 102.56 s +2026-04-09 09:23:57.415552: Yayy! New best EMA pseudo Dice: 0.6144 +2026-04-09 09:24:00.296145: +2026-04-09 09:24:00.297987: Epoch 585 +2026-04-09 09:24:00.299773: Current learning rate: 0.00453 +2026-04-09 09:25:43.175209: train_loss -0.1976 +2026-04-09 09:25:43.183460: val_loss -0.159 +2026-04-09 09:25:43.199014: Pseudo dice [0.7313, 0.2334, 0.7541, 0.3131, 0.402, 0.7543, 0.616] +2026-04-09 09:25:43.204979: Epoch time: 102.88 s +2026-04-09 09:25:44.311280: +2026-04-09 09:25:44.313684: Epoch 586 +2026-04-09 09:25:44.316581: Current learning rate: 0.00452 +2026-04-09 09:27:26.982767: train_loss -0.195 +2026-04-09 09:27:26.991363: val_loss -0.1607 +2026-04-09 09:27:26.993942: Pseudo dice [0.7981, 0.8675, 0.7706, 0.096, 0.7532, 0.3803, 0.5774] +2026-04-09 09:27:26.996593: Epoch time: 102.67 s +2026-04-09 09:27:28.105799: +2026-04-09 09:27:28.107531: Epoch 587 +2026-04-09 09:27:28.109423: Current learning rate: 0.00451 +2026-04-09 09:29:10.619303: train_loss -0.2033 +2026-04-09 09:29:10.628625: val_loss -0.1496 +2026-04-09 09:29:10.632379: Pseudo dice [0.8111, 0.6853, 0.778, 0.6273, 0.5816, 0.5438, 0.799] +2026-04-09 09:29:10.635290: Epoch time: 102.52 s +2026-04-09 09:29:10.638029: Yayy! 
New best EMA pseudo Dice: 0.6154 +2026-04-09 09:29:13.558896: +2026-04-09 09:29:13.561024: Epoch 588 +2026-04-09 09:29:13.562685: Current learning rate: 0.0045 +2026-04-09 09:30:56.068108: train_loss -0.1991 +2026-04-09 09:30:56.074382: val_loss -0.1179 +2026-04-09 09:30:56.077507: Pseudo dice [0.5382, 0.8836, 0.6459, 0.3326, 0.5236, 0.3315, 0.4587] +2026-04-09 09:30:56.079630: Epoch time: 102.51 s +2026-04-09 09:30:57.194550: +2026-04-09 09:30:57.196488: Epoch 589 +2026-04-09 09:30:57.198063: Current learning rate: 0.00449 +2026-04-09 09:32:40.325775: train_loss -0.2021 +2026-04-09 09:32:40.330885: val_loss -0.1436 +2026-04-09 09:32:40.332779: Pseudo dice [0.4557, 0.465, 0.6982, 0.3062, 0.3328, 0.8991, 0.7981] +2026-04-09 09:32:40.334641: Epoch time: 103.13 s +2026-04-09 09:32:41.435636: +2026-04-09 09:32:41.437527: Epoch 590 +2026-04-09 09:32:41.438982: Current learning rate: 0.00448 +2026-04-09 09:34:24.998141: train_loss -0.198 +2026-04-09 09:34:25.003285: val_loss -0.1204 +2026-04-09 09:34:25.005806: Pseudo dice [0.7551, 0.8747, 0.6909, 0.1684, 0.6184, 0.7504, 0.6664] +2026-04-09 09:34:25.008111: Epoch time: 103.57 s +2026-04-09 09:34:26.153848: +2026-04-09 09:34:26.156177: Epoch 591 +2026-04-09 09:34:26.158211: Current learning rate: 0.00447 +2026-04-09 09:36:09.057185: train_loss -0.2055 +2026-04-09 09:36:09.062184: val_loss -0.1344 +2026-04-09 09:36:09.064089: Pseudo dice [0.7084, 0.6924, 0.625, 0.4389, 0.3686, 0.8397, 0.6288] +2026-04-09 09:36:09.066144: Epoch time: 102.91 s +2026-04-09 09:36:10.182359: +2026-04-09 09:36:10.184062: Epoch 592 +2026-04-09 09:36:10.185569: Current learning rate: 0.00446 +2026-04-09 09:37:53.391752: train_loss -0.1991 +2026-04-09 09:37:53.401817: val_loss -0.1349 +2026-04-09 09:37:53.404101: Pseudo dice [0.618, 0.5369, 0.6734, 0.3573, 0.6202, 0.6329, 0.7779] +2026-04-09 09:37:53.406793: Epoch time: 103.21 s +2026-04-09 09:37:55.704128: +2026-04-09 09:37:55.706035: Epoch 593 +2026-04-09 09:37:55.707534: Current learning rate: 
0.00445 +2026-04-09 09:39:39.553269: train_loss -0.195 +2026-04-09 09:39:39.559467: val_loss -0.1627 +2026-04-09 09:39:39.561488: Pseudo dice [0.7369, 0.629, 0.8172, 0.5592, 0.5129, 0.7086, 0.2168] +2026-04-09 09:39:39.563698: Epoch time: 103.85 s +2026-04-09 09:39:40.666718: +2026-04-09 09:39:40.668827: Epoch 594 +2026-04-09 09:39:40.672612: Current learning rate: 0.00444 +2026-04-09 09:41:23.889236: train_loss -0.1878 +2026-04-09 09:41:23.897418: val_loss -0.1337 +2026-04-09 09:41:23.900578: Pseudo dice [0.1712, 0.6802, 0.7219, 0.0863, 0.4483, 0.8246, 0.6352] +2026-04-09 09:41:23.903530: Epoch time: 103.23 s +2026-04-09 09:41:25.023597: +2026-04-09 09:41:25.026459: Epoch 595 +2026-04-09 09:41:25.028894: Current learning rate: 0.00443 +2026-04-09 09:43:07.967391: train_loss -0.1989 +2026-04-09 09:43:07.973803: val_loss -0.1484 +2026-04-09 09:43:07.976058: Pseudo dice [0.2135, 0.4681, 0.8086, 0.2571, 0.5116, 0.5749, 0.7811] +2026-04-09 09:43:07.979017: Epoch time: 102.95 s +2026-04-09 09:43:09.101177: +2026-04-09 09:43:09.104209: Epoch 596 +2026-04-09 09:43:09.106282: Current learning rate: 0.00442 +2026-04-09 09:44:52.254143: train_loss -0.1934 +2026-04-09 09:44:52.259558: val_loss -0.1741 +2026-04-09 09:44:52.261428: Pseudo dice [0.5656, 0.9033, 0.7662, 0.427, 0.4704, 0.7075, 0.6947] +2026-04-09 09:44:52.264712: Epoch time: 103.16 s +2026-04-09 09:44:53.450199: +2026-04-09 09:44:53.451966: Epoch 597 +2026-04-09 09:44:53.453427: Current learning rate: 0.00441 +2026-04-09 09:46:36.694032: train_loss -0.197 +2026-04-09 09:46:36.699320: val_loss -0.1182 +2026-04-09 09:46:36.701328: Pseudo dice [0.5555, 0.1407, 0.5881, 0.3965, 0.2153, 0.1909, 0.7219] +2026-04-09 09:46:36.703536: Epoch time: 103.25 s +2026-04-09 09:46:37.844941: +2026-04-09 09:46:37.846741: Epoch 598 +2026-04-09 09:46:37.848553: Current learning rate: 0.0044 +2026-04-09 09:48:20.882128: train_loss -0.182 +2026-04-09 09:48:20.890589: val_loss -0.1578 +2026-04-09 09:48:20.892894: Pseudo dice [0.6558, 
0.7653, 0.7484, 0.6389, 0.6525, 0.6429, 0.7148] +2026-04-09 09:48:20.894945: Epoch time: 103.04 s +2026-04-09 09:48:22.055665: +2026-04-09 09:48:22.057944: Epoch 599 +2026-04-09 09:48:22.060366: Current learning rate: 0.00439 +2026-04-09 09:50:05.257618: train_loss -0.1883 +2026-04-09 09:50:05.273008: val_loss -0.1506 +2026-04-09 09:50:05.275404: Pseudo dice [0.8524, 0.7942, 0.7197, 0.4558, 0.3716, 0.5862, 0.6984] +2026-04-09 09:50:05.278035: Epoch time: 103.21 s +2026-04-09 09:50:08.119974: +2026-04-09 09:50:08.123017: Epoch 600 +2026-04-09 09:50:08.124841: Current learning rate: 0.00438 +2026-04-09 09:51:52.616852: train_loss -0.1919 +2026-04-09 09:51:52.622497: val_loss -0.2077 +2026-04-09 09:51:52.624696: Pseudo dice [0.755, 0.7017, 0.7508, 0.6046, 0.6017, 0.7962, 0.8226] +2026-04-09 09:51:52.626877: Epoch time: 104.5 s +2026-04-09 09:51:53.759079: +2026-04-09 09:51:53.761042: Epoch 601 +2026-04-09 09:51:53.764953: Current learning rate: 0.00437 +2026-04-09 09:53:36.855091: train_loss -0.1968 +2026-04-09 09:53:36.862954: val_loss -0.1474 +2026-04-09 09:53:36.865573: Pseudo dice [0.5005, 0.5926, 0.729, 0.3778, 0.4131, 0.5267, 0.7979] +2026-04-09 09:53:36.867703: Epoch time: 103.1 s +2026-04-09 09:53:37.993226: +2026-04-09 09:53:37.995508: Epoch 602 +2026-04-09 09:53:37.997495: Current learning rate: 0.00436 +2026-04-09 09:55:21.252360: train_loss -0.1977 +2026-04-09 09:55:21.262174: val_loss -0.1602 +2026-04-09 09:55:21.264359: Pseudo dice [0.6714, 0.6655, 0.8129, 0.4582, 0.464, 0.6463, 0.7771] +2026-04-09 09:55:21.266041: Epoch time: 103.26 s +2026-04-09 09:55:22.412826: +2026-04-09 09:55:22.414367: Epoch 603 +2026-04-09 09:55:22.415837: Current learning rate: 0.00435 +2026-04-09 09:57:06.106502: train_loss -0.2086 +2026-04-09 09:57:06.111581: val_loss -0.1822 +2026-04-09 09:57:06.114021: Pseudo dice [0.6598, 0.7549, 0.7057, 0.4953, 0.3866, 0.9378, 0.8209] +2026-04-09 09:57:06.115489: Epoch time: 103.7 s +2026-04-09 09:57:07.264603: +2026-04-09 09:57:07.266620: 
Epoch 604 +2026-04-09 09:57:07.269806: Current learning rate: 0.00434 +2026-04-09 09:58:50.180834: train_loss -0.2678 +2026-04-09 09:58:50.187427: val_loss -0.297 +2026-04-09 09:58:50.189553: Pseudo dice [0.64, 0.7906, 0.6845, 0.185, 0.3665, 0.7619, 0.8413] +2026-04-09 09:58:50.191919: Epoch time: 102.92 s +2026-04-09 09:58:51.303521: +2026-04-09 09:58:51.306357: Epoch 605 +2026-04-09 09:58:51.307766: Current learning rate: 0.00433 +2026-04-09 10:00:34.296152: train_loss -0.3678 +2026-04-09 10:00:34.303457: val_loss -0.2651 +2026-04-09 10:00:34.306105: Pseudo dice [0.0, 0.6956, 0.3785, 0.1293, 0.4934, 0.243, 0.4389] +2026-04-09 10:00:34.308274: Epoch time: 103.0 s +2026-04-09 10:00:35.390709: +2026-04-09 10:00:35.392744: Epoch 606 +2026-04-09 10:00:35.394498: Current learning rate: 0.00432 +2026-04-09 10:02:18.503021: train_loss -0.3791 +2026-04-09 10:02:18.509207: val_loss -0.3326 +2026-04-09 10:02:18.511432: Pseudo dice [0.0, 0.1493, 0.5311, 0.0, 0.0, 0.0514, 0.0] +2026-04-09 10:02:18.515019: Epoch time: 103.12 s +2026-04-09 10:02:19.625057: +2026-04-09 10:02:19.628642: Epoch 607 +2026-04-09 10:02:19.631401: Current learning rate: 0.00431 +2026-04-09 10:04:02.167173: train_loss -0.3811 +2026-04-09 10:04:02.172441: val_loss -0.4521 +2026-04-09 10:04:02.174705: Pseudo dice [0.0, 0.0, 0.5644, 0.0, 0.0, 0.0586, 0.712] +2026-04-09 10:04:02.176289: Epoch time: 102.55 s +2026-04-09 10:04:03.283784: +2026-04-09 10:04:03.285528: Epoch 608 +2026-04-09 10:04:03.286908: Current learning rate: 0.0043 +2026-04-09 10:05:46.062788: train_loss -0.4834 +2026-04-09 10:05:46.068726: val_loss -0.4479 +2026-04-09 10:05:46.070727: Pseudo dice [0.0, 0.0011, 0.5176, 0.0, 0.0, 0.1831, 0.5176] +2026-04-09 10:05:46.073207: Epoch time: 102.78 s +2026-04-09 10:05:47.168148: +2026-04-09 10:05:47.171217: Epoch 609 +2026-04-09 10:05:47.174499: Current learning rate: 0.00429 +2026-04-09 10:07:29.687375: train_loss -0.389 +2026-04-09 10:07:29.693378: val_loss -0.4849 +2026-04-09 10:07:29.695459: 
Pseudo dice [0.0, 0.632, 0.5497, 0.0, 0.0, 0.615, 0.1326] +2026-04-09 10:07:29.697653: Epoch time: 102.52 s +2026-04-09 10:07:30.810019: +2026-04-09 10:07:30.811987: Epoch 610 +2026-04-09 10:07:30.813901: Current learning rate: 0.00429 +2026-04-09 10:09:12.846725: train_loss -0.4108 +2026-04-09 10:09:12.851981: val_loss -0.429 +2026-04-09 10:09:12.854073: Pseudo dice [0.0, 0.1851, 0.7511, 0.0, 0.0, 0.0605, 0.0876] +2026-04-09 10:09:12.855678: Epoch time: 102.04 s +2026-04-09 10:09:13.972165: +2026-04-09 10:09:13.973643: Epoch 611 +2026-04-09 10:09:13.975217: Current learning rate: 0.00428 +2026-04-09 10:10:56.857707: train_loss -0.3962 +2026-04-09 10:10:56.864642: val_loss -0.3422 +2026-04-09 10:10:56.867561: Pseudo dice [0.0, 0.0, 0.4753, 0.0, 0.0, 0.0, 0.0] +2026-04-09 10:10:56.870986: Epoch time: 102.89 s +2026-04-09 10:10:58.038229: +2026-04-09 10:10:58.041718: Epoch 612 +2026-04-09 10:10:58.046593: Current learning rate: 0.00427 +2026-04-09 10:12:41.508154: train_loss -0.4144 +2026-04-09 10:12:41.523209: val_loss -0.375 +2026-04-09 10:12:41.525702: Pseudo dice [0.0, 0.0, 0.7238, 0.0, 0.0, 0.0, 0.621] +2026-04-09 10:12:41.527881: Epoch time: 103.47 s +2026-04-09 10:12:43.911094: +2026-04-09 10:12:43.912712: Epoch 613 +2026-04-09 10:12:43.914172: Current learning rate: 0.00426 +2026-04-09 10:14:27.212254: train_loss -0.4453 +2026-04-09 10:14:27.219504: val_loss -0.4095 +2026-04-09 10:14:27.222678: Pseudo dice [0.0, 0.0, 0.5806, 0.0, 0.0, 0.0049, 0.5622] +2026-04-09 10:14:27.225590: Epoch time: 103.3 s +2026-04-09 10:14:28.354040: +2026-04-09 10:14:28.355703: Epoch 614 +2026-04-09 10:14:28.357517: Current learning rate: 0.00425 +2026-04-09 10:16:11.647054: train_loss -0.4723 +2026-04-09 10:16:11.654353: val_loss -0.4222 +2026-04-09 10:16:11.656281: Pseudo dice [0.0, 0.0, 0.6508, 0.0, 0.006, 0.0113, 0.0178] +2026-04-09 10:16:11.658831: Epoch time: 103.3 s +2026-04-09 10:16:12.784858: +2026-04-09 10:16:12.790372: Epoch 615 +2026-04-09 10:16:12.794860: Current 
learning rate: 0.00424 +2026-04-09 10:17:58.522615: train_loss -0.4469 +2026-04-09 10:17:58.534768: val_loss -0.4557 +2026-04-09 10:17:58.538482: Pseudo dice [0.0, 0.0, 0.5712, 0.0, 0.0, 0.4339, 0.6523] +2026-04-09 10:17:58.542696: Epoch time: 105.74 s +2026-04-09 10:17:59.695117: +2026-04-09 10:17:59.697653: Epoch 616 +2026-04-09 10:17:59.701939: Current learning rate: 0.00423 +2026-04-09 10:19:43.106007: train_loss -0.4716 +2026-04-09 10:19:43.113379: val_loss -0.5014 +2026-04-09 10:19:43.115529: Pseudo dice [0.0, 0.0, 0.7014, 0.0, 0.0, 0.2512, 0.7605] +2026-04-09 10:19:43.118996: Epoch time: 103.41 s +2026-04-09 10:19:44.278652: +2026-04-09 10:19:44.280922: Epoch 617 +2026-04-09 10:19:44.282957: Current learning rate: 0.00422 +2026-04-09 10:21:27.872014: train_loss -0.4274 +2026-04-09 10:21:27.881903: val_loss -0.4333 +2026-04-09 10:21:27.884629: Pseudo dice [0.0, 0.0, 0.752, 0.0, 0.0, 0.4099, 0.6576] +2026-04-09 10:21:27.887024: Epoch time: 103.6 s +2026-04-09 10:21:28.984379: +2026-04-09 10:21:28.986845: Epoch 618 +2026-04-09 10:21:28.990201: Current learning rate: 0.00421 +2026-04-09 10:23:12.452270: train_loss -0.4437 +2026-04-09 10:23:12.459359: val_loss -0.4431 +2026-04-09 10:23:12.461546: Pseudo dice [0.0, 0.0, 0.7114, 0.0, 0.0, 0.0207, 0.6123] +2026-04-09 10:23:12.463450: Epoch time: 103.47 s +2026-04-09 10:23:13.612728: +2026-04-09 10:23:13.614858: Epoch 619 +2026-04-09 10:23:13.616760: Current learning rate: 0.0042 +2026-04-09 10:24:56.997979: train_loss -0.4571 +2026-04-09 10:24:57.003963: val_loss -0.4139 +2026-04-09 10:24:57.007195: Pseudo dice [0.0, 0.0, 0.4739, 0.0, 0.0, 0.6755, 0.3707] +2026-04-09 10:24:57.009463: Epoch time: 103.39 s +2026-04-09 10:24:58.167940: +2026-04-09 10:24:58.171349: Epoch 620 +2026-04-09 10:24:58.173085: Current learning rate: 0.00419 +2026-04-09 10:26:40.551085: train_loss -0.3593 +2026-04-09 10:26:40.557123: val_loss -0.3568 +2026-04-09 10:26:40.559731: Pseudo dice [0.0, 0.0, 0.3639, 0.0, 0.0, 0.0, 0.0] +2026-04-09 
10:26:40.561960: Epoch time: 102.39 s +2026-04-09 10:26:41.707745: +2026-04-09 10:26:41.709219: Epoch 621 +2026-04-09 10:26:41.710534: Current learning rate: 0.00418 +2026-04-09 10:28:25.390046: train_loss -0.4086 +2026-04-09 10:28:25.395928: val_loss -0.3896 +2026-04-09 10:28:25.398445: Pseudo dice [0.0, 0.0, 0.2447, 0.0, 0.0, 0.0, 0.6058] +2026-04-09 10:28:25.400524: Epoch time: 103.69 s +2026-04-09 10:28:26.528180: +2026-04-09 10:28:26.531093: Epoch 622 +2026-04-09 10:28:26.533415: Current learning rate: 0.00417 +2026-04-09 10:30:09.334102: train_loss -0.4233 +2026-04-09 10:30:09.340608: val_loss -0.3629 +2026-04-09 10:30:09.342649: Pseudo dice [0.0, 0.0, 0.5169, 0.0, 0.0, 0.0012, 0.4858] +2026-04-09 10:30:09.345154: Epoch time: 102.81 s +2026-04-09 10:30:10.481678: +2026-04-09 10:30:10.483728: Epoch 623 +2026-04-09 10:30:10.497222: Current learning rate: 0.00416 +2026-04-09 10:31:52.575101: train_loss -0.413 +2026-04-09 10:31:52.581183: val_loss -0.4867 +2026-04-09 10:31:52.583511: Pseudo dice [0.0, 0.0, 0.4256, 0.0, 0.0, 0.0139, 0.3858] +2026-04-09 10:31:52.585620: Epoch time: 102.1 s +2026-04-09 10:31:53.694456: +2026-04-09 10:31:53.695914: Epoch 624 +2026-04-09 10:31:53.697922: Current learning rate: 0.00415 +2026-04-09 10:33:36.202637: train_loss -0.4262 +2026-04-09 10:33:36.208101: val_loss -0.4392 +2026-04-09 10:33:36.209852: Pseudo dice [0.0, 0.003, 0.6172, 0.0, 0.0, 0.3638, 0.039] +2026-04-09 10:33:36.212051: Epoch time: 102.51 s +2026-04-09 10:33:37.343133: +2026-04-09 10:33:37.344873: Epoch 625 +2026-04-09 10:33:37.346614: Current learning rate: 0.00414 +2026-04-09 10:35:19.807498: train_loss -0.4254 +2026-04-09 10:35:19.813349: val_loss -0.4509 +2026-04-09 10:35:19.815658: Pseudo dice [0.0, 0.6162, 0.5984, 0.0, 0.0, 0.0002, 0.3941] +2026-04-09 10:35:19.817622: Epoch time: 102.47 s +2026-04-09 10:35:20.925892: +2026-04-09 10:35:20.929190: Epoch 626 +2026-04-09 10:35:20.931078: Current learning rate: 0.00413 +2026-04-09 10:37:03.382450: train_loss 
-0.4498 +2026-04-09 10:37:03.389498: val_loss -0.4547 +2026-04-09 10:37:03.393156: Pseudo dice [0.0, 0.0, 0.7558, 0.0, 0.0, 0.3082, 0.6241] +2026-04-09 10:37:03.395245: Epoch time: 102.46 s +2026-04-09 10:37:04.505548: +2026-04-09 10:37:04.507838: Epoch 627 +2026-04-09 10:37:04.509838: Current learning rate: 0.00412 +2026-04-09 10:38:47.201749: train_loss -0.4239 +2026-04-09 10:38:47.207515: val_loss -0.4445 +2026-04-09 10:38:47.209011: Pseudo dice [0.0, 0.0, 0.3538, 0.0, 0.0, 0.3652, 0.4279] +2026-04-09 10:38:47.210972: Epoch time: 102.7 s +2026-04-09 10:38:48.345291: +2026-04-09 10:38:48.347157: Epoch 628 +2026-04-09 10:38:48.350241: Current learning rate: 0.00411 +2026-04-09 10:40:30.625063: train_loss -0.4359 +2026-04-09 10:40:30.630677: val_loss -0.4374 +2026-04-09 10:40:30.633630: Pseudo dice [0.0, 0.0, 0.5843, 0.0, 0.0, 0.1278, 0.5669] +2026-04-09 10:40:30.636037: Epoch time: 102.28 s +2026-04-09 10:40:31.753132: +2026-04-09 10:40:31.754894: Epoch 629 +2026-04-09 10:40:31.756866: Current learning rate: 0.0041 +2026-04-09 10:42:13.969234: train_loss -0.416 +2026-04-09 10:42:13.975106: val_loss -0.4132 +2026-04-09 10:42:13.976815: Pseudo dice [0.0, 0.0, 0.4641, 0.0, 0.0, 0.0041, 0.2635] +2026-04-09 10:42:13.978789: Epoch time: 102.22 s +2026-04-09 10:42:15.086658: +2026-04-09 10:42:15.091372: Epoch 630 +2026-04-09 10:42:15.093041: Current learning rate: 0.00409 +2026-04-09 10:43:57.492140: train_loss -0.3976 +2026-04-09 10:43:57.500189: val_loss -0.4802 +2026-04-09 10:43:57.502939: Pseudo dice [0.0, 0.0, 0.7669, 0.0, 0.0, 0.0, 0.7118] +2026-04-09 10:43:57.505598: Epoch time: 102.41 s +2026-04-09 10:43:58.627284: +2026-04-09 10:43:58.630649: Epoch 631 +2026-04-09 10:43:58.633862: Current learning rate: 0.00408 +2026-04-09 10:45:40.423396: train_loss -0.4373 +2026-04-09 10:45:40.429735: val_loss -0.4585 +2026-04-09 10:45:40.431500: Pseudo dice [0.0, 0.0, 0.691, 0.0, 0.0, 0.0, 0.7168] +2026-04-09 10:45:40.433989: Epoch time: 101.8 s +2026-04-09 10:45:41.565795: 
+2026-04-09 10:45:41.567385: Epoch 632 +2026-04-09 10:45:41.569300: Current learning rate: 0.00407 +2026-04-09 10:47:23.658944: train_loss -0.3902 +2026-04-09 10:47:23.665403: val_loss -0.4399 +2026-04-09 10:47:23.667477: Pseudo dice [0.0, 0.0, 0.5501, 0.0, 0.0, 0.0061, 0.0774] +2026-04-09 10:47:23.669381: Epoch time: 102.1 s +2026-04-09 10:47:25.947297: +2026-04-09 10:47:25.948712: Epoch 633 +2026-04-09 10:47:25.950129: Current learning rate: 0.00406 +2026-04-09 10:49:08.371741: train_loss -0.449 +2026-04-09 10:49:08.380519: val_loss -0.4711 +2026-04-09 10:49:08.382674: Pseudo dice [0.0, 0.0, 0.6657, 0.0, 0.0, 0.0099, 0.5519] +2026-04-09 10:49:08.384960: Epoch time: 102.43 s +2026-04-09 10:49:09.493373: +2026-04-09 10:49:09.495386: Epoch 634 +2026-04-09 10:49:09.497848: Current learning rate: 0.00405 +2026-04-09 10:50:52.227094: train_loss -0.482 +2026-04-09 10:50:52.233528: val_loss -0.4612 +2026-04-09 10:50:52.236480: Pseudo dice [0.0, 0.0, 0.4115, 0.0, 0.0, 0.4064, 0.4173] +2026-04-09 10:50:52.238710: Epoch time: 102.74 s +2026-04-09 10:50:53.319296: +2026-04-09 10:50:53.321744: Epoch 635 +2026-04-09 10:50:53.324403: Current learning rate: 0.00404 +2026-04-09 10:52:35.799352: train_loss -0.4777 +2026-04-09 10:52:35.804744: val_loss -0.4341 +2026-04-09 10:52:35.806540: Pseudo dice [0.0, 0.0, 0.6245, 0.0, 0.0, 0.1404, 0.697] +2026-04-09 10:52:35.808830: Epoch time: 102.48 s +2026-04-09 10:52:36.921342: +2026-04-09 10:52:36.923357: Epoch 636 +2026-04-09 10:52:36.924928: Current learning rate: 0.00403 +2026-04-09 10:54:20.068555: train_loss -0.4503 +2026-04-09 10:54:20.074749: val_loss -0.3778 +2026-04-09 10:54:20.077483: Pseudo dice [0.0, 0.0, 0.1268, 0.0, 0.0, 0.3508, 0.0146] +2026-04-09 10:54:20.080307: Epoch time: 103.15 s +2026-04-09 10:54:21.238638: +2026-04-09 10:54:21.240816: Epoch 637 +2026-04-09 10:54:21.242634: Current learning rate: 0.00402 +2026-04-09 10:56:04.444794: train_loss -0.4182 +2026-04-09 10:56:04.451496: val_loss -0.3974 +2026-04-09 
10:56:04.453656: Pseudo dice [0.0, 0.0, 0.5031, 0.0, 0.0, 0.0, 0.6897] +2026-04-09 10:56:04.455171: Epoch time: 103.21 s +2026-04-09 10:56:05.593776: +2026-04-09 10:56:05.595642: Epoch 638 +2026-04-09 10:56:05.597439: Current learning rate: 0.00401 +2026-04-09 10:57:47.854185: train_loss -0.4367 +2026-04-09 10:57:47.860530: val_loss -0.474 +2026-04-09 10:57:47.863077: Pseudo dice [0.0, 0.0, 0.4748, 0.0, 0.0, 0.0129, 0.7652] +2026-04-09 10:57:47.865610: Epoch time: 102.26 s +2026-04-09 10:57:48.984648: +2026-04-09 10:57:48.986422: Epoch 639 +2026-04-09 10:57:48.988391: Current learning rate: 0.004 +2026-04-09 10:59:30.897000: train_loss -0.4836 +2026-04-09 10:59:30.902246: val_loss -0.501 +2026-04-09 10:59:30.904642: Pseudo dice [0.0, 0.0, 0.6838, 0.0, 0.0, 0.1044, 0.738] +2026-04-09 10:59:30.907362: Epoch time: 101.92 s +2026-04-09 10:59:32.016174: +2026-04-09 10:59:32.018706: Epoch 640 +2026-04-09 10:59:32.020600: Current learning rate: 0.00399 +2026-04-09 11:01:14.525800: train_loss -0.4793 +2026-04-09 11:01:14.532006: val_loss -0.4547 +2026-04-09 11:01:14.534126: Pseudo dice [0.0, 0.0, 0.6354, 0.0, 0.0, 0.4089, 0.6288] +2026-04-09 11:01:14.536272: Epoch time: 102.51 s +2026-04-09 11:01:15.653319: +2026-04-09 11:01:15.655922: Epoch 641 +2026-04-09 11:01:15.658264: Current learning rate: 0.00398 +2026-04-09 11:02:58.025761: train_loss -0.4529 +2026-04-09 11:02:58.031680: val_loss -0.4874 +2026-04-09 11:02:58.034402: Pseudo dice [0.0, 0.0, 0.681, 0.0, 0.0, 0.2797, 0.73] +2026-04-09 11:02:58.037020: Epoch time: 102.38 s +2026-04-09 11:02:59.136147: +2026-04-09 11:02:59.137861: Epoch 642 +2026-04-09 11:02:59.139989: Current learning rate: 0.00397 +2026-04-09 11:04:41.652613: train_loss -0.4743 +2026-04-09 11:04:41.659240: val_loss -0.4731 +2026-04-09 11:04:41.661855: Pseudo dice [0.0, 0.0, 0.6637, 0.0, 0.0, 0.4783, 0.5792] +2026-04-09 11:04:41.664280: Epoch time: 102.52 s +2026-04-09 11:04:42.841483: +2026-04-09 11:04:42.843852: Epoch 643 +2026-04-09 11:04:42.845861: 
Current learning rate: 0.00396 +2026-04-09 11:06:26.255084: train_loss -0.475 +2026-04-09 11:06:26.269918: val_loss -0.4737 +2026-04-09 11:06:26.273613: Pseudo dice [0.0, 0.0, 0.734, 0.0, 0.0, 0.3285, 0.5155] +2026-04-09 11:06:26.276669: Epoch time: 103.42 s +2026-04-09 11:06:27.395527: +2026-04-09 11:06:27.399555: Epoch 644 +2026-04-09 11:06:27.401887: Current learning rate: 0.00395 +2026-04-09 11:08:10.971044: train_loss -0.454 +2026-04-09 11:08:10.978713: val_loss -0.4311 +2026-04-09 11:08:10.982169: Pseudo dice [0.0, 0.0, 0.5155, 0.0, 0.0, 0.1485, 0.3332] +2026-04-09 11:08:10.985076: Epoch time: 103.58 s +2026-04-09 11:08:12.088542: +2026-04-09 11:08:12.090819: Epoch 645 +2026-04-09 11:08:12.092797: Current learning rate: 0.00394 +2026-04-09 11:09:55.638220: train_loss -0.3991 +2026-04-09 11:09:55.645522: val_loss -0.4493 +2026-04-09 11:09:55.649216: Pseudo dice [0.0, 0.0, 0.0547, 0.0, 0.0, 0.284, 0.6786] +2026-04-09 11:09:55.651377: Epoch time: 103.55 s +2026-04-09 11:09:56.769680: +2026-04-09 11:09:56.772141: Epoch 646 +2026-04-09 11:09:56.774423: Current learning rate: 0.00393 +2026-04-09 11:11:40.160004: train_loss -0.4043 +2026-04-09 11:11:40.167268: val_loss -0.4408 +2026-04-09 11:11:40.170125: Pseudo dice [0.0, 0.0, 0.6709, 0.0, 0.0, 0.4715, 0.1649] +2026-04-09 11:11:40.172423: Epoch time: 103.39 s +2026-04-09 11:11:41.312244: +2026-04-09 11:11:41.314447: Epoch 647 +2026-04-09 11:11:41.315855: Current learning rate: 0.00392 +2026-04-09 11:13:24.146056: train_loss -0.448 +2026-04-09 11:13:24.154000: val_loss -0.3955 +2026-04-09 11:13:24.156657: Pseudo dice [0.0, 0.0048, 0.2191, 0.0, 0.0, 0.3324, 0.6886] +2026-04-09 11:13:24.159549: Epoch time: 102.84 s +2026-04-09 11:13:25.333006: +2026-04-09 11:13:25.335877: Epoch 648 +2026-04-09 11:13:25.337759: Current learning rate: 0.00391 +2026-04-09 11:15:07.842330: train_loss -0.4814 +2026-04-09 11:15:07.849958: val_loss -0.4867 +2026-04-09 11:15:07.852461: Pseudo dice [0.0, 0.597, 0.5388, 0.0, 0.2232, 0.1632, 
0.7584] +2026-04-09 11:15:07.855047: Epoch time: 102.51 s +2026-04-09 11:15:08.968168: +2026-04-09 11:15:08.970464: Epoch 649 +2026-04-09 11:15:08.972022: Current learning rate: 0.0039 +2026-04-09 11:16:51.418649: train_loss -0.4044 +2026-04-09 11:16:51.426237: val_loss -0.4624 +2026-04-09 11:16:51.428281: Pseudo dice [0.0, 0.4781, 0.696, 0.0, 0.0, 0.0772, 0.0111] +2026-04-09 11:16:51.431338: Epoch time: 102.45 s +2026-04-09 11:16:54.345293: +2026-04-09 11:16:54.346727: Epoch 650 +2026-04-09 11:16:54.348135: Current learning rate: 0.00389 +2026-04-09 11:18:36.806859: train_loss -0.3976 +2026-04-09 11:18:36.813508: val_loss -0.4168 +2026-04-09 11:18:36.815582: Pseudo dice [0.0, 0.2463, 0.7092, 0.0, 0.0, 0.0, 0.0242] +2026-04-09 11:18:36.818028: Epoch time: 102.46 s +2026-04-09 11:18:37.910627: +2026-04-09 11:18:37.912685: Epoch 651 +2026-04-09 11:18:37.914116: Current learning rate: 0.00388 +2026-04-09 11:20:20.434550: train_loss -0.4103 +2026-04-09 11:20:20.441619: val_loss -0.4364 +2026-04-09 11:20:20.444239: Pseudo dice [0.0, 0.0, 0.7768, 0.0, 0.0, 0.0, 0.0043] +2026-04-09 11:20:20.446527: Epoch time: 102.53 s +2026-04-09 11:20:21.706615: +2026-04-09 11:20:21.708624: Epoch 652 +2026-04-09 11:20:21.712389: Current learning rate: 0.00387 +2026-04-09 11:22:04.070706: train_loss -0.452 +2026-04-09 11:22:04.077222: val_loss -0.4351 +2026-04-09 11:22:04.079102: Pseudo dice [0.0, 0.0, 0.76, 0.0, 0.0, 0.2573, 0.4695] +2026-04-09 11:22:04.084370: Epoch time: 102.37 s +2026-04-09 11:22:06.414971: +2026-04-09 11:22:06.417358: Epoch 653 +2026-04-09 11:22:06.419664: Current learning rate: 0.00386 +2026-04-09 11:23:50.774114: train_loss -0.4241 +2026-04-09 11:23:50.780958: val_loss -0.4322 +2026-04-09 11:23:50.782918: Pseudo dice [0.0, 0.0, 0.5897, 0.0, 0.0, 0.0831, 0.6947] +2026-04-09 11:23:50.785576: Epoch time: 104.36 s +2026-04-09 11:23:51.913484: +2026-04-09 11:23:51.915993: Epoch 654 +2026-04-09 11:23:51.919605: Current learning rate: 0.00385 +2026-04-09 11:25:35.060063: 
train_loss -0.4646 +2026-04-09 11:25:35.066013: val_loss -0.4183 +2026-04-09 11:25:35.068861: Pseudo dice [0.0, 0.0, 0.6406, 0.0, 0.0, 0.6242, 0.4912] +2026-04-09 11:25:35.071457: Epoch time: 103.15 s +2026-04-09 11:25:36.213780: +2026-04-09 11:25:36.215450: Epoch 655 +2026-04-09 11:25:36.216834: Current learning rate: 0.00384 +2026-04-09 11:27:19.601053: train_loss -0.444 +2026-04-09 11:27:19.606817: val_loss -0.3971 +2026-04-09 11:27:19.609635: Pseudo dice [0.0, 0.0, 0.6524, 0.0, 0.0, 0.1242, 0.2882] +2026-04-09 11:27:19.611699: Epoch time: 103.39 s +2026-04-09 11:27:20.722070: +2026-04-09 11:27:20.724089: Epoch 656 +2026-04-09 11:27:20.726213: Current learning rate: 0.00383 +2026-04-09 11:29:03.530731: train_loss -0.4368 +2026-04-09 11:29:03.536492: val_loss -0.3876 +2026-04-09 11:29:03.538453: Pseudo dice [0.0, 0.0, 0.7115, 0.0, 0.1376, 0.3938, 0.3255] +2026-04-09 11:29:03.540198: Epoch time: 102.81 s +2026-04-09 11:29:04.677381: +2026-04-09 11:29:04.679145: Epoch 657 +2026-04-09 11:29:04.680878: Current learning rate: 0.00382 +2026-04-09 11:30:47.682035: train_loss -0.4699 +2026-04-09 11:30:47.687626: val_loss -0.4647 +2026-04-09 11:30:47.689426: Pseudo dice [0.0, 0.0, 0.7238, 0.0, 0.0, 0.0, 0.151] +2026-04-09 11:30:47.691051: Epoch time: 103.01 s +2026-04-09 11:30:48.816782: +2026-04-09 11:30:48.818861: Epoch 658 +2026-04-09 11:30:48.820432: Current learning rate: 0.00381 +2026-04-09 11:32:31.267996: train_loss -0.442 +2026-04-09 11:32:31.274568: val_loss -0.4152 +2026-04-09 11:32:31.276656: Pseudo dice [0.0, 0.0, 0.6525, 0.0, 0.0255, 0.0, 0.4225] +2026-04-09 11:32:31.278438: Epoch time: 102.45 s +2026-04-09 11:32:32.404626: +2026-04-09 11:32:32.406650: Epoch 659 +2026-04-09 11:32:32.408433: Current learning rate: 0.0038 +2026-04-09 11:34:15.056488: train_loss -0.4614 +2026-04-09 11:34:15.062408: val_loss -0.3951 +2026-04-09 11:34:15.066956: Pseudo dice [0.0, 0.0, 0.5462, 0.0, 0.0027, 0.1168, 0.2958] +2026-04-09 11:34:15.069648: Epoch time: 102.65 s 
+2026-04-09 11:34:16.174062: +2026-04-09 11:34:16.176129: Epoch 660 +2026-04-09 11:34:16.178058: Current learning rate: 0.00379 +2026-04-09 11:35:58.986528: train_loss -0.4915 +2026-04-09 11:35:58.991709: val_loss -0.4215 +2026-04-09 11:35:58.993506: Pseudo dice [0.0, 0.0, 0.4111, 0.0, 0.3699, 0.2479, 0.4261] +2026-04-09 11:35:58.995670: Epoch time: 102.82 s +2026-04-09 11:36:00.120749: +2026-04-09 11:36:00.122863: Epoch 661 +2026-04-09 11:36:00.124733: Current learning rate: 0.00378 +2026-04-09 11:37:42.700187: train_loss -0.4269 +2026-04-09 11:37:42.708707: val_loss -0.3946 +2026-04-09 11:37:42.711193: Pseudo dice [0.0, 0.5338, 0.3637, 0.0, 0.0, 0.0113, 0.0334] +2026-04-09 11:37:42.713101: Epoch time: 102.58 s +2026-04-09 11:37:43.843960: +2026-04-09 11:37:43.845711: Epoch 662 +2026-04-09 11:37:43.847327: Current learning rate: 0.00377 +2026-04-09 11:39:26.948855: train_loss -0.4229 +2026-04-09 11:39:26.955377: val_loss -0.4636 +2026-04-09 11:39:26.958285: Pseudo dice [0.0, 0.442, 0.7089, 0.0, 0.0, 0.6612, 0.4611] +2026-04-09 11:39:26.960328: Epoch time: 103.11 s +2026-04-09 11:39:28.082776: +2026-04-09 11:39:28.084394: Epoch 663 +2026-04-09 11:39:28.088546: Current learning rate: 0.00376 +2026-04-09 11:41:10.639030: train_loss -0.4515 +2026-04-09 11:41:10.644986: val_loss -0.5297 +2026-04-09 11:41:10.647065: Pseudo dice [0.0, 0.0, 0.6514, 0.0, 0.1222, 0.2274, 0.7588] +2026-04-09 11:41:10.649025: Epoch time: 102.56 s +2026-04-09 11:41:11.778532: +2026-04-09 11:41:11.780222: Epoch 664 +2026-04-09 11:41:11.781736: Current learning rate: 0.00375 +2026-04-09 11:42:54.314113: train_loss -0.4416 +2026-04-09 11:42:54.320737: val_loss -0.4074 +2026-04-09 11:42:54.322829: Pseudo dice [0.0, 0.0, 0.4955, 0.0, 0.3027, 0.0604, 0.0987] +2026-04-09 11:42:54.324837: Epoch time: 102.54 s +2026-04-09 11:42:55.441735: +2026-04-09 11:42:55.443318: Epoch 665 +2026-04-09 11:42:55.445076: Current learning rate: 0.00374 +2026-04-09 11:44:38.850185: train_loss -0.4367 +2026-04-09 
11:44:38.869047: val_loss -0.4429 +2026-04-09 11:44:38.872030: Pseudo dice [0.0, 0.0, 0.5967, 0.0, 0.0, 0.158, 0.498] +2026-04-09 11:44:38.873945: Epoch time: 103.41 s +2026-04-09 11:44:39.991638: +2026-04-09 11:44:39.993637: Epoch 666 +2026-04-09 11:44:39.995240: Current learning rate: 0.00373 +2026-04-09 11:46:23.224309: train_loss -0.4674 +2026-04-09 11:46:23.230433: val_loss -0.5129 +2026-04-09 11:46:23.232194: Pseudo dice [0.0, 0.5037, 0.6685, 0.0, 0.3681, 0.7556, 0.6834] +2026-04-09 11:46:23.234520: Epoch time: 103.24 s +2026-04-09 11:46:24.359459: +2026-04-09 11:46:24.361370: Epoch 667 +2026-04-09 11:46:24.362976: Current learning rate: 0.00372 +2026-04-09 11:48:07.497653: train_loss -0.4561 +2026-04-09 11:48:07.522936: val_loss -0.4449 +2026-04-09 11:48:07.524721: Pseudo dice [0.0, 0.1904, 0.6634, 0.0, 0.0, 0.3608, 0.7882] +2026-04-09 11:48:07.526880: Epoch time: 103.14 s +2026-04-09 11:48:08.672302: +2026-04-09 11:48:08.674921: Epoch 668 +2026-04-09 11:48:08.677734: Current learning rate: 0.00371 +2026-04-09 11:49:51.944028: train_loss -0.4632 +2026-04-09 11:49:51.950772: val_loss -0.4274 +2026-04-09 11:49:51.956419: Pseudo dice [0.0, 0.1477, 0.6792, 0.0, 0.0, 0.1197, 0.582] +2026-04-09 11:49:51.959448: Epoch time: 103.27 s +2026-04-09 11:49:53.136440: +2026-04-09 11:49:53.138363: Epoch 669 +2026-04-09 11:49:53.140072: Current learning rate: 0.0037 +2026-04-09 11:51:35.423691: train_loss -0.4542 +2026-04-09 11:51:35.435899: val_loss -0.4418 +2026-04-09 11:51:35.438477: Pseudo dice [0.0, 0.669, 0.5706, 0.0, 0.0, 0.4401, 0.3097] +2026-04-09 11:51:35.440827: Epoch time: 102.29 s +2026-04-09 11:51:36.581604: +2026-04-09 11:51:36.583520: Epoch 670 +2026-04-09 11:51:36.585432: Current learning rate: 0.00369 +2026-04-09 11:53:18.912390: train_loss -0.4863 +2026-04-09 11:53:18.918201: val_loss -0.481 +2026-04-09 11:53:18.920079: Pseudo dice [0.0, 0.6633, 0.6429, 0.0, 0.0, 0.15, 0.3461] +2026-04-09 11:53:18.922771: Epoch time: 102.33 s +2026-04-09 11:53:20.070043: 
+2026-04-09 11:53:20.071597: Epoch 671 +2026-04-09 11:53:20.073098: Current learning rate: 0.00368 +2026-04-09 11:55:03.662422: train_loss -0.4908 +2026-04-09 11:55:03.667818: val_loss -0.4516 +2026-04-09 11:55:03.670181: Pseudo dice [0.0, 0.0, 0.7493, 0.0, 0.0, 0.7188, 0.6448] +2026-04-09 11:55:03.676149: Epoch time: 103.6 s +2026-04-09 11:55:04.817209: +2026-04-09 11:55:04.819688: Epoch 672 +2026-04-09 11:55:04.821483: Current learning rate: 0.00367 +2026-04-09 11:56:47.292716: train_loss -0.3838 +2026-04-09 11:56:47.297674: val_loss -0.4891 +2026-04-09 11:56:47.299602: Pseudo dice [0.0, 0.0, 0.6055, 0.0, 0.0, 0.2991, 0.6166] +2026-04-09 11:56:47.301882: Epoch time: 102.48 s +2026-04-09 11:56:49.621077: +2026-04-09 11:56:49.623171: Epoch 673 +2026-04-09 11:56:49.625022: Current learning rate: 0.00366 +2026-04-09 11:58:32.636868: train_loss -0.4586 +2026-04-09 11:58:32.642712: val_loss -0.4227 +2026-04-09 11:58:32.644640: Pseudo dice [0.0, 0.0, 0.4214, 0.0, 0.0, 0.1603, 0.7393] +2026-04-09 11:58:32.646341: Epoch time: 103.02 s +2026-04-09 11:58:33.798913: +2026-04-09 11:58:33.801976: Epoch 674 +2026-04-09 11:58:33.804527: Current learning rate: 0.00365 +2026-04-09 12:00:16.693536: train_loss -0.467 +2026-04-09 12:00:16.700669: val_loss -0.429 +2026-04-09 12:00:16.704621: Pseudo dice [0.0, 0.0, 0.6804, 0.0, 0.3041, 0.0327, 0.347] +2026-04-09 12:00:16.707227: Epoch time: 102.9 s +2026-04-09 12:00:17.845665: +2026-04-09 12:00:17.847547: Epoch 675 +2026-04-09 12:00:17.849310: Current learning rate: 0.00364 +2026-04-09 12:02:00.781912: train_loss -0.4371 +2026-04-09 12:02:00.787125: val_loss -0.4195 +2026-04-09 12:02:00.789629: Pseudo dice [0.0, 0.1157, 0.5583, 0.0, 0.0228, 0.414, 0.6784] +2026-04-09 12:02:00.791471: Epoch time: 102.94 s +2026-04-09 12:02:01.950245: +2026-04-09 12:02:01.952527: Epoch 676 +2026-04-09 12:02:01.954190: Current learning rate: 0.00363 +2026-04-09 12:03:44.601101: train_loss -0.5057 +2026-04-09 12:03:44.606767: val_loss -0.4927 +2026-04-09 
12:03:44.609120: Pseudo dice [0.0, 0.476, 0.6577, 0.0, 0.2078, 0.4336, 0.6735] +2026-04-09 12:03:44.611246: Epoch time: 102.65 s +2026-04-09 12:03:45.753099: +2026-04-09 12:03:45.756647: Epoch 677 +2026-04-09 12:03:45.760843: Current learning rate: 0.00362 +2026-04-09 12:05:29.018370: train_loss -0.4773 +2026-04-09 12:05:29.024482: val_loss -0.473 +2026-04-09 12:05:29.026080: Pseudo dice [0.0, 0.1682, 0.5791, 0.0, 0.0, 0.5716, 0.4955] +2026-04-09 12:05:29.027753: Epoch time: 103.27 s +2026-04-09 12:05:30.171835: +2026-04-09 12:05:30.173660: Epoch 678 +2026-04-09 12:05:30.177825: Current learning rate: 0.00361 +2026-04-09 12:07:13.350812: train_loss -0.4405 +2026-04-09 12:07:13.357717: val_loss -0.4602 +2026-04-09 12:07:13.360576: Pseudo dice [0.0, 0.1271, 0.6756, 0.0, 0.2519, 0.0653, 0.0081] +2026-04-09 12:07:13.363824: Epoch time: 103.18 s +2026-04-09 12:07:14.490377: +2026-04-09 12:07:14.491960: Epoch 679 +2026-04-09 12:07:14.494380: Current learning rate: 0.0036 +2026-04-09 12:08:57.630597: train_loss -0.4176 +2026-04-09 12:08:57.636940: val_loss -0.4221 +2026-04-09 12:08:57.638973: Pseudo dice [0.0, 0.1023, 0.5839, 0.0, 0.0, 0.2541, 0.1853] +2026-04-09 12:08:57.641825: Epoch time: 103.14 s +2026-04-09 12:08:58.794998: +2026-04-09 12:08:58.796860: Epoch 680 +2026-04-09 12:08:58.799191: Current learning rate: 0.00359 +2026-04-09 12:10:41.547499: train_loss -0.4642 +2026-04-09 12:10:41.553122: val_loss -0.4685 +2026-04-09 12:10:41.555403: Pseudo dice [0.0, 0.5364, 0.738, 0.0, 0.2897, 0.5477, 0.6562] +2026-04-09 12:10:41.557284: Epoch time: 102.76 s +2026-04-09 12:10:42.681570: +2026-04-09 12:10:42.683499: Epoch 681 +2026-04-09 12:10:42.685407: Current learning rate: 0.00358 +2026-04-09 12:12:26.017435: train_loss -0.4488 +2026-04-09 12:12:26.024095: val_loss -0.5068 +2026-04-09 12:12:26.026765: Pseudo dice [0.0, 0.3388, 0.7492, 0.0, 0.1222, 0.6019, 0.5346] +2026-04-09 12:12:26.029348: Epoch time: 103.34 s +2026-04-09 12:12:27.198367: +2026-04-09 12:12:27.201068: 
Epoch 682 +2026-04-09 12:12:27.203582: Current learning rate: 0.00357 +2026-04-09 12:14:10.400671: train_loss -0.4619 +2026-04-09 12:14:10.410788: val_loss -0.4191 +2026-04-09 12:14:10.413151: Pseudo dice [0.0, 0.7709, 0.6107, 0.0, 0.3375, 0.1849, 0.5631] +2026-04-09 12:14:10.416118: Epoch time: 103.21 s +2026-04-09 12:14:11.537460: +2026-04-09 12:14:11.539811: Epoch 683 +2026-04-09 12:14:11.541550: Current learning rate: 0.00356 +2026-04-09 12:15:53.621992: train_loss -0.4775 +2026-04-09 12:15:53.627098: val_loss -0.4269 +2026-04-09 12:15:53.629092: Pseudo dice [0.0, 0.3082, 0.6166, 0.0, 0.3932, 0.5516, 0.53] +2026-04-09 12:15:53.630629: Epoch time: 102.09 s +2026-04-09 12:15:54.775133: +2026-04-09 12:15:54.777027: Epoch 684 +2026-04-09 12:15:54.778653: Current learning rate: 0.00355 +2026-04-09 12:17:37.063560: train_loss -0.4766 +2026-04-09 12:17:37.069049: val_loss -0.5113 +2026-04-09 12:17:37.071437: Pseudo dice [0.0, 0.6383, 0.7067, 0.0, 0.3064, 0.6487, 0.5775] +2026-04-09 12:17:37.073900: Epoch time: 102.29 s +2026-04-09 12:17:38.226114: +2026-04-09 12:17:38.228482: Epoch 685 +2026-04-09 12:17:38.230239: Current learning rate: 0.00354 +2026-04-09 12:19:20.706865: train_loss -0.4958 +2026-04-09 12:19:20.713660: val_loss -0.4368 +2026-04-09 12:19:20.715927: Pseudo dice [0.0, 0.4718, 0.3765, 0.0, 0.0, 0.1918, 0.5316] +2026-04-09 12:19:20.717954: Epoch time: 102.48 s +2026-04-09 12:19:21.867076: +2026-04-09 12:19:21.868963: Epoch 686 +2026-04-09 12:19:21.870669: Current learning rate: 0.00353 +2026-04-09 12:21:05.174029: train_loss -0.4444 +2026-04-09 12:21:05.180582: val_loss -0.3908 +2026-04-09 12:21:05.182844: Pseudo dice [0.0, 0.0523, 0.6421, 0.0, 0.0, 0.0413, 0.3853] +2026-04-09 12:21:05.185595: Epoch time: 103.31 s +2026-04-09 12:21:06.330282: +2026-04-09 12:21:06.332258: Epoch 687 +2026-04-09 12:21:06.334045: Current learning rate: 0.00352 +2026-04-09 12:22:49.293139: train_loss -0.4653 +2026-04-09 12:22:49.299004: val_loss -0.4887 +2026-04-09 
12:22:49.302352: Pseudo dice [0.0, 0.4678, 0.5985, 0.0, 0.0, 0.6224, 0.2432] +2026-04-09 12:22:49.304312: Epoch time: 102.97 s +2026-04-09 12:22:50.456249: +2026-04-09 12:22:50.458283: Epoch 688 +2026-04-09 12:22:50.460976: Current learning rate: 0.00351 +2026-04-09 12:24:33.508080: train_loss -0.4391 +2026-04-09 12:24:33.516282: val_loss -0.4613 +2026-04-09 12:24:33.518391: Pseudo dice [0.0, 0.6657, 0.7225, 0.0, 0.0, 0.321, 0.3498] +2026-04-09 12:24:33.520617: Epoch time: 103.05 s +2026-04-09 12:24:34.668903: +2026-04-09 12:24:34.671634: Epoch 689 +2026-04-09 12:24:34.673504: Current learning rate: 0.0035 +2026-04-09 12:26:17.253381: train_loss -0.4513 +2026-04-09 12:26:17.258899: val_loss -0.3427 +2026-04-09 12:26:17.260660: Pseudo dice [0.0, 0.5249, 0.503, 0.0, 0.0, 0.0136, 0.4369] +2026-04-09 12:26:17.262674: Epoch time: 102.59 s +2026-04-09 12:26:18.413035: +2026-04-09 12:26:18.415120: Epoch 690 +2026-04-09 12:26:18.416770: Current learning rate: 0.00349 +2026-04-09 12:28:00.568341: train_loss -0.4394 +2026-04-09 12:28:00.577293: val_loss -0.4659 +2026-04-09 12:28:00.579708: Pseudo dice [0.0, 0.8568, 0.6846, 0.0, 0.0066, 0.5818, 0.7085] +2026-04-09 12:28:00.581483: Epoch time: 102.16 s +2026-04-09 12:28:01.734016: +2026-04-09 12:28:01.736565: Epoch 691 +2026-04-09 12:28:01.738263: Current learning rate: 0.00348 +2026-04-09 12:29:44.821327: train_loss -0.4722 +2026-04-09 12:29:44.829096: val_loss -0.4835 +2026-04-09 12:29:44.831758: Pseudo dice [0.0, 0.7453, 0.6975, 0.0, 0.1313, 0.3943, 0.3315] +2026-04-09 12:29:44.834640: Epoch time: 103.09 s +2026-04-09 12:29:45.962532: +2026-04-09 12:29:45.964660: Epoch 692 +2026-04-09 12:29:45.966964: Current learning rate: 0.00346 +2026-04-09 12:31:30.555428: train_loss -0.4658 +2026-04-09 12:31:30.561415: val_loss -0.442 +2026-04-09 12:31:30.564575: Pseudo dice [0.0, 0.7835, 0.6116, 0.0, 0.5734, 0.3166, 0.3421] +2026-04-09 12:31:30.566476: Epoch time: 104.6 s +2026-04-09 12:31:31.694833: +2026-04-09 12:31:31.697232: Epoch 
693 +2026-04-09 12:31:31.699517: Current learning rate: 0.00345 +2026-04-09 12:33:14.569224: train_loss -0.4662 +2026-04-09 12:33:14.579327: val_loss -0.48 +2026-04-09 12:33:14.596518: Pseudo dice [0.0, 0.6728, 0.6049, 0.0, 0.0577, 0.7339, 0.6949] +2026-04-09 12:33:14.598751: Epoch time: 102.88 s +2026-04-09 12:33:15.783680: +2026-04-09 12:33:15.785929: Epoch 694 +2026-04-09 12:33:15.787757: Current learning rate: 0.00344 +2026-04-09 12:34:58.856434: train_loss -0.5006 +2026-04-09 12:34:58.863307: val_loss -0.5053 +2026-04-09 12:34:58.865622: Pseudo dice [0.0, 0.7887, 0.6332, 0.0, 0.0002, 0.8015, 0.7173] +2026-04-09 12:34:58.867549: Epoch time: 103.08 s +2026-04-09 12:35:00.017108: +2026-04-09 12:35:00.019003: Epoch 695 +2026-04-09 12:35:00.020969: Current learning rate: 0.00343 +2026-04-09 12:36:42.851895: train_loss -0.5179 +2026-04-09 12:36:42.862633: val_loss -0.4669 +2026-04-09 12:36:42.864802: Pseudo dice [0.0, 0.8055, 0.7246, 0.0, 0.3354, 0.319, 0.5857] +2026-04-09 12:36:42.867259: Epoch time: 102.84 s +2026-04-09 12:36:44.086115: +2026-04-09 12:36:44.090311: Epoch 696 +2026-04-09 12:36:44.092419: Current learning rate: 0.00342 +2026-04-09 12:38:26.930195: train_loss -0.4861 +2026-04-09 12:38:26.936479: val_loss -0.4453 +2026-04-09 12:38:26.938208: Pseudo dice [0.0, 0.1946, 0.7167, 0.0, 0.0065, 0.2342, 0.3654] +2026-04-09 12:38:26.942404: Epoch time: 102.85 s +2026-04-09 12:38:28.089797: +2026-04-09 12:38:28.092655: Epoch 697 +2026-04-09 12:38:28.094817: Current learning rate: 0.00341 +2026-04-09 12:40:10.464637: train_loss -0.4742 +2026-04-09 12:40:10.471056: val_loss -0.4556 +2026-04-09 12:40:10.473945: Pseudo dice [0.0, 0.8022, 0.6587, 0.0, 0.0, 0.1667, 0.6272] +2026-04-09 12:40:10.476255: Epoch time: 102.38 s +2026-04-09 12:40:11.607873: +2026-04-09 12:40:11.609962: Epoch 698 +2026-04-09 12:40:11.611841: Current learning rate: 0.0034 +2026-04-09 12:41:54.852400: train_loss -0.4657 +2026-04-09 12:41:54.859602: val_loss -0.4817 +2026-04-09 12:41:54.862523: 
Pseudo dice [0.0, 0.7445, 0.6616, 0.0, 0.0, 0.2003, 0.4393] +2026-04-09 12:41:54.864664: Epoch time: 103.25 s +2026-04-09 12:41:56.008692: +2026-04-09 12:41:56.010855: Epoch 699 +2026-04-09 12:41:56.012587: Current learning rate: 0.00339 +2026-04-09 12:43:38.721864: train_loss -0.4665 +2026-04-09 12:43:38.733273: val_loss -0.4261 +2026-04-09 12:43:38.735986: Pseudo dice [0.0, 0.414, 0.4618, 0.0, 0.0, 0.0146, 0.1401] +2026-04-09 12:43:38.738071: Epoch time: 102.72 s +2026-04-09 12:43:41.804300: +2026-04-09 12:43:41.812755: Epoch 700 +2026-04-09 12:43:41.821666: Current learning rate: 0.00338 +2026-04-09 12:45:24.482114: train_loss -0.4375 +2026-04-09 12:45:24.488615: val_loss -0.4714 +2026-04-09 12:45:24.491034: Pseudo dice [0.0, 0.5685, 0.6909, 0.0, 0.0, 0.5101, 0.5287] +2026-04-09 12:45:24.493250: Epoch time: 102.68 s +2026-04-09 12:45:25.638597: +2026-04-09 12:45:25.640516: Epoch 701 +2026-04-09 12:45:25.642176: Current learning rate: 0.00337 +2026-04-09 12:47:08.073052: train_loss -0.5166 +2026-04-09 12:47:08.078464: val_loss -0.514 +2026-04-09 12:47:08.080379: Pseudo dice [0.0, 0.7825, 0.8058, 0.0, 0.0519, 0.3348, 0.6478] +2026-04-09 12:47:08.082154: Epoch time: 102.44 s +2026-04-09 12:47:09.237713: +2026-04-09 12:47:09.239831: Epoch 702 +2026-04-09 12:47:09.241929: Current learning rate: 0.00336 +2026-04-09 12:48:55.001672: train_loss -0.4647 +2026-04-09 12:48:55.042789: val_loss -0.5161 +2026-04-09 12:48:55.044749: Pseudo dice [0.0, 0.4099, 0.7284, 0.0, 0.2689, 0.358, 0.5703] +2026-04-09 12:48:55.047865: Epoch time: 105.77 s +2026-04-09 12:48:56.257952: +2026-04-09 12:48:56.261068: Epoch 703 +2026-04-09 12:48:56.266281: Current learning rate: 0.00335 +2026-04-09 12:50:40.088918: train_loss -0.5051 +2026-04-09 12:50:40.094888: val_loss -0.4508 +2026-04-09 12:50:40.097498: Pseudo dice [0.0, 0.72, 0.5526, 0.0, 0.1783, 0.6317, 0.746] +2026-04-09 12:50:40.099701: Epoch time: 103.83 s +2026-04-09 12:50:41.237177: +2026-04-09 12:50:41.239021: Epoch 704 +2026-04-09 
12:50:41.242603: Current learning rate: 0.00334 +2026-04-09 12:52:25.513397: train_loss -0.4865 +2026-04-09 12:52:25.519462: val_loss -0.4618 +2026-04-09 12:52:25.521646: Pseudo dice [0.0, 0.1451, 0.7481, 0.0, 0.234, 0.5042, 0.435] +2026-04-09 12:52:25.523655: Epoch time: 104.28 s +2026-04-09 12:52:26.667326: +2026-04-09 12:52:26.670946: Epoch 705 +2026-04-09 12:52:26.672669: Current learning rate: 0.00333 +2026-04-09 12:54:09.914487: train_loss -0.5172 +2026-04-09 12:54:09.929881: val_loss -0.5304 +2026-04-09 12:54:09.932405: Pseudo dice [0.0, 0.6319, 0.7416, 0.0, 0.005, 0.7702, 0.3496] +2026-04-09 12:54:09.934291: Epoch time: 103.25 s +2026-04-09 12:54:11.076988: +2026-04-09 12:54:11.078921: Epoch 706 +2026-04-09 12:54:11.080569: Current learning rate: 0.00332 +2026-04-09 12:55:53.433302: train_loss -0.4921 +2026-04-09 12:55:53.438710: val_loss -0.5265 +2026-04-09 12:55:53.440645: Pseudo dice [0.0, 0.618, 0.6995, 0.0, 0.505, 0.6388, 0.5855] +2026-04-09 12:55:53.442221: Epoch time: 102.36 s +2026-04-09 12:55:54.602489: +2026-04-09 12:55:54.606228: Epoch 707 +2026-04-09 12:55:54.618884: Current learning rate: 0.00331 +2026-04-09 12:57:37.657623: train_loss -0.5064 +2026-04-09 12:57:37.663089: val_loss -0.5007 +2026-04-09 12:57:37.665040: Pseudo dice [0.0, 0.6812, 0.6809, 0.0, 0.4012, 0.2154, 0.6891] +2026-04-09 12:57:37.666927: Epoch time: 103.06 s +2026-04-09 12:57:38.834659: +2026-04-09 12:57:38.836350: Epoch 708 +2026-04-09 12:57:38.856704: Current learning rate: 0.0033 +2026-04-09 12:59:21.133953: train_loss -0.5099 +2026-04-09 12:59:21.150346: val_loss -0.4708 +2026-04-09 12:59:21.167075: Pseudo dice [0.0, 0.4662, 0.6174, 0.0, 0.1867, 0.1687, 0.541] +2026-04-09 12:59:21.169742: Epoch time: 102.3 s +2026-04-09 12:59:22.300657: +2026-04-09 12:59:22.302355: Epoch 709 +2026-04-09 12:59:22.304929: Current learning rate: 0.00329 +2026-04-09 13:01:05.630605: train_loss -0.4588 +2026-04-09 13:01:05.638171: val_loss -0.4707 +2026-04-09 13:01:05.640492: Pseudo dice 
[0.0, 0.4796, 0.6093, 0.0, 0.2654, 0.0342, 0.3266] +2026-04-09 13:01:05.642964: Epoch time: 103.33 s +2026-04-09 13:01:06.770378: +2026-04-09 13:01:06.772641: Epoch 710 +2026-04-09 13:01:06.774667: Current learning rate: 0.00328 +2026-04-09 13:02:49.179877: train_loss -0.4761 +2026-04-09 13:02:49.186479: val_loss -0.4781 +2026-04-09 13:02:49.189262: Pseudo dice [0.0, 0.6082, 0.6865, 0.0, 0.3093, 0.0861, 0.4945] +2026-04-09 13:02:49.191763: Epoch time: 102.41 s +2026-04-09 13:02:50.355482: +2026-04-09 13:02:50.357256: Epoch 711 +2026-04-09 13:02:50.358889: Current learning rate: 0.00327 +2026-04-09 13:04:32.510608: train_loss -0.5032 +2026-04-09 13:04:32.516774: val_loss -0.5198 +2026-04-09 13:04:32.519327: Pseudo dice [0.0, 0.576, 0.7202, 0.0, 0.417, 0.4663, 0.7887] +2026-04-09 13:04:32.521088: Epoch time: 102.16 s +2026-04-09 13:04:34.740786: +2026-04-09 13:04:34.752739: Epoch 712 +2026-04-09 13:04:34.754572: Current learning rate: 0.00326 +2026-04-09 13:06:17.577202: train_loss -0.508 +2026-04-09 13:06:17.585742: val_loss -0.444 +2026-04-09 13:06:17.588584: Pseudo dice [0.0, 0.0, 0.7081, 0.0, 0.4601, 0.5391, 0.2651] +2026-04-09 13:06:17.591423: Epoch time: 102.84 s +2026-04-09 13:06:18.718841: +2026-04-09 13:06:18.721015: Epoch 713 +2026-04-09 13:06:18.722952: Current learning rate: 0.00325 +2026-04-09 13:08:00.911113: train_loss -0.468 +2026-04-09 13:08:00.917784: val_loss -0.4917 +2026-04-09 13:08:00.919901: Pseudo dice [0.0, 0.3805, 0.5849, 0.0, 0.1211, 0.5487, 0.5308] +2026-04-09 13:08:00.922508: Epoch time: 102.2 s +2026-04-09 13:08:02.093315: +2026-04-09 13:08:02.096319: Epoch 714 +2026-04-09 13:08:02.098027: Current learning rate: 0.00324 +2026-04-09 13:09:45.221810: train_loss -0.4652 +2026-04-09 13:09:45.228382: val_loss -0.486 +2026-04-09 13:09:45.230845: Pseudo dice [0.0, 0.1484, 0.7346, 0.0, 0.4848, 0.8088, 0.5908] +2026-04-09 13:09:45.234063: Epoch time: 103.13 s +2026-04-09 13:09:46.396603: +2026-04-09 13:09:46.402135: Epoch 715 +2026-04-09 
13:09:46.404631: Current learning rate: 0.00323 +2026-04-09 13:11:29.022992: train_loss -0.4629 +2026-04-09 13:11:29.028795: val_loss -0.3906 +2026-04-09 13:11:29.031583: Pseudo dice [0.0, 0.1137, 0.5509, 0.0, 0.0, 0.1076, 0.3824] +2026-04-09 13:11:29.034123: Epoch time: 102.63 s +2026-04-09 13:11:30.170823: +2026-04-09 13:11:30.174952: Epoch 716 +2026-04-09 13:11:30.178053: Current learning rate: 0.00322 +2026-04-09 13:13:13.095254: train_loss -0.4416 +2026-04-09 13:13:13.124438: val_loss -0.4667 +2026-04-09 13:13:13.128408: Pseudo dice [0.0, 0.6743, 0.572, 0.0, 0.0, 0.5062, 0.2943] +2026-04-09 13:13:13.142517: Epoch time: 102.93 s +2026-04-09 13:13:14.306684: +2026-04-09 13:13:14.308382: Epoch 717 +2026-04-09 13:13:14.310011: Current learning rate: 0.00321 +2026-04-09 13:14:56.968308: train_loss -0.4839 +2026-04-09 13:14:56.975866: val_loss -0.4585 +2026-04-09 13:14:56.977885: Pseudo dice [0.0, 0.3457, 0.6407, 0.0, 0.5005, 0.121, 0.5028] +2026-04-09 13:14:56.980705: Epoch time: 102.66 s +2026-04-09 13:14:58.145506: +2026-04-09 13:14:58.148022: Epoch 718 +2026-04-09 13:14:58.149739: Current learning rate: 0.0032 +2026-04-09 13:16:40.450080: train_loss -0.4965 +2026-04-09 13:16:40.455621: val_loss -0.4713 +2026-04-09 13:16:40.458382: Pseudo dice [0.0, 0.0, 0.7374, 0.0, 0.3898, 0.0916, 0.57] +2026-04-09 13:16:40.460496: Epoch time: 102.31 s +2026-04-09 13:16:41.614415: +2026-04-09 13:16:41.616187: Epoch 719 +2026-04-09 13:16:41.617844: Current learning rate: 0.00319 +2026-04-09 13:18:24.478772: train_loss -0.4824 +2026-04-09 13:18:24.486899: val_loss -0.4896 +2026-04-09 13:18:24.489060: Pseudo dice [0.0, 0.0, 0.7224, 0.0, 0.0012, 0.5822, 0.6359] +2026-04-09 13:18:24.491668: Epoch time: 102.87 s +2026-04-09 13:18:25.631120: +2026-04-09 13:18:25.633017: Epoch 720 +2026-04-09 13:18:25.635152: Current learning rate: 0.00318 +2026-04-09 13:20:07.643392: train_loss -0.4876 +2026-04-09 13:20:07.648487: val_loss -0.4373 +2026-04-09 13:20:07.650427: Pseudo dice [0.0, 0.0, 
0.5741, 0.0, 0.2436, 0.2295, 0.6155] +2026-04-09 13:20:07.652164: Epoch time: 102.02 s +2026-04-09 13:20:08.836035: +2026-04-09 13:20:08.837875: Epoch 721 +2026-04-09 13:20:08.839765: Current learning rate: 0.00317 +2026-04-09 13:21:51.019475: train_loss -0.4994 +2026-04-09 13:21:51.024601: val_loss -0.4957 +2026-04-09 13:21:51.026638: Pseudo dice [0.0, 0.0, 0.7357, 0.0, 0.4025, 0.627, 0.7176] +2026-04-09 13:21:51.028732: Epoch time: 102.19 s +2026-04-09 13:21:52.391578: +2026-04-09 13:21:52.393537: Epoch 722 +2026-04-09 13:21:52.395209: Current learning rate: 0.00316 +2026-04-09 13:23:34.856734: train_loss -0.4833 +2026-04-09 13:23:34.862110: val_loss -0.4457 +2026-04-09 13:23:34.864387: Pseudo dice [0.0, 0.0464, 0.684, 0.0, 0.2493, 0.101, 0.6803] +2026-04-09 13:23:34.867053: Epoch time: 102.47 s +2026-04-09 13:23:36.038399: +2026-04-09 13:23:36.039884: Epoch 723 +2026-04-09 13:23:36.041398: Current learning rate: 0.00315 +2026-04-09 13:25:18.993700: train_loss -0.4985 +2026-04-09 13:25:19.002501: val_loss -0.4967 +2026-04-09 13:25:19.005838: Pseudo dice [0.0, 0.138, 0.7315, 0.0, 0.3661, 0.1908, 0.5031] +2026-04-09 13:25:19.010111: Epoch time: 102.96 s +2026-04-09 13:25:20.194831: +2026-04-09 13:25:20.197129: Epoch 724 +2026-04-09 13:25:20.200937: Current learning rate: 0.00314 +2026-04-09 13:27:02.406229: train_loss -0.4683 +2026-04-09 13:27:02.413388: val_loss -0.4969 +2026-04-09 13:27:02.415778: Pseudo dice [0.0, 0.6055, 0.7503, 0.0, 0.1923, 0.3163, 0.6584] +2026-04-09 13:27:02.418025: Epoch time: 102.21 s +2026-04-09 13:27:03.581532: +2026-04-09 13:27:03.583045: Epoch 725 +2026-04-09 13:27:03.584907: Current learning rate: 0.00313 +2026-04-09 13:28:46.239560: train_loss -0.5227 +2026-04-09 13:28:46.245472: val_loss -0.4847 +2026-04-09 13:28:46.248691: Pseudo dice [0.0, 0.0, 0.7667, 0.0, 0.1487, 0.8737, 0.6254] +2026-04-09 13:28:46.251049: Epoch time: 102.66 s +2026-04-09 13:28:47.406533: +2026-04-09 13:28:47.408723: Epoch 726 +2026-04-09 13:28:47.410467: 
Current learning rate: 0.00312 +2026-04-09 13:30:29.917626: train_loss -0.5109 +2026-04-09 13:30:29.924263: val_loss -0.4455 +2026-04-09 13:30:29.926554: Pseudo dice [0.0, 0.0363, 0.7609, 0.0, 0.4034, 0.0699, 0.2806] +2026-04-09 13:30:29.929075: Epoch time: 102.51 s +2026-04-09 13:30:31.077061: +2026-04-09 13:30:31.079029: Epoch 727 +2026-04-09 13:30:31.082158: Current learning rate: 0.00311 +2026-04-09 13:32:13.917826: train_loss -0.4923 +2026-04-09 13:32:13.925570: val_loss -0.4782 +2026-04-09 13:32:13.927752: Pseudo dice [0.0, 0.2378, 0.7639, 0.0, 0.326, 0.7524, 0.7788] +2026-04-09 13:32:13.929728: Epoch time: 102.84 s +2026-04-09 13:32:15.063858: +2026-04-09 13:32:15.065773: Epoch 728 +2026-04-09 13:32:15.067411: Current learning rate: 0.0031 +2026-04-09 13:33:57.922233: train_loss -0.4887 +2026-04-09 13:33:57.929974: val_loss -0.498 +2026-04-09 13:33:57.932673: Pseudo dice [0.0, 0.6794, 0.75, 0.0, 0.3342, 0.5322, 0.4664] +2026-04-09 13:33:57.934746: Epoch time: 102.86 s +2026-04-09 13:33:59.104690: +2026-04-09 13:33:59.118292: Epoch 729 +2026-04-09 13:33:59.125769: Current learning rate: 0.00309 +2026-04-09 13:35:42.385503: train_loss -0.5095 +2026-04-09 13:35:42.418714: val_loss -0.5155 +2026-04-09 13:35:42.424819: Pseudo dice [0.0, 0.7707, 0.7024, 0.0, 0.6132, 0.7248, 0.7575] +2026-04-09 13:35:42.428442: Epoch time: 103.28 s +2026-04-09 13:35:43.583019: +2026-04-09 13:35:43.586523: Epoch 730 +2026-04-09 13:35:43.588533: Current learning rate: 0.00308 +2026-04-09 13:37:26.459074: train_loss -0.504 +2026-04-09 13:37:26.466695: val_loss -0.5116 +2026-04-09 13:37:26.469783: Pseudo dice [0.0, 0.142, 0.6231, 0.0, 0.428, 0.2253, 0.6889] +2026-04-09 13:37:26.472466: Epoch time: 102.88 s +2026-04-09 13:37:27.611835: +2026-04-09 13:37:27.613801: Epoch 731 +2026-04-09 13:37:27.615267: Current learning rate: 0.00307 +2026-04-09 13:39:11.543321: train_loss -0.5276 +2026-04-09 13:39:11.549306: val_loss -0.5145 +2026-04-09 13:39:11.551955: Pseudo dice [0.0, 0.6126, 0.6311, 
0.0, 0.2415, 0.4062, 0.6944] +2026-04-09 13:39:11.554241: Epoch time: 103.93 s +2026-04-09 13:39:12.729206: +2026-04-09 13:39:12.744435: Epoch 732 +2026-04-09 13:39:12.747954: Current learning rate: 0.00306 +2026-04-09 13:40:55.479948: train_loss -0.5442 +2026-04-09 13:40:55.485384: val_loss -0.4658 +2026-04-09 13:40:55.489303: Pseudo dice [0.0, 0.7524, 0.4929, 0.0, 0.2156, 0.1338, 0.7674] +2026-04-09 13:40:55.491343: Epoch time: 102.75 s +2026-04-09 13:40:56.697584: +2026-04-09 13:40:56.700299: Epoch 733 +2026-04-09 13:40:56.705018: Current learning rate: 0.00305 +2026-04-09 13:42:39.376324: train_loss -0.4937 +2026-04-09 13:42:39.381550: val_loss -0.5322 +2026-04-09 13:42:39.383539: Pseudo dice [0.0, 0.4923, 0.7305, 0.0, 0.3331, 0.7829, 0.6939] +2026-04-09 13:42:39.385413: Epoch time: 102.68 s +2026-04-09 13:42:40.545569: +2026-04-09 13:42:40.547789: Epoch 734 +2026-04-09 13:42:40.549689: Current learning rate: 0.00304 +2026-04-09 13:44:23.742852: train_loss -0.535 +2026-04-09 13:44:23.757122: val_loss -0.457 +2026-04-09 13:44:23.759093: Pseudo dice [0.0, 0.6698, 0.6222, 0.0, 0.2561, 0.2249, 0.455] +2026-04-09 13:44:23.761069: Epoch time: 103.2 s +2026-04-09 13:44:24.909851: +2026-04-09 13:44:24.911701: Epoch 735 +2026-04-09 13:44:24.913904: Current learning rate: 0.00303 +2026-04-09 13:46:08.553439: train_loss -0.5235 +2026-04-09 13:46:08.559250: val_loss -0.4838 +2026-04-09 13:46:08.561328: Pseudo dice [0.0, 0.4257, 0.5795, 0.0, 0.3657, 0.5037, 0.6212] +2026-04-09 13:46:08.563440: Epoch time: 103.65 s +2026-04-09 13:46:09.719213: +2026-04-09 13:46:09.727241: Epoch 736 +2026-04-09 13:46:09.732970: Current learning rate: 0.00302 +2026-04-09 13:47:52.004196: train_loss -0.5337 +2026-04-09 13:47:52.009398: val_loss -0.4699 +2026-04-09 13:47:52.011501: Pseudo dice [0.0, 0.5218, 0.5825, 0.0, 0.4237, 0.5623, 0.4871] +2026-04-09 13:47:52.013153: Epoch time: 102.29 s +2026-04-09 13:47:53.163038: +2026-04-09 13:47:53.164625: Epoch 737 +2026-04-09 13:47:53.166237: Current 
learning rate: 0.00301 +2026-04-09 13:49:35.591941: train_loss -0.4811 +2026-04-09 13:49:35.603553: val_loss -0.4946 +2026-04-09 13:49:35.605590: Pseudo dice [0.0, 0.6631, 0.7022, 0.0, 0.5694, 0.4809, 0.6679] +2026-04-09 13:49:35.607915: Epoch time: 102.43 s +2026-04-09 13:49:36.766598: +2026-04-09 13:49:36.768382: Epoch 738 +2026-04-09 13:49:36.770098: Current learning rate: 0.003 +2026-04-09 13:51:19.065094: train_loss -0.5325 +2026-04-09 13:51:19.073558: val_loss -0.4554 +2026-04-09 13:51:19.076088: Pseudo dice [0.0, 0.3084, 0.7193, 0.0, 0.3073, 0.4107, 0.4545] +2026-04-09 13:51:19.077935: Epoch time: 102.3 s +2026-04-09 13:51:20.240269: +2026-04-09 13:51:20.243826: Epoch 739 +2026-04-09 13:51:20.245720: Current learning rate: 0.00299 +2026-04-09 13:53:02.462439: train_loss -0.4985 +2026-04-09 13:53:02.467770: val_loss -0.4624 +2026-04-09 13:53:02.469722: Pseudo dice [0.0, 0.4958, 0.6972, 0.0, 0.0, 0.2447, 0.4554] +2026-04-09 13:53:02.471521: Epoch time: 102.23 s +2026-04-09 13:53:03.600441: +2026-04-09 13:53:03.602024: Epoch 740 +2026-04-09 13:53:03.603745: Current learning rate: 0.00297 +2026-04-09 13:54:45.989594: train_loss -0.4457 +2026-04-09 13:54:45.995695: val_loss -0.4435 +2026-04-09 13:54:45.997474: Pseudo dice [0.0, 0.1384, 0.63, 0.0, 0.0955, 0.3643, 0.4265] +2026-04-09 13:54:46.000258: Epoch time: 102.39 s +2026-04-09 13:54:47.131316: +2026-04-09 13:54:47.135632: Epoch 741 +2026-04-09 13:54:47.140548: Current learning rate: 0.00296 +2026-04-09 13:56:29.683073: train_loss -0.5029 +2026-04-09 13:56:29.689060: val_loss -0.4852 +2026-04-09 13:56:29.691865: Pseudo dice [0.0, 0.1543, 0.6423, 0.0, 0.2928, 0.5856, 0.7043] +2026-04-09 13:56:29.693811: Epoch time: 102.55 s +2026-04-09 13:56:30.840567: +2026-04-09 13:56:30.851421: Epoch 742 +2026-04-09 13:56:30.852866: Current learning rate: 0.00295 +2026-04-09 13:58:13.970178: train_loss -0.5081 +2026-04-09 13:58:13.976170: val_loss -0.4944 +2026-04-09 13:58:13.978734: Pseudo dice [0.0, 0.529, 0.7095, 0.0, 
0.2124, 0.4036, 0.2253] +2026-04-09 13:58:13.980651: Epoch time: 103.13 s +2026-04-09 13:58:15.169557: +2026-04-09 13:58:15.171160: Epoch 743 +2026-04-09 13:58:15.172582: Current learning rate: 0.00294 +2026-04-09 13:59:57.379693: train_loss -0.4802 +2026-04-09 13:59:57.385990: val_loss -0.4861 +2026-04-09 13:59:57.388065: Pseudo dice [0.0, 0.6073, 0.7383, 0.0, 0.1995, 0.6984, 0.6963] +2026-04-09 13:59:57.389879: Epoch time: 102.21 s +2026-04-09 13:59:58.527510: +2026-04-09 13:59:58.529224: Epoch 744 +2026-04-09 13:59:58.530654: Current learning rate: 0.00293 +2026-04-09 14:01:41.187832: train_loss -0.5175 +2026-04-09 14:01:41.195058: val_loss -0.4888 +2026-04-09 14:01:41.197191: Pseudo dice [0.0, 0.1358, 0.7646, 0.0, 0.4433, 0.7329, 0.5826] +2026-04-09 14:01:41.199135: Epoch time: 102.66 s +2026-04-09 14:01:42.330230: +2026-04-09 14:01:42.332541: Epoch 745 +2026-04-09 14:01:42.334356: Current learning rate: 0.00292 +2026-04-09 14:03:25.006607: train_loss -0.5042 +2026-04-09 14:03:25.013611: val_loss -0.5171 +2026-04-09 14:03:25.015459: Pseudo dice [0.0, 0.8293, 0.719, 0.0, 0.2681, 0.6, 0.6077] +2026-04-09 14:03:25.017395: Epoch time: 102.68 s +2026-04-09 14:03:26.157298: +2026-04-09 14:03:26.158937: Epoch 746 +2026-04-09 14:03:26.160830: Current learning rate: 0.00291 +2026-04-09 14:05:09.266770: train_loss -0.4929 +2026-04-09 14:05:09.271932: val_loss -0.4554 +2026-04-09 14:05:09.273963: Pseudo dice [0.0, 0.557, 0.7357, 0.0, 0.1663, 0.7486, 0.7184] +2026-04-09 14:05:09.275860: Epoch time: 103.11 s +2026-04-09 14:05:10.397830: +2026-04-09 14:05:10.400016: Epoch 747 +2026-04-09 14:05:10.402099: Current learning rate: 0.0029 +2026-04-09 14:06:52.934839: train_loss -0.4897 +2026-04-09 14:06:52.941083: val_loss -0.4973 +2026-04-09 14:06:52.943041: Pseudo dice [0.0, 0.0841, 0.507, 0.0, 0.0041, 0.7759, 0.6206] +2026-04-09 14:06:52.945143: Epoch time: 102.54 s +2026-04-09 14:06:54.277828: +2026-04-09 14:06:54.279282: Epoch 748 +2026-04-09 14:06:54.280605: Current 
learning rate: 0.00289 +2026-04-09 14:08:36.391807: train_loss -0.4861 +2026-04-09 14:08:36.396514: val_loss -0.444 +2026-04-09 14:08:36.398442: Pseudo dice [0.0, 0.7525, 0.5249, 0.0, 0.0, 0.2383, 0.4075] +2026-04-09 14:08:36.400413: Epoch time: 102.12 s +2026-04-09 14:08:37.588018: +2026-04-09 14:08:37.589680: Epoch 749 +2026-04-09 14:08:37.591141: Current learning rate: 0.00288 +2026-04-09 14:10:20.080096: train_loss -0.4807 +2026-04-09 14:10:20.085794: val_loss -0.5333 +2026-04-09 14:10:20.087694: Pseudo dice [0.0, 0.6125, 0.7287, 0.0, 0.0, 0.8192, 0.58] +2026-04-09 14:10:20.089643: Epoch time: 102.5 s +2026-04-09 14:10:22.949630: +2026-04-09 14:10:22.951097: Epoch 750 +2026-04-09 14:10:22.952642: Current learning rate: 0.00287 +2026-04-09 14:12:06.054637: train_loss -0.4842 +2026-04-09 14:12:06.062209: val_loss -0.4911 +2026-04-09 14:12:06.064382: Pseudo dice [0.0, 0.7911, 0.6865, 0.0, 0.0046, 0.7493, 0.6811] +2026-04-09 14:12:06.066250: Epoch time: 103.11 s +2026-04-09 14:12:07.222224: +2026-04-09 14:12:07.223938: Epoch 751 +2026-04-09 14:12:07.225764: Current learning rate: 0.00286 +2026-04-09 14:13:49.683064: train_loss -0.4925 +2026-04-09 14:13:49.687807: val_loss -0.5015 +2026-04-09 14:13:49.689554: Pseudo dice [0.0, 0.8064, 0.7561, 0.0, 0.2242, 0.0976, 0.8459] +2026-04-09 14:13:49.693571: Epoch time: 102.46 s +2026-04-09 14:13:50.840222: +2026-04-09 14:13:50.841878: Epoch 752 +2026-04-09 14:13:50.843243: Current learning rate: 0.00285 +2026-04-09 14:15:33.536079: train_loss -0.5036 +2026-04-09 14:15:33.541219: val_loss -0.5334 +2026-04-09 14:15:33.542841: Pseudo dice [0.0, 0.7214, 0.6522, 0.0, 0.1065, 0.6391, 0.7908] +2026-04-09 14:15:33.545092: Epoch time: 102.7 s +2026-04-09 14:15:34.694646: +2026-04-09 14:15:34.696275: Epoch 753 +2026-04-09 14:15:34.698131: Current learning rate: 0.00284 +2026-04-09 14:17:17.234525: train_loss -0.5099 +2026-04-09 14:17:17.240239: val_loss -0.4944 +2026-04-09 14:17:17.242500: Pseudo dice [0.0, 0.1641, 0.6216, 0.0, 
0.4132, 0.7483, 0.5157] +2026-04-09 14:17:17.244189: Epoch time: 102.54 s +2026-04-09 14:17:18.406015: +2026-04-09 14:17:18.408298: Epoch 754 +2026-04-09 14:17:18.409965: Current learning rate: 0.00283 +2026-04-09 14:19:01.319176: train_loss -0.5385 +2026-04-09 14:19:01.325872: val_loss -0.5118 +2026-04-09 14:19:01.328468: Pseudo dice [0.0, 0.3318, 0.705, 0.0, 0.399, 0.6111, 0.5381] +2026-04-09 14:19:01.330414: Epoch time: 102.92 s +2026-04-09 14:19:02.467567: +2026-04-09 14:19:02.469907: Epoch 755 +2026-04-09 14:19:02.472182: Current learning rate: 0.00282 +2026-04-09 14:20:44.858574: train_loss -0.4626 +2026-04-09 14:20:44.864575: val_loss -0.4431 +2026-04-09 14:20:44.866479: Pseudo dice [0.0, 0.0782, 0.5747, 0.0, 0.2365, 0.2044, 0.7643] +2026-04-09 14:20:44.868834: Epoch time: 102.39 s +2026-04-09 14:20:46.087041: +2026-04-09 14:20:46.088771: Epoch 756 +2026-04-09 14:20:46.090298: Current learning rate: 0.00281 +2026-04-09 14:22:29.090839: train_loss -0.5146 +2026-04-09 14:22:29.095902: val_loss -0.51 +2026-04-09 14:22:29.098310: Pseudo dice [0.0, 0.585, 0.7099, 0.0, 0.1979, 0.064, 0.6267] +2026-04-09 14:22:29.100525: Epoch time: 103.01 s +2026-04-09 14:22:30.256779: +2026-04-09 14:22:30.258551: Epoch 757 +2026-04-09 14:22:30.260228: Current learning rate: 0.0028 +2026-04-09 14:24:12.861866: train_loss -0.5181 +2026-04-09 14:24:12.868930: val_loss -0.4961 +2026-04-09 14:24:12.870888: Pseudo dice [0.0, 0.6824, 0.6754, 0.0, 0.3619, 0.2813, 0.7251] +2026-04-09 14:24:12.872445: Epoch time: 102.61 s +2026-04-09 14:24:14.017658: +2026-04-09 14:24:14.019850: Epoch 758 +2026-04-09 14:24:14.021476: Current learning rate: 0.00279 +2026-04-09 14:25:56.446724: train_loss -0.5061 +2026-04-09 14:25:56.454621: val_loss -0.4672 +2026-04-09 14:25:56.456504: Pseudo dice [0.0, 0.3621, 0.7111, 0.1676, 0.3865, 0.742, 0.575] +2026-04-09 14:25:56.458531: Epoch time: 102.43 s +2026-04-09 14:25:57.595496: +2026-04-09 14:25:57.597652: Epoch 759 +2026-04-09 14:25:57.600115: Current 
learning rate: 0.00278 +2026-04-09 14:27:39.936879: train_loss -0.5221 +2026-04-09 14:27:39.942620: val_loss -0.4179 +2026-04-09 14:27:39.944665: Pseudo dice [0.0, 0.835, 0.7729, 0.0, 0.2845, 0.5428, 0.3985] +2026-04-09 14:27:39.946445: Epoch time: 102.34 s +2026-04-09 14:27:41.093626: +2026-04-09 14:27:41.095762: Epoch 760 +2026-04-09 14:27:41.097467: Current learning rate: 0.00277 +2026-04-09 14:29:23.569923: train_loss -0.4469 +2026-04-09 14:29:23.575024: val_loss -0.4803 +2026-04-09 14:29:23.577103: Pseudo dice [0.0, 0.4736, 0.4808, 0.0, 0.0, 0.0929, 0.3922] +2026-04-09 14:29:23.579086: Epoch time: 102.48 s +2026-04-09 14:29:24.730137: +2026-04-09 14:29:24.733776: Epoch 761 +2026-04-09 14:29:24.735604: Current learning rate: 0.00276 +2026-04-09 14:31:07.274618: train_loss -0.4829 +2026-04-09 14:31:07.280876: val_loss -0.5126 +2026-04-09 14:31:07.283442: Pseudo dice [0.0, 0.5597, 0.7017, 0.0292, 0.4938, 0.1289, 0.6545] +2026-04-09 14:31:07.285567: Epoch time: 102.55 s +2026-04-09 14:31:08.481736: +2026-04-09 14:31:08.483902: Epoch 762 +2026-04-09 14:31:08.485555: Current learning rate: 0.00275 +2026-04-09 14:32:50.931614: train_loss -0.4953 +2026-04-09 14:32:50.937137: val_loss -0.4954 +2026-04-09 14:32:50.938963: Pseudo dice [0.0, 0.7884, 0.6897, 0.041, 0.3915, 0.6642, 0.6424] +2026-04-09 14:32:50.940578: Epoch time: 102.45 s +2026-04-09 14:32:52.094117: +2026-04-09 14:32:52.095947: Epoch 763 +2026-04-09 14:32:52.097361: Current learning rate: 0.00274 +2026-04-09 14:34:34.766600: train_loss -0.5343 +2026-04-09 14:34:34.772110: val_loss -0.5092 +2026-04-09 14:34:34.774602: Pseudo dice [0.0, 0.5968, 0.7047, 0.0, 0.2464, 0.8437, 0.4973] +2026-04-09 14:34:34.776386: Epoch time: 102.68 s +2026-04-09 14:34:35.913235: +2026-04-09 14:34:35.918591: Epoch 764 +2026-04-09 14:34:35.920181: Current learning rate: 0.00273 +2026-04-09 14:36:18.996268: train_loss -0.5051 +2026-04-09 14:36:19.001575: val_loss -0.4719 +2026-04-09 14:36:19.004108: Pseudo dice [0.0, 0.5736, 
0.6634, 0.0, 0.4239, 0.5812, 0.4138] +2026-04-09 14:36:19.006084: Epoch time: 103.09 s +2026-04-09 14:36:20.173079: +2026-04-09 14:36:20.174870: Epoch 765 +2026-04-09 14:36:20.177465: Current learning rate: 0.00272 +2026-04-09 14:38:03.043349: train_loss -0.5224 +2026-04-09 14:38:03.050747: val_loss -0.4949 +2026-04-09 14:38:03.052584: Pseudo dice [0.0, 0.4977, 0.5588, 0.0, 0.4141, 0.3085, 0.8199] +2026-04-09 14:38:03.055050: Epoch time: 102.87 s +2026-04-09 14:38:04.214013: +2026-04-09 14:38:04.216158: Epoch 766 +2026-04-09 14:38:04.218732: Current learning rate: 0.00271 +2026-04-09 14:39:47.083590: train_loss -0.5402 +2026-04-09 14:39:47.089660: val_loss -0.4998 +2026-04-09 14:39:47.091428: Pseudo dice [0.0, 0.4444, 0.6995, 0.0, 0.5524, 0.5912, 0.7149] +2026-04-09 14:39:47.093440: Epoch time: 102.87 s +2026-04-09 14:39:48.258285: +2026-04-09 14:39:48.260749: Epoch 767 +2026-04-09 14:39:48.263723: Current learning rate: 0.0027 +2026-04-09 14:41:31.381172: train_loss -0.5309 +2026-04-09 14:41:31.388013: val_loss -0.481 +2026-04-09 14:41:31.389897: Pseudo dice [0.0, 0.6356, 0.7125, 0.1977, 0.6646, 0.5941, 0.7379] +2026-04-09 14:41:31.392238: Epoch time: 103.13 s +2026-04-09 14:41:32.545236: +2026-04-09 14:41:32.547688: Epoch 768 +2026-04-09 14:41:32.549529: Current learning rate: 0.00268 +2026-04-09 14:43:15.223800: train_loss -0.5411 +2026-04-09 14:43:15.230716: val_loss -0.5319 +2026-04-09 14:43:15.232583: Pseudo dice [0.0, 0.5956, 0.75, 0.0033, 0.3393, 0.7244, 0.7678] +2026-04-09 14:43:15.234365: Epoch time: 102.68 s +2026-04-09 14:43:16.394785: +2026-04-09 14:43:16.396561: Epoch 769 +2026-04-09 14:43:16.398270: Current learning rate: 0.00267 +2026-04-09 14:44:59.576290: train_loss -0.5293 +2026-04-09 14:44:59.584146: val_loss -0.4772 +2026-04-09 14:44:59.586465: Pseudo dice [0.0, 0.5758, 0.8029, 0.0341, 0.6755, 0.3299, 0.466] +2026-04-09 14:44:59.588457: Epoch time: 103.18 s +2026-04-09 14:45:01.915990: +2026-04-09 14:45:01.917905: Epoch 770 +2026-04-09 
14:45:01.919705: Current learning rate: 0.00266 +2026-04-09 14:46:44.358950: train_loss -0.5457 +2026-04-09 14:46:44.369153: val_loss -0.5094 +2026-04-09 14:46:44.373103: Pseudo dice [0.0, 0.1478, 0.6969, 0.5338, 0.6267, 0.2724, 0.3582] +2026-04-09 14:46:44.375206: Epoch time: 102.45 s +2026-04-09 14:46:45.526876: +2026-04-09 14:46:45.528563: Epoch 771 +2026-04-09 14:46:45.530451: Current learning rate: 0.00265 +2026-04-09 14:48:28.441200: train_loss -0.5001 +2026-04-09 14:48:28.447216: val_loss -0.487 +2026-04-09 14:48:28.449544: Pseudo dice [0.0, 0.6716, 0.6957, 0.4483, 0.4383, 0.1059, 0.3748] +2026-04-09 14:48:28.451667: Epoch time: 102.92 s +2026-04-09 14:48:29.610678: +2026-04-09 14:48:29.612544: Epoch 772 +2026-04-09 14:48:29.614650: Current learning rate: 0.00264 +2026-04-09 14:50:12.618535: train_loss -0.5295 +2026-04-09 14:50:12.629381: val_loss -0.5076 +2026-04-09 14:50:12.639433: Pseudo dice [0.0, 0.5834, 0.681, 0.5033, 0.4739, 0.3228, 0.6575] +2026-04-09 14:50:12.642024: Epoch time: 103.01 s +2026-04-09 14:50:13.800761: +2026-04-09 14:50:13.802662: Epoch 773 +2026-04-09 14:50:13.804780: Current learning rate: 0.00263 +2026-04-09 14:51:56.588268: train_loss -0.5064 +2026-04-09 14:51:56.596408: val_loss -0.5225 +2026-04-09 14:51:56.599664: Pseudo dice [0.0, 0.8286, 0.7082, 0.0, 0.1921, 0.3231, 0.4675] +2026-04-09 14:51:56.602993: Epoch time: 102.79 s +2026-04-09 14:51:57.782933: +2026-04-09 14:51:57.785741: Epoch 774 +2026-04-09 14:51:57.788152: Current learning rate: 0.00262 +2026-04-09 14:53:40.976328: train_loss -0.5447 +2026-04-09 14:53:40.984482: val_loss -0.5022 +2026-04-09 14:53:40.986478: Pseudo dice [0.0, 0.8257, 0.6789, 0.0, 0.336, 0.7782, 0.7372] +2026-04-09 14:53:40.989289: Epoch time: 103.2 s +2026-04-09 14:53:42.147459: +2026-04-09 14:53:42.149345: Epoch 775 +2026-04-09 14:53:42.151392: Current learning rate: 0.00261 +2026-04-09 14:55:24.608294: train_loss -0.5332 +2026-04-09 14:55:24.616247: val_loss -0.5251 +2026-04-09 14:55:24.618974: 
Pseudo dice [0.0, 0.7945, 0.6031, 0.0, 0.4984, 0.8086, 0.637] +2026-04-09 14:55:24.621465: Epoch time: 102.46 s +2026-04-09 14:55:25.846896: +2026-04-09 14:55:25.848846: Epoch 776 +2026-04-09 14:55:25.851143: Current learning rate: 0.0026 +2026-04-09 14:57:08.412656: train_loss -0.5299 +2026-04-09 14:57:08.421994: val_loss -0.5079 +2026-04-09 14:57:08.424442: Pseudo dice [0.0, 0.7329, 0.7016, 0.4492, 0.3144, 0.113, 0.6017] +2026-04-09 14:57:08.426672: Epoch time: 102.57 s +2026-04-09 14:57:09.591329: +2026-04-09 14:57:09.594105: Epoch 777 +2026-04-09 14:57:09.596344: Current learning rate: 0.00259 +2026-04-09 14:58:52.137000: train_loss -0.5386 +2026-04-09 14:58:52.145497: val_loss -0.476 +2026-04-09 14:58:52.148363: Pseudo dice [0.0, 0.4674, 0.7585, 0.0, 0.1419, 0.6461, 0.7918] +2026-04-09 14:58:52.150981: Epoch time: 102.55 s +2026-04-09 14:58:53.393639: +2026-04-09 14:58:53.396343: Epoch 778 +2026-04-09 14:58:53.398140: Current learning rate: 0.00258 +2026-04-09 15:00:36.432334: train_loss -0.5099 +2026-04-09 15:00:36.439248: val_loss -0.504 +2026-04-09 15:00:36.442029: Pseudo dice [0.0, 0.1981, 0.7093, 0.0002, 0.2805, 0.6509, 0.4543] +2026-04-09 15:00:36.444000: Epoch time: 103.04 s +2026-04-09 15:00:37.622861: +2026-04-09 15:00:37.625143: Epoch 779 +2026-04-09 15:00:37.628067: Current learning rate: 0.00257 +2026-04-09 15:02:20.767235: train_loss -0.5439 +2026-04-09 15:02:20.774767: val_loss -0.523 +2026-04-09 15:02:20.777058: Pseudo dice [0.0, 0.5535, 0.6202, 0.0481, 0.4024, 0.7821, 0.7097] +2026-04-09 15:02:20.779837: Epoch time: 103.15 s +2026-04-09 15:02:21.950930: +2026-04-09 15:02:21.953113: Epoch 780 +2026-04-09 15:02:21.954995: Current learning rate: 0.00256 +2026-04-09 15:04:04.459216: train_loss -0.5199 +2026-04-09 15:04:04.466609: val_loss -0.548 +2026-04-09 15:04:04.468787: Pseudo dice [0.0, 0.5239, 0.7217, 0.0, 0.2637, 0.9214, 0.8217] +2026-04-09 15:04:04.470820: Epoch time: 102.51 s +2026-04-09 15:04:05.644847: +2026-04-09 15:04:05.646879: Epoch 
781 +2026-04-09 15:04:05.651083: Current learning rate: 0.00255 +2026-04-09 15:05:48.402461: train_loss -0.5337 +2026-04-09 15:05:48.418883: val_loss -0.4781 +2026-04-09 15:05:48.421052: Pseudo dice [0.0, 0.3257, 0.5367, 0.0109, 0.5401, 0.1866, 0.5834] +2026-04-09 15:05:48.423110: Epoch time: 102.76 s +2026-04-09 15:05:49.643728: +2026-04-09 15:05:49.646404: Epoch 782 +2026-04-09 15:05:49.649127: Current learning rate: 0.00254 +2026-04-09 15:07:32.873836: train_loss -0.5324 +2026-04-09 15:07:32.881456: val_loss -0.4813 +2026-04-09 15:07:32.884087: Pseudo dice [0.0, 0.6989, 0.6163, 0.0, 0.2657, 0.5812, 0.5054] +2026-04-09 15:07:32.886256: Epoch time: 103.23 s +2026-04-09 15:07:34.040920: +2026-04-09 15:07:34.043099: Epoch 783 +2026-04-09 15:07:34.046377: Current learning rate: 0.00253 +2026-04-09 15:09:16.538244: train_loss -0.526 +2026-04-09 15:09:16.546250: val_loss -0.5361 +2026-04-09 15:09:16.549324: Pseudo dice [0.0, 0.5921, 0.5455, 0.1782, 0.5631, 0.8519, 0.6412] +2026-04-09 15:09:16.551589: Epoch time: 102.5 s +2026-04-09 15:09:17.729527: +2026-04-09 15:09:17.732081: Epoch 784 +2026-04-09 15:09:17.734471: Current learning rate: 0.00252 +2026-04-09 15:11:00.470234: train_loss -0.535 +2026-04-09 15:11:00.477286: val_loss -0.5434 +2026-04-09 15:11:00.479507: Pseudo dice [0.0, 0.6864, 0.67, 0.0023, 0.5944, 0.8986, 0.523] +2026-04-09 15:11:00.481796: Epoch time: 102.74 s +2026-04-09 15:11:01.701204: +2026-04-09 15:11:01.703213: Epoch 785 +2026-04-09 15:11:01.705274: Current learning rate: 0.00251 +2026-04-09 15:12:45.780521: train_loss -0.5488 +2026-04-09 15:12:45.791000: val_loss -0.4939 +2026-04-09 15:12:45.795597: Pseudo dice [0.0, 0.8468, 0.7154, 0.0876, 0.4951, 0.1914, 0.3834] +2026-04-09 15:12:45.799132: Epoch time: 104.08 s +2026-04-09 15:12:47.004915: +2026-04-09 15:12:47.009846: Epoch 786 +2026-04-09 15:12:47.014435: Current learning rate: 0.0025 +2026-04-09 15:14:29.706965: train_loss -0.5309 +2026-04-09 15:14:29.716031: val_loss -0.5391 +2026-04-09 
15:14:29.718500: Pseudo dice [0.0, 0.8394, 0.6661, 0.0318, 0.3011, 0.799, 0.8526] +2026-04-09 15:14:29.721171: Epoch time: 102.71 s +2026-04-09 15:14:30.875756: +2026-04-09 15:14:30.878397: Epoch 787 +2026-04-09 15:14:30.881326: Current learning rate: 0.00249 +2026-04-09 15:16:13.319570: train_loss -0.56 +2026-04-09 15:16:13.326072: val_loss -0.5167 +2026-04-09 15:16:13.328411: Pseudo dice [0.0, 0.6658, 0.6557, 0.117, 0.3973, 0.6564, 0.7516] +2026-04-09 15:16:13.331018: Epoch time: 102.45 s +2026-04-09 15:16:14.484914: +2026-04-09 15:16:14.489446: Epoch 788 +2026-04-09 15:16:14.491681: Current learning rate: 0.00248 +2026-04-09 15:17:57.055065: train_loss -0.5505 +2026-04-09 15:17:57.061388: val_loss -0.5331 +2026-04-09 15:17:57.063434: Pseudo dice [0.0, 0.4956, 0.6964, 0.2823, 0.2504, 0.4979, 0.4868] +2026-04-09 15:17:57.065417: Epoch time: 102.57 s +2026-04-09 15:17:59.423336: +2026-04-09 15:17:59.424963: Epoch 789 +2026-04-09 15:17:59.426707: Current learning rate: 0.00247 +2026-04-09 15:19:42.473963: train_loss -0.5438 +2026-04-09 15:19:42.479720: val_loss -0.5276 +2026-04-09 15:19:42.482078: Pseudo dice [0.0, 0.0569, 0.7847, 0.2471, 0.5907, 0.6117, 0.4796] +2026-04-09 15:19:42.484934: Epoch time: 103.05 s +2026-04-09 15:19:43.661498: +2026-04-09 15:19:43.663858: Epoch 790 +2026-04-09 15:19:43.666187: Current learning rate: 0.00245 +2026-04-09 15:21:27.533762: train_loss -0.5401 +2026-04-09 15:21:27.544163: val_loss -0.4818 +2026-04-09 15:21:27.547138: Pseudo dice [0.0, 0.8481, 0.7392, 0.0635, 0.4593, 0.3204, 0.7005] +2026-04-09 15:21:27.549598: Epoch time: 103.88 s +2026-04-09 15:21:28.737802: +2026-04-09 15:21:28.741223: Epoch 791 +2026-04-09 15:21:28.748070: Current learning rate: 0.00244 +2026-04-09 15:23:11.771619: train_loss -0.5533 +2026-04-09 15:23:11.781203: val_loss -0.5205 +2026-04-09 15:23:11.783547: Pseudo dice [0.0, 0.5107, 0.6892, 0.1806, 0.5841, 0.7738, 0.6849] +2026-04-09 15:23:11.786798: Epoch time: 103.04 s +2026-04-09 15:23:12.972737: 
+2026-04-09 15:23:12.974793: Epoch 792 +2026-04-09 15:23:12.977006: Current learning rate: 0.00243 +2026-04-09 15:24:55.592517: train_loss -0.544 +2026-04-09 15:24:55.607424: val_loss -0.5318 +2026-04-09 15:24:55.610511: Pseudo dice [0.0, 0.7892, 0.6629, 0.5529, 0.3434, 0.6349, 0.7862] +2026-04-09 15:24:55.612515: Epoch time: 102.62 s +2026-04-09 15:24:56.789237: +2026-04-09 15:24:56.790876: Epoch 793 +2026-04-09 15:24:56.792706: Current learning rate: 0.00242 +2026-04-09 15:26:39.851491: train_loss -0.5576 +2026-04-09 15:26:39.860483: val_loss -0.5219 +2026-04-09 15:26:39.862739: Pseudo dice [0.0, 0.6126, 0.6674, 0.1476, 0.1912, 0.9033, 0.8357] +2026-04-09 15:26:39.865077: Epoch time: 103.07 s +2026-04-09 15:26:41.027415: +2026-04-09 15:26:41.029229: Epoch 794 +2026-04-09 15:26:41.031250: Current learning rate: 0.00241 +2026-04-09 15:28:23.823526: train_loss -0.5485 +2026-04-09 15:28:23.829964: val_loss -0.564 +2026-04-09 15:28:23.831613: Pseudo dice [0.0, 0.6833, 0.7224, 0.4887, 0.3658, 0.5375, 0.693] +2026-04-09 15:28:23.833441: Epoch time: 102.8 s +2026-04-09 15:28:24.994443: +2026-04-09 15:28:24.996662: Epoch 795 +2026-04-09 15:28:24.999110: Current learning rate: 0.0024 +2026-04-09 15:30:08.341337: train_loss -0.5277 +2026-04-09 15:30:08.347561: val_loss -0.5021 +2026-04-09 15:30:08.351681: Pseudo dice [0.0, 0.8583, 0.7267, 0.5749, 0.4013, 0.6629, 0.3367] +2026-04-09 15:30:08.353656: Epoch time: 103.35 s +2026-04-09 15:30:09.576223: +2026-04-09 15:30:09.578140: Epoch 796 +2026-04-09 15:30:09.581099: Current learning rate: 0.00239 +2026-04-09 15:31:52.481740: train_loss -0.5081 +2026-04-09 15:31:52.489909: val_loss -0.5359 +2026-04-09 15:31:52.493393: Pseudo dice [0.0, 0.7249, 0.7614, 0.0, 0.2452, 0.4649, 0.8525] +2026-04-09 15:31:52.496611: Epoch time: 102.91 s +2026-04-09 15:31:53.676069: +2026-04-09 15:31:53.678283: Epoch 797 +2026-04-09 15:31:53.680311: Current learning rate: 0.00238 +2026-04-09 15:33:36.777944: train_loss -0.545 +2026-04-09 
15:33:36.785637: val_loss -0.5057 +2026-04-09 15:33:36.788514: Pseudo dice [0.0, 0.7037, 0.5859, 0.1745, 0.259, 0.7242, 0.7225] +2026-04-09 15:33:36.791075: Epoch time: 103.1 s +2026-04-09 15:33:37.968651: +2026-04-09 15:33:37.972992: Epoch 798 +2026-04-09 15:33:37.975921: Current learning rate: 0.00237 +2026-04-09 15:35:20.997078: train_loss -0.5433 +2026-04-09 15:35:21.003430: val_loss -0.5632 +2026-04-09 15:35:21.005896: Pseudo dice [0.0, 0.7819, 0.7943, 0.1549, 0.5134, 0.7818, 0.7707] +2026-04-09 15:35:21.008135: Epoch time: 103.03 s +2026-04-09 15:35:22.228168: +2026-04-09 15:35:22.231660: Epoch 799 +2026-04-09 15:35:22.235337: Current learning rate: 0.00236 +2026-04-09 15:37:04.835536: train_loss -0.5338 +2026-04-09 15:37:04.842919: val_loss -0.5225 +2026-04-09 15:37:04.845226: Pseudo dice [0.0, 0.7848, 0.7925, 0.5494, 0.0, 0.6071, 0.7696] +2026-04-09 15:37:04.848049: Epoch time: 102.61 s +2026-04-09 15:37:07.871998: +2026-04-09 15:37:07.874537: Epoch 800 +2026-04-09 15:37:07.876696: Current learning rate: 0.00235 +2026-04-09 15:38:50.952616: train_loss -0.5434 +2026-04-09 15:38:50.959502: val_loss -0.5228 +2026-04-09 15:38:50.961749: Pseudo dice [0.0, 0.3093, 0.6691, 0.0, 0.1488, 0.9002, 0.777] +2026-04-09 15:38:50.964089: Epoch time: 103.08 s +2026-04-09 15:38:52.171361: +2026-04-09 15:38:52.174646: Epoch 801 +2026-04-09 15:38:52.177694: Current learning rate: 0.00234 +2026-04-09 15:40:34.798423: train_loss -0.5521 +2026-04-09 15:40:34.806798: val_loss -0.4902 +2026-04-09 15:40:34.809297: Pseudo dice [0.0, 0.8689, 0.6902, 0.006, 0.3181, 0.0537, 0.8012] +2026-04-09 15:40:34.811937: Epoch time: 102.63 s +2026-04-09 15:40:35.977767: +2026-04-09 15:40:35.979895: Epoch 802 +2026-04-09 15:40:35.981820: Current learning rate: 0.00233 +2026-04-09 15:42:18.878097: train_loss -0.5339 +2026-04-09 15:42:18.886673: val_loss -0.535 +2026-04-09 15:42:18.889798: Pseudo dice [0.0, 0.6027, 0.7829, 0.2138, 0.04, 0.5815, 0.7442] +2026-04-09 15:42:18.893017: Epoch time: 102.9 s 
+2026-04-09 15:42:20.089049: +2026-04-09 15:42:20.093899: Epoch 803 +2026-04-09 15:42:20.096799: Current learning rate: 0.00232 +2026-04-09 15:44:03.260783: train_loss -0.516 +2026-04-09 15:44:03.273812: val_loss -0.4875 +2026-04-09 15:44:03.276335: Pseudo dice [0.0, 0.8759, 0.7239, 0.0, 0.6428, 0.1821, 0.778] +2026-04-09 15:44:03.278855: Epoch time: 103.17 s +2026-04-09 15:44:04.450547: +2026-04-09 15:44:04.454102: Epoch 804 +2026-04-09 15:44:04.456547: Current learning rate: 0.00231 +2026-04-09 15:45:47.577312: train_loss -0.5271 +2026-04-09 15:45:47.585938: val_loss -0.4516 +2026-04-09 15:45:47.590203: Pseudo dice [0.0, 0.8455, 0.5918, 0.0254, 0.0, 0.0778, 0.6891] +2026-04-09 15:45:47.593206: Epoch time: 103.13 s +2026-04-09 15:45:48.834575: +2026-04-09 15:45:48.837030: Epoch 805 +2026-04-09 15:45:48.840531: Current learning rate: 0.0023 +2026-04-09 15:47:31.749902: train_loss -0.5293 +2026-04-09 15:47:31.756417: val_loss -0.5035 +2026-04-09 15:47:31.758234: Pseudo dice [0.0, 0.5632, 0.7247, 0.3091, 0.0, 0.5108, 0.7944] +2026-04-09 15:47:31.761670: Epoch time: 102.92 s +2026-04-09 15:47:32.957001: +2026-04-09 15:47:32.958998: Epoch 806 +2026-04-09 15:47:32.961733: Current learning rate: 0.00229 +2026-04-09 15:49:15.768701: train_loss -0.5509 +2026-04-09 15:49:15.780388: val_loss -0.5296 +2026-04-09 15:49:15.783321: Pseudo dice [0.0, 0.3187, 0.7697, 0.035, 0.3735, 0.0929, 0.527] +2026-04-09 15:49:15.787512: Epoch time: 102.81 s +2026-04-09 15:49:16.970836: +2026-04-09 15:49:16.983962: Epoch 807 +2026-04-09 15:49:16.986469: Current learning rate: 0.00228 +2026-04-09 15:51:00.533573: train_loss -0.5187 +2026-04-09 15:51:00.542794: val_loss -0.4711 +2026-04-09 15:51:00.546767: Pseudo dice [0.0, 0.6866, 0.4853, 0.0571, 0.4928, 0.2531, 0.4325] +2026-04-09 15:51:00.549332: Epoch time: 103.57 s +2026-04-09 15:51:02.971247: +2026-04-09 15:51:02.973780: Epoch 808 +2026-04-09 15:51:02.976103: Current learning rate: 0.00226 +2026-04-09 15:52:45.290226: train_loss -0.5217 
+2026-04-09 15:52:45.301066: val_loss -0.5009 +2026-04-09 15:52:45.304356: Pseudo dice [0.0, 0.6936, 0.7907, 0.4275, 0.2511, 0.3699, 0.7843] +2026-04-09 15:52:45.307410: Epoch time: 102.32 s +2026-04-09 15:52:46.529372: +2026-04-09 15:52:46.531468: Epoch 809 +2026-04-09 15:52:46.533910: Current learning rate: 0.00225 +2026-04-09 15:54:29.508827: train_loss -0.5324 +2026-04-09 15:54:29.515424: val_loss -0.5058 +2026-04-09 15:54:29.518173: Pseudo dice [0.0, 0.4675, 0.6591, 0.3529, 0.449, 0.6394, 0.8577] +2026-04-09 15:54:29.520553: Epoch time: 102.98 s +2026-04-09 15:54:30.727642: +2026-04-09 15:54:30.730021: Epoch 810 +2026-04-09 15:54:30.731815: Current learning rate: 0.00224 +2026-04-09 15:56:13.370440: train_loss -0.5446 +2026-04-09 15:56:13.381859: val_loss -0.4723 +2026-04-09 15:56:13.384366: Pseudo dice [0.0, 0.8749, 0.6523, 0.0003, 0.4088, 0.5869, 0.6034] +2026-04-09 15:56:13.388351: Epoch time: 102.65 s +2026-04-09 15:56:14.654145: +2026-04-09 15:56:14.657130: Epoch 811 +2026-04-09 15:56:14.659519: Current learning rate: 0.00223 +2026-04-09 15:57:57.567550: train_loss -0.5615 +2026-04-09 15:57:57.574698: val_loss -0.5122 +2026-04-09 15:57:57.577011: Pseudo dice [0.0491, 0.5106, 0.7502, 0.4851, 0.2749, 0.7438, 0.4967] +2026-04-09 15:57:57.579731: Epoch time: 102.92 s +2026-04-09 15:57:58.763659: +2026-04-09 15:57:58.766122: Epoch 812 +2026-04-09 15:57:58.768641: Current learning rate: 0.00222 +2026-04-09 15:59:41.402375: train_loss -0.5343 +2026-04-09 15:59:41.408398: val_loss -0.4625 +2026-04-09 15:59:41.411739: Pseudo dice [0.5472, 0.7253, 0.6182, 0.0, 0.3016, 0.332, 0.7252] +2026-04-09 15:59:41.414174: Epoch time: 102.64 s +2026-04-09 15:59:42.631479: +2026-04-09 15:59:42.634001: Epoch 813 +2026-04-09 15:59:42.636210: Current learning rate: 0.00221 +2026-04-09 16:01:25.044872: train_loss -0.5316 +2026-04-09 16:01:25.051908: val_loss -0.5011 +2026-04-09 16:01:25.053958: Pseudo dice [0.0347, 0.5812, 0.6895, 0.0, 0.2874, 0.2016, 0.5135] +2026-04-09 
16:01:25.057709: Epoch time: 102.42 s +2026-04-09 16:01:26.242985: +2026-04-09 16:01:26.245224: Epoch 814 +2026-04-09 16:01:26.248276: Current learning rate: 0.0022 +2026-04-09 16:03:08.930983: train_loss -0.5524 +2026-04-09 16:03:08.940073: val_loss -0.5096 +2026-04-09 16:03:08.942509: Pseudo dice [0.1499, 0.8299, 0.5064, 0.0761, 0.15, 0.8715, 0.7547] +2026-04-09 16:03:08.945302: Epoch time: 102.69 s +2026-04-09 16:03:10.133227: +2026-04-09 16:03:10.136337: Epoch 815 +2026-04-09 16:03:10.138901: Current learning rate: 0.00219 +2026-04-09 16:04:53.550997: train_loss -0.5491 +2026-04-09 16:04:53.560650: val_loss -0.5044 +2026-04-09 16:04:53.563644: Pseudo dice [0.792, 0.592, 0.6835, 0.3995, 0.0, 0.5627, 0.6838] +2026-04-09 16:04:53.565783: Epoch time: 103.42 s +2026-04-09 16:04:54.734406: +2026-04-09 16:04:54.737240: Epoch 816 +2026-04-09 16:04:54.740294: Current learning rate: 0.00218 +2026-04-09 16:06:37.647444: train_loss -0.5308 +2026-04-09 16:06:37.654276: val_loss -0.5154 +2026-04-09 16:06:37.656690: Pseudo dice [0.6778, 0.3445, 0.6795, 0.3956, 0.501, 0.7019, 0.4176] +2026-04-09 16:06:37.659965: Epoch time: 102.92 s +2026-04-09 16:06:38.865372: +2026-04-09 16:06:38.867393: Epoch 817 +2026-04-09 16:06:38.869486: Current learning rate: 0.00217 +2026-04-09 16:08:21.100272: train_loss -0.5612 +2026-04-09 16:08:21.106643: val_loss -0.4604 +2026-04-09 16:08:21.109841: Pseudo dice [0.6201, 0.8749, 0.6723, 0.0123, 0.048, 0.6878, 0.7142] +2026-04-09 16:08:21.111970: Epoch time: 102.24 s +2026-04-09 16:08:22.316068: +2026-04-09 16:08:22.318014: Epoch 818 +2026-04-09 16:08:22.320532: Current learning rate: 0.00216 +2026-04-09 16:10:04.854413: train_loss -0.5398 +2026-04-09 16:10:04.861755: val_loss -0.5293 +2026-04-09 16:10:04.864454: Pseudo dice [0.2059, 0.4941, 0.7686, 0.0417, 0.1617, 0.8185, 0.7128] +2026-04-09 16:10:04.866734: Epoch time: 102.54 s +2026-04-09 16:10:06.074509: +2026-04-09 16:10:06.076932: Epoch 819 +2026-04-09 16:10:06.078971: Current learning rate: 
0.00215 +2026-04-09 16:11:48.571862: train_loss -0.536 +2026-04-09 16:11:48.578049: val_loss -0.4877 +2026-04-09 16:11:48.580363: Pseudo dice [0.5995, 0.7337, 0.8069, 0.5071, 0.3709, 0.6957, 0.5863] +2026-04-09 16:11:48.582623: Epoch time: 102.5 s +2026-04-09 16:11:49.689141: +2026-04-09 16:11:49.690971: Epoch 820 +2026-04-09 16:11:49.692922: Current learning rate: 0.00214 +2026-04-09 16:13:32.276794: train_loss -0.5587 +2026-04-09 16:13:32.283230: val_loss -0.499 +2026-04-09 16:13:32.285388: Pseudo dice [0.305, 0.8624, 0.4863, 0.0518, 0.0183, 0.1159, 0.8587] +2026-04-09 16:13:32.288364: Epoch time: 102.59 s +2026-04-09 16:13:33.406814: +2026-04-09 16:13:33.408813: Epoch 821 +2026-04-09 16:13:33.412139: Current learning rate: 0.00213 +2026-04-09 16:15:16.608980: train_loss -0.5699 +2026-04-09 16:15:16.619917: val_loss -0.4945 +2026-04-09 16:15:16.622099: Pseudo dice [0.4527, 0.6726, 0.4474, 0.0576, 0.2371, 0.6536, 0.685] +2026-04-09 16:15:16.624447: Epoch time: 103.21 s +2026-04-09 16:15:17.742275: +2026-04-09 16:15:17.745648: Epoch 822 +2026-04-09 16:15:17.748556: Current learning rate: 0.00212 +2026-04-09 16:17:00.881352: train_loss -0.5485 +2026-04-09 16:17:00.887598: val_loss -0.5192 +2026-04-09 16:17:00.889883: Pseudo dice [0.0448, 0.3286, 0.7305, 0.0913, 0.2994, 0.754, 0.8221] +2026-04-09 16:17:00.893353: Epoch time: 103.14 s +2026-04-09 16:17:01.993759: +2026-04-09 16:17:01.995780: Epoch 823 +2026-04-09 16:17:01.997956: Current learning rate: 0.0021 +2026-04-09 16:18:44.730556: train_loss -0.5364 +2026-04-09 16:18:44.739562: val_loss -0.5019 +2026-04-09 16:18:44.742316: Pseudo dice [0.0358, 0.8197, 0.8286, 0.0, 0.4319, 0.3102, 0.6687] +2026-04-09 16:18:44.744848: Epoch time: 102.74 s +2026-04-09 16:18:45.855749: +2026-04-09 16:18:45.858380: Epoch 824 +2026-04-09 16:18:45.860912: Current learning rate: 0.00209 +2026-04-09 16:20:29.074253: train_loss -0.5598 +2026-04-09 16:20:29.080660: val_loss -0.4751 +2026-04-09 16:20:29.082527: Pseudo dice [0.0, 0.332, 
0.7247, 0.0491, 0.4107, 0.7012, 0.7132] +2026-04-09 16:20:29.085471: Epoch time: 103.22 s +2026-04-09 16:20:30.179917: +2026-04-09 16:20:30.182260: Epoch 825 +2026-04-09 16:20:30.184903: Current learning rate: 0.00208 +2026-04-09 16:22:12.660572: train_loss -0.5499 +2026-04-09 16:22:12.667303: val_loss -0.5235 +2026-04-09 16:22:12.669986: Pseudo dice [0.0017, 0.4896, 0.7234, 0.0697, 0.4343, 0.3684, 0.7056] +2026-04-09 16:22:12.672271: Epoch time: 102.48 s +2026-04-09 16:22:13.788193: +2026-04-09 16:22:13.790151: Epoch 826 +2026-04-09 16:22:13.792994: Current learning rate: 0.00207 +2026-04-09 16:23:56.236642: train_loss -0.5583 +2026-04-09 16:23:56.242892: val_loss -0.5222 +2026-04-09 16:23:56.245270: Pseudo dice [0.268, 0.7044, 0.6305, 0.0032, 0.1413, 0.4469, 0.8459] +2026-04-09 16:23:56.247527: Epoch time: 102.45 s +2026-04-09 16:23:57.349208: +2026-04-09 16:23:57.351853: Epoch 827 +2026-04-09 16:23:57.353979: Current learning rate: 0.00206 +2026-04-09 16:25:39.743473: train_loss -0.5703 +2026-04-09 16:25:39.750562: val_loss -0.5316 +2026-04-09 16:25:39.754789: Pseudo dice [0.4256, 0.6104, 0.7399, 0.4012, 0.429, 0.8789, 0.7561] +2026-04-09 16:25:39.771265: Epoch time: 102.4 s +2026-04-09 16:25:42.060841: +2026-04-09 16:25:42.063107: Epoch 828 +2026-04-09 16:25:42.065027: Current learning rate: 0.00205 +2026-04-09 16:27:25.370508: train_loss -0.5753 +2026-04-09 16:27:25.379540: val_loss -0.5485 +2026-04-09 16:27:25.381632: Pseudo dice [0.5443, 0.836, 0.7023, 0.2194, 0.5499, 0.8716, 0.7391] +2026-04-09 16:27:25.383580: Epoch time: 103.31 s +2026-04-09 16:27:26.496045: +2026-04-09 16:27:26.502788: Epoch 829 +2026-04-09 16:27:26.504951: Current learning rate: 0.00204 +2026-04-09 16:29:09.136203: train_loss -0.5535 +2026-04-09 16:29:09.144213: val_loss -0.5556 +2026-04-09 16:29:09.146771: Pseudo dice [0.4848, 0.3236, 0.773, 0.4846, 0.4627, 0.7032, 0.5664] +2026-04-09 16:29:09.149202: Epoch time: 102.64 s +2026-04-09 16:29:10.287799: +2026-04-09 16:29:10.289704: Epoch 
830 +2026-04-09 16:29:10.291603: Current learning rate: 0.00203 +2026-04-09 16:30:53.027069: train_loss -0.5411 +2026-04-09 16:30:53.033144: val_loss -0.443 +2026-04-09 16:30:53.035793: Pseudo dice [0.6325, 0.2765, 0.3434, 0.1351, 0.5395, 0.0759, 0.7779] +2026-04-09 16:30:53.038546: Epoch time: 102.74 s +2026-04-09 16:30:54.152883: +2026-04-09 16:30:54.154499: Epoch 831 +2026-04-09 16:30:54.156302: Current learning rate: 0.00202 +2026-04-09 16:32:36.635199: train_loss -0.5405 +2026-04-09 16:32:36.641580: val_loss -0.4852 +2026-04-09 16:32:36.644014: Pseudo dice [0.0, 0.1039, 0.7111, 0.0, 0.5359, 0.3082, 0.7355] +2026-04-09 16:32:36.646703: Epoch time: 102.49 s +2026-04-09 16:32:37.753216: +2026-04-09 16:32:37.761590: Epoch 832 +2026-04-09 16:32:37.768116: Current learning rate: 0.00201 +2026-04-09 16:34:21.117349: train_loss -0.5606 +2026-04-09 16:34:21.124085: val_loss -0.5071 +2026-04-09 16:34:21.126577: Pseudo dice [0.1208, 0.3964, 0.6925, 0.1372, 0.5413, 0.4935, 0.6666] +2026-04-09 16:34:21.128662: Epoch time: 103.37 s +2026-04-09 16:34:22.248436: +2026-04-09 16:34:22.250921: Epoch 833 +2026-04-09 16:34:22.254478: Current learning rate: 0.002 +2026-04-09 16:36:04.703312: train_loss -0.5646 +2026-04-09 16:36:04.711756: val_loss -0.5452 +2026-04-09 16:36:04.714085: Pseudo dice [0.6694, 0.5438, 0.6233, 0.4328, 0.5805, 0.255, 0.8524] +2026-04-09 16:36:04.717765: Epoch time: 102.46 s +2026-04-09 16:36:05.814411: +2026-04-09 16:36:05.816569: Epoch 834 +2026-04-09 16:36:05.826036: Current learning rate: 0.00199 +2026-04-09 16:37:49.087349: train_loss -0.551 +2026-04-09 16:37:49.095274: val_loss -0.4737 +2026-04-09 16:37:49.101813: Pseudo dice [0.0617, 0.1128, 0.7217, 0.3122, 0.3179, 0.794, 0.6417] +2026-04-09 16:37:49.104124: Epoch time: 103.28 s +2026-04-09 16:37:50.258280: +2026-04-09 16:37:50.260203: Epoch 835 +2026-04-09 16:37:50.262211: Current learning rate: 0.00198 +2026-04-09 16:39:32.511823: train_loss -0.5509 +2026-04-09 16:39:32.518511: val_loss -0.5252 
+2026-04-09 16:39:32.520407: Pseudo dice [0.0913, 0.3982, 0.7427, 0.2007, 0.5473, 0.8609, 0.6551] +2026-04-09 16:39:32.523033: Epoch time: 102.26 s +2026-04-09 16:39:33.604238: +2026-04-09 16:39:33.606308: Epoch 836 +2026-04-09 16:39:33.609594: Current learning rate: 0.00196 +2026-04-09 16:41:16.812207: train_loss -0.5897 +2026-04-09 16:41:16.825417: val_loss -0.53 +2026-04-09 16:41:16.828082: Pseudo dice [0.0181, 0.6249, 0.6892, 0.0081, 0.4788, 0.7125, 0.7987] +2026-04-09 16:41:16.833740: Epoch time: 103.21 s +2026-04-09 16:41:17.975260: +2026-04-09 16:41:17.977632: Epoch 837 +2026-04-09 16:41:17.980932: Current learning rate: 0.00195 +2026-04-09 16:43:01.869156: train_loss -0.5608 +2026-04-09 16:43:01.876541: val_loss -0.5505 +2026-04-09 16:43:01.880783: Pseudo dice [0.6515, 0.6595, 0.8209, 0.3597, 0.3511, 0.6482, 0.7091] +2026-04-09 16:43:01.883607: Epoch time: 103.9 s +2026-04-09 16:43:02.983219: +2026-04-09 16:43:02.986514: Epoch 838 +2026-04-09 16:43:02.989301: Current learning rate: 0.00194 +2026-04-09 16:44:46.120317: train_loss -0.5733 +2026-04-09 16:44:46.127662: val_loss -0.5071 +2026-04-09 16:44:46.130537: Pseudo dice [0.4771, 0.7489, 0.7225, 0.2356, 0.3366, 0.4935, 0.7887] +2026-04-09 16:44:46.133137: Epoch time: 103.14 s +2026-04-09 16:44:47.226769: +2026-04-09 16:44:47.229459: Epoch 839 +2026-04-09 16:44:47.232515: Current learning rate: 0.00193 +2026-04-09 16:46:29.711776: train_loss -0.536 +2026-04-09 16:46:29.718618: val_loss -0.5272 +2026-04-09 16:46:29.722074: Pseudo dice [0.2705, 0.7152, 0.4376, 0.3947, 0.3717, 0.029, 0.8281] +2026-04-09 16:46:29.725209: Epoch time: 102.49 s +2026-04-09 16:46:30.841421: +2026-04-09 16:46:30.843859: Epoch 840 +2026-04-09 16:46:30.846490: Current learning rate: 0.00192 +2026-04-09 16:48:13.707129: train_loss -0.5675 +2026-04-09 16:48:13.713895: val_loss -0.4959 +2026-04-09 16:48:13.716862: Pseudo dice [0.5542, 0.338, 0.3768, 0.3768, 0.393, 0.9156, 0.742] +2026-04-09 16:48:13.718827: Epoch time: 102.87 s 
+2026-04-09 16:48:14.829168: +2026-04-09 16:48:14.831390: Epoch 841 +2026-04-09 16:48:14.833767: Current learning rate: 0.00191 +2026-04-09 16:49:58.234553: train_loss -0.5386 +2026-04-09 16:49:58.262387: val_loss -0.5264 +2026-04-09 16:49:58.266199: Pseudo dice [0.1359, 0.3739, 0.7675, 0.3804, 0.2782, 0.9224, 0.7887] +2026-04-09 16:49:58.269746: Epoch time: 103.41 s +2026-04-09 16:49:59.348967: +2026-04-09 16:49:59.352204: Epoch 842 +2026-04-09 16:49:59.355388: Current learning rate: 0.0019 +2026-04-09 16:51:41.533739: train_loss -0.5502 +2026-04-09 16:51:41.540227: val_loss -0.508 +2026-04-09 16:51:41.542525: Pseudo dice [0.8537, 0.4112, 0.608, 0.0, 0.2495, 0.6082, 0.6228] +2026-04-09 16:51:41.544438: Epoch time: 102.19 s +2026-04-09 16:51:42.637148: +2026-04-09 16:51:42.639315: Epoch 843 +2026-04-09 16:51:42.641527: Current learning rate: 0.00189 +2026-04-09 16:53:25.375748: train_loss -0.57 +2026-04-09 16:53:25.385339: val_loss -0.5177 +2026-04-09 16:53:25.387816: Pseudo dice [0.4264, 0.5116, 0.6472, 0.3427, 0.4566, 0.3309, 0.5921] +2026-04-09 16:53:25.390712: Epoch time: 102.74 s +2026-04-09 16:53:26.465595: +2026-04-09 16:53:26.467443: Epoch 844 +2026-04-09 16:53:26.470947: Current learning rate: 0.00188 +2026-04-09 16:55:08.993000: train_loss -0.588 +2026-04-09 16:55:09.002036: val_loss -0.4927 +2026-04-09 16:55:09.004217: Pseudo dice [0.5192, 0.7133, 0.7157, 0.1005, 0.4989, 0.5889, 0.6837] +2026-04-09 16:55:09.006333: Epoch time: 102.53 s +2026-04-09 16:55:10.106122: +2026-04-09 16:55:10.108358: Epoch 845 +2026-04-09 16:55:10.111114: Current learning rate: 0.00187 +2026-04-09 16:56:52.773894: train_loss -0.5511 +2026-04-09 16:56:52.782722: val_loss -0.5583 +2026-04-09 16:56:52.785779: Pseudo dice [0.2214, 0.3841, 0.6132, 0.1329, 0.3339, 0.813, 0.7862] +2026-04-09 16:56:52.790259: Epoch time: 102.67 s +2026-04-09 16:56:53.917228: +2026-04-09 16:56:53.919201: Epoch 846 +2026-04-09 16:56:53.921656: Current learning rate: 0.00186 +2026-04-09 16:58:36.368589: 
train_loss -0.563 +2026-04-09 16:58:36.375437: val_loss -0.5681 +2026-04-09 16:58:36.377697: Pseudo dice [0.4086, 0.7036, 0.7822, 0.5295, 0.4895, 0.845, 0.781] +2026-04-09 16:58:36.379776: Epoch time: 102.45 s +2026-04-09 16:58:37.483007: +2026-04-09 16:58:37.485439: Epoch 847 +2026-04-09 16:58:37.487814: Current learning rate: 0.00185 +2026-04-09 17:00:20.371194: train_loss -0.563 +2026-04-09 17:00:20.378523: val_loss -0.5472 +2026-04-09 17:00:20.381685: Pseudo dice [0.3953, 0.0599, 0.72, 0.1104, 0.4672, 0.6697, 0.5296] +2026-04-09 17:00:20.384156: Epoch time: 102.89 s +2026-04-09 17:00:21.499888: +2026-04-09 17:00:21.502222: Epoch 848 +2026-04-09 17:00:21.504136: Current learning rate: 0.00184 +2026-04-09 17:02:05.873876: train_loss -0.5752 +2026-04-09 17:02:05.881197: val_loss -0.5118 +2026-04-09 17:02:05.883876: Pseudo dice [0.5454, 0.1274, 0.707, 0.3425, 0.1367, 0.9011, 0.8048] +2026-04-09 17:02:05.886542: Epoch time: 104.38 s +2026-04-09 17:02:06.985100: +2026-04-09 17:02:06.987395: Epoch 849 +2026-04-09 17:02:06.990130: Current learning rate: 0.00182 +2026-04-09 17:03:49.516027: train_loss -0.5584 +2026-04-09 17:03:49.522684: val_loss -0.5381 +2026-04-09 17:03:49.525105: Pseudo dice [0.577, 0.5543, 0.7923, 0.2935, 0.385, 0.8489, 0.7892] +2026-04-09 17:03:49.527798: Epoch time: 102.53 s +2026-04-09 17:03:52.381644: +2026-04-09 17:03:52.383554: Epoch 850 +2026-04-09 17:03:52.385456: Current learning rate: 0.00181 +2026-04-09 17:05:34.849097: train_loss -0.5978 +2026-04-09 17:05:34.855974: val_loss -0.5381 +2026-04-09 17:05:34.857855: Pseudo dice [0.5613, 0.5489, 0.7202, 0.4086, 0.3088, 0.8401, 0.6676] +2026-04-09 17:05:34.860026: Epoch time: 102.47 s +2026-04-09 17:05:35.973806: +2026-04-09 17:05:35.976357: Epoch 851 +2026-04-09 17:05:35.978476: Current learning rate: 0.0018 +2026-04-09 17:07:18.788003: train_loss -0.5911 +2026-04-09 17:07:18.797385: val_loss -0.4852 +2026-04-09 17:07:18.799343: Pseudo dice [0.5567, 0.8486, 0.6319, 0.0, 0.3743, 0.3742, 0.7728] 
+2026-04-09 17:07:18.802907: Epoch time: 102.82 s +2026-04-09 17:07:19.905122: +2026-04-09 17:07:19.907653: Epoch 852 +2026-04-09 17:07:19.911228: Current learning rate: 0.00179 +2026-04-09 17:09:02.790708: train_loss -0.5777 +2026-04-09 17:09:02.799224: val_loss -0.5579 +2026-04-09 17:09:02.803453: Pseudo dice [0.607, 0.5312, 0.8161, 0.0, 0.4776, 0.8756, 0.5695] +2026-04-09 17:09:02.805879: Epoch time: 102.89 s +2026-04-09 17:09:03.891837: +2026-04-09 17:09:03.894365: Epoch 853 +2026-04-09 17:09:03.897179: Current learning rate: 0.00178 +2026-04-09 17:10:47.272469: train_loss -0.5813 +2026-04-09 17:10:47.279041: val_loss -0.5605 +2026-04-09 17:10:47.281271: Pseudo dice [0.5053, 0.1037, 0.7298, 0.0, 0.5944, 0.895, 0.7409] +2026-04-09 17:10:47.284153: Epoch time: 103.38 s +2026-04-09 17:10:48.382567: +2026-04-09 17:10:48.385616: Epoch 854 +2026-04-09 17:10:48.389414: Current learning rate: 0.00177 +2026-04-09 17:12:31.627114: train_loss -0.5543 +2026-04-09 17:12:31.634199: val_loss -0.5031 +2026-04-09 17:12:31.635917: Pseudo dice [0.7546, 0.4918, 0.7024, 0.0, 0.4389, 0.5545, 0.7058] +2026-04-09 17:12:31.638036: Epoch time: 103.25 s +2026-04-09 17:12:32.719562: +2026-04-09 17:12:32.721467: Epoch 855 +2026-04-09 17:12:32.723333: Current learning rate: 0.00176 +2026-04-09 17:14:15.375473: train_loss -0.5421 +2026-04-09 17:14:15.381227: val_loss -0.531 +2026-04-09 17:14:15.384162: Pseudo dice [0.6435, 0.7409, 0.7789, 0.3302, 0.4939, 0.7465, 0.7533] +2026-04-09 17:14:15.386132: Epoch time: 102.66 s +2026-04-09 17:14:16.477638: +2026-04-09 17:14:16.481425: Epoch 856 +2026-04-09 17:14:16.486989: Current learning rate: 0.00175 +2026-04-09 17:15:59.494562: train_loss -0.5835 +2026-04-09 17:15:59.500558: val_loss -0.5095 +2026-04-09 17:15:59.502610: Pseudo dice [0.392, 0.896, 0.607, 0.2785, 0.5567, 0.4678, 0.7134] +2026-04-09 17:15:59.505127: Epoch time: 103.02 s +2026-04-09 17:16:00.614526: +2026-04-09 17:16:00.616378: Epoch 857 +2026-04-09 17:16:00.618526: Current learning 
rate: 0.00174 +2026-04-09 17:17:43.978275: train_loss -0.5541 +2026-04-09 17:17:43.988357: val_loss -0.5481 +2026-04-09 17:17:43.993431: Pseudo dice [0.4823, 0.5557, 0.7931, 0.1442, 0.4802, 0.8387, 0.5316] +2026-04-09 17:17:43.998152: Epoch time: 103.37 s +2026-04-09 17:17:45.083963: +2026-04-09 17:17:45.088400: Epoch 858 +2026-04-09 17:17:45.093265: Current learning rate: 0.00173 +2026-04-09 17:19:27.739060: train_loss -0.5849 +2026-04-09 17:19:27.746041: val_loss -0.552 +2026-04-09 17:19:27.748560: Pseudo dice [0.2329, 0.8544, 0.6839, 0.3166, 0.5848, 0.7714, 0.7704] +2026-04-09 17:19:27.751208: Epoch time: 102.66 s +2026-04-09 17:19:28.969427: +2026-04-09 17:19:28.972059: Epoch 859 +2026-04-09 17:19:28.974397: Current learning rate: 0.00172 +2026-04-09 17:21:11.730931: train_loss -0.5785 +2026-04-09 17:21:11.736740: val_loss -0.5521 +2026-04-09 17:21:11.738927: Pseudo dice [0.674, 0.7585, 0.637, 0.1929, 0.2687, 0.7286, 0.6488] +2026-04-09 17:21:11.740826: Epoch time: 102.76 s +2026-04-09 17:21:12.870588: +2026-04-09 17:21:12.872763: Epoch 860 +2026-04-09 17:21:12.876414: Current learning rate: 0.0017 +2026-04-09 17:22:56.529837: train_loss -0.5638 +2026-04-09 17:22:56.536617: val_loss -0.4931 +2026-04-09 17:22:56.538536: Pseudo dice [0.4807, 0.7428, 0.7458, 0.4638, 0.4294, 0.3683, 0.7705] +2026-04-09 17:22:56.541485: Epoch time: 103.66 s +2026-04-09 17:22:57.713698: +2026-04-09 17:22:57.715897: Epoch 861 +2026-04-09 17:22:57.718139: Current learning rate: 0.00169 +2026-04-09 17:24:40.877554: train_loss -0.5696 +2026-04-09 17:24:40.884983: val_loss -0.5372 +2026-04-09 17:24:40.887834: Pseudo dice [0.2583, 0.8196, 0.7843, 0.0704, 0.2841, 0.7685, 0.6674] +2026-04-09 17:24:40.890852: Epoch time: 103.17 s +2026-04-09 17:24:42.004062: +2026-04-09 17:24:42.005996: Epoch 862 +2026-04-09 17:24:42.008440: Current learning rate: 0.00168 +2026-04-09 17:26:24.589752: train_loss -0.5808 +2026-04-09 17:26:24.595556: val_loss -0.4894 +2026-04-09 17:26:24.601943: Pseudo dice 
[0.2099, 0.4191, 0.6706, 0.3452, 0.5338, 0.3024, 0.7952] +2026-04-09 17:26:24.605568: Epoch time: 102.59 s +2026-04-09 17:26:25.701293: +2026-04-09 17:26:25.703348: Epoch 863 +2026-04-09 17:26:25.706286: Current learning rate: 0.00167 +2026-04-09 17:28:09.326341: train_loss -0.5772 +2026-04-09 17:28:09.333565: val_loss -0.5148 +2026-04-09 17:28:09.336205: Pseudo dice [0.3389, 0.3451, 0.7666, 0.2305, 0.3264, 0.197, 0.7065] +2026-04-09 17:28:09.338915: Epoch time: 103.63 s +2026-04-09 17:28:10.409213: +2026-04-09 17:28:10.411234: Epoch 864 +2026-04-09 17:28:10.413631: Current learning rate: 0.00166 +2026-04-09 17:29:52.904110: train_loss -0.5692 +2026-04-09 17:29:52.911292: val_loss -0.5351 +2026-04-09 17:29:52.913756: Pseudo dice [0.7612, 0.54, 0.7432, 0.389, 0.3388, 0.7497, 0.801] +2026-04-09 17:29:52.915637: Epoch time: 102.5 s +2026-04-09 17:29:54.019877: +2026-04-09 17:29:54.021842: Epoch 865 +2026-04-09 17:29:54.023750: Current learning rate: 0.00165 +2026-04-09 17:31:37.355274: train_loss -0.584 +2026-04-09 17:31:37.365278: val_loss -0.6216 +2026-04-09 17:31:37.368064: Pseudo dice [0.7867, 0.7006, 0.7587, 0.3001, 0.6087, 0.5106, 0.7981] +2026-04-09 17:31:37.371410: Epoch time: 103.34 s +2026-04-09 17:31:38.454099: +2026-04-09 17:31:38.456579: Epoch 866 +2026-04-09 17:31:38.459274: Current learning rate: 0.00164 +2026-04-09 17:33:21.734075: train_loss -0.588 +2026-04-09 17:33:21.741767: val_loss -0.5322 +2026-04-09 17:33:21.744157: Pseudo dice [0.4787, 0.5669, 0.7359, 0.4847, 0.5337, 0.8234, 0.839] +2026-04-09 17:33:21.746783: Epoch time: 103.28 s +2026-04-09 17:33:22.843193: +2026-04-09 17:33:22.845192: Epoch 867 +2026-04-09 17:33:22.847697: Current learning rate: 0.00163 +2026-04-09 17:35:05.681847: train_loss -0.576 +2026-04-09 17:35:05.689180: val_loss -0.5755 +2026-04-09 17:35:05.691828: Pseudo dice [0.7838, 0.6508, 0.7454, 0.3018, 0.4566, 0.7726, 0.7709] +2026-04-09 17:35:05.693981: Epoch time: 102.84 s +2026-04-09 17:35:06.790797: +2026-04-09 
17:35:06.794241: Epoch 868 +2026-04-09 17:35:06.796687: Current learning rate: 0.00162 +2026-04-09 17:36:50.759954: train_loss -0.5706 +2026-04-09 17:36:50.767069: val_loss -0.5552 +2026-04-09 17:36:50.771055: Pseudo dice [0.6442, 0.8978, 0.7792, 0.1921, 0.4956, 0.659, 0.7585] +2026-04-09 17:36:50.774997: Epoch time: 103.97 s +2026-04-09 17:36:51.856206: +2026-04-09 17:36:51.859740: Epoch 869 +2026-04-09 17:36:51.863072: Current learning rate: 0.00161 +2026-04-09 17:38:34.894757: train_loss -0.5807 +2026-04-09 17:38:34.909275: val_loss -0.5512 +2026-04-09 17:38:34.917737: Pseudo dice [0.0, 0.6786, 0.6803, 0.68, 0.3668, 0.6095, 0.7031] +2026-04-09 17:38:34.923993: Epoch time: 103.04 s +2026-04-09 17:38:35.989956: +2026-04-09 17:38:35.992824: Epoch 870 +2026-04-09 17:38:35.998386: Current learning rate: 0.00159 +2026-04-09 17:40:19.188302: train_loss -0.5772 +2026-04-09 17:40:19.197410: val_loss -0.513 +2026-04-09 17:40:19.199558: Pseudo dice [0.4863, 0.5308, 0.718, 0.0, 0.3539, 0.6956, 0.4521] +2026-04-09 17:40:19.201920: Epoch time: 103.2 s +2026-04-09 17:40:20.286654: +2026-04-09 17:40:20.289031: Epoch 871 +2026-04-09 17:40:20.291185: Current learning rate: 0.00158 +2026-04-09 17:42:03.504093: train_loss -0.5756 +2026-04-09 17:42:03.512520: val_loss -0.5699 +2026-04-09 17:42:03.515017: Pseudo dice [0.3873, 0.7834, 0.8154, 0.5472, 0.4723, 0.6685, 0.8654] +2026-04-09 17:42:03.517736: Epoch time: 103.22 s +2026-04-09 17:42:04.616021: +2026-04-09 17:42:04.618680: Epoch 872 +2026-04-09 17:42:04.621717: Current learning rate: 0.00157 +2026-04-09 17:43:46.684960: train_loss -0.5681 +2026-04-09 17:43:46.691129: val_loss -0.5355 +2026-04-09 17:43:46.693959: Pseudo dice [0.4623, 0.6249, 0.8163, 0.1614, 0.0043, 0.8929, 0.5954] +2026-04-09 17:43:46.696979: Epoch time: 102.07 s +2026-04-09 17:43:47.807411: +2026-04-09 17:43:47.810198: Epoch 873 +2026-04-09 17:43:47.812542: Current learning rate: 0.00156 +2026-04-09 17:45:30.240439: train_loss -0.5699 +2026-04-09 
17:45:30.251219: val_loss -0.5574 +2026-04-09 17:45:30.254673: Pseudo dice [0.6496, 0.8283, 0.7459, 0.0205, 0.4902, 0.8626, 0.6098] +2026-04-09 17:45:30.261631: Epoch time: 102.44 s +2026-04-09 17:45:31.360336: +2026-04-09 17:45:31.362704: Epoch 874 +2026-04-09 17:45:31.366346: Current learning rate: 0.00155 +2026-04-09 17:47:13.928723: train_loss -0.5827 +2026-04-09 17:47:13.936313: val_loss -0.5623 +2026-04-09 17:47:13.938612: Pseudo dice [0.3803, 0.7126, 0.7094, 0.4387, 0.4818, 0.3185, 0.5448] +2026-04-09 17:47:13.940812: Epoch time: 102.57 s +2026-04-09 17:47:15.056237: +2026-04-09 17:47:15.058506: Epoch 875 +2026-04-09 17:47:15.061624: Current learning rate: 0.00154 +2026-04-09 17:48:57.574094: train_loss -0.5831 +2026-04-09 17:48:57.581728: val_loss -0.5809 +2026-04-09 17:48:57.583908: Pseudo dice [0.6443, 0.8932, 0.7461, 0.394, 0.4977, 0.2987, 0.7855] +2026-04-09 17:48:57.588216: Epoch time: 102.52 s +2026-04-09 17:48:58.725721: +2026-04-09 17:48:58.728765: Epoch 876 +2026-04-09 17:48:58.732680: Current learning rate: 0.00153 +2026-04-09 17:50:42.016970: train_loss -0.5738 +2026-04-09 17:50:42.023170: val_loss -0.5372 +2026-04-09 17:50:42.025666: Pseudo dice [0.5354, 0.7548, 0.8179, 0.0244, 0.1052, 0.877, 0.5868] +2026-04-09 17:50:42.027990: Epoch time: 103.29 s +2026-04-09 17:50:43.157818: +2026-04-09 17:50:43.159903: Epoch 877 +2026-04-09 17:50:43.162605: Current learning rate: 0.00152 +2026-04-09 17:52:25.562840: train_loss -0.5555 +2026-04-09 17:52:25.568983: val_loss -0.5897 +2026-04-09 17:52:25.571338: Pseudo dice [0.3148, 0.6677, 0.8062, 0.4337, 0.5557, 0.7147, 0.7664] +2026-04-09 17:52:25.574980: Epoch time: 102.41 s +2026-04-09 17:52:26.667916: +2026-04-09 17:52:26.669660: Epoch 878 +2026-04-09 17:52:26.671848: Current learning rate: 0.00151 +2026-04-09 17:54:08.662767: train_loss -0.5843 +2026-04-09 17:54:08.674892: val_loss -0.5374 +2026-04-09 17:54:08.677214: Pseudo dice [0.6918, 0.5764, 0.7712, 0.3101, 0.5115, 0.9008, 0.5514] +2026-04-09 
17:54:08.679594: Epoch time: 102.0 s +2026-04-09 17:54:09.783785: +2026-04-09 17:54:09.786157: Epoch 879 +2026-04-09 17:54:09.788495: Current learning rate: 0.00149 +2026-04-09 17:55:51.990042: train_loss -0.6048 +2026-04-09 17:55:51.996401: val_loss -0.5683 +2026-04-09 17:55:51.999053: Pseudo dice [0.2835, 0.3562, 0.8165, 0.3347, 0.4931, 0.624, 0.6339] +2026-04-09 17:55:52.001198: Epoch time: 102.21 s +2026-04-09 17:55:53.079362: +2026-04-09 17:55:53.081927: Epoch 880 +2026-04-09 17:55:53.084402: Current learning rate: 0.00148 +2026-04-09 17:57:36.113314: train_loss -0.5864 +2026-04-09 17:57:36.120023: val_loss -0.5501 +2026-04-09 17:57:36.123134: Pseudo dice [0.5713, 0.6028, 0.7904, 0.2677, 0.5099, 0.7328, 0.8116] +2026-04-09 17:57:36.125660: Epoch time: 103.04 s +2026-04-09 17:57:37.221106: +2026-04-09 17:57:37.231758: Epoch 881 +2026-04-09 17:57:37.234209: Current learning rate: 0.00147 +2026-04-09 17:59:20.179377: train_loss -0.5753 +2026-04-09 17:59:20.186816: val_loss -0.4925 +2026-04-09 17:59:20.189176: Pseudo dice [0.6948, 0.8809, 0.7529, 0.0778, 0.366, 0.6386, 0.5984] +2026-04-09 17:59:20.191659: Epoch time: 102.96 s +2026-04-09 17:59:21.251736: +2026-04-09 17:59:21.254483: Epoch 882 +2026-04-09 17:59:21.256966: Current learning rate: 0.00146 +2026-04-09 18:01:04.640870: train_loss -0.5721 +2026-04-09 18:01:04.651449: val_loss -0.5316 +2026-04-09 18:01:04.653656: Pseudo dice [0.5931, 0.8902, 0.7996, 0.0824, 0.3435, 0.2487, 0.5778] +2026-04-09 18:01:04.657034: Epoch time: 103.39 s +2026-04-09 18:01:05.742870: +2026-04-09 18:01:05.745351: Epoch 883 +2026-04-09 18:01:05.748530: Current learning rate: 0.00145 +2026-04-09 18:02:48.489274: train_loss -0.5851 +2026-04-09 18:02:48.494979: val_loss -0.5325 +2026-04-09 18:02:48.496775: Pseudo dice [0.6072, 0.9161, 0.7466, 0.4793, 0.4962, 0.534, 0.8154] +2026-04-09 18:02:48.499045: Epoch time: 102.75 s +2026-04-09 18:02:49.603004: +2026-04-09 18:02:49.605373: Epoch 884 +2026-04-09 18:02:49.607728: Current learning 
rate: 0.00144 +2026-04-09 18:04:32.210442: train_loss -0.5886 +2026-04-09 18:04:32.227734: val_loss -0.5274 +2026-04-09 18:04:32.234177: Pseudo dice [0.1774, 0.8737, 0.773, 0.3927, 0.5198, 0.6913, 0.6417] +2026-04-09 18:04:32.236481: Epoch time: 102.61 s +2026-04-09 18:04:33.377023: +2026-04-09 18:04:33.379163: Epoch 885 +2026-04-09 18:04:33.381007: Current learning rate: 0.00143 +2026-04-09 18:06:15.785398: train_loss -0.5912 +2026-04-09 18:06:15.791427: val_loss -0.4976 +2026-04-09 18:06:15.793840: Pseudo dice [0.0679, 0.8878, 0.8001, 0.4105, 0.2263, 0.7258, 0.6076] +2026-04-09 18:06:15.796073: Epoch time: 102.41 s +2026-04-09 18:06:16.924173: +2026-04-09 18:06:16.926654: Epoch 886 +2026-04-09 18:06:16.929892: Current learning rate: 0.00142 +2026-04-09 18:07:59.266640: train_loss -0.5897 +2026-04-09 18:07:59.273736: val_loss -0.551 +2026-04-09 18:07:59.276627: Pseudo dice [0.6571, 0.1419, 0.7154, 0.0179, 0.421, 0.8615, 0.8107] +2026-04-09 18:07:59.279666: Epoch time: 102.35 s +2026-04-09 18:08:00.388798: +2026-04-09 18:08:00.391158: Epoch 887 +2026-04-09 18:08:00.394448: Current learning rate: 0.00141 +2026-04-09 18:09:45.148840: train_loss -0.5635 +2026-04-09 18:09:45.161999: val_loss -0.5742 +2026-04-09 18:09:45.167300: Pseudo dice [0.7213, 0.716, 0.8114, 0.0, 0.5455, 0.8399, 0.7572] +2026-04-09 18:09:45.170747: Epoch time: 104.76 s +2026-04-09 18:09:46.287257: +2026-04-09 18:09:46.290013: Epoch 888 +2026-04-09 18:09:46.293051: Current learning rate: 0.00139 +2026-04-09 18:11:33.681137: train_loss -0.5898 +2026-04-09 18:11:33.696374: val_loss -0.5348 +2026-04-09 18:11:33.702093: Pseudo dice [0.8719, 0.7327, 0.4676, 0.0715, 0.5466, 0.8702, 0.5808] +2026-04-09 18:11:33.707075: Epoch time: 107.4 s +2026-04-09 18:11:36.038954: +2026-04-09 18:11:36.045234: Epoch 889 +2026-04-09 18:11:36.050559: Current learning rate: 0.00138 +2026-04-09 18:13:21.340649: train_loss -0.5922 +2026-04-09 18:13:21.350066: val_loss -0.5304 +2026-04-09 18:13:21.355276: Pseudo dice [0.4111, 
0.8933, 0.729, 0.4842, 0.4696, 0.8699, 0.7578] +2026-04-09 18:13:21.358697: Epoch time: 105.3 s +2026-04-09 18:13:22.498112: +2026-04-09 18:13:22.501207: Epoch 890 +2026-04-09 18:13:22.504829: Current learning rate: 0.00137 +2026-04-09 18:15:08.897567: train_loss -0.5631 +2026-04-09 18:15:08.914037: val_loss -0.5509 +2026-04-09 18:15:08.920443: Pseudo dice [0.6287, 0.8705, 0.8614, 0.4545, 0.5879, 0.8639, 0.8303] +2026-04-09 18:15:08.927169: Epoch time: 106.4 s +2026-04-09 18:15:10.052976: +2026-04-09 18:15:10.059117: Epoch 891 +2026-04-09 18:15:10.063750: Current learning rate: 0.00136 +2026-04-09 18:16:54.808476: train_loss -0.6117 +2026-04-09 18:16:54.829120: val_loss -0.5502 +2026-04-09 18:16:54.837763: Pseudo dice [0.6137, 0.7323, 0.7517, 0.1184, 0.3869, 0.889, 0.7463] +2026-04-09 18:16:54.842935: Epoch time: 104.76 s +2026-04-09 18:16:55.942726: +2026-04-09 18:16:55.948593: Epoch 892 +2026-04-09 18:16:55.953881: Current learning rate: 0.00135 +2026-04-09 18:18:41.502145: train_loss -0.6044 +2026-04-09 18:18:41.512167: val_loss -0.5569 +2026-04-09 18:18:41.517560: Pseudo dice [0.6466, 0.7569, 0.6208, 0.0951, 0.5874, 0.4152, 0.7929] +2026-04-09 18:18:41.524433: Epoch time: 105.56 s +2026-04-09 18:18:42.634307: +2026-04-09 18:18:42.640325: Epoch 893 +2026-04-09 18:18:42.644917: Current learning rate: 0.00134 +2026-04-09 18:20:28.122543: train_loss -0.5806 +2026-04-09 18:20:28.132594: val_loss -0.5818 +2026-04-09 18:20:28.136781: Pseudo dice [0.6296, 0.8213, 0.8347, 0.696, 0.3289, 0.8603, 0.8028] +2026-04-09 18:20:28.140411: Epoch time: 105.49 s +2026-04-09 18:20:29.256911: +2026-04-09 18:20:29.261353: Epoch 894 +2026-04-09 18:20:29.264862: Current learning rate: 0.00133 +2026-04-09 18:22:15.332390: train_loss -0.5859 +2026-04-09 18:22:15.348721: val_loss -0.5347 +2026-04-09 18:22:15.357131: Pseudo dice [0.5366, 0.911, 0.8089, 0.1138, 0.7064, 0.4717, 0.6666] +2026-04-09 18:22:15.364568: Epoch time: 106.08 s +2026-04-09 18:22:16.462319: +2026-04-09 18:22:16.468109: 
Epoch 895 +2026-04-09 18:22:16.472478: Current learning rate: 0.00132 +2026-04-09 18:24:02.165397: train_loss -0.5922 +2026-04-09 18:24:02.179748: val_loss -0.4916 +2026-04-09 18:24:02.184754: Pseudo dice [0.3597, 0.8604, 0.7488, 0.483, 0.3966, 0.4349, 0.5562] +2026-04-09 18:24:02.189881: Epoch time: 105.71 s +2026-04-09 18:24:03.299368: +2026-04-09 18:24:03.303075: Epoch 896 +2026-04-09 18:24:03.307261: Current learning rate: 0.0013 +2026-04-09 18:25:48.014111: train_loss -0.6031 +2026-04-09 18:25:48.027153: val_loss -0.5312 +2026-04-09 18:25:48.032153: Pseudo dice [0.4984, 0.8692, 0.71, 0.1642, 0.3897, 0.8177, 0.7519] +2026-04-09 18:25:48.036684: Epoch time: 104.72 s +2026-04-09 18:25:49.138226: +2026-04-09 18:25:49.142870: Epoch 897 +2026-04-09 18:25:49.148580: Current learning rate: 0.00129 +2026-04-09 18:27:34.042035: train_loss -0.5919 +2026-04-09 18:27:34.055634: val_loss -0.5477 +2026-04-09 18:27:34.063331: Pseudo dice [0.5457, 0.6786, 0.7393, 0.4279, 0.3303, 0.7658, 0.7791] +2026-04-09 18:27:34.068325: Epoch time: 104.91 s +2026-04-09 18:27:35.159635: +2026-04-09 18:27:35.164356: Epoch 898 +2026-04-09 18:27:35.168558: Current learning rate: 0.00128 +2026-04-09 18:29:19.992732: train_loss -0.5932 +2026-04-09 18:29:20.010938: val_loss -0.5653 +2026-04-09 18:29:20.025462: Pseudo dice [0.6512, 0.5352, 0.617, 0.1489, 0.6137, 0.8835, 0.8577] +2026-04-09 18:29:20.030640: Epoch time: 104.84 s +2026-04-09 18:29:21.145806: +2026-04-09 18:29:21.159364: Epoch 899 +2026-04-09 18:29:21.167211: Current learning rate: 0.00127 +2026-04-09 18:31:06.064688: train_loss -0.5837 +2026-04-09 18:31:06.078734: val_loss -0.5776 +2026-04-09 18:31:06.084270: Pseudo dice [0.4601, 0.8424, 0.4986, 0.159, 0.3158, 0.7318, 0.7407] +2026-04-09 18:31:06.090038: Epoch time: 104.92 s +2026-04-09 18:31:09.142818: +2026-04-09 18:31:09.146612: Epoch 900 +2026-04-09 18:31:09.150235: Current learning rate: 0.00126 +2026-04-09 18:32:53.012468: train_loss -0.6006 +2026-04-09 18:32:53.027155: val_loss 
-0.5324 +2026-04-09 18:32:53.032615: Pseudo dice [0.6851, 0.1585, 0.6762, 0.5617, 0.3715, 0.7006, 0.8195] +2026-04-09 18:32:53.037249: Epoch time: 103.87 s +2026-04-09 18:32:54.124387: +2026-04-09 18:32:54.131928: Epoch 901 +2026-04-09 18:32:54.139735: Current learning rate: 0.00125 +2026-04-09 18:34:41.472147: train_loss -0.5926 +2026-04-09 18:34:41.486514: val_loss -0.586 +2026-04-09 18:34:41.492119: Pseudo dice [0.8019, 0.3344, 0.7816, 0.3602, 0.6005, 0.8524, 0.7685] +2026-04-09 18:34:41.496860: Epoch time: 107.35 s +2026-04-09 18:34:42.617132: +2026-04-09 18:34:42.621748: Epoch 902 +2026-04-09 18:34:42.626384: Current learning rate: 0.00124 +2026-04-09 18:36:26.526532: train_loss -0.5947 +2026-04-09 18:36:26.536524: val_loss -0.5535 +2026-04-09 18:36:26.540729: Pseudo dice [0.5764, 0.603, 0.8094, 0.2579, 0.3956, 0.6624, 0.7838] +2026-04-09 18:36:26.547209: Epoch time: 103.91 s +2026-04-09 18:36:27.659882: +2026-04-09 18:36:27.663043: Epoch 903 +2026-04-09 18:36:27.668779: Current learning rate: 0.00122 +2026-04-09 18:38:12.240966: train_loss -0.5997 +2026-04-09 18:38:12.255937: val_loss -0.561 +2026-04-09 18:38:12.260135: Pseudo dice [0.4308, 0.7456, 0.67, 0.3373, 0.3745, 0.9268, 0.7669] +2026-04-09 18:38:12.264043: Epoch time: 104.58 s +2026-04-09 18:38:13.395719: +2026-04-09 18:38:13.419188: Epoch 904 +2026-04-09 18:38:13.445397: Current learning rate: 0.00121 +2026-04-09 18:39:56.621014: train_loss -0.5917 +2026-04-09 18:39:56.629604: val_loss -0.5619 +2026-04-09 18:39:56.633169: Pseudo dice [0.8176, 0.7063, 0.7516, 0.437, 0.4898, 0.855, 0.6341] +2026-04-09 18:39:56.638486: Epoch time: 103.23 s +2026-04-09 18:39:57.748297: +2026-04-09 18:39:57.751884: Epoch 905 +2026-04-09 18:39:57.754780: Current learning rate: 0.0012 +2026-04-09 18:41:41.978370: train_loss -0.6005 +2026-04-09 18:41:41.987629: val_loss -0.5198 +2026-04-09 18:41:41.990779: Pseudo dice [0.1173, 0.8531, 0.7594, 0.176, 0.3126, 0.613, 0.6666] +2026-04-09 18:41:41.993716: Epoch time: 104.23 s 
+2026-04-09 18:41:43.083708: +2026-04-09 18:41:43.089997: Epoch 906 +2026-04-09 18:41:43.095941: Current learning rate: 0.00119 +2026-04-09 18:43:28.774693: train_loss -0.6071 +2026-04-09 18:43:28.790699: val_loss -0.5738 +2026-04-09 18:43:28.795111: Pseudo dice [0.5929, 0.8931, 0.8044, 0.0236, 0.4663, 0.8276, 0.8371] +2026-04-09 18:43:28.799308: Epoch time: 105.69 s +2026-04-09 18:43:29.902105: +2026-04-09 18:43:29.907870: Epoch 907 +2026-04-09 18:43:29.912376: Current learning rate: 0.00118 +2026-04-09 18:45:16.565899: train_loss -0.6062 +2026-04-09 18:45:16.583553: val_loss -0.5642 +2026-04-09 18:45:16.588756: Pseudo dice [0.3192, 0.697, 0.7242, 0.0698, 0.3381, 0.8955, 0.5414] +2026-04-09 18:45:16.595257: Epoch time: 106.67 s +2026-04-09 18:45:17.699830: +2026-04-09 18:45:17.705933: Epoch 908 +2026-04-09 18:45:17.711017: Current learning rate: 0.00117 +2026-04-09 18:47:03.155037: train_loss -0.5895 +2026-04-09 18:47:03.169104: val_loss -0.5761 +2026-04-09 18:47:03.176985: Pseudo dice [0.7551, 0.8666, 0.8042, 0.0, 0.5845, 0.7073, 0.8045] +2026-04-09 18:47:03.182104: Epoch time: 105.46 s +2026-04-09 18:47:04.298431: +2026-04-09 18:47:04.319508: Epoch 909 +2026-04-09 18:47:04.328820: Current learning rate: 0.00116 +2026-04-09 18:48:50.106724: train_loss -0.6051 +2026-04-09 18:48:50.123206: val_loss -0.5441 +2026-04-09 18:48:50.128746: Pseudo dice [0.6602, 0.3343, 0.8131, 0.1503, 0.5667, 0.6108, 0.8552] +2026-04-09 18:48:50.136491: Epoch time: 105.81 s +2026-04-09 18:48:51.238970: +2026-04-09 18:48:51.243556: Epoch 910 +2026-04-09 18:48:51.248523: Current learning rate: 0.00115 +2026-04-09 18:50:36.401490: train_loss -0.6002 +2026-04-09 18:50:36.412813: val_loss -0.5117 +2026-04-09 18:50:36.417274: Pseudo dice [0.2804, 0.8662, 0.4846, 0.1778, 0.2991, 0.2455, 0.7191] +2026-04-09 18:50:36.421955: Epoch time: 105.17 s +2026-04-09 18:50:37.541901: +2026-04-09 18:50:37.546386: Epoch 911 +2026-04-09 18:50:37.550707: Current learning rate: 0.00113 +2026-04-09 
18:52:21.675943: train_loss -0.6117 +2026-04-09 18:52:21.687961: val_loss -0.5444 +2026-04-09 18:52:21.693020: Pseudo dice [0.6118, 0.8977, 0.6509, 0.2335, 0.5802, 0.7045, 0.8903] +2026-04-09 18:52:21.697273: Epoch time: 104.14 s +2026-04-09 18:52:22.789641: +2026-04-09 18:52:22.792994: Epoch 912 +2026-04-09 18:52:22.796958: Current learning rate: 0.00112 +2026-04-09 18:54:08.971081: train_loss -0.5989 +2026-04-09 18:54:08.983827: val_loss -0.5362 +2026-04-09 18:54:08.986582: Pseudo dice [0.0936, 0.8762, 0.6652, 0.6127, 0.5496, 0.4463, 0.751] +2026-04-09 18:54:08.990112: Epoch time: 106.18 s +2026-04-09 18:54:10.081824: +2026-04-09 18:54:10.086498: Epoch 913 +2026-04-09 18:54:10.089983: Current learning rate: 0.00111 +2026-04-09 18:55:54.125611: train_loss -0.6098 +2026-04-09 18:55:54.136883: val_loss -0.508 +2026-04-09 18:55:54.141567: Pseudo dice [0.6935, 0.8607, 0.6582, 0.2799, 0.4134, 0.1569, 0.7789] +2026-04-09 18:55:54.146027: Epoch time: 104.05 s +2026-04-09 18:55:55.258656: +2026-04-09 18:55:55.261779: Epoch 914 +2026-04-09 18:55:55.264474: Current learning rate: 0.0011 +2026-04-09 18:57:39.371920: train_loss -0.5928 +2026-04-09 18:57:39.381754: val_loss -0.5722 +2026-04-09 18:57:39.384685: Pseudo dice [0.6989, 0.6919, 0.537, 0.4083, 0.3074, 0.1399, 0.8222] +2026-04-09 18:57:39.387807: Epoch time: 104.12 s +2026-04-09 18:57:40.476522: +2026-04-09 18:57:40.479275: Epoch 915 +2026-04-09 18:57:40.481646: Current learning rate: 0.00109 +2026-04-09 18:59:26.865837: train_loss -0.5877 +2026-04-09 18:59:26.875748: val_loss -0.5395 +2026-04-09 18:59:26.879465: Pseudo dice [0.6744, 0.908, 0.745, 0.2347, 0.3624, 0.0781, 0.8618] +2026-04-09 18:59:26.883011: Epoch time: 106.39 s +2026-04-09 18:59:27.968327: +2026-04-09 18:59:27.972509: Epoch 916 +2026-04-09 18:59:27.976784: Current learning rate: 0.00108 +2026-04-09 19:01:12.533059: train_loss -0.5986 +2026-04-09 19:01:12.542476: val_loss -0.5903 +2026-04-09 19:01:12.545746: Pseudo dice [0.7009, 0.6825, 0.8061, 0.2593, 
0.6232, 0.8771, 0.8238] +2026-04-09 19:01:12.548221: Epoch time: 104.57 s +2026-04-09 19:01:13.739060: +2026-04-09 19:01:13.741605: Epoch 917 +2026-04-09 19:01:13.744234: Current learning rate: 0.00106 +2026-04-09 19:02:58.162307: train_loss -0.6053 +2026-04-09 19:02:58.175164: val_loss -0.6004 +2026-04-09 19:02:58.179224: Pseudo dice [0.7477, 0.7524, 0.8037, 0.4003, 0.4645, 0.9146, 0.79] +2026-04-09 19:02:58.183750: Epoch time: 104.43 s +2026-04-09 19:02:59.281361: +2026-04-09 19:02:59.286737: Epoch 918 +2026-04-09 19:02:59.292588: Current learning rate: 0.00105 +2026-04-09 19:04:43.401160: train_loss -0.6161 +2026-04-09 19:04:43.410882: val_loss -0.5129 +2026-04-09 19:04:43.414208: Pseudo dice [0.5855, 0.6734, 0.719, 0.1213, 0.5462, 0.3883, 0.8305] +2026-04-09 19:04:43.418574: Epoch time: 104.12 s +2026-04-09 19:04:44.517831: +2026-04-09 19:04:44.520389: Epoch 919 +2026-04-09 19:04:44.523160: Current learning rate: 0.00104 +2026-04-09 19:06:28.848292: train_loss -0.6043 +2026-04-09 19:06:28.863151: val_loss -0.5728 +2026-04-09 19:06:28.868409: Pseudo dice [0.8697, 0.7934, 0.7515, 0.4635, 0.4288, 0.6119, 0.8127] +2026-04-09 19:06:28.872000: Epoch time: 104.33 s +2026-04-09 19:06:29.978442: +2026-04-09 19:06:29.982617: Epoch 920 +2026-04-09 19:06:29.985909: Current learning rate: 0.00103 +2026-04-09 19:08:13.823040: train_loss -0.6161 +2026-04-09 19:08:13.832387: val_loss -0.5511 +2026-04-09 19:08:13.842941: Pseudo dice [0.5444, 0.7493, 0.783, 0.3129, 0.5681, 0.1218, 0.7223] +2026-04-09 19:08:13.846105: Epoch time: 103.85 s +2026-04-09 19:08:14.941879: +2026-04-09 19:08:14.945592: Epoch 921 +2026-04-09 19:08:14.948633: Current learning rate: 0.00102 +2026-04-09 19:10:00.422074: train_loss -0.6122 +2026-04-09 19:10:00.436824: val_loss -0.5556 +2026-04-09 19:10:00.442357: Pseudo dice [0.802, 0.885, 0.6422, 0.5621, 0.4861, 0.0829, 0.8434] +2026-04-09 19:10:00.446966: Epoch time: 105.48 s +2026-04-09 19:10:01.587250: +2026-04-09 19:10:01.592177: Epoch 922 +2026-04-09 
19:10:01.598741: Current learning rate: 0.00101 +2026-04-09 19:11:45.045198: train_loss -0.6021 +2026-04-09 19:11:45.053606: val_loss -0.5594 +2026-04-09 19:11:45.057029: Pseudo dice [0.7437, 0.6207, 0.7603, 0.5388, 0.2131, 0.8415, 0.8369] +2026-04-09 19:11:45.066302: Epoch time: 103.46 s +2026-04-09 19:11:46.154024: +2026-04-09 19:11:46.156667: Epoch 923 +2026-04-09 19:11:46.159652: Current learning rate: 0.001 +2026-04-09 19:13:31.399406: train_loss -0.6053 +2026-04-09 19:13:31.412890: val_loss -0.5094 +2026-04-09 19:13:31.418581: Pseudo dice [0.7755, 0.7433, 0.7299, 0.1105, 0.5427, 0.6321, 0.5421] +2026-04-09 19:13:31.423429: Epoch time: 105.25 s +2026-04-09 19:13:32.534043: +2026-04-09 19:13:32.536850: Epoch 924 +2026-04-09 19:13:32.540888: Current learning rate: 0.00098 +2026-04-09 19:15:16.094124: train_loss -0.6019 +2026-04-09 19:15:16.103579: val_loss -0.5194 +2026-04-09 19:15:16.106800: Pseudo dice [0.8147, 0.6592, 0.6919, 0.0, 0.2525, 0.6173, 0.7888] +2026-04-09 19:15:16.110019: Epoch time: 103.56 s +2026-04-09 19:15:17.205198: +2026-04-09 19:15:17.209407: Epoch 925 +2026-04-09 19:15:17.214156: Current learning rate: 0.00097 +2026-04-09 19:16:59.632842: train_loss -0.6056 +2026-04-09 19:16:59.645734: val_loss -0.5396 +2026-04-09 19:16:59.648684: Pseudo dice [0.6439, 0.9109, 0.8529, 0.5736, 0.5151, 0.7238, 0.7418] +2026-04-09 19:16:59.652324: Epoch time: 102.43 s +2026-04-09 19:17:00.753335: +2026-04-09 19:17:00.755444: Epoch 926 +2026-04-09 19:17:00.758009: Current learning rate: 0.00096 +2026-04-09 19:18:43.523658: train_loss -0.6051 +2026-04-09 19:18:43.531582: val_loss -0.5611 +2026-04-09 19:18:43.535031: Pseudo dice [0.6232, 0.3193, 0.7495, 0.222, 0.6389, 0.5232, 0.8093] +2026-04-09 19:18:43.538866: Epoch time: 102.77 s +2026-04-09 19:18:44.679189: +2026-04-09 19:18:44.682600: Epoch 927 +2026-04-09 19:18:44.689525: Current learning rate: 0.00095 +2026-04-09 19:20:28.690641: train_loss -0.5945 +2026-04-09 19:20:28.700255: val_loss -0.5489 +2026-04-09 
19:20:28.703555: Pseudo dice [0.6319, 0.5407, 0.7172, 0.2585, 0.3886, 0.1085, 0.8778] +2026-04-09 19:20:28.706850: Epoch time: 104.01 s +2026-04-09 19:20:29.786753: +2026-04-09 19:20:29.790324: Epoch 928 +2026-04-09 19:20:29.794721: Current learning rate: 0.00094 +2026-04-09 19:22:12.670008: train_loss -0.6261 +2026-04-09 19:22:12.679726: val_loss -0.5629 +2026-04-09 19:22:12.682564: Pseudo dice [0.7168, 0.8094, 0.6701, 0.6456, 0.4177, 0.5167, 0.8249] +2026-04-09 19:22:12.687062: Epoch time: 102.89 s +2026-04-09 19:22:13.815246: +2026-04-09 19:22:13.818601: Epoch 929 +2026-04-09 19:22:13.822079: Current learning rate: 0.00092 +2026-04-09 19:23:57.663076: train_loss -0.617 +2026-04-09 19:23:57.671872: val_loss -0.5102 +2026-04-09 19:23:57.675594: Pseudo dice [0.6285, 0.8929, 0.6923, 0.2237, 0.3277, 0.0613, 0.5539] +2026-04-09 19:23:57.679200: Epoch time: 103.85 s +2026-04-09 19:23:59.867002: +2026-04-09 19:23:59.869122: Epoch 930 +2026-04-09 19:23:59.871545: Current learning rate: 0.00091 +2026-04-09 19:25:43.580922: train_loss -0.6166 +2026-04-09 19:25:43.592427: val_loss -0.5389 +2026-04-09 19:25:43.595476: Pseudo dice [0.5098, 0.758, 0.7641, 0.3381, 0.3743, 0.4223, 0.7579] +2026-04-09 19:25:43.598419: Epoch time: 103.72 s +2026-04-09 19:25:44.693119: +2026-04-09 19:25:44.696029: Epoch 931 +2026-04-09 19:25:44.699157: Current learning rate: 0.0009 +2026-04-09 19:27:28.751983: train_loss -0.6109 +2026-04-09 19:27:28.766623: val_loss -0.5829 +2026-04-09 19:27:28.772731: Pseudo dice [0.4662, 0.9001, 0.7219, 0.1371, 0.3907, 0.2135, 0.8123] +2026-04-09 19:27:28.780813: Epoch time: 104.06 s +2026-04-09 19:27:29.874777: +2026-04-09 19:27:29.882384: Epoch 932 +2026-04-09 19:27:29.889435: Current learning rate: 0.00089 +2026-04-09 19:29:12.523295: train_loss -0.6081 +2026-04-09 19:29:12.533381: val_loss -0.5524 +2026-04-09 19:29:12.537531: Pseudo dice [0.6174, 0.8776, 0.8719, 0.0729, 0.4828, 0.7419, 0.823] +2026-04-09 19:29:12.549686: Epoch time: 102.65 s +2026-04-09 
19:29:13.636792: +2026-04-09 19:29:13.644060: Epoch 933 +2026-04-09 19:29:13.649144: Current learning rate: 0.00088 +2026-04-09 19:30:58.388392: train_loss -0.6183 +2026-04-09 19:30:58.400232: val_loss -0.5398 +2026-04-09 19:30:58.404073: Pseudo dice [0.4126, 0.8668, 0.8467, 0.4021, 0.1106, 0.2315, 0.7084] +2026-04-09 19:30:58.408054: Epoch time: 104.75 s +2026-04-09 19:30:59.486001: +2026-04-09 19:30:59.489672: Epoch 934 +2026-04-09 19:30:59.492686: Current learning rate: 0.00087 +2026-04-09 19:32:43.152048: train_loss -0.6019 +2026-04-09 19:32:43.162479: val_loss -0.577 +2026-04-09 19:32:43.167525: Pseudo dice [0.4929, 0.5601, 0.7103, 0.0031, 0.6462, 0.9227, 0.7646] +2026-04-09 19:32:43.172985: Epoch time: 103.67 s +2026-04-09 19:32:44.304390: +2026-04-09 19:32:44.308569: Epoch 935 +2026-04-09 19:32:44.311494: Current learning rate: 0.00085 +2026-04-09 19:34:27.885531: train_loss -0.5968 +2026-04-09 19:34:27.899969: val_loss -0.6011 +2026-04-09 19:34:27.904732: Pseudo dice [0.6408, 0.8607, 0.8514, 0.6418, 0.5717, 0.8281, 0.7538] +2026-04-09 19:34:27.908769: Epoch time: 103.58 s +2026-04-09 19:34:29.009067: +2026-04-09 19:34:29.012662: Epoch 936 +2026-04-09 19:34:29.016392: Current learning rate: 0.00084 +2026-04-09 19:36:12.447579: train_loss -0.591 +2026-04-09 19:36:12.456306: val_loss -0.5682 +2026-04-09 19:36:12.459730: Pseudo dice [0.4996, 0.7012, 0.7569, 0.4568, 0.5022, 0.8664, 0.8108] +2026-04-09 19:36:12.463740: Epoch time: 103.44 s +2026-04-09 19:36:13.581578: +2026-04-09 19:36:13.585171: Epoch 937 +2026-04-09 19:36:13.588204: Current learning rate: 0.00083 +2026-04-09 19:37:56.896652: train_loss -0.5937 +2026-04-09 19:37:56.907359: val_loss -0.5254 +2026-04-09 19:37:56.911005: Pseudo dice [0.6153, 0.9117, 0.5654, 0.0, 0.3045, 0.6331, 0.8412] +2026-04-09 19:37:56.914751: Epoch time: 103.32 s +2026-04-09 19:37:58.037519: +2026-04-09 19:37:58.039756: Epoch 938 +2026-04-09 19:37:58.041818: Current learning rate: 0.00082 +2026-04-09 19:39:41.556650: 
train_loss -0.6131 +2026-04-09 19:39:41.565307: val_loss -0.6069 +2026-04-09 19:39:41.569786: Pseudo dice [0.8563, 0.7258, 0.6824, 0.7507, 0.5574, 0.4942, 0.832] +2026-04-09 19:39:41.573183: Epoch time: 103.52 s +2026-04-09 19:39:42.688175: +2026-04-09 19:39:42.691181: Epoch 939 +2026-04-09 19:39:42.694562: Current learning rate: 0.00081 +2026-04-09 19:41:25.639504: train_loss -0.6207 +2026-04-09 19:41:25.647573: val_loss -0.5647 +2026-04-09 19:41:25.650060: Pseudo dice [0.1959, 0.6, 0.7609, 0.5436, 0.3801, 0.7789, 0.7828] +2026-04-09 19:41:25.652434: Epoch time: 102.95 s +2026-04-09 19:41:26.742783: +2026-04-09 19:41:26.744842: Epoch 940 +2026-04-09 19:41:26.746915: Current learning rate: 0.00079 +2026-04-09 19:43:09.494876: train_loss -0.6271 +2026-04-09 19:43:09.503263: val_loss -0.603 +2026-04-09 19:43:09.506033: Pseudo dice [0.3213, 0.8599, 0.7392, 0.0389, 0.5962, 0.7584, 0.8517] +2026-04-09 19:43:09.508206: Epoch time: 102.76 s +2026-04-09 19:43:10.592960: +2026-04-09 19:43:10.595821: Epoch 941 +2026-04-09 19:43:10.599388: Current learning rate: 0.00078 +2026-04-09 19:44:54.265723: train_loss -0.6254 +2026-04-09 19:44:54.281021: val_loss -0.577 +2026-04-09 19:44:54.284266: Pseudo dice [0.7764, 0.7285, 0.7688, 0.4671, 0.2473, 0.4387, 0.8512] +2026-04-09 19:44:54.287089: Epoch time: 103.68 s +2026-04-09 19:44:55.377071: +2026-04-09 19:44:55.379921: Epoch 942 +2026-04-09 19:44:55.384964: Current learning rate: 0.00077 +2026-04-09 19:46:40.476618: train_loss -0.5888 +2026-04-09 19:46:40.489964: val_loss -0.5261 +2026-04-09 19:46:40.492890: Pseudo dice [0.2682, 0.8684, 0.7464, 0.0894, 0.4304, 0.7893, 0.6862] +2026-04-09 19:46:40.496121: Epoch time: 105.1 s +2026-04-09 19:46:41.596061: +2026-04-09 19:46:41.599572: Epoch 943 +2026-04-09 19:46:41.602501: Current learning rate: 0.00076 +2026-04-09 19:48:24.917693: train_loss -0.5929 +2026-04-09 19:48:24.928936: val_loss -0.5652 +2026-04-09 19:48:24.932724: Pseudo dice [0.1077, 0.4787, 0.7671, 0.2469, 0.3596, 0.9101, 
0.8243] +2026-04-09 19:48:24.936322: Epoch time: 103.32 s +2026-04-09 19:48:26.012687: +2026-04-09 19:48:26.014769: Epoch 944 +2026-04-09 19:48:26.017562: Current learning rate: 0.00075 +2026-04-09 19:50:10.287631: train_loss -0.5906 +2026-04-09 19:50:10.298604: val_loss -0.5649 +2026-04-09 19:50:10.302007: Pseudo dice [0.8367, 0.5686, 0.7566, 0.4054, 0.5974, 0.7008, 0.8599] +2026-04-09 19:50:10.305126: Epoch time: 104.28 s +2026-04-09 19:50:11.418834: +2026-04-09 19:50:11.422258: Epoch 945 +2026-04-09 19:50:11.424970: Current learning rate: 0.00074 +2026-04-09 19:51:56.591608: train_loss -0.6126 +2026-04-09 19:51:56.620431: val_loss -0.5787 +2026-04-09 19:51:56.623352: Pseudo dice [0.4048, 0.7048, 0.7732, 0.2728, 0.3564, 0.8319, 0.8534] +2026-04-09 19:51:56.628251: Epoch time: 105.18 s +2026-04-09 19:51:57.707531: +2026-04-09 19:51:57.710471: Epoch 946 +2026-04-09 19:51:57.713054: Current learning rate: 0.00072 +2026-04-09 19:53:41.579066: train_loss -0.6145 +2026-04-09 19:53:41.592229: val_loss -0.5521 +2026-04-09 19:53:41.596750: Pseudo dice [0.7049, 0.9024, 0.662, 0.3413, 0.4926, 0.5555, 0.7732] +2026-04-09 19:53:41.602867: Epoch time: 103.87 s +2026-04-09 19:53:42.687404: +2026-04-09 19:53:42.697536: Epoch 947 +2026-04-09 19:53:42.699908: Current learning rate: 0.00071 +2026-04-09 19:55:27.098698: train_loss -0.6058 +2026-04-09 19:55:27.116009: val_loss -0.5746 +2026-04-09 19:55:27.119183: Pseudo dice [0.5824, 0.7153, 0.7813, 0.1663, 0.5709, 0.4261, 0.7886] +2026-04-09 19:55:27.124657: Epoch time: 104.41 s +2026-04-09 19:55:28.243503: +2026-04-09 19:55:28.248871: Epoch 948 +2026-04-09 19:55:28.254121: Current learning rate: 0.0007 +2026-04-09 19:57:12.408201: train_loss -0.6251 +2026-04-09 19:57:12.419042: val_loss -0.577 +2026-04-09 19:57:12.422001: Pseudo dice [0.6568, 0.5728, 0.7784, 0.5176, 0.3669, 0.9379, 0.8709] +2026-04-09 19:57:12.425125: Epoch time: 104.17 s +2026-04-09 19:57:13.539418: +2026-04-09 19:57:13.544698: Epoch 949 +2026-04-09 
19:57:13.548436: Current learning rate: 0.00069 +2026-04-09 19:58:57.292873: train_loss -0.621 +2026-04-09 19:58:57.300688: val_loss -0.5689 +2026-04-09 19:58:57.304455: Pseudo dice [0.7711, 0.6554, 0.794, 0.0894, 0.522, 0.538, 0.7657] +2026-04-09 19:58:57.307554: Epoch time: 103.76 s +2026-04-09 19:59:01.304769: +2026-04-09 19:59:01.306744: Epoch 950 +2026-04-09 19:59:01.308589: Current learning rate: 0.00067 +2026-04-09 20:00:46.041914: train_loss -0.6204 +2026-04-09 20:00:46.051886: val_loss -0.5813 +2026-04-09 20:00:46.054511: Pseudo dice [0.6947, 0.4887, 0.7123, 0.0641, 0.5908, 0.6232, 0.6492] +2026-04-09 20:00:46.058182: Epoch time: 104.74 s +2026-04-09 20:00:47.173340: +2026-04-09 20:00:47.176285: Epoch 951 +2026-04-09 20:00:47.178506: Current learning rate: 0.00066 +2026-04-09 20:02:30.189296: train_loss -0.6218 +2026-04-09 20:02:30.200502: val_loss -0.59 +2026-04-09 20:02:30.205044: Pseudo dice [0.7014, 0.597, 0.8036, 0.3256, 0.4828, 0.6707, 0.8299] +2026-04-09 20:02:30.208411: Epoch time: 103.02 s +2026-04-09 20:02:31.321051: +2026-04-09 20:02:31.323459: Epoch 952 +2026-04-09 20:02:31.325709: Current learning rate: 0.00065 +2026-04-09 20:04:15.076266: train_loss -0.6141 +2026-04-09 20:04:15.086978: val_loss -0.581 +2026-04-09 20:04:15.091761: Pseudo dice [0.5146, 0.8927, 0.8543, 0.171, 0.6411, 0.5235, 0.7437] +2026-04-09 20:04:15.094662: Epoch time: 103.76 s +2026-04-09 20:04:16.182943: +2026-04-09 20:04:16.186404: Epoch 953 +2026-04-09 20:04:16.189707: Current learning rate: 0.00064 +2026-04-09 20:06:02.463660: train_loss -0.6034 +2026-04-09 20:06:02.475081: val_loss -0.5908 +2026-04-09 20:06:02.479232: Pseudo dice [0.2112, 0.7726, 0.6896, 0.0, 0.6501, 0.9206, 0.8718] +2026-04-09 20:06:02.484460: Epoch time: 106.28 s +2026-04-09 20:06:03.626277: +2026-04-09 20:06:03.632908: Epoch 954 +2026-04-09 20:06:03.642129: Current learning rate: 0.00063 +2026-04-09 20:07:47.974201: train_loss -0.6247 +2026-04-09 20:07:47.984247: val_loss -0.5472 +2026-04-09 
20:07:47.987307: Pseudo dice [0.6474, 0.8936, 0.7378, 0.2077, 0.6291, 0.1441, 0.8909] +2026-04-09 20:07:47.991064: Epoch time: 104.35 s +2026-04-09 20:07:49.114725: +2026-04-09 20:07:49.118796: Epoch 955 +2026-04-09 20:07:49.122379: Current learning rate: 0.00061 +2026-04-09 20:09:32.643507: train_loss -0.6214 +2026-04-09 20:09:32.655597: val_loss -0.5216 +2026-04-09 20:09:32.658633: Pseudo dice [0.6543, 0.9068, 0.7425, 0.2411, 0.468, 0.7233, 0.669] +2026-04-09 20:09:32.662623: Epoch time: 103.53 s +2026-04-09 20:09:33.819705: +2026-04-09 20:09:33.822750: Epoch 956 +2026-04-09 20:09:33.825650: Current learning rate: 0.0006 +2026-04-09 20:11:18.317498: train_loss -0.6133 +2026-04-09 20:11:18.327041: val_loss -0.5631 +2026-04-09 20:11:18.330653: Pseudo dice [0.4737, 0.2255, 0.7369, 0.1058, 0.531, 0.8113, 0.8713] +2026-04-09 20:11:18.334285: Epoch time: 104.5 s +2026-04-09 20:11:19.439575: +2026-04-09 20:11:19.444352: Epoch 957 +2026-04-09 20:11:19.458006: Current learning rate: 0.00059 +2026-04-09 20:13:03.836737: train_loss -0.6279 +2026-04-09 20:13:03.844820: val_loss -0.5799 +2026-04-09 20:13:03.847656: Pseudo dice [0.6645, 0.4624, 0.8059, 0.026, 0.6839, 0.8244, 0.778] +2026-04-09 20:13:03.850527: Epoch time: 104.4 s +2026-04-09 20:13:04.955071: +2026-04-09 20:13:04.965614: Epoch 958 +2026-04-09 20:13:04.969489: Current learning rate: 0.00058 +2026-04-09 20:14:49.077637: train_loss -0.6131 +2026-04-09 20:14:49.087316: val_loss -0.5833 +2026-04-09 20:14:49.091374: Pseudo dice [0.6677, 0.7961, 0.7637, 0.6557, 0.3635, 0.8549, 0.8296] +2026-04-09 20:14:49.095747: Epoch time: 104.13 s +2026-04-09 20:14:50.208660: +2026-04-09 20:14:50.211287: Epoch 959 +2026-04-09 20:14:50.214363: Current learning rate: 0.00056 +2026-04-09 20:16:37.000863: train_loss -0.6196 +2026-04-09 20:16:37.013527: val_loss -0.5676 +2026-04-09 20:16:37.016957: Pseudo dice [0.7925, 0.6701, 0.8244, 0.2294, 0.5218, 0.9223, 0.7873] +2026-04-09 20:16:37.021393: Epoch time: 106.8 s +2026-04-09 
20:16:37.025246: Yayy! New best EMA pseudo Dice: 0.6161 +2026-04-09 20:16:40.163441: +2026-04-09 20:16:40.165994: Epoch 960 +2026-04-09 20:16:40.168151: Current learning rate: 0.00055 +2026-04-09 20:18:27.551481: train_loss -0.6054 +2026-04-09 20:18:27.569893: val_loss -0.5653 +2026-04-09 20:18:27.574515: Pseudo dice [0.8105, 0.8892, 0.8434, 0.3966, 0.4418, 0.3845, 0.7722] +2026-04-09 20:18:27.580423: Epoch time: 107.39 s +2026-04-09 20:18:27.586429: Yayy! New best EMA pseudo Dice: 0.6193 +2026-04-09 20:18:30.801036: +2026-04-09 20:18:30.804942: Epoch 961 +2026-04-09 20:18:30.808009: Current learning rate: 0.00054 +2026-04-09 20:20:13.867015: train_loss -0.6268 +2026-04-09 20:20:13.879866: val_loss -0.5531 +2026-04-09 20:20:13.884139: Pseudo dice [0.893, 0.6891, 0.7643, 0.4408, 0.7184, 0.5704, 0.5073] +2026-04-09 20:20:13.887033: Epoch time: 103.07 s +2026-04-09 20:20:13.890179: Yayy! New best EMA pseudo Dice: 0.6228 +2026-04-09 20:20:16.976415: +2026-04-09 20:20:16.978471: Epoch 962 +2026-04-09 20:20:16.980570: Current learning rate: 0.00053 +2026-04-09 20:21:59.760639: train_loss -0.6251 +2026-04-09 20:21:59.771320: val_loss -0.5985 +2026-04-09 20:21:59.775933: Pseudo dice [0.8493, 0.7608, 0.6928, 0.5631, 0.355, 0.8488, 0.8723] +2026-04-09 20:21:59.778969: Epoch time: 102.79 s +2026-04-09 20:21:59.782115: Yayy! New best EMA pseudo Dice: 0.6312 +2026-04-09 20:22:02.731354: +2026-04-09 20:22:02.734007: Epoch 963 +2026-04-09 20:22:02.736000: Current learning rate: 0.00051 +2026-04-09 20:23:45.054664: train_loss -0.6327 +2026-04-09 20:23:45.065692: val_loss -0.5441 +2026-04-09 20:23:45.068778: Pseudo dice [0.7361, 0.9034, 0.7775, 0.2472, 0.6353, 0.7656, 0.8061] +2026-04-09 20:23:45.071815: Epoch time: 102.33 s +2026-04-09 20:23:45.074193: Yayy! 
New best EMA pseudo Dice: 0.6376 +2026-04-09 20:23:48.164433: +2026-04-09 20:23:48.169849: Epoch 964 +2026-04-09 20:23:48.174477: Current learning rate: 0.0005 +2026-04-09 20:25:31.980220: train_loss -0.6032 +2026-04-09 20:25:31.992783: val_loss -0.6104 +2026-04-09 20:25:31.998583: Pseudo dice [0.8906, 0.6504, 0.8585, 0.6983, 0.6377, 0.917, 0.7889] +2026-04-09 20:25:32.003853: Epoch time: 103.82 s +2026-04-09 20:25:32.007254: Yayy! New best EMA pseudo Dice: 0.6516 +2026-04-09 20:25:35.308983: +2026-04-09 20:25:35.311102: Epoch 965 +2026-04-09 20:25:35.313053: Current learning rate: 0.00049 +2026-04-09 20:27:18.795003: train_loss -0.6271 +2026-04-09 20:27:18.804897: val_loss -0.5448 +2026-04-09 20:27:18.808218: Pseudo dice [0.7953, 0.8952, 0.5672, 0.0353, 0.6962, 0.5664, 0.8767] +2026-04-09 20:27:18.810654: Epoch time: 103.49 s +2026-04-09 20:27:19.954036: +2026-04-09 20:27:19.959194: Epoch 966 +2026-04-09 20:27:19.961775: Current learning rate: 0.00048 +2026-04-09 20:29:03.242360: train_loss -0.6287 +2026-04-09 20:29:03.258689: val_loss -0.5636 +2026-04-09 20:29:03.261938: Pseudo dice [0.5231, 0.9089, 0.6776, 0.2211, 0.6177, 0.5622, 0.8426] +2026-04-09 20:29:03.269593: Epoch time: 103.29 s +2026-04-09 20:29:04.382249: +2026-04-09 20:29:04.384575: Epoch 967 +2026-04-09 20:29:04.387583: Current learning rate: 0.00046 +2026-04-09 20:30:46.667438: train_loss -0.6314 +2026-04-09 20:30:46.674783: val_loss -0.577 +2026-04-09 20:30:46.679298: Pseudo dice [0.1864, 0.9028, 0.8226, 0.1144, 0.4392, 0.4362, 0.6886] +2026-04-09 20:30:46.681413: Epoch time: 102.29 s +2026-04-09 20:30:47.769100: +2026-04-09 20:30:47.772081: Epoch 968 +2026-04-09 20:30:47.775073: Current learning rate: 0.00045 +2026-04-09 20:32:32.454112: train_loss -0.605 +2026-04-09 20:32:32.465128: val_loss -0.5916 +2026-04-09 20:32:32.469522: Pseudo dice [0.6135, 0.646, 0.7679, 0.5011, 0.5388, 0.6362, 0.8001] +2026-04-09 20:32:32.473410: Epoch time: 104.69 s +2026-04-09 20:32:33.562993: +2026-04-09 
20:32:33.565938: Epoch 969 +2026-04-09 20:32:33.571754: Current learning rate: 0.00044 +2026-04-09 20:34:15.287536: train_loss -0.6316 +2026-04-09 20:34:15.301234: val_loss -0.5953 +2026-04-09 20:34:15.303823: Pseudo dice [0.5888, 0.7702, 0.7768, 0.3934, 0.4653, 0.729, 0.8477] +2026-04-09 20:34:15.306417: Epoch time: 101.73 s +2026-04-09 20:34:16.410830: +2026-04-09 20:34:16.413327: Epoch 970 +2026-04-09 20:34:16.417485: Current learning rate: 0.00043 +2026-04-09 20:36:00.174821: train_loss -0.6224 +2026-04-09 20:36:00.183743: val_loss -0.566 +2026-04-09 20:36:00.186887: Pseudo dice [0.6697, 0.7831, 0.746, 0.4341, 0.312, 0.8569, 0.7121] +2026-04-09 20:36:00.189195: Epoch time: 103.77 s +2026-04-09 20:36:01.285787: +2026-04-09 20:36:01.288008: Epoch 971 +2026-04-09 20:36:01.289960: Current learning rate: 0.00041 +2026-04-09 20:37:42.990304: train_loss -0.6324 +2026-04-09 20:37:42.998984: val_loss -0.5697 +2026-04-09 20:37:43.002034: Pseudo dice [0.2831, 0.8919, 0.8342, 0.2493, 0.5284, 0.1333, 0.7886] +2026-04-09 20:37:43.004539: Epoch time: 101.71 s +2026-04-09 20:37:44.113984: +2026-04-09 20:37:44.116493: Epoch 972 +2026-04-09 20:37:44.119284: Current learning rate: 0.0004 +2026-04-09 20:39:26.577661: train_loss -0.6245 +2026-04-09 20:39:26.587067: val_loss -0.6013 +2026-04-09 20:39:26.590057: Pseudo dice [0.7898, 0.6684, 0.8079, 0.5005, 0.5182, 0.8668, 0.8001] +2026-04-09 20:39:26.594187: Epoch time: 102.47 s +2026-04-09 20:39:27.707929: +2026-04-09 20:39:27.710571: Epoch 973 +2026-04-09 20:39:27.713073: Current learning rate: 0.00039 +2026-04-09 20:41:11.341872: train_loss -0.6307 +2026-04-09 20:41:11.353468: val_loss -0.5631 +2026-04-09 20:41:11.356466: Pseudo dice [0.6051, 0.745, 0.8718, 0.1301, 0.4255, 0.8177, 0.8133] +2026-04-09 20:41:11.359979: Epoch time: 103.64 s +2026-04-09 20:41:12.466361: +2026-04-09 20:41:12.471215: Epoch 974 +2026-04-09 20:41:12.474002: Current learning rate: 0.00037 +2026-04-09 20:42:55.644189: train_loss -0.6377 +2026-04-09 
20:42:55.655742: val_loss -0.5271 +2026-04-09 20:42:55.662978: Pseudo dice [0.3807, 0.8783, 0.6318, 0.2716, 0.2854, 0.5501, 0.7256] +2026-04-09 20:42:55.665404: Epoch time: 103.18 s +2026-04-09 20:42:56.760837: +2026-04-09 20:42:56.763182: Epoch 975 +2026-04-09 20:42:56.766372: Current learning rate: 0.00036 +2026-04-09 20:44:40.196090: train_loss -0.6325 +2026-04-09 20:44:40.203149: val_loss -0.5545 +2026-04-09 20:44:40.205893: Pseudo dice [0.6818, 0.9065, 0.8338, 0.3148, 0.3652, 0.7356, 0.7912] +2026-04-09 20:44:40.207860: Epoch time: 103.44 s +2026-04-09 20:44:41.285682: +2026-04-09 20:44:41.287678: Epoch 976 +2026-04-09 20:44:41.291195: Current learning rate: 0.00035 +2026-04-09 20:46:23.812797: train_loss -0.6164 +2026-04-09 20:46:23.822992: val_loss -0.6066 +2026-04-09 20:46:23.827085: Pseudo dice [0.8406, 0.8093, 0.6686, 0.2836, 0.5163, 0.8654, 0.8866] +2026-04-09 20:46:23.829761: Epoch time: 102.53 s +2026-04-09 20:46:24.932759: +2026-04-09 20:46:24.935821: Epoch 977 +2026-04-09 20:46:24.939793: Current learning rate: 0.00034 +2026-04-09 20:48:08.923028: train_loss -0.6381 +2026-04-09 20:48:08.932243: val_loss -0.5356 +2026-04-09 20:48:08.934829: Pseudo dice [0.6673, 0.539, 0.7348, 0.3545, 0.4602, 0.0968, 0.8056] +2026-04-09 20:48:08.937372: Epoch time: 103.99 s +2026-04-09 20:48:10.061067: +2026-04-09 20:48:10.063757: Epoch 978 +2026-04-09 20:48:10.070753: Current learning rate: 0.00032 +2026-04-09 20:49:53.008307: train_loss -0.6305 +2026-04-09 20:49:53.016663: val_loss -0.5871 +2026-04-09 20:49:53.020550: Pseudo dice [0.6888, 0.9234, 0.7495, 0.4838, 0.3237, 0.5232, 0.8164] +2026-04-09 20:49:53.024154: Epoch time: 102.95 s +2026-04-09 20:49:54.144544: +2026-04-09 20:49:54.148231: Epoch 979 +2026-04-09 20:49:54.151829: Current learning rate: 0.00031 +2026-04-09 20:51:36.314968: train_loss -0.6386 +2026-04-09 20:51:36.322704: val_loss -0.5883 +2026-04-09 20:51:36.325415: Pseudo dice [0.6639, 0.5998, 0.7709, 0.3513, 0.5909, 0.7431, 0.8209] +2026-04-09 
20:51:36.328586: Epoch time: 102.17 s +2026-04-09 20:51:37.437426: +2026-04-09 20:51:37.439532: Epoch 980 +2026-04-09 20:51:37.443535: Current learning rate: 0.0003 +2026-04-09 20:53:21.363582: train_loss -0.6277 +2026-04-09 20:53:21.374540: val_loss -0.5371 +2026-04-09 20:53:21.381973: Pseudo dice [0.3158, 0.8449, 0.5937, 0.3918, 0.3564, 0.4913, 0.8427] +2026-04-09 20:53:21.388786: Epoch time: 103.93 s +2026-04-09 20:53:22.506617: +2026-04-09 20:53:22.509899: Epoch 981 +2026-04-09 20:53:22.513547: Current learning rate: 0.00028 +2026-04-09 20:55:05.450592: train_loss -0.6428 +2026-04-09 20:55:05.459368: val_loss -0.585 +2026-04-09 20:55:05.474644: Pseudo dice [0.6516, 0.6327, 0.7916, 0.5719, 0.6475, 0.9141, 0.7593] +2026-04-09 20:55:05.479383: Epoch time: 102.95 s +2026-04-09 20:55:06.584910: +2026-04-09 20:55:06.587568: Epoch 982 +2026-04-09 20:55:06.590531: Current learning rate: 0.00027 +2026-04-09 20:56:48.600214: train_loss -0.6331 +2026-04-09 20:56:48.607779: val_loss -0.5902 +2026-04-09 20:56:48.611018: Pseudo dice [0.751, 0.8279, 0.7392, 0.4285, 0.6238, 0.5644, 0.8688] +2026-04-09 20:56:48.614159: Epoch time: 102.02 s +2026-04-09 20:56:49.750831: +2026-04-09 20:56:49.753501: Epoch 983 +2026-04-09 20:56:49.757367: Current learning rate: 0.00026 +2026-04-09 20:58:33.610643: train_loss -0.6393 +2026-04-09 20:58:33.620277: val_loss -0.5628 +2026-04-09 20:58:33.624506: Pseudo dice [0.5101, 0.9026, 0.6298, 0.6863, 0.6404, 0.0913, 0.6652] +2026-04-09 20:58:33.628511: Epoch time: 103.86 s +2026-04-09 20:58:34.754014: +2026-04-09 20:58:34.757746: Epoch 984 +2026-04-09 20:58:34.763415: Current learning rate: 0.00024 +2026-04-09 21:00:17.824324: train_loss -0.6252 +2026-04-09 21:00:17.831191: val_loss -0.5421 +2026-04-09 21:00:17.833781: Pseudo dice [0.5755, 0.9018, 0.7564, 0.5907, 0.5575, 0.7489, 0.7228] +2026-04-09 21:00:17.836310: Epoch time: 103.07 s +2026-04-09 21:00:18.967686: +2026-04-09 21:00:18.970308: Epoch 985 +2026-04-09 21:00:18.972691: Current learning 
rate: 0.00023 +2026-04-09 21:02:02.080979: train_loss -0.6418 +2026-04-09 21:02:02.090179: val_loss -0.5673 +2026-04-09 21:02:02.094915: Pseudo dice [0.7521, 0.7889, 0.6983, 0.7592, 0.256, 0.3864, 0.6298] +2026-04-09 21:02:02.097565: Epoch time: 103.12 s +2026-04-09 21:02:03.192902: +2026-04-09 21:02:03.198071: Epoch 986 +2026-04-09 21:02:03.204068: Current learning rate: 0.00021 +2026-04-09 21:03:47.383664: train_loss -0.6368 +2026-04-09 21:03:47.392581: val_loss -0.5812 +2026-04-09 21:03:47.395471: Pseudo dice [0.8774, 0.917, 0.8085, 0.6305, 0.5222, 0.738, 0.8146] +2026-04-09 21:03:47.399348: Epoch time: 104.19 s +2026-04-09 21:03:48.510461: +2026-04-09 21:03:48.513949: Epoch 987 +2026-04-09 21:03:48.517867: Current learning rate: 0.0002 +2026-04-09 21:05:31.781138: train_loss -0.6228 +2026-04-09 21:05:31.796601: val_loss -0.5552 +2026-04-09 21:05:31.803388: Pseudo dice [0.6516, 0.9229, 0.7488, 0.8202, 0.6843, 0.0998, 0.8583] +2026-04-09 21:05:31.810156: Epoch time: 103.27 s +2026-04-09 21:05:32.932483: +2026-04-09 21:05:32.935842: Epoch 988 +2026-04-09 21:05:32.939993: Current learning rate: 0.00019 +2026-04-09 21:07:15.654187: train_loss -0.6241 +2026-04-09 21:07:15.662544: val_loss -0.5757 +2026-04-09 21:07:15.666690: Pseudo dice [0.6064, 0.8865, 0.7001, 0.1298, 0.7197, 0.3683, 0.8153] +2026-04-09 21:07:15.669485: Epoch time: 102.72 s +2026-04-09 21:07:17.991185: +2026-04-09 21:07:17.994099: Epoch 989 +2026-04-09 21:07:17.996231: Current learning rate: 0.00017 +2026-04-09 21:09:01.483588: train_loss -0.6331 +2026-04-09 21:09:01.496506: val_loss -0.5138 +2026-04-09 21:09:01.499961: Pseudo dice [0.6495, 0.8795, 0.7594, 0.5022, 0.4444, 0.4371, 0.7977] +2026-04-09 21:09:01.502654: Epoch time: 103.5 s +2026-04-09 21:09:02.623466: +2026-04-09 21:09:02.627950: Epoch 990 +2026-04-09 21:09:02.631027: Current learning rate: 0.00016 +2026-04-09 21:10:46.401092: train_loss -0.6346 +2026-04-09 21:10:46.410465: val_loss -0.5789 +2026-04-09 21:10:46.413391: Pseudo dice 
[0.8465, 0.886, 0.832, 0.4833, 0.4216, 0.4191, 0.8807] +2026-04-09 21:10:46.416453: Epoch time: 103.78 s +2026-04-09 21:10:47.527415: +2026-04-09 21:10:47.529692: Epoch 991 +2026-04-09 21:10:47.532280: Current learning rate: 0.00014 +2026-04-09 21:12:30.877185: train_loss -0.6256 +2026-04-09 21:12:30.885774: val_loss -0.5921 +2026-04-09 21:12:30.889437: Pseudo dice [0.8278, 0.5391, 0.8689, 0.8164, 0.5686, 0.9154, 0.77] +2026-04-09 21:12:30.893359: Epoch time: 103.35 s +2026-04-09 21:12:30.899108: Yayy! New best EMA pseudo Dice: 0.6593 +2026-04-09 21:12:34.067132: +2026-04-09 21:12:34.070655: Epoch 992 +2026-04-09 21:12:34.073154: Current learning rate: 0.00013 +2026-04-09 21:14:17.202093: train_loss -0.636 +2026-04-09 21:14:17.210701: val_loss -0.565 +2026-04-09 21:14:17.214300: Pseudo dice [0.6371, 0.9183, 0.7685, 0.6239, 0.4299, 0.6134, 0.8619] +2026-04-09 21:14:17.217557: Epoch time: 103.14 s +2026-04-09 21:14:17.221269: Yayy! New best EMA pseudo Dice: 0.6627 +2026-04-09 21:14:20.365522: +2026-04-09 21:14:20.367851: Epoch 993 +2026-04-09 21:14:20.369832: Current learning rate: 0.00011 +2026-04-09 21:16:03.340561: train_loss -0.6481 +2026-04-09 21:16:03.359506: val_loss -0.5484 +2026-04-09 21:16:03.363084: Pseudo dice [0.5791, 0.7345, 0.7699, 0.2215, 0.4571, 0.2256, 0.8255] +2026-04-09 21:16:03.366024: Epoch time: 102.98 s +2026-04-09 21:16:04.542553: +2026-04-09 21:16:04.544761: Epoch 994 +2026-04-09 21:16:04.547387: Current learning rate: 0.0001 +2026-04-09 21:17:47.696635: train_loss -0.634 +2026-04-09 21:17:47.718408: val_loss -0.5439 +2026-04-09 21:17:47.721125: Pseudo dice [0.6376, 0.913, 0.8406, 0.4043, 0.4479, 0.11, 0.8813] +2026-04-09 21:17:47.724412: Epoch time: 103.16 s +2026-04-09 21:17:48.841758: +2026-04-09 21:17:48.844995: Epoch 995 +2026-04-09 21:17:48.849312: Current learning rate: 8e-05 +2026-04-09 21:19:32.314890: train_loss -0.6427 +2026-04-09 21:19:32.325011: val_loss -0.5484 +2026-04-09 21:19:32.328569: Pseudo dice [0.4382, 0.4974, 0.7651, 
0.1404, 0.5135, 0.8302, 0.8308] +2026-04-09 21:19:32.331451: Epoch time: 103.48 s +2026-04-09 21:19:33.429555: +2026-04-09 21:19:33.432196: Epoch 996 +2026-04-09 21:19:33.437201: Current learning rate: 7e-05 +2026-04-09 21:21:16.676303: train_loss -0.6384 +2026-04-09 21:21:16.685986: val_loss -0.568 +2026-04-09 21:21:16.688708: Pseudo dice [0.5688, 0.8995, 0.8263, 0.0864, 0.6504, 0.7853, 0.8204] +2026-04-09 21:21:16.692865: Epoch time: 103.25 s +2026-04-09 21:21:17.792174: +2026-04-09 21:21:17.795375: Epoch 997 +2026-04-09 21:21:17.798049: Current learning rate: 5e-05 +2026-04-09 21:23:02.662734: train_loss -0.6293 +2026-04-09 21:23:02.672781: val_loss -0.5911 +2026-04-09 21:23:02.676110: Pseudo dice [0.6629, 0.6885, 0.7759, 0.2521, 0.2883, 0.7049, 0.6805] +2026-04-09 21:23:02.679675: Epoch time: 104.87 s +2026-04-09 21:23:03.802102: +2026-04-09 21:23:03.807558: Epoch 998 +2026-04-09 21:23:03.810743: Current learning rate: 4e-05 +2026-04-09 21:24:48.649456: train_loss -0.6352 +2026-04-09 21:24:48.662414: val_loss -0.5552 +2026-04-09 21:24:48.667178: Pseudo dice [0.6839, 0.9045, 0.8531, 0.2032, 0.1911, 0.6764, 0.872] +2026-04-09 21:24:48.672088: Epoch time: 104.85 s +2026-04-09 21:24:49.800465: +2026-04-09 21:24:49.806427: Epoch 999 +2026-04-09 21:24:49.812932: Current learning rate: 2e-05 +2026-04-09 21:26:33.439560: train_loss -0.6408 +2026-04-09 21:26:33.448944: val_loss -0.5581 +2026-04-09 21:26:33.451956: Pseudo dice [0.8891, 0.8964, 0.666, 0.4295, 0.5459, 0.6542, 0.8178] +2026-04-09 21:26:33.454604: Epoch time: 103.64 s +2026-04-09 21:26:36.364035: Training done. +2026-04-09 21:26:36.672461: Using splits from existing split file: /data/houbb/nnunetv2/nnUNet_preprocessed/Dataset201_MSWAL/splits_final.json +2026-04-09 21:26:36.678355: The split file contains 5 splits. +2026-04-09 21:26:36.680341: Desired fold for training: 4 +2026-04-09 21:26:36.682164: This split has 388 training and 96 validation cases. 
+2026-04-09 21:26:36.684658: predicting MSWAL_0001 +2026-04-09 21:26:36.693962: MSWAL_0001, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:27:28.973136: predicting MSWAL_0011 +2026-04-09 21:27:28.988450: MSWAL_0011, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:27:41.497123: predicting MSWAL_0021 +2026-04-09 21:27:41.521408: MSWAL_0021, shape torch.Size([1, 181, 507, 507]), rank 0 +2026-04-09 21:27:54.109402: predicting MSWAL_0035 +2026-04-09 21:27:54.125368: MSWAL_0035, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:28:06.687915: predicting MSWAL_0042 +2026-04-09 21:28:06.706474: MSWAL_0042, shape torch.Size([1, 157, 507, 507]), rank 0 +2026-04-09 21:28:15.341980: predicting MSWAL_0051 +2026-04-09 21:28:15.361985: MSWAL_0051, shape torch.Size([1, 177, 541, 541]), rank 0 +2026-04-09 21:28:37.496470: predicting MSWAL_0054 +2026-04-09 21:28:37.507189: MSWAL_0054, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:28:49.851490: predicting MSWAL_0063 +2026-04-09 21:28:49.874765: MSWAL_0063, shape torch.Size([1, 157, 520, 520]), rank 0 +2026-04-09 21:29:04.758172: predicting MSWAL_0088 +2026-04-09 21:29:04.783154: MSWAL_0088, shape torch.Size([1, 177, 535, 535]), rank 0 +2026-04-09 21:29:26.910442: predicting MSWAL_0089 +2026-04-09 21:29:26.931838: MSWAL_0089, shape torch.Size([1, 185, 561, 561]), rank 0 +2026-04-09 21:29:49.048677: predicting MSWAL_0094 +2026-04-09 21:29:49.071024: MSWAL_0094, shape torch.Size([1, 157, 539, 539]), rank 0 +2026-04-09 21:30:03.941088: predicting MSWAL_0095 +2026-04-09 21:30:03.956175: MSWAL_0095, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:30:16.378134: predicting MSWAL_0096 +2026-04-09 21:30:16.393987: MSWAL_0096, shape torch.Size([1, 221, 480, 480]), rank 0 +2026-04-09 21:30:28.935401: predicting MSWAL_0106 +2026-04-09 21:30:28.951834: MSWAL_0106, shape torch.Size([1, 181, 507, 507]), rank 0 +2026-04-09 21:30:41.403522: predicting MSWAL_0109 +2026-04-09 21:30:41.417283: 
MSWAL_0109, shape torch.Size([1, 285, 611, 611]), rank 0 +2026-04-09 21:31:18.262616: predicting MSWAL_0111 +2026-04-09 21:31:18.287822: MSWAL_0111, shape torch.Size([1, 313, 583, 583]), rank 0 +2026-04-09 21:31:55.547673: predicting MSWAL_0112 +2026-04-09 21:31:55.577725: MSWAL_0112, shape torch.Size([1, 201, 540, 540]), rank 0 +2026-04-09 21:32:18.252578: predicting MSWAL_0117 +2026-04-09 21:32:18.267526: MSWAL_0117, shape torch.Size([1, 369, 541, 541]), rank 0 +2026-04-09 21:33:02.320040: predicting MSWAL_0119 +2026-04-09 21:33:02.345522: MSWAL_0119, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:33:15.093752: predicting MSWAL_0120 +2026-04-09 21:33:15.112321: MSWAL_0120, shape torch.Size([1, 177, 537, 537]), rank 0 +2026-04-09 21:33:37.456390: predicting MSWAL_0122 +2026-04-09 21:33:37.476980: MSWAL_0122, shape torch.Size([1, 313, 508, 508]), rank 0 +2026-04-09 21:33:58.482970: predicting MSWAL_0132 +2026-04-09 21:33:58.509639: MSWAL_0132, shape torch.Size([1, 281, 556, 556]), rank 0 +2026-04-09 21:34:35.422554: predicting MSWAL_0150 +2026-04-09 21:34:35.443987: MSWAL_0150, shape torch.Size([1, 288, 549, 549]), rank 0 +2026-04-09 21:35:12.469319: predicting MSWAL_0152 +2026-04-09 21:35:12.491881: MSWAL_0152, shape torch.Size([1, 305, 533, 533]), rank 0 +2026-04-09 21:35:49.446681: predicting MSWAL_0157 +2026-04-09 21:35:49.466307: MSWAL_0157, shape torch.Size([1, 514, 509, 509]), rank 0 +2026-04-09 21:36:27.178243: predicting MSWAL_0171 +2026-04-09 21:36:27.199450: MSWAL_0171, shape torch.Size([1, 331, 561, 561]), rank 0 +2026-04-09 21:37:05.024057: predicting MSWAL_0172 +2026-04-09 21:37:05.049643: MSWAL_0172, shape torch.Size([1, 358, 608, 608]), rank 0 +2026-04-09 21:37:49.862555: predicting MSWAL_0177 +2026-04-09 21:37:49.895143: MSWAL_0177, shape torch.Size([1, 454, 531, 531]), rank 0 +2026-04-09 21:38:48.843416: predicting MSWAL_0183 +2026-04-09 21:38:48.872970: MSWAL_0183, shape torch.Size([1, 174, 519, 519]), rank 0 +2026-04-09 
21:39:11.248702: predicting MSWAL_0187 +2026-04-09 21:39:11.273496: MSWAL_0187, shape torch.Size([1, 421, 613, 613]), rank 0 +2026-04-09 21:40:02.698442: predicting MSWAL_0195 +2026-04-09 21:40:02.725714: MSWAL_0195, shape torch.Size([1, 301, 540, 540]), rank 0 +2026-04-09 21:40:39.634486: predicting MSWAL_0203 +2026-04-09 21:40:39.736312: MSWAL_0203, shape torch.Size([1, 458, 572, 572]), rank 0 +2026-04-09 21:41:38.850350: predicting MSWAL_0204 +2026-04-09 21:41:38.873744: MSWAL_0204, shape torch.Size([1, 248, 605, 605]), rank 0 +2026-04-09 21:42:08.624826: predicting MSWAL_0209 +2026-04-09 21:42:08.653288: MSWAL_0209, shape torch.Size([1, 342, 508, 508]), rank 0 +2026-04-09 21:42:33.957382: predicting MSWAL_0224 +2026-04-09 21:42:33.982850: MSWAL_0224, shape torch.Size([1, 201, 507, 507]), rank 0 +2026-04-09 21:42:47.017702: predicting MSWAL_0242 +2026-04-09 21:42:47.032354: MSWAL_0242, shape torch.Size([1, 296, 571, 571]), rank 0 +2026-04-09 21:43:23.815895: predicting MSWAL_0248 +2026-04-09 21:43:23.847338: MSWAL_0248, shape torch.Size([1, 518, 617, 617]), rank 0 +2026-04-09 21:44:30.505998: predicting MSWAL_0251 +2026-04-09 21:44:30.550837: MSWAL_0251, shape torch.Size([1, 340, 512, 512]), rank 0 +2026-04-09 21:44:56.120260: predicting MSWAL_0257 +2026-04-09 21:44:56.156106: MSWAL_0257, shape torch.Size([1, 310, 496, 496]), rank 0 +2026-04-09 21:45:17.410247: predicting MSWAL_0264 +2026-04-09 21:45:17.425241: MSWAL_0264, shape torch.Size([1, 196, 532, 532]), rank 0 +2026-04-09 21:45:39.572614: predicting MSWAL_0271 +2026-04-09 21:45:39.596562: MSWAL_0271, shape torch.Size([1, 350, 581, 581]), rank 0 +2026-04-09 21:46:24.226007: predicting MSWAL_0284 +2026-04-09 21:46:24.259162: MSWAL_0284, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:46:37.196820: predicting MSWAL_0297 +2026-04-09 21:46:37.215880: MSWAL_0297, shape torch.Size([1, 301, 507, 507]), rank 0 +2026-04-09 21:46:58.480060: predicting MSWAL_0302 +2026-04-09 21:46:58.504222: MSWAL_0302, 
shape torch.Size([1, 285, 507, 507]), rank 0 +2026-04-09 21:47:19.514647: predicting MSWAL_0314 +2026-04-09 21:47:19.530952: MSWAL_0314, shape torch.Size([1, 189, 532, 532]), rank 0 +2026-04-09 21:47:41.659442: predicting MSWAL_0327 +2026-04-09 21:47:41.678623: MSWAL_0327, shape torch.Size([1, 197, 507, 507]), rank 0 +2026-04-09 21:47:54.737533: predicting MSWAL_0328 +2026-04-09 21:47:54.759011: MSWAL_0328, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:48:07.414260: predicting MSWAL_0333 +2026-04-09 21:48:07.430005: MSWAL_0333, shape torch.Size([1, 237, 507, 507]), rank 0 +2026-04-09 21:48:24.172632: predicting MSWAL_0337 +2026-04-09 21:48:24.188541: MSWAL_0337, shape torch.Size([1, 301, 507, 507]), rank 0 +2026-04-09 21:48:45.094738: predicting MSWAL_0356 +2026-04-09 21:48:45.124175: MSWAL_0356, shape torch.Size([1, 165, 507, 507]), rank 0 +2026-04-09 21:48:53.761365: predicting MSWAL_0357 +2026-04-09 21:48:53.772501: MSWAL_0357, shape torch.Size([1, 361, 585, 585]), rank 0 +2026-04-09 21:49:38.438107: predicting MSWAL_0362 +2026-04-09 21:49:38.464963: MSWAL_0362, shape torch.Size([1, 157, 507, 507]), rank 0 +2026-04-09 21:49:47.143490: predicting MSWAL_0363 +2026-04-09 21:49:47.167236: MSWAL_0363, shape torch.Size([1, 277, 507, 507]), rank 0 +2026-04-09 21:50:04.135977: predicting MSWAL_0373 +2026-04-09 21:50:04.159233: MSWAL_0373, shape torch.Size([1, 377, 547, 547]), rank 0 +2026-04-09 21:50:48.289243: predicting MSWAL_0378 +2026-04-09 21:50:48.310720: MSWAL_0378, shape torch.Size([1, 317, 567, 567]), rank 0 +2026-04-09 21:51:25.850053: predicting MSWAL_0380 +2026-04-09 21:51:25.878366: MSWAL_0380, shape torch.Size([1, 247, 507, 507]), rank 0 +2026-04-09 21:51:42.922785: predicting MSWAL_0388 +2026-04-09 21:51:42.938191: MSWAL_0388, shape torch.Size([1, 325, 651, 651]), rank 0 +2026-04-09 21:52:40.304589: predicting MSWAL_0403 +2026-04-09 21:52:40.345931: MSWAL_0403, shape torch.Size([1, 385, 636, 636]), rank 0 +2026-04-09 21:53:25.002580: 
predicting MSWAL_0407 +2026-04-09 21:53:25.026777: MSWAL_0407, shape torch.Size([1, 189, 532, 532]), rank 0 +2026-04-09 21:53:47.307758: predicting MSWAL_0415 +2026-04-09 21:53:47.321504: MSWAL_0415, shape torch.Size([1, 305, 547, 547]), rank 0 +2026-04-09 21:54:24.105212: predicting MSWAL_0417 +2026-04-09 21:54:24.124425: MSWAL_0417, shape torch.Size([1, 117, 575, 575]), rank 0 +2026-04-09 21:54:38.908684: predicting MSWAL_0418 +2026-04-09 21:54:38.931393: MSWAL_0418, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:54:51.620894: predicting MSWAL_0425 +2026-04-09 21:54:51.639872: MSWAL_0425, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:55:04.156545: predicting MSWAL_0428 +2026-04-09 21:55:04.168082: MSWAL_0428, shape torch.Size([1, 325, 572, 572]), rank 0 +2026-04-09 21:55:41.263255: predicting MSWAL_0434 +2026-04-09 21:55:41.283232: MSWAL_0434, shape torch.Size([1, 341, 507, 507]), rank 0 +2026-04-09 21:56:06.650651: predicting MSWAL_0442 +2026-04-09 21:56:06.666348: MSWAL_0442, shape torch.Size([1, 157, 507, 507]), rank 0 +2026-04-09 21:56:15.495656: predicting MSWAL_0457 +2026-04-09 21:56:15.518781: MSWAL_0457, shape torch.Size([1, 197, 507, 507]), rank 0 +2026-04-09 21:56:28.281978: predicting MSWAL_0480 +2026-04-09 21:56:28.306822: MSWAL_0480, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:56:40.815242: predicting MSWAL_0487 +2026-04-09 21:56:40.829557: MSWAL_0487, shape torch.Size([1, 217, 507, 507]), rank 0 +2026-04-09 21:56:53.663157: predicting MSWAL_0493 +2026-04-09 21:56:53.676200: MSWAL_0493, shape torch.Size([1, 237, 507, 507]), rank 0 +2026-04-09 21:57:10.460514: predicting MSWAL_0495 +2026-04-09 21:57:10.488160: MSWAL_0495, shape torch.Size([1, 208, 560, 560]), rank 0 +2026-04-09 21:57:32.577605: predicting MSWAL_0500 +2026-04-09 21:57:32.593567: MSWAL_0500, shape torch.Size([1, 157, 455, 455]), rank 0 +2026-04-09 21:57:41.207198: predicting MSWAL_0508 +2026-04-09 21:57:41.218024: MSWAL_0508, shape 
torch.Size([1, 177, 517, 517]), rank 0 +2026-04-09 21:58:03.187526: predicting MSWAL_0516 +2026-04-09 21:58:03.233262: MSWAL_0516, shape torch.Size([1, 165, 464, 464]), rank 0 +2026-04-09 21:58:11.605818: predicting MSWAL_0535 +2026-04-09 21:58:11.615520: MSWAL_0535, shape torch.Size([1, 179, 576, 576]), rank 0 +2026-04-09 21:58:33.656862: predicting MSWAL_0536 +2026-04-09 21:58:33.681474: MSWAL_0536, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:58:46.400971: predicting MSWAL_0538 +2026-04-09 21:58:46.415055: MSWAL_0538, shape torch.Size([1, 137, 529, 529]), rank 0 +2026-04-09 21:59:01.263252: predicting MSWAL_0542 +2026-04-09 21:59:01.273232: MSWAL_0542, shape torch.Size([1, 177, 507, 507]), rank 0 +2026-04-09 21:59:13.989064: predicting MSWAL_0548 +2026-04-09 21:59:14.005081: MSWAL_0548, shape torch.Size([1, 153, 531, 531]), rank 0 +2026-04-09 21:59:28.875810: predicting MSWAL_0551 +2026-04-09 21:59:28.897712: MSWAL_0551, shape torch.Size([1, 249, 507, 507]), rank 0 +2026-04-09 21:59:45.830776: predicting MSWAL_0556 +2026-04-09 21:59:45.849088: MSWAL_0556, shape torch.Size([1, 409, 507, 507]), rank 0 +2026-04-09 22:00:15.227672: predicting MSWAL_0558 +2026-04-09 22:00:15.260394: MSWAL_0558, shape torch.Size([1, 248, 480, 480]), rank 0 +2026-04-09 22:00:31.902051: predicting MSWAL_0571 +2026-04-09 22:00:31.912815: MSWAL_0571, shape torch.Size([1, 254, 560, 560]), rank 0 +2026-04-09 22:01:01.586622: predicting MSWAL_0600 +2026-04-09 22:01:01.604472: MSWAL_0600, shape torch.Size([1, 145, 507, 507]), rank 0 +2026-04-09 22:01:10.272077: predicting MSWAL_0608 +2026-04-09 22:01:10.283502: MSWAL_0608, shape torch.Size([1, 328, 589, 589]), rank 0 +2026-04-09 22:01:47.307135: predicting MSWAL_0612 +2026-04-09 22:01:47.334061: MSWAL_0612, shape torch.Size([1, 344, 573, 573]), rank 0 +2026-04-09 22:02:31.438058: predicting MSWAL_0625 +2026-04-09 22:02:31.465140: MSWAL_0625, shape torch.Size([1, 316, 564, 564]), rank 0 +2026-04-09 22:03:08.331584: predicting 
MSWAL_0626 +2026-04-09 22:03:08.352384: MSWAL_0626, shape torch.Size([1, 313, 560, 560]), rank 0 +2026-04-09 22:03:45.105699: predicting MSWAL_0627 +2026-04-09 22:03:45.125675: MSWAL_0627, shape torch.Size([1, 218, 480, 480]), rank 0 +2026-04-09 22:03:57.979858: predicting MSWAL_0632 +2026-04-09 22:03:58.003085: MSWAL_0632, shape torch.Size([1, 343, 636, 636]), rank 0 +2026-04-09 22:04:42.584530: predicting MSWAL_0635 +2026-04-09 22:04:42.603676: MSWAL_0635, shape torch.Size([1, 344, 563, 563]), rank 0 +2026-04-09 22:05:26.739429: predicting MSWAL_0658 +2026-04-09 22:05:26.763213: MSWAL_0658, shape torch.Size([1, 139, 496, 496]), rank 0 +2026-04-09 22:05:35.268435: predicting MSWAL_0682 +2026-04-09 22:05:35.279768: MSWAL_0682, shape torch.Size([1, 316, 572, 572]), rank 0 +2026-04-09 22:06:12.137448: predicting MSWAL_0685 +2026-04-09 22:06:12.150775: MSWAL_0685, shape torch.Size([1, 343, 552, 552]), rank 0 +2026-04-09 22:06:56.819995: predicting MSWAL_0690 +2026-04-09 22:06:56.842103: MSWAL_0690, shape torch.Size([1, 523, 667, 667]), rank 0 +2026-04-09 22:08:39.738962: predicting MSWAL_0693 +2026-04-09 22:08:39.777649: MSWAL_0693, shape torch.Size([1, 288, 476, 476]), rank 0 +2026-04-09 22:10:41.335332: Validation complete +2026-04-09 22:10:41.338709: Mean Validation Dice: 0.4241852859962259 diff --git a/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/plans.json b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/plans.json new file mode 100644 index 0000000000000000000000000000000000000000..d7c29249f212bb1ee27bfa432ef6373c1402c8e8 --- /dev/null +++ b/Dataset201_MSWAL/nnUNetTrainer__nnUNetResEncUNetLPlans__3d_fullres/plans.json @@ -0,0 +1,558 @@ +{ + "dataset_name": "Dataset201_MSWAL", + "plans_name": "nnUNetResEncUNetLPlans", + "original_median_spacing_after_transp": [ + 1.25, + 0.75, + 0.75 + ], + "original_median_shape_after_transp": [ + 261, + 512, + 512 + ], + "image_reader_writer": "SimpleITKIO", + "transpose_forward": [ + 
0, + 1, + 2 + ], + "transpose_backward": [ + 0, + 1, + 2 + ], + "configurations": { + "2d": { + "data_identifier": "nnUNetPlans_2d", + "preprocessor_name": "DefaultPreprocessor", + "batch_size": 35, + "patch_size": [ + 512, + 512 + ], + "median_image_size_in_voxels": [ + 512.0, + 512.0 + ], + "spacing": [ + 0.75, + 0.75 + ], + "normalization_schemes": [ + "CTNormalization" + ], + "use_mask_for_norm": [ + false + ], + "resampling_fn_data": "resample_data_or_seg_to_shape", + "resampling_fn_seg": "resample_data_or_seg_to_shape", + "resampling_fn_data_kwargs": { + "is_seg": false, + "order": 3, + "order_z": 0, + "force_separate_z": null + }, + "resampling_fn_seg_kwargs": { + "is_seg": true, + "order": 1, + "order_z": 0, + "force_separate_z": null + }, + "resampling_fn_probabilities": "resample_data_or_seg_to_shape", + "resampling_fn_probabilities_kwargs": { + "is_seg": false, + "order": 1, + "order_z": 0, + "force_separate_z": null + }, + "architecture": { + "network_class_name": "dynamic_network_architectures.architectures.unet.ResidualEncoderUNet", + "arch_kwargs": { + "n_stages": 8, + "features_per_stage": [ + 32, + 64, + 128, + 256, + 512, + 512, + 512, + 512 + ], + "conv_op": "torch.nn.modules.conv.Conv2d", + "kernel_sizes": [ + [ + 3, + 3 + ], + [ + 3, + 3 + ], + [ + 3, + 3 + ], + [ + 3, + 3 + ], + [ + 3, + 3 + ], + [ + 3, + 3 + ], + [ + 3, + 3 + ], + [ + 3, + 3 + ] + ], + "strides": [ + [ + 1, + 1 + ], + [ + 2, + 2 + ], + [ + 2, + 2 + ], + [ + 2, + 2 + ], + [ + 2, + 2 + ], + [ + 2, + 2 + ], + [ + 2, + 2 + ], + [ + 2, + 2 + ] + ], + "n_blocks_per_stage": [ + 1, + 3, + 4, + 6, + 6, + 6, + 6, + 6 + ], + "n_conv_per_stage_decoder": [ + 1, + 1, + 1, + 1, + 1, + 1, + 1 + ], + "conv_bias": true, + "norm_op": "torch.nn.modules.instancenorm.InstanceNorm2d", + "norm_op_kwargs": { + "eps": 1e-05, + "affine": true + }, + "dropout_op": null, + "dropout_op_kwargs": null, + "nonlin": "torch.nn.LeakyReLU", + "nonlin_kwargs": { + "inplace": true + } + }, + "_kw_requires_import": 
[ + "conv_op", + "norm_op", + "dropout_op", + "nonlin" + ] + }, + "batch_dice": true + }, + "3d_lowres": { + "data_identifier": "nnUNetResEncUNetLPlans_3d_lowres", + "preprocessor_name": "DefaultPreprocessor", + "batch_size": 2, + "patch_size": [ + 112, + 256, + 256 + ], + "median_image_size_in_voxels": [ + 190, + 381, + 381 + ], + "spacing": [ + 1.6798954741801528, + 1.0079372845080916, + 1.0079372845080916 + ], + "normalization_schemes": [ + "CTNormalization" + ], + "use_mask_for_norm": [ + false + ], + "resampling_fn_data": "resample_data_or_seg_to_shape", + "resampling_fn_seg": "resample_data_or_seg_to_shape", + "resampling_fn_data_kwargs": { + "is_seg": false, + "order": 3, + "order_z": 0, + "force_separate_z": null + }, + "resampling_fn_seg_kwargs": { + "is_seg": true, + "order": 1, + "order_z": 0, + "force_separate_z": null + }, + "resampling_fn_probabilities": "resample_data_or_seg_to_shape", + "resampling_fn_probabilities_kwargs": { + "is_seg": false, + "order": 1, + "order_z": 0, + "force_separate_z": null + }, + "architecture": { + "network_class_name": "dynamic_network_architectures.architectures.unet.ResidualEncoderUNet", + "arch_kwargs": { + "n_stages": 7, + "features_per_stage": [ + 32, + 64, + 128, + 256, + 320, + 320, + 320 + ], + "conv_op": "torch.nn.modules.conv.Conv3d", + "kernel_sizes": [ + [ + 3, + 3, + 3 + ], + [ + 3, + 3, + 3 + ], + [ + 3, + 3, + 3 + ], + [ + 3, + 3, + 3 + ], + [ + 3, + 3, + 3 + ], + [ + 3, + 3, + 3 + ], + [ + 3, + 3, + 3 + ] + ], + "strides": [ + [ + 1, + 1, + 1 + ], + [ + 2, + 2, + 2 + ], + [ + 2, + 2, + 2 + ], + [ + 2, + 2, + 2 + ], + [ + 2, + 2, + 2 + ], + [ + 1, + 2, + 2 + ], + [ + 1, + 2, + 2 + ] + ], + "n_blocks_per_stage": [ + 1, + 3, + 4, + 6, + 6, + 6, + 6 + ], + "n_conv_per_stage_decoder": [ + 1, + 1, + 1, + 1, + 1, + 1 + ], + "conv_bias": true, + "norm_op": "torch.nn.modules.instancenorm.InstanceNorm3d", + "norm_op_kwargs": { + "eps": 1e-05, + "affine": true + }, + "dropout_op": null, + "dropout_op_kwargs": null, 
+ "nonlin": "torch.nn.LeakyReLU", + "nonlin_kwargs": { + "inplace": true + } + }, + "_kw_requires_import": [ + "conv_op", + "norm_op", + "dropout_op", + "nonlin" + ] + }, + "batch_dice": false, + "next_stage": "3d_cascade_fullres" + }, + "3d_fullres": { + "data_identifier": "nnUNetPlans_3d_fullres", + "preprocessor_name": "DefaultPreprocessor", + "batch_size": 2, + "patch_size": [ + 112, + 256, + 256 + ], + "median_image_size_in_voxels": [ + 255.5, + 512.0, + 512.0 + ], + "spacing": [ + 1.25, + 0.75, + 0.75 + ], + "normalization_schemes": [ + "CTNormalization" + ], + "use_mask_for_norm": [ + false + ], + "resampling_fn_data": "resample_data_or_seg_to_shape", + "resampling_fn_seg": "resample_data_or_seg_to_shape", + "resampling_fn_data_kwargs": { + "is_seg": false, + "order": 3, + "order_z": 0, + "force_separate_z": null + }, + "resampling_fn_seg_kwargs": { + "is_seg": true, + "order": 1, + "order_z": 0, + "force_separate_z": null + }, + "resampling_fn_probabilities": "resample_data_or_seg_to_shape", + "resampling_fn_probabilities_kwargs": { + "is_seg": false, + "order": 1, + "order_z": 0, + "force_separate_z": null + }, + "architecture": { + "network_class_name": "dynamic_network_architectures.architectures.unet.ResidualEncoderUNet", + "arch_kwargs": { + "n_stages": 7, + "features_per_stage": [ + 32, + 64, + 128, + 256, + 320, + 320, + 320 + ], + "conv_op": "torch.nn.modules.conv.Conv3d", + "kernel_sizes": [ + [ + 3, + 3, + 3 + ], + [ + 3, + 3, + 3 + ], + [ + 3, + 3, + 3 + ], + [ + 3, + 3, + 3 + ], + [ + 3, + 3, + 3 + ], + [ + 3, + 3, + 3 + ], + [ + 3, + 3, + 3 + ] + ], + "strides": [ + [ + 1, + 1, + 1 + ], + [ + 2, + 2, + 2 + ], + [ + 2, + 2, + 2 + ], + [ + 2, + 2, + 2 + ], + [ + 2, + 2, + 2 + ], + [ + 1, + 2, + 2 + ], + [ + 1, + 2, + 2 + ] + ], + "n_blocks_per_stage": [ + 1, + 3, + 4, + 6, + 6, + 6, + 6 + ], + "n_conv_per_stage_decoder": [ + 1, + 1, + 1, + 1, + 1, + 1 + ], + "conv_bias": true, + "norm_op": "torch.nn.modules.instancenorm.InstanceNorm3d", + 
"norm_op_kwargs": { + "eps": 1e-05, + "affine": true + }, + "dropout_op": null, + "dropout_op_kwargs": null, + "nonlin": "torch.nn.LeakyReLU", + "nonlin_kwargs": { + "inplace": true + } + }, + "_kw_requires_import": [ + "conv_op", + "norm_op", + "dropout_op", + "nonlin" + ] + }, + "batch_dice": true + }, + "3d_cascade_fullres": { + "inherits_from": "3d_fullres", + "previous_stage": "3d_lowres" + } + }, + "experiment_planner_used": "nnUNetPlannerResEncL", + "label_manager": "LabelManager", + "foreground_intensity_properties_per_channel": { + "0": { + "max": 3071.0, + "mean": 71.96339416503906, + "median": 45.0, + "min": -932.0, + "percentile_00_5": -93.0, + "percentile_99_5": 1052.0, + "std": 141.6230926513672 + } + } +} \ No newline at end of file