yevvonlim committed on
Commit
3257a18
·
verified ·
1 Parent(s): 471545e

End of training

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. checkpoint-31500/optimizer.bin +3 -0
  3. checkpoint-31500/querytransformer/config.json +69 -0
  4. checkpoint-31500/querytransformer/model.safetensors +3 -0
  5. checkpoint-31500/random_states_0.pkl +3 -0
  6. checkpoint-31500/scheduler.bin +3 -0
  7. checkpoint-32000/optimizer.bin +3 -0
  8. checkpoint-32000/querytransformer/config.json +69 -0
  9. checkpoint-32000/querytransformer/model.safetensors +3 -0
  10. checkpoint-32000/random_states_0.pkl +3 -0
  11. checkpoint-32000/scheduler.bin +3 -0
  12. config.json +40 -6
  13. model.safetensors +2 -2
  14. validation_images/step_10000_val_cross_0.mp4 +0 -0
  15. validation_images/step_10000_val_recon_0.mp4 +0 -0
  16. validation_images/step_1000_val_cross_0.mp4 +0 -0
  17. validation_images/step_1000_val_recon_0.mp4 +0 -0
  18. validation_images/step_10500_val_cross_0.mp4 +0 -0
  19. validation_images/step_10500_val_recon_0.mp4 +0 -0
  20. validation_images/step_11000_val_cross_0.mp4 +0 -0
  21. validation_images/step_11000_val_recon_0.mp4 +0 -0
  22. validation_images/step_11500_val_cross_0.mp4 +0 -0
  23. validation_images/step_11500_val_recon_0.mp4 +0 -0
  24. validation_images/step_12000_val_cross_0.mp4 +0 -0
  25. validation_images/step_12000_val_recon_0.mp4 +0 -0
  26. validation_images/step_12500_val_cross_0.mp4 +0 -0
  27. validation_images/step_12500_val_recon_0.mp4 +0 -0
  28. validation_images/step_13000_val_cross_0.mp4 +0 -0
  29. validation_images/step_13000_val_recon_0.mp4 +0 -0
  30. validation_images/step_13500_val_cross_0.mp4 +0 -0
  31. validation_images/step_13500_val_recon_0.mp4 +0 -0
  32. validation_images/step_14000_val_cross_0.mp4 +0 -0
  33. validation_images/step_14000_val_recon_0.mp4 +0 -0
  34. validation_images/step_14500_val_cross_0.mp4 +0 -0
  35. validation_images/step_14500_val_recon_0.mp4 +0 -0
  36. validation_images/step_15000_val_cross_0.mp4 +0 -0
  37. validation_images/step_15000_val_recon_0.mp4 +0 -0
  38. validation_images/step_1500_val_cross_0.mp4 +0 -0
  39. validation_images/step_1500_val_recon_0.mp4 +0 -0
  40. validation_images/step_15500_val_cross_0.mp4 +0 -0
  41. validation_images/step_15500_val_recon_0.mp4 +0 -0
  42. validation_images/step_16000_val_cross_0.mp4 +0 -0
  43. validation_images/step_16000_val_recon_0.mp4 +0 -0
  44. validation_images/step_16500_val_cross_0.mp4 +0 -0
  45. validation_images/step_16500_val_recon_0.mp4 +0 -0
  46. validation_images/step_17000_val_cross_0.mp4 +0 -0
  47. validation_images/step_17000_val_recon_0.mp4 +0 -0
  48. validation_images/step_17500_val_cross_0.mp4 +0 -0
  49. validation_images/step_17500_val_recon_0.mp4 +0 -0
  50. validation_images/step_18000_val_cross_0.mp4 +0 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ validation_images/step_1_val_cross_0.mp4 filter=lfs diff=lfs merge=lfs -text
checkpoint-31500/optimizer.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:117aee311bcb13a8651a64f65418f968c5ee0200c51f066a58382fff82309d24
3
+ size 126654
checkpoint-31500/querytransformer/config.json ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "QueryTransformerEmbedding"
4
+ ],
5
+ "attention_probs_dropout_prob": 0.0,
6
+ "hidden_act": "gelu_fast",
7
+ "hidden_dropout_prob": 0.0,
8
+ "hidden_size": 768,
9
+ "image_size": [
10
+ 72,
11
+ 128
12
+ ],
13
+ "initializer_range": 0.02,
14
+ "intermediate_size": 3072,
15
+ "layer_norm_eps": 1e-06,
16
+ "model_type": "vivit",
17
+ "num_attention_heads": 8,
18
+ "num_channels": 8,
19
+ "num_frames": 24,
20
+ "num_hidden_layers": 6,
21
+ "qkv_bias": true,
22
+ "query_dims": [
23
+ 160,
24
+ 320,
25
+ 640
26
+ ],
27
+ "torch_dtype": "float32",
28
+ "transformers_version": "4.47.1",
29
+ "tubelet_size": [
30
+ 2,
31
+ 9,
32
+ 16
33
+ ],
34
+ "unet_attention_names": {
35
+ "down_blocks_0_attentions_0_temporal_transformer_blocks_0_attn1": 320,
36
+ "down_blocks_0_attentions_0_transformer_blocks_0_attn1": 320,
37
+ "down_blocks_0_attentions_1_temporal_transformer_blocks_0_attn1": 320,
38
+ "down_blocks_0_attentions_1_transformer_blocks_0_attn1": 320,
39
+ "down_blocks_1_attentions_0_temporal_transformer_blocks_0_attn1": 640,
40
+ "down_blocks_1_attentions_0_transformer_blocks_0_attn1": 640,
41
+ "down_blocks_1_attentions_1_temporal_transformer_blocks_0_attn1": 640,
42
+ "down_blocks_1_attentions_1_transformer_blocks_0_attn1": 640,
43
+ "down_blocks_2_attentions_0_temporal_transformer_blocks_0_attn1": 1280,
44
+ "down_blocks_2_attentions_0_transformer_blocks_0_attn1": 1280,
45
+ "down_blocks_2_attentions_1_temporal_transformer_blocks_0_attn1": 1280,
46
+ "down_blocks_2_attentions_1_transformer_blocks_0_attn1": 1280,
47
+ "mid_block_attentions_0_temporal_transformer_blocks_0_attn1": 1280,
48
+ "mid_block_attentions_0_transformer_blocks_0_attn1": 1280,
49
+ "up_blocks_1_attentions_0_temporal_transformer_blocks_0_attn1": 1280,
50
+ "up_blocks_1_attentions_0_transformer_blocks_0_attn1": 1280,
51
+ "up_blocks_1_attentions_1_temporal_transformer_blocks_0_attn1": 1280,
52
+ "up_blocks_1_attentions_1_transformer_blocks_0_attn1": 1280,
53
+ "up_blocks_1_attentions_2_temporal_transformer_blocks_0_attn1": 1280,
54
+ "up_blocks_1_attentions_2_transformer_blocks_0_attn1": 1280,
55
+ "up_blocks_2_attentions_0_temporal_transformer_blocks_0_attn1": 640,
56
+ "up_blocks_2_attentions_0_transformer_blocks_0_attn1": 640,
57
+ "up_blocks_2_attentions_1_temporal_transformer_blocks_0_attn1": 640,
58
+ "up_blocks_2_attentions_1_transformer_blocks_0_attn1": 640,
59
+ "up_blocks_2_attentions_2_temporal_transformer_blocks_0_attn1": 640,
60
+ "up_blocks_2_attentions_2_transformer_blocks_0_attn1": 640,
61
+ "up_blocks_3_attentions_0_temporal_transformer_blocks_0_attn1": 320,
62
+ "up_blocks_3_attentions_0_transformer_blocks_0_attn1": 320,
63
+ "up_blocks_3_attentions_1_temporal_transformer_blocks_0_attn1": 320,
64
+ "up_blocks_3_attentions_1_transformer_blocks_0_attn1": 320,
65
+ "up_blocks_3_attentions_2_temporal_transformer_blocks_0_attn1": 320,
66
+ "up_blocks_3_attentions_2_transformer_blocks_0_attn1": 320
67
+ },
68
+ "zero_init_theta": false
69
+ }
checkpoint-31500/querytransformer/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6fcfa5f299a42181a50b91adb2c4c91f2861d4b743536edf8195b21abcffb751
3
+ size 53944
checkpoint-31500/random_states_0.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b8030c29d1e9d45c4064b2f7cbb60a388973f0f5b8bfcb2db00af710301bcb09
3
+ size 14408
checkpoint-31500/scheduler.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:95f142efb6788e98b9e7f658acec6af427fffbb91cc830a18c18f0adc2bd4384
3
+ size 1000
checkpoint-32000/optimizer.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0bf73527abd70bbbe3052e09244f00c967c12eba2b5fed072b65177b3f2f5798
3
+ size 126654
checkpoint-32000/querytransformer/config.json ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "QueryTransformerEmbedding"
4
+ ],
5
+ "attention_probs_dropout_prob": 0.0,
6
+ "hidden_act": "gelu_fast",
7
+ "hidden_dropout_prob": 0.0,
8
+ "hidden_size": 768,
9
+ "image_size": [
10
+ 72,
11
+ 128
12
+ ],
13
+ "initializer_range": 0.02,
14
+ "intermediate_size": 3072,
15
+ "layer_norm_eps": 1e-06,
16
+ "model_type": "vivit",
17
+ "num_attention_heads": 8,
18
+ "num_channels": 8,
19
+ "num_frames": 24,
20
+ "num_hidden_layers": 6,
21
+ "qkv_bias": true,
22
+ "query_dims": [
23
+ 160,
24
+ 320,
25
+ 640
26
+ ],
27
+ "torch_dtype": "float32",
28
+ "transformers_version": "4.47.1",
29
+ "tubelet_size": [
30
+ 2,
31
+ 9,
32
+ 16
33
+ ],
34
+ "unet_attention_names": {
35
+ "down_blocks_0_attentions_0_temporal_transformer_blocks_0_attn1": 320,
36
+ "down_blocks_0_attentions_0_transformer_blocks_0_attn1": 320,
37
+ "down_blocks_0_attentions_1_temporal_transformer_blocks_0_attn1": 320,
38
+ "down_blocks_0_attentions_1_transformer_blocks_0_attn1": 320,
39
+ "down_blocks_1_attentions_0_temporal_transformer_blocks_0_attn1": 640,
40
+ "down_blocks_1_attentions_0_transformer_blocks_0_attn1": 640,
41
+ "down_blocks_1_attentions_1_temporal_transformer_blocks_0_attn1": 640,
42
+ "down_blocks_1_attentions_1_transformer_blocks_0_attn1": 640,
43
+ "down_blocks_2_attentions_0_temporal_transformer_blocks_0_attn1": 1280,
44
+ "down_blocks_2_attentions_0_transformer_blocks_0_attn1": 1280,
45
+ "down_blocks_2_attentions_1_temporal_transformer_blocks_0_attn1": 1280,
46
+ "down_blocks_2_attentions_1_transformer_blocks_0_attn1": 1280,
47
+ "mid_block_attentions_0_temporal_transformer_blocks_0_attn1": 1280,
48
+ "mid_block_attentions_0_transformer_blocks_0_attn1": 1280,
49
+ "up_blocks_1_attentions_0_temporal_transformer_blocks_0_attn1": 1280,
50
+ "up_blocks_1_attentions_0_transformer_blocks_0_attn1": 1280,
51
+ "up_blocks_1_attentions_1_temporal_transformer_blocks_0_attn1": 1280,
52
+ "up_blocks_1_attentions_1_transformer_blocks_0_attn1": 1280,
53
+ "up_blocks_1_attentions_2_temporal_transformer_blocks_0_attn1": 1280,
54
+ "up_blocks_1_attentions_2_transformer_blocks_0_attn1": 1280,
55
+ "up_blocks_2_attentions_0_temporal_transformer_blocks_0_attn1": 640,
56
+ "up_blocks_2_attentions_0_transformer_blocks_0_attn1": 640,
57
+ "up_blocks_2_attentions_1_temporal_transformer_blocks_0_attn1": 640,
58
+ "up_blocks_2_attentions_1_transformer_blocks_0_attn1": 640,
59
+ "up_blocks_2_attentions_2_temporal_transformer_blocks_0_attn1": 640,
60
+ "up_blocks_2_attentions_2_transformer_blocks_0_attn1": 640,
61
+ "up_blocks_3_attentions_0_temporal_transformer_blocks_0_attn1": 320,
62
+ "up_blocks_3_attentions_0_transformer_blocks_0_attn1": 320,
63
+ "up_blocks_3_attentions_1_temporal_transformer_blocks_0_attn1": 320,
64
+ "up_blocks_3_attentions_1_transformer_blocks_0_attn1": 320,
65
+ "up_blocks_3_attentions_2_temporal_transformer_blocks_0_attn1": 320,
66
+ "up_blocks_3_attentions_2_transformer_blocks_0_attn1": 320
67
+ },
68
+ "zero_init_theta": false
69
+ }
checkpoint-32000/querytransformer/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1eb5ab499b124bc4f8b5ec476c634fcddab9a577ff5a1254ac2b0efe82d6521e
3
+ size 53944
checkpoint-32000/random_states_0.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bd5c8ed682c248a021ac9094cdde93eafe224b01adfe8f750a0c48e25dfdfec5
3
+ size 14408
checkpoint-32000/scheduler.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2bbb61109d01da8a9c381c0e2f077875e9118afeba0c4ed8b289ee114f675793
3
+ size 1000
config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "architectures": [
3
- "QueryTransformerEncoder"
4
  ],
5
  "attention_probs_dropout_prob": 0.0,
6
  "hidden_act": "gelu_fast",
@@ -14,22 +14,56 @@
14
  "intermediate_size": 3072,
15
  "layer_norm_eps": 1e-06,
16
  "model_type": "vivit",
17
- "num_attention_heads": 16,
18
- "num_channels": 4,
19
  "num_frames": 24,
20
- "num_hidden_layers": 12,
21
  "qkv_bias": true,
22
  "query_dims": [
23
  160,
24
  320,
25
  640
26
  ],
27
- "torch_dtype": "float32",
28
  "transformers_version": "4.47.1",
29
  "tubelet_size": [
30
  2,
31
  9,
32
  16
33
  ],
34
- "zero_init_theta": true
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
35
  }
 
1
  {
2
  "architectures": [
3
+ "QueryTransformerEmbedding"
4
  ],
5
  "attention_probs_dropout_prob": 0.0,
6
  "hidden_act": "gelu_fast",
 
14
  "intermediate_size": 3072,
15
  "layer_norm_eps": 1e-06,
16
  "model_type": "vivit",
17
+ "num_attention_heads": 8,
18
+ "num_channels": 8,
19
  "num_frames": 24,
20
+ "num_hidden_layers": 6,
21
  "qkv_bias": true,
22
  "query_dims": [
23
  160,
24
  320,
25
  640
26
  ],
27
+ "torch_dtype": "bfloat16",
28
  "transformers_version": "4.47.1",
29
  "tubelet_size": [
30
  2,
31
  9,
32
  16
33
  ],
34
+ "unet_attention_names": {
35
+ "down_blocks_0_attentions_0_temporal_transformer_blocks_0_attn1": 320,
36
+ "down_blocks_0_attentions_0_transformer_blocks_0_attn1": 320,
37
+ "down_blocks_0_attentions_1_temporal_transformer_blocks_0_attn1": 320,
38
+ "down_blocks_0_attentions_1_transformer_blocks_0_attn1": 320,
39
+ "down_blocks_1_attentions_0_temporal_transformer_blocks_0_attn1": 640,
40
+ "down_blocks_1_attentions_0_transformer_blocks_0_attn1": 640,
41
+ "down_blocks_1_attentions_1_temporal_transformer_blocks_0_attn1": 640,
42
+ "down_blocks_1_attentions_1_transformer_blocks_0_attn1": 640,
43
+ "down_blocks_2_attentions_0_temporal_transformer_blocks_0_attn1": 1280,
44
+ "down_blocks_2_attentions_0_transformer_blocks_0_attn1": 1280,
45
+ "down_blocks_2_attentions_1_temporal_transformer_blocks_0_attn1": 1280,
46
+ "down_blocks_2_attentions_1_transformer_blocks_0_attn1": 1280,
47
+ "mid_block_attentions_0_temporal_transformer_blocks_0_attn1": 1280,
48
+ "mid_block_attentions_0_transformer_blocks_0_attn1": 1280,
49
+ "up_blocks_1_attentions_0_temporal_transformer_blocks_0_attn1": 1280,
50
+ "up_blocks_1_attentions_0_transformer_blocks_0_attn1": 1280,
51
+ "up_blocks_1_attentions_1_temporal_transformer_blocks_0_attn1": 1280,
52
+ "up_blocks_1_attentions_1_transformer_blocks_0_attn1": 1280,
53
+ "up_blocks_1_attentions_2_temporal_transformer_blocks_0_attn1": 1280,
54
+ "up_blocks_1_attentions_2_transformer_blocks_0_attn1": 1280,
55
+ "up_blocks_2_attentions_0_temporal_transformer_blocks_0_attn1": 640,
56
+ "up_blocks_2_attentions_0_transformer_blocks_0_attn1": 640,
57
+ "up_blocks_2_attentions_1_temporal_transformer_blocks_0_attn1": 640,
58
+ "up_blocks_2_attentions_1_transformer_blocks_0_attn1": 640,
59
+ "up_blocks_2_attentions_2_temporal_transformer_blocks_0_attn1": 640,
60
+ "up_blocks_2_attentions_2_transformer_blocks_0_attn1": 640,
61
+ "up_blocks_3_attentions_0_temporal_transformer_blocks_0_attn1": 320,
62
+ "up_blocks_3_attentions_0_transformer_blocks_0_attn1": 320,
63
+ "up_blocks_3_attentions_1_temporal_transformer_blocks_0_attn1": 320,
64
+ "up_blocks_3_attentions_1_transformer_blocks_0_attn1": 320,
65
+ "up_blocks_3_attentions_2_temporal_transformer_blocks_0_attn1": 320,
66
+ "up_blocks_3_attentions_2_transformer_blocks_0_attn1": 320
67
+ },
68
+ "zero_init_theta": false
69
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fa570c60b29edfb504dbd670f92b327e0edbf54d59821779553446f6e2b35d7b
3
- size 349599384
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5ce6e732b58004553623d901af2efbce705669863fae78a330504b7c8072abbc
3
+ size 29008
validation_images/step_10000_val_cross_0.mp4 ADDED
Binary file (895 kB). View file
 
validation_images/step_10000_val_recon_0.mp4 ADDED
Binary file (473 kB). View file
 
validation_images/step_1000_val_cross_0.mp4 ADDED
Binary file (910 kB). View file
 
validation_images/step_1000_val_recon_0.mp4 ADDED
Binary file (549 kB). View file
 
validation_images/step_10500_val_cross_0.mp4 ADDED
Binary file (850 kB). View file
 
validation_images/step_10500_val_recon_0.mp4 ADDED
Binary file (453 kB). View file
 
validation_images/step_11000_val_cross_0.mp4 ADDED
Binary file (818 kB). View file
 
validation_images/step_11000_val_recon_0.mp4 ADDED
Binary file (610 kB). View file
 
validation_images/step_11500_val_cross_0.mp4 ADDED
Binary file (890 kB). View file
 
validation_images/step_11500_val_recon_0.mp4 ADDED
Binary file (458 kB). View file
 
validation_images/step_12000_val_cross_0.mp4 ADDED
Binary file (812 kB). View file
 
validation_images/step_12000_val_recon_0.mp4 ADDED
Binary file (541 kB). View file
 
validation_images/step_12500_val_cross_0.mp4 ADDED
Binary file (893 kB). View file
 
validation_images/step_12500_val_recon_0.mp4 ADDED
Binary file (581 kB). View file
 
validation_images/step_13000_val_cross_0.mp4 ADDED
Binary file (704 kB). View file
 
validation_images/step_13000_val_recon_0.mp4 ADDED
Binary file (299 kB). View file
 
validation_images/step_13500_val_cross_0.mp4 ADDED
Binary file (951 kB). View file
 
validation_images/step_13500_val_recon_0.mp4 ADDED
Binary file (523 kB). View file
 
validation_images/step_14000_val_cross_0.mp4 ADDED
Binary file (929 kB). View file
 
validation_images/step_14000_val_recon_0.mp4 ADDED
Binary file (542 kB). View file
 
validation_images/step_14500_val_cross_0.mp4 ADDED
Binary file (867 kB). View file
 
validation_images/step_14500_val_recon_0.mp4 ADDED
Binary file (571 kB). View file
 
validation_images/step_15000_val_cross_0.mp4 ADDED
Binary file (706 kB). View file
 
validation_images/step_15000_val_recon_0.mp4 ADDED
Binary file (518 kB). View file
 
validation_images/step_1500_val_cross_0.mp4 ADDED
Binary file (870 kB). View file
 
validation_images/step_1500_val_recon_0.mp4 ADDED
Binary file (552 kB). View file
 
validation_images/step_15500_val_cross_0.mp4 ADDED
Binary file (870 kB). View file
 
validation_images/step_15500_val_recon_0.mp4 ADDED
Binary file (378 kB). View file
 
validation_images/step_16000_val_cross_0.mp4 ADDED
Binary file (926 kB). View file
 
validation_images/step_16000_val_recon_0.mp4 ADDED
Binary file (454 kB). View file
 
validation_images/step_16500_val_cross_0.mp4 ADDED
Binary file (860 kB). View file
 
validation_images/step_16500_val_recon_0.mp4 ADDED
Binary file (464 kB). View file
 
validation_images/step_17000_val_cross_0.mp4 ADDED
Binary file (900 kB). View file
 
validation_images/step_17000_val_recon_0.mp4 ADDED
Binary file (496 kB). View file
 
validation_images/step_17500_val_cross_0.mp4 ADDED
Binary file (926 kB). View file
 
validation_images/step_17500_val_recon_0.mp4 ADDED
Binary file (258 kB). View file
 
validation_images/step_18000_val_cross_0.mp4 ADDED
Binary file (863 kB). View file