usr256864 committed on
Commit
c29bad5
·
verified ·
1 Parent(s): bc0f02d

Upload 5 files

Browse files
config_kwargs.json ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "monitor_metric": "eval_MRR@NM",
3
+ "trainer_kwargs": {
4
+ "class_name": "IR"
5
+ },
6
+ "trainee_kwargs": {
7
+ "class_name": "CLIP_Encoder",
8
+ "freeze_prefixes": [],
9
+ "use_attention": false,
10
+ "symmetric_CL": false,
11
+ "weighted_loss": false,
12
+ "image_type": false,
13
+ "mlm_type": false,
14
+ "tie_weights": false,
15
+ "loss": {
16
+ "class_name": "NLLLoss",
17
+ "align_uniform": true
18
+ }
19
+ },
20
+ "data_module_kwargs": {
21
+ "class_name": "cross_modal_DataModule",
22
+ "data_processor": {
23
+ "class_name": "evqa_data_processor",
24
+ "dataset_path": "../../data/evqa/",
25
+ "kb_path": "../../data/evqa/passages",
26
+ "entity_kb_path": "../../data/evqa/kb",
27
+ ",": "../../data/viquae_dataset/"
28
+ },
29
+ "input_key": "question",
30
+ "passage_key": "passage",
31
+ "relevant_indices_key": "BM25_provenance_indices",
32
+ "irrelevant_indices_key": "BM25_irrelevant_indices",
33
+ "use_image": true,
34
+ "add_positives": false,
35
+ "cross_modal_viquae_valid": false,
36
+ "use_mep": false,
37
+ "use_mlm": false,
38
+ "use_CL": true,
39
+ "augmented": false,
40
+ "random_mask": false,
41
+ "dataloader_kwargs": {
42
+ "num_workers": 6,
43
+ "prefetch_factor": 2
44
+ },
45
+ "tokenizer_kwargs": {
46
+ "class_name": "CLIPTokenizer",
47
+ "pretrained_model_name_or_path": "clip-vit-base-patch32_tokenizer"
48
+ },
49
+ "tokenization_kwargs": {
50
+ "max_length": 77,
51
+ "padding": "longest"
52
+ },
53
+ "image_processor_kwargs": {
54
+ "class_name": "ImageFormatter_evqa",
55
+ "feature_extractor_kwargs": {
56
+ "class_name": "CLIPFeatureExtractor",
57
+ "pretrained_model_name_or_path": "clip-vit-base-patch32_FE"
58
+ }
59
+ }
60
+ },
61
+ "text_encoder_kwargs": {
62
+ "class_name": "CLIP_Text_Encoder",
63
+ "checkpoint_name": "text_encoder",
64
+ "base_encoder_kwargs": {
65
+ "class_name": "CLIPModel",
66
+ "pretrained_model_name_or_path": "/home/data/meerqat/my_transformers_cache/clip-vit-base-patch32"
67
+ },
68
+ "inference_path": "saved_models/CLIP_evqa/text_encoder"
69
+ },
70
+ "image_encoder_kwargs": {
71
+ "class_name": "CLIP_Image_Encoder",
72
+ "checkpoint_name": "image_encoder",
73
+ "base_encoder_kwargs": {
74
+ "class_name": "CLIPModel",
75
+ "pretrained_model_name_or_path": "/home/data/meerqat/my_transformers_cache/clip-vit-base-patch32"
76
+ },
77
+ "inference_path": "saved_models/CLIP_evqa/image_encoder"
78
+ }
79
+ }
experiment_params.json ADDED
@@ -0,0 +1,154 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "subparser": null,
3
+ "max_seq_length": 256,
4
+ "question_max_seq_length": 256,
5
+ "RC": false,
6
+ "triviaqa": false,
7
+ "coco": false,
8
+ "flickr": false,
9
+ "aokvqa": false,
10
+ "evqa": false,
11
+ "infoseek": false,
12
+ "grad_check": true,
13
+ "MJL": false,
14
+ "debug_run_time": false,
15
+ "only_neighbors": false,
16
+ "full_kb": false,
17
+ "image_eval": false,
18
+ "embedding_column": "my_DPR_few_shot",
19
+ "output_ds": null,
20
+ "output_kb": null,
21
+ "embed": false,
22
+ "data_path": null,
23
+ "train_batch_size": 512,
24
+ "eval_batch_size": 1000,
25
+ "transformer_model_name": "bert-base-uncased",
26
+ "gradient_accumulation_steps": 1,
27
+ "warmup_proportion": 0.1,
28
+ "weight_decay": 0.01,
29
+ "adam_beta1": 0.9,
30
+ "adam_beta2": 0.999,
31
+ "warmup_steps": 4,
32
+ "adam_epsilon": 1e-08,
33
+ "num_train_epochs": 12,
34
+ "learning_rate": 2e-06,
35
+ "dropout": 0.5,
36
+ "bert_hidden_size": 768,
37
+ "MM": null,
38
+ "use_graph_P": null,
39
+ "use_graph_Q": null,
40
+ "syntactic_P": null,
41
+ "syntactic_Q": null,
42
+ "NS": null,
43
+ "IC_Q": false,
44
+ "IC_P": false,
45
+ "gcn_lr": 2e-05,
46
+ "head_lr": 2e-06,
47
+ "func": "cat",
48
+ "graph_pooling": "mean",
49
+ "max_nbr_nodes": 2,
50
+ "num_neib": 1,
51
+ "neib_depth": 1,
52
+ "undirect": false,
53
+ "node_hidden_size": 128,
54
+ "max_num_relations": null,
55
+ "max_num_syntactic_relations": null,
56
+ "map_wikidataRelCode_to_idx": null,
57
+ "graph_layout": "fr",
58
+ "residual": false,
59
+ "num_gcn": null,
60
+ "node_alias_max_len": 32,
61
+ "attention": false,
62
+ "num_paths": 1,
63
+ "kg_embed": false,
64
+ "graph_only": false,
65
+ "freeze": false,
66
+ "sanity_run": false,
67
+ "enhanced_question": false,
68
+ "use_entity_type": false,
69
+ "filter_edges": false,
70
+ "draw": false,
71
+ "layer_norm": false,
72
+ "filters_3": 768,
73
+ "tensor_neurons": 16,
74
+ "output_dir": "saved_models/CLIP_evqa",
75
+ "main_dir": "/home/data/meerqat/ViQuAE/meerqat/GP-VQA",
76
+ "experiment_dir": "None",
77
+ "experiment_name": "CLIP_evqa",
78
+ "search": null,
79
+ "xlnet": false,
80
+ "tune_loss": false,
81
+ "tune_dev": true,
82
+ "tune_valid_loss": false,
83
+ "sanity_val_steps": 0,
84
+ "cpu": false,
85
+ "N": 0,
86
+ "nbr_workers": 1,
87
+ "num_proc": 1,
88
+ "local_cache": "/home/data/meerqat/my_transformers_cache",
89
+ "IMAGE_PATH": "../../data/mini_Commons",
90
+ "transformer_path": null,
91
+ "resume_from": null,
92
+ "checkpoint": null,
93
+ "linear_MEP_checkpoint": null,
94
+ "linear_MLM_checkpoint": null,
95
+ "config": "experiments/ir/evqa/clip/config_clip.json",
96
+ "context_checkpoint": null,
97
+ "grad_accum": 1,
98
+ "cls_token": "[CLS]",
99
+ "sep_token": "[SEP]",
100
+ "pad_token": "[PAD]",
101
+ "cls_token_at_end": false,
102
+ "mask_padding_with_zero": true,
103
+ "pad_on_left": false,
104
+ "sequence_a_segment_id": 0,
105
+ "pad_token_segment_id": 0,
106
+ "cls_token_segment_id": 0,
107
+ "pad_token_label_id": 0,
108
+ "pad_token_id": 0,
109
+ "past_index": -1,
110
+ "world_size": 1,
111
+ "use_lstm": false,
112
+ "use_entity": false,
113
+ "use_question_graph": false,
114
+ "use_question_objects": false,
115
+ "EFeat": null,
116
+ "NFeat": null,
117
+ "ltn": false,
118
+ "use_kelm": false,
119
+ "unshared": false,
120
+ "mlm": false,
121
+ "test": false,
122
+ "split_dpr": false,
123
+ "rename_model": false,
124
+ "mask_rate": 0.15,
125
+ "pretrained_triviaq": false,
126
+ "error_analysis": false,
127
+ "debug": false,
128
+ "stop_debug": false,
129
+ "entity_linking": false,
130
+ "entity_linking_split": false,
131
+ "build_entity_paths": null,
132
+ "object_detection": null,
133
+ "update_wikidata_ids": null,
134
+ "update_None_ids": null,
135
+ "dependency_parsing": null,
136
+ "image_captionning": null,
137
+ "build_graph": false,
138
+ "from_bert": false,
139
+ "fast": false,
140
+ "get_examples": false,
141
+ "get_statistics": null,
142
+ "word_vocab_size": 0,
143
+ "char_vocab_size": 0,
144
+ "max_word_len": 30,
145
+ "device": null,
146
+ "embedding_matrix": null,
147
+ "w2v_file": "word_vector_200d.vec",
148
+ "word_emb_dim": 200,
149
+ "char_lstm": false,
150
+ "char_cnn": false,
151
+ "seed": 32,
152
+ "tune_batch_size": false,
153
+ "model_names": null
154
+ }
model-epoch=06-eval_MRR@NM=0.2085_sd=32.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8261b499c66f9b121c3f24e63a3dedaa6ff788fcc2644380bc704d3e41645bf4
3
+ size 1815837788
results.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ -------- dev---------
2
+ [1.0]
validation_metrics.txt ADDED
File without changes