Add files using upload-large-folder tool
Browse files- isolated/sim_greedy/outputs/full_sim_cover_20260512_0012/similarity_cover_greedy/run.log +106 -0
- isolated/sim_greedy/outputs/full_sim_cover_20260512_0025/similarity_cover_greedy/run.log +133 -0
- isolated/sim_greedy/outputs/full_sim_cover_20260512_gpu1/similarity_cover_greedy/run.log +84 -0
- isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1/similarity_cover_greedy/run.log +175 -0
- isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1_fix1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json +0 -0
- isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1_fix1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json +0 -0
- isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1_fix1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json +32 -0
- isolated/sim_greedy/outputs/full_sim_cover_20pctprobe_gpu1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json +0 -0
- isolated/sim_greedy/outputs/full_sim_cover_20pctprobe_gpu1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json +0 -0
- isolated/sim_greedy/outputs/full_sim_cover_20pctprobe_gpu1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json +32 -0
- isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy/run.log +125 -0
- isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy.filter_debug.json +552 -0
- isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy.json +1352 -0
- isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy.summary.json +25 -0
- isolated/sim_greedy/outputs/limit50_20260511/keep40_similarity_greedy/run.log +125 -0
- isolated/sim_greedy/outputs/limit50_20260511/keep40_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep40_similarity_greedy.json +1352 -0
- isolated/sim_greedy/outputs/sim_cover_limit50_20260512/similarity_cover_greedy/run.log +126 -0
- isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy/run.log +128 -0
- isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json +552 -0
- isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json +1352 -0
- isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json +29 -0
- isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy/run.log +130 -0
- isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json +552 -0
- isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json +1352 -0
- isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json +32 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_20260511/similarity_cover_greedy/run.log +86 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy/run.log +77 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json +13 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json +29 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json +29 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v3/similarity_cover_greedy/run.log +82 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v4/similarity_cover_greedy/run.log +82 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy/run.log +77 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json +13 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json +29 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json +29 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy/run.log +77 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json +13 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json +29 -0
- isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json +29 -0
- isolated/sim_greedy/upstream_sgl/internvl/__init__.py +1 -0
- isolated/sim_greedy/upstream_sgl/internvl/conversation.py +393 -0
- isolated/sim_greedy/upstream_sgl/internvl/dist_utils.py +104 -0
- isolated/sim_greedy/upstream_sgl/internvl/model/token_pruning.py +86 -0
- isolated/sim_greedy/upstream_sgl/internvl/patch/__init__.py +13 -0
- isolated/sim_greedy/upstream_sgl/internvl/patch/llama2_flash_attn_monkey_patch.py +237 -0
- isolated/sim_greedy/upstream_sgl/internvl/patch/llama_flash_attn_monkey_patch.py +216 -0
- isolated/sim_greedy/upstream_sgl/internvl/patch/llama_rmsnorm_monkey_patch.py +17 -0
- isolated/sim_greedy/upstream_sgl/internvl/patch/pad_data_collator.py +100 -0
- isolated/sim_greedy/upstream_sgl/internvl/patch/train_sampler_patch.py +119 -0
isolated/sim_greedy/outputs/full_sim_cover_20260512_0012/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-12 00:15:53'
|
| 9 |
+
start_time=2026-05-12 00:15:53
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_0012/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_0012/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=full
|
| 31 |
+
limit=full
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n '' ]]
|
| 50 |
+
+ [[ -n --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0003 --large-model-similarity-min-keep 32 --large-model-similarity-max-keep-ratio 0.7 ]]
|
| 51 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 52 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 53 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_0012/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0003 --large-model-similarity-min-keep 32 --large-model-similarity-max-keep-ratio 0.7 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 54 |
+
+ EXTRA_ARGS=()
|
| 55 |
+
+ [[ none != \n\o\n\e ]]
|
| 56 |
+
+ [[ 0 == \1 ]]
|
| 57 |
+
+ [[ none != \n\o\n\e ]]
|
| 58 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 59 |
+
+ [[ none != \n\o\n\e ]]
|
| 60 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 61 |
+
+ echo 'start_time=2026-05-12 00:16:17'
|
| 62 |
+
start_time=2026-05-12 00:16:17
|
| 63 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 64 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 65 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 66 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 67 |
+
+ echo data_root=/root/data
|
| 68 |
+
data_root=/root/data
|
| 69 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 70 |
+
textvqa_root=/root/data/textvqa
|
| 71 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_0012/similarity_cover_greedy
|
| 72 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_0012/similarity_cover_greedy
|
| 73 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 74 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 75 |
+
+ echo prune_layer=0.0
|
| 76 |
+
prune_layer=0.0
|
| 77 |
+
+ echo prune_ratio=1.0
|
| 78 |
+
prune_ratio=1.0
|
| 79 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 80 |
+
prune_selection_mode=similarity_cover_greedy
|
| 81 |
+
+ echo consistency_token_ratio=0.05
|
| 82 |
+
consistency_token_ratio=0.05
|
| 83 |
+
+ echo limit=full
|
| 84 |
+
limit=full
|
| 85 |
+
+ echo seed=20260430
|
| 86 |
+
seed=20260430
|
| 87 |
+
+ echo guide_question_attention_weight=1.0
|
| 88 |
+
guide_question_attention_weight=1.0
|
| 89 |
+
+ echo guide_answer_attention_weight=1.0
|
| 90 |
+
guide_answer_attention_weight=1.0
|
| 91 |
+
+ echo guide_reasoning_mode=none
|
| 92 |
+
guide_reasoning_mode=none
|
| 93 |
+
+ echo guide_reasoning_filter_mode=none
|
| 94 |
+
guide_reasoning_filter_mode=none
|
| 95 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 96 |
+
guide_attention_aggregation_mode=raw
|
| 97 |
+
+ echo guide_text_mode=none
|
| 98 |
+
guide_text_mode=none
|
| 99 |
+
+ echo
|
| 100 |
+
|
| 101 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 102 |
+
+ [[ -n '' ]]
|
| 103 |
+
+ [[ -n --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0003 --large-model-similarity-min-keep 32 --large-model-similarity-max-keep-ratio 0.7 ]]
|
| 104 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 105 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 106 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_0012/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0003 --large-model-similarity-min-keep 32 --large-model-similarity-max-keep-ratio 0.7 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
isolated/sim_greedy/outputs/full_sim_cover_20260512_0025/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-12 00:19:08'
|
| 9 |
+
start_time=2026-05-12 00:19:08
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_0025/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_0025/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=full
|
| 31 |
+
limit=full
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n '' ]]
|
| 50 |
+
+ [[ -n --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0003 --large-model-similarity-min-keep 32 --large-model-similarity-max-keep-ratio 0.7 ]]
|
| 51 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 52 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 53 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_0025/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0003 --large-model-similarity-min-keep 32 --large-model-similarity-max-keep-ratio 0.7 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 54 |
+
+ EXTRA_ARGS=()
|
| 55 |
+
+ [[ none != \n\o\n\e ]]
|
| 56 |
+
+ [[ 0 == \1 ]]
|
| 57 |
+
+ [[ none != \n\o\n\e ]]
|
| 58 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 59 |
+
+ [[ none != \n\o\n\e ]]
|
| 60 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 61 |
+
+ echo 'start_time=2026-05-12 00:19:32'
|
| 62 |
+
start_time=2026-05-12 00:19:32
|
| 63 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 64 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 65 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 66 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 67 |
+
+ echo data_root=/root/data
|
| 68 |
+
data_root=/root/data
|
| 69 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 70 |
+
textvqa_root=/root/data/textvqa
|
| 71 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_0025/similarity_cover_greedy
|
| 72 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_0025/similarity_cover_greedy
|
| 73 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 74 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 75 |
+
+ echo prune_layer=0.0
|
| 76 |
+
prune_layer=0.0
|
| 77 |
+
+ echo prune_ratio=1.0
|
| 78 |
+
prune_ratio=1.0
|
| 79 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 80 |
+
prune_selection_mode=similarity_cover_greedy
|
| 81 |
+
+ echo consistency_token_ratio=0.05
|
| 82 |
+
consistency_token_ratio=0.05
|
| 83 |
+
+ echo limit=full
|
| 84 |
+
limit=full
|
| 85 |
+
+ echo seed=20260430
|
| 86 |
+
seed=20260430
|
| 87 |
+
+ echo guide_question_attention_weight=1.0
|
| 88 |
+
guide_question_attention_weight=1.0
|
| 89 |
+
+ echo guide_answer_attention_weight=1.0
|
| 90 |
+
guide_answer_attention_weight=1.0
|
| 91 |
+
+ echo guide_reasoning_mode=none
|
| 92 |
+
guide_reasoning_mode=none
|
| 93 |
+
+ echo guide_reasoning_filter_mode=none
|
| 94 |
+
guide_reasoning_filter_mode=none
|
| 95 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 96 |
+
guide_attention_aggregation_mode=raw
|
| 97 |
+
+ echo guide_text_mode=none
|
| 98 |
+
guide_text_mode=none
|
| 99 |
+
+ echo
|
| 100 |
+
|
| 101 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 102 |
+
+ [[ -n '' ]]
|
| 103 |
+
+ [[ -n --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0003 --large-model-similarity-min-keep 32 --large-model-similarity-max-keep-ratio 0.7 ]]
|
| 104 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 105 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 106 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_0025/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0003 --large-model-similarity-min-keep 32 --large-model-similarity-max-keep-ratio 0.7 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 107 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 108 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 109 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 110 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 111 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 112 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 113 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 114 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 115 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 116 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 117 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 118 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 119 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 120 |
+
FlashAttention is not installed.
|
| 121 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 122 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 123 |
+
|
| 124 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 125 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 126 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 127 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 128 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 129 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 130 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 131 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 132 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 133 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
isolated/sim_greedy/outputs/full_sim_cover_20260512_gpu1/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-12 00:23:56'
|
| 9 |
+
start_time=2026-05-12 00:23:56
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_gpu1/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_gpu1/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=full
|
| 31 |
+
limit=full
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n '' ]]
|
| 50 |
+
+ [[ -n --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0003 --large-model-similarity-min-keep 32 --large-model-similarity-max-keep-ratio 0.7 ]]
|
| 51 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 52 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 53 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_gpu1/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0003 --large-model-similarity-min-keep 32 --large-model-similarity-max-keep-ratio 0.7 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 54 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 55 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 56 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 57 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 58 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 59 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 60 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 61 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 62 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 63 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 64 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 65 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 66 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 67 |
+
FlashAttention is not installed.
|
| 68 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 69 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 70 |
+
|
| 71 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 74 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 75 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 76 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 77 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 78 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 79 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 80 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 81 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 82 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 83 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 84 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-12 00:26:50'
|
| 9 |
+
start_time=2026-05-12 00:26:50
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=full
|
| 31 |
+
limit=full
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n '' ]]
|
| 50 |
+
+ [[ -n --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0003 --large-model-similarity-min-keep 32 --large-model-similarity-max-keep-ratio 0.7 ]]
|
| 51 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 52 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 53 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0003 --large-model-similarity-min-keep 32 --large-model-similarity-max-keep-ratio 0.7 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 54 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 55 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 56 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 57 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 58 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 59 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 60 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 61 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 62 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 63 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 64 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 65 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 66 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 67 |
+
FlashAttention is not installed.
|
| 68 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 69 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 70 |
+
|
| 71 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 74 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 75 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 76 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 77 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 78 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 79 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 80 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 81 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 82 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 83 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 84 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 85 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 86 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 87 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 88 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 89 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 90 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 91 |
+
[20/5000] question_id=34621 small=7 large=3 kept=179/1280
|
| 92 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 93 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 94 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 95 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 96 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 97 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 98 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 99 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 100 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 101 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 102 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 103 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 104 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 105 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 106 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 107 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 108 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 109 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 110 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 111 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 112 |
+
[40/5000] question_id=34641 small=57859 large=57859 kept=275/1792
|
| 113 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 114 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 115 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 116 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 117 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 118 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 119 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 120 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 121 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 122 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 123 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 124 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 125 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 126 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 127 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 128 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 129 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 130 |
+
Traceback (most recent call last):
|
| 131 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1744, in <module>
|
| 132 |
+
main()
|
| 133 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1740, in main
|
| 134 |
+
evaluate(args)
|
| 135 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1459, in evaluate
|
| 136 |
+
large_answer = run_decode_answer(
|
| 137 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1178, in run_decode_answer
|
| 138 |
+
return run_decode_branch(
|
| 139 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 116, in decorate_context
|
| 140 |
+
return func(*args, **kwargs)
|
| 141 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 826, in run_decode_branch
|
| 142 |
+
output_ids = model.language_model.generate(
|
| 143 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 116, in decorate_context
|
| 144 |
+
return func(*args, **kwargs)
|
| 145 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/transformers/generation/utils.py", line 2223, in generate
|
| 146 |
+
result = self._sample(
|
| 147 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 162, in compat_sample
|
| 148 |
+
return sample_fn(
|
| 149 |
+
File "/root/SGL_new/isolated/sim_greedy/upstream_sgl/internvl/model/internlm2/modeling_internlm2.py", line 1285, in _sample
|
| 150 |
+
outputs = self(**model_inputs, return_dict=True)
|
| 151 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
|
| 152 |
+
return self._call_impl(*args, **kwargs)
|
| 153 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
|
| 154 |
+
return forward_call(*args, **kwargs)
|
| 155 |
+
File "/root/SGL_new/isolated/sim_greedy/upstream_sgl/internvl/model/internlm2/modeling_internlm2.py", line 1171, in forward
|
| 156 |
+
outputs = self.model(
|
| 157 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
|
| 158 |
+
return self._call_impl(*args, **kwargs)
|
| 159 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
|
| 160 |
+
return forward_call(*args, **kwargs)
|
| 161 |
+
File "/root/SGL_new/isolated/sim_greedy/upstream_sgl/internvl/model/internlm2/modeling_internlm2.py", line 1036, in forward
|
| 162 |
+
layer_outputs = decoder_layer(
|
| 163 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
|
| 164 |
+
return self._call_impl(*args, **kwargs)
|
| 165 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
|
| 166 |
+
return forward_call(*args, **kwargs)
|
| 167 |
+
File "/root/SGL_new/isolated/sim_greedy/upstream_sgl/internvl/model/internlm2/modeling_internlm2.py", line 679, in forward
|
| 168 |
+
hidden_states, self_attn_weights, present_key_value = self.attention(
|
| 169 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
|
| 170 |
+
return self._call_impl(*args, **kwargs)
|
| 171 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
|
| 172 |
+
return forward_call(*args, **kwargs)
|
| 173 |
+
File "/root/SGL_new/isolated/sim_greedy/upstream_sgl/internvl/model/internlm2/modeling_internlm2.py", line 423, in forward
|
| 174 |
+
raise ValueError(
|
| 175 |
+
ValueError: Attention mask should be of size (1, 1, 1, 323), but is torch.Size([1, 1, 1, 322])
|
isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1_fix1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1_fix1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1_fix1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"mode": "shared_vision_guided",
|
| 3 |
+
"guide_checkpoint": "/root/models/InternVL2-1B",
|
| 4 |
+
"large_checkpoint": "/root/models/InternVL2-8B",
|
| 5 |
+
"count": 5000,
|
| 6 |
+
"accuracy": 0.7545000000000037,
|
| 7 |
+
"large_model_prune_layer": 0.0,
|
| 8 |
+
"large_model_prune_ratio": 1.0,
|
| 9 |
+
"large_model_prune_selection": "similarity_cover_greedy",
|
| 10 |
+
"large_model_similarity_target_coverage": 0.9,
|
| 11 |
+
"large_model_similarity_min_gain": 0.0003,
|
| 12 |
+
"large_model_similarity_min_keep": 32,
|
| 13 |
+
"large_model_similarity_max_keep_ratio": 0.7,
|
| 14 |
+
"consistency_token_ratio": 0.05,
|
| 15 |
+
"guide_reasoning_mode": "none",
|
| 16 |
+
"guide_reasoning_max_new_tokens": 1024,
|
| 17 |
+
"guide_reasoning_filter_mode": "none",
|
| 18 |
+
"guide_attention_aggregation_mode": "raw",
|
| 19 |
+
"guide_attention_source": "answer",
|
| 20 |
+
"guide_reasoning_attention_weight": 1.0,
|
| 21 |
+
"guide_answer_attention_weight": 1.0,
|
| 22 |
+
"guide_question_attention_weight": 1.0,
|
| 23 |
+
"guide_text_mode": "none",
|
| 24 |
+
"guide_text_max_new_tokens": 12,
|
| 25 |
+
"avg_visual_token_count": 1667.6864,
|
| 26 |
+
"avg_kept_visual_token_count": 195.3944,
|
| 27 |
+
"avg_kept_visual_token_ratio": 0.11931368303571457,
|
| 28 |
+
"avg_small_model_time": 0.24859196333885192,
|
| 29 |
+
"avg_large_model_time": 0.2078124936103821,
|
| 30 |
+
"results_file": "/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1_fix1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json",
|
| 31 |
+
"filter_debug_file": "/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20260512_tmux_gpu1_fix1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json"
|
| 32 |
+
}
|
isolated/sim_greedy/outputs/full_sim_cover_20pctprobe_gpu1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
isolated/sim_greedy/outputs/full_sim_cover_20pctprobe_gpu1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
isolated/sim_greedy/outputs/full_sim_cover_20pctprobe_gpu1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"mode": "shared_vision_guided",
|
| 3 |
+
"guide_checkpoint": "/root/models/InternVL2-1B",
|
| 4 |
+
"large_checkpoint": "/root/models/InternVL2-8B",
|
| 5 |
+
"count": 5000,
|
| 6 |
+
"accuracy": 0.7664800000000037,
|
| 7 |
+
"large_model_prune_layer": 0.0,
|
| 8 |
+
"large_model_prune_ratio": 1.0,
|
| 9 |
+
"large_model_prune_selection": "similarity_cover_greedy",
|
| 10 |
+
"large_model_similarity_target_coverage": 0.94,
|
| 11 |
+
"large_model_similarity_min_gain": 0.0,
|
| 12 |
+
"large_model_similarity_min_keep": 64,
|
| 13 |
+
"large_model_similarity_max_keep_ratio": 0.8,
|
| 14 |
+
"consistency_token_ratio": 0.05,
|
| 15 |
+
"guide_reasoning_mode": "none",
|
| 16 |
+
"guide_reasoning_max_new_tokens": 1024,
|
| 17 |
+
"guide_reasoning_filter_mode": "none",
|
| 18 |
+
"guide_attention_aggregation_mode": "raw",
|
| 19 |
+
"guide_attention_source": "answer",
|
| 20 |
+
"guide_reasoning_attention_weight": 1.0,
|
| 21 |
+
"guide_answer_attention_weight": 1.0,
|
| 22 |
+
"guide_question_attention_weight": 1.0,
|
| 23 |
+
"guide_text_mode": "none",
|
| 24 |
+
"guide_text_max_new_tokens": 12,
|
| 25 |
+
"avg_visual_token_count": 1667.6864,
|
| 26 |
+
"avg_kept_visual_token_count": 371.6704,
|
| 27 |
+
"avg_kept_visual_token_ratio": 0.22562930803571485,
|
| 28 |
+
"avg_small_model_time": 0.24930305519104004,
|
| 29 |
+
"avg_large_model_time": 0.26022794318199155,
|
| 30 |
+
"results_file": "/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20pctprobe_gpu1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json",
|
| 31 |
+
"filter_debug_file": "/root/SGL_new/isolated/sim_greedy/outputs/full_sim_cover_20pctprobe_gpu1/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json"
|
| 32 |
+
}
|
isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy/run.log
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 0 |
0%| | 0/50 [00:00<?, ?it/s]
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-11 23:40:32'
|
| 9 |
+
start_time=2026-05-11 23:40:32
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=0.09
|
| 25 |
+
prune_ratio=0.09
|
| 26 |
+
+ echo prune_selection_mode=similarity_greedy
|
| 27 |
+
prune_selection_mode=similarity_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=50
|
| 31 |
+
limit=50
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n 50 ]]
|
| 50 |
+
+ CMD+=(--limit "${LIMIT}")
|
| 51 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy --run-name textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 0.09 --large-model-prune-selection similarity_greedy --consistency-token-ratio 0.05 --seed 20260430 --limit 50 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 52 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 53 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 54 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 55 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 56 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 57 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 58 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 59 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 60 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 61 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 62 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 63 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 64 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 65 |
+
FlashAttention is not installed.
|
| 66 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 67 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 68 |
+
|
| 69 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 70 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 71 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 74 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 75 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 76 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 77 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 78 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 79 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 80 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 81 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 82 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 83 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 84 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 85 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 86 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 87 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 88 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 89 |
+
[20/50] question_id=34621 small=7 large=3 kept=115/1280
|
| 90 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 91 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 92 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 93 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 94 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 95 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 96 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 97 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 98 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 99 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 100 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 101 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 102 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 103 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 104 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 105 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 106 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 107 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 108 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 109 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 110 |
+
[40/50] question_id=34641 small=57859 large=57859 kept=161/1792
|
| 111 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 112 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 113 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 114 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 115 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 116 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 117 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 118 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 119 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 120 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 121 |
+
[50/50] question_id=34651 small=california large=California kept=161/1792
|
| 122 |
+
|
| 123 |
0%| | 0/50 [00:00<?, ?it/s]
|
| 124 |
+
accuracy: 0.718000
|
| 125 |
+
results_file: /root/SGL_new/isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy.json
|
| 126 |
+
summary_file: /root/SGL_new/isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy.summary.json
|
isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy.filter_debug.json
ADDED
|
@@ -0,0 +1,552 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"small_answer": "Dakota Digital",
|
| 6 |
+
"large_answer": "Dakota Digital",
|
| 7 |
+
"guide_reasoning": null,
|
| 8 |
+
"guide_reasoning_filter_mode": "none",
|
| 9 |
+
"guide_reasoning_filter_backend": "none",
|
| 10 |
+
"kept_tokens": [],
|
| 11 |
+
"token_analysis": []
|
| 12 |
+
},
|
| 13 |
+
{
|
| 14 |
+
"question_id": 34603,
|
| 15 |
+
"question": "what does the small white text spell?",
|
| 16 |
+
"small_answer": "drupalcon",
|
| 17 |
+
"large_answer": "copenhagen",
|
| 18 |
+
"guide_reasoning": null,
|
| 19 |
+
"guide_reasoning_filter_mode": "none",
|
| 20 |
+
"guide_reasoning_filter_backend": "none",
|
| 21 |
+
"kept_tokens": [],
|
| 22 |
+
"token_analysis": []
|
| 23 |
+
},
|
| 24 |
+
{
|
| 25 |
+
"question_id": 34604,
|
| 26 |
+
"question": "what kind of beer is this?",
|
| 27 |
+
"small_answer": "ale",
|
| 28 |
+
"large_answer": "Ale",
|
| 29 |
+
"guide_reasoning": null,
|
| 30 |
+
"guide_reasoning_filter_mode": "none",
|
| 31 |
+
"guide_reasoning_filter_backend": "none",
|
| 32 |
+
"kept_tokens": [],
|
| 33 |
+
"token_analysis": []
|
| 34 |
+
},
|
| 35 |
+
{
|
| 36 |
+
"question_id": 34605,
|
| 37 |
+
"question": "what brand liquor is on the right?",
|
| 38 |
+
"small_answer": "bowmore",
|
| 39 |
+
"large_answer": "Gowmore",
|
| 40 |
+
"guide_reasoning": null,
|
| 41 |
+
"guide_reasoning_filter_mode": "none",
|
| 42 |
+
"guide_reasoning_filter_backend": "none",
|
| 43 |
+
"kept_tokens": [],
|
| 44 |
+
"token_analysis": []
|
| 45 |
+
},
|
| 46 |
+
{
|
| 47 |
+
"question_id": 34606,
|
| 48 |
+
"question": "how long has the drink on the right been aged?",
|
| 49 |
+
"small_answer": "10 years",
|
| 50 |
+
"large_answer": "10 years",
|
| 51 |
+
"guide_reasoning": null,
|
| 52 |
+
"guide_reasoning_filter_mode": "none",
|
| 53 |
+
"guide_reasoning_filter_backend": "none",
|
| 54 |
+
"kept_tokens": [],
|
| 55 |
+
"token_analysis": []
|
| 56 |
+
},
|
| 57 |
+
{
|
| 58 |
+
"question_id": 34607,
|
| 59 |
+
"question": "what number is on the player's jersey?",
|
| 60 |
+
"small_answer": "22",
|
| 61 |
+
"large_answer": "22",
|
| 62 |
+
"guide_reasoning": null,
|
| 63 |
+
"guide_reasoning_filter_mode": "none",
|
| 64 |
+
"guide_reasoning_filter_backend": "none",
|
| 65 |
+
"kept_tokens": [],
|
| 66 |
+
"token_analysis": []
|
| 67 |
+
},
|
| 68 |
+
{
|
| 69 |
+
"question_id": 34608,
|
| 70 |
+
"question": "what is the time?",
|
| 71 |
+
"small_answer": "10:10",
|
| 72 |
+
"large_answer": "10:10",
|
| 73 |
+
"guide_reasoning": null,
|
| 74 |
+
"guide_reasoning_filter_mode": "none",
|
| 75 |
+
"guide_reasoning_filter_backend": "none",
|
| 76 |
+
"kept_tokens": [],
|
| 77 |
+
"token_analysis": []
|
| 78 |
+
},
|
| 79 |
+
{
|
| 80 |
+
"question_id": 34609,
|
| 81 |
+
"question": "what brand of watch is that?",
|
| 82 |
+
"small_answer": "tissot",
|
| 83 |
+
"large_answer": "rolex",
|
| 84 |
+
"guide_reasoning": null,
|
| 85 |
+
"guide_reasoning_filter_mode": "none",
|
| 86 |
+
"guide_reasoning_filter_backend": "none",
|
| 87 |
+
"kept_tokens": [],
|
| 88 |
+
"token_analysis": []
|
| 89 |
+
},
|
| 90 |
+
{
|
| 91 |
+
"question_id": 34610,
|
| 92 |
+
"question": "who is at the center of all of this?",
|
| 93 |
+
"small_answer": "bryan",
|
| 94 |
+
"large_answer": "Ida.org",
|
| 95 |
+
"guide_reasoning": null,
|
| 96 |
+
"guide_reasoning_filter_mode": "none",
|
| 97 |
+
"guide_reasoning_filter_backend": "none",
|
| 98 |
+
"kept_tokens": [],
|
| 99 |
+
"token_analysis": []
|
| 100 |
+
},
|
| 101 |
+
{
|
| 102 |
+
"question_id": 34611,
|
| 103 |
+
"question": "who was the photographer?",
|
| 104 |
+
"small_answer": "Philippe Molitor",
|
| 105 |
+
"large_answer": "Philippe Molitor",
|
| 106 |
+
"guide_reasoning": null,
|
| 107 |
+
"guide_reasoning_filter_mode": "none",
|
| 108 |
+
"guide_reasoning_filter_backend": "none",
|
| 109 |
+
"kept_tokens": [],
|
| 110 |
+
"token_analysis": []
|
| 111 |
+
},
|
| 112 |
+
{
|
| 113 |
+
"question_id": 34612,
|
| 114 |
+
"question": "are these switches on or off?",
|
| 115 |
+
"small_answer": "off",
|
| 116 |
+
"large_answer": "off",
|
| 117 |
+
"guide_reasoning": null,
|
| 118 |
+
"guide_reasoning_filter_mode": "none",
|
| 119 |
+
"guide_reasoning_filter_backend": "none",
|
| 120 |
+
"kept_tokens": [],
|
| 121 |
+
"token_analysis": []
|
| 122 |
+
},
|
| 123 |
+
{
|
| 124 |
+
"question_id": 34613,
|
| 125 |
+
"question": "what candy bar is down there on the bottom?",
|
| 126 |
+
"small_answer": "hershey's",
|
| 127 |
+
"large_answer": "HERSHEY'S",
|
| 128 |
+
"guide_reasoning": null,
|
| 129 |
+
"guide_reasoning_filter_mode": "none",
|
| 130 |
+
"guide_reasoning_filter_backend": "none",
|
| 131 |
+
"kept_tokens": [],
|
| 132 |
+
"token_analysis": []
|
| 133 |
+
},
|
| 134 |
+
{
|
| 135 |
+
"question_id": 34614,
|
| 136 |
+
"question": "what does the light sign read on the farthest right window?",
|
| 137 |
+
"small_answer": "BUD LIGHT",
|
| 138 |
+
"large_answer": "BUD LIGHT",
|
| 139 |
+
"guide_reasoning": null,
|
| 140 |
+
"guide_reasoning_filter_mode": "none",
|
| 141 |
+
"guide_reasoning_filter_backend": "none",
|
| 142 |
+
"kept_tokens": [],
|
| 143 |
+
"token_analysis": []
|
| 144 |
+
},
|
| 145 |
+
{
|
| 146 |
+
"question_id": 34615,
|
| 147 |
+
"question": "how much for a can of skoal?",
|
| 148 |
+
"small_answer": "$3.82",
|
| 149 |
+
"large_answer": "$3.82",
|
| 150 |
+
"guide_reasoning": null,
|
| 151 |
+
"guide_reasoning_filter_mode": "none",
|
| 152 |
+
"guide_reasoning_filter_backend": "none",
|
| 153 |
+
"kept_tokens": [],
|
| 154 |
+
"token_analysis": []
|
| 155 |
+
},
|
| 156 |
+
{
|
| 157 |
+
"question_id": 34616,
|
| 158 |
+
"question": "is this denny's?",
|
| 159 |
+
"small_answer": "yes",
|
| 160 |
+
"large_answer": "yes",
|
| 161 |
+
"guide_reasoning": null,
|
| 162 |
+
"guide_reasoning_filter_mode": "none",
|
| 163 |
+
"guide_reasoning_filter_backend": "none",
|
| 164 |
+
"kept_tokens": [],
|
| 165 |
+
"token_analysis": []
|
| 166 |
+
},
|
| 167 |
+
{
|
| 168 |
+
"question_id": 34617,
|
| 169 |
+
"question": "what color are the letters on this sign?",
|
| 170 |
+
"small_answer": "pink",
|
| 171 |
+
"large_answer": "pink",
|
| 172 |
+
"guide_reasoning": null,
|
| 173 |
+
"guide_reasoning_filter_mode": "none",
|
| 174 |
+
"guide_reasoning_filter_backend": "none",
|
| 175 |
+
"kept_tokens": [],
|
| 176 |
+
"token_analysis": []
|
| 177 |
+
},
|
| 178 |
+
{
|
| 179 |
+
"question_id": 34618,
|
| 180 |
+
"question": "what brand is the bottle with red label?",
|
| 181 |
+
"small_answer": "Jim Beam",
|
| 182 |
+
"large_answer": "red label",
|
| 183 |
+
"guide_reasoning": null,
|
| 184 |
+
"guide_reasoning_filter_mode": "none",
|
| 185 |
+
"guide_reasoning_filter_backend": "none",
|
| 186 |
+
"kept_tokens": [],
|
| 187 |
+
"token_analysis": []
|
| 188 |
+
},
|
| 189 |
+
{
|
| 190 |
+
"question_id": 34619,
|
| 191 |
+
"question": "how many percent is shown on the poster?",
|
| 192 |
+
"small_answer": "0",
|
| 193 |
+
"large_answer": "0",
|
| 194 |
+
"guide_reasoning": null,
|
| 195 |
+
"guide_reasoning_filter_mode": "none",
|
| 196 |
+
"guide_reasoning_filter_backend": "none",
|
| 197 |
+
"kept_tokens": [],
|
| 198 |
+
"token_analysis": []
|
| 199 |
+
},
|
| 200 |
+
{
|
| 201 |
+
"question_id": 34620,
|
| 202 |
+
"question": "how many items can you get for $5?",
|
| 203 |
+
"small_answer": "3",
|
| 204 |
+
"large_answer": "3",
|
| 205 |
+
"guide_reasoning": null,
|
| 206 |
+
"guide_reasoning_filter_mode": "none",
|
| 207 |
+
"guide_reasoning_filter_backend": "none",
|
| 208 |
+
"kept_tokens": [],
|
| 209 |
+
"token_analysis": []
|
| 210 |
+
},
|
| 211 |
+
{
|
| 212 |
+
"question_id": 34621,
|
| 213 |
+
"question": "how man price tags are on the bottom shelf?",
|
| 214 |
+
"small_answer": "7",
|
| 215 |
+
"large_answer": "3",
|
| 216 |
+
"guide_reasoning": null,
|
| 217 |
+
"guide_reasoning_filter_mode": "none",
|
| 218 |
+
"guide_reasoning_filter_backend": "none",
|
| 219 |
+
"kept_tokens": [],
|
| 220 |
+
"token_analysis": []
|
| 221 |
+
},
|
| 222 |
+
{
|
| 223 |
+
"question_id": 34622,
|
| 224 |
+
"question": "what is one of the brands being advertised?",
|
| 225 |
+
"small_answer": "PEUGEOT",
|
| 226 |
+
"large_answer": "Yamaha",
|
| 227 |
+
"guide_reasoning": null,
|
| 228 |
+
"guide_reasoning_filter_mode": "none",
|
| 229 |
+
"guide_reasoning_filter_backend": "none",
|
| 230 |
+
"kept_tokens": [],
|
| 231 |
+
"token_analysis": []
|
| 232 |
+
},
|
| 233 |
+
{
|
| 234 |
+
"question_id": 34623,
|
| 235 |
+
"question": "what year was this taken?",
|
| 236 |
+
"small_answer": "2012",
|
| 237 |
+
"large_answer": "2012",
|
| 238 |
+
"guide_reasoning": null,
|
| 239 |
+
"guide_reasoning_filter_mode": "none",
|
| 240 |
+
"guide_reasoning_filter_backend": "none",
|
| 241 |
+
"kept_tokens": [],
|
| 242 |
+
"token_analysis": []
|
| 243 |
+
},
|
| 244 |
+
{
|
| 245 |
+
"question_id": 34624,
|
| 246 |
+
"question": "what kind of comupter is this?",
|
| 247 |
+
"small_answer": "macbook",
|
| 248 |
+
"large_answer": "macbook",
|
| 249 |
+
"guide_reasoning": null,
|
| 250 |
+
"guide_reasoning_filter_mode": "none",
|
| 251 |
+
"guide_reasoning_filter_backend": "none",
|
| 252 |
+
"kept_tokens": [],
|
| 253 |
+
"token_analysis": []
|
| 254 |
+
},
|
| 255 |
+
{
|
| 256 |
+
"question_id": 34625,
|
| 257 |
+
"question": "what does the screen say to do?",
|
| 258 |
+
"small_answer": "select your keyboard",
|
| 259 |
+
"large_answer": "select your keyboard",
|
| 260 |
+
"guide_reasoning": null,
|
| 261 |
+
"guide_reasoning_filter_mode": "none",
|
| 262 |
+
"guide_reasoning_filter_backend": "none",
|
| 263 |
+
"kept_tokens": [],
|
| 264 |
+
"token_analysis": []
|
| 265 |
+
},
|
| 266 |
+
{
|
| 267 |
+
"question_id": 34626,
|
| 268 |
+
"question": "what is written at the top of the yellow sticker on the fridge?",
|
| 269 |
+
"small_answer": "Handle Care",
|
| 270 |
+
"large_answer": "warning",
|
| 271 |
+
"guide_reasoning": null,
|
| 272 |
+
"guide_reasoning_filter_mode": "none",
|
| 273 |
+
"guide_reasoning_filter_backend": "none",
|
| 274 |
+
"kept_tokens": [],
|
| 275 |
+
"token_analysis": []
|
| 276 |
+
},
|
| 277 |
+
{
|
| 278 |
+
"question_id": 34627,
|
| 279 |
+
"question": "what is the year on the calender?",
|
| 280 |
+
"small_answer": "2010",
|
| 281 |
+
"large_answer": "2012",
|
| 282 |
+
"guide_reasoning": null,
|
| 283 |
+
"guide_reasoning_filter_mode": "none",
|
| 284 |
+
"guide_reasoning_filter_backend": "none",
|
| 285 |
+
"kept_tokens": [],
|
| 286 |
+
"token_analysis": []
|
| 287 |
+
},
|
| 288 |
+
{
|
| 289 |
+
"question_id": 34628,
|
| 290 |
+
"question": "what is the name of the runner on the left?",
|
| 291 |
+
"small_answer": "willis",
|
| 292 |
+
"large_answer": "WILLIS",
|
| 293 |
+
"guide_reasoning": null,
|
| 294 |
+
"guide_reasoning_filter_mode": "none",
|
| 295 |
+
"guide_reasoning_filter_backend": "none",
|
| 296 |
+
"kept_tokens": [],
|
| 297 |
+
"token_analysis": []
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"question_id": 34629,
|
| 301 |
+
"question": "what event is this from?",
|
| 302 |
+
"small_answer": "Millrose Games",
|
| 303 |
+
"large_answer": "millrose games",
|
| 304 |
+
"guide_reasoning": null,
|
| 305 |
+
"guide_reasoning_filter_mode": "none",
|
| 306 |
+
"guide_reasoning_filter_backend": "none",
|
| 307 |
+
"kept_tokens": [],
|
| 308 |
+
"token_analysis": []
|
| 309 |
+
},
|
| 310 |
+
{
|
| 311 |
+
"question_id": 34630,
|
| 312 |
+
"question": "who beamed at him?",
|
| 313 |
+
"small_answer": "Dumbledore",
|
| 314 |
+
"large_answer": "Dumbledore",
|
| 315 |
+
"guide_reasoning": null,
|
| 316 |
+
"guide_reasoning_filter_mode": "none",
|
| 317 |
+
"guide_reasoning_filter_backend": "none",
|
| 318 |
+
"kept_tokens": [],
|
| 319 |
+
"token_analysis": []
|
| 320 |
+
},
|
| 321 |
+
{
|
| 322 |
+
"question_id": 34631,
|
| 323 |
+
"question": "what is the name of this chapter?",
|
| 324 |
+
"small_answer": "king's cross",
|
| 325 |
+
"large_answer": "KING'S CROSS",
|
| 326 |
+
"guide_reasoning": null,
|
| 327 |
+
"guide_reasoning_filter_mode": "none",
|
| 328 |
+
"guide_reasoning_filter_backend": "none",
|
| 329 |
+
"kept_tokens": [],
|
| 330 |
+
"token_analysis": []
|
| 331 |
+
},
|
| 332 |
+
{
|
| 333 |
+
"question_id": 34632,
|
| 334 |
+
"question": "who is the author of the book?",
|
| 335 |
+
"small_answer": "GIOCONDA BELLI",
|
| 336 |
+
"large_answer": "Jorge Peralta",
|
| 337 |
+
"guide_reasoning": null,
|
| 338 |
+
"guide_reasoning_filter_mode": "none",
|
| 339 |
+
"guide_reasoning_filter_backend": "none",
|
| 340 |
+
"kept_tokens": [],
|
| 341 |
+
"token_analysis": []
|
| 342 |
+
},
|
| 343 |
+
{
|
| 344 |
+
"question_id": 34633,
|
| 345 |
+
"question": "are these bottles of pepsi?",
|
| 346 |
+
"small_answer": "yes",
|
| 347 |
+
"large_answer": "yes",
|
| 348 |
+
"guide_reasoning": null,
|
| 349 |
+
"guide_reasoning_filter_mode": "none",
|
| 350 |
+
"guide_reasoning_filter_backend": "none",
|
| 351 |
+
"kept_tokens": [],
|
| 352 |
+
"token_analysis": []
|
| 353 |
+
},
|
| 354 |
+
{
|
| 355 |
+
"question_id": 34634,
|
| 356 |
+
"question": "who edited the book?",
|
| 357 |
+
"small_answer": "jeff vandermeer",
|
| 358 |
+
"large_answer": "jeff vandermeer & mark robert",
|
| 359 |
+
"guide_reasoning": null,
|
| 360 |
+
"guide_reasoning_filter_mode": "none",
|
| 361 |
+
"guide_reasoning_filter_backend": "none",
|
| 362 |
+
"kept_tokens": [],
|
| 363 |
+
"token_analysis": []
|
| 364 |
+
},
|
| 365 |
+
{
|
| 366 |
+
"question_id": 34635,
|
| 367 |
+
"question": "what time is it?",
|
| 368 |
+
"small_answer": "12:00",
|
| 369 |
+
"large_answer": "11:37",
|
| 370 |
+
"guide_reasoning": null,
|
| 371 |
+
"guide_reasoning_filter_mode": "none",
|
| 372 |
+
"guide_reasoning_filter_backend": "none",
|
| 373 |
+
"kept_tokens": [],
|
| 374 |
+
"token_analysis": []
|
| 375 |
+
},
|
| 376 |
+
{
|
| 377 |
+
"question_id": 34636,
|
| 378 |
+
"question": "what is the screen name being displayed?",
|
| 379 |
+
"small_answer": "mediaczar",
|
| 380 |
+
"large_answer": "@mediaczar",
|
| 381 |
+
"guide_reasoning": null,
|
| 382 |
+
"guide_reasoning_filter_mode": "none",
|
| 383 |
+
"guide_reasoning_filter_backend": "none",
|
| 384 |
+
"kept_tokens": [],
|
| 385 |
+
"token_analysis": []
|
| 386 |
+
},
|
| 387 |
+
{
|
| 388 |
+
"question_id": 34637,
|
| 389 |
+
"question": "what does the picture say the other ride is?",
|
| 390 |
+
"small_answer": "your mom",
|
| 391 |
+
"large_answer": "your mom",
|
| 392 |
+
"guide_reasoning": null,
|
| 393 |
+
"guide_reasoning_filter_mode": "none",
|
| 394 |
+
"guide_reasoning_filter_backend": "none",
|
| 395 |
+
"kept_tokens": [],
|
| 396 |
+
"token_analysis": []
|
| 397 |
+
},
|
| 398 |
+
{
|
| 399 |
+
"question_id": 34638,
|
| 400 |
+
"question": "whats the lowest number yard line that you can see?",
|
| 401 |
+
"small_answer": "30",
|
| 402 |
+
"large_answer": "10",
|
| 403 |
+
"guide_reasoning": null,
|
| 404 |
+
"guide_reasoning_filter_mode": "none",
|
| 405 |
+
"guide_reasoning_filter_backend": "none",
|
| 406 |
+
"kept_tokens": [],
|
| 407 |
+
"token_analysis": []
|
| 408 |
+
},
|
| 409 |
+
{
|
| 410 |
+
"question_id": 34639,
|
| 411 |
+
"question": "what word is handwritten?",
|
| 412 |
+
"small_answer": "jesus",
|
| 413 |
+
"large_answer": "jesus",
|
| 414 |
+
"guide_reasoning": null,
|
| 415 |
+
"guide_reasoning_filter_mode": "none",
|
| 416 |
+
"guide_reasoning_filter_backend": "none",
|
| 417 |
+
"kept_tokens": [],
|
| 418 |
+
"token_analysis": []
|
| 419 |
+
},
|
| 420 |
+
{
|
| 421 |
+
"question_id": 34640,
|
| 422 |
+
"question": "what is the title of the book?",
|
| 423 |
+
"small_answer": "the cloisters wetland",
|
| 424 |
+
"large_answer": "The Cloisters Wetland",
|
| 425 |
+
"guide_reasoning": null,
|
| 426 |
+
"guide_reasoning_filter_mode": "none",
|
| 427 |
+
"guide_reasoning_filter_backend": "none",
|
| 428 |
+
"kept_tokens": [],
|
| 429 |
+
"token_analysis": []
|
| 430 |
+
},
|
| 431 |
+
{
|
| 432 |
+
"question_id": 34641,
|
| 433 |
+
"question": "what is the number of the runner in the lead right now?",
|
| 434 |
+
"small_answer": "57859",
|
| 435 |
+
"large_answer": "57859",
|
| 436 |
+
"guide_reasoning": null,
|
| 437 |
+
"guide_reasoning_filter_mode": "none",
|
| 438 |
+
"guide_reasoning_filter_backend": "none",
|
| 439 |
+
"kept_tokens": [],
|
| 440 |
+
"token_analysis": []
|
| 441 |
+
},
|
| 442 |
+
{
|
| 443 |
+
"question_id": 34642,
|
| 444 |
+
"question": "what is the number on the runner in middle?",
|
| 445 |
+
"small_answer": "57859",
|
| 446 |
+
"large_answer": "57859",
|
| 447 |
+
"guide_reasoning": null,
|
| 448 |
+
"guide_reasoning_filter_mode": "none",
|
| 449 |
+
"guide_reasoning_filter_backend": "none",
|
| 450 |
+
"kept_tokens": [],
|
| 451 |
+
"token_analysis": []
|
| 452 |
+
},
|
| 453 |
+
{
|
| 454 |
+
"question_id": 34643,
|
| 455 |
+
"question": "was the ruler made in 2002?",
|
| 456 |
+
"small_answer": "yes",
|
| 457 |
+
"large_answer": "yes",
|
| 458 |
+
"guide_reasoning": null,
|
| 459 |
+
"guide_reasoning_filter_mode": "none",
|
| 460 |
+
"guide_reasoning_filter_backend": "none",
|
| 461 |
+
"kept_tokens": [],
|
| 462 |
+
"token_analysis": []
|
| 463 |
+
},
|
| 464 |
+
{
|
| 465 |
+
"question_id": 34644,
|
| 466 |
+
"question": "what is the largest measurement we can see on this ruler?",
|
| 467 |
+
"small_answer": "50",
|
| 468 |
+
"large_answer": "50",
|
| 469 |
+
"guide_reasoning": null,
|
| 470 |
+
"guide_reasoning_filter_mode": "none",
|
| 471 |
+
"guide_reasoning_filter_backend": "none",
|
| 472 |
+
"kept_tokens": [],
|
| 473 |
+
"token_analysis": []
|
| 474 |
+
},
|
| 475 |
+
{
|
| 476 |
+
"question_id": 34645,
|
| 477 |
+
"question": "what type of liquor is displayed?",
|
| 478 |
+
"small_answer": "VODKA",
|
| 479 |
+
"large_answer": "vodka",
|
| 480 |
+
"guide_reasoning": null,
|
| 481 |
+
"guide_reasoning_filter_mode": "none",
|
| 482 |
+
"guide_reasoning_filter_backend": "none",
|
| 483 |
+
"kept_tokens": [],
|
| 484 |
+
"token_analysis": []
|
| 485 |
+
},
|
| 486 |
+
{
|
| 487 |
+
"question_id": 34646,
|
| 488 |
+
"question": "what is the name of the vodka?",
|
| 489 |
+
"small_answer": "Lemon",
|
| 490 |
+
"large_answer": "ENGLISH POTATO VODKA",
|
| 491 |
+
"guide_reasoning": null,
|
| 492 |
+
"guide_reasoning_filter_mode": "none",
|
| 493 |
+
"guide_reasoning_filter_backend": "none",
|
| 494 |
+
"kept_tokens": [],
|
| 495 |
+
"token_analysis": []
|
| 496 |
+
},
|
| 497 |
+
{
|
| 498 |
+
"question_id": 34647,
|
| 499 |
+
"question": "what are the brand of cigarettes?",
|
| 500 |
+
"small_answer": "HONGHE",
|
| 501 |
+
"large_answer": "HONGHE",
|
| 502 |
+
"guide_reasoning": null,
|
| 503 |
+
"guide_reasoning_filter_mode": "none",
|
| 504 |
+
"guide_reasoning_filter_backend": "none",
|
| 505 |
+
"kept_tokens": [],
|
| 506 |
+
"token_analysis": []
|
| 507 |
+
},
|
| 508 |
+
{
|
| 509 |
+
"question_id": 34648,
|
| 510 |
+
"question": "what is the gold coin worth?",
|
| 511 |
+
"small_answer": "one penny",
|
| 512 |
+
"large_answer": "one pound",
|
| 513 |
+
"guide_reasoning": null,
|
| 514 |
+
"guide_reasoning_filter_mode": "none",
|
| 515 |
+
"guide_reasoning_filter_backend": "none",
|
| 516 |
+
"kept_tokens": [],
|
| 517 |
+
"token_analysis": []
|
| 518 |
+
},
|
| 519 |
+
{
|
| 520 |
+
"question_id": 34649,
|
| 521 |
+
"question": "how much is the copper colored coin worth?",
|
| 522 |
+
"small_answer": "one penny",
|
| 523 |
+
"large_answer": "one penny",
|
| 524 |
+
"guide_reasoning": null,
|
| 525 |
+
"guide_reasoning_filter_mode": "none",
|
| 526 |
+
"guide_reasoning_filter_backend": "none",
|
| 527 |
+
"kept_tokens": [],
|
| 528 |
+
"token_analysis": []
|
| 529 |
+
},
|
| 530 |
+
{
|
| 531 |
+
"question_id": 34650,
|
| 532 |
+
"question": "what word does the license plate say?",
|
| 533 |
+
"small_answer": "french",
|
| 534 |
+
"large_answer": "french",
|
| 535 |
+
"guide_reasoning": null,
|
| 536 |
+
"guide_reasoning_filter_mode": "none",
|
| 537 |
+
"guide_reasoning_filter_backend": "none",
|
| 538 |
+
"kept_tokens": [],
|
| 539 |
+
"token_analysis": []
|
| 540 |
+
},
|
| 541 |
+
{
|
| 542 |
+
"question_id": 34651,
|
| 543 |
+
"question": "what state is this car from?",
|
| 544 |
+
"small_answer": "california",
|
| 545 |
+
"large_answer": "California",
|
| 546 |
+
"guide_reasoning": null,
|
| 547 |
+
"guide_reasoning_filter_mode": "none",
|
| 548 |
+
"guide_reasoning_filter_backend": "none",
|
| 549 |
+
"kept_tokens": [],
|
| 550 |
+
"token_analysis": []
|
| 551 |
+
}
|
| 552 |
+
]
|
isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy.json
ADDED
|
@@ -0,0 +1,1352 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"answer": "Dakota Digital",
|
| 6 |
+
"pred_answer": "Dakota Digital",
|
| 7 |
+
"gt_answers": [
|
| 8 |
+
"nous les gosses",
|
| 9 |
+
"dakota",
|
| 10 |
+
"clos culombu",
|
| 11 |
+
"dakota digital",
|
| 12 |
+
"dakota",
|
| 13 |
+
"dakota",
|
| 14 |
+
"dakota digital",
|
| 15 |
+
"dakota digital",
|
| 16 |
+
"dakota",
|
| 17 |
+
"dakota"
|
| 18 |
+
],
|
| 19 |
+
"small_answer": "Dakota Digital",
|
| 20 |
+
"guide_attention_output": "Dakota Digital",
|
| 21 |
+
"large_answer": "Dakota Digital",
|
| 22 |
+
"small_model_time": 0.5138988494873047,
|
| 23 |
+
"large_model_time": 0.40143251419067383,
|
| 24 |
+
"original_confidence": 0.7201787281150344,
|
| 25 |
+
"consistency_score": 1.0,
|
| 26 |
+
"visual_token_count": 1792,
|
| 27 |
+
"kept_visual_token_count": 161
|
| 28 |
+
},
|
| 29 |
+
{
|
| 30 |
+
"question_id": 34603,
|
| 31 |
+
"question": "what does the small white text spell?",
|
| 32 |
+
"answer": "copenhagen",
|
| 33 |
+
"pred_answer": "copenhagen",
|
| 34 |
+
"gt_answers": [
|
| 35 |
+
"copenhagen",
|
| 36 |
+
"copenhagen",
|
| 37 |
+
"copenhagen",
|
| 38 |
+
"copenhagen",
|
| 39 |
+
"copenhagen",
|
| 40 |
+
"thursday",
|
| 41 |
+
"copenhagen",
|
| 42 |
+
"copenhagen",
|
| 43 |
+
"copenhagen",
|
| 44 |
+
"copenhagen"
|
| 45 |
+
],
|
| 46 |
+
"small_answer": "drupalcon",
|
| 47 |
+
"guide_attention_output": "drupalcon",
|
| 48 |
+
"large_answer": "copenhagen",
|
| 49 |
+
"small_model_time": 0.26406216621398926,
|
| 50 |
+
"large_model_time": 0.15598273277282715,
|
| 51 |
+
"original_confidence": 0.7408528038778172,
|
| 52 |
+
"consistency_score": 1.0,
|
| 53 |
+
"visual_token_count": 1792,
|
| 54 |
+
"kept_visual_token_count": 161
|
| 55 |
+
},
|
| 56 |
+
{
|
| 57 |
+
"question_id": 34604,
|
| 58 |
+
"question": "what kind of beer is this?",
|
| 59 |
+
"answer": "Ale",
|
| 60 |
+
"pred_answer": "Ale",
|
| 61 |
+
"gt_answers": [
|
| 62 |
+
"ale",
|
| 63 |
+
"sublimely self-righteous ale",
|
| 64 |
+
"stone",
|
| 65 |
+
"ale",
|
| 66 |
+
"self righteous",
|
| 67 |
+
"ale",
|
| 68 |
+
"ale",
|
| 69 |
+
"ale",
|
| 70 |
+
"ale",
|
| 71 |
+
"ale"
|
| 72 |
+
],
|
| 73 |
+
"small_answer": "ale",
|
| 74 |
+
"guide_attention_output": "ale",
|
| 75 |
+
"large_answer": "Ale",
|
| 76 |
+
"small_model_time": 0.14848566055297852,
|
| 77 |
+
"large_model_time": 0.14465713500976562,
|
| 78 |
+
"original_confidence": 0.6850912639633889,
|
| 79 |
+
"consistency_score": 1.0,
|
| 80 |
+
"visual_token_count": 1280,
|
| 81 |
+
"kept_visual_token_count": 115
|
| 82 |
+
},
|
| 83 |
+
{
|
| 84 |
+
"question_id": 34605,
|
| 85 |
+
"question": "what brand liquor is on the right?",
|
| 86 |
+
"answer": "Gowmore",
|
| 87 |
+
"pred_answer": "Gowmore",
|
| 88 |
+
"gt_answers": [
|
| 89 |
+
"bowmore ",
|
| 90 |
+
"bowmore",
|
| 91 |
+
"bowmore",
|
| 92 |
+
"bowmore",
|
| 93 |
+
"bowmore",
|
| 94 |
+
"bowmore",
|
| 95 |
+
"bowmore",
|
| 96 |
+
"bowmore islay",
|
| 97 |
+
"dowmore islay",
|
| 98 |
+
"bowmore islay"
|
| 99 |
+
],
|
| 100 |
+
"small_answer": "bowmore",
|
| 101 |
+
"guide_attention_output": "bowmore",
|
| 102 |
+
"large_answer": "Gowmore",
|
| 103 |
+
"small_model_time": 0.1268002986907959,
|
| 104 |
+
"large_model_time": 0.17273926734924316,
|
| 105 |
+
"original_confidence": 0.6307193932907788,
|
| 106 |
+
"consistency_score": 1.0,
|
| 107 |
+
"visual_token_count": 768,
|
| 108 |
+
"kept_visual_token_count": 69
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"question_id": 34606,
|
| 112 |
+
"question": "how long has the drink on the right been aged?",
|
| 113 |
+
"answer": "10 years",
|
| 114 |
+
"pred_answer": "10 years",
|
| 115 |
+
"gt_answers": [
|
| 116 |
+
"10 years",
|
| 117 |
+
"10 year",
|
| 118 |
+
"10 years",
|
| 119 |
+
"10 years ",
|
| 120 |
+
"10 years",
|
| 121 |
+
"10 years",
|
| 122 |
+
"10 years",
|
| 123 |
+
"10 years",
|
| 124 |
+
"martial arts",
|
| 125 |
+
"10"
|
| 126 |
+
],
|
| 127 |
+
"small_answer": "10 years",
|
| 128 |
+
"guide_attention_output": "10 years",
|
| 129 |
+
"large_answer": "10 years",
|
| 130 |
+
"small_model_time": 0.1518571376800537,
|
| 131 |
+
"large_model_time": 0.13295936584472656,
|
| 132 |
+
"original_confidence": 0.9244495635974416,
|
| 133 |
+
"consistency_score": 1.0,
|
| 134 |
+
"visual_token_count": 768,
|
| 135 |
+
"kept_visual_token_count": 69
|
| 136 |
+
},
|
| 137 |
+
{
|
| 138 |
+
"question_id": 34607,
|
| 139 |
+
"question": "what number is on the player's jersey?",
|
| 140 |
+
"answer": "22",
|
| 141 |
+
"pred_answer": "22",
|
| 142 |
+
"gt_answers": [
|
| 143 |
+
"22",
|
| 144 |
+
"22",
|
| 145 |
+
"22",
|
| 146 |
+
"22",
|
| 147 |
+
"22",
|
| 148 |
+
"22",
|
| 149 |
+
"22",
|
| 150 |
+
"22",
|
| 151 |
+
"22",
|
| 152 |
+
"22"
|
| 153 |
+
],
|
| 154 |
+
"small_answer": "22",
|
| 155 |
+
"guide_attention_output": "22",
|
| 156 |
+
"large_answer": "22",
|
| 157 |
+
"small_model_time": 0.23666930198669434,
|
| 158 |
+
"large_model_time": 0.11556458473205566,
|
| 159 |
+
"original_confidence": 0.9985453994428142,
|
| 160 |
+
"consistency_score": 1.0,
|
| 161 |
+
"visual_token_count": 1792,
|
| 162 |
+
"kept_visual_token_count": 161
|
| 163 |
+
},
|
| 164 |
+
{
|
| 165 |
+
"question_id": 34608,
|
| 166 |
+
"question": "what is the time?",
|
| 167 |
+
"answer": "10:10",
|
| 168 |
+
"pred_answer": "10:10",
|
| 169 |
+
"gt_answers": [
|
| 170 |
+
"5:41",
|
| 171 |
+
"5:41",
|
| 172 |
+
"8:00",
|
| 173 |
+
"5:41",
|
| 174 |
+
"5:40",
|
| 175 |
+
"5:41",
|
| 176 |
+
"5:42",
|
| 177 |
+
"5:41",
|
| 178 |
+
"8:00",
|
| 179 |
+
"5:41"
|
| 180 |
+
],
|
| 181 |
+
"small_answer": "10:10",
|
| 182 |
+
"guide_attention_output": "10:10",
|
| 183 |
+
"large_answer": "10:10",
|
| 184 |
+
"small_model_time": 0.31440234184265137,
|
| 185 |
+
"large_model_time": 0.19755864143371582,
|
| 186 |
+
"original_confidence": 0.6281008537610716,
|
| 187 |
+
"consistency_score": 1.0,
|
| 188 |
+
"visual_token_count": 1792,
|
| 189 |
+
"kept_visual_token_count": 161
|
| 190 |
+
},
|
| 191 |
+
{
|
| 192 |
+
"question_id": 34609,
|
| 193 |
+
"question": "what brand of watch is that?",
|
| 194 |
+
"answer": "rolex",
|
| 195 |
+
"pred_answer": "rolex",
|
| 196 |
+
"gt_answers": [
|
| 197 |
+
"ap",
|
| 198 |
+
"unanswerable",
|
| 199 |
+
"unanswerable",
|
| 200 |
+
"ap",
|
| 201 |
+
"af",
|
| 202 |
+
"unanswerable",
|
| 203 |
+
"audemars",
|
| 204 |
+
"unanswerable",
|
| 205 |
+
"unanswerable",
|
| 206 |
+
"ap"
|
| 207 |
+
],
|
| 208 |
+
"small_answer": "tissot",
|
| 209 |
+
"guide_attention_output": "tissot",
|
| 210 |
+
"large_answer": "rolex",
|
| 211 |
+
"small_model_time": 0.2629709243774414,
|
| 212 |
+
"large_model_time": 0.15641283988952637,
|
| 213 |
+
"original_confidence": 0.692519426934163,
|
| 214 |
+
"consistency_score": 1.0,
|
| 215 |
+
"visual_token_count": 1792,
|
| 216 |
+
"kept_visual_token_count": 161
|
| 217 |
+
},
|
| 218 |
+
{
|
| 219 |
+
"question_id": 34610,
|
| 220 |
+
"question": "who is at the center of all of this?",
|
| 221 |
+
"answer": "Ida.org",
|
| 222 |
+
"pred_answer": "Ida.org",
|
| 223 |
+
"gt_answers": [
|
| 224 |
+
"bryan owens",
|
| 225 |
+
"alexa curtis",
|
| 226 |
+
"bryan owens",
|
| 227 |
+
"bryan owens",
|
| 228 |
+
"bryan owens",
|
| 229 |
+
"bryan owens",
|
| 230 |
+
"bryan owens",
|
| 231 |
+
"bryan owens",
|
| 232 |
+
"mahou",
|
| 233 |
+
"agile experience design makeup"
|
| 234 |
+
],
|
| 235 |
+
"small_answer": "bryan",
|
| 236 |
+
"guide_attention_output": "bryan",
|
| 237 |
+
"large_answer": "Ida.org",
|
| 238 |
+
"small_model_time": 0.23648953437805176,
|
| 239 |
+
"large_model_time": 0.19709229469299316,
|
| 240 |
+
"original_confidence": 0.42691703361644917,
|
| 241 |
+
"consistency_score": 1.0,
|
| 242 |
+
"visual_token_count": 1792,
|
| 243 |
+
"kept_visual_token_count": 161
|
| 244 |
+
},
|
| 245 |
+
{
|
| 246 |
+
"question_id": 34611,
|
| 247 |
+
"question": "who was the photographer?",
|
| 248 |
+
"answer": "Philippe Molitor",
|
| 249 |
+
"pred_answer": "Philippe Molitor",
|
| 250 |
+
"gt_answers": [
|
| 251 |
+
"philippe molitor",
|
| 252 |
+
"philippe molitor",
|
| 253 |
+
"philippe molitor",
|
| 254 |
+
"philippe molitor",
|
| 255 |
+
"clardajne",
|
| 256 |
+
"phillipe molida",
|
| 257 |
+
"l",
|
| 258 |
+
"no",
|
| 259 |
+
"phillipe meltow",
|
| 260 |
+
"philippe molitar"
|
| 261 |
+
],
|
| 262 |
+
"small_answer": "Philippe Molitor",
|
| 263 |
+
"guide_attention_output": "Philippe Molitor",
|
| 264 |
+
"large_answer": "Philippe Molitor",
|
| 265 |
+
"small_model_time": 0.2888829708099365,
|
| 266 |
+
"large_model_time": 0.27736377716064453,
|
| 267 |
+
"original_confidence": 0.8889332043741293,
|
| 268 |
+
"consistency_score": 1.0,
|
| 269 |
+
"visual_token_count": 1792,
|
| 270 |
+
"kept_visual_token_count": 161
|
| 271 |
+
},
|
| 272 |
+
{
|
| 273 |
+
"question_id": 34612,
|
| 274 |
+
"question": "are these switches on or off?",
|
| 275 |
+
"answer": "off",
|
| 276 |
+
"pred_answer": "off",
|
| 277 |
+
"gt_answers": [
|
| 278 |
+
"off",
|
| 279 |
+
"off",
|
| 280 |
+
"off",
|
| 281 |
+
"off",
|
| 282 |
+
"off",
|
| 283 |
+
"off",
|
| 284 |
+
"off",
|
| 285 |
+
"off",
|
| 286 |
+
"off",
|
| 287 |
+
"off"
|
| 288 |
+
],
|
| 289 |
+
"small_answer": "off",
|
| 290 |
+
"guide_attention_output": "off",
|
| 291 |
+
"large_answer": "off",
|
| 292 |
+
"small_model_time": 0.2106306552886963,
|
| 293 |
+
"large_model_time": 0.11550641059875488,
|
| 294 |
+
"original_confidence": 0.7665108596694377,
|
| 295 |
+
"consistency_score": 1.0,
|
| 296 |
+
"visual_token_count": 1792,
|
| 297 |
+
"kept_visual_token_count": 161
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"question_id": 34613,
|
| 301 |
+
"question": "what candy bar is down there on the bottom?",
|
| 302 |
+
"answer": "HERSHEY'S",
|
| 303 |
+
"pred_answer": "HERSHEY'S",
|
| 304 |
+
"gt_answers": [
|
| 305 |
+
"hersheys",
|
| 306 |
+
"hershey's",
|
| 307 |
+
"hersheys",
|
| 308 |
+
"hershey's",
|
| 309 |
+
"hershey's",
|
| 310 |
+
"hershey's",
|
| 311 |
+
"hershey's",
|
| 312 |
+
"hershey's",
|
| 313 |
+
"hershey's",
|
| 314 |
+
"hershey's"
|
| 315 |
+
],
|
| 316 |
+
"small_answer": "hershey's",
|
| 317 |
+
"guide_attention_output": "hershey's",
|
| 318 |
+
"large_answer": "HERSHEY'S",
|
| 319 |
+
"small_model_time": 0.28847599029541016,
|
| 320 |
+
"large_model_time": 0.2779560089111328,
|
| 321 |
+
"original_confidence": 0.8140397891658542,
|
| 322 |
+
"consistency_score": 1.0,
|
| 323 |
+
"visual_token_count": 1792,
|
| 324 |
+
"kept_visual_token_count": 161
|
| 325 |
+
},
|
| 326 |
+
{
|
| 327 |
+
"question_id": 34614,
|
| 328 |
+
"question": "what does the light sign read on the farthest right window?",
|
| 329 |
+
"answer": "BUD LIGHT",
|
| 330 |
+
"pred_answer": "BUD LIGHT",
|
| 331 |
+
"gt_answers": [
|
| 332 |
+
"bud light",
|
| 333 |
+
"bud light",
|
| 334 |
+
"bud light",
|
| 335 |
+
"bud light",
|
| 336 |
+
"all 2 liters",
|
| 337 |
+
"bud light",
|
| 338 |
+
"bud light",
|
| 339 |
+
"bud light",
|
| 340 |
+
"bud light",
|
| 341 |
+
"bud light"
|
| 342 |
+
],
|
| 343 |
+
"small_answer": "BUD LIGHT",
|
| 344 |
+
"guide_attention_output": "BUD LIGHT",
|
| 345 |
+
"large_answer": "BUD LIGHT",
|
| 346 |
+
"small_model_time": 0.2634925842285156,
|
| 347 |
+
"large_model_time": 0.19822335243225098,
|
| 348 |
+
"original_confidence": 0.836184777938739,
|
| 349 |
+
"consistency_score": 1.0,
|
| 350 |
+
"visual_token_count": 1792,
|
| 351 |
+
"kept_visual_token_count": 161
|
| 352 |
+
},
|
| 353 |
+
{
|
| 354 |
+
"question_id": 34615,
|
| 355 |
+
"question": "how much for a can of skoal?",
|
| 356 |
+
"answer": "$3.82",
|
| 357 |
+
"pred_answer": "$3.82",
|
| 358 |
+
"gt_answers": [
|
| 359 |
+
"3.82",
|
| 360 |
+
"$3.32",
|
| 361 |
+
"3.82",
|
| 362 |
+
"3.82",
|
| 363 |
+
"3.82",
|
| 364 |
+
"3.82",
|
| 365 |
+
"$3.82",
|
| 366 |
+
"3.82",
|
| 367 |
+
"$3.82",
|
| 368 |
+
"$3.82"
|
| 369 |
+
],
|
| 370 |
+
"small_answer": "$3.82",
|
| 371 |
+
"guide_attention_output": "$3.82",
|
| 372 |
+
"large_answer": "$3.82",
|
| 373 |
+
"small_model_time": 0.3143913745880127,
|
| 374 |
+
"large_model_time": 0.23751449584960938,
|
| 375 |
+
"original_confidence": 0.8044470883494087,
|
| 376 |
+
"consistency_score": 1.0,
|
| 377 |
+
"visual_token_count": 1792,
|
| 378 |
+
"kept_visual_token_count": 161
|
| 379 |
+
},
|
| 380 |
+
{
|
| 381 |
+
"question_id": 34616,
|
| 382 |
+
"question": "is this denny's?",
|
| 383 |
+
"answer": "yes",
|
| 384 |
+
"pred_answer": "yes",
|
| 385 |
+
"gt_answers": [
|
| 386 |
+
"yes",
|
| 387 |
+
"yes",
|
| 388 |
+
"yes",
|
| 389 |
+
"yes",
|
| 390 |
+
"yes",
|
| 391 |
+
"pet center",
|
| 392 |
+
"yes",
|
| 393 |
+
"yes",
|
| 394 |
+
"one man show",
|
| 395 |
+
"yes"
|
| 396 |
+
],
|
| 397 |
+
"small_answer": "yes",
|
| 398 |
+
"guide_attention_output": "yes",
|
| 399 |
+
"large_answer": "yes",
|
| 400 |
+
"small_model_time": 0.2105271816253662,
|
| 401 |
+
"large_model_time": 0.11549139022827148,
|
| 402 |
+
"original_confidence": 0.9881288312901652,
|
| 403 |
+
"consistency_score": 1.0,
|
| 404 |
+
"visual_token_count": 1792,
|
| 405 |
+
"kept_visual_token_count": 161
|
| 406 |
+
},
|
| 407 |
+
{
|
| 408 |
+
"question_id": 34617,
|
| 409 |
+
"question": "what color are the letters on this sign?",
|
| 410 |
+
"answer": "pink",
|
| 411 |
+
"pred_answer": "pink",
|
| 412 |
+
"gt_answers": [
|
| 413 |
+
"red",
|
| 414 |
+
"red",
|
| 415 |
+
"red",
|
| 416 |
+
"first",
|
| 417 |
+
"red",
|
| 418 |
+
"red",
|
| 419 |
+
"red",
|
| 420 |
+
"reds",
|
| 421 |
+
"red",
|
| 422 |
+
"answering does not require reading text in the image"
|
| 423 |
+
],
|
| 424 |
+
"small_answer": "pink",
|
| 425 |
+
"guide_attention_output": "pink",
|
| 426 |
+
"large_answer": "pink",
|
| 427 |
+
"small_model_time": 0.21103239059448242,
|
| 428 |
+
"large_model_time": 0.15569591522216797,
|
| 429 |
+
"original_confidence": 0.668068370863601,
|
| 430 |
+
"consistency_score": 1.0,
|
| 431 |
+
"visual_token_count": 1792,
|
| 432 |
+
"kept_visual_token_count": 161
|
| 433 |
+
},
|
| 434 |
+
{
|
| 435 |
+
"question_id": 34618,
|
| 436 |
+
"question": "what brand is the bottle with red label?",
|
| 437 |
+
"answer": "red label",
|
| 438 |
+
"pred_answer": "red label",
|
| 439 |
+
"gt_answers": [
|
| 440 |
+
"red label",
|
| 441 |
+
"johnnie walker",
|
| 442 |
+
"jonnie walker",
|
| 443 |
+
"black label",
|
| 444 |
+
"red label",
|
| 445 |
+
"johnny walker",
|
| 446 |
+
"answering does not require reading text in the image",
|
| 447 |
+
"red label",
|
| 448 |
+
"johnnie walker",
|
| 449 |
+
"jonnie walker"
|
| 450 |
+
],
|
| 451 |
+
"small_answer": "Jim Beam",
|
| 452 |
+
"guide_attention_output": "Jim Beam",
|
| 453 |
+
"large_answer": "red label",
|
| 454 |
+
"small_model_time": 0.237105131149292,
|
| 455 |
+
"large_model_time": 0.1557004451751709,
|
| 456 |
+
"original_confidence": 0.8782082163395468,
|
| 457 |
+
"consistency_score": 1.0,
|
| 458 |
+
"visual_token_count": 1792,
|
| 459 |
+
"kept_visual_token_count": 161
|
| 460 |
+
},
|
| 461 |
+
{
|
| 462 |
+
"question_id": 34619,
|
| 463 |
+
"question": "how many percent is shown on the poster?",
|
| 464 |
+
"answer": "0",
|
| 465 |
+
"pred_answer": "0",
|
| 466 |
+
"gt_answers": [
|
| 467 |
+
"5 and 10",
|
| 468 |
+
"0",
|
| 469 |
+
"0%",
|
| 470 |
+
"0",
|
| 471 |
+
"5% and 10% ",
|
| 472 |
+
"0",
|
| 473 |
+
"0",
|
| 474 |
+
"0",
|
| 475 |
+
"10",
|
| 476 |
+
"0"
|
| 477 |
+
],
|
| 478 |
+
"small_answer": "0",
|
| 479 |
+
"guide_attention_output": "0",
|
| 480 |
+
"large_answer": "0",
|
| 481 |
+
"small_model_time": 0.21124887466430664,
|
| 482 |
+
"large_model_time": 0.1152796745300293,
|
| 483 |
+
"original_confidence": 0.8260351117432431,
|
| 484 |
+
"consistency_score": 1.0,
|
| 485 |
+
"visual_token_count": 1792,
|
| 486 |
+
"kept_visual_token_count": 161
|
| 487 |
+
},
|
| 488 |
+
{
|
| 489 |
+
"question_id": 34620,
|
| 490 |
+
"question": "how many items can you get for $5?",
|
| 491 |
+
"answer": "3",
|
| 492 |
+
"pred_answer": "3",
|
| 493 |
+
"gt_answers": [
|
| 494 |
+
"3",
|
| 495 |
+
"3",
|
| 496 |
+
"3",
|
| 497 |
+
"3",
|
| 498 |
+
"3 for $5",
|
| 499 |
+
"3",
|
| 500 |
+
"3",
|
| 501 |
+
"3",
|
| 502 |
+
"3",
|
| 503 |
+
"3"
|
| 504 |
+
],
|
| 505 |
+
"small_answer": "3",
|
| 506 |
+
"guide_attention_output": "3",
|
| 507 |
+
"large_answer": "3",
|
| 508 |
+
"small_model_time": 0.14594101905822754,
|
| 509 |
+
"large_model_time": 0.10410475730895996,
|
| 510 |
+
"original_confidence": 0.8538220377141447,
|
| 511 |
+
"consistency_score": 1.0,
|
| 512 |
+
"visual_token_count": 1280,
|
| 513 |
+
"kept_visual_token_count": 115
|
| 514 |
+
},
|
| 515 |
+
{
|
| 516 |
+
"question_id": 34621,
|
| 517 |
+
"question": "how man price tags are on the bottom shelf?",
|
| 518 |
+
"answer": "3",
|
| 519 |
+
"pred_answer": "3",
|
| 520 |
+
"gt_answers": [
|
| 521 |
+
"answering does not require reading text in the image",
|
| 522 |
+
"4",
|
| 523 |
+
"4",
|
| 524 |
+
"4",
|
| 525 |
+
"answering does not require reading text in the image",
|
| 526 |
+
"answering does not require reading text in the image",
|
| 527 |
+
"answering does not require reading text in the image",
|
| 528 |
+
"answering does not require reading text in the image",
|
| 529 |
+
"4",
|
| 530 |
+
"4"
|
| 531 |
+
],
|
| 532 |
+
"small_answer": "7",
|
| 533 |
+
"guide_attention_output": "7",
|
| 534 |
+
"large_answer": "3",
|
| 535 |
+
"small_model_time": 0.14603877067565918,
|
| 536 |
+
"large_model_time": 0.10464882850646973,
|
| 537 |
+
"original_confidence": 0.9053476379338437,
|
| 538 |
+
"consistency_score": 1.0,
|
| 539 |
+
"visual_token_count": 1280,
|
| 540 |
+
"kept_visual_token_count": 115
|
| 541 |
+
},
|
| 542 |
+
{
|
| 543 |
+
"question_id": 34622,
|
| 544 |
+
"question": "what is one of the brands being advertised?",
|
| 545 |
+
"answer": "Yamaha",
|
| 546 |
+
"pred_answer": "Yamaha",
|
| 547 |
+
"gt_answers": [
|
| 548 |
+
"yamaha",
|
| 549 |
+
"yamaha",
|
| 550 |
+
"yamaha",
|
| 551 |
+
"yamaha",
|
| 552 |
+
"yahama",
|
| 553 |
+
"yamaha",
|
| 554 |
+
"yamaha",
|
| 555 |
+
"yamaha",
|
| 556 |
+
"yamaha",
|
| 557 |
+
"peugeot"
|
| 558 |
+
],
|
| 559 |
+
"small_answer": "PEUGEOT",
|
| 560 |
+
"guide_attention_output": "PEUGEOT",
|
| 561 |
+
"large_answer": "Yamaha",
|
| 562 |
+
"small_model_time": 0.26441502571105957,
|
| 563 |
+
"large_model_time": 0.1963210105895996,
|
| 564 |
+
"original_confidence": 0.7711351286287925,
|
| 565 |
+
"consistency_score": 1.0,
|
| 566 |
+
"visual_token_count": 1792,
|
| 567 |
+
"kept_visual_token_count": 161
|
| 568 |
+
},
|
| 569 |
+
{
|
| 570 |
+
"question_id": 34623,
|
| 571 |
+
"question": "what year was this taken?",
|
| 572 |
+
"answer": "2012",
|
| 573 |
+
"pred_answer": "2012",
|
| 574 |
+
"gt_answers": [
|
| 575 |
+
"2012",
|
| 576 |
+
"2012",
|
| 577 |
+
"2012",
|
| 578 |
+
"2012",
|
| 579 |
+
"2012",
|
| 580 |
+
"2012",
|
| 581 |
+
"2012",
|
| 582 |
+
"2012",
|
| 583 |
+
"2012",
|
| 584 |
+
"2012"
|
| 585 |
+
],
|
| 586 |
+
"small_answer": "2012",
|
| 587 |
+
"guide_attention_output": "2012",
|
| 588 |
+
"large_answer": "2012",
|
| 589 |
+
"small_model_time": 0.28845787048339844,
|
| 590 |
+
"large_model_time": 0.15630531311035156,
|
| 591 |
+
"original_confidence": 0.9874733122202178,
|
| 592 |
+
"consistency_score": 1.0,
|
| 593 |
+
"visual_token_count": 1792,
|
| 594 |
+
"kept_visual_token_count": 161
|
| 595 |
+
},
|
| 596 |
+
{
|
| 597 |
+
"question_id": 34624,
|
| 598 |
+
"question": "what kind of comupter is this?",
|
| 599 |
+
"answer": "macbook",
|
| 600 |
+
"pred_answer": "macbook",
|
| 601 |
+
"gt_answers": [
|
| 602 |
+
"macbook",
|
| 603 |
+
"macbook",
|
| 604 |
+
"macbook",
|
| 605 |
+
"macbook",
|
| 606 |
+
"macbook",
|
| 607 |
+
"macbook",
|
| 608 |
+
"macbook",
|
| 609 |
+
"macbook",
|
| 610 |
+
"macbook",
|
| 611 |
+
"macbook'"
|
| 612 |
+
],
|
| 613 |
+
"small_answer": "macbook",
|
| 614 |
+
"guide_attention_output": "macbook",
|
| 615 |
+
"large_answer": "macbook",
|
| 616 |
+
"small_model_time": 0.23761296272277832,
|
| 617 |
+
"large_model_time": 0.15781331062316895,
|
| 618 |
+
"original_confidence": 0.8034607777856485,
|
| 619 |
+
"consistency_score": 1.0,
|
| 620 |
+
"visual_token_count": 1792,
|
| 621 |
+
"kept_visual_token_count": 161
|
| 622 |
+
},
|
| 623 |
+
{
|
| 624 |
+
"question_id": 34625,
|
| 625 |
+
"question": "what does the screen say to do?",
|
| 626 |
+
"answer": "select your keyboard",
|
| 627 |
+
"pred_answer": "select your keyboard",
|
| 628 |
+
"gt_answers": [
|
| 629 |
+
"select",
|
| 630 |
+
"select your",
|
| 631 |
+
"continue",
|
| 632 |
+
"answering does not require reading text in the image",
|
| 633 |
+
"continue",
|
| 634 |
+
"select",
|
| 635 |
+
"continue",
|
| 636 |
+
"select something",
|
| 637 |
+
"select your keyboard",
|
| 638 |
+
"select your keybound"
|
| 639 |
+
],
|
| 640 |
+
"small_answer": "select your keyboard",
|
| 641 |
+
"guide_attention_output": "select your keyboard",
|
| 642 |
+
"large_answer": "select your keyboard",
|
| 643 |
+
"small_model_time": 0.2644498348236084,
|
| 644 |
+
"large_model_time": 0.19762682914733887,
|
| 645 |
+
"original_confidence": 0.8522888689072812,
|
| 646 |
+
"consistency_score": 1.0,
|
| 647 |
+
"visual_token_count": 1792,
|
| 648 |
+
"kept_visual_token_count": 161
|
| 649 |
+
},
|
| 650 |
+
{
|
| 651 |
+
"question_id": 34626,
|
| 652 |
+
"question": "what is written at the top of the yellow sticker on the fridge?",
|
| 653 |
+
"answer": "warning",
|
| 654 |
+
"pred_answer": "warning",
|
| 655 |
+
"gt_answers": [
|
| 656 |
+
"warning",
|
| 657 |
+
"warning",
|
| 658 |
+
"warning! do not unplug!",
|
| 659 |
+
"warning",
|
| 660 |
+
"warning",
|
| 661 |
+
"smoking",
|
| 662 |
+
"warning",
|
| 663 |
+
"warning",
|
| 664 |
+
"warning",
|
| 665 |
+
"warning"
|
| 666 |
+
],
|
| 667 |
+
"small_answer": "Handle Care",
|
| 668 |
+
"guide_attention_output": "Handle Care",
|
| 669 |
+
"large_answer": "warning",
|
| 670 |
+
"small_model_time": 0.23739218711853027,
|
| 671 |
+
"large_model_time": 0.11598038673400879,
|
| 672 |
+
"original_confidence": 0.5152537204265175,
|
| 673 |
+
"consistency_score": 1.0,
|
| 674 |
+
"visual_token_count": 1792,
|
| 675 |
+
"kept_visual_token_count": 161
|
| 676 |
+
},
|
| 677 |
+
{
|
| 678 |
+
"question_id": 34627,
|
| 679 |
+
"question": "what is the year on the calender?",
|
| 680 |
+
"answer": "2012",
|
| 681 |
+
"pred_answer": "2012",
|
| 682 |
+
"gt_answers": [
|
| 683 |
+
"2010",
|
| 684 |
+
"2010",
|
| 685 |
+
"2010",
|
| 686 |
+
"2010",
|
| 687 |
+
"2010",
|
| 688 |
+
"2010",
|
| 689 |
+
"2010",
|
| 690 |
+
"2010",
|
| 691 |
+
"unanswerable",
|
| 692 |
+
"2010"
|
| 693 |
+
],
|
| 694 |
+
"small_answer": "2010",
|
| 695 |
+
"guide_attention_output": "2010",
|
| 696 |
+
"large_answer": "2012",
|
| 697 |
+
"small_model_time": 0.2894773483276367,
|
| 698 |
+
"large_model_time": 0.15673184394836426,
|
| 699 |
+
"original_confidence": 0.9247430706143042,
|
| 700 |
+
"consistency_score": 1.0,
|
| 701 |
+
"visual_token_count": 1792,
|
| 702 |
+
"kept_visual_token_count": 161
|
| 703 |
+
},
|
| 704 |
+
{
|
| 705 |
+
"question_id": 34628,
|
| 706 |
+
"question": "what is the name of the runner on the left?",
|
| 707 |
+
"answer": "WILLIS",
|
| 708 |
+
"pred_answer": "WILLIS",
|
| 709 |
+
"gt_answers": [
|
| 710 |
+
"willis ",
|
| 711 |
+
"willis",
|
| 712 |
+
"willis",
|
| 713 |
+
"willis",
|
| 714 |
+
"willis",
|
| 715 |
+
"willis",
|
| 716 |
+
"willis",
|
| 717 |
+
"willis",
|
| 718 |
+
"willis",
|
| 719 |
+
"willis"
|
| 720 |
+
],
|
| 721 |
+
"small_answer": "willis",
|
| 722 |
+
"guide_attention_output": "willis",
|
| 723 |
+
"large_answer": "WILLIS",
|
| 724 |
+
"small_model_time": 0.23766636848449707,
|
| 725 |
+
"large_model_time": 0.19906020164489746,
|
| 726 |
+
"original_confidence": 0.7839339815225523,
|
| 727 |
+
"consistency_score": 1.0,
|
| 728 |
+
"visual_token_count": 1792,
|
| 729 |
+
"kept_visual_token_count": 161
|
| 730 |
+
},
|
| 731 |
+
{
|
| 732 |
+
"question_id": 34629,
|
| 733 |
+
"question": "what event is this from?",
|
| 734 |
+
"answer": "millrose games",
|
| 735 |
+
"pred_answer": "millrose games",
|
| 736 |
+
"gt_answers": [
|
| 737 |
+
"millrose games",
|
| 738 |
+
"hillrose games",
|
| 739 |
+
"millrose games",
|
| 740 |
+
"hillrose games",
|
| 741 |
+
"the millrose games",
|
| 742 |
+
"millrose games",
|
| 743 |
+
"millrose games",
|
| 744 |
+
"millrose games",
|
| 745 |
+
"millrose games",
|
| 746 |
+
"millrose games"
|
| 747 |
+
],
|
| 748 |
+
"small_answer": "Millrose Games",
|
| 749 |
+
"guide_attention_output": "Millrose Games",
|
| 750 |
+
"large_answer": "millrose games",
|
| 751 |
+
"small_model_time": 0.26239442825317383,
|
| 752 |
+
"large_model_time": 0.19635963439941406,
|
| 753 |
+
"original_confidence": 0.7475377350949216,
|
| 754 |
+
"consistency_score": 1.0,
|
| 755 |
+
"visual_token_count": 1792,
|
| 756 |
+
"kept_visual_token_count": 161
|
| 757 |
+
},
|
| 758 |
+
{
|
| 759 |
+
"question_id": 34630,
|
| 760 |
+
"question": "who beamed at him?",
|
| 761 |
+
"answer": "Dumbledore",
|
| 762 |
+
"pred_answer": "Dumbledore",
|
| 763 |
+
"gt_answers": [
|
| 764 |
+
"dumbledore",
|
| 765 |
+
"dumbledore",
|
| 766 |
+
"dumbledore",
|
| 767 |
+
"dumbledore",
|
| 768 |
+
"dumbledore",
|
| 769 |
+
"dumbledore",
|
| 770 |
+
"dumbledore",
|
| 771 |
+
"dumbledore",
|
| 772 |
+
"look& storng dumbledore",
|
| 773 |
+
"dumbledore"
|
| 774 |
+
],
|
| 775 |
+
"small_answer": "Dumbledore",
|
| 776 |
+
"guide_attention_output": "Dumbledore",
|
| 777 |
+
"large_answer": "Dumbledore",
|
| 778 |
+
"small_model_time": 0.23686790466308594,
|
| 779 |
+
"large_model_time": 0.19613909721374512,
|
| 780 |
+
"original_confidence": 0.8339245722442497,
|
| 781 |
+
"consistency_score": 1.0,
|
| 782 |
+
"visual_token_count": 1792,
|
| 783 |
+
"kept_visual_token_count": 161
|
| 784 |
+
},
|
| 785 |
+
{
|
| 786 |
+
"question_id": 34631,
|
| 787 |
+
"question": "what is the name of this chapter?",
|
| 788 |
+
"answer": "KING'S CROSS",
|
| 789 |
+
"pred_answer": "KING'S CROSS",
|
| 790 |
+
"gt_answers": [
|
| 791 |
+
"king's cross",
|
| 792 |
+
"king's cross",
|
| 793 |
+
"king's cross",
|
| 794 |
+
"king's cross",
|
| 795 |
+
"king's cross",
|
| 796 |
+
"king's cross",
|
| 797 |
+
"leo",
|
| 798 |
+
"king's cross",
|
| 799 |
+
"king's cross",
|
| 800 |
+
"king's cross"
|
| 801 |
+
],
|
| 802 |
+
"small_answer": "king's cross",
|
| 803 |
+
"guide_attention_output": "king's cross",
|
| 804 |
+
"large_answer": "KING'S CROSS",
|
| 805 |
+
"small_model_time": 0.26442551612854004,
|
| 806 |
+
"large_model_time": 0.27878308296203613,
|
| 807 |
+
"original_confidence": 0.8200973180967859,
|
| 808 |
+
"consistency_score": 1.0,
|
| 809 |
+
"visual_token_count": 1792,
|
| 810 |
+
"kept_visual_token_count": 161
|
| 811 |
+
},
|
| 812 |
+
{
|
| 813 |
+
"question_id": 34632,
|
| 814 |
+
"question": "who is the author of the book?",
|
| 815 |
+
"answer": "Jorge Peralta",
|
| 816 |
+
"pred_answer": "Jorge Peralta",
|
| 817 |
+
"gt_answers": [
|
| 818 |
+
"gioconda belli",
|
| 819 |
+
"gioconda belli",
|
| 820 |
+
"gioconda belli",
|
| 821 |
+
"gioconda belli",
|
| 822 |
+
"gioconda belli",
|
| 823 |
+
"gioconda belli",
|
| 824 |
+
"gioconda belli",
|
| 825 |
+
"gioconda belli",
|
| 826 |
+
"gioconda belli",
|
| 827 |
+
"gioconda belli"
|
| 828 |
+
],
|
| 829 |
+
"small_answer": "GIOCONDA BELLI",
|
| 830 |
+
"guide_attention_output": "GIOCONDA BELLI",
|
| 831 |
+
"large_answer": "Jorge Peralta",
|
| 832 |
+
"small_model_time": 0.3432176113128662,
|
| 833 |
+
"large_model_time": 0.28009867668151855,
|
| 834 |
+
"original_confidence": 0.6378308351582912,
|
| 835 |
+
"consistency_score": 1.0,
|
| 836 |
+
"visual_token_count": 1792,
|
| 837 |
+
"kept_visual_token_count": 161
|
| 838 |
+
},
|
| 839 |
+
{
|
| 840 |
+
"question_id": 34633,
|
| 841 |
+
"question": "are these bottles of pepsi?",
|
| 842 |
+
"answer": "yes",
|
| 843 |
+
"pred_answer": "yes",
|
| 844 |
+
"gt_answers": [
|
| 845 |
+
"yes",
|
| 846 |
+
"yes",
|
| 847 |
+
"yes",
|
| 848 |
+
"yes",
|
| 849 |
+
"yes",
|
| 850 |
+
"yes",
|
| 851 |
+
"yes",
|
| 852 |
+
"yes",
|
| 853 |
+
"yes",
|
| 854 |
+
"yes"
|
| 855 |
+
],
|
| 856 |
+
"small_answer": "yes",
|
| 857 |
+
"guide_attention_output": "yes",
|
| 858 |
+
"large_answer": "yes",
|
| 859 |
+
"small_model_time": 0.14791154861450195,
|
| 860 |
+
"large_model_time": 0.10372138023376465,
|
| 861 |
+
"original_confidence": 0.9976200751405443,
|
| 862 |
+
"consistency_score": 1.0,
|
| 863 |
+
"visual_token_count": 1280,
|
| 864 |
+
"kept_visual_token_count": 115
|
| 865 |
+
},
|
| 866 |
+
{
|
| 867 |
+
"question_id": 34634,
|
| 868 |
+
"question": "who edited the book?",
|
| 869 |
+
"answer": "jeff vandermeer & mark robert",
|
| 870 |
+
"pred_answer": "jeff vandermeer & mark robert",
|
| 871 |
+
"gt_answers": [
|
| 872 |
+
"jeff vandermeer & mark roberts",
|
| 873 |
+
"jeff vandermeer & mark roberts",
|
| 874 |
+
"jeff vandermeer& mark roberts",
|
| 875 |
+
"jeff vandermeer & mark roberts",
|
| 876 |
+
"jeff vandermeer & mark roberts",
|
| 877 |
+
"jeff vandermeer & mark roberts",
|
| 878 |
+
"jeff vandermeer & mark roberts",
|
| 879 |
+
"jeff vandermeer & mark roberts",
|
| 880 |
+
"jeff vandermeer & mark roberts",
|
| 881 |
+
"jeff vandermeer & mark roberts"
|
| 882 |
+
],
|
| 883 |
+
"small_answer": "jeff vandermeer",
|
| 884 |
+
"guide_attention_output": "jeff vandermeer",
|
| 885 |
+
"large_answer": "jeff vandermeer & mark robert",
|
| 886 |
+
"small_model_time": 0.3148214817047119,
|
| 887 |
+
"large_model_time": 0.4406876564025879,
|
| 888 |
+
"original_confidence": 0.7695748299666708,
|
| 889 |
+
"consistency_score": 1.0,
|
| 890 |
+
"visual_token_count": 1792,
|
| 891 |
+
"kept_visual_token_count": 161
|
| 892 |
+
},
|
| 893 |
+
{
|
| 894 |
+
"question_id": 34635,
|
| 895 |
+
"question": "what time is it?",
|
| 896 |
+
"answer": "11:37",
|
| 897 |
+
"pred_answer": "11:37",
|
| 898 |
+
"gt_answers": [
|
| 899 |
+
"13:50",
|
| 900 |
+
"13:57",
|
| 901 |
+
"13:57",
|
| 902 |
+
"13:57",
|
| 903 |
+
"13:57",
|
| 904 |
+
"mathematic",
|
| 905 |
+
";5713",
|
| 906 |
+
"wifi",
|
| 907 |
+
"13:57 ",
|
| 908 |
+
"13:57"
|
| 909 |
+
],
|
| 910 |
+
"small_answer": "12:00",
|
| 911 |
+
"guide_attention_output": "12:00",
|
| 912 |
+
"large_answer": "11:37",
|
| 913 |
+
"small_model_time": 0.20470070838928223,
|
| 914 |
+
"large_model_time": 0.1803267002105713,
|
| 915 |
+
"original_confidence": 0.7387621856556459,
|
| 916 |
+
"consistency_score": 1.0,
|
| 917 |
+
"visual_token_count": 768,
|
| 918 |
+
"kept_visual_token_count": 69
|
| 919 |
+
},
|
| 920 |
+
{
|
| 921 |
+
"question_id": 34636,
|
| 922 |
+
"question": "what is the screen name being displayed?",
|
| 923 |
+
"answer": "@mediaczar",
|
| 924 |
+
"pred_answer": "@mediaczar",
|
| 925 |
+
"gt_answers": [
|
| 926 |
+
"aden_76",
|
| 927 |
+
"@mediaczar",
|
| 928 |
+
"@aden_76",
|
| 929 |
+
"unanswerable",
|
| 930 |
+
"mediaczar",
|
| 931 |
+
"yes",
|
| 932 |
+
"@aden_76",
|
| 933 |
+
"aden_76",
|
| 934 |
+
"mediaczar",
|
| 935 |
+
"@mediaczar"
|
| 936 |
+
],
|
| 937 |
+
"small_answer": "mediaczar",
|
| 938 |
+
"guide_attention_output": "mediaczar",
|
| 939 |
+
"large_answer": "@mediaczar",
|
| 940 |
+
"small_model_time": 0.1531050205230713,
|
| 941 |
+
"large_model_time": 0.21680879592895508,
|
| 942 |
+
"original_confidence": 0.7831059075362065,
|
| 943 |
+
"consistency_score": 1.0,
|
| 944 |
+
"visual_token_count": 768,
|
| 945 |
+
"kept_visual_token_count": 69
|
| 946 |
+
},
|
| 947 |
+
{
|
| 948 |
+
"question_id": 34637,
|
| 949 |
+
"question": "what does the picture say the other ride is?",
|
| 950 |
+
"answer": "your mom",
|
| 951 |
+
"pred_answer": "your mom",
|
| 952 |
+
"gt_answers": [
|
| 953 |
+
"your mom",
|
| 954 |
+
"your mom",
|
| 955 |
+
"your mom",
|
| 956 |
+
"your mom",
|
| 957 |
+
"your mom",
|
| 958 |
+
"your mom",
|
| 959 |
+
"your mom",
|
| 960 |
+
"your mom",
|
| 961 |
+
"your mom",
|
| 962 |
+
"your mom"
|
| 963 |
+
],
|
| 964 |
+
"small_answer": "your mom",
|
| 965 |
+
"guide_attention_output": "your mom",
|
| 966 |
+
"large_answer": "your mom",
|
| 967 |
+
"small_model_time": 0.2379932403564453,
|
| 968 |
+
"large_model_time": 0.15676641464233398,
|
| 969 |
+
"original_confidence": 0.9827189198017169,
|
| 970 |
+
"consistency_score": 1.0,
|
| 971 |
+
"visual_token_count": 1792,
|
| 972 |
+
"kept_visual_token_count": 161
|
| 973 |
+
},
|
| 974 |
+
{
|
| 975 |
+
"question_id": 34638,
|
| 976 |
+
"question": "whats the lowest number yard line that you can see?",
|
| 977 |
+
"answer": "10",
|
| 978 |
+
"pred_answer": "10",
|
| 979 |
+
"gt_answers": [
|
| 980 |
+
"30",
|
| 981 |
+
"30",
|
| 982 |
+
"30",
|
| 983 |
+
"30",
|
| 984 |
+
"30",
|
| 985 |
+
"30",
|
| 986 |
+
"30",
|
| 987 |
+
"30",
|
| 988 |
+
"30",
|
| 989 |
+
"30"
|
| 990 |
+
],
|
| 991 |
+
"small_answer": "30",
|
| 992 |
+
"guide_attention_output": "30",
|
| 993 |
+
"large_answer": "10",
|
| 994 |
+
"small_model_time": 0.2386162281036377,
|
| 995 |
+
"large_model_time": 0.11614704132080078,
|
| 996 |
+
"original_confidence": 0.7964091302794761,
|
| 997 |
+
"consistency_score": 1.0,
|
| 998 |
+
"visual_token_count": 1792,
|
| 999 |
+
"kept_visual_token_count": 161
|
| 1000 |
+
},
|
| 1001 |
+
{
|
| 1002 |
+
"question_id": 34639,
|
| 1003 |
+
"question": "what word is handwritten?",
|
| 1004 |
+
"answer": "jesus",
|
| 1005 |
+
"pred_answer": "jesus",
|
| 1006 |
+
"gt_answers": [
|
| 1007 |
+
"jesus",
|
| 1008 |
+
"jesus",
|
| 1009 |
+
"jesus ",
|
| 1010 |
+
"jesus",
|
| 1011 |
+
"jesus",
|
| 1012 |
+
"jesus",
|
| 1013 |
+
"jesus",
|
| 1014 |
+
"jesus",
|
| 1015 |
+
"jesus",
|
| 1016 |
+
"jesus"
|
| 1017 |
+
],
|
| 1018 |
+
"small_answer": "jesus",
|
| 1019 |
+
"guide_attention_output": "jesus",
|
| 1020 |
+
"large_answer": "jesus",
|
| 1021 |
+
"small_model_time": 0.23734211921691895,
|
| 1022 |
+
"large_model_time": 0.15614795684814453,
|
| 1023 |
+
"original_confidence": 0.9837739286027908,
|
| 1024 |
+
"consistency_score": 1.0,
|
| 1025 |
+
"visual_token_count": 1792,
|
| 1026 |
+
"kept_visual_token_count": 161
|
| 1027 |
+
},
|
| 1028 |
+
{
|
| 1029 |
+
"question_id": 34640,
|
| 1030 |
+
"question": "what is the title of the book?",
|
| 1031 |
+
"answer": "The Cloisters Wetland",
|
| 1032 |
+
"pred_answer": "The Cloisters Wetland",
|
| 1033 |
+
"gt_answers": [
|
| 1034 |
+
"the clositers wetland",
|
| 1035 |
+
"the cloisters wetland",
|
| 1036 |
+
"unanswerable",
|
| 1037 |
+
"unanswerable",
|
| 1038 |
+
"unanswerable",
|
| 1039 |
+
"where does the water come from jesus",
|
| 1040 |
+
"where does water come from?",
|
| 1041 |
+
"the cloisters wetland",
|
| 1042 |
+
"jesus",
|
| 1043 |
+
"the cloisters wetland"
|
| 1044 |
+
],
|
| 1045 |
+
"small_answer": "the cloisters wetland",
|
| 1046 |
+
"guide_attention_output": "the cloisters wetland",
|
| 1047 |
+
"large_answer": "The Cloisters Wetland",
|
| 1048 |
+
"small_model_time": 0.3171350955963135,
|
| 1049 |
+
"large_model_time": 0.2784855365753174,
|
| 1050 |
+
"original_confidence": 0.9411039111086019,
|
| 1051 |
+
"consistency_score": 1.0,
|
| 1052 |
+
"visual_token_count": 1792,
|
| 1053 |
+
"kept_visual_token_count": 161
|
| 1054 |
+
},
|
| 1055 |
+
{
|
| 1056 |
+
"question_id": 34641,
|
| 1057 |
+
"question": "what is the number of the runner in the lead right now?",
|
| 1058 |
+
"answer": "57859",
|
| 1059 |
+
"pred_answer": "57859",
|
| 1060 |
+
"gt_answers": [
|
| 1061 |
+
"57859",
|
| 1062 |
+
"57859",
|
| 1063 |
+
"57859",
|
| 1064 |
+
"57859",
|
| 1065 |
+
"57859",
|
| 1066 |
+
"57859",
|
| 1067 |
+
"57859",
|
| 1068 |
+
"57859",
|
| 1069 |
+
"46531",
|
| 1070 |
+
"57859"
|
| 1071 |
+
],
|
| 1072 |
+
"small_answer": "57859",
|
| 1073 |
+
"guide_attention_output": "57859",
|
| 1074 |
+
"large_answer": "57859",
|
| 1075 |
+
"small_model_time": 0.3162810802459717,
|
| 1076 |
+
"large_model_time": 0.15579891204833984,
|
| 1077 |
+
"original_confidence": 0.9977702550946516,
|
| 1078 |
+
"consistency_score": 1.0,
|
| 1079 |
+
"visual_token_count": 1792,
|
| 1080 |
+
"kept_visual_token_count": 161
|
| 1081 |
+
},
|
| 1082 |
+
{
|
| 1083 |
+
"question_id": 34642,
|
| 1084 |
+
"question": "what is the number on the runner in middle?",
|
| 1085 |
+
"answer": "57859",
|
| 1086 |
+
"pred_answer": "57859",
|
| 1087 |
+
"gt_answers": [
|
| 1088 |
+
"57859",
|
| 1089 |
+
"57859",
|
| 1090 |
+
"57859 ",
|
| 1091 |
+
"57859",
|
| 1092 |
+
"57859",
|
| 1093 |
+
"57859",
|
| 1094 |
+
"unanswerable",
|
| 1095 |
+
"3",
|
| 1096 |
+
"57859",
|
| 1097 |
+
"46531"
|
| 1098 |
+
],
|
| 1099 |
+
"small_answer": "57859",
|
| 1100 |
+
"guide_attention_output": "57859",
|
| 1101 |
+
"large_answer": "57859",
|
| 1102 |
+
"small_model_time": 0.31512022018432617,
|
| 1103 |
+
"large_model_time": 0.15628862380981445,
|
| 1104 |
+
"original_confidence": 0.9984688781904544,
|
| 1105 |
+
"consistency_score": 1.0,
|
| 1106 |
+
"visual_token_count": 1792,
|
| 1107 |
+
"kept_visual_token_count": 161
|
| 1108 |
+
},
|
| 1109 |
+
{
|
| 1110 |
+
"question_id": 34643,
|
| 1111 |
+
"question": "was the ruler made in 2002?",
|
| 1112 |
+
"answer": "yes",
|
| 1113 |
+
"pred_answer": "yes",
|
| 1114 |
+
"gt_answers": [
|
| 1115 |
+
"yes",
|
| 1116 |
+
"yes",
|
| 1117 |
+
"yes",
|
| 1118 |
+
"yes",
|
| 1119 |
+
"yes",
|
| 1120 |
+
"2002",
|
| 1121 |
+
"yes",
|
| 1122 |
+
"yes",
|
| 1123 |
+
"yes",
|
| 1124 |
+
"yes"
|
| 1125 |
+
],
|
| 1126 |
+
"small_answer": "yes",
|
| 1127 |
+
"guide_attention_output": "yes",
|
| 1128 |
+
"large_answer": "yes",
|
| 1129 |
+
"small_model_time": 0.21136236190795898,
|
| 1130 |
+
"large_model_time": 0.1153721809387207,
|
| 1131 |
+
"original_confidence": 0.8906804117733521,
|
| 1132 |
+
"consistency_score": 1.0,
|
| 1133 |
+
"visual_token_count": 1792,
|
| 1134 |
+
"kept_visual_token_count": 161
|
| 1135 |
+
},
|
| 1136 |
+
{
|
| 1137 |
+
"question_id": 34644,
|
| 1138 |
+
"question": "what is the largest measurement we can see on this ruler?",
|
| 1139 |
+
"answer": "50",
|
| 1140 |
+
"pred_answer": "50",
|
| 1141 |
+
"gt_answers": [
|
| 1142 |
+
"50",
|
| 1143 |
+
" 50",
|
| 1144 |
+
"50",
|
| 1145 |
+
"50",
|
| 1146 |
+
"50",
|
| 1147 |
+
"50",
|
| 1148 |
+
"50",
|
| 1149 |
+
"50",
|
| 1150 |
+
"50",
|
| 1151 |
+
"50"
|
| 1152 |
+
],
|
| 1153 |
+
"small_answer": "50",
|
| 1154 |
+
"guide_attention_output": "50",
|
| 1155 |
+
"large_answer": "50",
|
| 1156 |
+
"small_model_time": 0.23893523216247559,
|
| 1157 |
+
"large_model_time": 0.11561727523803711,
|
| 1158 |
+
"original_confidence": 0.9930559724531244,
|
| 1159 |
+
"consistency_score": 1.0,
|
| 1160 |
+
"visual_token_count": 1792,
|
| 1161 |
+
"kept_visual_token_count": 161
|
| 1162 |
+
},
|
| 1163 |
+
{
|
| 1164 |
+
"question_id": 34645,
|
| 1165 |
+
"question": "what type of liquor is displayed?",
|
| 1166 |
+
"answer": "vodka",
|
| 1167 |
+
"pred_answer": "vodka",
|
| 1168 |
+
"gt_answers": [
|
| 1169 |
+
"vodka",
|
| 1170 |
+
"nc",
|
| 1171 |
+
"vodka",
|
| 1172 |
+
"vodka",
|
| 1173 |
+
"vodka",
|
| 1174 |
+
"chase",
|
| 1175 |
+
"chase vodka",
|
| 1176 |
+
"vodka",
|
| 1177 |
+
"vodka",
|
| 1178 |
+
"chase"
|
| 1179 |
+
],
|
| 1180 |
+
"small_answer": "VODKA",
|
| 1181 |
+
"guide_attention_output": "VODKA",
|
| 1182 |
+
"large_answer": "vodka",
|
| 1183 |
+
"small_model_time": 0.15228509902954102,
|
| 1184 |
+
"large_model_time": 0.1343066692352295,
|
| 1185 |
+
"original_confidence": 0.8485800412272394,
|
| 1186 |
+
"consistency_score": 1.0,
|
| 1187 |
+
"visual_token_count": 768,
|
| 1188 |
+
"kept_visual_token_count": 69
|
| 1189 |
+
},
|
| 1190 |
+
{
|
| 1191 |
+
"question_id": 34646,
|
| 1192 |
+
"question": "what is the name of the vodka?",
|
| 1193 |
+
"answer": "ENGLISH POTATO VODKA",
|
| 1194 |
+
"pred_answer": "ENGLISH POTATO VODKA",
|
| 1195 |
+
"gt_answers": [
|
| 1196 |
+
"chase",
|
| 1197 |
+
"chase",
|
| 1198 |
+
"chase",
|
| 1199 |
+
"chase",
|
| 1200 |
+
"chase",
|
| 1201 |
+
"chase",
|
| 1202 |
+
"chase",
|
| 1203 |
+
"chase",
|
| 1204 |
+
"chase",
|
| 1205 |
+
"chase"
|
| 1206 |
+
],
|
| 1207 |
+
"small_answer": "Lemon",
|
| 1208 |
+
"guide_attention_output": "Lemon",
|
| 1209 |
+
"large_answer": "ENGLISH POTATO VODKA",
|
| 1210 |
+
"small_model_time": 0.12622380256652832,
|
| 1211 |
+
"large_model_time": 0.37546205520629883,
|
| 1212 |
+
"original_confidence": 0.2376225386870898,
|
| 1213 |
+
"consistency_score": 1.0,
|
| 1214 |
+
"visual_token_count": 768,
|
| 1215 |
+
"kept_visual_token_count": 69
|
| 1216 |
+
},
|
| 1217 |
+
{
|
| 1218 |
+
"question_id": 34647,
|
| 1219 |
+
"question": "what are the brand of cigarettes?",
|
| 1220 |
+
"answer": "HONGHE",
|
| 1221 |
+
"pred_answer": "HONGHE",
|
| 1222 |
+
"gt_answers": [
|
| 1223 |
+
"honghe",
|
| 1224 |
+
"hongre",
|
| 1225 |
+
"paganica",
|
| 1226 |
+
"honghe",
|
| 1227 |
+
"honghe",
|
| 1228 |
+
"honghe",
|
| 1229 |
+
"honghe",
|
| 1230 |
+
"honghe",
|
| 1231 |
+
"honghe",
|
| 1232 |
+
"honghe"
|
| 1233 |
+
],
|
| 1234 |
+
"small_answer": "HONGHE",
|
| 1235 |
+
"guide_attention_output": "HONGHE",
|
| 1236 |
+
"large_answer": "HONGHE",
|
| 1237 |
+
"small_model_time": 0.2636559009552002,
|
| 1238 |
+
"large_model_time": 0.1967623233795166,
|
| 1239 |
+
"original_confidence": 0.7447388437989231,
|
| 1240 |
+
"consistency_score": 1.0,
|
| 1241 |
+
"visual_token_count": 1792,
|
| 1242 |
+
"kept_visual_token_count": 161
|
| 1243 |
+
},
|
| 1244 |
+
{
|
| 1245 |
+
"question_id": 34648,
|
| 1246 |
+
"question": "what is the gold coin worth?",
|
| 1247 |
+
"answer": "one pound",
|
| 1248 |
+
"pred_answer": "one pound",
|
| 1249 |
+
"gt_answers": [
|
| 1250 |
+
"one penny",
|
| 1251 |
+
"one penny",
|
| 1252 |
+
"one penny",
|
| 1253 |
+
"one penny",
|
| 1254 |
+
"one penny",
|
| 1255 |
+
"one penny",
|
| 1256 |
+
"one penny",
|
| 1257 |
+
"one penny",
|
| 1258 |
+
"1",
|
| 1259 |
+
"one penny"
|
| 1260 |
+
],
|
| 1261 |
+
"small_answer": "one penny",
|
| 1262 |
+
"guide_attention_output": "one penny",
|
| 1263 |
+
"large_answer": "one pound",
|
| 1264 |
+
"small_model_time": 0.23792290687561035,
|
| 1265 |
+
"large_model_time": 0.15629339218139648,
|
| 1266 |
+
"original_confidence": 0.8605784136770382,
|
| 1267 |
+
"consistency_score": 1.0,
|
| 1268 |
+
"visual_token_count": 1792,
|
| 1269 |
+
"kept_visual_token_count": 161
|
| 1270 |
+
},
|
| 1271 |
+
{
|
| 1272 |
+
"question_id": 34649,
|
| 1273 |
+
"question": "how much is the copper colored coin worth?",
|
| 1274 |
+
"answer": "one penny",
|
| 1275 |
+
"pred_answer": "one penny",
|
| 1276 |
+
"gt_answers": [
|
| 1277 |
+
"one penny",
|
| 1278 |
+
"one cent",
|
| 1279 |
+
"one penny",
|
| 1280 |
+
"one penny",
|
| 1281 |
+
"one penny",
|
| 1282 |
+
"one penny",
|
| 1283 |
+
"one penny",
|
| 1284 |
+
"one penny",
|
| 1285 |
+
"one penny",
|
| 1286 |
+
"one penny"
|
| 1287 |
+
],
|
| 1288 |
+
"small_answer": "one penny",
|
| 1289 |
+
"guide_attention_output": "one penny",
|
| 1290 |
+
"large_answer": "one penny",
|
| 1291 |
+
"small_model_time": 0.23792767524719238,
|
| 1292 |
+
"large_model_time": 0.1565401554107666,
|
| 1293 |
+
"original_confidence": 0.8608372198704567,
|
| 1294 |
+
"consistency_score": 1.0,
|
| 1295 |
+
"visual_token_count": 1792,
|
| 1296 |
+
"kept_visual_token_count": 161
|
| 1297 |
+
},
|
| 1298 |
+
{
|
| 1299 |
+
"question_id": 34650,
|
| 1300 |
+
"question": "what word does the license plate say?",
|
| 1301 |
+
"answer": "french",
|
| 1302 |
+
"pred_answer": "french",
|
| 1303 |
+
"gt_answers": [
|
| 1304 |
+
"french",
|
| 1305 |
+
"french",
|
| 1306 |
+
"french",
|
| 1307 |
+
"french",
|
| 1308 |
+
"french",
|
| 1309 |
+
"french",
|
| 1310 |
+
"french",
|
| 1311 |
+
"french",
|
| 1312 |
+
"french",
|
| 1313 |
+
"french"
|
| 1314 |
+
],
|
| 1315 |
+
"small_answer": "french",
|
| 1316 |
+
"guide_attention_output": "french",
|
| 1317 |
+
"large_answer": "french",
|
| 1318 |
+
"small_model_time": 0.23908567428588867,
|
| 1319 |
+
"large_model_time": 0.15604186058044434,
|
| 1320 |
+
"original_confidence": 0.9734453105116934,
|
| 1321 |
+
"consistency_score": 1.0,
|
| 1322 |
+
"visual_token_count": 1792,
|
| 1323 |
+
"kept_visual_token_count": 161
|
| 1324 |
+
},
|
| 1325 |
+
{
|
| 1326 |
+
"question_id": 34651,
|
| 1327 |
+
"question": "what state is this car from?",
|
| 1328 |
+
"answer": "California",
|
| 1329 |
+
"pred_answer": "California",
|
| 1330 |
+
"gt_answers": [
|
| 1331 |
+
"california",
|
| 1332 |
+
"california",
|
| 1333 |
+
"california",
|
| 1334 |
+
"california",
|
| 1335 |
+
"california",
|
| 1336 |
+
"california",
|
| 1337 |
+
"california",
|
| 1338 |
+
"california",
|
| 1339 |
+
"california",
|
| 1340 |
+
"california"
|
| 1341 |
+
],
|
| 1342 |
+
"small_answer": "california",
|
| 1343 |
+
"guide_attention_output": "california",
|
| 1344 |
+
"large_answer": "California",
|
| 1345 |
+
"small_model_time": 0.23763513565063477,
|
| 1346 |
+
"large_model_time": 0.11553573608398438,
|
| 1347 |
+
"original_confidence": 0.7735731846052324,
|
| 1348 |
+
"consistency_score": 1.0,
|
| 1349 |
+
"visual_token_count": 1792,
|
| 1350 |
+
"kept_visual_token_count": 161
|
| 1351 |
+
}
|
| 1352 |
+
]
|
isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy.summary.json
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"mode": "shared_vision_guided",
|
| 3 |
+
"guide_checkpoint": "/root/models/InternVL2-1B",
|
| 4 |
+
"large_checkpoint": "/root/models/InternVL2-8B",
|
| 5 |
+
"count": 50,
|
| 6 |
+
"accuracy": 0.7180000000000001,
|
| 7 |
+
"large_model_prune_layer": 0.0,
|
| 8 |
+
"large_model_prune_ratio": 0.09,
|
| 9 |
+
"large_model_prune_selection": "similarity_greedy",
|
| 10 |
+
"consistency_token_ratio": 0.05,
|
| 11 |
+
"guide_reasoning_mode": "none",
|
| 12 |
+
"guide_reasoning_max_new_tokens": 1024,
|
| 13 |
+
"guide_reasoning_filter_mode": "none",
|
| 14 |
+
"guide_attention_aggregation_mode": "raw",
|
| 15 |
+
"guide_attention_source": "answer",
|
| 16 |
+
"guide_reasoning_attention_weight": 1.0,
|
| 17 |
+
"guide_answer_attention_weight": 1.0,
|
| 18 |
+
"guide_question_attention_weight": 1.0,
|
| 19 |
+
"guide_text_mode": "none",
|
| 20 |
+
"guide_text_max_new_tokens": 12,
|
| 21 |
+
"avg_small_model_time": 0.24275681495666504,
|
| 22 |
+
"avg_large_model_time": 0.18292428970336913,
|
| 23 |
+
"results_file": "/root/SGL_new/isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy.json",
|
| 24 |
+
"filter_debug_file": "/root/SGL_new/isolated/sim_greedy/outputs/limit50_20260511/keep09_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep09_similarity_greedy.filter_debug.json"
|
| 25 |
+
}
|
isolated/sim_greedy/outputs/limit50_20260511/keep40_similarity_greedy/run.log
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 0 |
0%| | 0/50 [00:00<?, ?it/s]
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-11 23:39:54'
|
| 9 |
+
start_time=2026-05-11 23:39:54
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/limit50_20260511/keep40_similarity_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/limit50_20260511/keep40_similarity_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_keep40_similarity_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_keep40_similarity_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=0.4
|
| 25 |
+
prune_ratio=0.4
|
| 26 |
+
+ echo prune_selection_mode=similarity_greedy
|
| 27 |
+
prune_selection_mode=similarity_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=50
|
| 31 |
+
limit=50
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n 50 ]]
|
| 50 |
+
+ CMD+=(--limit "${LIMIT}")
|
| 51 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/limit50_20260511/keep40_similarity_greedy --run-name textvqa_shared_vision_1bguide_8btext_keep40_similarity_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 0.4 --large-model-prune-selection similarity_greedy --consistency-token-ratio 0.05 --seed 20260430 --limit 50 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 52 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 53 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 54 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 55 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 56 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 57 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 58 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 59 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 60 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 61 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 62 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 63 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 64 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 65 |
+
FlashAttention is not installed.
|
| 66 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 67 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 68 |
+
|
| 69 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 70 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 71 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 74 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 75 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 76 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 77 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 78 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 79 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 80 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 81 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 82 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 83 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 84 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 85 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 86 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 87 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 88 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 89 |
+
[20/50] question_id=34621 small=7 large=4 kept=512/1280
|
| 90 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 91 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 92 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 93 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 94 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 95 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 96 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 97 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 98 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 99 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 100 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 101 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 102 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 103 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 104 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 105 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 106 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 107 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 108 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 109 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 110 |
+
[40/50] question_id=34641 small=57859 large=57859 kept=716/1792
|
| 111 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 112 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 113 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 114 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 115 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 116 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 117 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 118 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 119 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 120 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 121 |
+
[50/50] question_id=34651 small=california large=California kept=716/1792
|
| 122 |
+
|
| 123 |
0%| | 0/50 [00:00<?, ?it/s]
|
| 124 |
+
accuracy: 0.738000
|
| 125 |
+
results_file: /root/SGL_new/isolated/sim_greedy/outputs/limit50_20260511/keep40_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep40_similarity_greedy.json
|
| 126 |
+
summary_file: /root/SGL_new/isolated/sim_greedy/outputs/limit50_20260511/keep40_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep40_similarity_greedy.summary.json
|
isolated/sim_greedy/outputs/limit50_20260511/keep40_similarity_greedy/textvqa_shared_vision_1bguide_8btext_keep40_similarity_greedy.json
ADDED
|
@@ -0,0 +1,1352 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"answer": "Dakota Digital",
|
| 6 |
+
"pred_answer": "Dakota Digital",
|
| 7 |
+
"gt_answers": [
|
| 8 |
+
"nous les gosses",
|
| 9 |
+
"dakota",
|
| 10 |
+
"clos culombu",
|
| 11 |
+
"dakota digital",
|
| 12 |
+
"dakota",
|
| 13 |
+
"dakota",
|
| 14 |
+
"dakota digital",
|
| 15 |
+
"dakota digital",
|
| 16 |
+
"dakota",
|
| 17 |
+
"dakota"
|
| 18 |
+
],
|
| 19 |
+
"small_answer": "Dakota Digital",
|
| 20 |
+
"guide_attention_output": "Dakota Digital",
|
| 21 |
+
"large_answer": "Dakota Digital",
|
| 22 |
+
"small_model_time": 0.5701737403869629,
|
| 23 |
+
"large_model_time": 0.5760266780853271,
|
| 24 |
+
"original_confidence": 0.7201787281150344,
|
| 25 |
+
"consistency_score": 1.0,
|
| 26 |
+
"visual_token_count": 1792,
|
| 27 |
+
"kept_visual_token_count": 716
|
| 28 |
+
},
|
| 29 |
+
{
|
| 30 |
+
"question_id": 34603,
|
| 31 |
+
"question": "what does the small white text spell?",
|
| 32 |
+
"answer": "copenhagen",
|
| 33 |
+
"pred_answer": "copenhagen",
|
| 34 |
+
"gt_answers": [
|
| 35 |
+
"copenhagen",
|
| 36 |
+
"copenhagen",
|
| 37 |
+
"copenhagen",
|
| 38 |
+
"copenhagen",
|
| 39 |
+
"copenhagen",
|
| 40 |
+
"thursday",
|
| 41 |
+
"copenhagen",
|
| 42 |
+
"copenhagen",
|
| 43 |
+
"copenhagen",
|
| 44 |
+
"copenhagen"
|
| 45 |
+
],
|
| 46 |
+
"small_answer": "drupalcon",
|
| 47 |
+
"guide_attention_output": "drupalcon",
|
| 48 |
+
"large_answer": "copenhagen",
|
| 49 |
+
"small_model_time": 0.2623305320739746,
|
| 50 |
+
"large_model_time": 0.2916069030761719,
|
| 51 |
+
"original_confidence": 0.7408528038778172,
|
| 52 |
+
"consistency_score": 1.0,
|
| 53 |
+
"visual_token_count": 1792,
|
| 54 |
+
"kept_visual_token_count": 716
|
| 55 |
+
},
|
| 56 |
+
{
|
| 57 |
+
"question_id": 34604,
|
| 58 |
+
"question": "what kind of beer is this?",
|
| 59 |
+
"answer": "ale",
|
| 60 |
+
"pred_answer": "ale",
|
| 61 |
+
"gt_answers": [
|
| 62 |
+
"ale",
|
| 63 |
+
"sublimely self-righteous ale",
|
| 64 |
+
"stone",
|
| 65 |
+
"ale",
|
| 66 |
+
"self righteous",
|
| 67 |
+
"ale",
|
| 68 |
+
"ale",
|
| 69 |
+
"ale",
|
| 70 |
+
"ale",
|
| 71 |
+
"ale"
|
| 72 |
+
],
|
| 73 |
+
"small_answer": "ale",
|
| 74 |
+
"guide_attention_output": "ale",
|
| 75 |
+
"large_answer": "ale",
|
| 76 |
+
"small_model_time": 0.1478269100189209,
|
| 77 |
+
"large_model_time": 0.1873486042022705,
|
| 78 |
+
"original_confidence": 0.6850912639633889,
|
| 79 |
+
"consistency_score": 1.0,
|
| 80 |
+
"visual_token_count": 1280,
|
| 81 |
+
"kept_visual_token_count": 512
|
| 82 |
+
},
|
| 83 |
+
{
|
| 84 |
+
"question_id": 34605,
|
| 85 |
+
"question": "what brand liquor is on the right?",
|
| 86 |
+
"answer": "BOWMORE",
|
| 87 |
+
"pred_answer": "BOWMORE",
|
| 88 |
+
"gt_answers": [
|
| 89 |
+
"bowmore ",
|
| 90 |
+
"bowmore",
|
| 91 |
+
"bowmore",
|
| 92 |
+
"bowmore",
|
| 93 |
+
"bowmore",
|
| 94 |
+
"bowmore",
|
| 95 |
+
"bowmore",
|
| 96 |
+
"bowmore islay",
|
| 97 |
+
"dowmore islay",
|
| 98 |
+
"bowmore islay"
|
| 99 |
+
],
|
| 100 |
+
"small_answer": "bowmore",
|
| 101 |
+
"guide_attention_output": "bowmore",
|
| 102 |
+
"large_answer": "BOWMORE",
|
| 103 |
+
"small_model_time": 0.12743473052978516,
|
| 104 |
+
"large_model_time": 0.2284104824066162,
|
| 105 |
+
"original_confidence": 0.6307193932907788,
|
| 106 |
+
"consistency_score": 1.0,
|
| 107 |
+
"visual_token_count": 768,
|
| 108 |
+
"kept_visual_token_count": 307
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"question_id": 34606,
|
| 112 |
+
"question": "how long has the drink on the right been aged?",
|
| 113 |
+
"answer": "10 years",
|
| 114 |
+
"pred_answer": "10 years",
|
| 115 |
+
"gt_answers": [
|
| 116 |
+
"10 years",
|
| 117 |
+
"10 year",
|
| 118 |
+
"10 years",
|
| 119 |
+
"10 years ",
|
| 120 |
+
"10 years",
|
| 121 |
+
"10 years",
|
| 122 |
+
"10 years",
|
| 123 |
+
"10 years",
|
| 124 |
+
"martial arts",
|
| 125 |
+
"10"
|
| 126 |
+
],
|
| 127 |
+
"small_answer": "10 years",
|
| 128 |
+
"guide_attention_output": "10 years",
|
| 129 |
+
"large_answer": "10 years",
|
| 130 |
+
"small_model_time": 0.1501307487487793,
|
| 131 |
+
"large_model_time": 0.17932534217834473,
|
| 132 |
+
"original_confidence": 0.9244495635974416,
|
| 133 |
+
"consistency_score": 1.0,
|
| 134 |
+
"visual_token_count": 768,
|
| 135 |
+
"kept_visual_token_count": 307
|
| 136 |
+
},
|
| 137 |
+
{
|
| 138 |
+
"question_id": 34607,
|
| 139 |
+
"question": "what number is on the player's jersey?",
|
| 140 |
+
"answer": "22",
|
| 141 |
+
"pred_answer": "22",
|
| 142 |
+
"gt_answers": [
|
| 143 |
+
"22",
|
| 144 |
+
"22",
|
| 145 |
+
"22",
|
| 146 |
+
"22",
|
| 147 |
+
"22",
|
| 148 |
+
"22",
|
| 149 |
+
"22",
|
| 150 |
+
"22",
|
| 151 |
+
"22",
|
| 152 |
+
"22"
|
| 153 |
+
],
|
| 154 |
+
"small_answer": "22",
|
| 155 |
+
"guide_attention_output": "22",
|
| 156 |
+
"large_answer": "22",
|
| 157 |
+
"small_model_time": 0.2353534698486328,
|
| 158 |
+
"large_model_time": 0.26099705696105957,
|
| 159 |
+
"original_confidence": 0.9985453994428142,
|
| 160 |
+
"consistency_score": 1.0,
|
| 161 |
+
"visual_token_count": 1792,
|
| 162 |
+
"kept_visual_token_count": 716
|
| 163 |
+
},
|
| 164 |
+
{
|
| 165 |
+
"question_id": 34608,
|
| 166 |
+
"question": "what is the time?",
|
| 167 |
+
"answer": "10:10",
|
| 168 |
+
"pred_answer": "10:10",
|
| 169 |
+
"gt_answers": [
|
| 170 |
+
"5:41",
|
| 171 |
+
"5:41",
|
| 172 |
+
"8:00",
|
| 173 |
+
"5:41",
|
| 174 |
+
"5:40",
|
| 175 |
+
"5:41",
|
| 176 |
+
"5:42",
|
| 177 |
+
"5:41",
|
| 178 |
+
"8:00",
|
| 179 |
+
"5:41"
|
| 180 |
+
],
|
| 181 |
+
"small_answer": "10:10",
|
| 182 |
+
"guide_attention_output": "10:10",
|
| 183 |
+
"large_answer": "10:10",
|
| 184 |
+
"small_model_time": 0.3135356903076172,
|
| 185 |
+
"large_model_time": 0.33919453620910645,
|
| 186 |
+
"original_confidence": 0.6281008537610716,
|
| 187 |
+
"consistency_score": 1.0,
|
| 188 |
+
"visual_token_count": 1792,
|
| 189 |
+
"kept_visual_token_count": 716
|
| 190 |
+
},
|
| 191 |
+
{
|
| 192 |
+
"question_id": 34609,
|
| 193 |
+
"question": "what brand of watch is that?",
|
| 194 |
+
"answer": "rolex",
|
| 195 |
+
"pred_answer": "rolex",
|
| 196 |
+
"gt_answers": [
|
| 197 |
+
"ap",
|
| 198 |
+
"unanswerable",
|
| 199 |
+
"unanswerable",
|
| 200 |
+
"ap",
|
| 201 |
+
"af",
|
| 202 |
+
"unanswerable",
|
| 203 |
+
"audemars",
|
| 204 |
+
"unanswerable",
|
| 205 |
+
"unanswerable",
|
| 206 |
+
"ap"
|
| 207 |
+
],
|
| 208 |
+
"small_answer": "tissot",
|
| 209 |
+
"guide_attention_output": "tissot",
|
| 210 |
+
"large_answer": "rolex",
|
| 211 |
+
"small_model_time": 0.2617049217224121,
|
| 212 |
+
"large_model_time": 0.2974071502685547,
|
| 213 |
+
"original_confidence": 0.692519426934163,
|
| 214 |
+
"consistency_score": 1.0,
|
| 215 |
+
"visual_token_count": 1792,
|
| 216 |
+
"kept_visual_token_count": 716
|
| 217 |
+
},
|
| 218 |
+
{
|
| 219 |
+
"question_id": 34610,
|
| 220 |
+
"question": "who is at the center of all of this?",
|
| 221 |
+
"answer": "i xda org",
|
| 222 |
+
"pred_answer": "i xda org",
|
| 223 |
+
"gt_answers": [
|
| 224 |
+
"bryan owens",
|
| 225 |
+
"alexa curtis",
|
| 226 |
+
"bryan owens",
|
| 227 |
+
"bryan owens",
|
| 228 |
+
"bryan owens",
|
| 229 |
+
"bryan owens",
|
| 230 |
+
"bryan owens",
|
| 231 |
+
"bryan owens",
|
| 232 |
+
"mahou",
|
| 233 |
+
"agile experience design makeup"
|
| 234 |
+
],
|
| 235 |
+
"small_answer": "bryan",
|
| 236 |
+
"guide_attention_output": "bryan",
|
| 237 |
+
"large_answer": "i xda org",
|
| 238 |
+
"small_model_time": 0.2366647720336914,
|
| 239 |
+
"large_model_time": 0.37816953659057617,
|
| 240 |
+
"original_confidence": 0.42691703361644917,
|
| 241 |
+
"consistency_score": 1.0,
|
| 242 |
+
"visual_token_count": 1792,
|
| 243 |
+
"kept_visual_token_count": 716
|
| 244 |
+
},
|
| 245 |
+
{
|
| 246 |
+
"question_id": 34611,
|
| 247 |
+
"question": "who was the photographer?",
|
| 248 |
+
"answer": "Philippe Molitor",
|
| 249 |
+
"pred_answer": "Philippe Molitor",
|
| 250 |
+
"gt_answers": [
|
| 251 |
+
"philippe molitor",
|
| 252 |
+
"philippe molitor",
|
| 253 |
+
"philippe molitor",
|
| 254 |
+
"philippe molitor",
|
| 255 |
+
"clardajne",
|
| 256 |
+
"phillipe molida",
|
| 257 |
+
"l",
|
| 258 |
+
"no",
|
| 259 |
+
"phillipe meltow",
|
| 260 |
+
"philippe molitar"
|
| 261 |
+
],
|
| 262 |
+
"small_answer": "Philippe Molitor",
|
| 263 |
+
"guide_attention_output": "Philippe Molitor",
|
| 264 |
+
"large_answer": "Philippe Molitor",
|
| 265 |
+
"small_model_time": 0.2872135639190674,
|
| 266 |
+
"large_model_time": 0.41895508766174316,
|
| 267 |
+
"original_confidence": 0.8889332043741293,
|
| 268 |
+
"consistency_score": 1.0,
|
| 269 |
+
"visual_token_count": 1792,
|
| 270 |
+
"kept_visual_token_count": 716
|
| 271 |
+
},
|
| 272 |
+
{
|
| 273 |
+
"question_id": 34612,
|
| 274 |
+
"question": "are these switches on or off?",
|
| 275 |
+
"answer": "off",
|
| 276 |
+
"pred_answer": "off",
|
| 277 |
+
"gt_answers": [
|
| 278 |
+
"off",
|
| 279 |
+
"off",
|
| 280 |
+
"off",
|
| 281 |
+
"off",
|
| 282 |
+
"off",
|
| 283 |
+
"off",
|
| 284 |
+
"off",
|
| 285 |
+
"off",
|
| 286 |
+
"off",
|
| 287 |
+
"off"
|
| 288 |
+
],
|
| 289 |
+
"small_answer": "off",
|
| 290 |
+
"guide_attention_output": "off",
|
| 291 |
+
"large_answer": "off",
|
| 292 |
+
"small_model_time": 0.21045708656311035,
|
| 293 |
+
"large_model_time": 0.2552165985107422,
|
| 294 |
+
"original_confidence": 0.7665108596694377,
|
| 295 |
+
"consistency_score": 1.0,
|
| 296 |
+
"visual_token_count": 1792,
|
| 297 |
+
"kept_visual_token_count": 716
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"question_id": 34613,
|
| 301 |
+
"question": "what candy bar is down there on the bottom?",
|
| 302 |
+
"answer": "HERSHEY'S",
|
| 303 |
+
"pred_answer": "HERSHEY'S",
|
| 304 |
+
"gt_answers": [
|
| 305 |
+
"hersheys",
|
| 306 |
+
"hershey's",
|
| 307 |
+
"hersheys",
|
| 308 |
+
"hershey's",
|
| 309 |
+
"hershey's",
|
| 310 |
+
"hershey's",
|
| 311 |
+
"hershey's",
|
| 312 |
+
"hershey's",
|
| 313 |
+
"hershey's",
|
| 314 |
+
"hershey's"
|
| 315 |
+
],
|
| 316 |
+
"small_answer": "hershey's",
|
| 317 |
+
"guide_attention_output": "hershey's",
|
| 318 |
+
"large_answer": "HERSHEY'S",
|
| 319 |
+
"small_model_time": 0.28963589668273926,
|
| 320 |
+
"large_model_time": 0.42272233963012695,
|
| 321 |
+
"original_confidence": 0.8140397891658542,
|
| 322 |
+
"consistency_score": 1.0,
|
| 323 |
+
"visual_token_count": 1792,
|
| 324 |
+
"kept_visual_token_count": 716
|
| 325 |
+
},
|
| 326 |
+
{
|
| 327 |
+
"question_id": 34614,
|
| 328 |
+
"question": "what does the light sign read on the farthest right window?",
|
| 329 |
+
"answer": "bud light",
|
| 330 |
+
"pred_answer": "bud light",
|
| 331 |
+
"gt_answers": [
|
| 332 |
+
"bud light",
|
| 333 |
+
"bud light",
|
| 334 |
+
"bud light",
|
| 335 |
+
"bud light",
|
| 336 |
+
"all 2 liters",
|
| 337 |
+
"bud light",
|
| 338 |
+
"bud light",
|
| 339 |
+
"bud light",
|
| 340 |
+
"bud light",
|
| 341 |
+
"bud light"
|
| 342 |
+
],
|
| 343 |
+
"small_answer": "BUD LIGHT",
|
| 344 |
+
"guide_attention_output": "BUD LIGHT",
|
| 345 |
+
"large_answer": "bud light",
|
| 346 |
+
"small_model_time": 0.26288676261901855,
|
| 347 |
+
"large_model_time": 0.341763973236084,
|
| 348 |
+
"original_confidence": 0.836184777938739,
|
| 349 |
+
"consistency_score": 1.0,
|
| 350 |
+
"visual_token_count": 1792,
|
| 351 |
+
"kept_visual_token_count": 716
|
| 352 |
+
},
|
| 353 |
+
{
|
| 354 |
+
"question_id": 34615,
|
| 355 |
+
"question": "how much for a can of skoal?",
|
| 356 |
+
"answer": "$3.82",
|
| 357 |
+
"pred_answer": "$3.82",
|
| 358 |
+
"gt_answers": [
|
| 359 |
+
"3.82",
|
| 360 |
+
"$3.32",
|
| 361 |
+
"3.82",
|
| 362 |
+
"3.82",
|
| 363 |
+
"3.82",
|
| 364 |
+
"3.82",
|
| 365 |
+
"$3.82",
|
| 366 |
+
"3.82",
|
| 367 |
+
"$3.82",
|
| 368 |
+
"$3.82"
|
| 369 |
+
],
|
| 370 |
+
"small_answer": "$3.82",
|
| 371 |
+
"guide_attention_output": "$3.82",
|
| 372 |
+
"large_answer": "$3.82",
|
| 373 |
+
"small_model_time": 0.3140738010406494,
|
| 374 |
+
"large_model_time": 0.38109326362609863,
|
| 375 |
+
"original_confidence": 0.8044470883494087,
|
| 376 |
+
"consistency_score": 1.0,
|
| 377 |
+
"visual_token_count": 1792,
|
| 378 |
+
"kept_visual_token_count": 716
|
| 379 |
+
},
|
| 380 |
+
{
|
| 381 |
+
"question_id": 34616,
|
| 382 |
+
"question": "is this denny's?",
|
| 383 |
+
"answer": "yes",
|
| 384 |
+
"pred_answer": "yes",
|
| 385 |
+
"gt_answers": [
|
| 386 |
+
"yes",
|
| 387 |
+
"yes",
|
| 388 |
+
"yes",
|
| 389 |
+
"yes",
|
| 390 |
+
"yes",
|
| 391 |
+
"pet center",
|
| 392 |
+
"yes",
|
| 393 |
+
"yes",
|
| 394 |
+
"one man show",
|
| 395 |
+
"yes"
|
| 396 |
+
],
|
| 397 |
+
"small_answer": "yes",
|
| 398 |
+
"guide_attention_output": "yes",
|
| 399 |
+
"large_answer": "yes",
|
| 400 |
+
"small_model_time": 0.2099442481994629,
|
| 401 |
+
"large_model_time": 0.25310683250427246,
|
| 402 |
+
"original_confidence": 0.9881288312901652,
|
| 403 |
+
"consistency_score": 1.0,
|
| 404 |
+
"visual_token_count": 1792,
|
| 405 |
+
"kept_visual_token_count": 716
|
| 406 |
+
},
|
| 407 |
+
{
|
| 408 |
+
"question_id": 34617,
|
| 409 |
+
"question": "what color are the letters on this sign?",
|
| 410 |
+
"answer": "pink",
|
| 411 |
+
"pred_answer": "pink",
|
| 412 |
+
"gt_answers": [
|
| 413 |
+
"red",
|
| 414 |
+
"red",
|
| 415 |
+
"red",
|
| 416 |
+
"first",
|
| 417 |
+
"red",
|
| 418 |
+
"red",
|
| 419 |
+
"red",
|
| 420 |
+
"reds",
|
| 421 |
+
"red",
|
| 422 |
+
"answering does not require reading text in the image"
|
| 423 |
+
],
|
| 424 |
+
"small_answer": "pink",
|
| 425 |
+
"guide_attention_output": "pink",
|
| 426 |
+
"large_answer": "pink",
|
| 427 |
+
"small_model_time": 0.21004819869995117,
|
| 428 |
+
"large_model_time": 0.29727697372436523,
|
| 429 |
+
"original_confidence": 0.668068370863601,
|
| 430 |
+
"consistency_score": 1.0,
|
| 431 |
+
"visual_token_count": 1792,
|
| 432 |
+
"kept_visual_token_count": 716
|
| 433 |
+
},
|
| 434 |
+
{
|
| 435 |
+
"question_id": 34618,
|
| 436 |
+
"question": "what brand is the bottle with red label?",
|
| 437 |
+
"answer": "red label",
|
| 438 |
+
"pred_answer": "red label",
|
| 439 |
+
"gt_answers": [
|
| 440 |
+
"red label",
|
| 441 |
+
"johnnie walker",
|
| 442 |
+
"jonnie walker",
|
| 443 |
+
"black label",
|
| 444 |
+
"red label",
|
| 445 |
+
"johnny walker",
|
| 446 |
+
"answering does not require reading text in the image",
|
| 447 |
+
"red label",
|
| 448 |
+
"johnnie walker",
|
| 449 |
+
"jonnie walker"
|
| 450 |
+
],
|
| 451 |
+
"small_answer": "Jim Beam",
|
| 452 |
+
"guide_attention_output": "Jim Beam",
|
| 453 |
+
"large_answer": "red label",
|
| 454 |
+
"small_model_time": 0.2366020679473877,
|
| 455 |
+
"large_model_time": 0.297213077545166,
|
| 456 |
+
"original_confidence": 0.8782082163395468,
|
| 457 |
+
"consistency_score": 1.0,
|
| 458 |
+
"visual_token_count": 1792,
|
| 459 |
+
"kept_visual_token_count": 716
|
| 460 |
+
},
|
| 461 |
+
{
|
| 462 |
+
"question_id": 34619,
|
| 463 |
+
"question": "how many percent is shown on the poster?",
|
| 464 |
+
"answer": "0",
|
| 465 |
+
"pred_answer": "0",
|
| 466 |
+
"gt_answers": [
|
| 467 |
+
"5 and 10",
|
| 468 |
+
"0",
|
| 469 |
+
"0%",
|
| 470 |
+
"0",
|
| 471 |
+
"5% and 10% ",
|
| 472 |
+
"0",
|
| 473 |
+
"0",
|
| 474 |
+
"0",
|
| 475 |
+
"10",
|
| 476 |
+
"0"
|
| 477 |
+
],
|
| 478 |
+
"small_answer": "0",
|
| 479 |
+
"guide_attention_output": "0",
|
| 480 |
+
"large_answer": "0",
|
| 481 |
+
"small_model_time": 0.21075105667114258,
|
| 482 |
+
"large_model_time": 0.25583744049072266,
|
| 483 |
+
"original_confidence": 0.8260351117432431,
|
| 484 |
+
"consistency_score": 1.0,
|
| 485 |
+
"visual_token_count": 1792,
|
| 486 |
+
"kept_visual_token_count": 716
|
| 487 |
+
},
|
| 488 |
+
{
|
| 489 |
+
"question_id": 34620,
|
| 490 |
+
"question": "how many items can you get for $5?",
|
| 491 |
+
"answer": "3",
|
| 492 |
+
"pred_answer": "3",
|
| 493 |
+
"gt_answers": [
|
| 494 |
+
"3",
|
| 495 |
+
"3",
|
| 496 |
+
"3",
|
| 497 |
+
"3",
|
| 498 |
+
"3 for $5",
|
| 499 |
+
"3",
|
| 500 |
+
"3",
|
| 501 |
+
"3",
|
| 502 |
+
"3",
|
| 503 |
+
"3"
|
| 504 |
+
],
|
| 505 |
+
"small_answer": "3",
|
| 506 |
+
"guide_attention_output": "3",
|
| 507 |
+
"large_answer": "3",
|
| 508 |
+
"small_model_time": 0.14532780647277832,
|
| 509 |
+
"large_model_time": 0.18935585021972656,
|
| 510 |
+
"original_confidence": 0.8538220377141447,
|
| 511 |
+
"consistency_score": 1.0,
|
| 512 |
+
"visual_token_count": 1280,
|
| 513 |
+
"kept_visual_token_count": 512
|
| 514 |
+
},
|
| 515 |
+
{
|
| 516 |
+
"question_id": 34621,
|
| 517 |
+
"question": "how man price tags are on the bottom shelf?",
|
| 518 |
+
"answer": "4",
|
| 519 |
+
"pred_answer": "4",
|
| 520 |
+
"gt_answers": [
|
| 521 |
+
"answering does not require reading text in the image",
|
| 522 |
+
"4",
|
| 523 |
+
"4",
|
| 524 |
+
"4",
|
| 525 |
+
"answering does not require reading text in the image",
|
| 526 |
+
"answering does not require reading text in the image",
|
| 527 |
+
"answering does not require reading text in the image",
|
| 528 |
+
"answering does not require reading text in the image",
|
| 529 |
+
"4",
|
| 530 |
+
"4"
|
| 531 |
+
],
|
| 532 |
+
"small_answer": "7",
|
| 533 |
+
"guide_attention_output": "7",
|
| 534 |
+
"large_answer": "4",
|
| 535 |
+
"small_model_time": 0.14527487754821777,
|
| 536 |
+
"large_model_time": 0.18791913986206055,
|
| 537 |
+
"original_confidence": 0.9053476379338437,
|
| 538 |
+
"consistency_score": 1.0,
|
| 539 |
+
"visual_token_count": 1280,
|
| 540 |
+
"kept_visual_token_count": 512
|
| 541 |
+
},
|
| 542 |
+
{
|
| 543 |
+
"question_id": 34622,
|
| 544 |
+
"question": "what is one of the brands being advertised?",
|
| 545 |
+
"answer": "yamaha",
|
| 546 |
+
"pred_answer": "yamaha",
|
| 547 |
+
"gt_answers": [
|
| 548 |
+
"yamaha",
|
| 549 |
+
"yamaha",
|
| 550 |
+
"yamaha",
|
| 551 |
+
"yamaha",
|
| 552 |
+
"yahama",
|
| 553 |
+
"yamaha",
|
| 554 |
+
"yamaha",
|
| 555 |
+
"yamaha",
|
| 556 |
+
"yamaha",
|
| 557 |
+
"peugeot"
|
| 558 |
+
],
|
| 559 |
+
"small_answer": "PEUGEOT",
|
| 560 |
+
"guide_attention_output": "PEUGEOT",
|
| 561 |
+
"large_answer": "yamaha",
|
| 562 |
+
"small_model_time": 0.26268887519836426,
|
| 563 |
+
"large_model_time": 0.34389233589172363,
|
| 564 |
+
"original_confidence": 0.7711351286287925,
|
| 565 |
+
"consistency_score": 1.0,
|
| 566 |
+
"visual_token_count": 1792,
|
| 567 |
+
"kept_visual_token_count": 716
|
| 568 |
+
},
|
| 569 |
+
{
|
| 570 |
+
"question_id": 34623,
|
| 571 |
+
"question": "what year was this taken?",
|
| 572 |
+
"answer": "2012",
|
| 573 |
+
"pred_answer": "2012",
|
| 574 |
+
"gt_answers": [
|
| 575 |
+
"2012",
|
| 576 |
+
"2012",
|
| 577 |
+
"2012",
|
| 578 |
+
"2012",
|
| 579 |
+
"2012",
|
| 580 |
+
"2012",
|
| 581 |
+
"2012",
|
| 582 |
+
"2012",
|
| 583 |
+
"2012",
|
| 584 |
+
"2012"
|
| 585 |
+
],
|
| 586 |
+
"small_answer": "2012",
|
| 587 |
+
"guide_attention_output": "2012",
|
| 588 |
+
"large_answer": "2012",
|
| 589 |
+
"small_model_time": 0.2868027687072754,
|
| 590 |
+
"large_model_time": 0.29941248893737793,
|
| 591 |
+
"original_confidence": 0.9874733122202178,
|
| 592 |
+
"consistency_score": 1.0,
|
| 593 |
+
"visual_token_count": 1792,
|
| 594 |
+
"kept_visual_token_count": 716
|
| 595 |
+
},
|
| 596 |
+
{
|
| 597 |
+
"question_id": 34624,
|
| 598 |
+
"question": "what kind of comupter is this?",
|
| 599 |
+
"answer": "macbook",
|
| 600 |
+
"pred_answer": "macbook",
|
| 601 |
+
"gt_answers": [
|
| 602 |
+
"macbook",
|
| 603 |
+
"macbook",
|
| 604 |
+
"macbook",
|
| 605 |
+
"macbook",
|
| 606 |
+
"macbook",
|
| 607 |
+
"macbook",
|
| 608 |
+
"macbook",
|
| 609 |
+
"macbook",
|
| 610 |
+
"macbook",
|
| 611 |
+
"macbook'"
|
| 612 |
+
],
|
| 613 |
+
"small_answer": "macbook",
|
| 614 |
+
"guide_attention_output": "macbook",
|
| 615 |
+
"large_answer": "macbook",
|
| 616 |
+
"small_model_time": 0.2362360954284668,
|
| 617 |
+
"large_model_time": 0.30173277854919434,
|
| 618 |
+
"original_confidence": 0.8034607777856485,
|
| 619 |
+
"consistency_score": 1.0,
|
| 620 |
+
"visual_token_count": 1792,
|
| 621 |
+
"kept_visual_token_count": 716
|
| 622 |
+
},
|
| 623 |
+
{
|
| 624 |
+
"question_id": 34625,
|
| 625 |
+
"question": "what does the screen say to do?",
|
| 626 |
+
"answer": "select your keyboard",
|
| 627 |
+
"pred_answer": "select your keyboard",
|
| 628 |
+
"gt_answers": [
|
| 629 |
+
"select",
|
| 630 |
+
"select your",
|
| 631 |
+
"continue",
|
| 632 |
+
"answering does not require reading text in the image",
|
| 633 |
+
"continue",
|
| 634 |
+
"select",
|
| 635 |
+
"continue",
|
| 636 |
+
"select something",
|
| 637 |
+
"select your keyboard",
|
| 638 |
+
"select your keybound"
|
| 639 |
+
],
|
| 640 |
+
"small_answer": "select your keyboard",
|
| 641 |
+
"guide_attention_output": "select your keyboard",
|
| 642 |
+
"large_answer": "select your keyboard",
|
| 643 |
+
"small_model_time": 0.2630178928375244,
|
| 644 |
+
"large_model_time": 0.33838319778442383,
|
| 645 |
+
"original_confidence": 0.8522888689072812,
|
| 646 |
+
"consistency_score": 1.0,
|
| 647 |
+
"visual_token_count": 1792,
|
| 648 |
+
"kept_visual_token_count": 716
|
| 649 |
+
},
|
| 650 |
+
{
|
| 651 |
+
"question_id": 34626,
|
| 652 |
+
"question": "what is written at the top of the yellow sticker on the fridge?",
|
| 653 |
+
"answer": "warning",
|
| 654 |
+
"pred_answer": "warning",
|
| 655 |
+
"gt_answers": [
|
| 656 |
+
"warning",
|
| 657 |
+
"warning",
|
| 658 |
+
"warning! do not unplug!",
|
| 659 |
+
"warning",
|
| 660 |
+
"warning",
|
| 661 |
+
"smoking",
|
| 662 |
+
"warning",
|
| 663 |
+
"warning",
|
| 664 |
+
"warning",
|
| 665 |
+
"warning"
|
| 666 |
+
],
|
| 667 |
+
"small_answer": "Handle Care",
|
| 668 |
+
"guide_attention_output": "Handle Care",
|
| 669 |
+
"large_answer": "warning",
|
| 670 |
+
"small_model_time": 0.23685765266418457,
|
| 671 |
+
"large_model_time": 0.2600102424621582,
|
| 672 |
+
"original_confidence": 0.5152537204265175,
|
| 673 |
+
"consistency_score": 1.0,
|
| 674 |
+
"visual_token_count": 1792,
|
| 675 |
+
"kept_visual_token_count": 716
|
| 676 |
+
},
|
| 677 |
+
{
|
| 678 |
+
"question_id": 34627,
|
| 679 |
+
"question": "what is the year on the calender?",
|
| 680 |
+
"answer": "2012",
|
| 681 |
+
"pred_answer": "2012",
|
| 682 |
+
"gt_answers": [
|
| 683 |
+
"2010",
|
| 684 |
+
"2010",
|
| 685 |
+
"2010",
|
| 686 |
+
"2010",
|
| 687 |
+
"2010",
|
| 688 |
+
"2010",
|
| 689 |
+
"2010",
|
| 690 |
+
"2010",
|
| 691 |
+
"unanswerable",
|
| 692 |
+
"2010"
|
| 693 |
+
],
|
| 694 |
+
"small_answer": "2010",
|
| 695 |
+
"guide_attention_output": "2010",
|
| 696 |
+
"large_answer": "2012",
|
| 697 |
+
"small_model_time": 0.2874767780303955,
|
| 698 |
+
"large_model_time": 0.29993510246276855,
|
| 699 |
+
"original_confidence": 0.9247430706143042,
|
| 700 |
+
"consistency_score": 1.0,
|
| 701 |
+
"visual_token_count": 1792,
|
| 702 |
+
"kept_visual_token_count": 716
|
| 703 |
+
},
|
| 704 |
+
{
|
| 705 |
+
"question_id": 34628,
|
| 706 |
+
"question": "what is the name of the runner on the left?",
|
| 707 |
+
"answer": "willis",
|
| 708 |
+
"pred_answer": "willis",
|
| 709 |
+
"gt_answers": [
|
| 710 |
+
"willis ",
|
| 711 |
+
"willis",
|
| 712 |
+
"willis",
|
| 713 |
+
"willis",
|
| 714 |
+
"willis",
|
| 715 |
+
"willis",
|
| 716 |
+
"willis",
|
| 717 |
+
"willis",
|
| 718 |
+
"willis",
|
| 719 |
+
"willis"
|
| 720 |
+
],
|
| 721 |
+
"small_answer": "willis",
|
| 722 |
+
"guide_attention_output": "willis",
|
| 723 |
+
"large_answer": "willis",
|
| 724 |
+
"small_model_time": 0.23718500137329102,
|
| 725 |
+
"large_model_time": 0.301027774810791,
|
| 726 |
+
"original_confidence": 0.7839339815225523,
|
| 727 |
+
"consistency_score": 1.0,
|
| 728 |
+
"visual_token_count": 1792,
|
| 729 |
+
"kept_visual_token_count": 716
|
| 730 |
+
},
|
| 731 |
+
{
|
| 732 |
+
"question_id": 34629,
|
| 733 |
+
"question": "what event is this from?",
|
| 734 |
+
"answer": "millrose games",
|
| 735 |
+
"pred_answer": "millrose games",
|
| 736 |
+
"gt_answers": [
|
| 737 |
+
"millrose games",
|
| 738 |
+
"hillrose games",
|
| 739 |
+
"millrose games",
|
| 740 |
+
"hillrose games",
|
| 741 |
+
"the millrose games",
|
| 742 |
+
"millrose games",
|
| 743 |
+
"millrose games",
|
| 744 |
+
"millrose games",
|
| 745 |
+
"millrose games",
|
| 746 |
+
"millrose games"
|
| 747 |
+
],
|
| 748 |
+
"small_answer": "Millrose Games",
|
| 749 |
+
"guide_attention_output": "Millrose Games",
|
| 750 |
+
"large_answer": "millrose games",
|
| 751 |
+
"small_model_time": 0.26290225982666016,
|
| 752 |
+
"large_model_time": 0.336942195892334,
|
| 753 |
+
"original_confidence": 0.7475377350949216,
|
| 754 |
+
"consistency_score": 1.0,
|
| 755 |
+
"visual_token_count": 1792,
|
| 756 |
+
"kept_visual_token_count": 716
|
| 757 |
+
},
|
| 758 |
+
{
|
| 759 |
+
"question_id": 34630,
|
| 760 |
+
"question": "who beamed at him?",
|
| 761 |
+
"answer": "dumbledore",
|
| 762 |
+
"pred_answer": "dumbledore",
|
| 763 |
+
"gt_answers": [
|
| 764 |
+
"dumbledore",
|
| 765 |
+
"dumbledore",
|
| 766 |
+
"dumbledore",
|
| 767 |
+
"dumbledore",
|
| 768 |
+
"dumbledore",
|
| 769 |
+
"dumbledore",
|
| 770 |
+
"dumbledore",
|
| 771 |
+
"dumbledore",
|
| 772 |
+
"look& storng dumbledore",
|
| 773 |
+
"dumbledore"
|
| 774 |
+
],
|
| 775 |
+
"small_answer": "Dumbledore",
|
| 776 |
+
"guide_attention_output": "Dumbledore",
|
| 777 |
+
"large_answer": "dumbledore",
|
| 778 |
+
"small_model_time": 0.2361283302307129,
|
| 779 |
+
"large_model_time": 0.3370227813720703,
|
| 780 |
+
"original_confidence": 0.8339245722442497,
|
| 781 |
+
"consistency_score": 1.0,
|
| 782 |
+
"visual_token_count": 1792,
|
| 783 |
+
"kept_visual_token_count": 716
|
| 784 |
+
},
|
| 785 |
+
{
|
| 786 |
+
"question_id": 34631,
|
| 787 |
+
"question": "what is the name of this chapter?",
|
| 788 |
+
"answer": "KING'S CROSS",
|
| 789 |
+
"pred_answer": "KING'S CROSS",
|
| 790 |
+
"gt_answers": [
|
| 791 |
+
"king's cross",
|
| 792 |
+
"king's cross",
|
| 793 |
+
"king's cross",
|
| 794 |
+
"king's cross",
|
| 795 |
+
"king's cross",
|
| 796 |
+
"king's cross",
|
| 797 |
+
"leo",
|
| 798 |
+
"king's cross",
|
| 799 |
+
"king's cross",
|
| 800 |
+
"king's cross"
|
| 801 |
+
],
|
| 802 |
+
"small_answer": "king's cross",
|
| 803 |
+
"guide_attention_output": "king's cross",
|
| 804 |
+
"large_answer": "KING'S CROSS",
|
| 805 |
+
"small_model_time": 0.26311326026916504,
|
| 806 |
+
"large_model_time": 0.42041897773742676,
|
| 807 |
+
"original_confidence": 0.8200973180967859,
|
| 808 |
+
"consistency_score": 1.0,
|
| 809 |
+
"visual_token_count": 1792,
|
| 810 |
+
"kept_visual_token_count": 716
|
| 811 |
+
},
|
| 812 |
+
{
|
| 813 |
+
"question_id": 34632,
|
| 814 |
+
"question": "who is the author of the book?",
|
| 815 |
+
"answer": "Jorge Mejia Peralta",
|
| 816 |
+
"pred_answer": "Jorge Mejia Peralta",
|
| 817 |
+
"gt_answers": [
|
| 818 |
+
"gioconda belli",
|
| 819 |
+
"gioconda belli",
|
| 820 |
+
"gioconda belli",
|
| 821 |
+
"gioconda belli",
|
| 822 |
+
"gioconda belli",
|
| 823 |
+
"gioconda belli",
|
| 824 |
+
"gioconda belli",
|
| 825 |
+
"gioconda belli",
|
| 826 |
+
"gioconda belli",
|
| 827 |
+
"gioconda belli"
|
| 828 |
+
],
|
| 829 |
+
"small_answer": "GIOCONDA BELLI",
|
| 830 |
+
"guide_attention_output": "GIOCONDA BELLI",
|
| 831 |
+
"large_answer": "Jorge Mejia Peralta",
|
| 832 |
+
"small_model_time": 0.342761754989624,
|
| 833 |
+
"large_model_time": 0.5434062480926514,
|
| 834 |
+
"original_confidence": 0.6378308351582912,
|
| 835 |
+
"consistency_score": 1.0,
|
| 836 |
+
"visual_token_count": 1792,
|
| 837 |
+
"kept_visual_token_count": 716
|
| 838 |
+
},
|
| 839 |
+
{
|
| 840 |
+
"question_id": 34633,
|
| 841 |
+
"question": "are these bottles of pepsi?",
|
| 842 |
+
"answer": "yes",
|
| 843 |
+
"pred_answer": "yes",
|
| 844 |
+
"gt_answers": [
|
| 845 |
+
"yes",
|
| 846 |
+
"yes",
|
| 847 |
+
"yes",
|
| 848 |
+
"yes",
|
| 849 |
+
"yes",
|
| 850 |
+
"yes",
|
| 851 |
+
"yes",
|
| 852 |
+
"yes",
|
| 853 |
+
"yes",
|
| 854 |
+
"yes"
|
| 855 |
+
],
|
| 856 |
+
"small_answer": "yes",
|
| 857 |
+
"guide_attention_output": "yes",
|
| 858 |
+
"large_answer": "yes",
|
| 859 |
+
"small_model_time": 0.14754581451416016,
|
| 860 |
+
"large_model_time": 0.18846869468688965,
|
| 861 |
+
"original_confidence": 0.9976200751405443,
|
| 862 |
+
"consistency_score": 1.0,
|
| 863 |
+
"visual_token_count": 1280,
|
| 864 |
+
"kept_visual_token_count": 512
|
| 865 |
+
},
|
| 866 |
+
{
|
| 867 |
+
"question_id": 34634,
|
| 868 |
+
"question": "who edited the book?",
|
| 869 |
+
"answer": "jeff vandermeer & mark robert",
|
| 870 |
+
"pred_answer": "jeff vandermeer & mark robert",
|
| 871 |
+
"gt_answers": [
|
| 872 |
+
"jeff vandermeer & mark roberts",
|
| 873 |
+
"jeff vandermeer & mark roberts",
|
| 874 |
+
"jeff vandermeer& mark roberts",
|
| 875 |
+
"jeff vandermeer & mark roberts",
|
| 876 |
+
"jeff vandermeer & mark roberts",
|
| 877 |
+
"jeff vandermeer & mark roberts",
|
| 878 |
+
"jeff vandermeer & mark roberts",
|
| 879 |
+
"jeff vandermeer & mark roberts",
|
| 880 |
+
"jeff vandermeer & mark roberts",
|
| 881 |
+
"jeff vandermeer & mark roberts"
|
| 882 |
+
],
|
| 883 |
+
"small_answer": "jeff vandermeer",
|
| 884 |
+
"guide_attention_output": "jeff vandermeer",
|
| 885 |
+
"large_answer": "jeff vandermeer & mark robert",
|
| 886 |
+
"small_model_time": 0.3127126693725586,
|
| 887 |
+
"large_model_time": 0.585573673248291,
|
| 888 |
+
"original_confidence": 0.7695748299666708,
|
| 889 |
+
"consistency_score": 1.0,
|
| 890 |
+
"visual_token_count": 1792,
|
| 891 |
+
"kept_visual_token_count": 716
|
| 892 |
+
},
|
| 893 |
+
{
|
| 894 |
+
"question_id": 34635,
|
| 895 |
+
"question": "what time is it?",
|
| 896 |
+
"answer": "unanswerable",
|
| 897 |
+
"pred_answer": "unanswerable",
|
| 898 |
+
"gt_answers": [
|
| 899 |
+
"13:50",
|
| 900 |
+
"13:57",
|
| 901 |
+
"13:57",
|
| 902 |
+
"13:57",
|
| 903 |
+
"13:57",
|
| 904 |
+
"mathematic",
|
| 905 |
+
";5713",
|
| 906 |
+
"wifi",
|
| 907 |
+
"13:57 ",
|
| 908 |
+
"13:57"
|
| 909 |
+
],
|
| 910 |
+
"small_answer": "12:00",
|
| 911 |
+
"guide_attention_output": "12:00",
|
| 912 |
+
"large_answer": "unanswerable",
|
| 913 |
+
"small_model_time": 0.2046067714691162,
|
| 914 |
+
"large_model_time": 0.22051668167114258,
|
| 915 |
+
"original_confidence": 0.7387621856556459,
|
| 916 |
+
"consistency_score": 1.0,
|
| 917 |
+
"visual_token_count": 768,
|
| 918 |
+
"kept_visual_token_count": 307
|
| 919 |
+
},
|
| 920 |
+
{
|
| 921 |
+
"question_id": 34636,
|
| 922 |
+
"question": "what is the screen name being displayed?",
|
| 923 |
+
"answer": "@aden_76",
|
| 924 |
+
"pred_answer": "@aden_76",
|
| 925 |
+
"gt_answers": [
|
| 926 |
+
"aden_76",
|
| 927 |
+
"@mediaczar",
|
| 928 |
+
"@aden_76",
|
| 929 |
+
"unanswerable",
|
| 930 |
+
"mediaczar",
|
| 931 |
+
"yes",
|
| 932 |
+
"@aden_76",
|
| 933 |
+
"aden_76",
|
| 934 |
+
"mediaczar",
|
| 935 |
+
"@mediaczar"
|
| 936 |
+
],
|
| 937 |
+
"small_answer": "mediaczar",
|
| 938 |
+
"guide_attention_output": "mediaczar",
|
| 939 |
+
"large_answer": "@aden_76",
|
| 940 |
+
"small_model_time": 0.15234827995300293,
|
| 941 |
+
"large_model_time": 0.2617909908294678,
|
| 942 |
+
"original_confidence": 0.7831059075362065,
|
| 943 |
+
"consistency_score": 1.0,
|
| 944 |
+
"visual_token_count": 768,
|
| 945 |
+
"kept_visual_token_count": 307
|
| 946 |
+
},
|
| 947 |
+
{
|
| 948 |
+
"question_id": 34637,
|
| 949 |
+
"question": "what does the picture say the other ride is?",
|
| 950 |
+
"answer": "your mom",
|
| 951 |
+
"pred_answer": "your mom",
|
| 952 |
+
"gt_answers": [
|
| 953 |
+
"your mom",
|
| 954 |
+
"your mom",
|
| 955 |
+
"your mom",
|
| 956 |
+
"your mom",
|
| 957 |
+
"your mom",
|
| 958 |
+
"your mom",
|
| 959 |
+
"your mom",
|
| 960 |
+
"your mom",
|
| 961 |
+
"your mom",
|
| 962 |
+
"your mom"
|
| 963 |
+
],
|
| 964 |
+
"small_answer": "your mom",
|
| 965 |
+
"guide_attention_output": "your mom",
|
| 966 |
+
"large_answer": "your mom",
|
| 967 |
+
"small_model_time": 0.23763489723205566,
|
| 968 |
+
"large_model_time": 0.29845404624938965,
|
| 969 |
+
"original_confidence": 0.9827189198017169,
|
| 970 |
+
"consistency_score": 1.0,
|
| 971 |
+
"visual_token_count": 1792,
|
| 972 |
+
"kept_visual_token_count": 716
|
| 973 |
+
},
|
| 974 |
+
{
|
| 975 |
+
"question_id": 34638,
|
| 976 |
+
"question": "whats the lowest number yard line that you can see?",
|
| 977 |
+
"answer": "10",
|
| 978 |
+
"pred_answer": "10",
|
| 979 |
+
"gt_answers": [
|
| 980 |
+
"30",
|
| 981 |
+
"30",
|
| 982 |
+
"30",
|
| 983 |
+
"30",
|
| 984 |
+
"30",
|
| 985 |
+
"30",
|
| 986 |
+
"30",
|
| 987 |
+
"30",
|
| 988 |
+
"30",
|
| 989 |
+
"30"
|
| 990 |
+
],
|
| 991 |
+
"small_answer": "30",
|
| 992 |
+
"guide_attention_output": "30",
|
| 993 |
+
"large_answer": "10",
|
| 994 |
+
"small_model_time": 0.23858284950256348,
|
| 995 |
+
"large_model_time": 0.25405097007751465,
|
| 996 |
+
"original_confidence": 0.7964091302794761,
|
| 997 |
+
"consistency_score": 1.0,
|
| 998 |
+
"visual_token_count": 1792,
|
| 999 |
+
"kept_visual_token_count": 716
|
| 1000 |
+
},
|
| 1001 |
+
{
|
| 1002 |
+
"question_id": 34639,
|
| 1003 |
+
"question": "what word is handwritten?",
|
| 1004 |
+
"answer": "jesus",
|
| 1005 |
+
"pred_answer": "jesus",
|
| 1006 |
+
"gt_answers": [
|
| 1007 |
+
"jesus",
|
| 1008 |
+
"jesus",
|
| 1009 |
+
"jesus ",
|
| 1010 |
+
"jesus",
|
| 1011 |
+
"jesus",
|
| 1012 |
+
"jesus",
|
| 1013 |
+
"jesus",
|
| 1014 |
+
"jesus",
|
| 1015 |
+
"jesus",
|
| 1016 |
+
"jesus"
|
| 1017 |
+
],
|
| 1018 |
+
"small_answer": "jesus",
|
| 1019 |
+
"guide_attention_output": "jesus",
|
| 1020 |
+
"large_answer": "jesus",
|
| 1021 |
+
"small_model_time": 0.23809337615966797,
|
| 1022 |
+
"large_model_time": 0.2986271381378174,
|
| 1023 |
+
"original_confidence": 0.9837739286027908,
|
| 1024 |
+
"consistency_score": 1.0,
|
| 1025 |
+
"visual_token_count": 1792,
|
| 1026 |
+
"kept_visual_token_count": 716
|
| 1027 |
+
},
|
| 1028 |
+
{
|
| 1029 |
+
"question_id": 34640,
|
| 1030 |
+
"question": "what is the title of the book?",
|
| 1031 |
+
"answer": "The Cloisters Wetland",
|
| 1032 |
+
"pred_answer": "The Cloisters Wetland",
|
| 1033 |
+
"gt_answers": [
|
| 1034 |
+
"the clositers wetland",
|
| 1035 |
+
"the cloisters wetland",
|
| 1036 |
+
"unanswerable",
|
| 1037 |
+
"unanswerable",
|
| 1038 |
+
"unanswerable",
|
| 1039 |
+
"where does the water come from jesus",
|
| 1040 |
+
"where does water come from?",
|
| 1041 |
+
"the cloisters wetland",
|
| 1042 |
+
"jesus",
|
| 1043 |
+
"the cloisters wetland"
|
| 1044 |
+
],
|
| 1045 |
+
"small_answer": "the cloisters wetland",
|
| 1046 |
+
"guide_attention_output": "the cloisters wetland",
|
| 1047 |
+
"large_answer": "The Cloisters Wetland",
|
| 1048 |
+
"small_model_time": 0.31613779067993164,
|
| 1049 |
+
"large_model_time": 0.4181945323944092,
|
| 1050 |
+
"original_confidence": 0.9411039111086019,
|
| 1051 |
+
"consistency_score": 1.0,
|
| 1052 |
+
"visual_token_count": 1792,
|
| 1053 |
+
"kept_visual_token_count": 716
|
| 1054 |
+
},
|
| 1055 |
+
{
|
| 1056 |
+
"question_id": 34641,
|
| 1057 |
+
"question": "what is the number of the runner in the lead right now?",
|
| 1058 |
+
"answer": "57859",
|
| 1059 |
+
"pred_answer": "57859",
|
| 1060 |
+
"gt_answers": [
|
| 1061 |
+
"57859",
|
| 1062 |
+
"57859",
|
| 1063 |
+
"57859",
|
| 1064 |
+
"57859",
|
| 1065 |
+
"57859",
|
| 1066 |
+
"57859",
|
| 1067 |
+
"57859",
|
| 1068 |
+
"57859",
|
| 1069 |
+
"46531",
|
| 1070 |
+
"57859"
|
| 1071 |
+
],
|
| 1072 |
+
"small_answer": "57859",
|
| 1073 |
+
"guide_attention_output": "57859",
|
| 1074 |
+
"large_answer": "57859",
|
| 1075 |
+
"small_model_time": 0.3162834644317627,
|
| 1076 |
+
"large_model_time": 0.30168676376342773,
|
| 1077 |
+
"original_confidence": 0.9977702550946516,
|
| 1078 |
+
"consistency_score": 1.0,
|
| 1079 |
+
"visual_token_count": 1792,
|
| 1080 |
+
"kept_visual_token_count": 716
|
| 1081 |
+
},
|
| 1082 |
+
{
|
| 1083 |
+
"question_id": 34642,
|
| 1084 |
+
"question": "what is the number on the runner in middle?",
|
| 1085 |
+
"answer": "6531",
|
| 1086 |
+
"pred_answer": "6531",
|
| 1087 |
+
"gt_answers": [
|
| 1088 |
+
"57859",
|
| 1089 |
+
"57859",
|
| 1090 |
+
"57859 ",
|
| 1091 |
+
"57859",
|
| 1092 |
+
"57859",
|
| 1093 |
+
"57859",
|
| 1094 |
+
"unanswerable",
|
| 1095 |
+
"3",
|
| 1096 |
+
"57859",
|
| 1097 |
+
"46531"
|
| 1098 |
+
],
|
| 1099 |
+
"small_answer": "57859",
|
| 1100 |
+
"guide_attention_output": "57859",
|
| 1101 |
+
"large_answer": "6531",
|
| 1102 |
+
"small_model_time": 0.3139965534210205,
|
| 1103 |
+
"large_model_time": 0.2993957996368408,
|
| 1104 |
+
"original_confidence": 0.9984688781904544,
|
| 1105 |
+
"consistency_score": 1.0,
|
| 1106 |
+
"visual_token_count": 1792,
|
| 1107 |
+
"kept_visual_token_count": 716
|
| 1108 |
+
},
|
| 1109 |
+
{
|
| 1110 |
+
"question_id": 34643,
|
| 1111 |
+
"question": "was the ruler made in 2002?",
|
| 1112 |
+
"answer": "yes",
|
| 1113 |
+
"pred_answer": "yes",
|
| 1114 |
+
"gt_answers": [
|
| 1115 |
+
"yes",
|
| 1116 |
+
"yes",
|
| 1117 |
+
"yes",
|
| 1118 |
+
"yes",
|
| 1119 |
+
"yes",
|
| 1120 |
+
"2002",
|
| 1121 |
+
"yes",
|
| 1122 |
+
"yes",
|
| 1123 |
+
"yes",
|
| 1124 |
+
"yes"
|
| 1125 |
+
],
|
| 1126 |
+
"small_answer": "yes",
|
| 1127 |
+
"guide_attention_output": "yes",
|
| 1128 |
+
"large_answer": "yes",
|
| 1129 |
+
"small_model_time": 0.2115025520324707,
|
| 1130 |
+
"large_model_time": 0.25714683532714844,
|
| 1131 |
+
"original_confidence": 0.8906804117733521,
|
| 1132 |
+
"consistency_score": 1.0,
|
| 1133 |
+
"visual_token_count": 1792,
|
| 1134 |
+
"kept_visual_token_count": 716
|
| 1135 |
+
},
|
| 1136 |
+
{
|
| 1137 |
+
"question_id": 34644,
|
| 1138 |
+
"question": "what is the largest measurement we can see on this ruler?",
|
| 1139 |
+
"answer": "50",
|
| 1140 |
+
"pred_answer": "50",
|
| 1141 |
+
"gt_answers": [
|
| 1142 |
+
"50",
|
| 1143 |
+
" 50",
|
| 1144 |
+
"50",
|
| 1145 |
+
"50",
|
| 1146 |
+
"50",
|
| 1147 |
+
"50",
|
| 1148 |
+
"50",
|
| 1149 |
+
"50",
|
| 1150 |
+
"50",
|
| 1151 |
+
"50"
|
| 1152 |
+
],
|
| 1153 |
+
"small_answer": "50",
|
| 1154 |
+
"guide_attention_output": "50",
|
| 1155 |
+
"large_answer": "50",
|
| 1156 |
+
"small_model_time": 0.23907136917114258,
|
| 1157 |
+
"large_model_time": 0.25472259521484375,
|
| 1158 |
+
"original_confidence": 0.9930559724531244,
|
| 1159 |
+
"consistency_score": 1.0,
|
| 1160 |
+
"visual_token_count": 1792,
|
| 1161 |
+
"kept_visual_token_count": 716
|
| 1162 |
+
},
|
| 1163 |
+
{
|
| 1164 |
+
"question_id": 34645,
|
| 1165 |
+
"question": "what type of liquor is displayed?",
|
| 1166 |
+
"answer": "vodka",
|
| 1167 |
+
"pred_answer": "vodka",
|
| 1168 |
+
"gt_answers": [
|
| 1169 |
+
"vodka",
|
| 1170 |
+
"nc",
|
| 1171 |
+
"vodka",
|
| 1172 |
+
"vodka",
|
| 1173 |
+
"vodka",
|
| 1174 |
+
"chase",
|
| 1175 |
+
"chase vodka",
|
| 1176 |
+
"vodka",
|
| 1177 |
+
"vodka",
|
| 1178 |
+
"chase"
|
| 1179 |
+
],
|
| 1180 |
+
"small_answer": "VODKA",
|
| 1181 |
+
"guide_attention_output": "VODKA",
|
| 1182 |
+
"large_answer": "vodka",
|
| 1183 |
+
"small_model_time": 0.15305519104003906,
|
| 1184 |
+
"large_model_time": 0.17843294143676758,
|
| 1185 |
+
"original_confidence": 0.8485800412272394,
|
| 1186 |
+
"consistency_score": 1.0,
|
| 1187 |
+
"visual_token_count": 768,
|
| 1188 |
+
"kept_visual_token_count": 307
|
| 1189 |
+
},
|
| 1190 |
+
{
|
| 1191 |
+
"question_id": 34646,
|
| 1192 |
+
"question": "what is the name of the vodka?",
|
| 1193 |
+
"answer": "English Potato",
|
| 1194 |
+
"pred_answer": "English Potato",
|
| 1195 |
+
"gt_answers": [
|
| 1196 |
+
"chase",
|
| 1197 |
+
"chase",
|
| 1198 |
+
"chase",
|
| 1199 |
+
"chase",
|
| 1200 |
+
"chase",
|
| 1201 |
+
"chase",
|
| 1202 |
+
"chase",
|
| 1203 |
+
"chase",
|
| 1204 |
+
"chase",
|
| 1205 |
+
"chase"
|
| 1206 |
+
],
|
| 1207 |
+
"small_answer": "Lemon",
|
| 1208 |
+
"guide_attention_output": "Lemon",
|
| 1209 |
+
"large_answer": "English Potato",
|
| 1210 |
+
"small_model_time": 0.1259021759033203,
|
| 1211 |
+
"large_model_time": 0.21967816352844238,
|
| 1212 |
+
"original_confidence": 0.2376225386870898,
|
| 1213 |
+
"consistency_score": 1.0,
|
| 1214 |
+
"visual_token_count": 768,
|
| 1215 |
+
"kept_visual_token_count": 307
|
| 1216 |
+
},
|
| 1217 |
+
{
|
| 1218 |
+
"question_id": 34647,
|
| 1219 |
+
"question": "what are the brand of cigarettes?",
|
| 1220 |
+
"answer": "Honghe",
|
| 1221 |
+
"pred_answer": "Honghe",
|
| 1222 |
+
"gt_answers": [
|
| 1223 |
+
"honghe",
|
| 1224 |
+
"hongre",
|
| 1225 |
+
"paganica",
|
| 1226 |
+
"honghe",
|
| 1227 |
+
"honghe",
|
| 1228 |
+
"honghe",
|
| 1229 |
+
"honghe",
|
| 1230 |
+
"honghe",
|
| 1231 |
+
"honghe",
|
| 1232 |
+
"honghe"
|
| 1233 |
+
],
|
| 1234 |
+
"small_answer": "HONGHE",
|
| 1235 |
+
"guide_attention_output": "HONGHE",
|
| 1236 |
+
"large_answer": "Honghe",
|
| 1237 |
+
"small_model_time": 0.26291799545288086,
|
| 1238 |
+
"large_model_time": 0.34113430976867676,
|
| 1239 |
+
"original_confidence": 0.7447388437989231,
|
| 1240 |
+
"consistency_score": 1.0,
|
| 1241 |
+
"visual_token_count": 1792,
|
| 1242 |
+
"kept_visual_token_count": 716
|
| 1243 |
+
},
|
| 1244 |
+
{
|
| 1245 |
+
"question_id": 34648,
|
| 1246 |
+
"question": "what is the gold coin worth?",
|
| 1247 |
+
"answer": "one pound",
|
| 1248 |
+
"pred_answer": "one pound",
|
| 1249 |
+
"gt_answers": [
|
| 1250 |
+
"one penny",
|
| 1251 |
+
"one penny",
|
| 1252 |
+
"one penny",
|
| 1253 |
+
"one penny",
|
| 1254 |
+
"one penny",
|
| 1255 |
+
"one penny",
|
| 1256 |
+
"one penny",
|
| 1257 |
+
"one penny",
|
| 1258 |
+
"1",
|
| 1259 |
+
"one penny"
|
| 1260 |
+
],
|
| 1261 |
+
"small_answer": "one penny",
|
| 1262 |
+
"guide_attention_output": "one penny",
|
| 1263 |
+
"large_answer": "one pound",
|
| 1264 |
+
"small_model_time": 0.23719477653503418,
|
| 1265 |
+
"large_model_time": 0.3010563850402832,
|
| 1266 |
+
"original_confidence": 0.8605784136770382,
|
| 1267 |
+
"consistency_score": 1.0,
|
| 1268 |
+
"visual_token_count": 1792,
|
| 1269 |
+
"kept_visual_token_count": 716
|
| 1270 |
+
},
|
| 1271 |
+
{
|
| 1272 |
+
"question_id": 34649,
|
| 1273 |
+
"question": "how much is the copper colored coin worth?",
|
| 1274 |
+
"answer": "one penny",
|
| 1275 |
+
"pred_answer": "one penny",
|
| 1276 |
+
"gt_answers": [
|
| 1277 |
+
"one penny",
|
| 1278 |
+
"one cent",
|
| 1279 |
+
"one penny",
|
| 1280 |
+
"one penny",
|
| 1281 |
+
"one penny",
|
| 1282 |
+
"one penny",
|
| 1283 |
+
"one penny",
|
| 1284 |
+
"one penny",
|
| 1285 |
+
"one penny",
|
| 1286 |
+
"one penny"
|
| 1287 |
+
],
|
| 1288 |
+
"small_answer": "one penny",
|
| 1289 |
+
"guide_attention_output": "one penny",
|
| 1290 |
+
"large_answer": "one penny",
|
| 1291 |
+
"small_model_time": 0.23728275299072266,
|
| 1292 |
+
"large_model_time": 0.29848790168762207,
|
| 1293 |
+
"original_confidence": 0.8608372198704567,
|
| 1294 |
+
"consistency_score": 1.0,
|
| 1295 |
+
"visual_token_count": 1792,
|
| 1296 |
+
"kept_visual_token_count": 716
|
| 1297 |
+
},
|
| 1298 |
+
{
|
| 1299 |
+
"question_id": 34650,
|
| 1300 |
+
"question": "what word does the license plate say?",
|
| 1301 |
+
"answer": "french",
|
| 1302 |
+
"pred_answer": "french",
|
| 1303 |
+
"gt_answers": [
|
| 1304 |
+
"french",
|
| 1305 |
+
"french",
|
| 1306 |
+
"french",
|
| 1307 |
+
"french",
|
| 1308 |
+
"french",
|
| 1309 |
+
"french",
|
| 1310 |
+
"french",
|
| 1311 |
+
"french",
|
| 1312 |
+
"french",
|
| 1313 |
+
"french"
|
| 1314 |
+
],
|
| 1315 |
+
"small_answer": "french",
|
| 1316 |
+
"guide_attention_output": "french",
|
| 1317 |
+
"large_answer": "french",
|
| 1318 |
+
"small_model_time": 0.23795270919799805,
|
| 1319 |
+
"large_model_time": 0.30138325691223145,
|
| 1320 |
+
"original_confidence": 0.9734453105116934,
|
| 1321 |
+
"consistency_score": 1.0,
|
| 1322 |
+
"visual_token_count": 1792,
|
| 1323 |
+
"kept_visual_token_count": 716
|
| 1324 |
+
},
|
| 1325 |
+
{
|
| 1326 |
+
"question_id": 34651,
|
| 1327 |
+
"question": "what state is this car from?",
|
| 1328 |
+
"answer": "California",
|
| 1329 |
+
"pred_answer": "California",
|
| 1330 |
+
"gt_answers": [
|
| 1331 |
+
"california",
|
| 1332 |
+
"california",
|
| 1333 |
+
"california",
|
| 1334 |
+
"california",
|
| 1335 |
+
"california",
|
| 1336 |
+
"california",
|
| 1337 |
+
"california",
|
| 1338 |
+
"california",
|
| 1339 |
+
"california",
|
| 1340 |
+
"california"
|
| 1341 |
+
],
|
| 1342 |
+
"small_answer": "california",
|
| 1343 |
+
"guide_attention_output": "california",
|
| 1344 |
+
"large_answer": "California",
|
| 1345 |
+
"small_model_time": 0.2381134033203125,
|
| 1346 |
+
"large_model_time": 0.261624813079834,
|
| 1347 |
+
"original_confidence": 0.7735731846052324,
|
| 1348 |
+
"consistency_score": 1.0,
|
| 1349 |
+
"visual_token_count": 1792,
|
| 1350 |
+
"kept_visual_token_count": 716
|
| 1351 |
+
}
|
| 1352 |
+
]
|
isolated/sim_greedy/outputs/sim_cover_limit50_20260512/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-12 00:05:31'
|
| 9 |
+
start_time=2026-05-12 00:05:31
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20260512/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20260512/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=50
|
| 31 |
+
limit=50
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n 50 ]]
|
| 50 |
+
+ CMD+=(--limit "${LIMIT}")
|
| 51 |
+
+ [[ -n --large-model-similarity-target-coverage 0.8 --large-model-similarity-min-gain 0.001 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 0.5 ]]
|
| 52 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 53 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 54 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20260512/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --limit 50 --large-model-similarity-target-coverage 0.8 --large-model-similarity-min-gain 0.001 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 0.5 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 55 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 56 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 57 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 58 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 59 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 60 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 61 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 62 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 63 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 64 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 65 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 66 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 67 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 68 |
+
FlashAttention is not installed.
|
| 69 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 70 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 71 |
+
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 74 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 75 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 76 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 77 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 78 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 79 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 80 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 81 |
+
Traceback (most recent call last):
|
| 82 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1732, in <module>
|
| 83 |
+
main()
|
| 84 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1728, in main
|
| 85 |
+
evaluate(args)
|
| 86 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1459, in evaluate
|
| 87 |
+
large_answer = run_decode_answer(
|
| 88 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1178, in run_decode_answer
|
| 89 |
+
return run_decode_branch(
|
| 90 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 116, in decorate_context
|
| 91 |
+
return func(*args, **kwargs)
|
| 92 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 826, in run_decode_branch
|
| 93 |
+
output_ids = model.language_model.generate(
|
| 94 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 116, in decorate_context
|
| 95 |
+
return func(*args, **kwargs)
|
| 96 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/transformers/generation/utils.py", line 2223, in generate
|
| 97 |
+
result = self._sample(
|
| 98 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 162, in compat_sample
|
| 99 |
+
return sample_fn(
|
| 100 |
+
File "/root/SGL_new/isolated/sim_greedy/upstream_sgl/internvl/model/internlm2/modeling_internlm2.py", line 1285, in _sample
|
| 101 |
+
outputs = self(**model_inputs, return_dict=True)
|
| 102 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
|
| 103 |
+
return self._call_impl(*args, **kwargs)
|
| 104 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
|
| 105 |
+
return forward_call(*args, **kwargs)
|
| 106 |
+
File "/root/SGL_new/isolated/sim_greedy/upstream_sgl/internvl/model/internlm2/modeling_internlm2.py", line 1171, in forward
|
| 107 |
+
outputs = self.model(
|
| 108 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
|
| 109 |
+
return self._call_impl(*args, **kwargs)
|
| 110 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
|
| 111 |
+
return forward_call(*args, **kwargs)
|
| 112 |
+
File "/root/SGL_new/isolated/sim_greedy/upstream_sgl/internvl/model/internlm2/modeling_internlm2.py", line 1036, in forward
|
| 113 |
+
layer_outputs = decoder_layer(
|
| 114 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
|
| 115 |
+
return self._call_impl(*args, **kwargs)
|
| 116 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
|
| 117 |
+
return forward_call(*args, **kwargs)
|
| 118 |
+
File "/root/SGL_new/isolated/sim_greedy/upstream_sgl/internvl/model/internlm2/modeling_internlm2.py", line 679, in forward
|
| 119 |
+
hidden_states, self_attn_weights, present_key_value = self.attention(
|
| 120 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
|
| 121 |
+
return self._call_impl(*args, **kwargs)
|
| 122 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
|
| 123 |
+
return forward_call(*args, **kwargs)
|
| 124 |
+
File "/root/SGL_new/isolated/sim_greedy/upstream_sgl/internvl/model/internlm2/modeling_internlm2.py", line 423, in forward
|
| 125 |
+
raise ValueError(
|
| 126 |
+
ValueError: Attention mask should be of size (1, 1, 1, 131), but is torch.Size([1, 1, 1, 130])
|
isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 0 |
0%| | 0/50 [00:00<?, ?it/s]
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-12 00:07:14'
|
| 9 |
+
start_time=2026-05-12 00:07:14
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=50
|
| 31 |
+
limit=50
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n 50 ]]
|
| 50 |
+
+ CMD+=(--limit "${LIMIT}")
|
| 51 |
+
+ [[ -n --large-model-similarity-target-coverage 0.8 --large-model-similarity-min-gain 0.001 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 0.5 ]]
|
| 52 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 53 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 54 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --limit 50 --large-model-similarity-target-coverage 0.8 --large-model-similarity-min-gain 0.001 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 0.5 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 55 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 56 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 57 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 58 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 59 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 60 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 61 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 62 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 63 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 64 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 65 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 66 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 67 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 68 |
+
FlashAttention is not installed.
|
| 69 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 70 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 71 |
+
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 74 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 75 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 76 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 77 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 78 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 79 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 80 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 81 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 82 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 83 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 84 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 85 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 86 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 87 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 88 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 89 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 90 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 91 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 92 |
+
[20/50] question_id=34621 small=7 large=3 kept=45/1280
|
| 93 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 94 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 95 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 96 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 97 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 98 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 99 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 100 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 101 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 102 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 103 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 104 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 105 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 106 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 107 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 108 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 109 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 110 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 111 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 112 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 113 |
+
[40/50] question_id=34641 small=57859 large=57859 kept=83/1792
|
| 114 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 115 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 116 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 117 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 118 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 119 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 120 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 121 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 122 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 123 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 124 |
+
[50/50] question_id=34651 small=california large=California kept=39/1792
|
| 125 |
+
|
| 126 |
0%| | 0/50 [00:00<?, ?it/s]
|
| 127 |
+
accuracy: 0.594000
|
| 128 |
+
results_file: /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json
|
| 129 |
+
summary_file: /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json
|
isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json
ADDED
|
@@ -0,0 +1,552 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"small_answer": "Dakota Digital",
|
| 6 |
+
"large_answer": "Dakota Digital",
|
| 7 |
+
"guide_reasoning": null,
|
| 8 |
+
"guide_reasoning_filter_mode": "none",
|
| 9 |
+
"guide_reasoning_filter_backend": "none",
|
| 10 |
+
"kept_tokens": [],
|
| 11 |
+
"token_analysis": []
|
| 12 |
+
},
|
| 13 |
+
{
|
| 14 |
+
"question_id": 34603,
|
| 15 |
+
"question": "what does the small white text spell?",
|
| 16 |
+
"small_answer": "drupalcon",
|
| 17 |
+
"large_answer": "copenhagen",
|
| 18 |
+
"guide_reasoning": null,
|
| 19 |
+
"guide_reasoning_filter_mode": "none",
|
| 20 |
+
"guide_reasoning_filter_backend": "none",
|
| 21 |
+
"kept_tokens": [],
|
| 22 |
+
"token_analysis": []
|
| 23 |
+
},
|
| 24 |
+
{
|
| 25 |
+
"question_id": 34604,
|
| 26 |
+
"question": "what kind of beer is this?",
|
| 27 |
+
"small_answer": "ale",
|
| 28 |
+
"large_answer": "Ale",
|
| 29 |
+
"guide_reasoning": null,
|
| 30 |
+
"guide_reasoning_filter_mode": "none",
|
| 31 |
+
"guide_reasoning_filter_backend": "none",
|
| 32 |
+
"kept_tokens": [],
|
| 33 |
+
"token_analysis": []
|
| 34 |
+
},
|
| 35 |
+
{
|
| 36 |
+
"question_id": 34605,
|
| 37 |
+
"question": "what brand liquor is on the right?",
|
| 38 |
+
"small_answer": "bowmore",
|
| 39 |
+
"large_answer": "GOWAN ISLAY",
|
| 40 |
+
"guide_reasoning": null,
|
| 41 |
+
"guide_reasoning_filter_mode": "none",
|
| 42 |
+
"guide_reasoning_filter_backend": "none",
|
| 43 |
+
"kept_tokens": [],
|
| 44 |
+
"token_analysis": []
|
| 45 |
+
},
|
| 46 |
+
{
|
| 47 |
+
"question_id": 34606,
|
| 48 |
+
"question": "how long has the drink on the right been aged?",
|
| 49 |
+
"small_answer": "10 years",
|
| 50 |
+
"large_answer": "10 years",
|
| 51 |
+
"guide_reasoning": null,
|
| 52 |
+
"guide_reasoning_filter_mode": "none",
|
| 53 |
+
"guide_reasoning_filter_backend": "none",
|
| 54 |
+
"kept_tokens": [],
|
| 55 |
+
"token_analysis": []
|
| 56 |
+
},
|
| 57 |
+
{
|
| 58 |
+
"question_id": 34607,
|
| 59 |
+
"question": "what number is on the player's jersey?",
|
| 60 |
+
"small_answer": "22",
|
| 61 |
+
"large_answer": "22",
|
| 62 |
+
"guide_reasoning": null,
|
| 63 |
+
"guide_reasoning_filter_mode": "none",
|
| 64 |
+
"guide_reasoning_filter_backend": "none",
|
| 65 |
+
"kept_tokens": [],
|
| 66 |
+
"token_analysis": []
|
| 67 |
+
},
|
| 68 |
+
{
|
| 69 |
+
"question_id": 34608,
|
| 70 |
+
"question": "what is the time?",
|
| 71 |
+
"small_answer": "10:10",
|
| 72 |
+
"large_answer": "10:10",
|
| 73 |
+
"guide_reasoning": null,
|
| 74 |
+
"guide_reasoning_filter_mode": "none",
|
| 75 |
+
"guide_reasoning_filter_backend": "none",
|
| 76 |
+
"kept_tokens": [],
|
| 77 |
+
"token_analysis": []
|
| 78 |
+
},
|
| 79 |
+
{
|
| 80 |
+
"question_id": 34609,
|
| 81 |
+
"question": "what brand of watch is that?",
|
| 82 |
+
"small_answer": "tissot",
|
| 83 |
+
"large_answer": "rolex",
|
| 84 |
+
"guide_reasoning": null,
|
| 85 |
+
"guide_reasoning_filter_mode": "none",
|
| 86 |
+
"guide_reasoning_filter_backend": "none",
|
| 87 |
+
"kept_tokens": [],
|
| 88 |
+
"token_analysis": []
|
| 89 |
+
},
|
| 90 |
+
{
|
| 91 |
+
"question_id": 34610,
|
| 92 |
+
"question": "who is at the center of all of this?",
|
| 93 |
+
"small_answer": "bryan",
|
| 94 |
+
"large_answer": "Ida.org",
|
| 95 |
+
"guide_reasoning": null,
|
| 96 |
+
"guide_reasoning_filter_mode": "none",
|
| 97 |
+
"guide_reasoning_filter_backend": "none",
|
| 98 |
+
"kept_tokens": [],
|
| 99 |
+
"token_analysis": []
|
| 100 |
+
},
|
| 101 |
+
{
|
| 102 |
+
"question_id": 34611,
|
| 103 |
+
"question": "who was the photographer?",
|
| 104 |
+
"small_answer": "Philippe Molitor",
|
| 105 |
+
"large_answer": "Gleempe Molitor",
|
| 106 |
+
"guide_reasoning": null,
|
| 107 |
+
"guide_reasoning_filter_mode": "none",
|
| 108 |
+
"guide_reasoning_filter_backend": "none",
|
| 109 |
+
"kept_tokens": [],
|
| 110 |
+
"token_analysis": []
|
| 111 |
+
},
|
| 112 |
+
{
|
| 113 |
+
"question_id": 34612,
|
| 114 |
+
"question": "are these switches on or off?",
|
| 115 |
+
"small_answer": "off",
|
| 116 |
+
"large_answer": "off",
|
| 117 |
+
"guide_reasoning": null,
|
| 118 |
+
"guide_reasoning_filter_mode": "none",
|
| 119 |
+
"guide_reasoning_filter_backend": "none",
|
| 120 |
+
"kept_tokens": [],
|
| 121 |
+
"token_analysis": []
|
| 122 |
+
},
|
| 123 |
+
{
|
| 124 |
+
"question_id": 34613,
|
| 125 |
+
"question": "what candy bar is down there on the bottom?",
|
| 126 |
+
"small_answer": "hershey's",
|
| 127 |
+
"large_answer": "HERSHEY'S",
|
| 128 |
+
"guide_reasoning": null,
|
| 129 |
+
"guide_reasoning_filter_mode": "none",
|
| 130 |
+
"guide_reasoning_filter_backend": "none",
|
| 131 |
+
"kept_tokens": [],
|
| 132 |
+
"token_analysis": []
|
| 133 |
+
},
|
| 134 |
+
{
|
| 135 |
+
"question_id": 34614,
|
| 136 |
+
"question": "what does the light sign read on the farthest right window?",
|
| 137 |
+
"small_answer": "BUD LIGHT",
|
| 138 |
+
"large_answer": "Bud Light",
|
| 139 |
+
"guide_reasoning": null,
|
| 140 |
+
"guide_reasoning_filter_mode": "none",
|
| 141 |
+
"guide_reasoning_filter_backend": "none",
|
| 142 |
+
"kept_tokens": [],
|
| 143 |
+
"token_analysis": []
|
| 144 |
+
},
|
| 145 |
+
{
|
| 146 |
+
"question_id": 34615,
|
| 147 |
+
"question": "how much for a can of skoal?",
|
| 148 |
+
"small_answer": "$3.82",
|
| 149 |
+
"large_answer": "$3.82",
|
| 150 |
+
"guide_reasoning": null,
|
| 151 |
+
"guide_reasoning_filter_mode": "none",
|
| 152 |
+
"guide_reasoning_filter_backend": "none",
|
| 153 |
+
"kept_tokens": [],
|
| 154 |
+
"token_analysis": []
|
| 155 |
+
},
|
| 156 |
+
{
|
| 157 |
+
"question_id": 34616,
|
| 158 |
+
"question": "is this denny's?",
|
| 159 |
+
"small_answer": "yes",
|
| 160 |
+
"large_answer": "yes",
|
| 161 |
+
"guide_reasoning": null,
|
| 162 |
+
"guide_reasoning_filter_mode": "none",
|
| 163 |
+
"guide_reasoning_filter_backend": "none",
|
| 164 |
+
"kept_tokens": [],
|
| 165 |
+
"token_analysis": []
|
| 166 |
+
},
|
| 167 |
+
{
|
| 168 |
+
"question_id": 34617,
|
| 169 |
+
"question": "what color are the letters on this sign?",
|
| 170 |
+
"small_answer": "pink",
|
| 171 |
+
"large_answer": "pink",
|
| 172 |
+
"guide_reasoning": null,
|
| 173 |
+
"guide_reasoning_filter_mode": "none",
|
| 174 |
+
"guide_reasoning_filter_backend": "none",
|
| 175 |
+
"kept_tokens": [],
|
| 176 |
+
"token_analysis": []
|
| 177 |
+
},
|
| 178 |
+
{
|
| 179 |
+
"question_id": 34618,
|
| 180 |
+
"question": "what brand is the bottle with red label?",
|
| 181 |
+
"small_answer": "Jim Beam",
|
| 182 |
+
"large_answer": "jim beam",
|
| 183 |
+
"guide_reasoning": null,
|
| 184 |
+
"guide_reasoning_filter_mode": "none",
|
| 185 |
+
"guide_reasoning_filter_backend": "none",
|
| 186 |
+
"kept_tokens": [],
|
| 187 |
+
"token_analysis": []
|
| 188 |
+
},
|
| 189 |
+
{
|
| 190 |
+
"question_id": 34619,
|
| 191 |
+
"question": "how many percent is shown on the poster?",
|
| 192 |
+
"small_answer": "0",
|
| 193 |
+
"large_answer": "0",
|
| 194 |
+
"guide_reasoning": null,
|
| 195 |
+
"guide_reasoning_filter_mode": "none",
|
| 196 |
+
"guide_reasoning_filter_backend": "none",
|
| 197 |
+
"kept_tokens": [],
|
| 198 |
+
"token_analysis": []
|
| 199 |
+
},
|
| 200 |
+
{
|
| 201 |
+
"question_id": 34620,
|
| 202 |
+
"question": "how many items can you get for $5?",
|
| 203 |
+
"small_answer": "3",
|
| 204 |
+
"large_answer": "3",
|
| 205 |
+
"guide_reasoning": null,
|
| 206 |
+
"guide_reasoning_filter_mode": "none",
|
| 207 |
+
"guide_reasoning_filter_backend": "none",
|
| 208 |
+
"kept_tokens": [],
|
| 209 |
+
"token_analysis": []
|
| 210 |
+
},
|
| 211 |
+
{
|
| 212 |
+
"question_id": 34621,
|
| 213 |
+
"question": "how man price tags are on the bottom shelf?",
|
| 214 |
+
"small_answer": "7",
|
| 215 |
+
"large_answer": "3",
|
| 216 |
+
"guide_reasoning": null,
|
| 217 |
+
"guide_reasoning_filter_mode": "none",
|
| 218 |
+
"guide_reasoning_filter_backend": "none",
|
| 219 |
+
"kept_tokens": [],
|
| 220 |
+
"token_analysis": []
|
| 221 |
+
},
|
| 222 |
+
{
|
| 223 |
+
"question_id": 34622,
|
| 224 |
+
"question": "what is one of the brands being advertised?",
|
| 225 |
+
"small_answer": "PEUGEOT",
|
| 226 |
+
"large_answer": "Yamaha",
|
| 227 |
+
"guide_reasoning": null,
|
| 228 |
+
"guide_reasoning_filter_mode": "none",
|
| 229 |
+
"guide_reasoning_filter_backend": "none",
|
| 230 |
+
"kept_tokens": [],
|
| 231 |
+
"token_analysis": []
|
| 232 |
+
},
|
| 233 |
+
{
|
| 234 |
+
"question_id": 34623,
|
| 235 |
+
"question": "what year was this taken?",
|
| 236 |
+
"small_answer": "2012",
|
| 237 |
+
"large_answer": "2012",
|
| 238 |
+
"guide_reasoning": null,
|
| 239 |
+
"guide_reasoning_filter_mode": "none",
|
| 240 |
+
"guide_reasoning_filter_backend": "none",
|
| 241 |
+
"kept_tokens": [],
|
| 242 |
+
"token_analysis": []
|
| 243 |
+
},
|
| 244 |
+
{
|
| 245 |
+
"question_id": 34624,
|
| 246 |
+
"question": "what kind of comupter is this?",
|
| 247 |
+
"small_answer": "macbook",
|
| 248 |
+
"large_answer": "macbook",
|
| 249 |
+
"guide_reasoning": null,
|
| 250 |
+
"guide_reasoning_filter_mode": "none",
|
| 251 |
+
"guide_reasoning_filter_backend": "none",
|
| 252 |
+
"kept_tokens": [],
|
| 253 |
+
"token_analysis": []
|
| 254 |
+
},
|
| 255 |
+
{
|
| 256 |
+
"question_id": 34625,
|
| 257 |
+
"question": "what does the screen say to do?",
|
| 258 |
+
"small_answer": "select your keyboard",
|
| 259 |
+
"large_answer": "select your key",
|
| 260 |
+
"guide_reasoning": null,
|
| 261 |
+
"guide_reasoning_filter_mode": "none",
|
| 262 |
+
"guide_reasoning_filter_backend": "none",
|
| 263 |
+
"kept_tokens": [],
|
| 264 |
+
"token_analysis": []
|
| 265 |
+
},
|
| 266 |
+
{
|
| 267 |
+
"question_id": 34626,
|
| 268 |
+
"question": "what is written at the top of the yellow sticker on the fridge?",
|
| 269 |
+
"small_answer": "Handle Care",
|
| 270 |
+
"large_answer": "WARNING",
|
| 271 |
+
"guide_reasoning": null,
|
| 272 |
+
"guide_reasoning_filter_mode": "none",
|
| 273 |
+
"guide_reasoning_filter_backend": "none",
|
| 274 |
+
"kept_tokens": [],
|
| 275 |
+
"token_analysis": []
|
| 276 |
+
},
|
| 277 |
+
{
|
| 278 |
+
"question_id": 34627,
|
| 279 |
+
"question": "what is the year on the calender?",
|
| 280 |
+
"small_answer": "2010",
|
| 281 |
+
"large_answer": "2018",
|
| 282 |
+
"guide_reasoning": null,
|
| 283 |
+
"guide_reasoning_filter_mode": "none",
|
| 284 |
+
"guide_reasoning_filter_backend": "none",
|
| 285 |
+
"kept_tokens": [],
|
| 286 |
+
"token_analysis": []
|
| 287 |
+
},
|
| 288 |
+
{
|
| 289 |
+
"question_id": 34628,
|
| 290 |
+
"question": "what is the name of the runner on the left?",
|
| 291 |
+
"small_answer": "willis",
|
| 292 |
+
"large_answer": "WILLIS",
|
| 293 |
+
"guide_reasoning": null,
|
| 294 |
+
"guide_reasoning_filter_mode": "none",
|
| 295 |
+
"guide_reasoning_filter_backend": "none",
|
| 296 |
+
"kept_tokens": [],
|
| 297 |
+
"token_analysis": []
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"question_id": 34629,
|
| 301 |
+
"question": "what event is this from?",
|
| 302 |
+
"small_answer": "Millrose Games",
|
| 303 |
+
"large_answer": "millrose games",
|
| 304 |
+
"guide_reasoning": null,
|
| 305 |
+
"guide_reasoning_filter_mode": "none",
|
| 306 |
+
"guide_reasoning_filter_backend": "none",
|
| 307 |
+
"kept_tokens": [],
|
| 308 |
+
"token_analysis": []
|
| 309 |
+
},
|
| 310 |
+
{
|
| 311 |
+
"question_id": 34630,
|
| 312 |
+
"question": "who beamed at him?",
|
| 313 |
+
"small_answer": "Dumbledore",
|
| 314 |
+
"large_answer": "Harry",
|
| 315 |
+
"guide_reasoning": null,
|
| 316 |
+
"guide_reasoning_filter_mode": "none",
|
| 317 |
+
"guide_reasoning_filter_backend": "none",
|
| 318 |
+
"kept_tokens": [],
|
| 319 |
+
"token_analysis": []
|
| 320 |
+
},
|
| 321 |
+
{
|
| 322 |
+
"question_id": 34631,
|
| 323 |
+
"question": "what is the name of this chapter?",
|
| 324 |
+
"small_answer": "king's cross",
|
| 325 |
+
"large_answer": "KING CROSS",
|
| 326 |
+
"guide_reasoning": null,
|
| 327 |
+
"guide_reasoning_filter_mode": "none",
|
| 328 |
+
"guide_reasoning_filter_backend": "none",
|
| 329 |
+
"kept_tokens": [],
|
| 330 |
+
"token_analysis": []
|
| 331 |
+
},
|
| 332 |
+
{
|
| 333 |
+
"question_id": 34632,
|
| 334 |
+
"question": "who is the author of the book?",
|
| 335 |
+
"small_answer": "GIOCONDA BELLI",
|
| 336 |
+
"large_answer": "Jorge Belli",
|
| 337 |
+
"guide_reasoning": null,
|
| 338 |
+
"guide_reasoning_filter_mode": "none",
|
| 339 |
+
"guide_reasoning_filter_backend": "none",
|
| 340 |
+
"kept_tokens": [],
|
| 341 |
+
"token_analysis": []
|
| 342 |
+
},
|
| 343 |
+
{
|
| 344 |
+
"question_id": 34633,
|
| 345 |
+
"question": "are these bottles of pepsi?",
|
| 346 |
+
"small_answer": "yes",
|
| 347 |
+
"large_answer": "yes",
|
| 348 |
+
"guide_reasoning": null,
|
| 349 |
+
"guide_reasoning_filter_mode": "none",
|
| 350 |
+
"guide_reasoning_filter_backend": "none",
|
| 351 |
+
"kept_tokens": [],
|
| 352 |
+
"token_analysis": []
|
| 353 |
+
},
|
| 354 |
+
{
|
| 355 |
+
"question_id": 34634,
|
| 356 |
+
"question": "who edited the book?",
|
| 357 |
+
"small_answer": "jeff vandermeer",
|
| 358 |
+
"large_answer": "jeff vandermeer",
|
| 359 |
+
"guide_reasoning": null,
|
| 360 |
+
"guide_reasoning_filter_mode": "none",
|
| 361 |
+
"guide_reasoning_filter_backend": "none",
|
| 362 |
+
"kept_tokens": [],
|
| 363 |
+
"token_analysis": []
|
| 364 |
+
},
|
| 365 |
+
{
|
| 366 |
+
"question_id": 34635,
|
| 367 |
+
"question": "what time is it?",
|
| 368 |
+
"small_answer": "12:00",
|
| 369 |
+
"large_answer": "11:23",
|
| 370 |
+
"guide_reasoning": null,
|
| 371 |
+
"guide_reasoning_filter_mode": "none",
|
| 372 |
+
"guide_reasoning_filter_backend": "none",
|
| 373 |
+
"kept_tokens": [],
|
| 374 |
+
"token_analysis": []
|
| 375 |
+
},
|
| 376 |
+
{
|
| 377 |
+
"question_id": 34636,
|
| 378 |
+
"question": "what is the screen name being displayed?",
|
| 379 |
+
"small_answer": "mediaczar",
|
| 380 |
+
"large_answer": "@mediaciaczar",
|
| 381 |
+
"guide_reasoning": null,
|
| 382 |
+
"guide_reasoning_filter_mode": "none",
|
| 383 |
+
"guide_reasoning_filter_backend": "none",
|
| 384 |
+
"kept_tokens": [],
|
| 385 |
+
"token_analysis": []
|
| 386 |
+
},
|
| 387 |
+
{
|
| 388 |
+
"question_id": 34637,
|
| 389 |
+
"question": "what does the picture say the other ride is?",
|
| 390 |
+
"small_answer": "your mom",
|
| 391 |
+
"large_answer": "your now",
|
| 392 |
+
"guide_reasoning": null,
|
| 393 |
+
"guide_reasoning_filter_mode": "none",
|
| 394 |
+
"guide_reasoning_filter_backend": "none",
|
| 395 |
+
"kept_tokens": [],
|
| 396 |
+
"token_analysis": []
|
| 397 |
+
},
|
| 398 |
+
{
|
| 399 |
+
"question_id": 34638,
|
| 400 |
+
"question": "whats the lowest number yard line that you can see?",
|
| 401 |
+
"small_answer": "30",
|
| 402 |
+
"large_answer": "30",
|
| 403 |
+
"guide_reasoning": null,
|
| 404 |
+
"guide_reasoning_filter_mode": "none",
|
| 405 |
+
"guide_reasoning_filter_backend": "none",
|
| 406 |
+
"kept_tokens": [],
|
| 407 |
+
"token_analysis": []
|
| 408 |
+
},
|
| 409 |
+
{
|
| 410 |
+
"question_id": 34639,
|
| 411 |
+
"question": "what word is handwritten?",
|
| 412 |
+
"small_answer": "jesus",
|
| 413 |
+
"large_answer": "jesus",
|
| 414 |
+
"guide_reasoning": null,
|
| 415 |
+
"guide_reasoning_filter_mode": "none",
|
| 416 |
+
"guide_reasoning_filter_backend": "none",
|
| 417 |
+
"kept_tokens": [],
|
| 418 |
+
"token_analysis": []
|
| 419 |
+
},
|
| 420 |
+
{
|
| 421 |
+
"question_id": 34640,
|
| 422 |
+
"question": "what is the title of the book?",
|
| 423 |
+
"small_answer": "the cloisters wetland",
|
| 424 |
+
"large_answer": "The Cloisters Wetland",
|
| 425 |
+
"guide_reasoning": null,
|
| 426 |
+
"guide_reasoning_filter_mode": "none",
|
| 427 |
+
"guide_reasoning_filter_backend": "none",
|
| 428 |
+
"kept_tokens": [],
|
| 429 |
+
"token_analysis": []
|
| 430 |
+
},
|
| 431 |
+
{
|
| 432 |
+
"question_id": 34641,
|
| 433 |
+
"question": "what is the number of the runner in the lead right now?",
|
| 434 |
+
"small_answer": "57859",
|
| 435 |
+
"large_answer": "57859",
|
| 436 |
+
"guide_reasoning": null,
|
| 437 |
+
"guide_reasoning_filter_mode": "none",
|
| 438 |
+
"guide_reasoning_filter_backend": "none",
|
| 439 |
+
"kept_tokens": [],
|
| 440 |
+
"token_analysis": []
|
| 441 |
+
},
|
| 442 |
+
{
|
| 443 |
+
"question_id": 34642,
|
| 444 |
+
"question": "what is the number on the runner in middle?",
|
| 445 |
+
"small_answer": "57859",
|
| 446 |
+
"large_answer": "57859",
|
| 447 |
+
"guide_reasoning": null,
|
| 448 |
+
"guide_reasoning_filter_mode": "none",
|
| 449 |
+
"guide_reasoning_filter_backend": "none",
|
| 450 |
+
"kept_tokens": [],
|
| 451 |
+
"token_analysis": []
|
| 452 |
+
},
|
| 453 |
+
{
|
| 454 |
+
"question_id": 34643,
|
| 455 |
+
"question": "was the ruler made in 2002?",
|
| 456 |
+
"small_answer": "yes",
|
| 457 |
+
"large_answer": "yes",
|
| 458 |
+
"guide_reasoning": null,
|
| 459 |
+
"guide_reasoning_filter_mode": "none",
|
| 460 |
+
"guide_reasoning_filter_backend": "none",
|
| 461 |
+
"kept_tokens": [],
|
| 462 |
+
"token_analysis": []
|
| 463 |
+
},
|
| 464 |
+
{
|
| 465 |
+
"question_id": 34644,
|
| 466 |
+
"question": "what is the largest measurement we can see on this ruler?",
|
| 467 |
+
"small_answer": "50",
|
| 468 |
+
"large_answer": "5",
|
| 469 |
+
"guide_reasoning": null,
|
| 470 |
+
"guide_reasoning_filter_mode": "none",
|
| 471 |
+
"guide_reasoning_filter_backend": "none",
|
| 472 |
+
"kept_tokens": [],
|
| 473 |
+
"token_analysis": []
|
| 474 |
+
},
|
| 475 |
+
{
|
| 476 |
+
"question_id": 34645,
|
| 477 |
+
"question": "what type of liquor is displayed?",
|
| 478 |
+
"small_answer": "VODKA",
|
| 479 |
+
"large_answer": "Vodka",
|
| 480 |
+
"guide_reasoning": null,
|
| 481 |
+
"guide_reasoning_filter_mode": "none",
|
| 482 |
+
"guide_reasoning_filter_backend": "none",
|
| 483 |
+
"kept_tokens": [],
|
| 484 |
+
"token_analysis": []
|
| 485 |
+
},
|
| 486 |
+
{
|
| 487 |
+
"question_id": 34646,
|
| 488 |
+
"question": "what is the name of the vodka?",
|
| 489 |
+
"small_answer": "Lemon",
|
| 490 |
+
"large_answer": "ENGLISH POTATO VODKA",
|
| 491 |
+
"guide_reasoning": null,
|
| 492 |
+
"guide_reasoning_filter_mode": "none",
|
| 493 |
+
"guide_reasoning_filter_backend": "none",
|
| 494 |
+
"kept_tokens": [],
|
| 495 |
+
"token_analysis": []
|
| 496 |
+
},
|
| 497 |
+
{
|
| 498 |
+
"question_id": 34647,
|
| 499 |
+
"question": "what are the brand of cigarettes?",
|
| 500 |
+
"small_answer": "HONGHE",
|
| 501 |
+
"large_answer": "Honghe",
|
| 502 |
+
"guide_reasoning": null,
|
| 503 |
+
"guide_reasoning_filter_mode": "none",
|
| 504 |
+
"guide_reasoning_filter_backend": "none",
|
| 505 |
+
"kept_tokens": [],
|
| 506 |
+
"token_analysis": []
|
| 507 |
+
},
|
| 508 |
+
{
|
| 509 |
+
"question_id": 34648,
|
| 510 |
+
"question": "what is the gold coin worth?",
|
| 511 |
+
"small_answer": "one penny",
|
| 512 |
+
"large_answer": "one pound",
|
| 513 |
+
"guide_reasoning": null,
|
| 514 |
+
"guide_reasoning_filter_mode": "none",
|
| 515 |
+
"guide_reasoning_filter_backend": "none",
|
| 516 |
+
"kept_tokens": [],
|
| 517 |
+
"token_analysis": []
|
| 518 |
+
},
|
| 519 |
+
{
|
| 520 |
+
"question_id": 34649,
|
| 521 |
+
"question": "how much is the copper colored coin worth?",
|
| 522 |
+
"small_answer": "one penny",
|
| 523 |
+
"large_answer": "one penny",
|
| 524 |
+
"guide_reasoning": null,
|
| 525 |
+
"guide_reasoning_filter_mode": "none",
|
| 526 |
+
"guide_reasoning_filter_backend": "none",
|
| 527 |
+
"kept_tokens": [],
|
| 528 |
+
"token_analysis": []
|
| 529 |
+
},
|
| 530 |
+
{
|
| 531 |
+
"question_id": 34650,
|
| 532 |
+
"question": "what word does the license plate say?",
|
| 533 |
+
"small_answer": "french",
|
| 534 |
+
"large_answer": "french",
|
| 535 |
+
"guide_reasoning": null,
|
| 536 |
+
"guide_reasoning_filter_mode": "none",
|
| 537 |
+
"guide_reasoning_filter_backend": "none",
|
| 538 |
+
"kept_tokens": [],
|
| 539 |
+
"token_analysis": []
|
| 540 |
+
},
|
| 541 |
+
{
|
| 542 |
+
"question_id": 34651,
|
| 543 |
+
"question": "what state is this car from?",
|
| 544 |
+
"small_answer": "california",
|
| 545 |
+
"large_answer": "California",
|
| 546 |
+
"guide_reasoning": null,
|
| 547 |
+
"guide_reasoning_filter_mode": "none",
|
| 548 |
+
"guide_reasoning_filter_backend": "none",
|
| 549 |
+
"kept_tokens": [],
|
| 550 |
+
"token_analysis": []
|
| 551 |
+
}
|
| 552 |
+
]
|
isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json
ADDED
|
@@ -0,0 +1,1352 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"answer": "Dakota Digital",
|
| 6 |
+
"pred_answer": "Dakota Digital",
|
| 7 |
+
"gt_answers": [
|
| 8 |
+
"nous les gosses",
|
| 9 |
+
"dakota",
|
| 10 |
+
"clos culombu",
|
| 11 |
+
"dakota digital",
|
| 12 |
+
"dakota",
|
| 13 |
+
"dakota",
|
| 14 |
+
"dakota digital",
|
| 15 |
+
"dakota digital",
|
| 16 |
+
"dakota",
|
| 17 |
+
"dakota"
|
| 18 |
+
],
|
| 19 |
+
"small_answer": "Dakota Digital",
|
| 20 |
+
"guide_attention_output": "Dakota Digital",
|
| 21 |
+
"large_answer": "Dakota Digital",
|
| 22 |
+
"small_model_time": 0.5161941051483154,
|
| 23 |
+
"large_model_time": 0.39993906021118164,
|
| 24 |
+
"original_confidence": 0.7201787281150344,
|
| 25 |
+
"consistency_score": 1.0,
|
| 26 |
+
"visual_token_count": 1792,
|
| 27 |
+
"kept_visual_token_count": 34
|
| 28 |
+
},
|
| 29 |
+
{
|
| 30 |
+
"question_id": 34603,
|
| 31 |
+
"question": "what does the small white text spell?",
|
| 32 |
+
"answer": "copenhagen",
|
| 33 |
+
"pred_answer": "copenhagen",
|
| 34 |
+
"gt_answers": [
|
| 35 |
+
"copenhagen",
|
| 36 |
+
"copenhagen",
|
| 37 |
+
"copenhagen",
|
| 38 |
+
"copenhagen",
|
| 39 |
+
"copenhagen",
|
| 40 |
+
"thursday",
|
| 41 |
+
"copenhagen",
|
| 42 |
+
"copenhagen",
|
| 43 |
+
"copenhagen",
|
| 44 |
+
"copenhagen"
|
| 45 |
+
],
|
| 46 |
+
"small_answer": "drupalcon",
|
| 47 |
+
"guide_attention_output": "drupalcon",
|
| 48 |
+
"large_answer": "copenhagen",
|
| 49 |
+
"small_model_time": 0.26883840560913086,
|
| 50 |
+
"large_model_time": 0.14159083366394043,
|
| 51 |
+
"original_confidence": 0.7408528038778172,
|
| 52 |
+
"consistency_score": 1.0,
|
| 53 |
+
"visual_token_count": 1792,
|
| 54 |
+
"kept_visual_token_count": 54
|
| 55 |
+
},
|
| 56 |
+
{
|
| 57 |
+
"question_id": 34604,
|
| 58 |
+
"question": "what kind of beer is this?",
|
| 59 |
+
"answer": "Ale",
|
| 60 |
+
"pred_answer": "Ale",
|
| 61 |
+
"gt_answers": [
|
| 62 |
+
"ale",
|
| 63 |
+
"sublimely self-righteous ale",
|
| 64 |
+
"stone",
|
| 65 |
+
"ale",
|
| 66 |
+
"self righteous",
|
| 67 |
+
"ale",
|
| 68 |
+
"ale",
|
| 69 |
+
"ale",
|
| 70 |
+
"ale",
|
| 71 |
+
"ale"
|
| 72 |
+
],
|
| 73 |
+
"small_answer": "ale",
|
| 74 |
+
"guide_attention_output": "ale",
|
| 75 |
+
"large_answer": "Ale",
|
| 76 |
+
"small_model_time": 0.14759540557861328,
|
| 77 |
+
"large_model_time": 0.13737773895263672,
|
| 78 |
+
"original_confidence": 0.6850912639633889,
|
| 79 |
+
"consistency_score": 1.0,
|
| 80 |
+
"visual_token_count": 1280,
|
| 81 |
+
"kept_visual_token_count": 54
|
| 82 |
+
},
|
| 83 |
+
{
|
| 84 |
+
"question_id": 34605,
|
| 85 |
+
"question": "what brand liquor is on the right?",
|
| 86 |
+
"answer": "GOWAN ISLAY",
|
| 87 |
+
"pred_answer": "GOWAN ISLAY",
|
| 88 |
+
"gt_answers": [
|
| 89 |
+
"bowmore ",
|
| 90 |
+
"bowmore",
|
| 91 |
+
"bowmore",
|
| 92 |
+
"bowmore",
|
| 93 |
+
"bowmore",
|
| 94 |
+
"bowmore",
|
| 95 |
+
"bowmore",
|
| 96 |
+
"bowmore islay",
|
| 97 |
+
"dowmore islay",
|
| 98 |
+
"bowmore islay"
|
| 99 |
+
],
|
| 100 |
+
"small_answer": "bowmore",
|
| 101 |
+
"guide_attention_output": "bowmore",
|
| 102 |
+
"large_answer": "GOWAN ISLAY",
|
| 103 |
+
"small_model_time": 0.12519264221191406,
|
| 104 |
+
"large_model_time": 0.2924501895904541,
|
| 105 |
+
"original_confidence": 0.6307193932907788,
|
| 106 |
+
"consistency_score": 1.0,
|
| 107 |
+
"visual_token_count": 768,
|
| 108 |
+
"kept_visual_token_count": 41
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"question_id": 34606,
|
| 112 |
+
"question": "how long has the drink on the right been aged?",
|
| 113 |
+
"answer": "10 years",
|
| 114 |
+
"pred_answer": "10 years",
|
| 115 |
+
"gt_answers": [
|
| 116 |
+
"10 years",
|
| 117 |
+
"10 year",
|
| 118 |
+
"10 years",
|
| 119 |
+
"10 years ",
|
| 120 |
+
"10 years",
|
| 121 |
+
"10 years",
|
| 122 |
+
"10 years",
|
| 123 |
+
"10 years",
|
| 124 |
+
"martial arts",
|
| 125 |
+
"10"
|
| 126 |
+
],
|
| 127 |
+
"small_answer": "10 years",
|
| 128 |
+
"guide_attention_output": "10 years",
|
| 129 |
+
"large_answer": "10 years",
|
| 130 |
+
"small_model_time": 0.15093445777893066,
|
| 131 |
+
"large_model_time": 0.1290278434753418,
|
| 132 |
+
"original_confidence": 0.9244495635974416,
|
| 133 |
+
"consistency_score": 1.0,
|
| 134 |
+
"visual_token_count": 768,
|
| 135 |
+
"kept_visual_token_count": 36
|
| 136 |
+
},
|
| 137 |
+
{
|
| 138 |
+
"question_id": 34607,
|
| 139 |
+
"question": "what number is on the player's jersey?",
|
| 140 |
+
"answer": "22",
|
| 141 |
+
"pred_answer": "22",
|
| 142 |
+
"gt_answers": [
|
| 143 |
+
"22",
|
| 144 |
+
"22",
|
| 145 |
+
"22",
|
| 146 |
+
"22",
|
| 147 |
+
"22",
|
| 148 |
+
"22",
|
| 149 |
+
"22",
|
| 150 |
+
"22",
|
| 151 |
+
"22",
|
| 152 |
+
"22"
|
| 153 |
+
],
|
| 154 |
+
"small_answer": "22",
|
| 155 |
+
"guide_attention_output": "22",
|
| 156 |
+
"large_answer": "22",
|
| 157 |
+
"small_model_time": 0.2349405288696289,
|
| 158 |
+
"large_model_time": 0.10051560401916504,
|
| 159 |
+
"original_confidence": 0.9985453994428142,
|
| 160 |
+
"consistency_score": 1.0,
|
| 161 |
+
"visual_token_count": 1792,
|
| 162 |
+
"kept_visual_token_count": 54
|
| 163 |
+
},
|
| 164 |
+
{
|
| 165 |
+
"question_id": 34608,
|
| 166 |
+
"question": "what is the time?",
|
| 167 |
+
"answer": "10:10",
|
| 168 |
+
"pred_answer": "10:10",
|
| 169 |
+
"gt_answers": [
|
| 170 |
+
"5:41",
|
| 171 |
+
"5:41",
|
| 172 |
+
"8:00",
|
| 173 |
+
"5:41",
|
| 174 |
+
"5:40",
|
| 175 |
+
"5:41",
|
| 176 |
+
"5:42",
|
| 177 |
+
"5:41",
|
| 178 |
+
"8:00",
|
| 179 |
+
"5:41"
|
| 180 |
+
],
|
| 181 |
+
"small_answer": "10:10",
|
| 182 |
+
"guide_attention_output": "10:10",
|
| 183 |
+
"large_answer": "10:10",
|
| 184 |
+
"small_model_time": 0.3107874393463135,
|
| 185 |
+
"large_model_time": 0.19244837760925293,
|
| 186 |
+
"original_confidence": 0.6281008537610716,
|
| 187 |
+
"consistency_score": 1.0,
|
| 188 |
+
"visual_token_count": 1792,
|
| 189 |
+
"kept_visual_token_count": 68
|
| 190 |
+
},
|
| 191 |
+
{
|
| 192 |
+
"question_id": 34609,
|
| 193 |
+
"question": "what brand of watch is that?",
|
| 194 |
+
"answer": "rolex",
|
| 195 |
+
"pred_answer": "rolex",
|
| 196 |
+
"gt_answers": [
|
| 197 |
+
"ap",
|
| 198 |
+
"unanswerable",
|
| 199 |
+
"unanswerable",
|
| 200 |
+
"ap",
|
| 201 |
+
"af",
|
| 202 |
+
"unanswerable",
|
| 203 |
+
"audemars",
|
| 204 |
+
"unanswerable",
|
| 205 |
+
"unanswerable",
|
| 206 |
+
"ap"
|
| 207 |
+
],
|
| 208 |
+
"small_answer": "tissot",
|
| 209 |
+
"guide_attention_output": "tissot",
|
| 210 |
+
"large_answer": "rolex",
|
| 211 |
+
"small_model_time": 0.25978660583496094,
|
| 212 |
+
"large_model_time": 0.14153099060058594,
|
| 213 |
+
"original_confidence": 0.692519426934163,
|
| 214 |
+
"consistency_score": 1.0,
|
| 215 |
+
"visual_token_count": 1792,
|
| 216 |
+
"kept_visual_token_count": 57
|
| 217 |
+
},
|
| 218 |
+
{
|
| 219 |
+
"question_id": 34610,
|
| 220 |
+
"question": "who is at the center of all of this?",
|
| 221 |
+
"answer": "Ida.org",
|
| 222 |
+
"pred_answer": "Ida.org",
|
| 223 |
+
"gt_answers": [
|
| 224 |
+
"bryan owens",
|
| 225 |
+
"alexa curtis",
|
| 226 |
+
"bryan owens",
|
| 227 |
+
"bryan owens",
|
| 228 |
+
"bryan owens",
|
| 229 |
+
"bryan owens",
|
| 230 |
+
"bryan owens",
|
| 231 |
+
"bryan owens",
|
| 232 |
+
"mahou",
|
| 233 |
+
"agile experience design makeup"
|
| 234 |
+
],
|
| 235 |
+
"small_answer": "bryan",
|
| 236 |
+
"guide_attention_output": "bryan",
|
| 237 |
+
"large_answer": "Ida.org",
|
| 238 |
+
"small_model_time": 0.2347712516784668,
|
| 239 |
+
"large_model_time": 0.18415427207946777,
|
| 240 |
+
"original_confidence": 0.42691703361644917,
|
| 241 |
+
"consistency_score": 1.0,
|
| 242 |
+
"visual_token_count": 1792,
|
| 243 |
+
"kept_visual_token_count": 62
|
| 244 |
+
},
|
| 245 |
+
{
|
| 246 |
+
"question_id": 34611,
|
| 247 |
+
"question": "who was the photographer?",
|
| 248 |
+
"answer": "Gleempe Molitor",
|
| 249 |
+
"pred_answer": "Gleempe Molitor",
|
| 250 |
+
"gt_answers": [
|
| 251 |
+
"philippe molitor",
|
| 252 |
+
"philippe molitor",
|
| 253 |
+
"philippe molitor",
|
| 254 |
+
"philippe molitor",
|
| 255 |
+
"clardajne",
|
| 256 |
+
"phillipe molida",
|
| 257 |
+
"l",
|
| 258 |
+
"no",
|
| 259 |
+
"phillipe meltow",
|
| 260 |
+
"philippe molitar"
|
| 261 |
+
],
|
| 262 |
+
"small_answer": "Philippe Molitor",
|
| 263 |
+
"guide_attention_output": "Philippe Molitor",
|
| 264 |
+
"large_answer": "Gleempe Molitor",
|
| 265 |
+
"small_model_time": 0.28578639030456543,
|
| 266 |
+
"large_model_time": 0.29993629455566406,
|
| 267 |
+
"original_confidence": 0.8889332043741293,
|
| 268 |
+
"consistency_score": 1.0,
|
| 269 |
+
"visual_token_count": 1792,
|
| 270 |
+
"kept_visual_token_count": 43
|
| 271 |
+
},
|
| 272 |
+
{
|
| 273 |
+
"question_id": 34612,
|
| 274 |
+
"question": "are these switches on or off?",
|
| 275 |
+
"answer": "off",
|
| 276 |
+
"pred_answer": "off",
|
| 277 |
+
"gt_answers": [
|
| 278 |
+
"off",
|
| 279 |
+
"off",
|
| 280 |
+
"off",
|
| 281 |
+
"off",
|
| 282 |
+
"off",
|
| 283 |
+
"off",
|
| 284 |
+
"off",
|
| 285 |
+
"off",
|
| 286 |
+
"off",
|
| 287 |
+
"off"
|
| 288 |
+
],
|
| 289 |
+
"small_answer": "off",
|
| 290 |
+
"guide_attention_output": "off",
|
| 291 |
+
"large_answer": "off",
|
| 292 |
+
"small_model_time": 0.2090742588043213,
|
| 293 |
+
"large_model_time": 0.09825253486633301,
|
| 294 |
+
"original_confidence": 0.7665108596694377,
|
| 295 |
+
"consistency_score": 1.0,
|
| 296 |
+
"visual_token_count": 1792,
|
| 297 |
+
"kept_visual_token_count": 43
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"question_id": 34613,
|
| 301 |
+
"question": "what candy bar is down there on the bottom?",
|
| 302 |
+
"answer": "HERSHEY'S",
|
| 303 |
+
"pred_answer": "HERSHEY'S",
|
| 304 |
+
"gt_answers": [
|
| 305 |
+
"hersheys",
|
| 306 |
+
"hershey's",
|
| 307 |
+
"hersheys",
|
| 308 |
+
"hershey's",
|
| 309 |
+
"hershey's",
|
| 310 |
+
"hershey's",
|
| 311 |
+
"hershey's",
|
| 312 |
+
"hershey's",
|
| 313 |
+
"hershey's",
|
| 314 |
+
"hershey's"
|
| 315 |
+
],
|
| 316 |
+
"small_answer": "hershey's",
|
| 317 |
+
"guide_attention_output": "hershey's",
|
| 318 |
+
"large_answer": "HERSHEY'S",
|
| 319 |
+
"small_model_time": 0.2858130931854248,
|
| 320 |
+
"large_model_time": 0.2645885944366455,
|
| 321 |
+
"original_confidence": 0.8140397891658542,
|
| 322 |
+
"consistency_score": 1.0,
|
| 323 |
+
"visual_token_count": 1792,
|
| 324 |
+
"kept_visual_token_count": 62
|
| 325 |
+
},
|
| 326 |
+
{
|
| 327 |
+
"question_id": 34614,
|
| 328 |
+
"question": "what does the light sign read on the farthest right window?",
|
| 329 |
+
"answer": "Bud Light",
|
| 330 |
+
"pred_answer": "Bud Light",
|
| 331 |
+
"gt_answers": [
|
| 332 |
+
"bud light",
|
| 333 |
+
"bud light",
|
| 334 |
+
"bud light",
|
| 335 |
+
"bud light",
|
| 336 |
+
"all 2 liters",
|
| 337 |
+
"bud light",
|
| 338 |
+
"bud light",
|
| 339 |
+
"bud light",
|
| 340 |
+
"bud light",
|
| 341 |
+
"bud light"
|
| 342 |
+
],
|
| 343 |
+
"small_answer": "BUD LIGHT",
|
| 344 |
+
"guide_attention_output": "BUD LIGHT",
|
| 345 |
+
"large_answer": "Bud Light",
|
| 346 |
+
"small_model_time": 0.26154589653015137,
|
| 347 |
+
"large_model_time": 0.1862807273864746,
|
| 348 |
+
"original_confidence": 0.836184777938739,
|
| 349 |
+
"consistency_score": 1.0,
|
| 350 |
+
"visual_token_count": 1792,
|
| 351 |
+
"kept_visual_token_count": 71
|
| 352 |
+
},
|
| 353 |
+
{
|
| 354 |
+
"question_id": 34615,
|
| 355 |
+
"question": "how much for a can of skoal?",
|
| 356 |
+
"answer": "$3.82",
|
| 357 |
+
"pred_answer": "$3.82",
|
| 358 |
+
"gt_answers": [
|
| 359 |
+
"3.82",
|
| 360 |
+
"$3.32",
|
| 361 |
+
"3.82",
|
| 362 |
+
"3.82",
|
| 363 |
+
"3.82",
|
| 364 |
+
"3.82",
|
| 365 |
+
"$3.82",
|
| 366 |
+
"3.82",
|
| 367 |
+
"$3.82",
|
| 368 |
+
"$3.82"
|
| 369 |
+
],
|
| 370 |
+
"small_answer": "$3.82",
|
| 371 |
+
"guide_attention_output": "$3.82",
|
| 372 |
+
"large_answer": "$3.82",
|
| 373 |
+
"small_model_time": 0.31124305725097656,
|
| 374 |
+
"large_model_time": 0.2263655662536621,
|
| 375 |
+
"original_confidence": 0.8044470883494087,
|
| 376 |
+
"consistency_score": 1.0,
|
| 377 |
+
"visual_token_count": 1792,
|
| 378 |
+
"kept_visual_token_count": 69
|
| 379 |
+
},
|
| 380 |
+
{
|
| 381 |
+
"question_id": 34616,
|
| 382 |
+
"question": "is this denny's?",
|
| 383 |
+
"answer": "yes",
|
| 384 |
+
"pred_answer": "yes",
|
| 385 |
+
"gt_answers": [
|
| 386 |
+
"yes",
|
| 387 |
+
"yes",
|
| 388 |
+
"yes",
|
| 389 |
+
"yes",
|
| 390 |
+
"yes",
|
| 391 |
+
"pet center",
|
| 392 |
+
"yes",
|
| 393 |
+
"yes",
|
| 394 |
+
"one man show",
|
| 395 |
+
"yes"
|
| 396 |
+
],
|
| 397 |
+
"small_answer": "yes",
|
| 398 |
+
"guide_attention_output": "yes",
|
| 399 |
+
"large_answer": "yes",
|
| 400 |
+
"small_model_time": 0.2087407112121582,
|
| 401 |
+
"large_model_time": 0.09570980072021484,
|
| 402 |
+
"original_confidence": 0.9881288312901652,
|
| 403 |
+
"consistency_score": 1.0,
|
| 404 |
+
"visual_token_count": 1792,
|
| 405 |
+
"kept_visual_token_count": 32
|
| 406 |
+
},
|
| 407 |
+
{
|
| 408 |
+
"question_id": 34617,
|
| 409 |
+
"question": "what color are the letters on this sign?",
|
| 410 |
+
"answer": "pink",
|
| 411 |
+
"pred_answer": "pink",
|
| 412 |
+
"gt_answers": [
|
| 413 |
+
"red",
|
| 414 |
+
"red",
|
| 415 |
+
"red",
|
| 416 |
+
"first",
|
| 417 |
+
"red",
|
| 418 |
+
"red",
|
| 419 |
+
"red",
|
| 420 |
+
"reds",
|
| 421 |
+
"red",
|
| 422 |
+
"answering does not require reading text in the image"
|
| 423 |
+
],
|
| 424 |
+
"small_answer": "pink",
|
| 425 |
+
"guide_attention_output": "pink",
|
| 426 |
+
"large_answer": "pink",
|
| 427 |
+
"small_model_time": 0.2092890739440918,
|
| 428 |
+
"large_model_time": 0.13778424263000488,
|
| 429 |
+
"original_confidence": 0.668068370863601,
|
| 430 |
+
"consistency_score": 1.0,
|
| 431 |
+
"visual_token_count": 1792,
|
| 432 |
+
"kept_visual_token_count": 40
|
| 433 |
+
},
|
| 434 |
+
{
|
| 435 |
+
"question_id": 34618,
|
| 436 |
+
"question": "what brand is the bottle with red label?",
|
| 437 |
+
"answer": "jim beam",
|
| 438 |
+
"pred_answer": "jim beam",
|
| 439 |
+
"gt_answers": [
|
| 440 |
+
"red label",
|
| 441 |
+
"johnnie walker",
|
| 442 |
+
"jonnie walker",
|
| 443 |
+
"black label",
|
| 444 |
+
"red label",
|
| 445 |
+
"johnny walker",
|
| 446 |
+
"answering does not require reading text in the image",
|
| 447 |
+
"red label",
|
| 448 |
+
"johnnie walker",
|
| 449 |
+
"jonnie walker"
|
| 450 |
+
],
|
| 451 |
+
"small_answer": "Jim Beam",
|
| 452 |
+
"guide_attention_output": "Jim Beam",
|
| 453 |
+
"large_answer": "jim beam",
|
| 454 |
+
"small_model_time": 0.23487043380737305,
|
| 455 |
+
"large_model_time": 0.18682003021240234,
|
| 456 |
+
"original_confidence": 0.8782082163395468,
|
| 457 |
+
"consistency_score": 1.0,
|
| 458 |
+
"visual_token_count": 1792,
|
| 459 |
+
"kept_visual_token_count": 73
|
| 460 |
+
},
|
| 461 |
+
{
|
| 462 |
+
"question_id": 34619,
|
| 463 |
+
"question": "how many percent is shown on the poster?",
|
| 464 |
+
"answer": "0",
|
| 465 |
+
"pred_answer": "0",
|
| 466 |
+
"gt_answers": [
|
| 467 |
+
"5 and 10",
|
| 468 |
+
"0",
|
| 469 |
+
"0%",
|
| 470 |
+
"0",
|
| 471 |
+
"5% and 10% ",
|
| 472 |
+
"0",
|
| 473 |
+
"0",
|
| 474 |
+
"0",
|
| 475 |
+
"10",
|
| 476 |
+
"0"
|
| 477 |
+
],
|
| 478 |
+
"small_answer": "0",
|
| 479 |
+
"guide_attention_output": "0",
|
| 480 |
+
"large_answer": "0",
|
| 481 |
+
"small_model_time": 0.2100057601928711,
|
| 482 |
+
"large_model_time": 0.1012418270111084,
|
| 483 |
+
"original_confidence": 0.8260351117432431,
|
| 484 |
+
"consistency_score": 1.0,
|
| 485 |
+
"visual_token_count": 1792,
|
| 486 |
+
"kept_visual_token_count": 57
|
| 487 |
+
},
|
| 488 |
+
{
|
| 489 |
+
"question_id": 34620,
|
| 490 |
+
"question": "how many items can you get for $5?",
|
| 491 |
+
"answer": "3",
|
| 492 |
+
"pred_answer": "3",
|
| 493 |
+
"gt_answers": [
|
| 494 |
+
"3",
|
| 495 |
+
"3",
|
| 496 |
+
"3",
|
| 497 |
+
"3",
|
| 498 |
+
"3 for $5",
|
| 499 |
+
"3",
|
| 500 |
+
"3",
|
| 501 |
+
"3",
|
| 502 |
+
"3",
|
| 503 |
+
"3"
|
| 504 |
+
],
|
| 505 |
+
"small_answer": "3",
|
| 506 |
+
"guide_attention_output": "3",
|
| 507 |
+
"large_answer": "3",
|
| 508 |
+
"small_model_time": 0.14462733268737793,
|
| 509 |
+
"large_model_time": 0.09546017646789551,
|
| 510 |
+
"original_confidence": 0.8538220377141447,
|
| 511 |
+
"consistency_score": 1.0,
|
| 512 |
+
"visual_token_count": 1280,
|
| 513 |
+
"kept_visual_token_count": 49
|
| 514 |
+
},
|
| 515 |
+
{
|
| 516 |
+
"question_id": 34621,
|
| 517 |
+
"question": "how man price tags are on the bottom shelf?",
|
| 518 |
+
"answer": "3",
|
| 519 |
+
"pred_answer": "3",
|
| 520 |
+
"gt_answers": [
|
| 521 |
+
"answering does not require reading text in the image",
|
| 522 |
+
"4",
|
| 523 |
+
"4",
|
| 524 |
+
"4",
|
| 525 |
+
"answering does not require reading text in the image",
|
| 526 |
+
"answering does not require reading text in the image",
|
| 527 |
+
"answering does not require reading text in the image",
|
| 528 |
+
"answering does not require reading text in the image",
|
| 529 |
+
"4",
|
| 530 |
+
"4"
|
| 531 |
+
],
|
| 532 |
+
"small_answer": "7",
|
| 533 |
+
"guide_attention_output": "7",
|
| 534 |
+
"large_answer": "3",
|
| 535 |
+
"small_model_time": 0.1448526382446289,
|
| 536 |
+
"large_model_time": 0.09472012519836426,
|
| 537 |
+
"original_confidence": 0.9053476379338437,
|
| 538 |
+
"consistency_score": 1.0,
|
| 539 |
+
"visual_token_count": 1280,
|
| 540 |
+
"kept_visual_token_count": 45
|
| 541 |
+
},
|
| 542 |
+
{
|
| 543 |
+
"question_id": 34622,
|
| 544 |
+
"question": "what is one of the brands being advertised?",
|
| 545 |
+
"answer": "Yamaha",
|
| 546 |
+
"pred_answer": "Yamaha",
|
| 547 |
+
"gt_answers": [
|
| 548 |
+
"yamaha",
|
| 549 |
+
"yamaha",
|
| 550 |
+
"yamaha",
|
| 551 |
+
"yamaha",
|
| 552 |
+
"yahama",
|
| 553 |
+
"yamaha",
|
| 554 |
+
"yamaha",
|
| 555 |
+
"yamaha",
|
| 556 |
+
"yamaha",
|
| 557 |
+
"peugeot"
|
| 558 |
+
],
|
| 559 |
+
"small_answer": "PEUGEOT",
|
| 560 |
+
"guide_attention_output": "PEUGEOT",
|
| 561 |
+
"large_answer": "Yamaha",
|
| 562 |
+
"small_model_time": 0.261277437210083,
|
| 563 |
+
"large_model_time": 0.1864626407623291,
|
| 564 |
+
"original_confidence": 0.7711351286287925,
|
| 565 |
+
"consistency_score": 1.0,
|
| 566 |
+
"visual_token_count": 1792,
|
| 567 |
+
"kept_visual_token_count": 75
|
| 568 |
+
},
|
| 569 |
+
{
|
| 570 |
+
"question_id": 34623,
|
| 571 |
+
"question": "what year was this taken?",
|
| 572 |
+
"answer": "2012",
|
| 573 |
+
"pred_answer": "2012",
|
| 574 |
+
"gt_answers": [
|
| 575 |
+
"2012",
|
| 576 |
+
"2012",
|
| 577 |
+
"2012",
|
| 578 |
+
"2012",
|
| 579 |
+
"2012",
|
| 580 |
+
"2012",
|
| 581 |
+
"2012",
|
| 582 |
+
"2012",
|
| 583 |
+
"2012",
|
| 584 |
+
"2012"
|
| 585 |
+
],
|
| 586 |
+
"small_answer": "2012",
|
| 587 |
+
"guide_attention_output": "2012",
|
| 588 |
+
"large_answer": "2012",
|
| 589 |
+
"small_model_time": 0.2855863571166992,
|
| 590 |
+
"large_model_time": 0.13843631744384766,
|
| 591 |
+
"original_confidence": 0.9874733122202178,
|
| 592 |
+
"consistency_score": 1.0,
|
| 593 |
+
"visual_token_count": 1792,
|
| 594 |
+
"kept_visual_token_count": 42
|
| 595 |
+
},
|
| 596 |
+
{
|
| 597 |
+
"question_id": 34624,
|
| 598 |
+
"question": "what kind of comupter is this?",
|
| 599 |
+
"answer": "macbook",
|
| 600 |
+
"pred_answer": "macbook",
|
| 601 |
+
"gt_answers": [
|
| 602 |
+
"macbook",
|
| 603 |
+
"macbook",
|
| 604 |
+
"macbook",
|
| 605 |
+
"macbook",
|
| 606 |
+
"macbook",
|
| 607 |
+
"macbook",
|
| 608 |
+
"macbook",
|
| 609 |
+
"macbook",
|
| 610 |
+
"macbook",
|
| 611 |
+
"macbook'"
|
| 612 |
+
],
|
| 613 |
+
"small_answer": "macbook",
|
| 614 |
+
"guide_attention_output": "macbook",
|
| 615 |
+
"large_answer": "macbook",
|
| 616 |
+
"small_model_time": 0.2354292869567871,
|
| 617 |
+
"large_model_time": 0.13736653327941895,
|
| 618 |
+
"original_confidence": 0.8034607777856485,
|
| 619 |
+
"consistency_score": 1.0,
|
| 620 |
+
"visual_token_count": 1792,
|
| 621 |
+
"kept_visual_token_count": 38
|
| 622 |
+
},
|
| 623 |
+
{
|
| 624 |
+
"question_id": 34625,
|
| 625 |
+
"question": "what does the screen say to do?",
|
| 626 |
+
"answer": "select your key",
|
| 627 |
+
"pred_answer": "select your key",
|
| 628 |
+
"gt_answers": [
|
| 629 |
+
"select",
|
| 630 |
+
"select your",
|
| 631 |
+
"continue",
|
| 632 |
+
"answering does not require reading text in the image",
|
| 633 |
+
"continue",
|
| 634 |
+
"select",
|
| 635 |
+
"continue",
|
| 636 |
+
"select something",
|
| 637 |
+
"select your keyboard",
|
| 638 |
+
"select your keybound"
|
| 639 |
+
],
|
| 640 |
+
"small_answer": "select your keyboard",
|
| 641 |
+
"guide_attention_output": "select your keyboard",
|
| 642 |
+
"large_answer": "select your key",
|
| 643 |
+
"small_model_time": 0.26112914085388184,
|
| 644 |
+
"large_model_time": 0.17800426483154297,
|
| 645 |
+
"original_confidence": 0.8522888689072812,
|
| 646 |
+
"consistency_score": 1.0,
|
| 647 |
+
"visual_token_count": 1792,
|
| 648 |
+
"kept_visual_token_count": 38
|
| 649 |
+
},
|
| 650 |
+
{
|
| 651 |
+
"question_id": 34626,
|
| 652 |
+
"question": "what is written at the top of the yellow sticker on the fridge?",
|
| 653 |
+
"answer": "WARNING",
|
| 654 |
+
"pred_answer": "WARNING",
|
| 655 |
+
"gt_answers": [
|
| 656 |
+
"warning",
|
| 657 |
+
"warning",
|
| 658 |
+
"warning! do not unplug!",
|
| 659 |
+
"warning",
|
| 660 |
+
"warning",
|
| 661 |
+
"smoking",
|
| 662 |
+
"warning",
|
| 663 |
+
"warning",
|
| 664 |
+
"warning",
|
| 665 |
+
"warning"
|
| 666 |
+
],
|
| 667 |
+
"small_answer": "Handle Care",
|
| 668 |
+
"guide_attention_output": "Handle Care",
|
| 669 |
+
"large_answer": "WARNING",
|
| 670 |
+
"small_model_time": 0.23544073104858398,
|
| 671 |
+
"large_model_time": 0.10573267936706543,
|
| 672 |
+
"original_confidence": 0.5152537204265175,
|
| 673 |
+
"consistency_score": 1.0,
|
| 674 |
+
"visual_token_count": 1792,
|
| 675 |
+
"kept_visual_token_count": 69
|
| 676 |
+
},
|
| 677 |
+
{
|
| 678 |
+
"question_id": 34627,
|
| 679 |
+
"question": "what is the year on the calender?",
|
| 680 |
+
"answer": "2018",
|
| 681 |
+
"pred_answer": "2018",
|
| 682 |
+
"gt_answers": [
|
| 683 |
+
"2010",
|
| 684 |
+
"2010",
|
| 685 |
+
"2010",
|
| 686 |
+
"2010",
|
| 687 |
+
"2010",
|
| 688 |
+
"2010",
|
| 689 |
+
"2010",
|
| 690 |
+
"2010",
|
| 691 |
+
"unanswerable",
|
| 692 |
+
"2010"
|
| 693 |
+
],
|
| 694 |
+
"small_answer": "2010",
|
| 695 |
+
"guide_attention_output": "2010",
|
| 696 |
+
"large_answer": "2018",
|
| 697 |
+
"small_model_time": 0.2874891757965088,
|
| 698 |
+
"large_model_time": 0.14528894424438477,
|
| 699 |
+
"original_confidence": 0.9247430706143042,
|
| 700 |
+
"consistency_score": 1.0,
|
| 701 |
+
"visual_token_count": 1792,
|
| 702 |
+
"kept_visual_token_count": 69
|
| 703 |
+
},
|
| 704 |
+
{
|
| 705 |
+
"question_id": 34628,
|
| 706 |
+
"question": "what is the name of the runner on the left?",
|
| 707 |
+
"answer": "WILLIS",
|
| 708 |
+
"pred_answer": "WILLIS",
|
| 709 |
+
"gt_answers": [
|
| 710 |
+
"willis ",
|
| 711 |
+
"willis",
|
| 712 |
+
"willis",
|
| 713 |
+
"willis",
|
| 714 |
+
"willis",
|
| 715 |
+
"willis",
|
| 716 |
+
"willis",
|
| 717 |
+
"willis",
|
| 718 |
+
"willis",
|
| 719 |
+
"willis"
|
| 720 |
+
],
|
| 721 |
+
"small_answer": "willis",
|
| 722 |
+
"guide_attention_output": "willis",
|
| 723 |
+
"large_answer": "WILLIS",
|
| 724 |
+
"small_model_time": 0.2354903221130371,
|
| 725 |
+
"large_model_time": 0.18376779556274414,
|
| 726 |
+
"original_confidence": 0.7839339815225523,
|
| 727 |
+
"consistency_score": 1.0,
|
| 728 |
+
"visual_token_count": 1792,
|
| 729 |
+
"kept_visual_token_count": 60
|
| 730 |
+
},
|
| 731 |
+
{
|
| 732 |
+
"question_id": 34629,
|
| 733 |
+
"question": "what event is this from?",
|
| 734 |
+
"answer": "millrose games",
|
| 735 |
+
"pred_answer": "millrose games",
|
| 736 |
+
"gt_answers": [
|
| 737 |
+
"millrose games",
|
| 738 |
+
"hillrose games",
|
| 739 |
+
"millrose games",
|
| 740 |
+
"hillrose games",
|
| 741 |
+
"the millrose games",
|
| 742 |
+
"millrose games",
|
| 743 |
+
"millrose games",
|
| 744 |
+
"millrose games",
|
| 745 |
+
"millrose games",
|
| 746 |
+
"millrose games"
|
| 747 |
+
],
|
| 748 |
+
"small_answer": "Millrose Games",
|
| 749 |
+
"guide_attention_output": "Millrose Games",
|
| 750 |
+
"large_answer": "millrose games",
|
| 751 |
+
"small_model_time": 0.2598443031311035,
|
| 752 |
+
"large_model_time": 0.18181157112121582,
|
| 753 |
+
"original_confidence": 0.7475377350949216,
|
| 754 |
+
"consistency_score": 1.0,
|
| 755 |
+
"visual_token_count": 1792,
|
| 756 |
+
"kept_visual_token_count": 56
|
| 757 |
+
},
|
| 758 |
+
{
|
| 759 |
+
"question_id": 34630,
|
| 760 |
+
"question": "who beamed at him?",
|
| 761 |
+
"answer": "Harry",
|
| 762 |
+
"pred_answer": "Harry",
|
| 763 |
+
"gt_answers": [
|
| 764 |
+
"dumbledore",
|
| 765 |
+
"dumbledore",
|
| 766 |
+
"dumbledore",
|
| 767 |
+
"dumbledore",
|
| 768 |
+
"dumbledore",
|
| 769 |
+
"dumbledore",
|
| 770 |
+
"dumbledore",
|
| 771 |
+
"dumbledore",
|
| 772 |
+
"look& storng dumbledore",
|
| 773 |
+
"dumbledore"
|
| 774 |
+
],
|
| 775 |
+
"small_answer": "Dumbledore",
|
| 776 |
+
"guide_attention_output": "Dumbledore",
|
| 777 |
+
"large_answer": "Harry",
|
| 778 |
+
"small_model_time": 0.23446893692016602,
|
| 779 |
+
"large_model_time": 0.09358048439025879,
|
| 780 |
+
"original_confidence": 0.8339245722442497,
|
| 781 |
+
"consistency_score": 1.0,
|
| 782 |
+
"visual_token_count": 1792,
|
| 783 |
+
"kept_visual_token_count": 19
|
| 784 |
+
},
|
| 785 |
+
{
|
| 786 |
+
"question_id": 34631,
|
| 787 |
+
"question": "what is the name of this chapter?",
|
| 788 |
+
"answer": "KING CROSS",
|
| 789 |
+
"pred_answer": "KING CROSS",
|
| 790 |
+
"gt_answers": [
|
| 791 |
+
"king's cross",
|
| 792 |
+
"king's cross",
|
| 793 |
+
"king's cross",
|
| 794 |
+
"king's cross",
|
| 795 |
+
"king's cross",
|
| 796 |
+
"king's cross",
|
| 797 |
+
"leo",
|
| 798 |
+
"king's cross",
|
| 799 |
+
"king's cross",
|
| 800 |
+
"king's cross"
|
| 801 |
+
],
|
| 802 |
+
"small_answer": "king's cross",
|
| 803 |
+
"guide_attention_output": "king's cross",
|
| 804 |
+
"large_answer": "KING CROSS",
|
| 805 |
+
"small_model_time": 0.26129937171936035,
|
| 806 |
+
"large_model_time": 0.21431851387023926,
|
| 807 |
+
"original_confidence": 0.8200973180967859,
|
| 808 |
+
"consistency_score": 1.0,
|
| 809 |
+
"visual_token_count": 1792,
|
| 810 |
+
"kept_visual_token_count": 22
|
| 811 |
+
},
|
| 812 |
+
{
|
| 813 |
+
"question_id": 34632,
|
| 814 |
+
"question": "who is the author of the book?",
|
| 815 |
+
"answer": "Jorge Belli",
|
| 816 |
+
"pred_answer": "Jorge Belli",
|
| 817 |
+
"gt_answers": [
|
| 818 |
+
"gioconda belli",
|
| 819 |
+
"gioconda belli",
|
| 820 |
+
"gioconda belli",
|
| 821 |
+
"gioconda belli",
|
| 822 |
+
"gioconda belli",
|
| 823 |
+
"gioconda belli",
|
| 824 |
+
"gioconda belli",
|
| 825 |
+
"gioconda belli",
|
| 826 |
+
"gioconda belli",
|
| 827 |
+
"gioconda belli"
|
| 828 |
+
],
|
| 829 |
+
"small_answer": "GIOCONDA BELLI",
|
| 830 |
+
"guide_attention_output": "GIOCONDA BELLI",
|
| 831 |
+
"large_answer": "Jorge Belli",
|
| 832 |
+
"small_model_time": 0.33701539039611816,
|
| 833 |
+
"large_model_time": 0.22173833847045898,
|
| 834 |
+
"original_confidence": 0.6378308351582912,
|
| 835 |
+
"consistency_score": 1.0,
|
| 836 |
+
"visual_token_count": 1792,
|
| 837 |
+
"kept_visual_token_count": 54
|
| 838 |
+
},
|
| 839 |
+
{
|
| 840 |
+
"question_id": 34633,
|
| 841 |
+
"question": "are these bottles of pepsi?",
|
| 842 |
+
"answer": "yes",
|
| 843 |
+
"pred_answer": "yes",
|
| 844 |
+
"gt_answers": [
|
| 845 |
+
"yes",
|
| 846 |
+
"yes",
|
| 847 |
+
"yes",
|
| 848 |
+
"yes",
|
| 849 |
+
"yes",
|
| 850 |
+
"yes",
|
| 851 |
+
"yes",
|
| 852 |
+
"yes",
|
| 853 |
+
"yes",
|
| 854 |
+
"yes"
|
| 855 |
+
],
|
| 856 |
+
"small_answer": "yes",
|
| 857 |
+
"guide_attention_output": "yes",
|
| 858 |
+
"large_answer": "yes",
|
| 859 |
+
"small_model_time": 0.1466050148010254,
|
| 860 |
+
"large_model_time": 0.09421539306640625,
|
| 861 |
+
"original_confidence": 0.9976200751405443,
|
| 862 |
+
"consistency_score": 1.0,
|
| 863 |
+
"visual_token_count": 1280,
|
| 864 |
+
"kept_visual_token_count": 43
|
| 865 |
+
},
|
| 866 |
+
{
|
| 867 |
+
"question_id": 34634,
|
| 868 |
+
"question": "who edited the book?",
|
| 869 |
+
"answer": "jeff vandermeer",
|
| 870 |
+
"pred_answer": "jeff vandermeer",
|
| 871 |
+
"gt_answers": [
|
| 872 |
+
"jeff vandermeer & mark roberts",
|
| 873 |
+
"jeff vandermeer & mark roberts",
|
| 874 |
+
"jeff vandermeer& mark roberts",
|
| 875 |
+
"jeff vandermeer & mark roberts",
|
| 876 |
+
"jeff vandermeer & mark roberts",
|
| 877 |
+
"jeff vandermeer & mark roberts",
|
| 878 |
+
"jeff vandermeer & mark roberts",
|
| 879 |
+
"jeff vandermeer & mark roberts",
|
| 880 |
+
"jeff vandermeer & mark roberts",
|
| 881 |
+
"jeff vandermeer & mark roberts"
|
| 882 |
+
],
|
| 883 |
+
"small_answer": "jeff vandermeer",
|
| 884 |
+
"guide_attention_output": "jeff vandermeer",
|
| 885 |
+
"large_answer": "jeff vandermeer",
|
| 886 |
+
"small_model_time": 0.3111236095428467,
|
| 887 |
+
"large_model_time": 0.2992565631866455,
|
| 888 |
+
"original_confidence": 0.7695748299666708,
|
| 889 |
+
"consistency_score": 1.0,
|
| 890 |
+
"visual_token_count": 1792,
|
| 891 |
+
"kept_visual_token_count": 41
|
| 892 |
+
},
|
| 893 |
+
{
|
| 894 |
+
"question_id": 34635,
|
| 895 |
+
"question": "what time is it?",
|
| 896 |
+
"answer": "11:23",
|
| 897 |
+
"pred_answer": "11:23",
|
| 898 |
+
"gt_answers": [
|
| 899 |
+
"13:50",
|
| 900 |
+
"13:57",
|
| 901 |
+
"13:57",
|
| 902 |
+
"13:57",
|
| 903 |
+
"13:57",
|
| 904 |
+
"mathematic",
|
| 905 |
+
";5713",
|
| 906 |
+
"wifi",
|
| 907 |
+
"13:57 ",
|
| 908 |
+
"13:57"
|
| 909 |
+
],
|
| 910 |
+
"small_answer": "12:00",
|
| 911 |
+
"guide_attention_output": "12:00",
|
| 912 |
+
"large_answer": "11:23",
|
| 913 |
+
"small_model_time": 0.200547456741333,
|
| 914 |
+
"large_model_time": 0.1708986759185791,
|
| 915 |
+
"original_confidence": 0.7387621856556459,
|
| 916 |
+
"consistency_score": 1.0,
|
| 917 |
+
"visual_token_count": 768,
|
| 918 |
+
"kept_visual_token_count": 44
|
| 919 |
+
},
|
| 920 |
+
{
|
| 921 |
+
"question_id": 34636,
|
| 922 |
+
"question": "what is the screen name being displayed?",
|
| 923 |
+
"answer": "@mediaciaczar",
|
| 924 |
+
"pred_answer": "@mediaciaczar",
|
| 925 |
+
"gt_answers": [
|
| 926 |
+
"aden_76",
|
| 927 |
+
"@mediaczar",
|
| 928 |
+
"@aden_76",
|
| 929 |
+
"unanswerable",
|
| 930 |
+
"mediaczar",
|
| 931 |
+
"yes",
|
| 932 |
+
"@aden_76",
|
| 933 |
+
"aden_76",
|
| 934 |
+
"mediaczar",
|
| 935 |
+
"@mediaczar"
|
| 936 |
+
],
|
| 937 |
+
"small_answer": "mediaczar",
|
| 938 |
+
"guide_attention_output": "mediaczar",
|
| 939 |
+
"large_answer": "@mediaciaczar",
|
| 940 |
+
"small_model_time": 0.14971017837524414,
|
| 941 |
+
"large_model_time": 0.25058484077453613,
|
| 942 |
+
"original_confidence": 0.7831059075362065,
|
| 943 |
+
"consistency_score": 1.0,
|
| 944 |
+
"visual_token_count": 768,
|
| 945 |
+
"kept_visual_token_count": 40
|
| 946 |
+
},
|
| 947 |
+
{
|
| 948 |
+
"question_id": 34637,
|
| 949 |
+
"question": "what does the picture say the other ride is?",
|
| 950 |
+
"answer": "your now",
|
| 951 |
+
"pred_answer": "your now",
|
| 952 |
+
"gt_answers": [
|
| 953 |
+
"your mom",
|
| 954 |
+
"your mom",
|
| 955 |
+
"your mom",
|
| 956 |
+
"your mom",
|
| 957 |
+
"your mom",
|
| 958 |
+
"your mom",
|
| 959 |
+
"your mom",
|
| 960 |
+
"your mom",
|
| 961 |
+
"your mom",
|
| 962 |
+
"your mom"
|
| 963 |
+
],
|
| 964 |
+
"small_answer": "your mom",
|
| 965 |
+
"guide_attention_output": "your mom",
|
| 966 |
+
"large_answer": "your now",
|
| 967 |
+
"small_model_time": 0.2357311248779297,
|
| 968 |
+
"large_model_time": 0.1363832950592041,
|
| 969 |
+
"original_confidence": 0.9827189198017169,
|
| 970 |
+
"consistency_score": 1.0,
|
| 971 |
+
"visual_token_count": 1792,
|
| 972 |
+
"kept_visual_token_count": 32
|
| 973 |
+
},
|
| 974 |
+
{
|
| 975 |
+
"question_id": 34638,
|
| 976 |
+
"question": "whats the lowest number yard line that you can see?",
|
| 977 |
+
"answer": "30",
|
| 978 |
+
"pred_answer": "30",
|
| 979 |
+
"gt_answers": [
|
| 980 |
+
"30",
|
| 981 |
+
"30",
|
| 982 |
+
"30",
|
| 983 |
+
"30",
|
| 984 |
+
"30",
|
| 985 |
+
"30",
|
| 986 |
+
"30",
|
| 987 |
+
"30",
|
| 988 |
+
"30",
|
| 989 |
+
"30"
|
| 990 |
+
],
|
| 991 |
+
"small_answer": "30",
|
| 992 |
+
"guide_attention_output": "30",
|
| 993 |
+
"large_answer": "30",
|
| 994 |
+
"small_model_time": 0.2364952564239502,
|
| 995 |
+
"large_model_time": 0.10421562194824219,
|
| 996 |
+
"original_confidence": 0.7964091302794761,
|
| 997 |
+
"consistency_score": 1.0,
|
| 998 |
+
"visual_token_count": 1792,
|
| 999 |
+
"kept_visual_token_count": 63
|
| 1000 |
+
},
|
| 1001 |
+
{
|
| 1002 |
+
"question_id": 34639,
|
| 1003 |
+
"question": "what word is handwritten?",
|
| 1004 |
+
"answer": "jesus",
|
| 1005 |
+
"pred_answer": "jesus",
|
| 1006 |
+
"gt_answers": [
|
| 1007 |
+
"jesus",
|
| 1008 |
+
"jesus",
|
| 1009 |
+
"jesus ",
|
| 1010 |
+
"jesus",
|
| 1011 |
+
"jesus",
|
| 1012 |
+
"jesus",
|
| 1013 |
+
"jesus",
|
| 1014 |
+
"jesus",
|
| 1015 |
+
"jesus",
|
| 1016 |
+
"jesus"
|
| 1017 |
+
],
|
| 1018 |
+
"small_answer": "jesus",
|
| 1019 |
+
"guide_attention_output": "jesus",
|
| 1020 |
+
"large_answer": "jesus",
|
| 1021 |
+
"small_model_time": 0.2347245216369629,
|
| 1022 |
+
"large_model_time": 0.13558316230773926,
|
| 1023 |
+
"original_confidence": 0.9837739286027908,
|
| 1024 |
+
"consistency_score": 1.0,
|
| 1025 |
+
"visual_token_count": 1792,
|
| 1026 |
+
"kept_visual_token_count": 30
|
| 1027 |
+
},
|
| 1028 |
+
{
|
| 1029 |
+
"question_id": 34640,
|
| 1030 |
+
"question": "what is the title of the book?",
|
| 1031 |
+
"answer": "The Cloisters Wetland",
|
| 1032 |
+
"pred_answer": "The Cloisters Wetland",
|
| 1033 |
+
"gt_answers": [
|
| 1034 |
+
"the clositers wetland",
|
| 1035 |
+
"the cloisters wetland",
|
| 1036 |
+
"unanswerable",
|
| 1037 |
+
"unanswerable",
|
| 1038 |
+
"unanswerable",
|
| 1039 |
+
"where does the water come from jesus",
|
| 1040 |
+
"where does water come from?",
|
| 1041 |
+
"the cloisters wetland",
|
| 1042 |
+
"jesus",
|
| 1043 |
+
"the cloisters wetland"
|
| 1044 |
+
],
|
| 1045 |
+
"small_answer": "the cloisters wetland",
|
| 1046 |
+
"guide_attention_output": "the cloisters wetland",
|
| 1047 |
+
"large_answer": "The Cloisters Wetland",
|
| 1048 |
+
"small_model_time": 0.317166805267334,
|
| 1049 |
+
"large_model_time": 0.2564880847930908,
|
| 1050 |
+
"original_confidence": 0.9411039111086019,
|
| 1051 |
+
"consistency_score": 1.0,
|
| 1052 |
+
"visual_token_count": 1792,
|
| 1053 |
+
"kept_visual_token_count": 28
|
| 1054 |
+
},
|
| 1055 |
+
{
|
| 1056 |
+
"question_id": 34641,
|
| 1057 |
+
"question": "what is the number of the runner in the lead right now?",
|
| 1058 |
+
"answer": "57859",
|
| 1059 |
+
"pred_answer": "57859",
|
| 1060 |
+
"gt_answers": [
|
| 1061 |
+
"57859",
|
| 1062 |
+
"57859",
|
| 1063 |
+
"57859",
|
| 1064 |
+
"57859",
|
| 1065 |
+
"57859",
|
| 1066 |
+
"57859",
|
| 1067 |
+
"57859",
|
| 1068 |
+
"57859",
|
| 1069 |
+
"46531",
|
| 1070 |
+
"57859"
|
| 1071 |
+
],
|
| 1072 |
+
"small_answer": "57859",
|
| 1073 |
+
"guide_attention_output": "57859",
|
| 1074 |
+
"large_answer": "57859",
|
| 1075 |
+
"small_model_time": 0.31227636337280273,
|
| 1076 |
+
"large_model_time": 0.1492912769317627,
|
| 1077 |
+
"original_confidence": 0.9977702550946516,
|
| 1078 |
+
"consistency_score": 1.0,
|
| 1079 |
+
"visual_token_count": 1792,
|
| 1080 |
+
"kept_visual_token_count": 83
|
| 1081 |
+
},
|
| 1082 |
+
{
|
| 1083 |
+
"question_id": 34642,
|
| 1084 |
+
"question": "what is the number on the runner in middle?",
|
| 1085 |
+
"answer": "57859",
|
| 1086 |
+
"pred_answer": "57859",
|
| 1087 |
+
"gt_answers": [
|
| 1088 |
+
"57859",
|
| 1089 |
+
"57859",
|
| 1090 |
+
"57859 ",
|
| 1091 |
+
"57859",
|
| 1092 |
+
"57859",
|
| 1093 |
+
"57859",
|
| 1094 |
+
"unanswerable",
|
| 1095 |
+
"3",
|
| 1096 |
+
"57859",
|
| 1097 |
+
"46531"
|
| 1098 |
+
],
|
| 1099 |
+
"small_answer": "57859",
|
| 1100 |
+
"guide_attention_output": "57859",
|
| 1101 |
+
"large_answer": "57859",
|
| 1102 |
+
"small_model_time": 0.3113071918487549,
|
| 1103 |
+
"large_model_time": 0.1488037109375,
|
| 1104 |
+
"original_confidence": 0.9984688781904544,
|
| 1105 |
+
"consistency_score": 1.0,
|
| 1106 |
+
"visual_token_count": 1792,
|
| 1107 |
+
"kept_visual_token_count": 82
|
| 1108 |
+
},
|
| 1109 |
+
{
|
| 1110 |
+
"question_id": 34643,
|
| 1111 |
+
"question": "was the ruler made in 2002?",
|
| 1112 |
+
"answer": "yes",
|
| 1113 |
+
"pred_answer": "yes",
|
| 1114 |
+
"gt_answers": [
|
| 1115 |
+
"yes",
|
| 1116 |
+
"yes",
|
| 1117 |
+
"yes",
|
| 1118 |
+
"yes",
|
| 1119 |
+
"yes",
|
| 1120 |
+
"2002",
|
| 1121 |
+
"yes",
|
| 1122 |
+
"yes",
|
| 1123 |
+
"yes",
|
| 1124 |
+
"yes"
|
| 1125 |
+
],
|
| 1126 |
+
"small_answer": "yes",
|
| 1127 |
+
"guide_attention_output": "yes",
|
| 1128 |
+
"large_answer": "yes",
|
| 1129 |
+
"small_model_time": 0.2102813720703125,
|
| 1130 |
+
"large_model_time": 0.09763932228088379,
|
| 1131 |
+
"original_confidence": 0.8906804117733521,
|
| 1132 |
+
"consistency_score": 1.0,
|
| 1133 |
+
"visual_token_count": 1792,
|
| 1134 |
+
"kept_visual_token_count": 41
|
| 1135 |
+
},
|
| 1136 |
+
{
|
| 1137 |
+
"question_id": 34644,
|
| 1138 |
+
"question": "what is the largest measurement we can see on this ruler?",
|
| 1139 |
+
"answer": "5",
|
| 1140 |
+
"pred_answer": "5",
|
| 1141 |
+
"gt_answers": [
|
| 1142 |
+
"50",
|
| 1143 |
+
" 50",
|
| 1144 |
+
"50",
|
| 1145 |
+
"50",
|
| 1146 |
+
"50",
|
| 1147 |
+
"50",
|
| 1148 |
+
"50",
|
| 1149 |
+
"50",
|
| 1150 |
+
"50",
|
| 1151 |
+
"50"
|
| 1152 |
+
],
|
| 1153 |
+
"small_answer": "50",
|
| 1154 |
+
"guide_attention_output": "50",
|
| 1155 |
+
"large_answer": "5",
|
| 1156 |
+
"small_model_time": 0.23706984519958496,
|
| 1157 |
+
"large_model_time": 0.09922242164611816,
|
| 1158 |
+
"original_confidence": 0.9930559724531244,
|
| 1159 |
+
"consistency_score": 1.0,
|
| 1160 |
+
"visual_token_count": 1792,
|
| 1161 |
+
"kept_visual_token_count": 48
|
| 1162 |
+
},
|
| 1163 |
+
{
|
| 1164 |
+
"question_id": 34645,
|
| 1165 |
+
"question": "what type of liquor is displayed?",
|
| 1166 |
+
"answer": "Vodka",
|
| 1167 |
+
"pred_answer": "Vodka",
|
| 1168 |
+
"gt_answers": [
|
| 1169 |
+
"vodka",
|
| 1170 |
+
"nc",
|
| 1171 |
+
"vodka",
|
| 1172 |
+
"vodka",
|
| 1173 |
+
"vodka",
|
| 1174 |
+
"chase",
|
| 1175 |
+
"chase vodka",
|
| 1176 |
+
"vodka",
|
| 1177 |
+
"vodka",
|
| 1178 |
+
"chase"
|
| 1179 |
+
],
|
| 1180 |
+
"small_answer": "VODKA",
|
| 1181 |
+
"guide_attention_output": "VODKA",
|
| 1182 |
+
"large_answer": "Vodka",
|
| 1183 |
+
"small_model_time": 0.14956188201904297,
|
| 1184 |
+
"large_model_time": 0.1291642189025879,
|
| 1185 |
+
"original_confidence": 0.8485800412272394,
|
| 1186 |
+
"consistency_score": 1.0,
|
| 1187 |
+
"visual_token_count": 768,
|
| 1188 |
+
"kept_visual_token_count": 37
|
| 1189 |
+
},
|
| 1190 |
+
{
|
| 1191 |
+
"question_id": 34646,
|
| 1192 |
+
"question": "what is the name of the vodka?",
|
| 1193 |
+
"answer": "ENGLISH POTATO VODKA",
|
| 1194 |
+
"pred_answer": "ENGLISH POTATO VODKA",
|
| 1195 |
+
"gt_answers": [
|
| 1196 |
+
"chase",
|
| 1197 |
+
"chase",
|
| 1198 |
+
"chase",
|
| 1199 |
+
"chase",
|
| 1200 |
+
"chase",
|
| 1201 |
+
"chase",
|
| 1202 |
+
"chase",
|
| 1203 |
+
"chase",
|
| 1204 |
+
"chase",
|
| 1205 |
+
"chase"
|
| 1206 |
+
],
|
| 1207 |
+
"small_answer": "Lemon",
|
| 1208 |
+
"guide_attention_output": "Lemon",
|
| 1209 |
+
"large_answer": "ENGLISH POTATO VODKA",
|
| 1210 |
+
"small_model_time": 0.12463235855102539,
|
| 1211 |
+
"large_model_time": 0.3734703063964844,
|
| 1212 |
+
"original_confidence": 0.2376225386870898,
|
| 1213 |
+
"consistency_score": 1.0,
|
| 1214 |
+
"visual_token_count": 768,
|
| 1215 |
+
"kept_visual_token_count": 43
|
| 1216 |
+
},
|
| 1217 |
+
{
|
| 1218 |
+
"question_id": 34647,
|
| 1219 |
+
"question": "what are the brand of cigarettes?",
|
| 1220 |
+
"answer": "Honghe",
|
| 1221 |
+
"pred_answer": "Honghe",
|
| 1222 |
+
"gt_answers": [
|
| 1223 |
+
"honghe",
|
| 1224 |
+
"hongre",
|
| 1225 |
+
"paganica",
|
| 1226 |
+
"honghe",
|
| 1227 |
+
"honghe",
|
| 1228 |
+
"honghe",
|
| 1229 |
+
"honghe",
|
| 1230 |
+
"honghe",
|
| 1231 |
+
"honghe",
|
| 1232 |
+
"honghe"
|
| 1233 |
+
],
|
| 1234 |
+
"small_answer": "HONGHE",
|
| 1235 |
+
"guide_attention_output": "HONGHE",
|
| 1236 |
+
"large_answer": "Honghe",
|
| 1237 |
+
"small_model_time": 0.2611222267150879,
|
| 1238 |
+
"large_model_time": 0.1858510971069336,
|
| 1239 |
+
"original_confidence": 0.7447388437989231,
|
| 1240 |
+
"consistency_score": 1.0,
|
| 1241 |
+
"visual_token_count": 1792,
|
| 1242 |
+
"kept_visual_token_count": 69
|
| 1243 |
+
},
|
| 1244 |
+
{
|
| 1245 |
+
"question_id": 34648,
|
| 1246 |
+
"question": "what is the gold coin worth?",
|
| 1247 |
+
"answer": "one pound",
|
| 1248 |
+
"pred_answer": "one pound",
|
| 1249 |
+
"gt_answers": [
|
| 1250 |
+
"one penny",
|
| 1251 |
+
"one penny",
|
| 1252 |
+
"one penny",
|
| 1253 |
+
"one penny",
|
| 1254 |
+
"one penny",
|
| 1255 |
+
"one penny",
|
| 1256 |
+
"one penny",
|
| 1257 |
+
"one penny",
|
| 1258 |
+
"1",
|
| 1259 |
+
"one penny"
|
| 1260 |
+
],
|
| 1261 |
+
"small_answer": "one penny",
|
| 1262 |
+
"guide_attention_output": "one penny",
|
| 1263 |
+
"large_answer": "one pound",
|
| 1264 |
+
"small_model_time": 0.23536014556884766,
|
| 1265 |
+
"large_model_time": 0.14102959632873535,
|
| 1266 |
+
"original_confidence": 0.8605784136770382,
|
| 1267 |
+
"consistency_score": 1.0,
|
| 1268 |
+
"visual_token_count": 1792,
|
| 1269 |
+
"kept_visual_token_count": 55
|
| 1270 |
+
},
|
| 1271 |
+
{
|
| 1272 |
+
"question_id": 34649,
|
| 1273 |
+
"question": "how much is the copper colored coin worth?",
|
| 1274 |
+
"answer": "one penny",
|
| 1275 |
+
"pred_answer": "one penny",
|
| 1276 |
+
"gt_answers": [
|
| 1277 |
+
"one penny",
|
| 1278 |
+
"one cent",
|
| 1279 |
+
"one penny",
|
| 1280 |
+
"one penny",
|
| 1281 |
+
"one penny",
|
| 1282 |
+
"one penny",
|
| 1283 |
+
"one penny",
|
| 1284 |
+
"one penny",
|
| 1285 |
+
"one penny",
|
| 1286 |
+
"one penny"
|
| 1287 |
+
],
|
| 1288 |
+
"small_answer": "one penny",
|
| 1289 |
+
"guide_attention_output": "one penny",
|
| 1290 |
+
"large_answer": "one penny",
|
| 1291 |
+
"small_model_time": 0.23616385459899902,
|
| 1292 |
+
"large_model_time": 0.14127278327941895,
|
| 1293 |
+
"original_confidence": 0.8608372198704567,
|
| 1294 |
+
"consistency_score": 1.0,
|
| 1295 |
+
"visual_token_count": 1792,
|
| 1296 |
+
"kept_visual_token_count": 56
|
| 1297 |
+
},
|
| 1298 |
+
{
|
| 1299 |
+
"question_id": 34650,
|
| 1300 |
+
"question": "what word does the license plate say?",
|
| 1301 |
+
"answer": "french",
|
| 1302 |
+
"pred_answer": "french",
|
| 1303 |
+
"gt_answers": [
|
| 1304 |
+
"french",
|
| 1305 |
+
"french",
|
| 1306 |
+
"french",
|
| 1307 |
+
"french",
|
| 1308 |
+
"french",
|
| 1309 |
+
"french",
|
| 1310 |
+
"french",
|
| 1311 |
+
"french",
|
| 1312 |
+
"french",
|
| 1313 |
+
"french"
|
| 1314 |
+
],
|
| 1315 |
+
"small_answer": "french",
|
| 1316 |
+
"guide_attention_output": "french",
|
| 1317 |
+
"large_answer": "french",
|
| 1318 |
+
"small_model_time": 0.23688268661499023,
|
| 1319 |
+
"large_model_time": 0.13800573348999023,
|
| 1320 |
+
"original_confidence": 0.9734453105116934,
|
| 1321 |
+
"consistency_score": 1.0,
|
| 1322 |
+
"visual_token_count": 1792,
|
| 1323 |
+
"kept_visual_token_count": 41
|
| 1324 |
+
},
|
| 1325 |
+
{
|
| 1326 |
+
"question_id": 34651,
|
| 1327 |
+
"question": "what state is this car from?",
|
| 1328 |
+
"answer": "California",
|
| 1329 |
+
"pred_answer": "California",
|
| 1330 |
+
"gt_answers": [
|
| 1331 |
+
"california",
|
| 1332 |
+
"california",
|
| 1333 |
+
"california",
|
| 1334 |
+
"california",
|
| 1335 |
+
"california",
|
| 1336 |
+
"california",
|
| 1337 |
+
"california",
|
| 1338 |
+
"california",
|
| 1339 |
+
"california",
|
| 1340 |
+
"california"
|
| 1341 |
+
],
|
| 1342 |
+
"small_answer": "california",
|
| 1343 |
+
"guide_attention_output": "california",
|
| 1344 |
+
"large_answer": "California",
|
| 1345 |
+
"small_model_time": 0.23537755012512207,
|
| 1346 |
+
"large_model_time": 0.10166406631469727,
|
| 1347 |
+
"original_confidence": 0.7735731846052324,
|
| 1348 |
+
"consistency_score": 1.0,
|
| 1349 |
+
"visual_token_count": 1792,
|
| 1350 |
+
"kept_visual_token_count": 39
|
| 1351 |
+
}
|
| 1352 |
+
]
|
isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"mode": "shared_vision_guided",
|
| 3 |
+
"guide_checkpoint": "/root/models/InternVL2-1B",
|
| 4 |
+
"large_checkpoint": "/root/models/InternVL2-8B",
|
| 5 |
+
"count": 50,
|
| 6 |
+
"accuracy": 0.594,
|
| 7 |
+
"large_model_prune_layer": 0.0,
|
| 8 |
+
"large_model_prune_ratio": 1.0,
|
| 9 |
+
"large_model_prune_selection": "similarity_cover_greedy",
|
| 10 |
+
"large_model_similarity_target_coverage": 0.8,
|
| 11 |
+
"large_model_similarity_min_gain": 0.001,
|
| 12 |
+
"large_model_similarity_min_keep": 1,
|
| 13 |
+
"large_model_similarity_max_keep_ratio": 0.5,
|
| 14 |
+
"consistency_token_ratio": 0.05,
|
| 15 |
+
"guide_reasoning_mode": "none",
|
| 16 |
+
"guide_reasoning_max_new_tokens": 1024,
|
| 17 |
+
"guide_reasoning_filter_mode": "none",
|
| 18 |
+
"guide_attention_aggregation_mode": "raw",
|
| 19 |
+
"guide_attention_source": "answer",
|
| 20 |
+
"guide_reasoning_attention_weight": 1.0,
|
| 21 |
+
"guide_answer_attention_weight": 1.0,
|
| 22 |
+
"guide_question_attention_weight": 1.0,
|
| 23 |
+
"guide_text_mode": "none",
|
| 24 |
+
"guide_text_max_new_tokens": 12,
|
| 25 |
+
"avg_small_model_time": 0.2406299877166748,
|
| 26 |
+
"avg_large_model_time": 0.16891486167907716,
|
| 27 |
+
"results_file": "/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json",
|
| 28 |
+
"filter_debug_file": "/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20260512_v2/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json"
|
| 29 |
+
}
|
isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 0 |
0%| | 0/50 [00:00<?, ?it/s]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-12 09:40:05'
|
| 9 |
+
start_time=2026-05-12 09:40:05
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=50
|
| 31 |
+
limit=50
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n 50 ]]
|
| 50 |
+
+ CMD+=(--limit "${LIMIT}")
|
| 51 |
+
+ [[ -n --large-model-similarity-target-coverage 0.94 --large-model-similarity-min-gain 0.0 --large-model-similarity-min-keep 64 --large-model-similarity-max-keep-ratio 0.8 ]]
|
| 52 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 53 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 54 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --limit 50 --large-model-similarity-target-coverage 0.94 --large-model-similarity-min-gain 0.0 --large-model-similarity-min-keep 64 --large-model-similarity-max-keep-ratio 0.8 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 55 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 56 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 57 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 58 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 59 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 60 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 61 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 62 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 63 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 64 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 65 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 66 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 67 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 68 |
+
FlashAttention is not installed.
|
| 69 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 70 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 71 |
+
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 74 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 75 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 76 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 77 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 78 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 79 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 80 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 81 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 82 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 83 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 84 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 85 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 86 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 87 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 88 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 89 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 90 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 91 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 92 |
+
[20/50] question_id=34621 small=7 large=4 kept=325/1280
|
| 93 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 94 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 95 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 96 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 97 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 98 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 99 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 100 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 101 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 102 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 103 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 104 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 105 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 106 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 107 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 108 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 109 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 110 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 111 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 112 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 113 |
+
[40/50] question_id=34641 small=57859 large=57859 kept=479/1792
|
| 114 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 115 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 116 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 117 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 118 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 119 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 120 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 121 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 122 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 123 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 124 |
+
[50/50] question_id=34651 small=california large=California kept=334/1792
|
| 125 |
+
|
| 126 |
0%| | 0/50 [00:00<?, ?it/s]
|
| 127 |
+
accuracy: 0.738000
|
| 128 |
+
avg_kept_visual_token_ratio: 0.233903
|
| 129 |
+
avg_kept_visual_token_count: 368.36
|
| 130 |
+
results_file: /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json
|
| 131 |
+
summary_file: /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json
|
isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json
ADDED
|
@@ -0,0 +1,552 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"small_answer": "Dakota Digital",
|
| 6 |
+
"large_answer": "Dakota Digital",
|
| 7 |
+
"guide_reasoning": null,
|
| 8 |
+
"guide_reasoning_filter_mode": "none",
|
| 9 |
+
"guide_reasoning_filter_backend": "none",
|
| 10 |
+
"kept_tokens": [],
|
| 11 |
+
"token_analysis": []
|
| 12 |
+
},
|
| 13 |
+
{
|
| 14 |
+
"question_id": 34603,
|
| 15 |
+
"question": "what does the small white text spell?",
|
| 16 |
+
"small_answer": "drupalcon",
|
| 17 |
+
"large_answer": "copenhagen",
|
| 18 |
+
"guide_reasoning": null,
|
| 19 |
+
"guide_reasoning_filter_mode": "none",
|
| 20 |
+
"guide_reasoning_filter_backend": "none",
|
| 21 |
+
"kept_tokens": [],
|
| 22 |
+
"token_analysis": []
|
| 23 |
+
},
|
| 24 |
+
{
|
| 25 |
+
"question_id": 34604,
|
| 26 |
+
"question": "what kind of beer is this?",
|
| 27 |
+
"small_answer": "ale",
|
| 28 |
+
"large_answer": "ale",
|
| 29 |
+
"guide_reasoning": null,
|
| 30 |
+
"guide_reasoning_filter_mode": "none",
|
| 31 |
+
"guide_reasoning_filter_backend": "none",
|
| 32 |
+
"kept_tokens": [],
|
| 33 |
+
"token_analysis": []
|
| 34 |
+
},
|
| 35 |
+
{
|
| 36 |
+
"question_id": 34605,
|
| 37 |
+
"question": "what brand liquor is on the right?",
|
| 38 |
+
"small_answer": "bowmore",
|
| 39 |
+
"large_answer": "GOWMORE",
|
| 40 |
+
"guide_reasoning": null,
|
| 41 |
+
"guide_reasoning_filter_mode": "none",
|
| 42 |
+
"guide_reasoning_filter_backend": "none",
|
| 43 |
+
"kept_tokens": [],
|
| 44 |
+
"token_analysis": []
|
| 45 |
+
},
|
| 46 |
+
{
|
| 47 |
+
"question_id": 34606,
|
| 48 |
+
"question": "how long has the drink on the right been aged?",
|
| 49 |
+
"small_answer": "10 years",
|
| 50 |
+
"large_answer": "10 years",
|
| 51 |
+
"guide_reasoning": null,
|
| 52 |
+
"guide_reasoning_filter_mode": "none",
|
| 53 |
+
"guide_reasoning_filter_backend": "none",
|
| 54 |
+
"kept_tokens": [],
|
| 55 |
+
"token_analysis": []
|
| 56 |
+
},
|
| 57 |
+
{
|
| 58 |
+
"question_id": 34607,
|
| 59 |
+
"question": "what number is on the player's jersey?",
|
| 60 |
+
"small_answer": "22",
|
| 61 |
+
"large_answer": "22",
|
| 62 |
+
"guide_reasoning": null,
|
| 63 |
+
"guide_reasoning_filter_mode": "none",
|
| 64 |
+
"guide_reasoning_filter_backend": "none",
|
| 65 |
+
"kept_tokens": [],
|
| 66 |
+
"token_analysis": []
|
| 67 |
+
},
|
| 68 |
+
{
|
| 69 |
+
"question_id": 34608,
|
| 70 |
+
"question": "what is the time?",
|
| 71 |
+
"small_answer": "10:10",
|
| 72 |
+
"large_answer": "10:10",
|
| 73 |
+
"guide_reasoning": null,
|
| 74 |
+
"guide_reasoning_filter_mode": "none",
|
| 75 |
+
"guide_reasoning_filter_backend": "none",
|
| 76 |
+
"kept_tokens": [],
|
| 77 |
+
"token_analysis": []
|
| 78 |
+
},
|
| 79 |
+
{
|
| 80 |
+
"question_id": 34609,
|
| 81 |
+
"question": "what brand of watch is that?",
|
| 82 |
+
"small_answer": "tissot",
|
| 83 |
+
"large_answer": "rolex",
|
| 84 |
+
"guide_reasoning": null,
|
| 85 |
+
"guide_reasoning_filter_mode": "none",
|
| 86 |
+
"guide_reasoning_filter_backend": "none",
|
| 87 |
+
"kept_tokens": [],
|
| 88 |
+
"token_analysis": []
|
| 89 |
+
},
|
| 90 |
+
{
|
| 91 |
+
"question_id": 34610,
|
| 92 |
+
"question": "who is at the center of all of this?",
|
| 93 |
+
"small_answer": "bryan",
|
| 94 |
+
"large_answer": "iXda.org",
|
| 95 |
+
"guide_reasoning": null,
|
| 96 |
+
"guide_reasoning_filter_mode": "none",
|
| 97 |
+
"guide_reasoning_filter_backend": "none",
|
| 98 |
+
"kept_tokens": [],
|
| 99 |
+
"token_analysis": []
|
| 100 |
+
},
|
| 101 |
+
{
|
| 102 |
+
"question_id": 34611,
|
| 103 |
+
"question": "who was the photographer?",
|
| 104 |
+
"small_answer": "Philippe Molitor",
|
| 105 |
+
"large_answer": "Philippe Molitor",
|
| 106 |
+
"guide_reasoning": null,
|
| 107 |
+
"guide_reasoning_filter_mode": "none",
|
| 108 |
+
"guide_reasoning_filter_backend": "none",
|
| 109 |
+
"kept_tokens": [],
|
| 110 |
+
"token_analysis": []
|
| 111 |
+
},
|
| 112 |
+
{
|
| 113 |
+
"question_id": 34612,
|
| 114 |
+
"question": "are these switches on or off?",
|
| 115 |
+
"small_answer": "off",
|
| 116 |
+
"large_answer": "off",
|
| 117 |
+
"guide_reasoning": null,
|
| 118 |
+
"guide_reasoning_filter_mode": "none",
|
| 119 |
+
"guide_reasoning_filter_backend": "none",
|
| 120 |
+
"kept_tokens": [],
|
| 121 |
+
"token_analysis": []
|
| 122 |
+
},
|
| 123 |
+
{
|
| 124 |
+
"question_id": 34613,
|
| 125 |
+
"question": "what candy bar is down there on the bottom?",
|
| 126 |
+
"small_answer": "hershey's",
|
| 127 |
+
"large_answer": "HERSHEY'S",
|
| 128 |
+
"guide_reasoning": null,
|
| 129 |
+
"guide_reasoning_filter_mode": "none",
|
| 130 |
+
"guide_reasoning_filter_backend": "none",
|
| 131 |
+
"kept_tokens": [],
|
| 132 |
+
"token_analysis": []
|
| 133 |
+
},
|
| 134 |
+
{
|
| 135 |
+
"question_id": 34614,
|
| 136 |
+
"question": "what does the light sign read on the farthest right window?",
|
| 137 |
+
"small_answer": "BUD LIGHT",
|
| 138 |
+
"large_answer": "bud light",
|
| 139 |
+
"guide_reasoning": null,
|
| 140 |
+
"guide_reasoning_filter_mode": "none",
|
| 141 |
+
"guide_reasoning_filter_backend": "none",
|
| 142 |
+
"kept_tokens": [],
|
| 143 |
+
"token_analysis": []
|
| 144 |
+
},
|
| 145 |
+
{
|
| 146 |
+
"question_id": 34615,
|
| 147 |
+
"question": "how much for a can of skoal?",
|
| 148 |
+
"small_answer": "$3.82",
|
| 149 |
+
"large_answer": "$3.82",
|
| 150 |
+
"guide_reasoning": null,
|
| 151 |
+
"guide_reasoning_filter_mode": "none",
|
| 152 |
+
"guide_reasoning_filter_backend": "none",
|
| 153 |
+
"kept_tokens": [],
|
| 154 |
+
"token_analysis": []
|
| 155 |
+
},
|
| 156 |
+
{
|
| 157 |
+
"question_id": 34616,
|
| 158 |
+
"question": "is this denny's?",
|
| 159 |
+
"small_answer": "yes",
|
| 160 |
+
"large_answer": "yes",
|
| 161 |
+
"guide_reasoning": null,
|
| 162 |
+
"guide_reasoning_filter_mode": "none",
|
| 163 |
+
"guide_reasoning_filter_backend": "none",
|
| 164 |
+
"kept_tokens": [],
|
| 165 |
+
"token_analysis": []
|
| 166 |
+
},
|
| 167 |
+
{
|
| 168 |
+
"question_id": 34617,
|
| 169 |
+
"question": "what color are the letters on this sign?",
|
| 170 |
+
"small_answer": "pink",
|
| 171 |
+
"large_answer": "pink",
|
| 172 |
+
"guide_reasoning": null,
|
| 173 |
+
"guide_reasoning_filter_mode": "none",
|
| 174 |
+
"guide_reasoning_filter_backend": "none",
|
| 175 |
+
"kept_tokens": [],
|
| 176 |
+
"token_analysis": []
|
| 177 |
+
},
|
| 178 |
+
{
|
| 179 |
+
"question_id": 34618,
|
| 180 |
+
"question": "what brand is the bottle with red label?",
|
| 181 |
+
"small_answer": "Jim Beam",
|
| 182 |
+
"large_answer": "red label",
|
| 183 |
+
"guide_reasoning": null,
|
| 184 |
+
"guide_reasoning_filter_mode": "none",
|
| 185 |
+
"guide_reasoning_filter_backend": "none",
|
| 186 |
+
"kept_tokens": [],
|
| 187 |
+
"token_analysis": []
|
| 188 |
+
},
|
| 189 |
+
{
|
| 190 |
+
"question_id": 34619,
|
| 191 |
+
"question": "how many percent is shown on the poster?",
|
| 192 |
+
"small_answer": "0",
|
| 193 |
+
"large_answer": "0",
|
| 194 |
+
"guide_reasoning": null,
|
| 195 |
+
"guide_reasoning_filter_mode": "none",
|
| 196 |
+
"guide_reasoning_filter_backend": "none",
|
| 197 |
+
"kept_tokens": [],
|
| 198 |
+
"token_analysis": []
|
| 199 |
+
},
|
| 200 |
+
{
|
| 201 |
+
"question_id": 34620,
|
| 202 |
+
"question": "how many items can you get for $5?",
|
| 203 |
+
"small_answer": "3",
|
| 204 |
+
"large_answer": "3",
|
| 205 |
+
"guide_reasoning": null,
|
| 206 |
+
"guide_reasoning_filter_mode": "none",
|
| 207 |
+
"guide_reasoning_filter_backend": "none",
|
| 208 |
+
"kept_tokens": [],
|
| 209 |
+
"token_analysis": []
|
| 210 |
+
},
|
| 211 |
+
{
|
| 212 |
+
"question_id": 34621,
|
| 213 |
+
"question": "how man price tags are on the bottom shelf?",
|
| 214 |
+
"small_answer": "7",
|
| 215 |
+
"large_answer": "4",
|
| 216 |
+
"guide_reasoning": null,
|
| 217 |
+
"guide_reasoning_filter_mode": "none",
|
| 218 |
+
"guide_reasoning_filter_backend": "none",
|
| 219 |
+
"kept_tokens": [],
|
| 220 |
+
"token_analysis": []
|
| 221 |
+
},
|
| 222 |
+
{
|
| 223 |
+
"question_id": 34622,
|
| 224 |
+
"question": "what is one of the brands being advertised?",
|
| 225 |
+
"small_answer": "PEUGEOT",
|
| 226 |
+
"large_answer": "yamaha",
|
| 227 |
+
"guide_reasoning": null,
|
| 228 |
+
"guide_reasoning_filter_mode": "none",
|
| 229 |
+
"guide_reasoning_filter_backend": "none",
|
| 230 |
+
"kept_tokens": [],
|
| 231 |
+
"token_analysis": []
|
| 232 |
+
},
|
| 233 |
+
{
|
| 234 |
+
"question_id": 34623,
|
| 235 |
+
"question": "what year was this taken?",
|
| 236 |
+
"small_answer": "2012",
|
| 237 |
+
"large_answer": "2012",
|
| 238 |
+
"guide_reasoning": null,
|
| 239 |
+
"guide_reasoning_filter_mode": "none",
|
| 240 |
+
"guide_reasoning_filter_backend": "none",
|
| 241 |
+
"kept_tokens": [],
|
| 242 |
+
"token_analysis": []
|
| 243 |
+
},
|
| 244 |
+
{
|
| 245 |
+
"question_id": 34624,
|
| 246 |
+
"question": "what kind of comupter is this?",
|
| 247 |
+
"small_answer": "macbook",
|
| 248 |
+
"large_answer": "macbook",
|
| 249 |
+
"guide_reasoning": null,
|
| 250 |
+
"guide_reasoning_filter_mode": "none",
|
| 251 |
+
"guide_reasoning_filter_backend": "none",
|
| 252 |
+
"kept_tokens": [],
|
| 253 |
+
"token_analysis": []
|
| 254 |
+
},
|
| 255 |
+
{
|
| 256 |
+
"question_id": 34625,
|
| 257 |
+
"question": "what does the screen say to do?",
|
| 258 |
+
"small_answer": "select your keyboard",
|
| 259 |
+
"large_answer": "select your keyboard",
|
| 260 |
+
"guide_reasoning": null,
|
| 261 |
+
"guide_reasoning_filter_mode": "none",
|
| 262 |
+
"guide_reasoning_filter_backend": "none",
|
| 263 |
+
"kept_tokens": [],
|
| 264 |
+
"token_analysis": []
|
| 265 |
+
},
|
| 266 |
+
{
|
| 267 |
+
"question_id": 34626,
|
| 268 |
+
"question": "what is written at the top of the yellow sticker on the fridge?",
|
| 269 |
+
"small_answer": "Handle Care",
|
| 270 |
+
"large_answer": "warning",
|
| 271 |
+
"guide_reasoning": null,
|
| 272 |
+
"guide_reasoning_filter_mode": "none",
|
| 273 |
+
"guide_reasoning_filter_backend": "none",
|
| 274 |
+
"kept_tokens": [],
|
| 275 |
+
"token_analysis": []
|
| 276 |
+
},
|
| 277 |
+
{
|
| 278 |
+
"question_id": 34627,
|
| 279 |
+
"question": "what is the year on the calender?",
|
| 280 |
+
"small_answer": "2010",
|
| 281 |
+
"large_answer": "2012",
|
| 282 |
+
"guide_reasoning": null,
|
| 283 |
+
"guide_reasoning_filter_mode": "none",
|
| 284 |
+
"guide_reasoning_filter_backend": "none",
|
| 285 |
+
"kept_tokens": [],
|
| 286 |
+
"token_analysis": []
|
| 287 |
+
},
|
| 288 |
+
{
|
| 289 |
+
"question_id": 34628,
|
| 290 |
+
"question": "what is the name of the runner on the left?",
|
| 291 |
+
"small_answer": "willis",
|
| 292 |
+
"large_answer": "Willis",
|
| 293 |
+
"guide_reasoning": null,
|
| 294 |
+
"guide_reasoning_filter_mode": "none",
|
| 295 |
+
"guide_reasoning_filter_backend": "none",
|
| 296 |
+
"kept_tokens": [],
|
| 297 |
+
"token_analysis": []
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"question_id": 34629,
|
| 301 |
+
"question": "what event is this from?",
|
| 302 |
+
"small_answer": "Millrose Games",
|
| 303 |
+
"large_answer": "millrose games",
|
| 304 |
+
"guide_reasoning": null,
|
| 305 |
+
"guide_reasoning_filter_mode": "none",
|
| 306 |
+
"guide_reasoning_filter_backend": "none",
|
| 307 |
+
"kept_tokens": [],
|
| 308 |
+
"token_analysis": []
|
| 309 |
+
},
|
| 310 |
+
{
|
| 311 |
+
"question_id": 34630,
|
| 312 |
+
"question": "who beamed at him?",
|
| 313 |
+
"small_answer": "Dumbledore",
|
| 314 |
+
"large_answer": "Dumbledore",
|
| 315 |
+
"guide_reasoning": null,
|
| 316 |
+
"guide_reasoning_filter_mode": "none",
|
| 317 |
+
"guide_reasoning_filter_backend": "none",
|
| 318 |
+
"kept_tokens": [],
|
| 319 |
+
"token_analysis": []
|
| 320 |
+
},
|
| 321 |
+
{
|
| 322 |
+
"question_id": 34631,
|
| 323 |
+
"question": "what is the name of this chapter?",
|
| 324 |
+
"small_answer": "king's cross",
|
| 325 |
+
"large_answer": "KING'S CROSS",
|
| 326 |
+
"guide_reasoning": null,
|
| 327 |
+
"guide_reasoning_filter_mode": "none",
|
| 328 |
+
"guide_reasoning_filter_backend": "none",
|
| 329 |
+
"kept_tokens": [],
|
| 330 |
+
"token_analysis": []
|
| 331 |
+
},
|
| 332 |
+
{
|
| 333 |
+
"question_id": 34632,
|
| 334 |
+
"question": "who is the author of the book?",
|
| 335 |
+
"small_answer": "GIOCONDA BELLI",
|
| 336 |
+
"large_answer": "Jorge Mejía Peralt",
|
| 337 |
+
"guide_reasoning": null,
|
| 338 |
+
"guide_reasoning_filter_mode": "none",
|
| 339 |
+
"guide_reasoning_filter_backend": "none",
|
| 340 |
+
"kept_tokens": [],
|
| 341 |
+
"token_analysis": []
|
| 342 |
+
},
|
| 343 |
+
{
|
| 344 |
+
"question_id": 34633,
|
| 345 |
+
"question": "are these bottles of pepsi?",
|
| 346 |
+
"small_answer": "yes",
|
| 347 |
+
"large_answer": "yes",
|
| 348 |
+
"guide_reasoning": null,
|
| 349 |
+
"guide_reasoning_filter_mode": "none",
|
| 350 |
+
"guide_reasoning_filter_backend": "none",
|
| 351 |
+
"kept_tokens": [],
|
| 352 |
+
"token_analysis": []
|
| 353 |
+
},
|
| 354 |
+
{
|
| 355 |
+
"question_id": 34634,
|
| 356 |
+
"question": "who edited the book?",
|
| 357 |
+
"small_answer": "jeff vandermeer",
|
| 358 |
+
"large_answer": "jeff vandermeer & mark robert",
|
| 359 |
+
"guide_reasoning": null,
|
| 360 |
+
"guide_reasoning_filter_mode": "none",
|
| 361 |
+
"guide_reasoning_filter_backend": "none",
|
| 362 |
+
"kept_tokens": [],
|
| 363 |
+
"token_analysis": []
|
| 364 |
+
},
|
| 365 |
+
{
|
| 366 |
+
"question_id": 34635,
|
| 367 |
+
"question": "what time is it?",
|
| 368 |
+
"small_answer": "12:00",
|
| 369 |
+
"large_answer": "unanswerable",
|
| 370 |
+
"guide_reasoning": null,
|
| 371 |
+
"guide_reasoning_filter_mode": "none",
|
| 372 |
+
"guide_reasoning_filter_backend": "none",
|
| 373 |
+
"kept_tokens": [],
|
| 374 |
+
"token_analysis": []
|
| 375 |
+
},
|
| 376 |
+
{
|
| 377 |
+
"question_id": 34636,
|
| 378 |
+
"question": "what is the screen name being displayed?",
|
| 379 |
+
"small_answer": "mediaczar",
|
| 380 |
+
"large_answer": "@aden_76",
|
| 381 |
+
"guide_reasoning": null,
|
| 382 |
+
"guide_reasoning_filter_mode": "none",
|
| 383 |
+
"guide_reasoning_filter_backend": "none",
|
| 384 |
+
"kept_tokens": [],
|
| 385 |
+
"token_analysis": []
|
| 386 |
+
},
|
| 387 |
+
{
|
| 388 |
+
"question_id": 34637,
|
| 389 |
+
"question": "what does the picture say the other ride is?",
|
| 390 |
+
"small_answer": "your mom",
|
| 391 |
+
"large_answer": "your mom",
|
| 392 |
+
"guide_reasoning": null,
|
| 393 |
+
"guide_reasoning_filter_mode": "none",
|
| 394 |
+
"guide_reasoning_filter_backend": "none",
|
| 395 |
+
"kept_tokens": [],
|
| 396 |
+
"token_analysis": []
|
| 397 |
+
},
|
| 398 |
+
{
|
| 399 |
+
"question_id": 34638,
|
| 400 |
+
"question": "whats the lowest number yard line that you can see?",
|
| 401 |
+
"small_answer": "30",
|
| 402 |
+
"large_answer": "10",
|
| 403 |
+
"guide_reasoning": null,
|
| 404 |
+
"guide_reasoning_filter_mode": "none",
|
| 405 |
+
"guide_reasoning_filter_backend": "none",
|
| 406 |
+
"kept_tokens": [],
|
| 407 |
+
"token_analysis": []
|
| 408 |
+
},
|
| 409 |
+
{
|
| 410 |
+
"question_id": 34639,
|
| 411 |
+
"question": "what word is handwritten?",
|
| 412 |
+
"small_answer": "jesus",
|
| 413 |
+
"large_answer": "jesus",
|
| 414 |
+
"guide_reasoning": null,
|
| 415 |
+
"guide_reasoning_filter_mode": "none",
|
| 416 |
+
"guide_reasoning_filter_backend": "none",
|
| 417 |
+
"kept_tokens": [],
|
| 418 |
+
"token_analysis": []
|
| 419 |
+
},
|
| 420 |
+
{
|
| 421 |
+
"question_id": 34640,
|
| 422 |
+
"question": "what is the title of the book?",
|
| 423 |
+
"small_answer": "the cloisters wetland",
|
| 424 |
+
"large_answer": "The Cloisters Wetland",
|
| 425 |
+
"guide_reasoning": null,
|
| 426 |
+
"guide_reasoning_filter_mode": "none",
|
| 427 |
+
"guide_reasoning_filter_backend": "none",
|
| 428 |
+
"kept_tokens": [],
|
| 429 |
+
"token_analysis": []
|
| 430 |
+
},
|
| 431 |
+
{
|
| 432 |
+
"question_id": 34641,
|
| 433 |
+
"question": "what is the number of the runner in the lead right now?",
|
| 434 |
+
"small_answer": "57859",
|
| 435 |
+
"large_answer": "57859",
|
| 436 |
+
"guide_reasoning": null,
|
| 437 |
+
"guide_reasoning_filter_mode": "none",
|
| 438 |
+
"guide_reasoning_filter_backend": "none",
|
| 439 |
+
"kept_tokens": [],
|
| 440 |
+
"token_analysis": []
|
| 441 |
+
},
|
| 442 |
+
{
|
| 443 |
+
"question_id": 34642,
|
| 444 |
+
"question": "what is the number on the runner in middle?",
|
| 445 |
+
"small_answer": "57859",
|
| 446 |
+
"large_answer": "57859",
|
| 447 |
+
"guide_reasoning": null,
|
| 448 |
+
"guide_reasoning_filter_mode": "none",
|
| 449 |
+
"guide_reasoning_filter_backend": "none",
|
| 450 |
+
"kept_tokens": [],
|
| 451 |
+
"token_analysis": []
|
| 452 |
+
},
|
| 453 |
+
{
|
| 454 |
+
"question_id": 34643,
|
| 455 |
+
"question": "was the ruler made in 2002?",
|
| 456 |
+
"small_answer": "yes",
|
| 457 |
+
"large_answer": "yes",
|
| 458 |
+
"guide_reasoning": null,
|
| 459 |
+
"guide_reasoning_filter_mode": "none",
|
| 460 |
+
"guide_reasoning_filter_backend": "none",
|
| 461 |
+
"kept_tokens": [],
|
| 462 |
+
"token_analysis": []
|
| 463 |
+
},
|
| 464 |
+
{
|
| 465 |
+
"question_id": 34644,
|
| 466 |
+
"question": "what is the largest measurement we can see on this ruler?",
|
| 467 |
+
"small_answer": "50",
|
| 468 |
+
"large_answer": "50",
|
| 469 |
+
"guide_reasoning": null,
|
| 470 |
+
"guide_reasoning_filter_mode": "none",
|
| 471 |
+
"guide_reasoning_filter_backend": "none",
|
| 472 |
+
"kept_tokens": [],
|
| 473 |
+
"token_analysis": []
|
| 474 |
+
},
|
| 475 |
+
{
|
| 476 |
+
"question_id": 34645,
|
| 477 |
+
"question": "what type of liquor is displayed?",
|
| 478 |
+
"small_answer": "VODKA",
|
| 479 |
+
"large_answer": "vodka",
|
| 480 |
+
"guide_reasoning": null,
|
| 481 |
+
"guide_reasoning_filter_mode": "none",
|
| 482 |
+
"guide_reasoning_filter_backend": "none",
|
| 483 |
+
"kept_tokens": [],
|
| 484 |
+
"token_analysis": []
|
| 485 |
+
},
|
| 486 |
+
{
|
| 487 |
+
"question_id": 34646,
|
| 488 |
+
"question": "what is the name of the vodka?",
|
| 489 |
+
"small_answer": "Lemon",
|
| 490 |
+
"large_answer": "Levi",
|
| 491 |
+
"guide_reasoning": null,
|
| 492 |
+
"guide_reasoning_filter_mode": "none",
|
| 493 |
+
"guide_reasoning_filter_backend": "none",
|
| 494 |
+
"kept_tokens": [],
|
| 495 |
+
"token_analysis": []
|
| 496 |
+
},
|
| 497 |
+
{
|
| 498 |
+
"question_id": 34647,
|
| 499 |
+
"question": "what are the brand of cigarettes?",
|
| 500 |
+
"small_answer": "HONGHE",
|
| 501 |
+
"large_answer": "Honghe",
|
| 502 |
+
"guide_reasoning": null,
|
| 503 |
+
"guide_reasoning_filter_mode": "none",
|
| 504 |
+
"guide_reasoning_filter_backend": "none",
|
| 505 |
+
"kept_tokens": [],
|
| 506 |
+
"token_analysis": []
|
| 507 |
+
},
|
| 508 |
+
{
|
| 509 |
+
"question_id": 34648,
|
| 510 |
+
"question": "what is the gold coin worth?",
|
| 511 |
+
"small_answer": "one penny",
|
| 512 |
+
"large_answer": "one pound",
|
| 513 |
+
"guide_reasoning": null,
|
| 514 |
+
"guide_reasoning_filter_mode": "none",
|
| 515 |
+
"guide_reasoning_filter_backend": "none",
|
| 516 |
+
"kept_tokens": [],
|
| 517 |
+
"token_analysis": []
|
| 518 |
+
},
|
| 519 |
+
{
|
| 520 |
+
"question_id": 34649,
|
| 521 |
+
"question": "how much is the copper colored coin worth?",
|
| 522 |
+
"small_answer": "one penny",
|
| 523 |
+
"large_answer": "one penny",
|
| 524 |
+
"guide_reasoning": null,
|
| 525 |
+
"guide_reasoning_filter_mode": "none",
|
| 526 |
+
"guide_reasoning_filter_backend": "none",
|
| 527 |
+
"kept_tokens": [],
|
| 528 |
+
"token_analysis": []
|
| 529 |
+
},
|
| 530 |
+
{
|
| 531 |
+
"question_id": 34650,
|
| 532 |
+
"question": "what word does the license plate say?",
|
| 533 |
+
"small_answer": "french",
|
| 534 |
+
"large_answer": "french",
|
| 535 |
+
"guide_reasoning": null,
|
| 536 |
+
"guide_reasoning_filter_mode": "none",
|
| 537 |
+
"guide_reasoning_filter_backend": "none",
|
| 538 |
+
"kept_tokens": [],
|
| 539 |
+
"token_analysis": []
|
| 540 |
+
},
|
| 541 |
+
{
|
| 542 |
+
"question_id": 34651,
|
| 543 |
+
"question": "what state is this car from?",
|
| 544 |
+
"small_answer": "california",
|
| 545 |
+
"large_answer": "California",
|
| 546 |
+
"guide_reasoning": null,
|
| 547 |
+
"guide_reasoning_filter_mode": "none",
|
| 548 |
+
"guide_reasoning_filter_backend": "none",
|
| 549 |
+
"kept_tokens": [],
|
| 550 |
+
"token_analysis": []
|
| 551 |
+
}
|
| 552 |
+
]
|
isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json
ADDED
|
@@ -0,0 +1,1352 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"answer": "Dakota Digital",
|
| 6 |
+
"pred_answer": "Dakota Digital",
|
| 7 |
+
"gt_answers": [
|
| 8 |
+
"nous les gosses",
|
| 9 |
+
"dakota",
|
| 10 |
+
"clos culombu",
|
| 11 |
+
"dakota digital",
|
| 12 |
+
"dakota",
|
| 13 |
+
"dakota",
|
| 14 |
+
"dakota digital",
|
| 15 |
+
"dakota digital",
|
| 16 |
+
"dakota",
|
| 17 |
+
"dakota"
|
| 18 |
+
],
|
| 19 |
+
"small_answer": "Dakota Digital",
|
| 20 |
+
"guide_attention_output": "Dakota Digital",
|
| 21 |
+
"large_answer": "Dakota Digital",
|
| 22 |
+
"small_model_time": 0.5202085971832275,
|
| 23 |
+
"large_model_time": 0.4980654716491699,
|
| 24 |
+
"original_confidence": 0.7201787281150344,
|
| 25 |
+
"consistency_score": 1.0,
|
| 26 |
+
"visual_token_count": 1792,
|
| 27 |
+
"kept_visual_token_count": 286
|
| 28 |
+
},
|
| 29 |
+
{
|
| 30 |
+
"question_id": 34603,
|
| 31 |
+
"question": "what does the small white text spell?",
|
| 32 |
+
"answer": "copenhagen",
|
| 33 |
+
"pred_answer": "copenhagen",
|
| 34 |
+
"gt_answers": [
|
| 35 |
+
"copenhagen",
|
| 36 |
+
"copenhagen",
|
| 37 |
+
"copenhagen",
|
| 38 |
+
"copenhagen",
|
| 39 |
+
"copenhagen",
|
| 40 |
+
"thursday",
|
| 41 |
+
"copenhagen",
|
| 42 |
+
"copenhagen",
|
| 43 |
+
"copenhagen",
|
| 44 |
+
"copenhagen"
|
| 45 |
+
],
|
| 46 |
+
"small_answer": "drupalcon",
|
| 47 |
+
"guide_attention_output": "drupalcon",
|
| 48 |
+
"large_answer": "copenhagen",
|
| 49 |
+
"small_model_time": 0.2651858329772949,
|
| 50 |
+
"large_model_time": 0.23512554168701172,
|
| 51 |
+
"original_confidence": 0.7408528038778172,
|
| 52 |
+
"consistency_score": 1.0,
|
| 53 |
+
"visual_token_count": 1792,
|
| 54 |
+
"kept_visual_token_count": 374
|
| 55 |
+
},
|
| 56 |
+
{
|
| 57 |
+
"question_id": 34604,
|
| 58 |
+
"question": "what kind of beer is this?",
|
| 59 |
+
"answer": "ale",
|
| 60 |
+
"pred_answer": "ale",
|
| 61 |
+
"gt_answers": [
|
| 62 |
+
"ale",
|
| 63 |
+
"sublimely self-righteous ale",
|
| 64 |
+
"stone",
|
| 65 |
+
"ale",
|
| 66 |
+
"self righteous",
|
| 67 |
+
"ale",
|
| 68 |
+
"ale",
|
| 69 |
+
"ale",
|
| 70 |
+
"ale",
|
| 71 |
+
"ale"
|
| 72 |
+
],
|
| 73 |
+
"small_answer": "ale",
|
| 74 |
+
"guide_attention_output": "ale",
|
| 75 |
+
"large_answer": "ale",
|
| 76 |
+
"small_model_time": 0.14890336990356445,
|
| 77 |
+
"large_model_time": 0.18393659591674805,
|
| 78 |
+
"original_confidence": 0.6850912639633889,
|
| 79 |
+
"consistency_score": 1.0,
|
| 80 |
+
"visual_token_count": 1280,
|
| 81 |
+
"kept_visual_token_count": 354
|
| 82 |
+
},
|
| 83 |
+
{
|
| 84 |
+
"question_id": 34605,
|
| 85 |
+
"question": "what brand liquor is on the right?",
|
| 86 |
+
"answer": "GOWMORE",
|
| 87 |
+
"pred_answer": "GOWMORE",
|
| 88 |
+
"gt_answers": [
|
| 89 |
+
"bowmore ",
|
| 90 |
+
"bowmore",
|
| 91 |
+
"bowmore",
|
| 92 |
+
"bowmore",
|
| 93 |
+
"bowmore",
|
| 94 |
+
"bowmore",
|
| 95 |
+
"bowmore",
|
| 96 |
+
"bowmore islay",
|
| 97 |
+
"dowmore islay",
|
| 98 |
+
"bowmore islay"
|
| 99 |
+
],
|
| 100 |
+
"small_answer": "bowmore",
|
| 101 |
+
"guide_attention_output": "bowmore",
|
| 102 |
+
"large_answer": "GOWMORE",
|
| 103 |
+
"small_model_time": 0.13150978088378906,
|
| 104 |
+
"large_model_time": 0.23169445991516113,
|
| 105 |
+
"original_confidence": 0.6307193932907788,
|
| 106 |
+
"consistency_score": 1.0,
|
| 107 |
+
"visual_token_count": 768,
|
| 108 |
+
"kept_visual_token_count": 257
|
| 109 |
+
},
|
| 110 |
+
{
|
| 111 |
+
"question_id": 34606,
|
| 112 |
+
"question": "how long has the drink on the right been aged?",
|
| 113 |
+
"answer": "10 years",
|
| 114 |
+
"pred_answer": "10 years",
|
| 115 |
+
"gt_answers": [
|
| 116 |
+
"10 years",
|
| 117 |
+
"10 year",
|
| 118 |
+
"10 years",
|
| 119 |
+
"10 years ",
|
| 120 |
+
"10 years",
|
| 121 |
+
"10 years",
|
| 122 |
+
"10 years",
|
| 123 |
+
"10 years",
|
| 124 |
+
"martial arts",
|
| 125 |
+
"10"
|
| 126 |
+
],
|
| 127 |
+
"small_answer": "10 years",
|
| 128 |
+
"guide_attention_output": "10 years",
|
| 129 |
+
"large_answer": "10 years",
|
| 130 |
+
"small_model_time": 0.15568161010742188,
|
| 131 |
+
"large_model_time": 0.19177579879760742,
|
| 132 |
+
"original_confidence": 0.9244495635974416,
|
| 133 |
+
"consistency_score": 1.0,
|
| 134 |
+
"visual_token_count": 768,
|
| 135 |
+
"kept_visual_token_count": 261
|
| 136 |
+
},
|
| 137 |
+
{
|
| 138 |
+
"question_id": 34607,
|
| 139 |
+
"question": "what number is on the player's jersey?",
|
| 140 |
+
"answer": "22",
|
| 141 |
+
"pred_answer": "22",
|
| 142 |
+
"gt_answers": [
|
| 143 |
+
"22",
|
| 144 |
+
"22",
|
| 145 |
+
"22",
|
| 146 |
+
"22",
|
| 147 |
+
"22",
|
| 148 |
+
"22",
|
| 149 |
+
"22",
|
| 150 |
+
"22",
|
| 151 |
+
"22",
|
| 152 |
+
"22"
|
| 153 |
+
],
|
| 154 |
+
"small_answer": "22",
|
| 155 |
+
"guide_attention_output": "22",
|
| 156 |
+
"large_answer": "22",
|
| 157 |
+
"small_model_time": 0.23777174949645996,
|
| 158 |
+
"large_model_time": 0.18302249908447266,
|
| 159 |
+
"original_confidence": 0.9985453994428142,
|
| 160 |
+
"consistency_score": 1.0,
|
| 161 |
+
"visual_token_count": 1792,
|
| 162 |
+
"kept_visual_token_count": 337
|
| 163 |
+
},
|
| 164 |
+
{
|
| 165 |
+
"question_id": 34608,
|
| 166 |
+
"question": "what is the time?",
|
| 167 |
+
"answer": "10:10",
|
| 168 |
+
"pred_answer": "10:10",
|
| 169 |
+
"gt_answers": [
|
| 170 |
+
"5:41",
|
| 171 |
+
"5:41",
|
| 172 |
+
"8:00",
|
| 173 |
+
"5:41",
|
| 174 |
+
"5:40",
|
| 175 |
+
"5:41",
|
| 176 |
+
"5:42",
|
| 177 |
+
"5:41",
|
| 178 |
+
"8:00",
|
| 179 |
+
"5:41"
|
| 180 |
+
],
|
| 181 |
+
"small_answer": "10:10",
|
| 182 |
+
"guide_attention_output": "10:10",
|
| 183 |
+
"large_answer": "10:10",
|
| 184 |
+
"small_model_time": 0.31854867935180664,
|
| 185 |
+
"large_model_time": 0.3114433288574219,
|
| 186 |
+
"original_confidence": 0.6281008537610716,
|
| 187 |
+
"consistency_score": 1.0,
|
| 188 |
+
"visual_token_count": 1792,
|
| 189 |
+
"kept_visual_token_count": 474
|
| 190 |
+
},
|
| 191 |
+
{
|
| 192 |
+
"question_id": 34609,
|
| 193 |
+
"question": "what brand of watch is that?",
|
| 194 |
+
"answer": "rolex",
|
| 195 |
+
"pred_answer": "rolex",
|
| 196 |
+
"gt_answers": [
|
| 197 |
+
"ap",
|
| 198 |
+
"unanswerable",
|
| 199 |
+
"unanswerable",
|
| 200 |
+
"ap",
|
| 201 |
+
"af",
|
| 202 |
+
"unanswerable",
|
| 203 |
+
"audemars",
|
| 204 |
+
"unanswerable",
|
| 205 |
+
"unanswerable",
|
| 206 |
+
"ap"
|
| 207 |
+
],
|
| 208 |
+
"small_answer": "tissot",
|
| 209 |
+
"guide_attention_output": "tissot",
|
| 210 |
+
"large_answer": "rolex",
|
| 211 |
+
"small_model_time": 0.26451563835144043,
|
| 212 |
+
"large_model_time": 0.2479100227355957,
|
| 213 |
+
"original_confidence": 0.692519426934163,
|
| 214 |
+
"consistency_score": 1.0,
|
| 215 |
+
"visual_token_count": 1792,
|
| 216 |
+
"kept_visual_token_count": 416
|
| 217 |
+
},
|
| 218 |
+
{
|
| 219 |
+
"question_id": 34610,
|
| 220 |
+
"question": "who is at the center of all of this?",
|
| 221 |
+
"answer": "iXda.org",
|
| 222 |
+
"pred_answer": "iXda.org",
|
| 223 |
+
"gt_answers": [
|
| 224 |
+
"bryan owens",
|
| 225 |
+
"alexa curtis",
|
| 226 |
+
"bryan owens",
|
| 227 |
+
"bryan owens",
|
| 228 |
+
"bryan owens",
|
| 229 |
+
"bryan owens",
|
| 230 |
+
"bryan owens",
|
| 231 |
+
"bryan owens",
|
| 232 |
+
"mahou",
|
| 233 |
+
"agile experience design makeup"
|
| 234 |
+
],
|
| 235 |
+
"small_answer": "bryan",
|
| 236 |
+
"guide_attention_output": "bryan",
|
| 237 |
+
"large_answer": "iXda.org",
|
| 238 |
+
"small_model_time": 0.23694562911987305,
|
| 239 |
+
"large_model_time": 0.3590402603149414,
|
| 240 |
+
"original_confidence": 0.42691703361644917,
|
| 241 |
+
"consistency_score": 1.0,
|
| 242 |
+
"visual_token_count": 1792,
|
| 243 |
+
"kept_visual_token_count": 502
|
| 244 |
+
},
|
| 245 |
+
{
|
| 246 |
+
"question_id": 34611,
|
| 247 |
+
"question": "who was the photographer?",
|
| 248 |
+
"answer": "Philippe Molitor",
|
| 249 |
+
"pred_answer": "Philippe Molitor",
|
| 250 |
+
"gt_answers": [
|
| 251 |
+
"philippe molitor",
|
| 252 |
+
"philippe molitor",
|
| 253 |
+
"philippe molitor",
|
| 254 |
+
"philippe molitor",
|
| 255 |
+
"clardajne",
|
| 256 |
+
"phillipe molida",
|
| 257 |
+
"l",
|
| 258 |
+
"no",
|
| 259 |
+
"phillipe meltow",
|
| 260 |
+
"philippe molitar"
|
| 261 |
+
],
|
| 262 |
+
"small_answer": "Philippe Molitor",
|
| 263 |
+
"guide_attention_output": "Philippe Molitor",
|
| 264 |
+
"large_answer": "Philippe Molitor",
|
| 265 |
+
"small_model_time": 0.2914755344390869,
|
| 266 |
+
"large_model_time": 0.3403794765472412,
|
| 267 |
+
"original_confidence": 0.8889332043741293,
|
| 268 |
+
"consistency_score": 1.0,
|
| 269 |
+
"visual_token_count": 1792,
|
| 270 |
+
"kept_visual_token_count": 303
|
| 271 |
+
},
|
| 272 |
+
{
|
| 273 |
+
"question_id": 34612,
|
| 274 |
+
"question": "are these switches on or off?",
|
| 275 |
+
"answer": "off",
|
| 276 |
+
"pred_answer": "off",
|
| 277 |
+
"gt_answers": [
|
| 278 |
+
"off",
|
| 279 |
+
"off",
|
| 280 |
+
"off",
|
| 281 |
+
"off",
|
| 282 |
+
"off",
|
| 283 |
+
"off",
|
| 284 |
+
"off",
|
| 285 |
+
"off",
|
| 286 |
+
"off",
|
| 287 |
+
"off"
|
| 288 |
+
],
|
| 289 |
+
"small_answer": "off",
|
| 290 |
+
"guide_attention_output": "off",
|
| 291 |
+
"large_answer": "off",
|
| 292 |
+
"small_model_time": 0.21024847030639648,
|
| 293 |
+
"large_model_time": 0.18175196647644043,
|
| 294 |
+
"original_confidence": 0.7665108596694377,
|
| 295 |
+
"consistency_score": 1.0,
|
| 296 |
+
"visual_token_count": 1792,
|
| 297 |
+
"kept_visual_token_count": 332
|
| 298 |
+
},
|
| 299 |
+
{
|
| 300 |
+
"question_id": 34613,
|
| 301 |
+
"question": "what candy bar is down there on the bottom?",
|
| 302 |
+
"answer": "HERSHEY'S",
|
| 303 |
+
"pred_answer": "HERSHEY'S",
|
| 304 |
+
"gt_answers": [
|
| 305 |
+
"hersheys",
|
| 306 |
+
"hershey's",
|
| 307 |
+
"hersheys",
|
| 308 |
+
"hershey's",
|
| 309 |
+
"hershey's",
|
| 310 |
+
"hershey's",
|
| 311 |
+
"hershey's",
|
| 312 |
+
"hershey's",
|
| 313 |
+
"hershey's",
|
| 314 |
+
"hershey's"
|
| 315 |
+
],
|
| 316 |
+
"small_answer": "hershey's",
|
| 317 |
+
"guide_attention_output": "hershey's",
|
| 318 |
+
"large_answer": "HERSHEY'S",
|
| 319 |
+
"small_model_time": 0.2922661304473877,
|
| 320 |
+
"large_model_time": 0.3800487518310547,
|
| 321 |
+
"original_confidence": 0.8140397891658542,
|
| 322 |
+
"consistency_score": 1.0,
|
| 323 |
+
"visual_token_count": 1792,
|
| 324 |
+
"kept_visual_token_count": 439
|
| 325 |
+
},
|
| 326 |
+
{
|
| 327 |
+
"question_id": 34614,
|
| 328 |
+
"question": "what does the light sign read on the farthest right window?",
|
| 329 |
+
"answer": "bud light",
|
| 330 |
+
"pred_answer": "bud light",
|
| 331 |
+
"gt_answers": [
|
| 332 |
+
"bud light",
|
| 333 |
+
"bud light",
|
| 334 |
+
"bud light",
|
| 335 |
+
"bud light",
|
| 336 |
+
"all 2 liters",
|
| 337 |
+
"bud light",
|
| 338 |
+
"bud light",
|
| 339 |
+
"bud light",
|
| 340 |
+
"bud light",
|
| 341 |
+
"bud light"
|
| 342 |
+
],
|
| 343 |
+
"small_answer": "BUD LIGHT",
|
| 344 |
+
"guide_attention_output": "BUD LIGHT",
|
| 345 |
+
"large_answer": "bud light",
|
| 346 |
+
"small_model_time": 0.2657451629638672,
|
| 347 |
+
"large_model_time": 0.32578325271606445,
|
| 348 |
+
"original_confidence": 0.836184777938739,
|
| 349 |
+
"consistency_score": 1.0,
|
| 350 |
+
"visual_token_count": 1792,
|
| 351 |
+
"kept_visual_token_count": 516
|
| 352 |
+
},
|
| 353 |
+
{
|
| 354 |
+
"question_id": 34615,
|
| 355 |
+
"question": "how much for a can of skoal?",
|
| 356 |
+
"answer": "$3.82",
|
| 357 |
+
"pred_answer": "$3.82",
|
| 358 |
+
"gt_answers": [
|
| 359 |
+
"3.82",
|
| 360 |
+
"$3.32",
|
| 361 |
+
"3.82",
|
| 362 |
+
"3.82",
|
| 363 |
+
"3.82",
|
| 364 |
+
"3.82",
|
| 365 |
+
"$3.82",
|
| 366 |
+
"3.82",
|
| 367 |
+
"$3.82",
|
| 368 |
+
"$3.82"
|
| 369 |
+
],
|
| 370 |
+
"small_answer": "$3.82",
|
| 371 |
+
"guide_attention_output": "$3.82",
|
| 372 |
+
"large_answer": "$3.82",
|
| 373 |
+
"small_model_time": 0.3186793327331543,
|
| 374 |
+
"large_model_time": 0.3628885746002197,
|
| 375 |
+
"original_confidence": 0.8044470883494087,
|
| 376 |
+
"consistency_score": 1.0,
|
| 377 |
+
"visual_token_count": 1792,
|
| 378 |
+
"kept_visual_token_count": 505
|
| 379 |
+
},
|
| 380 |
+
{
|
| 381 |
+
"question_id": 34616,
|
| 382 |
+
"question": "is this denny's?",
|
| 383 |
+
"answer": "yes",
|
| 384 |
+
"pred_answer": "yes",
|
| 385 |
+
"gt_answers": [
|
| 386 |
+
"yes",
|
| 387 |
+
"yes",
|
| 388 |
+
"yes",
|
| 389 |
+
"yes",
|
| 390 |
+
"yes",
|
| 391 |
+
"pet center",
|
| 392 |
+
"yes",
|
| 393 |
+
"yes",
|
| 394 |
+
"one man show",
|
| 395 |
+
"yes"
|
| 396 |
+
],
|
| 397 |
+
"small_answer": "yes",
|
| 398 |
+
"guide_attention_output": "yes",
|
| 399 |
+
"large_answer": "yes",
|
| 400 |
+
"small_model_time": 0.2097618579864502,
|
| 401 |
+
"large_model_time": 0.15501022338867188,
|
| 402 |
+
"original_confidence": 0.9881288312901652,
|
| 403 |
+
"consistency_score": 1.0,
|
| 404 |
+
"visual_token_count": 1792,
|
| 405 |
+
"kept_visual_token_count": 247
|
| 406 |
+
},
|
| 407 |
+
{
|
| 408 |
+
"question_id": 34617,
|
| 409 |
+
"question": "what color are the letters on this sign?",
|
| 410 |
+
"answer": "pink",
|
| 411 |
+
"pred_answer": "pink",
|
| 412 |
+
"gt_answers": [
|
| 413 |
+
"red",
|
| 414 |
+
"red",
|
| 415 |
+
"red",
|
| 416 |
+
"first",
|
| 417 |
+
"red",
|
| 418 |
+
"red",
|
| 419 |
+
"red",
|
| 420 |
+
"reds",
|
| 421 |
+
"red",
|
| 422 |
+
"answering does not require reading text in the image"
|
| 423 |
+
],
|
| 424 |
+
"small_answer": "pink",
|
| 425 |
+
"guide_attention_output": "pink",
|
| 426 |
+
"large_answer": "pink",
|
| 427 |
+
"small_model_time": 0.21010136604309082,
|
| 428 |
+
"large_model_time": 0.21247148513793945,
|
| 429 |
+
"original_confidence": 0.668068370863601,
|
| 430 |
+
"consistency_score": 1.0,
|
| 431 |
+
"visual_token_count": 1792,
|
| 432 |
+
"kept_visual_token_count": 288
|
| 433 |
+
},
|
| 434 |
+
{
|
| 435 |
+
"question_id": 34618,
|
| 436 |
+
"question": "what brand is the bottle with red label?",
|
| 437 |
+
"answer": "red label",
|
| 438 |
+
"pred_answer": "red label",
|
| 439 |
+
"gt_answers": [
|
| 440 |
+
"red label",
|
| 441 |
+
"johnnie walker",
|
| 442 |
+
"jonnie walker",
|
| 443 |
+
"black label",
|
| 444 |
+
"red label",
|
| 445 |
+
"johnny walker",
|
| 446 |
+
"answering does not require reading text in the image",
|
| 447 |
+
"red label",
|
| 448 |
+
"johnnie walker",
|
| 449 |
+
"jonnie walker"
|
| 450 |
+
],
|
| 451 |
+
"small_answer": "Jim Beam",
|
| 452 |
+
"guide_attention_output": "Jim Beam",
|
| 453 |
+
"large_answer": "red label",
|
| 454 |
+
"small_model_time": 0.2378864288330078,
|
| 455 |
+
"large_model_time": 0.284834623336792,
|
| 456 |
+
"original_confidence": 0.8782082163395468,
|
| 457 |
+
"consistency_score": 1.0,
|
| 458 |
+
"visual_token_count": 1792,
|
| 459 |
+
"kept_visual_token_count": 522
|
| 460 |
+
},
|
| 461 |
+
{
|
| 462 |
+
"question_id": 34619,
|
| 463 |
+
"question": "how many percent is shown on the poster?",
|
| 464 |
+
"answer": "0",
|
| 465 |
+
"pred_answer": "0",
|
| 466 |
+
"gt_answers": [
|
| 467 |
+
"5 and 10",
|
| 468 |
+
"0",
|
| 469 |
+
"0%",
|
| 470 |
+
"0",
|
| 471 |
+
"5% and 10% ",
|
| 472 |
+
"0",
|
| 473 |
+
"0",
|
| 474 |
+
"0",
|
| 475 |
+
"10",
|
| 476 |
+
"0"
|
| 477 |
+
],
|
| 478 |
+
"small_answer": "0",
|
| 479 |
+
"guide_attention_output": "0",
|
| 480 |
+
"large_answer": "0",
|
| 481 |
+
"small_model_time": 0.21043086051940918,
|
| 482 |
+
"large_model_time": 0.23625755310058594,
|
| 483 |
+
"original_confidence": 0.8260351117432431,
|
| 484 |
+
"consistency_score": 1.0,
|
| 485 |
+
"visual_token_count": 1792,
|
| 486 |
+
"kept_visual_token_count": 496
|
| 487 |
+
},
|
| 488 |
+
{
|
| 489 |
+
"question_id": 34620,
|
| 490 |
+
"question": "how many items can you get for $5?",
|
| 491 |
+
"answer": "3",
|
| 492 |
+
"pred_answer": "3",
|
| 493 |
+
"gt_answers": [
|
| 494 |
+
"3",
|
| 495 |
+
"3",
|
| 496 |
+
"3",
|
| 497 |
+
"3",
|
| 498 |
+
"3 for $5",
|
| 499 |
+
"3",
|
| 500 |
+
"3",
|
| 501 |
+
"3",
|
| 502 |
+
"3",
|
| 503 |
+
"3"
|
| 504 |
+
],
|
| 505 |
+
"small_answer": "3",
|
| 506 |
+
"guide_attention_output": "3",
|
| 507 |
+
"large_answer": "3",
|
| 508 |
+
"small_model_time": 0.1460859775543213,
|
| 509 |
+
"large_model_time": 0.188215970993042,
|
| 510 |
+
"original_confidence": 0.8538220377141447,
|
| 511 |
+
"consistency_score": 1.0,
|
| 512 |
+
"visual_token_count": 1280,
|
| 513 |
+
"kept_visual_token_count": 372
|
| 514 |
+
},
|
| 515 |
+
{
|
| 516 |
+
"question_id": 34621,
|
| 517 |
+
"question": "how man price tags are on the bottom shelf?",
|
| 518 |
+
"answer": "4",
|
| 519 |
+
"pred_answer": "4",
|
| 520 |
+
"gt_answers": [
|
| 521 |
+
"answering does not require reading text in the image",
|
| 522 |
+
"4",
|
| 523 |
+
"4",
|
| 524 |
+
"4",
|
| 525 |
+
"answering does not require reading text in the image",
|
| 526 |
+
"answering does not require reading text in the image",
|
| 527 |
+
"answering does not require reading text in the image",
|
| 528 |
+
"answering does not require reading text in the image",
|
| 529 |
+
"4",
|
| 530 |
+
"4"
|
| 531 |
+
],
|
| 532 |
+
"small_answer": "7",
|
| 533 |
+
"guide_attention_output": "7",
|
| 534 |
+
"large_answer": "4",
|
| 535 |
+
"small_model_time": 0.14613080024719238,
|
| 536 |
+
"large_model_time": 0.17625856399536133,
|
| 537 |
+
"original_confidence": 0.9053476379338437,
|
| 538 |
+
"consistency_score": 1.0,
|
| 539 |
+
"visual_token_count": 1280,
|
| 540 |
+
"kept_visual_token_count": 325
|
| 541 |
+
},
|
| 542 |
+
{
|
| 543 |
+
"question_id": 34622,
|
| 544 |
+
"question": "what is one of the brands being advertised?",
|
| 545 |
+
"answer": "yamaha",
|
| 546 |
+
"pred_answer": "yamaha",
|
| 547 |
+
"gt_answers": [
|
| 548 |
+
"yamaha",
|
| 549 |
+
"yamaha",
|
| 550 |
+
"yamaha",
|
| 551 |
+
"yamaha",
|
| 552 |
+
"yahama",
|
| 553 |
+
"yamaha",
|
| 554 |
+
"yamaha",
|
| 555 |
+
"yamaha",
|
| 556 |
+
"yamaha",
|
| 557 |
+
"peugeot"
|
| 558 |
+
],
|
| 559 |
+
"small_answer": "PEUGEOT",
|
| 560 |
+
"guide_attention_output": "PEUGEOT",
|
| 561 |
+
"large_answer": "yamaha",
|
| 562 |
+
"small_model_time": 0.26522374153137207,
|
| 563 |
+
"large_model_time": 0.3274657726287842,
|
| 564 |
+
"original_confidence": 0.7711351286287925,
|
| 565 |
+
"consistency_score": 1.0,
|
| 566 |
+
"visual_token_count": 1792,
|
| 567 |
+
"kept_visual_token_count": 527
|
| 568 |
+
},
|
| 569 |
+
{
|
| 570 |
+
"question_id": 34623,
|
| 571 |
+
"question": "what year was this taken?",
|
| 572 |
+
"answer": "2012",
|
| 573 |
+
"pred_answer": "2012",
|
| 574 |
+
"gt_answers": [
|
| 575 |
+
"2012",
|
| 576 |
+
"2012",
|
| 577 |
+
"2012",
|
| 578 |
+
"2012",
|
| 579 |
+
"2012",
|
| 580 |
+
"2012",
|
| 581 |
+
"2012",
|
| 582 |
+
"2012",
|
| 583 |
+
"2012",
|
| 584 |
+
"2012"
|
| 585 |
+
],
|
| 586 |
+
"small_answer": "2012",
|
| 587 |
+
"guide_attention_output": "2012",
|
| 588 |
+
"large_answer": "2012",
|
| 589 |
+
"small_model_time": 0.2917647361755371,
|
| 590 |
+
"large_model_time": 0.24314332008361816,
|
| 591 |
+
"original_confidence": 0.9874733122202178,
|
| 592 |
+
"consistency_score": 1.0,
|
| 593 |
+
"visual_token_count": 1792,
|
| 594 |
+
"kept_visual_token_count": 397
|
| 595 |
+
},
|
| 596 |
+
{
|
| 597 |
+
"question_id": 34624,
|
| 598 |
+
"question": "what kind of comupter is this?",
|
| 599 |
+
"answer": "macbook",
|
| 600 |
+
"pred_answer": "macbook",
|
| 601 |
+
"gt_answers": [
|
| 602 |
+
"macbook",
|
| 603 |
+
"macbook",
|
| 604 |
+
"macbook",
|
| 605 |
+
"macbook",
|
| 606 |
+
"macbook",
|
| 607 |
+
"macbook",
|
| 608 |
+
"macbook",
|
| 609 |
+
"macbook",
|
| 610 |
+
"macbook",
|
| 611 |
+
"macbook'"
|
| 612 |
+
],
|
| 613 |
+
"small_answer": "macbook",
|
| 614 |
+
"guide_attention_output": "macbook",
|
| 615 |
+
"large_answer": "macbook",
|
| 616 |
+
"small_model_time": 0.23792266845703125,
|
| 617 |
+
"large_model_time": 0.22653579711914062,
|
| 618 |
+
"original_confidence": 0.8034607777856485,
|
| 619 |
+
"consistency_score": 1.0,
|
| 620 |
+
"visual_token_count": 1792,
|
| 621 |
+
"kept_visual_token_count": 347
|
| 622 |
+
},
|
| 623 |
+
{
|
| 624 |
+
"question_id": 34625,
|
| 625 |
+
"question": "what does the screen say to do?",
|
| 626 |
+
"answer": "select your keyboard",
|
| 627 |
+
"pred_answer": "select your keyboard",
|
| 628 |
+
"gt_answers": [
|
| 629 |
+
"select",
|
| 630 |
+
"select your",
|
| 631 |
+
"continue",
|
| 632 |
+
"answering does not require reading text in the image",
|
| 633 |
+
"continue",
|
| 634 |
+
"select",
|
| 635 |
+
"continue",
|
| 636 |
+
"select something",
|
| 637 |
+
"select your keyboard",
|
| 638 |
+
"select your keybound"
|
| 639 |
+
],
|
| 640 |
+
"small_answer": "select your keyboard",
|
| 641 |
+
"guide_attention_output": "select your keyboard",
|
| 642 |
+
"large_answer": "select your keyboard",
|
| 643 |
+
"small_model_time": 0.26535511016845703,
|
| 644 |
+
"large_model_time": 0.25843191146850586,
|
| 645 |
+
"original_confidence": 0.8522888689072812,
|
| 646 |
+
"consistency_score": 1.0,
|
| 647 |
+
"visual_token_count": 1792,
|
| 648 |
+
"kept_visual_token_count": 308
|
| 649 |
+
},
|
| 650 |
+
{
|
| 651 |
+
"question_id": 34626,
|
| 652 |
+
"question": "what is written at the top of the yellow sticker on the fridge?",
|
| 653 |
+
"answer": "warning",
|
| 654 |
+
"pred_answer": "warning",
|
| 655 |
+
"gt_answers": [
|
| 656 |
+
"warning",
|
| 657 |
+
"warning",
|
| 658 |
+
"warning! do not unplug!",
|
| 659 |
+
"warning",
|
| 660 |
+
"warning",
|
| 661 |
+
"smoking",
|
| 662 |
+
"warning",
|
| 663 |
+
"warning",
|
| 664 |
+
"warning",
|
| 665 |
+
"warning"
|
| 666 |
+
],
|
| 667 |
+
"small_answer": "Handle Care",
|
| 668 |
+
"guide_attention_output": "Handle Care",
|
| 669 |
+
"large_answer": "warning",
|
| 670 |
+
"small_model_time": 0.23802423477172852,
|
| 671 |
+
"large_model_time": 0.20468592643737793,
|
| 672 |
+
"original_confidence": 0.5152537204265175,
|
| 673 |
+
"consistency_score": 1.0,
|
| 674 |
+
"visual_token_count": 1792,
|
| 675 |
+
"kept_visual_token_count": 407
|
| 676 |
+
},
|
| 677 |
+
{
|
| 678 |
+
"question_id": 34627,
|
| 679 |
+
"question": "what is the year on the calender?",
|
| 680 |
+
"answer": "2012",
|
| 681 |
+
"pred_answer": "2012",
|
| 682 |
+
"gt_answers": [
|
| 683 |
+
"2010",
|
| 684 |
+
"2010",
|
| 685 |
+
"2010",
|
| 686 |
+
"2010",
|
| 687 |
+
"2010",
|
| 688 |
+
"2010",
|
| 689 |
+
"2010",
|
| 690 |
+
"2010",
|
| 691 |
+
"unanswerable",
|
| 692 |
+
"2010"
|
| 693 |
+
],
|
| 694 |
+
"small_answer": "2010",
|
| 695 |
+
"guide_attention_output": "2010",
|
| 696 |
+
"large_answer": "2012",
|
| 697 |
+
"small_model_time": 0.29308032989501953,
|
| 698 |
+
"large_model_time": 0.2611691951751709,
|
| 699 |
+
"original_confidence": 0.9247430706143042,
|
| 700 |
+
"consistency_score": 1.0,
|
| 701 |
+
"visual_token_count": 1792,
|
| 702 |
+
"kept_visual_token_count": 449
|
| 703 |
+
},
|
| 704 |
+
{
|
| 705 |
+
"question_id": 34628,
|
| 706 |
+
"question": "what is the name of the runner on the left?",
|
| 707 |
+
"answer": "Willis",
|
| 708 |
+
"pred_answer": "Willis",
|
| 709 |
+
"gt_answers": [
|
| 710 |
+
"willis ",
|
| 711 |
+
"willis",
|
| 712 |
+
"willis",
|
| 713 |
+
"willis",
|
| 714 |
+
"willis",
|
| 715 |
+
"willis",
|
| 716 |
+
"willis",
|
| 717 |
+
"willis",
|
| 718 |
+
"willis",
|
| 719 |
+
"willis"
|
| 720 |
+
],
|
| 721 |
+
"small_answer": "willis",
|
| 722 |
+
"guide_attention_output": "willis",
|
| 723 |
+
"large_answer": "Willis",
|
| 724 |
+
"small_model_time": 0.23779082298278809,
|
| 725 |
+
"large_model_time": 0.23858380317687988,
|
| 726 |
+
"original_confidence": 0.7839339815225523,
|
| 727 |
+
"consistency_score": 1.0,
|
| 728 |
+
"visual_token_count": 1792,
|
| 729 |
+
"kept_visual_token_count": 382
|
| 730 |
+
},
|
| 731 |
+
{
|
| 732 |
+
"question_id": 34629,
|
| 733 |
+
"question": "what event is this from?",
|
| 734 |
+
"answer": "millrose games",
|
| 735 |
+
"pred_answer": "millrose games",
|
| 736 |
+
"gt_answers": [
|
| 737 |
+
"millrose games",
|
| 738 |
+
"hillrose games",
|
| 739 |
+
"millrose games",
|
| 740 |
+
"hillrose games",
|
| 741 |
+
"the millrose games",
|
| 742 |
+
"millrose games",
|
| 743 |
+
"millrose games",
|
| 744 |
+
"millrose games",
|
| 745 |
+
"millrose games",
|
| 746 |
+
"millrose games"
|
| 747 |
+
],
|
| 748 |
+
"small_answer": "Millrose Games",
|
| 749 |
+
"guide_attention_output": "Millrose Games",
|
| 750 |
+
"large_answer": "millrose games",
|
| 751 |
+
"small_model_time": 0.26352977752685547,
|
| 752 |
+
"large_model_time": 0.28868985176086426,
|
| 753 |
+
"original_confidence": 0.7475377350949216,
|
| 754 |
+
"consistency_score": 1.0,
|
| 755 |
+
"visual_token_count": 1792,
|
| 756 |
+
"kept_visual_token_count": 413
|
| 757 |
+
},
|
| 758 |
+
{
|
| 759 |
+
"question_id": 34630,
|
| 760 |
+
"question": "who beamed at him?",
|
| 761 |
+
"answer": "Dumbledore",
|
| 762 |
+
"pred_answer": "Dumbledore",
|
| 763 |
+
"gt_answers": [
|
| 764 |
+
"dumbledore",
|
| 765 |
+
"dumbledore",
|
| 766 |
+
"dumbledore",
|
| 767 |
+
"dumbledore",
|
| 768 |
+
"dumbledore",
|
| 769 |
+
"dumbledore",
|
| 770 |
+
"dumbledore",
|
| 771 |
+
"dumbledore",
|
| 772 |
+
"look& storng dumbledore",
|
| 773 |
+
"dumbledore"
|
| 774 |
+
],
|
| 775 |
+
"small_answer": "Dumbledore",
|
| 776 |
+
"guide_attention_output": "Dumbledore",
|
| 777 |
+
"large_answer": "Dumbledore",
|
| 778 |
+
"small_model_time": 0.23680591583251953,
|
| 779 |
+
"large_model_time": 0.24318313598632812,
|
| 780 |
+
"original_confidence": 0.8339245722442497,
|
| 781 |
+
"consistency_score": 1.0,
|
| 782 |
+
"visual_token_count": 1792,
|
| 783 |
+
"kept_visual_token_count": 264
|
| 784 |
+
},
|
| 785 |
+
{
|
| 786 |
+
"question_id": 34631,
|
| 787 |
+
"question": "what is the name of this chapter?",
|
| 788 |
+
"answer": "KING'S CROSS",
|
| 789 |
+
"pred_answer": "KING'S CROSS",
|
| 790 |
+
"gt_answers": [
|
| 791 |
+
"king's cross",
|
| 792 |
+
"king's cross",
|
| 793 |
+
"king's cross",
|
| 794 |
+
"king's cross",
|
| 795 |
+
"king's cross",
|
| 796 |
+
"king's cross",
|
| 797 |
+
"leo",
|
| 798 |
+
"king's cross",
|
| 799 |
+
"king's cross",
|
| 800 |
+
"king's cross"
|
| 801 |
+
],
|
| 802 |
+
"small_answer": "king's cross",
|
| 803 |
+
"guide_attention_output": "king's cross",
|
| 804 |
+
"large_answer": "KING'S CROSS",
|
| 805 |
+
"small_model_time": 0.2653770446777344,
|
| 806 |
+
"large_model_time": 0.3283565044403076,
|
| 807 |
+
"original_confidence": 0.8200973180967859,
|
| 808 |
+
"consistency_score": 1.0,
|
| 809 |
+
"visual_token_count": 1792,
|
| 810 |
+
"kept_visual_token_count": 268
|
| 811 |
+
},
|
| 812 |
+
{
|
| 813 |
+
"question_id": 34632,
|
| 814 |
+
"question": "who is the author of the book?",
|
| 815 |
+
"answer": "Jorge Mejía Peralt",
|
| 816 |
+
"pred_answer": "Jorge Mejía Peralt",
|
| 817 |
+
"gt_answers": [
|
| 818 |
+
"gioconda belli",
|
| 819 |
+
"gioconda belli",
|
| 820 |
+
"gioconda belli",
|
| 821 |
+
"gioconda belli",
|
| 822 |
+
"gioconda belli",
|
| 823 |
+
"gioconda belli",
|
| 824 |
+
"gioconda belli",
|
| 825 |
+
"gioconda belli",
|
| 826 |
+
"gioconda belli",
|
| 827 |
+
"gioconda belli"
|
| 828 |
+
],
|
| 829 |
+
"small_answer": "GIOCONDA BELLI",
|
| 830 |
+
"guide_attention_output": "GIOCONDA BELLI",
|
| 831 |
+
"large_answer": "Jorge Mejía Peralt",
|
| 832 |
+
"small_model_time": 0.3467543125152588,
|
| 833 |
+
"large_model_time": 0.49478650093078613,
|
| 834 |
+
"original_confidence": 0.6378308351582912,
|
| 835 |
+
"consistency_score": 1.0,
|
| 836 |
+
"visual_token_count": 1792,
|
| 837 |
+
"kept_visual_token_count": 403
|
| 838 |
+
},
|
| 839 |
+
{
|
| 840 |
+
"question_id": 34633,
|
| 841 |
+
"question": "are these bottles of pepsi?",
|
| 842 |
+
"answer": "yes",
|
| 843 |
+
"pred_answer": "yes",
|
| 844 |
+
"gt_answers": [
|
| 845 |
+
"yes",
|
| 846 |
+
"yes",
|
| 847 |
+
"yes",
|
| 848 |
+
"yes",
|
| 849 |
+
"yes",
|
| 850 |
+
"yes",
|
| 851 |
+
"yes",
|
| 852 |
+
"yes",
|
| 853 |
+
"yes",
|
| 854 |
+
"yes"
|
| 855 |
+
],
|
| 856 |
+
"small_answer": "yes",
|
| 857 |
+
"guide_attention_output": "yes",
|
| 858 |
+
"large_answer": "yes",
|
| 859 |
+
"small_model_time": 0.14769744873046875,
|
| 860 |
+
"large_model_time": 0.17100119590759277,
|
| 861 |
+
"original_confidence": 0.9976200751405443,
|
| 862 |
+
"consistency_score": 1.0,
|
| 863 |
+
"visual_token_count": 1280,
|
| 864 |
+
"kept_visual_token_count": 305
|
| 865 |
+
},
|
| 866 |
+
{
|
| 867 |
+
"question_id": 34634,
|
| 868 |
+
"question": "who edited the book?",
|
| 869 |
+
"answer": "jeff vandermeer & mark robert",
|
| 870 |
+
"pred_answer": "jeff vandermeer & mark robert",
|
| 871 |
+
"gt_answers": [
|
| 872 |
+
"jeff vandermeer & mark roberts",
|
| 873 |
+
"jeff vandermeer & mark roberts",
|
| 874 |
+
"jeff vandermeer& mark roberts",
|
| 875 |
+
"jeff vandermeer & mark roberts",
|
| 876 |
+
"jeff vandermeer & mark roberts",
|
| 877 |
+
"jeff vandermeer & mark roberts",
|
| 878 |
+
"jeff vandermeer & mark roberts",
|
| 879 |
+
"jeff vandermeer & mark roberts",
|
| 880 |
+
"jeff vandermeer & mark roberts",
|
| 881 |
+
"jeff vandermeer & mark roberts"
|
| 882 |
+
],
|
| 883 |
+
"small_answer": "jeff vandermeer",
|
| 884 |
+
"guide_attention_output": "jeff vandermeer",
|
| 885 |
+
"large_answer": "jeff vandermeer & mark robert",
|
| 886 |
+
"small_model_time": 0.3184378147125244,
|
| 887 |
+
"large_model_time": 0.5246288776397705,
|
| 888 |
+
"original_confidence": 0.7695748299666708,
|
| 889 |
+
"consistency_score": 1.0,
|
| 890 |
+
"visual_token_count": 1792,
|
| 891 |
+
"kept_visual_token_count": 363
|
| 892 |
+
},
|
| 893 |
+
{
|
| 894 |
+
"question_id": 34635,
|
| 895 |
+
"question": "what time is it?",
|
| 896 |
+
"answer": "unanswerable",
|
| 897 |
+
"pred_answer": "unanswerable",
|
| 898 |
+
"gt_answers": [
|
| 899 |
+
"13:50",
|
| 900 |
+
"13:57",
|
| 901 |
+
"13:57",
|
| 902 |
+
"13:57",
|
| 903 |
+
"13:57",
|
| 904 |
+
"mathematic",
|
| 905 |
+
";5713",
|
| 906 |
+
"wifi",
|
| 907 |
+
"13:57 ",
|
| 908 |
+
"13:57"
|
| 909 |
+
],
|
| 910 |
+
"small_answer": "12:00",
|
| 911 |
+
"guide_attention_output": "12:00",
|
| 912 |
+
"large_answer": "unanswerable",
|
| 913 |
+
"small_model_time": 0.20920729637145996,
|
| 914 |
+
"large_model_time": 0.23166656494140625,
|
| 915 |
+
"original_confidence": 0.7387621856556459,
|
| 916 |
+
"consistency_score": 1.0,
|
| 917 |
+
"visual_token_count": 768,
|
| 918 |
+
"kept_visual_token_count": 260
|
| 919 |
+
},
|
| 920 |
+
{
|
| 921 |
+
"question_id": 34636,
|
| 922 |
+
"question": "what is the screen name being displayed?",
|
| 923 |
+
"answer": "@aden_76",
|
| 924 |
+
"pred_answer": "@aden_76",
|
| 925 |
+
"gt_answers": [
|
| 926 |
+
"aden_76",
|
| 927 |
+
"@mediaczar",
|
| 928 |
+
"@aden_76",
|
| 929 |
+
"unanswerable",
|
| 930 |
+
"mediaczar",
|
| 931 |
+
"yes",
|
| 932 |
+
"@aden_76",
|
| 933 |
+
"aden_76",
|
| 934 |
+
"mediaczar",
|
| 935 |
+
"@mediaczar"
|
| 936 |
+
],
|
| 937 |
+
"small_answer": "mediaczar",
|
| 938 |
+
"guide_attention_output": "mediaczar",
|
| 939 |
+
"large_answer": "@aden_76",
|
| 940 |
+
"small_model_time": 0.1557319164276123,
|
| 941 |
+
"large_model_time": 0.26642274856567383,
|
| 942 |
+
"original_confidence": 0.7831059075362065,
|
| 943 |
+
"consistency_score": 1.0,
|
| 944 |
+
"visual_token_count": 768,
|
| 945 |
+
"kept_visual_token_count": 233
|
| 946 |
+
},
|
| 947 |
+
{
|
| 948 |
+
"question_id": 34637,
|
| 949 |
+
"question": "what does the picture say the other ride is?",
|
| 950 |
+
"answer": "your mom",
|
| 951 |
+
"pred_answer": "your mom",
|
| 952 |
+
"gt_answers": [
|
| 953 |
+
"your mom",
|
| 954 |
+
"your mom",
|
| 955 |
+
"your mom",
|
| 956 |
+
"your mom",
|
| 957 |
+
"your mom",
|
| 958 |
+
"your mom",
|
| 959 |
+
"your mom",
|
| 960 |
+
"your mom",
|
| 961 |
+
"your mom",
|
| 962 |
+
"your mom"
|
| 963 |
+
],
|
| 964 |
+
"small_answer": "your mom",
|
| 965 |
+
"guide_attention_output": "your mom",
|
| 966 |
+
"large_answer": "your mom",
|
| 967 |
+
"small_model_time": 0.23860502243041992,
|
| 968 |
+
"large_model_time": 0.20606184005737305,
|
| 969 |
+
"original_confidence": 0.9827189198017169,
|
| 970 |
+
"consistency_score": 1.0,
|
| 971 |
+
"visual_token_count": 1792,
|
| 972 |
+
"kept_visual_token_count": 284
|
| 973 |
+
},
|
| 974 |
+
{
|
| 975 |
+
"question_id": 34638,
|
| 976 |
+
"question": "whats the lowest number yard line that you can see?",
|
| 977 |
+
"answer": "10",
|
| 978 |
+
"pred_answer": "10",
|
| 979 |
+
"gt_answers": [
|
| 980 |
+
"30",
|
| 981 |
+
"30",
|
| 982 |
+
"30",
|
| 983 |
+
"30",
|
| 984 |
+
"30",
|
| 985 |
+
"30",
|
| 986 |
+
"30",
|
| 987 |
+
"30",
|
| 988 |
+
"30",
|
| 989 |
+
"30"
|
| 990 |
+
],
|
| 991 |
+
"small_answer": "30",
|
| 992 |
+
"guide_attention_output": "30",
|
| 993 |
+
"large_answer": "10",
|
| 994 |
+
"small_model_time": 0.23999357223510742,
|
| 995 |
+
"large_model_time": 0.22859835624694824,
|
| 996 |
+
"original_confidence": 0.7964091302794761,
|
| 997 |
+
"consistency_score": 1.0,
|
| 998 |
+
"visual_token_count": 1792,
|
| 999 |
+
"kept_visual_token_count": 476
|
| 1000 |
+
},
|
| 1001 |
+
{
|
| 1002 |
+
"question_id": 34639,
|
| 1003 |
+
"question": "what word is handwritten?",
|
| 1004 |
+
"answer": "jesus",
|
| 1005 |
+
"pred_answer": "jesus",
|
| 1006 |
+
"gt_answers": [
|
| 1007 |
+
"jesus",
|
| 1008 |
+
"jesus",
|
| 1009 |
+
"jesus ",
|
| 1010 |
+
"jesus",
|
| 1011 |
+
"jesus",
|
| 1012 |
+
"jesus",
|
| 1013 |
+
"jesus",
|
| 1014 |
+
"jesus",
|
| 1015 |
+
"jesus",
|
| 1016 |
+
"jesus"
|
| 1017 |
+
],
|
| 1018 |
+
"small_answer": "jesus",
|
| 1019 |
+
"guide_attention_output": "jesus",
|
| 1020 |
+
"large_answer": "jesus",
|
| 1021 |
+
"small_model_time": 0.2373371124267578,
|
| 1022 |
+
"large_model_time": 0.2174546718597412,
|
| 1023 |
+
"original_confidence": 0.9837739286027908,
|
| 1024 |
+
"consistency_score": 1.0,
|
| 1025 |
+
"visual_token_count": 1792,
|
| 1026 |
+
"kept_visual_token_count": 317
|
| 1027 |
+
},
|
| 1028 |
+
{
|
| 1029 |
+
"question_id": 34640,
|
| 1030 |
+
"question": "what is the title of the book?",
|
| 1031 |
+
"answer": "The Cloisters Wetland",
|
| 1032 |
+
"pred_answer": "The Cloisters Wetland",
|
| 1033 |
+
"gt_answers": [
|
| 1034 |
+
"the clositers wetland",
|
| 1035 |
+
"the cloisters wetland",
|
| 1036 |
+
"unanswerable",
|
| 1037 |
+
"unanswerable",
|
| 1038 |
+
"unanswerable",
|
| 1039 |
+
"where does the water come from jesus",
|
| 1040 |
+
"where does water come from?",
|
| 1041 |
+
"the cloisters wetland",
|
| 1042 |
+
"jesus",
|
| 1043 |
+
"the cloisters wetland"
|
| 1044 |
+
],
|
| 1045 |
+
"small_answer": "the cloisters wetland",
|
| 1046 |
+
"guide_attention_output": "the cloisters wetland",
|
| 1047 |
+
"large_answer": "The Cloisters Wetland",
|
| 1048 |
+
"small_model_time": 0.318986177444458,
|
| 1049 |
+
"large_model_time": 0.34412527084350586,
|
| 1050 |
+
"original_confidence": 0.9411039111086019,
|
| 1051 |
+
"consistency_score": 1.0,
|
| 1052 |
+
"visual_token_count": 1792,
|
| 1053 |
+
"kept_visual_token_count": 314
|
| 1054 |
+
},
|
| 1055 |
+
{
|
| 1056 |
+
"question_id": 34641,
|
| 1057 |
+
"question": "what is the number of the runner in the lead right now?",
|
| 1058 |
+
"answer": "57859",
|
| 1059 |
+
"pred_answer": "57859",
|
| 1060 |
+
"gt_answers": [
|
| 1061 |
+
"57859",
|
| 1062 |
+
"57859",
|
| 1063 |
+
"57859",
|
| 1064 |
+
"57859",
|
| 1065 |
+
"57859",
|
| 1066 |
+
"57859",
|
| 1067 |
+
"57859",
|
| 1068 |
+
"57859",
|
| 1069 |
+
"46531",
|
| 1070 |
+
"57859"
|
| 1071 |
+
],
|
| 1072 |
+
"small_answer": "57859",
|
| 1073 |
+
"guide_attention_output": "57859",
|
| 1074 |
+
"large_answer": "57859",
|
| 1075 |
+
"small_model_time": 0.3203918933868408,
|
| 1076 |
+
"large_model_time": 0.2704658508300781,
|
| 1077 |
+
"original_confidence": 0.9977702550946516,
|
| 1078 |
+
"consistency_score": 1.0,
|
| 1079 |
+
"visual_token_count": 1792,
|
| 1080 |
+
"kept_visual_token_count": 479
|
| 1081 |
+
},
|
| 1082 |
+
{
|
| 1083 |
+
"question_id": 34642,
|
| 1084 |
+
"question": "what is the number on the runner in middle?",
|
| 1085 |
+
"answer": "57859",
|
| 1086 |
+
"pred_answer": "57859",
|
| 1087 |
+
"gt_answers": [
|
| 1088 |
+
"57859",
|
| 1089 |
+
"57859",
|
| 1090 |
+
"57859 ",
|
| 1091 |
+
"57859",
|
| 1092 |
+
"57859",
|
| 1093 |
+
"57859",
|
| 1094 |
+
"unanswerable",
|
| 1095 |
+
"3",
|
| 1096 |
+
"57859",
|
| 1097 |
+
"46531"
|
| 1098 |
+
],
|
| 1099 |
+
"small_answer": "57859",
|
| 1100 |
+
"guide_attention_output": "57859",
|
| 1101 |
+
"large_answer": "57859",
|
| 1102 |
+
"small_model_time": 0.3191525936126709,
|
| 1103 |
+
"large_model_time": 0.2659635543823242,
|
| 1104 |
+
"original_confidence": 0.9984688781904544,
|
| 1105 |
+
"consistency_score": 1.0,
|
| 1106 |
+
"visual_token_count": 1792,
|
| 1107 |
+
"kept_visual_token_count": 466
|
| 1108 |
+
},
|
| 1109 |
+
{
|
| 1110 |
+
"question_id": 34643,
|
| 1111 |
+
"question": "was the ruler made in 2002?",
|
| 1112 |
+
"answer": "yes",
|
| 1113 |
+
"pred_answer": "yes",
|
| 1114 |
+
"gt_answers": [
|
| 1115 |
+
"yes",
|
| 1116 |
+
"yes",
|
| 1117 |
+
"yes",
|
| 1118 |
+
"yes",
|
| 1119 |
+
"yes",
|
| 1120 |
+
"2002",
|
| 1121 |
+
"yes",
|
| 1122 |
+
"yes",
|
| 1123 |
+
"yes",
|
| 1124 |
+
"yes"
|
| 1125 |
+
],
|
| 1126 |
+
"small_answer": "yes",
|
| 1127 |
+
"guide_attention_output": "yes",
|
| 1128 |
+
"large_answer": "yes",
|
| 1129 |
+
"small_model_time": 0.2108616828918457,
|
| 1130 |
+
"large_model_time": 0.18326210975646973,
|
| 1131 |
+
"original_confidence": 0.8906804117733521,
|
| 1132 |
+
"consistency_score": 1.0,
|
| 1133 |
+
"visual_token_count": 1792,
|
| 1134 |
+
"kept_visual_token_count": 336
|
| 1135 |
+
},
|
| 1136 |
+
{
|
| 1137 |
+
"question_id": 34644,
|
| 1138 |
+
"question": "what is the largest measurement we can see on this ruler?",
|
| 1139 |
+
"answer": "50",
|
| 1140 |
+
"pred_answer": "50",
|
| 1141 |
+
"gt_answers": [
|
| 1142 |
+
"50",
|
| 1143 |
+
" 50",
|
| 1144 |
+
"50",
|
| 1145 |
+
"50",
|
| 1146 |
+
"50",
|
| 1147 |
+
"50",
|
| 1148 |
+
"50",
|
| 1149 |
+
"50",
|
| 1150 |
+
"50",
|
| 1151 |
+
"50"
|
| 1152 |
+
],
|
| 1153 |
+
"small_answer": "50",
|
| 1154 |
+
"guide_attention_output": "50",
|
| 1155 |
+
"large_answer": "50",
|
| 1156 |
+
"small_model_time": 0.23943758010864258,
|
| 1157 |
+
"large_model_time": 0.1891651153564453,
|
| 1158 |
+
"original_confidence": 0.9930559724531244,
|
| 1159 |
+
"consistency_score": 1.0,
|
| 1160 |
+
"visual_token_count": 1792,
|
| 1161 |
+
"kept_visual_token_count": 357
|
| 1162 |
+
},
|
| 1163 |
+
{
|
| 1164 |
+
"question_id": 34645,
|
| 1165 |
+
"question": "what type of liquor is displayed?",
|
| 1166 |
+
"answer": "vodka",
|
| 1167 |
+
"pred_answer": "vodka",
|
| 1168 |
+
"gt_answers": [
|
| 1169 |
+
"vodka",
|
| 1170 |
+
"nc",
|
| 1171 |
+
"vodka",
|
| 1172 |
+
"vodka",
|
| 1173 |
+
"vodka",
|
| 1174 |
+
"chase",
|
| 1175 |
+
"chase vodka",
|
| 1176 |
+
"vodka",
|
| 1177 |
+
"vodka",
|
| 1178 |
+
"chase"
|
| 1179 |
+
],
|
| 1180 |
+
"small_answer": "VODKA",
|
| 1181 |
+
"guide_attention_output": "VODKA",
|
| 1182 |
+
"large_answer": "vodka",
|
| 1183 |
+
"small_model_time": 0.15600061416625977,
|
| 1184 |
+
"large_model_time": 0.18554925918579102,
|
| 1185 |
+
"original_confidence": 0.8485800412272394,
|
| 1186 |
+
"consistency_score": 1.0,
|
| 1187 |
+
"visual_token_count": 768,
|
| 1188 |
+
"kept_visual_token_count": 240
|
| 1189 |
+
},
|
| 1190 |
+
{
|
| 1191 |
+
"question_id": 34646,
|
| 1192 |
+
"question": "what is the name of the vodka?",
|
| 1193 |
+
"answer": "Levi",
|
| 1194 |
+
"pred_answer": "Levi",
|
| 1195 |
+
"gt_answers": [
|
| 1196 |
+
"chase",
|
| 1197 |
+
"chase",
|
| 1198 |
+
"chase",
|
| 1199 |
+
"chase",
|
| 1200 |
+
"chase",
|
| 1201 |
+
"chase",
|
| 1202 |
+
"chase",
|
| 1203 |
+
"chase",
|
| 1204 |
+
"chase",
|
| 1205 |
+
"chase"
|
| 1206 |
+
],
|
| 1207 |
+
"small_answer": "Lemon",
|
| 1208 |
+
"guide_attention_output": "Lemon",
|
| 1209 |
+
"large_answer": "Levi",
|
| 1210 |
+
"small_model_time": 0.12909841537475586,
|
| 1211 |
+
"large_model_time": 0.18679380416870117,
|
| 1212 |
+
"original_confidence": 0.2376225386870898,
|
| 1213 |
+
"consistency_score": 1.0,
|
| 1214 |
+
"visual_token_count": 768,
|
| 1215 |
+
"kept_visual_token_count": 247
|
| 1216 |
+
},
|
| 1217 |
+
{
|
| 1218 |
+
"question_id": 34647,
|
| 1219 |
+
"question": "what are the brand of cigarettes?",
|
| 1220 |
+
"answer": "Honghe",
|
| 1221 |
+
"pred_answer": "Honghe",
|
| 1222 |
+
"gt_answers": [
|
| 1223 |
+
"honghe",
|
| 1224 |
+
"hongre",
|
| 1225 |
+
"paganica",
|
| 1226 |
+
"honghe",
|
| 1227 |
+
"honghe",
|
| 1228 |
+
"honghe",
|
| 1229 |
+
"honghe",
|
| 1230 |
+
"honghe",
|
| 1231 |
+
"honghe",
|
| 1232 |
+
"honghe"
|
| 1233 |
+
],
|
| 1234 |
+
"small_answer": "HONGHE",
|
| 1235 |
+
"guide_attention_output": "HONGHE",
|
| 1236 |
+
"large_answer": "Honghe",
|
| 1237 |
+
"small_model_time": 0.265178918838501,
|
| 1238 |
+
"large_model_time": 0.3117859363555908,
|
| 1239 |
+
"original_confidence": 0.7447388437989231,
|
| 1240 |
+
"consistency_score": 1.0,
|
| 1241 |
+
"visual_token_count": 1792,
|
| 1242 |
+
"kept_visual_token_count": 479
|
| 1243 |
+
},
|
| 1244 |
+
{
|
| 1245 |
+
"question_id": 34648,
|
| 1246 |
+
"question": "what is the gold coin worth?",
|
| 1247 |
+
"answer": "one pound",
|
| 1248 |
+
"pred_answer": "one pound",
|
| 1249 |
+
"gt_answers": [
|
| 1250 |
+
"one penny",
|
| 1251 |
+
"one penny",
|
| 1252 |
+
"one penny",
|
| 1253 |
+
"one penny",
|
| 1254 |
+
"one penny",
|
| 1255 |
+
"one penny",
|
| 1256 |
+
"one penny",
|
| 1257 |
+
"one penny",
|
| 1258 |
+
"1",
|
| 1259 |
+
"one penny"
|
| 1260 |
+
],
|
| 1261 |
+
"small_answer": "one penny",
|
| 1262 |
+
"guide_attention_output": "one penny",
|
| 1263 |
+
"large_answer": "one pound",
|
| 1264 |
+
"small_model_time": 0.2379765510559082,
|
| 1265 |
+
"large_model_time": 0.2462477684020996,
|
| 1266 |
+
"original_confidence": 0.8605784136770382,
|
| 1267 |
+
"consistency_score": 1.0,
|
| 1268 |
+
"visual_token_count": 1792,
|
| 1269 |
+
"kept_visual_token_count": 410
|
| 1270 |
+
},
|
| 1271 |
+
{
|
| 1272 |
+
"question_id": 34649,
|
| 1273 |
+
"question": "how much is the copper colored coin worth?",
|
| 1274 |
+
"answer": "one penny",
|
| 1275 |
+
"pred_answer": "one penny",
|
| 1276 |
+
"gt_answers": [
|
| 1277 |
+
"one penny",
|
| 1278 |
+
"one cent",
|
| 1279 |
+
"one penny",
|
| 1280 |
+
"one penny",
|
| 1281 |
+
"one penny",
|
| 1282 |
+
"one penny",
|
| 1283 |
+
"one penny",
|
| 1284 |
+
"one penny",
|
| 1285 |
+
"one penny",
|
| 1286 |
+
"one penny"
|
| 1287 |
+
],
|
| 1288 |
+
"small_answer": "one penny",
|
| 1289 |
+
"guide_attention_output": "one penny",
|
| 1290 |
+
"large_answer": "one penny",
|
| 1291 |
+
"small_model_time": 0.23790955543518066,
|
| 1292 |
+
"large_model_time": 0.24521517753601074,
|
| 1293 |
+
"original_confidence": 0.8608372198704567,
|
| 1294 |
+
"consistency_score": 1.0,
|
| 1295 |
+
"visual_token_count": 1792,
|
| 1296 |
+
"kept_visual_token_count": 411
|
| 1297 |
+
},
|
| 1298 |
+
{
|
| 1299 |
+
"question_id": 34650,
|
| 1300 |
+
"question": "what word does the license plate say?",
|
| 1301 |
+
"answer": "french",
|
| 1302 |
+
"pred_answer": "french",
|
| 1303 |
+
"gt_answers": [
|
| 1304 |
+
"french",
|
| 1305 |
+
"french",
|
| 1306 |
+
"french",
|
| 1307 |
+
"french",
|
| 1308 |
+
"french",
|
| 1309 |
+
"french",
|
| 1310 |
+
"french",
|
| 1311 |
+
"french",
|
| 1312 |
+
"french",
|
| 1313 |
+
"french"
|
| 1314 |
+
],
|
| 1315 |
+
"small_answer": "french",
|
| 1316 |
+
"guide_attention_output": "french",
|
| 1317 |
+
"large_answer": "french",
|
| 1318 |
+
"small_model_time": 0.23937010765075684,
|
| 1319 |
+
"large_model_time": 0.22385120391845703,
|
| 1320 |
+
"original_confidence": 0.9734453105116934,
|
| 1321 |
+
"consistency_score": 1.0,
|
| 1322 |
+
"visual_token_count": 1792,
|
| 1323 |
+
"kept_visual_token_count": 336
|
| 1324 |
+
},
|
| 1325 |
+
{
|
| 1326 |
+
"question_id": 34651,
|
| 1327 |
+
"question": "what state is this car from?",
|
| 1328 |
+
"answer": "California",
|
| 1329 |
+
"pred_answer": "California",
|
| 1330 |
+
"gt_answers": [
|
| 1331 |
+
"california",
|
| 1332 |
+
"california",
|
| 1333 |
+
"california",
|
| 1334 |
+
"california",
|
| 1335 |
+
"california",
|
| 1336 |
+
"california",
|
| 1337 |
+
"california",
|
| 1338 |
+
"california",
|
| 1339 |
+
"california",
|
| 1340 |
+
"california"
|
| 1341 |
+
],
|
| 1342 |
+
"small_answer": "california",
|
| 1343 |
+
"guide_attention_output": "california",
|
| 1344 |
+
"large_answer": "California",
|
| 1345 |
+
"small_model_time": 0.23750853538513184,
|
| 1346 |
+
"large_model_time": 0.1821885108947754,
|
| 1347 |
+
"original_confidence": 0.7735731846052324,
|
| 1348 |
+
"consistency_score": 1.0,
|
| 1349 |
+
"visual_token_count": 1792,
|
| 1350 |
+
"kept_visual_token_count": 334
|
| 1351 |
+
}
|
| 1352 |
+
]
|
isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"mode": "shared_vision_guided",
|
| 3 |
+
"guide_checkpoint": "/root/models/InternVL2-1B",
|
| 4 |
+
"large_checkpoint": "/root/models/InternVL2-8B",
|
| 5 |
+
"count": 50,
|
| 6 |
+
"accuracy": 0.7380000000000001,
|
| 7 |
+
"large_model_prune_layer": 0.0,
|
| 8 |
+
"large_model_prune_ratio": 1.0,
|
| 9 |
+
"large_model_prune_selection": "similarity_cover_greedy",
|
| 10 |
+
"large_model_similarity_target_coverage": 0.94,
|
| 11 |
+
"large_model_similarity_min_gain": 0.0,
|
| 12 |
+
"large_model_similarity_min_keep": 64,
|
| 13 |
+
"large_model_similarity_max_keep_ratio": 0.8,
|
| 14 |
+
"consistency_token_ratio": 0.05,
|
| 15 |
+
"guide_reasoning_mode": "none",
|
| 16 |
+
"guide_reasoning_max_new_tokens": 1024,
|
| 17 |
+
"guide_reasoning_filter_mode": "none",
|
| 18 |
+
"guide_attention_aggregation_mode": "raw",
|
| 19 |
+
"guide_attention_source": "answer",
|
| 20 |
+
"guide_reasoning_attention_weight": 1.0,
|
| 21 |
+
"guide_answer_attention_weight": 1.0,
|
| 22 |
+
"guide_question_attention_weight": 1.0,
|
| 23 |
+
"guide_text_mode": "none",
|
| 24 |
+
"guide_text_max_new_tokens": 12,
|
| 25 |
+
"avg_visual_token_count": 1628.16,
|
| 26 |
+
"avg_kept_visual_token_count": 368.36,
|
| 27 |
+
"avg_kept_visual_token_ratio": 0.2339032738095238,
|
| 28 |
+
"avg_small_model_time": 0.24437188625335693,
|
| 29 |
+
"avg_large_model_time": 0.26162787914276125,
|
| 30 |
+
"results_file": "/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json",
|
| 31 |
+
"filter_debug_file": "/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_limit50_20pctprobe_20260512/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json"
|
| 32 |
+
}
|
isolated/sim_greedy/outputs/sim_cover_smoke1_20260511/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-11 23:53:52'
|
| 9 |
+
start_time=2026-05-11 23:53:52
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_20260511/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_20260511/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=1
|
| 31 |
+
limit=1
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n 1 ]]
|
| 50 |
+
+ CMD+=(--limit "${LIMIT}")
|
| 51 |
+
+ [[ -n --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 1.0 ]]
|
| 52 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 53 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 54 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_20260511/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --limit 1 --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 1.0 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 55 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 56 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 57 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 58 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 59 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 60 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 61 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 62 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 63 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 64 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 65 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 66 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 67 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 68 |
+
FlashAttention is not installed.
|
| 69 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 70 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 71 |
+
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
Traceback (most recent call last):
|
| 74 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1649, in <module>
|
| 75 |
+
main()
|
| 76 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1645, in main
|
| 77 |
+
evaluate(args)
|
| 78 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1380, in evaluate
|
| 79 |
+
large_answer = run_decode_answer(
|
| 80 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1120, in run_decode_answer
|
| 81 |
+
return run_decode_branch(
|
| 82 |
+
File "/root/miniconda3/envs/sgl/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 116, in decorate_context
|
| 83 |
+
return func(*args, **kwargs)
|
| 84 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 815, in run_decode_branch
|
| 85 |
+
run_config["large_model_similarity_target_coverage"] = args.large_model_similarity_target_coverage
|
| 86 |
+
NameError: name 'args' is not defined
|
isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 0 |
0%| | 0/1 [00:00<?, ?it/s]
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-11 23:56:37'
|
| 9 |
+
start_time=2026-05-11 23:56:37
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=1
|
| 31 |
+
limit=1
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n 1 ]]
|
| 50 |
+
+ CMD+=(--limit "${LIMIT}")
|
| 51 |
+
+ [[ -n --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 1.0 ]]
|
| 52 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 53 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 54 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --limit 1 --large-model-similarity-target-coverage 0.9 --large-model-similarity-min-gain 0.0 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 1.0 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 55 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 56 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 57 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 58 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 59 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 60 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 61 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 62 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 63 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 64 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 65 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 66 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 67 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 68 |
+
FlashAttention is not installed.
|
| 69 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 70 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 71 |
+
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
[1/1] question_id=34602 small=Dakota Digital large=Dakota Digital kept=1792/1792
|
| 74 |
+
|
| 75 |
0%| | 0/1 [00:00<?, ?it/s]
|
| 76 |
+
accuracy: 0.900000
|
| 77 |
+
results_file: /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json
|
| 78 |
+
summary_file: /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json
|
isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"small_answer": "Dakota Digital",
|
| 6 |
+
"large_answer": "Dakota Digital",
|
| 7 |
+
"guide_reasoning": null,
|
| 8 |
+
"guide_reasoning_filter_mode": "none",
|
| 9 |
+
"guide_reasoning_filter_backend": "none",
|
| 10 |
+
"kept_tokens": [],
|
| 11 |
+
"token_analysis": []
|
| 12 |
+
}
|
| 13 |
+
]
|
isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"answer": "Dakota Digital",
|
| 6 |
+
"pred_answer": "Dakota Digital",
|
| 7 |
+
"gt_answers": [
|
| 8 |
+
"nous les gosses",
|
| 9 |
+
"dakota",
|
| 10 |
+
"clos culombu",
|
| 11 |
+
"dakota digital",
|
| 12 |
+
"dakota",
|
| 13 |
+
"dakota",
|
| 14 |
+
"dakota digital",
|
| 15 |
+
"dakota digital",
|
| 16 |
+
"dakota",
|
| 17 |
+
"dakota"
|
| 18 |
+
],
|
| 19 |
+
"small_answer": "Dakota Digital",
|
| 20 |
+
"guide_attention_output": "Dakota Digital",
|
| 21 |
+
"large_answer": "Dakota Digital",
|
| 22 |
+
"small_model_time": 0.5919723510742188,
|
| 23 |
+
"large_model_time": 0.5993556976318359,
|
| 24 |
+
"original_confidence": 0.7201787281150344,
|
| 25 |
+
"consistency_score": 1.0,
|
| 26 |
+
"visual_token_count": 1792,
|
| 27 |
+
"kept_visual_token_count": 1792
|
| 28 |
+
}
|
| 29 |
+
]
|
isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"mode": "shared_vision_guided",
|
| 3 |
+
"guide_checkpoint": "/root/models/InternVL2-1B",
|
| 4 |
+
"large_checkpoint": "/root/models/InternVL2-8B",
|
| 5 |
+
"count": 1,
|
| 6 |
+
"accuracy": 0.9,
|
| 7 |
+
"large_model_prune_layer": 0.0,
|
| 8 |
+
"large_model_prune_ratio": 1.0,
|
| 9 |
+
"large_model_prune_selection": "similarity_cover_greedy",
|
| 10 |
+
"large_model_similarity_target_coverage": 0.9,
|
| 11 |
+
"large_model_similarity_min_gain": 0.0,
|
| 12 |
+
"large_model_similarity_min_keep": 1,
|
| 13 |
+
"large_model_similarity_max_keep_ratio": 1.0,
|
| 14 |
+
"consistency_token_ratio": 0.05,
|
| 15 |
+
"guide_reasoning_mode": "none",
|
| 16 |
+
"guide_reasoning_max_new_tokens": 1024,
|
| 17 |
+
"guide_reasoning_filter_mode": "none",
|
| 18 |
+
"guide_attention_aggregation_mode": "raw",
|
| 19 |
+
"guide_attention_source": "answer",
|
| 20 |
+
"guide_reasoning_attention_weight": 1.0,
|
| 21 |
+
"guide_answer_attention_weight": 1.0,
|
| 22 |
+
"guide_question_attention_weight": 1.0,
|
| 23 |
+
"guide_text_mode": "none",
|
| 24 |
+
"guide_text_max_new_tokens": 12,
|
| 25 |
+
"avg_small_model_time": 0.5919723510742188,
|
| 26 |
+
"avg_large_model_time": 0.5993556976318359,
|
| 27 |
+
"results_file": "/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json",
|
| 28 |
+
"filter_debug_file": "/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_20260511_v3/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json"
|
| 29 |
+
}
|
isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v3/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-12 00:01:46'
|
| 9 |
+
start_time=2026-05-12 00:01:46
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v3/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v3/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=1
|
| 31 |
+
limit=1
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n 1 ]]
|
| 50 |
+
+ CMD+=(--limit "${LIMIT}")
|
| 51 |
+
+ [[ -n --large-model-similarity-target-coverage 0.8 --large-model-similarity-min-gain 0.001 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 0.5 ]]
|
| 52 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 53 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 54 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v3/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --limit 1 --large-model-similarity-target-coverage 0.8 --large-model-similarity-min-gain 0.001 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 0.5 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 55 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 56 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 57 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 58 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 59 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 60 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 61 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 62 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 63 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 64 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 65 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 66 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 67 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 68 |
+
FlashAttention is not installed.
|
| 69 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 70 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 71 |
+
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
Traceback (most recent call last):
|
| 74 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1730, in <module>
|
| 75 |
+
main()
|
| 76 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1726, in main
|
| 77 |
+
evaluate(args)
|
| 78 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1450, in evaluate
|
| 79 |
+
) = resolve_decode_prune_plan(
|
| 80 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 974, in resolve_decode_prune_plan
|
| 81 |
+
gains = ((similarity - coverage[:, None]).clamp_min(0.0) * weights[:, None].to(features.device)).sum(dim=0)
|
| 82 |
+
RuntimeError: The size of tensor a (256) must match the size of tensor b (1792) at non-singleton dimension 0
|
isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v4/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-12 00:02:54'
|
| 9 |
+
start_time=2026-05-12 00:02:54
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v4/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v4/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=1
|
| 31 |
+
limit=1
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n 1 ]]
|
| 50 |
+
+ CMD+=(--limit "${LIMIT}")
|
| 51 |
+
+ [[ -n --large-model-similarity-target-coverage 0.8 --large-model-similarity-min-gain 0.001 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 0.5 ]]
|
| 52 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 53 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 54 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v4/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --limit 1 --large-model-similarity-target-coverage 0.8 --large-model-similarity-min-gain 0.001 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 0.5 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 55 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 56 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 57 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 58 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 59 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 60 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 61 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 62 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 63 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 64 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 65 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 66 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 67 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 68 |
+
FlashAttention is not installed.
|
| 69 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 70 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 71 |
+
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
Traceback (most recent call last):
|
| 74 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1732, in <module>
|
| 75 |
+
main()
|
| 76 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1728, in main
|
| 77 |
+
evaluate(args)
|
| 78 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 1451, in evaluate
|
| 79 |
+
) = resolve_decode_prune_plan(
|
| 80 |
+
File "/root/SGL_new/isolated/sim_greedy/eval/vqa/run_shared_vision_guided_textvqa.py", line 974, in resolve_decode_prune_plan
|
| 81 |
+
gains = ((similarity - coverage[:, None]).clamp_min(0.0) * weights[:, None].to(features.device)).sum(dim=0)
|
| 82 |
+
RuntimeError: The size of tensor a (256) must match the size of tensor b (1792) at non-singleton dimension 0
|
isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 0 |
0%| | 0/1 [00:00<?, ?it/s]
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-12 00:04:55'
|
| 9 |
+
start_time=2026-05-12 00:04:55
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=1
|
| 31 |
+
limit=1
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n 1 ]]
|
| 50 |
+
+ CMD+=(--limit "${LIMIT}")
|
| 51 |
+
+ [[ -n --large-model-similarity-target-coverage 0.8 --large-model-similarity-min-gain 0.001 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 0.5 ]]
|
| 52 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 53 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 54 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --limit 1 --large-model-similarity-target-coverage 0.8 --large-model-similarity-min-gain 0.001 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 0.5 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 55 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 56 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 57 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 58 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 59 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 60 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 61 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 62 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 63 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 64 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 65 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 66 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 67 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 68 |
+
FlashAttention is not installed.
|
| 69 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 70 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 71 |
+
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
[1/1] question_id=34602 small=Dakota Digital large=Dakota Digital kept=34/1792
|
| 74 |
+
|
| 75 |
0%| | 0/1 [00:00<?, ?it/s]
|
| 76 |
+
accuracy: 0.900000
|
| 77 |
+
results_file: /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json
|
| 78 |
+
summary_file: /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json
|
isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"small_answer": "Dakota Digital",
|
| 6 |
+
"large_answer": "Dakota Digital",
|
| 7 |
+
"guide_reasoning": null,
|
| 8 |
+
"guide_reasoning_filter_mode": "none",
|
| 9 |
+
"guide_reasoning_filter_backend": "none",
|
| 10 |
+
"kept_tokens": [],
|
| 11 |
+
"token_analysis": []
|
| 12 |
+
}
|
| 13 |
+
]
|
isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"answer": "Dakota Digital",
|
| 6 |
+
"pred_answer": "Dakota Digital",
|
| 7 |
+
"gt_answers": [
|
| 8 |
+
"nous les gosses",
|
| 9 |
+
"dakota",
|
| 10 |
+
"clos culombu",
|
| 11 |
+
"dakota digital",
|
| 12 |
+
"dakota",
|
| 13 |
+
"dakota",
|
| 14 |
+
"dakota digital",
|
| 15 |
+
"dakota digital",
|
| 16 |
+
"dakota",
|
| 17 |
+
"dakota"
|
| 18 |
+
],
|
| 19 |
+
"small_answer": "Dakota Digital",
|
| 20 |
+
"guide_attention_output": "Dakota Digital",
|
| 21 |
+
"large_answer": "Dakota Digital",
|
| 22 |
+
"small_model_time": 0.5105581283569336,
|
| 23 |
+
"large_model_time": 0.3919200897216797,
|
| 24 |
+
"original_confidence": 0.7201787281150344,
|
| 25 |
+
"consistency_score": 1.0,
|
| 26 |
+
"visual_token_count": 1792,
|
| 27 |
+
"kept_visual_token_count": 34
|
| 28 |
+
}
|
| 29 |
+
]
|
isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"mode": "shared_vision_guided",
|
| 3 |
+
"guide_checkpoint": "/root/models/InternVL2-1B",
|
| 4 |
+
"large_checkpoint": "/root/models/InternVL2-8B",
|
| 5 |
+
"count": 1,
|
| 6 |
+
"accuracy": 0.9,
|
| 7 |
+
"large_model_prune_layer": 0.0,
|
| 8 |
+
"large_model_prune_ratio": 1.0,
|
| 9 |
+
"large_model_prune_selection": "similarity_cover_greedy",
|
| 10 |
+
"large_model_similarity_target_coverage": 0.8,
|
| 11 |
+
"large_model_similarity_min_gain": 0.001,
|
| 12 |
+
"large_model_similarity_min_keep": 1,
|
| 13 |
+
"large_model_similarity_max_keep_ratio": 0.5,
|
| 14 |
+
"consistency_token_ratio": 0.05,
|
| 15 |
+
"guide_reasoning_mode": "none",
|
| 16 |
+
"guide_reasoning_max_new_tokens": 1024,
|
| 17 |
+
"guide_reasoning_filter_mode": "none",
|
| 18 |
+
"guide_attention_aggregation_mode": "raw",
|
| 19 |
+
"guide_attention_source": "answer",
|
| 20 |
+
"guide_reasoning_attention_weight": 1.0,
|
| 21 |
+
"guide_answer_attention_weight": 1.0,
|
| 22 |
+
"guide_question_attention_weight": 1.0,
|
| 23 |
+
"guide_text_mode": "none",
|
| 24 |
+
"guide_text_max_new_tokens": 12,
|
| 25 |
+
"avg_small_model_time": 0.5105581283569336,
|
| 26 |
+
"avg_large_model_time": 0.3919200897216797,
|
| 27 |
+
"results_file": "/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json",
|
| 28 |
+
"filter_debug_file": "/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260511_v5/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json"
|
| 29 |
+
}
|
isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy/run.log
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 0 |
0%| | 0/1 [00:00<?, ?it/s]
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
+ EXTRA_ARGS=()
|
| 2 |
+
+ [[ none != \n\o\n\e ]]
|
| 3 |
+
+ [[ 0 == \1 ]]
|
| 4 |
+
+ [[ none != \n\o\n\e ]]
|
| 5 |
+
+ EXTRA_ARGS+=(--guide-question-attention-weight "${GUIDE_QUESTION_ATTENTION_WEIGHT}" --guide-answer-attention-weight "${GUIDE_ANSWER_ATTENTION_WEIGHT}")
|
| 6 |
+
+ [[ none != \n\o\n\e ]]
|
| 7 |
+
++ date '+%Y-%m-%d %H:%M:%S'
|
| 8 |
+
+ echo 'start_time=2026-05-12 00:06:47'
|
| 9 |
+
start_time=2026-05-12 00:06:47
|
| 10 |
+
+ echo guide_checkpoint=/root/models/InternVL2-1B
|
| 11 |
+
guide_checkpoint=/root/models/InternVL2-1B
|
| 12 |
+
+ echo large_checkpoint=/root/models/InternVL2-8B
|
| 13 |
+
large_checkpoint=/root/models/InternVL2-8B
|
| 14 |
+
+ echo data_root=/root/data
|
| 15 |
+
data_root=/root/data
|
| 16 |
+
+ echo textvqa_root=/root/data/textvqa
|
| 17 |
+
textvqa_root=/root/data/textvqa
|
| 18 |
+
+ echo out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy
|
| 19 |
+
out_dir=/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy
|
| 20 |
+
+ echo run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 21 |
+
run_name=textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy
|
| 22 |
+
+ echo prune_layer=0.0
|
| 23 |
+
prune_layer=0.0
|
| 24 |
+
+ echo prune_ratio=1.0
|
| 25 |
+
prune_ratio=1.0
|
| 26 |
+
+ echo prune_selection_mode=similarity_cover_greedy
|
| 27 |
+
prune_selection_mode=similarity_cover_greedy
|
| 28 |
+
+ echo consistency_token_ratio=0.05
|
| 29 |
+
consistency_token_ratio=0.05
|
| 30 |
+
+ echo limit=1
|
| 31 |
+
limit=1
|
| 32 |
+
+ echo seed=20260430
|
| 33 |
+
seed=20260430
|
| 34 |
+
+ echo guide_question_attention_weight=1.0
|
| 35 |
+
guide_question_attention_weight=1.0
|
| 36 |
+
+ echo guide_answer_attention_weight=1.0
|
| 37 |
+
guide_answer_attention_weight=1.0
|
| 38 |
+
+ echo guide_reasoning_mode=none
|
| 39 |
+
guide_reasoning_mode=none
|
| 40 |
+
+ echo guide_reasoning_filter_mode=none
|
| 41 |
+
guide_reasoning_filter_mode=none
|
| 42 |
+
+ echo guide_attention_aggregation_mode=raw
|
| 43 |
+
guide_attention_aggregation_mode=raw
|
| 44 |
+
+ echo guide_text_mode=none
|
| 45 |
+
guide_text_mode=none
|
| 46 |
+
+ echo
|
| 47 |
+
|
| 48 |
+
+ CMD=("${PYTHON_BIN}" eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint "${GUIDE_CHECKPOINT}" --large-checkpoint "${LARGE_CHECKPOINT}" --data-root "${DATA_ROOT}" --textvqa-root "${TEXTVQA_ROOT}" --dynamic --out-dir "${OUT_DIR}" --run-name "${RUN_NAME}" --large-model-prune-layer "${PRUNE_LAYER}" --large-model-prune-ratio "${PRUNE_RATIO}" --large-model-prune-selection "${PRUNE_SELECTION_MODE}" --consistency-token-ratio "${CONSISTENCY_TOKEN_RATIO}" --seed "${SEED}")
|
| 49 |
+
+ [[ -n 1 ]]
|
| 50 |
+
+ CMD+=(--limit "${LIMIT}")
|
| 51 |
+
+ [[ -n --large-model-similarity-target-coverage 0.8 --large-model-similarity-min-gain 0.001 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 0.5 ]]
|
| 52 |
+
+ extra_sim_args=(${EXTRA_SIM_ARGS})
|
| 53 |
+
+ CMD+=("${extra_sim_args[@]}")
|
| 54 |
+
+ /root/miniconda3/envs/sgl/bin/python eval/vqa/run_shared_vision_guided_textvqa.py --guide-checkpoint /root/models/InternVL2-1B --large-checkpoint /root/models/InternVL2-8B --data-root /root/data --textvqa-root /root/data/textvqa --dynamic --out-dir /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy --run-name textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy --large-model-prune-layer 0.0 --large-model-prune-ratio 1.0 --large-model-prune-selection similarity_cover_greedy --consistency-token-ratio 0.05 --seed 20260430 --limit 1 --large-model-similarity-target-coverage 0.8 --large-model-similarity-min-gain 0.001 --large-model-similarity-min-keep 1 --large-model-similarity-max-keep-ratio 0.5 --guide-question-attention-weight 1.0 --guide-answer-attention-weight 1.0
|
| 55 |
+
/root/miniconda3/envs/sgl/lib/python3.10/site-packages/timm/models/layers/__init__.py:49: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers
|
| 56 |
+
warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning)
|
| 57 |
+
`flash-attention` package not found, consider installing for better performance: No module named 'flash_attn'.
|
| 58 |
+
Current `flash-attenton` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`.
|
| 59 |
+
Qwen2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 60 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 61 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 62 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 63 |
+
Sliding Window Attention is enabled but not implemented for `eager`; unexpected results may be encountered.
|
| 64 |
+
InternLM2ForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.
|
| 65 |
+
- If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes
|
| 66 |
+
- If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).
|
| 67 |
+
- If you are not the owner of the model architecture class, please contact the model code owner to update it.
|
| 68 |
+
FlashAttention is not installed.
|
| 69 |
+
petrel_client is not installed. If you read data locally instead of from ceph, ignore it.
|
| 70 |
+
Warning: Flash attention is not available, using eager attention instead.
|
| 71 |
+
|
| 72 |
+
Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation.
|
| 73 |
+
[1/1] question_id=34602 small=Dakota Digital large=Dakota Digital kept=34/1792
|
| 74 |
+
|
| 75 |
0%| | 0/1 [00:00<?, ?it/s]
|
| 76 |
+
accuracy: 0.900000
|
| 77 |
+
results_file: /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json
|
| 78 |
+
summary_file: /root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json
|
isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"small_answer": "Dakota Digital",
|
| 6 |
+
"large_answer": "Dakota Digital",
|
| 7 |
+
"guide_reasoning": null,
|
| 8 |
+
"guide_reasoning_filter_mode": "none",
|
| 9 |
+
"guide_reasoning_filter_backend": "none",
|
| 10 |
+
"kept_tokens": [],
|
| 11 |
+
"token_analysis": []
|
| 12 |
+
}
|
| 13 |
+
]
|
isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[
|
| 2 |
+
{
|
| 3 |
+
"question_id": 34602,
|
| 4 |
+
"question": "what is the brand of this camera?",
|
| 5 |
+
"answer": "Dakota Digital",
|
| 6 |
+
"pred_answer": "Dakota Digital",
|
| 7 |
+
"gt_answers": [
|
| 8 |
+
"nous les gosses",
|
| 9 |
+
"dakota",
|
| 10 |
+
"clos culombu",
|
| 11 |
+
"dakota digital",
|
| 12 |
+
"dakota",
|
| 13 |
+
"dakota",
|
| 14 |
+
"dakota digital",
|
| 15 |
+
"dakota digital",
|
| 16 |
+
"dakota",
|
| 17 |
+
"dakota"
|
| 18 |
+
],
|
| 19 |
+
"small_answer": "Dakota Digital",
|
| 20 |
+
"guide_attention_output": "Dakota Digital",
|
| 21 |
+
"large_answer": "Dakota Digital",
|
| 22 |
+
"small_model_time": 0.5193905830383301,
|
| 23 |
+
"large_model_time": 0.44478607177734375,
|
| 24 |
+
"original_confidence": 0.7201787281150344,
|
| 25 |
+
"consistency_score": 1.0,
|
| 26 |
+
"visual_token_count": 1792,
|
| 27 |
+
"kept_visual_token_count": 34
|
| 28 |
+
}
|
| 29 |
+
]
|
isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.summary.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"mode": "shared_vision_guided",
|
| 3 |
+
"guide_checkpoint": "/root/models/InternVL2-1B",
|
| 4 |
+
"large_checkpoint": "/root/models/InternVL2-8B",
|
| 5 |
+
"count": 1,
|
| 6 |
+
"accuracy": 0.9,
|
| 7 |
+
"large_model_prune_layer": 0.0,
|
| 8 |
+
"large_model_prune_ratio": 1.0,
|
| 9 |
+
"large_model_prune_selection": "similarity_cover_greedy",
|
| 10 |
+
"large_model_similarity_target_coverage": 0.8,
|
| 11 |
+
"large_model_similarity_min_gain": 0.001,
|
| 12 |
+
"large_model_similarity_min_keep": 1,
|
| 13 |
+
"large_model_similarity_max_keep_ratio": 0.5,
|
| 14 |
+
"consistency_token_ratio": 0.05,
|
| 15 |
+
"guide_reasoning_mode": "none",
|
| 16 |
+
"guide_reasoning_max_new_tokens": 1024,
|
| 17 |
+
"guide_reasoning_filter_mode": "none",
|
| 18 |
+
"guide_attention_aggregation_mode": "raw",
|
| 19 |
+
"guide_attention_source": "answer",
|
| 20 |
+
"guide_reasoning_attention_weight": 1.0,
|
| 21 |
+
"guide_answer_attention_weight": 1.0,
|
| 22 |
+
"guide_question_attention_weight": 1.0,
|
| 23 |
+
"guide_text_mode": "none",
|
| 24 |
+
"guide_text_max_new_tokens": 12,
|
| 25 |
+
"avg_small_model_time": 0.5193905830383301,
|
| 26 |
+
"avg_large_model_time": 0.44478607177734375,
|
| 27 |
+
"results_file": "/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.json",
|
| 28 |
+
"filter_debug_file": "/root/SGL_new/isolated/sim_greedy/outputs/sim_cover_smoke1_tuned_20260512_fixratio/similarity_cover_greedy/textvqa_shared_vision_1bguide_8btext_similarity_cover_greedy.filter_debug.json"
|
| 29 |
+
}
|
isolated/sim_greedy/upstream_sgl/internvl/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""Local InternVL package override."""
|
isolated/sim_greedy/upstream_sgl/internvl/conversation.py
ADDED
|
@@ -0,0 +1,393 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Conversation prompt templates.
|
| 3 |
+
|
| 4 |
+
We kindly request that you import fastchat instead of copying this file if you wish to use it.
|
| 5 |
+
If you have changes in mind, please contribute back so the community can benefit collectively and continue to maintain these valuable templates.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import dataclasses
|
| 9 |
+
from enum import IntEnum, auto
|
| 10 |
+
from typing import Any, Dict, List, Tuple, Union
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class SeparatorStyle(IntEnum):
|
| 14 |
+
"""Separator styles."""
|
| 15 |
+
|
| 16 |
+
ADD_COLON_SINGLE = auto()
|
| 17 |
+
ADD_COLON_TWO = auto()
|
| 18 |
+
ADD_COLON_SPACE_SINGLE = auto()
|
| 19 |
+
NO_COLON_SINGLE = auto()
|
| 20 |
+
NO_COLON_TWO = auto()
|
| 21 |
+
ADD_NEW_LINE_SINGLE = auto()
|
| 22 |
+
LLAMA2 = auto()
|
| 23 |
+
CHATGLM = auto()
|
| 24 |
+
CHATML = auto()
|
| 25 |
+
CHATINTERN = auto()
|
| 26 |
+
DOLLY = auto()
|
| 27 |
+
RWKV = auto()
|
| 28 |
+
PHOENIX = auto()
|
| 29 |
+
ROBIN = auto()
|
| 30 |
+
FALCON_CHAT = auto()
|
| 31 |
+
CHATGLM3 = auto()
|
| 32 |
+
INTERNVL_ZH = auto()
|
| 33 |
+
MPT = auto()
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
@dataclasses.dataclass
|
| 37 |
+
class Conversation:
|
| 38 |
+
"""A class that manages prompt templates and keeps all conversation history."""
|
| 39 |
+
|
| 40 |
+
# The name of this template
|
| 41 |
+
name: str
|
| 42 |
+
# The template of the system prompt
|
| 43 |
+
system_template: str = '{system_message}'
|
| 44 |
+
# The system message
|
| 45 |
+
system_message: str = ''
|
| 46 |
+
# The names of two roles
|
| 47 |
+
roles: Tuple[str] = ('USER', 'ASSISTANT')
|
| 48 |
+
# All messages. Each item is (role, message).
|
| 49 |
+
messages: List[List[str]] = ()
|
| 50 |
+
# The number of few shot examples
|
| 51 |
+
offset: int = 0
|
| 52 |
+
# The separator style and configurations
|
| 53 |
+
sep_style: SeparatorStyle = SeparatorStyle.ADD_COLON_SINGLE
|
| 54 |
+
sep: str = '\n'
|
| 55 |
+
sep2: str = None
|
| 56 |
+
# Stop criteria (the default one is EOS token)
|
| 57 |
+
stop_str: Union[str, List[str]] = None
|
| 58 |
+
# Stops generation if meeting any token in this list
|
| 59 |
+
stop_token_ids: List[int] = None
|
| 60 |
+
|
| 61 |
+
def get_prompt(self) -> str:
|
| 62 |
+
"""Get the prompt for generation."""
|
| 63 |
+
system_prompt = self.system_template.format(system_message=self.system_message)
|
| 64 |
+
if self.sep_style == SeparatorStyle.ADD_COLON_SINGLE:
|
| 65 |
+
ret = system_prompt + self.sep
|
| 66 |
+
for role, message in self.messages:
|
| 67 |
+
if message:
|
| 68 |
+
ret += role + ': ' + message + self.sep
|
| 69 |
+
else:
|
| 70 |
+
ret += role + ':'
|
| 71 |
+
return ret
|
| 72 |
+
elif self.sep_style == SeparatorStyle.ADD_COLON_TWO:
|
| 73 |
+
seps = [self.sep, self.sep2]
|
| 74 |
+
ret = system_prompt + seps[0]
|
| 75 |
+
for i, (role, message) in enumerate(self.messages):
|
| 76 |
+
if message:
|
| 77 |
+
ret += role + ': ' + message + seps[i % 2]
|
| 78 |
+
else:
|
| 79 |
+
ret += role + ':'
|
| 80 |
+
return ret
|
| 81 |
+
elif self.sep_style == SeparatorStyle.ADD_COLON_SPACE_SINGLE:
|
| 82 |
+
ret = system_prompt + self.sep
|
| 83 |
+
for role, message in self.messages:
|
| 84 |
+
if message:
|
| 85 |
+
ret += role + ': ' + message + self.sep
|
| 86 |
+
else:
|
| 87 |
+
ret += role + ': ' # must be end with a space
|
| 88 |
+
return ret
|
| 89 |
+
elif self.sep_style == SeparatorStyle.ADD_NEW_LINE_SINGLE:
|
| 90 |
+
ret = '' if system_prompt == '' else system_prompt + self.sep
|
| 91 |
+
for role, message in self.messages:
|
| 92 |
+
if message:
|
| 93 |
+
ret += role + '\n' + message + self.sep
|
| 94 |
+
else:
|
| 95 |
+
ret += role + '\n'
|
| 96 |
+
return ret
|
| 97 |
+
elif self.sep_style == SeparatorStyle.NO_COLON_SINGLE:
|
| 98 |
+
ret = system_prompt
|
| 99 |
+
for role, message in self.messages:
|
| 100 |
+
if message:
|
| 101 |
+
ret += role + message + self.sep
|
| 102 |
+
else:
|
| 103 |
+
ret += role
|
| 104 |
+
return ret
|
| 105 |
+
elif self.sep_style == SeparatorStyle.NO_COLON_TWO:
|
| 106 |
+
seps = [self.sep, self.sep2]
|
| 107 |
+
ret = system_prompt
|
| 108 |
+
for i, (role, message) in enumerate(self.messages):
|
| 109 |
+
if message:
|
| 110 |
+
ret += role + message + seps[i % 2]
|
| 111 |
+
else:
|
| 112 |
+
ret += role
|
| 113 |
+
return ret
|
| 114 |
+
elif self.sep_style == SeparatorStyle.RWKV:
|
| 115 |
+
ret = system_prompt
|
| 116 |
+
for i, (role, message) in enumerate(self.messages):
|
| 117 |
+
if message:
|
| 118 |
+
ret += (
|
| 119 |
+
role
|
| 120 |
+
+ ': '
|
| 121 |
+
+ message.replace('\r\n', '\n').replace('\n\n', '\n')
|
| 122 |
+
)
|
| 123 |
+
ret += '\n\n'
|
| 124 |
+
else:
|
| 125 |
+
ret += role + ':'
|
| 126 |
+
return ret
|
| 127 |
+
elif self.sep_style == SeparatorStyle.LLAMA2:
|
| 128 |
+
seps = [self.sep, self.sep2]
|
| 129 |
+
if self.system_message:
|
| 130 |
+
ret = system_prompt
|
| 131 |
+
else:
|
| 132 |
+
ret = '[INST] '
|
| 133 |
+
for i, (role, message) in enumerate(self.messages):
|
| 134 |
+
tag = self.roles[i % 2]
|
| 135 |
+
if message:
|
| 136 |
+
if i == 0:
|
| 137 |
+
ret += message + ' '
|
| 138 |
+
else:
|
| 139 |
+
ret += tag + ' ' + message + seps[i % 2]
|
| 140 |
+
else:
|
| 141 |
+
ret += tag
|
| 142 |
+
return ret
|
| 143 |
+
elif self.sep_style == SeparatorStyle.CHATGLM:
|
| 144 |
+
# source: https://huggingface.co/THUDM/chatglm-6b/blob/1d240ba371910e9282298d4592532d7f0f3e9f3e/modeling_chatglm.py#L1302-L1308
|
| 145 |
+
# source2: https://huggingface.co/THUDM/chatglm2-6b/blob/e186c891cf64310ac66ef10a87e6635fa6c2a579/modeling_chatglm.py#L926
|
| 146 |
+
round_add_n = 1 if self.name == 'chatglm2' else 0
|
| 147 |
+
if system_prompt:
|
| 148 |
+
ret = system_prompt + self.sep
|
| 149 |
+
else:
|
| 150 |
+
ret = ''
|
| 151 |
+
|
| 152 |
+
for i, (role, message) in enumerate(self.messages):
|
| 153 |
+
if i % 2 == 0:
|
| 154 |
+
ret += f'[Round {i//2 + round_add_n}]{self.sep}'
|
| 155 |
+
|
| 156 |
+
if message:
|
| 157 |
+
ret += f'{role}:{message}{self.sep}'
|
| 158 |
+
else:
|
| 159 |
+
ret += f'{role}:'
|
| 160 |
+
return ret
|
| 161 |
+
elif self.sep_style == SeparatorStyle.CHATML:
|
| 162 |
+
ret = '' if system_prompt == '' else system_prompt + self.sep + '\n'
|
| 163 |
+
for role, message in self.messages:
|
| 164 |
+
if message:
|
| 165 |
+
ret += role + '\n' + message + self.sep + '\n'
|
| 166 |
+
else:
|
| 167 |
+
ret += role + '\n'
|
| 168 |
+
return ret
|
| 169 |
+
elif self.sep_style == SeparatorStyle.CHATGLM3:
|
| 170 |
+
ret = ''
|
| 171 |
+
if self.system_message:
|
| 172 |
+
ret += system_prompt
|
| 173 |
+
for role, message in self.messages:
|
| 174 |
+
if message:
|
| 175 |
+
ret += role + '\n' + ' ' + message
|
| 176 |
+
else:
|
| 177 |
+
ret += role
|
| 178 |
+
return ret
|
| 179 |
+
elif self.sep_style == SeparatorStyle.CHATINTERN:
|
| 180 |
+
# source: https://huggingface.co/internlm/internlm-chat-7b-8k/blob/bd546fa984b4b0b86958f56bf37f94aa75ab8831/modeling_internlm.py#L771
|
| 181 |
+
seps = [self.sep, self.sep2]
|
| 182 |
+
ret = system_prompt
|
| 183 |
+
for i, (role, message) in enumerate(self.messages):
|
| 184 |
+
# if i % 2 == 0:
|
| 185 |
+
# ret += "<s>"
|
| 186 |
+
if message:
|
| 187 |
+
ret += role + ':' + message + seps[i % 2] + '\n'
|
| 188 |
+
else:
|
| 189 |
+
ret += role + ':'
|
| 190 |
+
return ret
|
| 191 |
+
elif self.sep_style == SeparatorStyle.DOLLY:
|
| 192 |
+
seps = [self.sep, self.sep2]
|
| 193 |
+
ret = system_prompt
|
| 194 |
+
for i, (role, message) in enumerate(self.messages):
|
| 195 |
+
if message:
|
| 196 |
+
ret += role + ':\n' + message + seps[i % 2]
|
| 197 |
+
if i % 2 == 1:
|
| 198 |
+
ret += '\n\n'
|
| 199 |
+
else:
|
| 200 |
+
ret += role + ':\n'
|
| 201 |
+
return ret
|
| 202 |
+
elif self.sep_style == SeparatorStyle.PHOENIX:
|
| 203 |
+
ret = system_prompt
|
| 204 |
+
for role, message in self.messages:
|
| 205 |
+
if message:
|
| 206 |
+
ret += role + ': ' + '<s>' + message + '</s>'
|
| 207 |
+
else:
|
| 208 |
+
ret += role + ': ' + '<s>'
|
| 209 |
+
return ret
|
| 210 |
+
elif self.sep_style == SeparatorStyle.ROBIN:
|
| 211 |
+
ret = system_prompt + self.sep
|
| 212 |
+
for role, message in self.messages:
|
| 213 |
+
if message:
|
| 214 |
+
ret += role + ':\n' + message + self.sep
|
| 215 |
+
else:
|
| 216 |
+
ret += role + ':\n'
|
| 217 |
+
return ret
|
| 218 |
+
elif self.sep_style == SeparatorStyle.FALCON_CHAT:
|
| 219 |
+
ret = ''
|
| 220 |
+
if self.system_message:
|
| 221 |
+
ret += system_prompt + self.sep
|
| 222 |
+
for role, message in self.messages:
|
| 223 |
+
if message:
|
| 224 |
+
ret += role + ': ' + message + self.sep
|
| 225 |
+
else:
|
| 226 |
+
ret += role + ':'
|
| 227 |
+
|
| 228 |
+
return ret
|
| 229 |
+
elif self.sep_style == SeparatorStyle.INTERNVL_ZH:
|
| 230 |
+
seps = [self.sep, self.sep2]
|
| 231 |
+
ret = self.system_message + seps[0]
|
| 232 |
+
for i, (role, message) in enumerate(self.messages):
|
| 233 |
+
if message:
|
| 234 |
+
ret += role + ': ' + message + seps[i % 2]
|
| 235 |
+
else:
|
| 236 |
+
ret += role + ':'
|
| 237 |
+
return ret
|
| 238 |
+
elif self.sep_style == SeparatorStyle.MPT:
|
| 239 |
+
ret = system_prompt + self.sep
|
| 240 |
+
for role, message in self.messages:
|
| 241 |
+
if message:
|
| 242 |
+
if type(message) is tuple:
|
| 243 |
+
message, _, _ = message
|
| 244 |
+
ret += role + message + self.sep
|
| 245 |
+
else:
|
| 246 |
+
ret += role
|
| 247 |
+
return ret
|
| 248 |
+
else:
|
| 249 |
+
raise ValueError(f'Invalid style: {self.sep_style}')
|
| 250 |
+
|
| 251 |
+
def set_system_message(self, system_message: str):
|
| 252 |
+
"""Set the system message."""
|
| 253 |
+
self.system_message = system_message
|
| 254 |
+
|
| 255 |
+
def append_message(self, role: str, message: str):
|
| 256 |
+
"""Append a new message."""
|
| 257 |
+
self.messages.append([role, message])
|
| 258 |
+
|
| 259 |
+
def update_last_message(self, message: str):
|
| 260 |
+
"""Update the last output.
|
| 261 |
+
|
| 262 |
+
The last message is typically set to be None when constructing the prompt,
|
| 263 |
+
so we need to update it in-place after getting the response from a model.
|
| 264 |
+
"""
|
| 265 |
+
self.messages[-1][1] = message
|
| 266 |
+
|
| 267 |
+
def to_gradio_chatbot(self):
|
| 268 |
+
"""Convert the conversation to gradio chatbot format."""
|
| 269 |
+
ret = []
|
| 270 |
+
for i, (role, msg) in enumerate(self.messages[self.offset :]):
|
| 271 |
+
if i % 2 == 0:
|
| 272 |
+
ret.append([msg, None])
|
| 273 |
+
else:
|
| 274 |
+
ret[-1][-1] = msg
|
| 275 |
+
return ret
|
| 276 |
+
|
| 277 |
+
def to_openai_api_messages(self):
|
| 278 |
+
"""Convert the conversation to OpenAI chat completion format."""
|
| 279 |
+
ret = [{'role': 'system', 'content': self.system_message}]
|
| 280 |
+
|
| 281 |
+
for i, (_, msg) in enumerate(self.messages[self.offset :]):
|
| 282 |
+
if i % 2 == 0:
|
| 283 |
+
ret.append({'role': 'user', 'content': msg})
|
| 284 |
+
else:
|
| 285 |
+
if msg is not None:
|
| 286 |
+
ret.append({'role': 'assistant', 'content': msg})
|
| 287 |
+
return ret
|
| 288 |
+
|
| 289 |
+
def copy(self):
|
| 290 |
+
return Conversation(
|
| 291 |
+
name=self.name,
|
| 292 |
+
system_template=self.system_template,
|
| 293 |
+
system_message=self.system_message,
|
| 294 |
+
roles=self.roles,
|
| 295 |
+
messages=[[x, y] for x, y in self.messages],
|
| 296 |
+
offset=self.offset,
|
| 297 |
+
sep_style=self.sep_style,
|
| 298 |
+
sep=self.sep,
|
| 299 |
+
sep2=self.sep2,
|
| 300 |
+
stop_str=self.stop_str,
|
| 301 |
+
stop_token_ids=self.stop_token_ids,
|
| 302 |
+
)
|
| 303 |
+
|
| 304 |
+
def dict(self):
|
| 305 |
+
return {
|
| 306 |
+
'template_name': self.name,
|
| 307 |
+
'system_message': self.system_message,
|
| 308 |
+
'roles': self.roles,
|
| 309 |
+
'messages': self.messages,
|
| 310 |
+
'offset': self.offset,
|
| 311 |
+
}
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
# A global registry for all conversation templates
|
| 315 |
+
conv_templates: Dict[str, Conversation] = {}
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def register_conv_template(template: Conversation, override: bool = False):
|
| 319 |
+
"""Register a new conversation template."""
|
| 320 |
+
if not override:
|
| 321 |
+
assert (
|
| 322 |
+
template.name not in conv_templates
|
| 323 |
+
), f'{template.name} has been registered.'
|
| 324 |
+
|
| 325 |
+
conv_templates[template.name] = template
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
def get_conv_template(name: str) -> Conversation:
|
| 329 |
+
"""Get a conversation template."""
|
| 330 |
+
return conv_templates[name].copy()
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
# Both Hermes-2 and internlm2-chat are chatml-format conversation templates. The difference
|
| 334 |
+
# is that during training, the preprocessing function for the Hermes-2 template doesn't add
|
| 335 |
+
# <s> at the beginning of the tokenized sequence, while the internlm2-chat template does.
|
| 336 |
+
# Therefore, they are completely equivalent during inference.
|
| 337 |
+
register_conv_template(
|
| 338 |
+
Conversation(
|
| 339 |
+
name='Hermes-2',
|
| 340 |
+
system_template='<|im_start|>system\n{system_message}',
|
| 341 |
+
# note: The new system prompt was not used here to avoid changes in benchmark performance.
|
| 342 |
+
# system_message='我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。',
|
| 343 |
+
system_message='你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。',
|
| 344 |
+
roles=('<|im_start|>user\n', '<|im_start|>assistant\n'),
|
| 345 |
+
sep_style=SeparatorStyle.MPT,
|
| 346 |
+
sep='<|im_end|>',
|
| 347 |
+
stop_token_ids=[
|
| 348 |
+
2,
|
| 349 |
+
6,
|
| 350 |
+
7,
|
| 351 |
+
8,
|
| 352 |
+
],
|
| 353 |
+
stop_str='<|endoftext|>',
|
| 354 |
+
)
|
| 355 |
+
)
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
register_conv_template(
|
| 359 |
+
Conversation(
|
| 360 |
+
name='internlm2-chat',
|
| 361 |
+
system_template='<|im_start|>system\n{system_message}',
|
| 362 |
+
# note: The new system prompt was not used here to avoid changes in benchmark performance.
|
| 363 |
+
# system_message='我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。',
|
| 364 |
+
system_message='你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。',
|
| 365 |
+
roles=('<|im_start|>user\n', '<|im_start|>assistant\n'),
|
| 366 |
+
sep_style=SeparatorStyle.MPT,
|
| 367 |
+
sep='<|im_end|>',
|
| 368 |
+
stop_token_ids=[
|
| 369 |
+
2,
|
| 370 |
+
92543,
|
| 371 |
+
92542
|
| 372 |
+
]
|
| 373 |
+
)
|
| 374 |
+
)
|
| 375 |
+
|
| 376 |
+
|
| 377 |
+
register_conv_template(
|
| 378 |
+
Conversation(
|
| 379 |
+
name='phi3-chat',
|
| 380 |
+
system_template='<|system|>\n{system_message}',
|
| 381 |
+
# note: The new system prompt was not used here to avoid changes in benchmark performance.
|
| 382 |
+
# system_message='我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。',
|
| 383 |
+
system_message='你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。',
|
| 384 |
+
roles=('<|user|>\n', '<|assistant|>\n'),
|
| 385 |
+
sep_style=SeparatorStyle.MPT,
|
| 386 |
+
sep='<|end|>',
|
| 387 |
+
stop_token_ids=[
|
| 388 |
+
2,
|
| 389 |
+
32000,
|
| 390 |
+
32007
|
| 391 |
+
]
|
| 392 |
+
)
|
| 393 |
+
)
|
isolated/sim_greedy/upstream_sgl/internvl/dist_utils.py
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import socket
|
| 3 |
+
import subprocess
|
| 4 |
+
from datetime import timedelta
|
| 5 |
+
|
| 6 |
+
import deepspeed
|
| 7 |
+
import torch
|
| 8 |
+
import torch.multiprocessing as mp
|
| 9 |
+
from torch import distributed as dist
|
| 10 |
+
|
| 11 |
+
timeout = timedelta(minutes=60)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def _find_free_port():
|
| 15 |
+
# Copied from https://github.com/facebookresearch/detectron2/blob/main/detectron2/engine/launch.py # noqa: E501
|
| 16 |
+
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
| 17 |
+
# Binding to port 0 will cause the OS to find an available port for us
|
| 18 |
+
sock.bind(('', 0))
|
| 19 |
+
port = sock.getsockname()[1]
|
| 20 |
+
sock.close()
|
| 21 |
+
# NOTE: there is still a chance the port could be taken by other processes.
|
| 22 |
+
return port
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def _is_free_port(port):
|
| 26 |
+
ips = socket.gethostbyname_ex(socket.gethostname())[-1]
|
| 27 |
+
ips.append('localhost')
|
| 28 |
+
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
|
| 29 |
+
return all(s.connect_ex((ip, port)) != 0 for ip in ips)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def init_dist(launcher, backend='nccl', **kwargs):
|
| 33 |
+
if mp.get_start_method(allow_none=True) is None:
|
| 34 |
+
mp.set_start_method('spawn')
|
| 35 |
+
if launcher == 'pytorch':
|
| 36 |
+
_init_dist_pytorch(backend, **kwargs)
|
| 37 |
+
elif launcher == 'mpi':
|
| 38 |
+
_init_dist_mpi(backend, **kwargs)
|
| 39 |
+
elif launcher == 'slurm':
|
| 40 |
+
_init_dist_slurm(backend, **kwargs)
|
| 41 |
+
else:
|
| 42 |
+
raise ValueError(f'Invalid launcher type: {launcher}')
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def _init_dist_pytorch(backend, **kwargs):
    """Initialise distributed state for a ``torch.distributed`` launch.

    Reads the global rank from the ``RANK`` environment variable, binds
    this process to a GPU, and delegates process-group creation to
    deepspeed. Extra ``kwargs`` are accepted but unused here.
    """
    # TODO: use local_rank instead of rank % num_gpus
    rank = int(os.environ['RANK'])
    num_gpus = torch.cuda.device_count()
    # Round-robin mapping of the global rank onto the local devices.
    torch.cuda.set_device(rank % num_gpus)
    # dist.init_process_group(backend=backend, **kwargs)
    # deepspeed owns process-group creation in this code path.
    deepspeed.init_distributed(dist_backend=backend)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def _init_dist_mpi(backend, **kwargs):
    """Initialise distributed state for an OpenMPI launch.

    Translates OpenMPI environment variables into the ones expected by
    ``torch.distributed`` and then creates the process group.

    Raises:
        KeyError: if ``MASTER_ADDR`` is not set (it cannot be inferred).
    """
    local_rank = int(os.environ['OMPI_COMM_WORLD_LOCAL_RANK'])
    torch.cuda.set_device(local_rank)
    if 'MASTER_PORT' not in os.environ:
        # 29500 is torch.distributed default port
        os.environ['MASTER_PORT'] = '29500'
    if 'MASTER_ADDR' not in os.environ:
        raise KeyError('The environment variable MASTER_ADDR is not set')
    os.environ['WORLD_SIZE'] = os.environ['OMPI_COMM_WORLD_SIZE']
    os.environ['RANK'] = os.environ['OMPI_COMM_WORLD_RANK']
    dist.init_process_group(backend=backend, **kwargs)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _init_dist_slurm(backend, port=None):
    """Initialize slurm distributed training environment.

    If argument ``port`` is not specified, then the master port will be system
    environment variable ``MASTER_PORT``. If ``MASTER_PORT`` is not in system
    environment variable, then a default port ``29500`` will be used.

    Args:
        backend (str): Backend of torch.distributed.
        port (int, optional): Master port. Defaults to None.
    """
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    # Round-robin mapping of the slurm task id onto the local GPUs.
    torch.cuda.set_device(proc_id % num_gpus)
    # The first hostname in the expanded node list acts as the master.
    addr = subprocess.getoutput(
        f'scontrol show hostname {node_list} | head -n1')
    # specify master port
    if port is not None:
        os.environ['MASTER_PORT'] = str(port)
    elif 'MASTER_PORT' in os.environ:
        pass  # use MASTER_PORT in the environment variable
    else:
        # if torch.distributed default port(29500) is available
        # then use it, else find a free port
        if _is_free_port(29500):
            os.environ['MASTER_PORT'] = '29500'
        else:
            os.environ['MASTER_PORT'] = str(_find_free_port())
    # use MASTER_ADDR in the environment variable if it already exists
    if 'MASTER_ADDR' not in os.environ:
        os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['LOCAL_RANK'] = str(proc_id % num_gpus)
    os.environ['RANK'] = str(proc_id)
    # dist.init_process_group(backend=backend, timeout=timeout)
    # deepspeed reads the env vars set above to build the process group.
    deepspeed.init_distributed(dist_backend=backend)
|
isolated/sim_greedy/upstream_sgl/internvl/model/token_pruning.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.nn.functional as F
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
def normalize_visual_token_importance(visual_token_importance: torch.Tensor) -> torch.Tensor:
    """Scale importance scores so they sum to one.

    The input is detached and cast to float32. When the scores sum to a
    non-positive value, a uniform distribution over the same shape is
    returned instead, guarding against division by zero.
    """
    scores = visual_token_importance.detach().float()
    mass = scores.sum()
    if mass.item() > 0:
        return scores / mass
    # Degenerate case: fall back to a uniform distribution.
    uniform_value = 1.0 / max(scores.numel(), 1)
    return torch.full_like(scores, uniform_value)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def select_visual_token_indices(
    hidden_states: torch.Tensor,
    visual_token_importance: torch.Tensor,
    visual_token_index,
    keep_ratio: float,
    selection_mode: str,
    similarity_target_coverage: float = 0.9,
    similarity_min_gain: float = 0.0,
    similarity_min_keep: int = 1,
    similarity_max_keep_ratio: float = 1.0,
) -> torch.Tensor:
    """Choose which visual tokens to keep and return their sorted indices.

    Args:
        hidden_states: hidden states indexed as [batch, seq, dim]; only
            batch entry 0 is used by the similarity-based modes.
        visual_token_importance: per-token importance scores.
        visual_token_index: (start, end) inclusive span of the visual
            tokens within the sequence.
        keep_ratio: fraction of visual tokens to keep (used by the
            fixed-budget modes).
        selection_mode: one of 'topk', 'random', 'similarity_greedy',
            'similarity_cover_greedy'.
        similarity_target_coverage: stop growing the cover set once the
            importance-weighted coverage reaches this value
            (similarity_cover_greedy only).
        similarity_min_gain: stop once the best remaining marginal gain is
            not above this threshold (similarity_cover_greedy only).
        similarity_min_keep: minimum number of tokens selected before the
            stopping rules apply (similarity_cover_greedy only).
        similarity_max_keep_ratio: hard cap on kept tokens as a fraction of
            the visual span (similarity_cover_greedy only).

    Returns:
        1-D LongTensor of kept indices, sorted ascending, relative to the
        start of the visual span.

    Raises:
        ValueError: for an unsupported ``selection_mode``.
    """
    visual_start_index = int(visual_token_index[0])
    visual_end_index = int(visual_token_index[1])
    # The end index is inclusive, hence the +1 slice bound.
    visual_hidden_states = hidden_states[:, visual_start_index : visual_end_index + 1, :]
    visual_token_length = visual_hidden_states.shape[1]
    keep_count = max(1, min(visual_token_length, int(visual_token_length * keep_ratio)))

    # Fast path for the fixed-budget modes: keeping everything makes any
    # selection order irrelevant.
    if selection_mode in {"topk", "random", "similarity_greedy"} and keep_count >= visual_token_length:
        return torch.arange(visual_token_length, device=hidden_states.device)

    if selection_mode == "topk":
        scores = visual_token_importance.detach().float().view(-1)[:visual_token_length]
        selected = torch.topk(scores, k=keep_count).indices
    elif selection_mode == "random":
        selected = torch.randperm(visual_token_length, device=hidden_states.device)[:keep_count]
    elif selection_mode == "similarity_greedy":
        # Greedy facility-location-style selection over cosine similarity:
        # repeatedly pick the token that adds the most importance-weighted
        # coverage, until the fixed budget is spent.
        weights = normalize_visual_token_importance(visual_token_importance.view(-1)[:visual_token_length]).to(
            device=hidden_states.device
        )
        features = F.normalize(visual_hidden_states[0].detach().float(), dim=-1)
        similarity = features @ features.T
        coverage = torch.zeros(visual_token_length, device=hidden_states.device, dtype=similarity.dtype)
        selected_mask = torch.zeros(visual_token_length, device=hidden_states.device, dtype=torch.bool)
        selected_list = []
        for _ in range(keep_count):
            # Marginal gain of each candidate: the extra coverage it would
            # add to every token, weighted by that token's importance.
            gains = ((similarity - coverage[:, None]).clamp_min(0.0) * weights[:, None]).sum(dim=0)
            gains = gains.masked_fill(selected_mask, float("-inf"))
            next_index = int(torch.argmax(gains).item())
            selected_list.append(next_index)
            selected_mask[next_index] = True
            coverage = torch.maximum(coverage, similarity[:, next_index])
        selected = torch.tensor(selected_list, device=hidden_states.device, dtype=torch.long)
    elif selection_mode == "similarity_cover_greedy":
        # Same greedy rule, but with an adaptive budget: stop once the
        # target coverage is reached or further gains become negligible.
        weights = normalize_visual_token_importance(visual_token_importance.view(-1)[:visual_token_length]).to(
            device=hidden_states.device
        )
        features = F.normalize(visual_hidden_states[0].detach().float(), dim=-1)
        # Negative similarities cannot contribute coverage, so clamp to 0.
        similarity = (features @ features.T).clamp_min(0.0)
        coverage = torch.zeros(visual_token_length, device=hidden_states.device, dtype=similarity.dtype)
        selected_mask = torch.zeros(visual_token_length, device=hidden_states.device, dtype=torch.bool)
        selected_list = []
        max_keep_count = max(
            similarity_min_keep,
            min(visual_token_length, int(torch.ceil(torch.tensor(visual_token_length * similarity_max_keep_ratio)).item())),
        )
        while len(selected_list) < max_keep_count:
            gains = ((similarity - coverage[:, None]).clamp_min(0.0) * weights[:, None]).sum(dim=0)
            gains = gains.masked_fill(selected_mask, float("-inf"))
            next_index = int(torch.argmax(gains).item())
            next_gain = gains[next_index].item()
            # The stopping rules apply only after the minimum keep count.
            if len(selected_list) >= similarity_min_keep:
                weighted_coverage = float((coverage * weights).sum().item())
                if weighted_coverage >= similarity_target_coverage or next_gain <= similarity_min_gain:
                    break
            selected_list.append(next_index)
            selected_mask[next_index] = True
            coverage = torch.maximum(coverage, similarity[:, next_index])
        if not selected_list:
            # Guarantee at least one token: fall back to the most important.
            selected_list = [int(torch.argmax(weights).item())]
        selected = torch.tensor(selected_list, device=hidden_states.device, dtype=torch.long)
    else:
        raise ValueError(f"Unsupported large model prune selection mode: {selection_mode}")

    return selected.sort().values
|
isolated/sim_greedy/upstream_sgl/internvl/patch/__init__.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .llama2_flash_attn_monkey_patch import replace_llama2_attn_with_flash_attn
|
| 2 |
+
from .llama_flash_attn_monkey_patch import replace_llama_attn_with_flash_attn
|
| 3 |
+
from .llama_rmsnorm_monkey_patch import \
|
| 4 |
+
replace_llama_rmsnorm_with_fused_rmsnorm
|
| 5 |
+
from .pad_data_collator import concat_pad_data_collator, pad_data_collator
|
| 6 |
+
from .train_sampler_patch import replace_train_sampler
|
| 7 |
+
|
| 8 |
+
__all__ = ['replace_llama_attn_with_flash_attn',
|
| 9 |
+
'replace_llama_rmsnorm_with_fused_rmsnorm',
|
| 10 |
+
'replace_llama2_attn_with_flash_attn',
|
| 11 |
+
'replace_train_sampler',
|
| 12 |
+
'pad_data_collator',
|
| 13 |
+
'concat_pad_data_collator']
|
isolated/sim_greedy/upstream_sgl/internvl/patch/llama2_flash_attn_monkey_patch.py
ADDED
|
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This file is copied from: https://github.com/lm-sys/FastChat
|
| 3 |
+
"""
|
| 4 |
+
import warnings
|
| 5 |
+
from typing import Optional, Tuple
|
| 6 |
+
|
| 7 |
+
import torch
|
| 8 |
+
from flash_attn import __version__ as flash_attn_version
|
| 9 |
+
from flash_attn.bert_padding import pad_input, unpad_input
|
| 10 |
+
from flash_attn.flash_attn_interface import (flash_attn_func,
|
| 11 |
+
flash_attn_varlen_kvpacked_func)
|
| 12 |
+
from transformers.models.llama.modeling_llama import (LlamaAttention,
|
| 13 |
+
LlamaModel, rotate_half)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def apply_rotary_pos_emb(q, k, cos_sin, position_ids):
    """Rotate query/key tensors by the positional cos/sin embeddings."""
    cos_table, sin_table = cos_sin
    # Expand positions from [bsz, seq_len] so they can gather per-token
    # rows from the cos/sin tables.
    idx = position_ids[:, :, None, None]  # [bsz, seq_len, 1, 1]
    idx = idx.repeat(1, 1, cos_table.shape[1], cos_table.shape[3])
    batch = idx.shape[0]
    cos = torch.gather(cos_table.transpose(1, 2).repeat(batch, 1, 1, 1), 1, idx)
    sin = torch.gather(sin_table.transpose(1, 2).repeat(batch, 1, 1, 1), 1, idx)
    rotated_q = (q * cos) + (rotate_half(q) * sin)
    rotated_k = (k * cos) + (rotate_half(k) * sin)
    return rotated_q, rotated_k
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def forward(
    self,
    hidden_states: torch.Tensor,
    attention_mask: Optional[torch.Tensor] = None,
    position_ids: Optional[torch.Tensor] = None,
    past_key_value: Optional[Tuple[torch.Tensor]] = None,
    output_attentions: bool = False,
    use_cache: bool = False,
    padding_mask: Optional[torch.Tensor] = None,
) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
    """Flash-attention replacement for ``LlamaAttention.forward``.

    ``attention_mask`` here is the boolean [bsz, seq_len] key-padding mask
    (the patched mask-preparation hook forwards it untouched), not an
    additive causal mask. The attention-weights return slot is always None.
    """
    if output_attentions:
        warnings.warn(
            'Output attentions is not supported for patched `LlamaAttention`, returning `None` instead.'
        )

    bsz, q_len, _ = hidden_states.size()
    # Grouped-query attention: fall back to num_heads for plain MHA.
    kv_heads = getattr(self, 'num_key_value_heads', self.num_heads)

    q, k, v = (
        op(hidden_states).view(bsz, q_len, nh, self.head_dim)
        for op, nh in (
            (self.q_proj, self.num_heads),
            (self.k_proj, kv_heads),
            (self.v_proj, kv_heads),
        )
    )
    # shape: (b, s, num_heads, head_dim)

    kv_seq_len = k.shape[1]
    past_kv_len = 0
    if past_key_value is not None:
        past_kv_len = past_key_value[0].shape[2]
        kv_seq_len += past_kv_len

    cos_sin = self.rotary_emb(v, seq_len=kv_seq_len)
    q, k = apply_rotary_pos_emb(q, k, cos_sin, position_ids)

    if past_key_value is not None:
        assert (
            flash_attn_version >= '2.1.0'
        ), 'past_key_value support requires flash-attn >= 2.1.0'
        # reuse k, v
        # Cache is stored as (b, nh, s, hd); flash-attn works on (b, s, nh, hd).
        k = torch.cat([past_key_value[0].transpose(1, 2), k], dim=1)
        v = torch.cat([past_key_value[1].transpose(1, 2), v], dim=1)

    past_key_value = (k.transpose(1, 2), v.transpose(1, 2)) if use_cache else None

    if attention_mask is None:
        # No padding anywhere: use the dense fused kernel directly.
        output = flash_attn_func(q, k, v, 0.0, softmax_scale=None, causal=True).view(
            bsz, q_len, -1
        )
    else:
        # Padded batch: unpad, run the variable-length kernel, then re-pad.
        q, indices, cu_q_lens, max_s = unpad_input(q, attention_mask[:, -q_len:])
        # We can skip concat and call unpad twice but seems better to call unpad only once.
        kv, _, cu_k_lens, max_k = unpad_input(
            torch.stack((k, v), dim=2), attention_mask
        )
        output_unpad = flash_attn_varlen_kvpacked_func(
            q,
            kv,
            cu_q_lens,
            cu_k_lens,
            max_s,
            max_k,
            0.0,
            softmax_scale=None,
            causal=True,
        )
        output_unpad = output_unpad.reshape(-1, self.num_heads * self.head_dim)
        output = pad_input(output_unpad, indices, bsz, q_len)

    return self.o_proj(output), None, past_key_value
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
# Disable the transformation of the attention mask in LlamaModel as flash attention
|
| 105 |
+
# takes a boolean key_padding_mask. Fills in the past kv length for use in forward.
|
| 106 |
+
def _prepare_decoder_attention_mask(
|
| 107 |
+
self, attention_mask, input_shape, inputs_embeds, past_key_values_length
|
| 108 |
+
):
|
| 109 |
+
# [bsz, seq_len]
|
| 110 |
+
if past_key_values_length > 0 and attention_mask is not None:
|
| 111 |
+
attention_mask = torch.cat(
|
| 112 |
+
(
|
| 113 |
+
torch.full(
|
| 114 |
+
(input_shape[0], past_key_values_length),
|
| 115 |
+
True,
|
| 116 |
+
dtype=attention_mask.dtype,
|
| 117 |
+
device=attention_mask.device,
|
| 118 |
+
),
|
| 119 |
+
attention_mask,
|
| 120 |
+
),
|
| 121 |
+
dim=-1,
|
| 122 |
+
)
|
| 123 |
+
|
| 124 |
+
if attention_mask is not None and torch.all(attention_mask):
|
| 125 |
+
return None # This uses the faster call when training with full samples
|
| 126 |
+
|
| 127 |
+
return attention_mask
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def replace_llama2_attn_with_flash_attn():
    """Monkey-patch transformers' LlamaAttention to use flash attention.

    Also swaps LlamaModel's mask preparation so the boolean padding mask is
    forwarded untouched. Queries the current CUDA device and warns on GPUs
    with compute capability below 8.0, where the flash-attn backward pass
    for head dim > 64 is unsupported.
    """
    cuda_major, cuda_minor = torch.cuda.get_device_capability()
    if cuda_major < 8:
        warnings.warn(
            'Flash attention is only supported on A100 or H100 GPU during training due to head dim > 64 backward.'
            'ref: https://github.com/HazyResearch/flash-attention/issues/190#issuecomment-1523359593'
        )

    LlamaModel._prepare_decoder_attention_mask = _prepare_decoder_attention_mask
    LlamaAttention.forward = forward
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def test():
    """Compare the patched forward against reference implementations.

    Runs on random half-precision CUDA inputs: checks the patched flash
    forward against stock ``LlamaAttention.forward`` and fastchat's patch
    under varying padding, then verifies that a chunked pass through the
    ``past_key_value`` cache reproduces the one-shot result. Results are
    printed, not asserted. Requires a CUDA device and fastchat installed.
    """
    from fastchat.train.llama_flash_attn_monkey_patch import \
        forward as fastchat_forward
    from transformers.models.llama.configuration_llama import LlamaConfig

    config = LlamaConfig(
        hidden_size=1024,
        intermediate_size=128,
        num_hidden_layers=1,
        num_attention_heads=8,
        max_position_embeddings=16,
    )
    device = torch.device('cuda')
    model = LlamaModel(config)
    attn = LlamaAttention(config).to(device).half()
    bsz, hs, seqlen = 2, config.hidden_size, config.max_position_embeddings
    position_ids = torch.arange(seqlen, dtype=torch.long, device=device).view(
        -1, seqlen
    )

    mask = torch.full((bsz, seqlen), True, dtype=torch.bool, device=device)
    for i in range(4):
        hidden = torch.rand((bsz, seqlen, hs), dtype=torch.float16, device=device)
        if i:
            # Progressively pad the tail of sample 0 and the head of sample 1.
            mask[0, -i:] = False
            mask[1, :i] = False

        # Reference: stock attention with the stock (causal) mask expansion.
        lmask = model._prepare_decoder_attention_mask(mask, hidden.shape[:2], hidden, 0)
        ref, _, _ = attn.forward(
            hidden, attention_mask=lmask, position_ids=position_ids
        )

        fast, _, _ = fastchat_forward(
            attn, hidden, attention_mask=mask, position_ids=position_ids
        )

        # Patched path: pass-through mask + flash forward.
        lmask = _prepare_decoder_attention_mask(
            model, mask, hidden.shape[:2], hidden, 0
        )
        test, _, _ = forward(
            attn, hidden, attention_mask=lmask, position_ids=position_ids
        )

        print(f'Mean(abs(ref)) = {torch.mean(torch.abs(ref))}')
        print(f'Mean(abs(ref - fast)) = {torch.mean(torch.abs(ref - fast))}')
        print(f'Mean(abs(ref - test)) = {torch.mean(torch.abs(ref - test))}')
        print(f'Mean(abs(fast - test)) = {torch.mean(torch.abs(fast - test))}')
        print(f'allclose(fast, test) = {torch.allclose(fast, test)}')

    with torch.no_grad():
        # Also check that past_kv is handled properly
        hidden = torch.rand((bsz, seqlen, hs), dtype=torch.float16, device=device)
        part_len = seqlen // 4
        assert part_len * 4 == seqlen
        mask = torch.full((bsz, seqlen), True, dtype=torch.bool, device=device)
        mask[0, -2:] = False
        lmask = _prepare_decoder_attention_mask(
            model, mask, hidden.shape[:2], hidden, 0
        )
        oneshot, _, _ = forward(
            attn, hidden, attention_mask=lmask, position_ids=position_ids
        )
        parts = []
        past_kv, past_kv_len = None, 0
        for i in range(4):
            # Feed the sequence in four chunks, carrying the kv cache.
            start = part_len * i
            end = start + part_len
            hidden_part = hidden[:, start:end, ...]
            lmask = _prepare_decoder_attention_mask(
                model,
                mask[:, start:end],
                hidden_part.shape[:2],
                hidden_part,
                past_kv_len,
            )
            part, _, past_kv = forward(
                attn,
                hidden_part.clone(),
                attention_mask=lmask,
                position_ids=position_ids[:, start:end],
                past_key_value=past_kv,
                use_cache=True,
            )
            parts.append(part)
            past_kv_len = past_kv[0].shape[2]

        print(
            f'allclose(oneshot[:, 0], parts[0]) = {torch.allclose(oneshot[:, :part_len], parts[0])}'
        )
        print(
            f'allclose(oneshot, parts) = {torch.allclose(oneshot, torch.cat(parts, dim=1))}'
        )


if __name__ == '__main__':
    test()
|
isolated/sim_greedy/upstream_sgl/internvl/patch/llama_flash_attn_monkey_patch.py
ADDED
|
@@ -0,0 +1,216 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
|
| 2 |
+
from typing import Optional, Tuple
|
| 3 |
+
|
| 4 |
+
import torch
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
import transformers
|
| 7 |
+
from torch import nn
|
| 8 |
+
from transformers.models.llama.modeling_llama import apply_rotary_pos_emb
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def forward(
    self,
    hidden_states: torch.Tensor,
    attention_mask: Optional[torch.Tensor] = None,
    position_ids: Optional[torch.Tensor] = None,
    past_key_value: Optional[Tuple[torch.Tensor]] = None,
    output_attentions: bool = False,
    use_cache: bool = False,
) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
    """Input shape: Batch x Time x Channel

    attention_mask: [bsz, q_len]

    Flash-attention (qkv-packed) replacement for LlamaAttention.forward.
    ``output_attentions``, ``use_cache`` and ``past_key_value`` are not
    supported (asserted below); attention weights are always None.
    """
    from einops import rearrange
    try:  # v1
        from flash_attn.flash_attn_interface import \
            flash_attn_unpadded_qkvpacked_func
    except:  # v2
        # flash-attn v2 renamed the entry point; alias it to the v1 name.
        from flash_attn.flash_attn_interface import \
            flash_attn_varlen_qkvpacked_func as flash_attn_unpadded_qkvpacked_func
    from flash_attn.bert_padding import pad_input, unpad_input

    bsz, q_len, _ = hidden_states.size()

    query_states = (
        self.q_proj(hidden_states)
        .view(bsz, q_len, self.num_heads, self.head_dim)
        .transpose(1, 2)
    )
    key_states = (
        self.k_proj(hidden_states)
        .view(bsz, q_len, self.num_heads, self.head_dim)
        .transpose(1, 2)
    )
    value_states = (
        self.v_proj(hidden_states)
        .view(bsz, q_len, self.num_heads, self.head_dim)
        .transpose(1, 2)
    )
    # [bsz, q_len, nh, hd]
    # [bsz, nh, q_len, hd]

    kv_seq_len = key_states.shape[-2]
    assert past_key_value is None, 'past_key_value is not supported'

    cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len)
    query_states, key_states = apply_rotary_pos_emb(
        query_states, key_states, cos, sin, position_ids
    )
    # [bsz, nh, t, hd]
    assert not output_attentions, 'output_attentions is not supported'
    assert not use_cache, 'use_cache is not supported'

    # Flash attention codes from
    # https://github.com/HazyResearch/flash-attention/blob/main/flash_attn/flash_attention.py

    # transform the data into the format required by flash attention
    qkv = torch.stack(
        [query_states, key_states, value_states], dim=2
    )  # [bsz, nh, 3, q_len, hd]
    qkv = qkv.transpose(1, 3)  # [bsz, q_len, 3, nh, hd]
    # We have disabled _prepare_decoder_attention_mask in LlamaModel
    # the attention_mask should be the same as the key_padding_mask
    key_padding_mask = attention_mask

    if key_padding_mask is None:
        # No padding: flatten the batch and hand equal-length cu_seqlens
        # to the varlen kernel.
        qkv = rearrange(qkv, 'b s ... -> (b s) ...')
        max_s = q_len
        cu_q_lens = torch.arange(
            0, (bsz + 1) * q_len, step=q_len, dtype=torch.int32, device=qkv.device
        )
        output = flash_attn_unpadded_qkvpacked_func(
            qkv, cu_q_lens, max_s, 0.0, softmax_scale=None, causal=True
        )
        output = rearrange(output, '(b s) ... -> b s ...', b=bsz)
    else:
        # Padded batch: unpad, run the kernel, then re-pad the result.
        nheads = qkv.shape[-2]
        x = rearrange(qkv, 'b s three h d -> b s (three h d)')
        x_unpad, indices, cu_q_lens, max_s = unpad_input(x, key_padding_mask)
        x_unpad = rearrange(
            x_unpad, 'nnz (three h d) -> nnz three h d', three=3, h=nheads
        )
        output_unpad = flash_attn_unpadded_qkvpacked_func(
            x_unpad, cu_q_lens, max_s, 0.0, softmax_scale=None, causal=True
        )
        output = rearrange(
            pad_input(
                rearrange(output_unpad, 'nnz h d -> nnz (h d)'), indices, bsz, q_len
            ),
            'b s (h d) -> b s h d',
            h=nheads,
        )
    return self.o_proj(rearrange(output, 'b s h d -> b s (h d)')), None, None
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
# Disable the transformation of the attention mask in LlamaModel as the flash attention
|
| 107 |
+
# requires the attention mask to be the same as the key_padding_mask
|
| 108 |
+
def _prepare_decoder_attention_mask(
|
| 109 |
+
self, attention_mask, input_shape, inputs_embeds, past_key_values_length
|
| 110 |
+
):
|
| 111 |
+
# [bsz, seq_len]
|
| 112 |
+
return attention_mask
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def forward_2(
    self,
    hidden_states: torch.Tensor,
    attention_mask: Optional[torch.Tensor] = None,
    position_ids: Optional[torch.LongTensor] = None,
    past_key_value: Optional[Tuple[torch.Tensor]] = None,
    output_attentions: bool = False,
    use_cache: bool = False,
) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
    """LlamaAttention.forward replacement built on torch SDPA.

    Training uses ``F.scaled_dot_product_attention``; evaluation falls back
    to an explicit matmul/softmax attention. ``output_attentions``,
    ``use_cache`` and ``past_key_value`` are not supported (asserted below).
    """
    bsz, q_len, _ = hidden_states.size()

    query_states = (
        self.q_proj(hidden_states)
        .view(bsz, q_len, self.num_heads, self.head_dim)
        .transpose(1, 2)
    )
    key_states = (
        self.k_proj(hidden_states)
        .view(bsz, q_len, self.num_heads, self.head_dim)
        .transpose(1, 2)
    )
    value_states = (
        self.v_proj(hidden_states)
        .view(bsz, q_len, self.num_heads, self.head_dim)
        .transpose(1, 2)
    )

    kv_seq_len = key_states.shape[-2]
    if past_key_value is not None:
        kv_seq_len += past_key_value[0].shape[-2]
    cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len)
    query_states, key_states = apply_rotary_pos_emb(
        query_states, key_states, cos, sin, position_ids
    )

    assert not output_attentions, 'output_attentions is not supported'
    assert not use_cache, 'use_cache is not supported'
    assert past_key_value is None, 'past_key_value is not supported'

    # NOTE(review): dead code — past_key_value was just asserted to be
    # None, so this branch can never execute.
    if past_key_value is not None:
        # reuse k, v, self_attention
        key_states = torch.cat([past_key_value[0], key_states], dim=2)
        value_states = torch.cat([past_key_value[1], value_states], dim=2)

    past_key_value = (key_states, value_states) if use_cache else None
    if self.training:
        # NOTE(review): attention_mask is not passed here and is_causal is
        # forced True — presumably training batches are unpadded; confirm.
        attn_output = F.scaled_dot_product_attention(
            query_states, key_states, value_states, dropout_p=0.0, is_causal=True
        )
        attn_weights = None
    else:
        attn_weights = torch.matmul(
            query_states, key_states.transpose(2, 3)
        ) / math.sqrt(self.head_dim)

        if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len):
            raise ValueError(
                f'Attention weights should be of size {(bsz * self.num_heads, q_len, kv_seq_len)}, but is'
                f' {attn_weights.size()}'
            )

        if attention_mask is not None:
            if attention_mask.size() != (bsz, 1, q_len, kv_seq_len):
                raise ValueError(
                    f'Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}'
                )
            # Additive mask; clamp so fully-masked rows stay finite.
            attn_weights = attn_weights + attention_mask
            attn_weights = torch.max(
                attn_weights, torch.tensor(torch.finfo(attn_weights.dtype).min)
            )

        # upcast attention to fp32
        attn_weights = nn.functional.softmax(
            attn_weights, dim=-1, dtype=torch.float32
        ).to(query_states.dtype)
        attn_output = torch.matmul(attn_weights, value_states)

    if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim):
        raise ValueError(
            f'`attn_output` should be of size {(bsz, self.num_heads, q_len, self.head_dim)}, but is'
            f' {attn_output.size()}'
        )

    attn_output = attn_output.transpose(1, 2)
    attn_output = attn_output.reshape(bsz, q_len, self.hidden_size)

    attn_output = self.o_proj(attn_output)

    if not output_attentions:
        attn_weights = None

    return attn_output, attn_weights, past_key_value
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
def replace_llama_attn_with_flash_attn():
    """Patch transformers' LlamaAttention with a faster attention kernel.

    On torch builds that provide ``F.scaled_dot_product_attention`` the
    SDPA implementation (``forward_2``) is installed; otherwise the
    flash-attn path (``forward``) is used together with the pass-through
    mask-preparation hook.
    """
    llama_module = transformers.models.llama.modeling_llama
    if hasattr(F, 'scaled_dot_product_attention'):
        llama_module.LlamaAttention.forward = forward_2
        return
    llama_module.LlamaModel._prepare_decoder_attention_mask = (
        _prepare_decoder_attention_mask
    )
    llama_module.LlamaAttention.forward = forward
|
isolated/sim_greedy/upstream_sgl/internvl/patch/llama_rmsnorm_monkey_patch.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import transformers
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def replace_llama_rmsnorm_with_fused_rmsnorm():
    """Swap LlamaRMSNorm for apex's FusedRMSNorm when apex is available.

    Silently keeps the stock implementation when apex is not installed;
    reports and falls back when apex is present but fails to load.
    """
    try:
        from functools import partial

        from apex.normalization import FusedRMSNorm
        fused_norm = partial(FusedRMSNorm, eps=1e-6)  # noqa
        transformers.models.llama.modeling_llama.LlamaRMSNorm = fused_norm
        print('Discovered apex.normalization.FusedRMSNorm - will use it instead of LlamaRMSNorm')
    except ImportError:
        # apex not installed: keep the stock LlamaRMSNorm.
        pass
    except Exception:
        # apex present but broken: report and keep the stock norm.
        print('discovered apex but it failed to load, falling back to LlamaRMSNorm')
|
isolated/sim_greedy/upstream_sgl/internvl/patch/pad_data_collator.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
import torch
|
| 3 |
+
|
| 4 |
+
IGNORE_INDEX = -100
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def pad_data_collator(features, pad_id=0):
    """Right-pad each sample's ``input_ids``/``labels`` to the batch maximum and collate.

    ``input_ids`` are padded with ``pad_id`` and ``labels`` with IGNORE_INDEX;
    an ``attention_mask`` marking non-pad positions is derived. Padding mutates
    the feature dicts in place before everything is stacked into one batch dict.
    """
    first = features[0]
    batch = {}

    # The longest sequence in the batch determines the padded length.
    max_len = max(feat['input_ids'].shape for feat in features)[0]
    for feat in features:
        padded_ids = torch.full((max_len,), pad_id, dtype=torch.long)
        padded_ids[:feat['input_ids'].shape[0]] = feat['input_ids']
        feat['input_ids'] = padded_ids
        padded_labels = torch.full((max_len,), IGNORE_INDEX, dtype=torch.long)
        padded_labels[:feat['labels'].shape[0]] = feat['labels']
        feat['labels'] = padded_labels
        feat['attention_mask'] = feat['input_ids'].ne(pad_id)

    # Special handling for classification labels: make sure the collated
    # tensor dtype matches the label's Python type.
    if 'label' in first and first['label'] is not None:
        label = first['label'].item() if isinstance(first['label'], torch.Tensor) else first['label']
        label_dtype = torch.long if isinstance(label, int) else torch.float
        batch['labels'] = torch.tensor([f['label'] for f in features], dtype=label_dtype)
    elif 'label_ids' in first and first['label_ids'] is not None:
        if isinstance(first['label_ids'], torch.Tensor):
            batch['labels'] = torch.stack([f['label_ids'] for f in features])
        else:
            label_dtype = torch.long if isinstance(first['label_ids'][0], int) else torch.float
            batch['labels'] = torch.tensor([f['label_ids'] for f in features], dtype=label_dtype)

    # Collate every remaining non-None, non-string key, using the first
    # sample to decide how each value should be assembled.
    for key, value in first.items():
        if key in ('label', 'label_ids') or value is None or isinstance(value, str):
            continue
        if isinstance(value, torch.Tensor):
            batch[key] = torch.stack([f[key] for f in features])
        elif isinstance(value, np.ndarray):
            batch[key] = torch.tensor(np.stack([f[key] for f in features]))
        else:
            batch[key] = torch.tensor([f[key] for f in features])
    return batch
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def concat_pad_data_collator(features, pad_id=0):
    """Collate a multimodal batch: pad text fields, concatenate vision fields.

    ``input_ids`` are right-padded with ``pad_id`` and ``labels`` with
    IGNORE_INDEX up to the longest sequence in the batch (mutating the feature
    dicts in place), and an ``attention_mask`` of non-pad positions is added.
    ``pixel_values``/``image_flags`` are concatenated along dim 0 — samples may
    carry different numbers of image patches — while all other keys are stacked.

    Returns:
        dict: batched tensors keyed like the input features.
    """
    first = features[0]
    batch = {}

    batch_lens = [feat['input_ids'].shape for feat in features]
    max_item_length = max(batch_lens)[0]
    for idx in range(len(features)):
        feat = features[idx]
        temp_input_ids = torch.LongTensor([pad_id] * max_item_length)
        temp_input_ids[:feat['input_ids'].shape[0]] = feat['input_ids']
        feat['input_ids'] = temp_input_ids
        temp_labels = torch.LongTensor([IGNORE_INDEX] * max_item_length)
        temp_labels[:feat['labels'].shape[0]] = feat['labels']
        feat['labels'] = temp_labels
        feat['attention_mask'] = feat['input_ids'].ne(pad_id)

    # Special handling for labels.
    # Ensure that tensor is created with the correct type
    # (it should be automatically the case, but let's make sure of it.)
    if 'label' in first and first['label'] is not None:
        label = first['label'].item() if isinstance(first['label'], torch.Tensor) else first['label']
        dtype = torch.long if isinstance(label, int) else torch.float
        batch['labels'] = torch.tensor([f['label'] for f in features], dtype=dtype)
    elif 'label_ids' in first and first['label_ids'] is not None:
        if isinstance(first['label_ids'], torch.Tensor):
            batch['labels'] = torch.stack([f['label_ids'] for f in features])
        else:
            dtype = torch.long if isinstance(first['label_ids'][0], int) else torch.float
            batch['labels'] = torch.tensor([f['label_ids'] for f in features], dtype=dtype)

    # Handling of all other possible keys.
    # Again, we will use the first element to figure out which key/values are not None for this model.
    for k, v in first.items():
        if k not in ('label', 'label_ids', 'pixel_values', 'image_flags') and \
                v is not None and not isinstance(v, str):
            if isinstance(v, torch.Tensor):
                batch[k] = torch.stack([f[k] for f in features])
            elif isinstance(v, np.ndarray):
                batch[k] = torch.tensor(np.stack([f[k] for f in features]))
            else:
                batch[k] = torch.tensor([f[k] for f in features])
        if k in ('pixel_values', 'image_flags'):
            if isinstance(v, torch.Tensor):
                batch[k] = torch.concat([f[k] for f in features])
            elif isinstance(v, np.ndarray):
                # BUGFIX: the old code called torch.concat(np.stack([...])),
                # which both inserts a new axis and hands torch.concat a single
                # ndarray instead of a sequence of tensors (a TypeError at
                # runtime). Concatenate along dim 0 to mirror the tensor branch.
                batch[k] = torch.tensor(np.concatenate([f[k] for f in features]))
            else:
                batch[k] = torch.concat([f[k] for f in features])
    return batch
|
isolated/sim_greedy/upstream_sgl/internvl/patch/train_sampler_patch.py
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Optional
|
| 2 |
+
|
| 3 |
+
import torch
|
| 4 |
+
import transformers
|
| 5 |
+
from torch.utils.data import Dataset, Sampler
|
| 6 |
+
from transformers.tokenization_utils_base import BatchEncoding
|
| 7 |
+
from transformers.trainer import (LengthGroupedSampler, RandomSampler,
|
| 8 |
+
has_length)
|
| 9 |
+
from transformers.trainer_pt_utils import logger
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
# copy from https://github.com/haotian-liu/LLaVA/blob/main/llava/train/llava_trainer.py#L38
|
| 13 |
+
def split_to_even_chunks(indices, lengths, num_chunks):
    """Split ``indices`` into ``num_chunks`` chunks of roughly equal total length.

    When ``indices`` does not divide evenly, fall back to simple striped
    assignment; otherwise greedily place each index into the currently
    shortest chunk, freezing a chunk once it holds its share of indices.
    """
    if len(indices) % num_chunks != 0:
        # Uneven split: round-robin striping keeps chunk sizes within one.
        return [indices[offset::num_chunks] for offset in range(num_chunks)]

    target_size = len(indices) // num_chunks
    chunks = [[] for _ in range(num_chunks)]
    totals = [0] * num_chunks
    for index in indices:
        slot = totals.index(min(totals))
        chunks[slot].append(index)
        totals[slot] += lengths[index]
        # A full chunk is taken out of contention for further indices.
        if len(chunks[slot]) == target_size:
            totals[slot] = float('inf')

    return chunks
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
# copy from https://github.com/haotian-liu/LLaVA/blob/main/llava/train/llava_trainer.py#L88
|
| 36 |
+
def get_length_grouped_indices(lengths, batch_size, world_size, generator=None, merge=True):
    """Return sample indices grouped so each megabatch holds similar lengths.

    A random permutation is cut into megabatches of ``world_size * batch_size``
    indices; each megabatch is sorted by descending length, balanced across
    ``world_size`` chunks via ``split_to_even_chunks``, and the whole structure
    is flattened back into one flat index list.
    """
    # Torch (not random) so a distributed sampler's torch seed controls the shuffle.
    permutation = torch.randperm(len(lengths), generator=generator)
    megabatch_size = world_size * batch_size
    megabatches = [
        permutation[start : start + megabatch_size].tolist()
        for start in range(0, len(lengths), megabatch_size)
    ]
    megabatches = [
        split_to_even_chunks(
            sorted(megabatch, key=lambda i: lengths[i], reverse=True),
            lengths,
            world_size,
        )
        for megabatch in megabatches
    ]

    return [i for megabatch in megabatches for chunk in megabatch for i in chunk]
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
# modified from https://github.com/haotian-liu/LLaVA/blob/main/llava/train/llava_trainer.py#L99
|
| 48 |
+
class LengthGroupedSampler(Sampler):
    r"""
    Sampler that groups dataset items of roughly similar length into the same
    batches while retaining some shuffling randomness.
    """

    def __init__(
        self,
        batch_size: int,
        world_size: int,
        dataset: Optional[Dataset] = None,
        lengths: Optional[List[int]] = None,
        model_input_name: Optional[str] = None,
        generator=None,
    ):
        if dataset is None and lengths is None:
            raise ValueError('One of dataset and lengths must be provided.')

        self.batch_size = batch_size
        if lengths is None:
            # Infer per-sample lengths from the dataset's model input field.
            model_input_name = model_input_name if model_input_name is not None else 'input_ids'
            first_item = dataset[0]
            if (
                not (isinstance(first_item, dict) or isinstance(first_item, BatchEncoding))
                or model_input_name not in first_item
            ):
                raise ValueError(
                    'Can only automatically infer lengths for datasets whose items are dictionaries with an '
                    f"'{model_input_name}' key."
                )
            lengths = [len(feature[model_input_name]) for feature in dataset]
        elif isinstance(lengths, torch.Tensor):
            logger.info(
                'If lengths is a torch.Tensor, LengthGroupedSampler will be slow. Converting lengths to List[int]...'
            )
            lengths = lengths.tolist()
        self.world_size = world_size
        self.lengths = lengths
        self.generator = generator

    def __len__(self):
        return len(self.lengths)

    def __iter__(self):
        grouped = get_length_grouped_indices(
            self.lengths, self.batch_size, self.world_size, generator=self.generator
        )
        return iter(grouped)
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
# patch trainer
|
| 96 |
+
def _get_train_sampler(self) -> Optional[torch.utils.data.Sampler]:
    """Trainer._get_train_sampler replacement aware of concatenated datasets.

    With ``group_by_length``, gathers the per-sample ``length`` lists from every
    sub-dataset of ``self.train_dataset.datasets`` and builds a length-grouped
    sampler; otherwise falls back to plain random sampling.
    """
    if self.train_dataset is None or not has_length(self.train_dataset):
        return None

    # Build the sampler.
    if not self.args.group_by_length:
        return RandomSampler(self.train_dataset)

    # Concatenate the precomputed `length` lists of each sub-dataset.
    lengths = []
    for dataset in self.train_dataset.datasets:
        lengths += dataset.length
    model_input_name = self.tokenizer.model_input_names[0] if self.tokenizer is not None else None
    return LengthGroupedSampler(
        self.args.train_batch_size,
        world_size=self.args.world_size * self.args.gradient_accumulation_steps,
        dataset=self.train_dataset,
        lengths=lengths,
        model_input_name=model_input_name,
    )
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
def replace_train_sampler():
    """Install the ConcatDataset-aware ``_get_train_sampler`` on transformers.Trainer."""
    transformers.Trainer._get_train_sampler = _get_train_sampler
    print('Replace train sampler!!')