Upload folder using huggingface_hub
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .DS_Store +0 -0
- .gitattributes +48 -0
- OnZeta/11.log +152 -0
- OnZeta/LICENSE +201 -0
- OnZeta/MAPLS/__init__.py +0 -0
- OnZeta/MAPLS/__pycache__/__init__.cpython-39.pyc +0 -0
- OnZeta/MAPLS/__pycache__/common.cpython-39.pyc +0 -0
- OnZeta/MAPLS/__pycache__/common_cuda.cpython-39.pyc +0 -0
- OnZeta/MAPLS/__pycache__/mapls.cpython-39.pyc +0 -0
- OnZeta/MAPLS/__pycache__/mapls_cuda.cpython-39.pyc +0 -0
- OnZeta/MAPLS/common.py +146 -0
- OnZeta/MAPLS/common_cuda.py +80 -0
- OnZeta/MAPLS/mapls.py +153 -0
- OnZeta/MAPLS/mapls_cuda.py +76 -0
- OnZeta/README.md +24 -0
- OnZeta/clip_cifar10.py +110 -0
- OnZeta/code_draft.py +3 -0
- OnZeta/lame/__pycache__/lame.cpython-39.pyc +0 -0
- OnZeta/lame/lame.py +153 -0
- OnZeta/lame/lame_px.py +71 -0
- OnZeta/logs/debug_onzeta_eval_2025-06-06_22-11-27.log +3 -0
- OnZeta/logs/debug_onzeta_eval_2025-06-07_00-13-37.log +140 -0
- OnZeta/logs/debug_onzeta_eval_2025-06-11_22-30-48.log +150 -0
- OnZeta/logs/debug_onzeta_eval_2025-06-11_22-37-19.log +150 -0
- OnZeta/logs/debug_onzeta_eval_2025-06-11_22-52-28.log +150 -0
- OnZeta/logs/debug_onzeta_eval_2025-06-11_23-00-32.log +150 -0
- OnZeta/logs/debug_onzeta_eval_2025-07-22_13-00-45.log +0 -0
- OnZeta/logs/debug_onzeta_eval_2025-07-22_13-01-26.log +0 -0
- OnZeta/logs/debug_onzeta_eval_2025-07-22_13-03-56.log +0 -0
- OnZeta/logs/debug_onzeta_eval_2025-07-22_13-04-08.log +0 -0
- OnZeta/logs/debug_onzeta_eval_2025-07-22_13-04-24.log +0 -0
- OnZeta/logs/mapls_inloop_mapls_only_RN50.log +10 -0
- OnZeta/logs/mapls_inloop_mapls_only_vitb16.log +10 -0
- OnZeta/logs/onzeta_eval.log +191 -0
- OnZeta/logs/onzeta_eval_2025-06-06_13-25-22.log +140 -0
- OnZeta/logs/onzeta_eval_2025-06-06_21-54-46.log +155 -0
- OnZeta/logs/onzeta_eval_2025-06-11_20-29-37.log +150 -0
- OnZeta/logs/onzeta_eval_2025-06-11_21-19-15.log +150 -0
- OnZeta/logs/onzeta_eval_2025-06-11_21-44-32.log +150 -0
- OnZeta/logs/onzeta_eval_2025-06-11_22-09-19.log +150 -0
- OnZeta/main_online_cifar10.py +209 -0
- OnZeta/main_online_cifar100.py +292 -0
- OnZeta/main_online_imagenet_adap_freq.py +353 -0
- OnZeta/main_online_imagenet_inloop_online_MAPLS_only.py +326 -0
- OnZeta/main_online_imagenet_mapls.py +417 -0
- OnZeta/main_online_imagenet_mapls_aug.py +434 -0
- OnZeta/main_online_imagenet_mapls_inloop.py +347 -0
- OnZeta/main_online_imagenet_mapls_lame.py +366 -0
- OnZeta/main_online_imagenet_mapls_nonlinear.py +368 -0
- OnZeta/main_online_imagenet_margin_softmax.py +346 -0
.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
.gitattributes
CHANGED
|
@@ -13582,3 +13582,51 @@ tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx16_cscFalse_ctpend/seed3/prompt_l
|
|
| 13582 |
tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx4_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13583 |
tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx4_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13584 |
tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx4_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13582 |
tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx4_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13583 |
tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx4_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13584 |
tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx4_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13585 |
+
OnZeta/tpt/pretrained_cocoop/rn101_ep50_16shots/nctx16_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13586 |
+
OnZeta/tpt/pretrained_cocoop/rn101_ep50_16shots/nctx16_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13587 |
+
OnZeta/tpt/pretrained_cocoop/rn101_ep50_16shots/nctx16_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13588 |
+
OnZeta/tpt/pretrained_cocoop/rn101_ep50_16shots/nctx4_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13589 |
+
OnZeta/tpt/pretrained_cocoop/rn101_ep50_16shots/nctx4_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13590 |
+
OnZeta/tpt/pretrained_cocoop/rn101_ep50_16shots/nctx4_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13591 |
+
OnZeta/tpt/pretrained_cocoop/rn50_ep50_16shots/nctx16_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13592 |
+
OnZeta/tpt/pretrained_cocoop/rn50_ep50_16shots/nctx16_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13593 |
+
OnZeta/tpt/pretrained_cocoop/rn50_ep50_16shots/nctx16_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13594 |
+
OnZeta/tpt/pretrained_cocoop/rn50_ep50_16shots/nctx4_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13595 |
+
OnZeta/tpt/pretrained_cocoop/rn50_ep50_16shots/nctx4_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13596 |
+
OnZeta/tpt/pretrained_cocoop/rn50_ep50_16shots/nctx4_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13597 |
+
OnZeta/tpt/pretrained_cocoop/vit_b16_ep50_16shots/nctx16_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13598 |
+
OnZeta/tpt/pretrained_cocoop/vit_b16_ep50_16shots/nctx16_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13599 |
+
OnZeta/tpt/pretrained_cocoop/vit_b16_ep50_16shots/nctx16_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13600 |
+
OnZeta/tpt/pretrained_cocoop/vit_b16_ep50_16shots/nctx4_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13601 |
+
OnZeta/tpt/pretrained_cocoop/vit_b16_ep50_16shots/nctx4_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13602 |
+
OnZeta/tpt/pretrained_cocoop/vit_b16_ep50_16shots/nctx4_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13603 |
+
OnZeta/tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx16_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13604 |
+
OnZeta/tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx16_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13605 |
+
OnZeta/tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx16_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13606 |
+
OnZeta/tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx4_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13607 |
+
OnZeta/tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx4_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13608 |
+
OnZeta/tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx4_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13609 |
+
Onzeta_2/tpt/pretrained_cocoop/rn101_ep50_16shots/nctx16_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13610 |
+
Onzeta_2/tpt/pretrained_cocoop/rn101_ep50_16shots/nctx16_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13611 |
+
Onzeta_2/tpt/pretrained_cocoop/rn101_ep50_16shots/nctx16_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13612 |
+
Onzeta_2/tpt/pretrained_cocoop/rn101_ep50_16shots/nctx4_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13613 |
+
Onzeta_2/tpt/pretrained_cocoop/rn101_ep50_16shots/nctx4_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13614 |
+
Onzeta_2/tpt/pretrained_cocoop/rn101_ep50_16shots/nctx4_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13615 |
+
Onzeta_2/tpt/pretrained_cocoop/rn50_ep50_16shots/nctx16_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13616 |
+
Onzeta_2/tpt/pretrained_cocoop/rn50_ep50_16shots/nctx16_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13617 |
+
Onzeta_2/tpt/pretrained_cocoop/rn50_ep50_16shots/nctx16_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13618 |
+
Onzeta_2/tpt/pretrained_cocoop/rn50_ep50_16shots/nctx4_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13619 |
+
Onzeta_2/tpt/pretrained_cocoop/rn50_ep50_16shots/nctx4_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13620 |
+
Onzeta_2/tpt/pretrained_cocoop/rn50_ep50_16shots/nctx4_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13621 |
+
Onzeta_2/tpt/pretrained_cocoop/vit_b16_ep50_16shots/nctx16_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13622 |
+
Onzeta_2/tpt/pretrained_cocoop/vit_b16_ep50_16shots/nctx16_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13623 |
+
Onzeta_2/tpt/pretrained_cocoop/vit_b16_ep50_16shots/nctx16_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13624 |
+
Onzeta_2/tpt/pretrained_cocoop/vit_b16_ep50_16shots/nctx4_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13625 |
+
Onzeta_2/tpt/pretrained_cocoop/vit_b16_ep50_16shots/nctx4_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13626 |
+
Onzeta_2/tpt/pretrained_cocoop/vit_b16_ep50_16shots/nctx4_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13627 |
+
Onzeta_2/tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx16_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13628 |
+
Onzeta_2/tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx16_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13629 |
+
Onzeta_2/tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx16_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13630 |
+
Onzeta_2/tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx4_cscFalse_ctpend/seed1/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13631 |
+
Onzeta_2/tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx4_cscFalse_ctpend/seed2/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
| 13632 |
+
Onzeta_2/tpt/pretrained_cocoop/vit_b32_ep50_16shots/nctx4_cscFalse_ctpend/seed3/prompt_learner/model.pth.tar-50 filter=lfs diff=lfs merge=lfs -text
|
OnZeta/11.log
ADDED
|
@@ -0,0 +1,152 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 2 |
+
the beta is 1.0
|
| 3 |
+
load pre-trained model
|
| 4 |
+
load data
|
| 5 |
+
obtain text proxy
|
| 6 |
+
accuracy with text proxy: 41.91
|
| 7 |
+
online zero-shot transfer: repeat 5 times
|
| 8 |
+
lam is 0.7000
|
| 9 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 10 |
+
the beta is 1.0
|
| 11 |
+
load pre-trained model
|
| 12 |
+
load data
|
| 13 |
+
obtain text proxy
|
| 14 |
+
accuracy with text proxy: 41.91
|
| 15 |
+
online zero-shot transfer: repeat 5 times
|
| 16 |
+
lam is 0.7000
|
| 17 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 18 |
+
the beta is 1.0
|
| 19 |
+
load pre-trained model
|
| 20 |
+
load data
|
| 21 |
+
obtain text proxy
|
| 22 |
+
accuracy with text proxy: 41.91
|
| 23 |
+
online zero-shot transfer: repeat 5 times
|
| 24 |
+
lam is 0.7000
|
| 25 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 26 |
+
the beta is 1.0
|
| 27 |
+
load pre-trained model
|
| 28 |
+
load data
|
| 29 |
+
obtain text proxy
|
| 30 |
+
accuracy with text proxy: 41.91
|
| 31 |
+
online zero-shot transfer: repeat 5 times
|
| 32 |
+
lam is 0.7000
|
| 33 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 34 |
+
the beta is 1.0
|
| 35 |
+
load pre-trained model
|
| 36 |
+
load data
|
| 37 |
+
obtain text proxy
|
| 38 |
+
accuracy with text proxy: 41.91
|
| 39 |
+
online zero-shot transfer: repeat 5 times
|
| 40 |
+
lam is 0.7000
|
| 41 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 42 |
+
the beta is 1.0
|
| 43 |
+
load pre-trained model
|
| 44 |
+
load data
|
| 45 |
+
obtain text proxy
|
| 46 |
+
accuracy with text proxy: 41.91
|
| 47 |
+
online zero-shot transfer: repeat 5 times
|
| 48 |
+
lam is 0.7000
|
| 49 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 50 |
+
the beta is 1.0
|
| 51 |
+
load pre-trained model
|
| 52 |
+
load data
|
| 53 |
+
obtain text proxy
|
| 54 |
+
accuracy with text proxy: 41.91
|
| 55 |
+
online zero-shot transfer: repeat 5 times
|
| 56 |
+
lam is 0.7000
|
| 57 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 58 |
+
the beta is 1.0
|
| 59 |
+
load pre-trained model
|
| 60 |
+
load data
|
| 61 |
+
obtain text proxy
|
| 62 |
+
accuracy with text proxy: 41.91
|
| 63 |
+
online zero-shot transfer: repeat 5 times
|
| 64 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 65 |
+
the beta is 1.0
|
| 66 |
+
load pre-trained model
|
| 67 |
+
load data
|
| 68 |
+
obtain text proxy
|
| 69 |
+
accuracy with text proxy: 41.91
|
| 70 |
+
online zero-shot transfer: repeat 5 times
|
| 71 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 72 |
+
the beta is 1.0
|
| 73 |
+
load pre-trained model
|
| 74 |
+
load data
|
| 75 |
+
obtain text proxy
|
| 76 |
+
accuracy with text proxy: 41.91
|
| 77 |
+
online zero-shot transfer: repeat 5 times
|
| 78 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 79 |
+
the beta is 1.0
|
| 80 |
+
load pre-trained model
|
| 81 |
+
load data
|
| 82 |
+
obtain text proxy
|
| 83 |
+
accuracy with text proxy: 41.91
|
| 84 |
+
online zero-shot transfer: repeat 5 times
|
| 85 |
+
lam is 0.7000
|
| 86 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 87 |
+
the beta is 1.0
|
| 88 |
+
load pre-trained model
|
| 89 |
+
load data
|
| 90 |
+
obtain text proxy
|
| 91 |
+
accuracy with text proxy: 41.91
|
| 92 |
+
online zero-shot transfer: repeat 5 times
|
| 93 |
+
lam is 0.7000
|
| 94 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 95 |
+
the beta is 1.0
|
| 96 |
+
load pre-trained model
|
| 97 |
+
load data
|
| 98 |
+
obtain text proxy
|
| 99 |
+
accuracy with text proxy: 41.91
|
| 100 |
+
online zero-shot transfer: repeat 5 times
|
| 101 |
+
lam is 0.7000
|
| 102 |
+
lam is 0.7000
|
| 103 |
+
lam is 0.7000
|
| 104 |
+
lam is 0.7000
|
| 105 |
+
lam is 0.7000
|
| 106 |
+
mean acc of onlab is: 47.23
|
| 107 |
+
mean acc of onzeta is: 9.29
|
| 108 |
+
mean acc of MAPLS is: 17.81
|
| 109 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.05)
|
| 110 |
+
the beta is 0.9
|
| 111 |
+
load pre-trained model
|
| 112 |
+
load data
|
| 113 |
+
obtain text proxy
|
| 114 |
+
accuracy with text proxy: 41.91
|
| 115 |
+
online zero-shot transfer: repeat 5 times
|
| 116 |
+
lam is 0.7000
|
| 117 |
+
lam is 0.7000
|
| 118 |
+
lam is 0.7000
|
| 119 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.0)
|
| 120 |
+
the beta is 1.0
|
| 121 |
+
load pre-trained model
|
| 122 |
+
load data
|
| 123 |
+
obtain text proxy
|
| 124 |
+
accuracy with text proxy: 41.91
|
| 125 |
+
online zero-shot transfer: repeat 5 times
|
| 126 |
+
lam is 0.7000
|
| 127 |
+
lam is 0.7000
|
| 128 |
+
lam is 0.7000
|
| 129 |
+
lam is 0.7000
|
| 130 |
+
lam is 0.7000
|
| 131 |
+
mean acc of onlab is: 47.14
|
| 132 |
+
mean acc of onzeta is: 9.65
|
| 133 |
+
mean acc of MAPLS is: 17.93
|
| 134 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.0)
|
| 135 |
+
the beta is 0.9
|
| 136 |
+
load pre-trained model
|
| 137 |
+
load data
|
| 138 |
+
obtain text proxy
|
| 139 |
+
accuracy with text proxy: 41.91
|
| 140 |
+
online zero-shot transfer: repeat 5 times
|
| 141 |
+
lam is 0.7000
|
| 142 |
+
lam is 0.7000
|
| 143 |
+
lam is 0.7000
|
| 144 |
+
lam is 0.7000
|
| 145 |
+
lam is 0.7000
|
| 146 |
+
mean acc of onlab is: 47.14
|
| 147 |
+
mean acc of onzeta is: 12.26
|
| 148 |
+
mean acc of MAPLS is: 12.58
|
| 149 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5, entropy_weight=0.0)
|
| 150 |
+
the beta is 0.8
|
| 151 |
+
load pre-trained model
|
| 152 |
+
load data
|
OnZeta/LICENSE
ADDED
|
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Apache License
|
| 2 |
+
Version 2.0, January 2004
|
| 3 |
+
http://www.apache.org/licenses/
|
| 4 |
+
|
| 5 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 6 |
+
|
| 7 |
+
1. Definitions.
|
| 8 |
+
|
| 9 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 10 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 11 |
+
|
| 12 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 13 |
+
the copyright owner that is granting the License.
|
| 14 |
+
|
| 15 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 16 |
+
other entities that control, are controlled by, or are under common
|
| 17 |
+
control with that entity. For the purposes of this definition,
|
| 18 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 19 |
+
direction or management of such entity, whether by contract or
|
| 20 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 21 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 22 |
+
|
| 23 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 24 |
+
exercising permissions granted by this License.
|
| 25 |
+
|
| 26 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 27 |
+
including but not limited to software source code, documentation
|
| 28 |
+
source, and configuration files.
|
| 29 |
+
|
| 30 |
+
"Object" form shall mean any form resulting from mechanical
|
| 31 |
+
transformation or translation of a Source form, including but
|
| 32 |
+
not limited to compiled object code, generated documentation,
|
| 33 |
+
and conversions to other media types.
|
| 34 |
+
|
| 35 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 36 |
+
Object form, made available under the License, as indicated by a
|
| 37 |
+
copyright notice that is included in or attached to the work
|
| 38 |
+
(an example is provided in the Appendix below).
|
| 39 |
+
|
| 40 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 41 |
+
form, that is based on (or derived from) the Work and for which the
|
| 42 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 43 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 44 |
+
of this License, Derivative Works shall not include works that remain
|
| 45 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 46 |
+
the Work and Derivative Works thereof.
|
| 47 |
+
|
| 48 |
+
"Contribution" shall mean any work of authorship, including
|
| 49 |
+
the original version of the Work and any modifications or additions
|
| 50 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 51 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 52 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 53 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 54 |
+
means any form of electronic, verbal, or written communication sent
|
| 55 |
+
to the Licensor or its representatives, including but not limited to
|
| 56 |
+
communication on electronic mailing lists, source code control systems,
|
| 57 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 58 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 59 |
+
excluding communication that is conspicuously marked or otherwise
|
| 60 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 61 |
+
|
| 62 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 63 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 64 |
+
subsequently incorporated within the Work.
|
| 65 |
+
|
| 66 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 67 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 68 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 69 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 70 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 71 |
+
Work and such Derivative Works in Source or Object form.
|
| 72 |
+
|
| 73 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 74 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 75 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 76 |
+
(except as stated in this section) patent license to make, have made,
|
| 77 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 78 |
+
where such license applies only to those patent claims licensable
|
| 79 |
+
by such Contributor that are necessarily infringed by their
|
| 80 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 81 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 82 |
+
institute patent litigation against any entity (including a
|
| 83 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 84 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 85 |
+
or contributory patent infringement, then any patent licenses
|
| 86 |
+
granted to You under this License for that Work shall terminate
|
| 87 |
+
as of the date such litigation is filed.
|
| 88 |
+
|
| 89 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 90 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 91 |
+
modifications, and in Source or Object form, provided that You
|
| 92 |
+
meet the following conditions:
|
| 93 |
+
|
| 94 |
+
(a) You must give any other recipients of the Work or
|
| 95 |
+
Derivative Works a copy of this License; and
|
| 96 |
+
|
| 97 |
+
(b) You must cause any modified files to carry prominent notices
|
| 98 |
+
stating that You changed the files; and
|
| 99 |
+
|
| 100 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 101 |
+
that You distribute, all copyright, patent, trademark, and
|
| 102 |
+
attribution notices from the Source form of the Work,
|
| 103 |
+
excluding those notices that do not pertain to any part of
|
| 104 |
+
the Derivative Works; and
|
| 105 |
+
|
| 106 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 107 |
+
distribution, then any Derivative Works that You distribute must
|
| 108 |
+
include a readable copy of the attribution notices contained
|
| 109 |
+
within such NOTICE file, excluding those notices that do not
|
| 110 |
+
pertain to any part of the Derivative Works, in at least one
|
| 111 |
+
of the following places: within a NOTICE text file distributed
|
| 112 |
+
as part of the Derivative Works; within the Source form or
|
| 113 |
+
documentation, if provided along with the Derivative Works; or,
|
| 114 |
+
within a display generated by the Derivative Works, if and
|
| 115 |
+
wherever such third-party notices normally appear. The contents
|
| 116 |
+
of the NOTICE file are for informational purposes only and
|
| 117 |
+
do not modify the License. You may add Your own attribution
|
| 118 |
+
notices within Derivative Works that You distribute, alongside
|
| 119 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 120 |
+
that such additional attribution notices cannot be construed
|
| 121 |
+
as modifying the License.
|
| 122 |
+
|
| 123 |
+
You may add Your own copyright statement to Your modifications and
|
| 124 |
+
may provide additional or different license terms and conditions
|
| 125 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 126 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 127 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 128 |
+
the conditions stated in this License.
|
| 129 |
+
|
| 130 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 131 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 132 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 133 |
+
this License, without any additional terms or conditions.
|
| 134 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 135 |
+
the terms of any separate license agreement you may have executed
|
| 136 |
+
with Licensor regarding such Contributions.
|
| 137 |
+
|
| 138 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 139 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 140 |
+
except as required for reasonable and customary use in describing the
|
| 141 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 142 |
+
|
| 143 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 144 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 145 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 146 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 147 |
+
implied, including, without limitation, any warranties or conditions
|
| 148 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 149 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 150 |
+
appropriateness of using or redistributing the Work and assume any
|
| 151 |
+
risks associated with Your exercise of permissions under this License.
|
| 152 |
+
|
| 153 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 154 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 155 |
+
unless required by applicable law (such as deliberate and grossly
|
| 156 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 157 |
+
liable to You for damages, including any direct, indirect, special,
|
| 158 |
+
incidental, or consequential damages of any character arising as a
|
| 159 |
+
result of this License or out of the use or inability to use the
|
| 160 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 161 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 162 |
+
other commercial damages or losses), even if such Contributor
|
| 163 |
+
has been advised of the possibility of such damages.
|
| 164 |
+
|
| 165 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 166 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 167 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 168 |
+
or other liability obligations and/or rights consistent with this
|
| 169 |
+
License. However, in accepting such obligations, You may act only
|
| 170 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 171 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 172 |
+
defend, and hold each Contributor harmless for any liability
|
| 173 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 174 |
+
of your accepting any such warranty or additional liability.
|
| 175 |
+
|
| 176 |
+
END OF TERMS AND CONDITIONS
|
| 177 |
+
|
| 178 |
+
APPENDIX: How to apply the Apache License to your work.
|
| 179 |
+
|
| 180 |
+
To apply the Apache License to your work, attach the following
|
| 181 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
| 182 |
+
replaced with your own identifying information. (Don't include
|
| 183 |
+
the brackets!) The text should be enclosed in the appropriate
|
| 184 |
+
comment syntax for the file format. We also recommend that a
|
| 185 |
+
file or class name and description of purpose be included on the
|
| 186 |
+
same "printed page" as the copyright notice for easier
|
| 187 |
+
identification within third-party archives.
|
| 188 |
+
|
| 189 |
+
Copyright [yyyy] [name of copyright owner]
|
| 190 |
+
|
| 191 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 192 |
+
you may not use this file except in compliance with the License.
|
| 193 |
+
You may obtain a copy of the License at
|
| 194 |
+
|
| 195 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 196 |
+
|
| 197 |
+
Unless required by applicable law or agreed to in writing, software
|
| 198 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 199 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 200 |
+
See the License for the specific language governing permissions and
|
| 201 |
+
limitations under the License.
|
OnZeta/MAPLS/__init__.py
ADDED
|
File without changes
|
OnZeta/MAPLS/__pycache__/__init__.cpython-39.pyc
ADDED
|
Binary file (138 Bytes). View file
|
|
|
OnZeta/MAPLS/__pycache__/common.cpython-39.pyc
ADDED
|
Binary file (5 kB). View file
|
|
|
OnZeta/MAPLS/__pycache__/common_cuda.cpython-39.pyc
ADDED
|
Binary file (2.83 kB). View file
|
|
|
OnZeta/MAPLS/__pycache__/mapls.cpython-39.pyc
ADDED
|
Binary file (3.57 kB). View file
|
|
|
OnZeta/MAPLS/__pycache__/mapls_cuda.cpython-39.pyc
ADDED
|
Binary file (2.25 kB). View file
|
|
|
OnZeta/MAPLS/common.py
ADDED
|
@@ -0,0 +1,146 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Common Utils in Target Label Shift Estimation
|
| 2 |
+
import numpy as np
|
| 3 |
+
from typing import List
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
# Post hoc Label Shift Correction--------------------------------------#
|
| 7 |
+
def lsc(probs: np.ndarray, w: List):
    r"""
    Label Shift Compensation (LSC) with a known target label distribution.

    Given source-domain posteriors P(Y_s=i|X=x) and the ratio
    w = P(Y_t=i) / P(Y_s=i), re-weight and renormalize the posteriors to get
    target-domain predictions q(y|x).

    Args:
        probs: softmax probabilities, shape (N, C). May be a numpy array or a
            torch tensor (CPU or CUDA); tensors are converted defensively.
        w: length-C ratio of target over source label distribution; it does
            not need to sum to 1.

    Returns:
        np.ndarray (N, C): L1-renormalized, re-weighted probabilities.

    Reference:
        [2002] "Adjusting the Outputs of a Classifier to New a Priori
        Probabilities: A Simple Procedure"
    """
    assert len(w) == probs.shape[-1]
    # BUG FIX: the original unconditionally called probs.detach().numpy(),
    # which raises AttributeError for plain ndarrays and RuntimeError for
    # CUDA tensors. Convert only when needed, via .cpu().
    if hasattr(probs, 'detach'):
        probs = probs.detach().cpu().numpy()
    pc_probs = normalized(probs * w, axis=-1, order=1)

    return pc_probs
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# Estimation of Source Label Distribution P(Y_s=i) or p(\hat{y}=c_i)----------#
|
| 38 |
+
def get_py(probs: np.ndarray, cls_num_list: List[int] = None, mode='soft'):
    r"""
    Estimate the (normalized) source label distribution P(Y_s=i).

    Args:
        probs: softmax probabilities p(y|x) over the train/validation set,
            shape (..., C); every axis except the last is averaged over.
        cls_num_list: per-class sample counts (length C); required for
            mode='gt'.
        mode: estimator for p(y=c_i):
            'soft' - average of the predicted probabilities,
            'hard' - histogram of the argmax predictions,
            'gt'   - normalize the ground-truth counts in cls_num_list.

    Returns:
        np.ndarray [C]: label distribution summing to 1.

    Raises:
        ValueError: for an unknown mode, or mode='gt' without cls_num_list.
    """
    cls_num = probs.shape[-1]

    if mode == 'soft':
        # average over every axis except the class axis
        py = np.mean(probs, axis=tuple(range(probs.ndim - 1)))
    elif mode == 'hard':
        py = np.bincount(np.argmax(probs, axis=-1), minlength=cls_num)
        py = py / py.sum()
    elif mode == 'gt':
        # BUG FIX: previously a missing cls_num_list fell through to the
        # generic "'mode' only support..." error, hiding the real problem.
        if cls_num_list is None:
            raise ValueError("mode='gt' requires cls_num_list")
        py = np.array(cls_num_list, dtype=float)
    else:
        raise ValueError("'mode' only support options: 'soft', 'hard', 'gt'")

    return py / py.sum()
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def get_marginal(probs: np.ndarray, cls_num: int, mode: str = 'soft'):
    r"""
    Marginal distribution P(Y) obtained by aggregating P(Y|X=x) over samples.

    'soft' averages the probability rows; 'hard' histograms the argmax
    predictions and normalizes.

    Args:
        probs: (N, C) softmax probabilities.
        cls_num: number of classes C (checked against probs).
        mode: 'soft' or 'hard'.

    Returns:
        np.ndarray [C] marginal distribution.
    """
    assert (mode in ['soft', 'hard']) and probs.shape[-1] == cls_num
    if mode == 'hard':
        # vectorized histogram replaces the original O(N) Python loop
        qz = np.bincount(np.argmax(probs, axis=-1), minlength=cls_num).astype(float)
        qz = qz / qz.sum()
    else:
        qz = np.mean(probs, axis=0)

    return qz
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def get_confusion_matrix(probs: np.ndarray,
                         labels: List,
                         cls_num: int,
                         mode: str = 'soft'):
    r"""
    Build a (cls_num x cls_num) confusion matrix from predictions and labels.

    Row index is the ground-truth class; in 'soft' mode each row accumulates
    the full probability vector, in 'hard' mode a single count goes to the
    argmax prediction.
    """
    assert (mode in ['soft', 'hard']) and probs.shape[-1] == cls_num
    cm = np.zeros((cls_num, cls_num))
    if mode == 'hard':
        predictions = np.argmax(probs, axis=-1)
        for true_cls, pred_cls in zip(labels, predictions):
            cm[true_cls, pred_cls] += 1
    else:  # 'soft'
        for true_cls, prob_row in zip(labels, probs):
            cm[true_cls] += prob_row

    return cm
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def normalized(a, axis=-1, order=2):
    r"""
    Normalize `a` along `axis` by its `order`-norm.

    Slices whose norm is exactly zero are left unscaled (divided by 1).
    """
    norms = np.atleast_1d(np.linalg.norm(a, order, axis))
    safe_norms = np.where(norms == 0, 1, norms)
    return a / np.expand_dims(safe_norms, axis)
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def Topk_qy(probs: np.ndarray, cls_num, topk_ratio=0.8, head=0, normalize=True):
    r"""
    Marginal P(Y) built from each sample's top-k probabilities only.

    For every row, ranks [head, k) of the descending-sorted probabilities are
    accumulated into qy; k = clip(int(cls_num * topk_ratio) + head,
    head + 1, cls_num). If `normalize`, the result is divided by the number
    of samples.
    """
    assert probs.shape[-1] == cls_num

    k = np.clip(int(cls_num * topk_ratio) + head, head + 1, cls_num)
    qy = np.zeros(cls_num)
    for row in probs:
        ranked = np.argsort(row)[::-1]
        kept = ranked[head:k]
        qy[kept] += row[kept]

    return qy / probs.shape[0] if normalize else qy
|
OnZeta/MAPLS/common_cuda.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.nn.functional as F
|
| 3 |
+
|
| 4 |
+
def lsc_torch(probs: torch.Tensor, w: torch.Tensor):
    """
    GPU-compatible Label Shift Compensation.

    Args:
        probs: [N, C] softmax probabilities.
        w: [C] target/source label-distribution ratio.

    Returns:
        [N, C] re-weighted probabilities, L1-normalized per row.
    """
    assert probs.shape[-1] == w.shape[0], "Shape mismatch"

    reweighted = probs * w
    return F.normalize(reweighted, p=1, dim=-1)
|
| 15 |
+
def get_py_torch(probs: torch.Tensor, cls_num_list=None, mode='soft'):
    """
    Torch estimation of the source label distribution P(Y_s=i).

    probs: Tensor [N, C].
    Note: in 'soft' mode this returns the raw column mean without a final
    renormalization (matching the original behavior), while 'hard' and 'gt'
    return normalized distributions.
    """
    num_classes = probs.shape[-1]

    if mode == "soft":
        return probs.mean(dim=0)
    if mode == "hard":
        counts = torch.bincount(probs.argmax(dim=-1), minlength=num_classes).float()
        return counts / counts.sum()
    if mode == 'gt' and cls_num_list is not None:
        counts = torch.tensor(cls_num_list, dtype=torch.float32, device=probs.device)
        return counts / counts.sum()
    raise ValueError("mode must be 'soft', 'hard', or 'gt'")
|
| 35 |
+
def get_marginal_torch(probs: torch.Tensor, cls_num: int, mode='soft'):
    """
    Torch marginal P(Y): 'soft' averages rows, 'hard' histograms the argmax.

    Args:
        probs: [N, C] softmax probabilities.
        cls_num: expected number of classes C.

    Raises:
        ValueError: for an unsupported mode (the original silently fell
        through to an UnboundLocalError on qz).
    """
    assert probs.shape[-1] == cls_num
    if mode == 'hard':
        pred = torch.argmax(probs, dim=-1)
        qz = torch.bincount(pred, minlength=cls_num).float()
        qz = qz / qz.sum()
    elif mode == 'soft':
        qz = torch.mean(probs, dim=0)
    else:
        raise ValueError("mode must be 'soft' or 'hard'")
    return qz
|
| 44 |
+
def get_confusion_matrix_torch(probs: torch.Tensor, labels: torch.Tensor, cls_num: int, mode='soft'):
    """
    Torch confusion matrix.

    probs: Tensor [N, C]
    labels: Tensor [N] (long) ground-truth class indices (rows of the matrix)
    returns: [cls_num, cls_num] confusion matrix on probs.device

    The per-sample Python loops of the original are replaced with vectorized
    index_add_/index_put_ (both accumulate duplicate indices correctly).
    """
    cm = torch.zeros((cls_num, cls_num), device=probs.device)
    if mode == 'soft':
        # row `labels[i]` accumulates the full probability vector of sample i
        cm.index_add_(0, labels, probs.to(cm.dtype))
    elif mode == 'hard':
        pred = torch.argmax(probs, dim=-1)
        ones = torch.ones(labels.shape[0], dtype=cm.dtype, device=cm.device)
        cm.index_put_((labels, pred), ones, accumulate=True)
    return cm
|
| 59 |
+
def normalized_torch(a: torch.Tensor, axis=-1, order=2):
    """Normalize `a` along `axis` by its p=`order` norm; zero norms divide by 1."""
    norms = torch.norm(a, p=order, dim=axis, keepdim=True)
    safe = torch.where(norms == 0, torch.ones_like(norms), norms)
    return a / safe
|
| 63 |
+
def topk_qy_torch(probs: torch.Tensor, cls_num: int, topk_ratio=0.8, head=0, normalize=True):
    """
    Marginal P(Y) built from each sample's top-k probabilities only.

    probs: Tensor [N, C]
    return: Tensor [C]

    The per-row Python loop of the original is replaced with a single
    scatter_add_ (which accumulates duplicate indices, so the sums are
    identical).
    """
    N, C = probs.shape
    k = max(min(int(cls_num * topk_ratio) + head, cls_num), head + 1)

    topk_vals, topk_indices = torch.topk(probs, k=k, dim=1)
    qy = torch.zeros(cls_num, device=probs.device, dtype=probs.dtype)
    qy.scatter_add_(0, topk_indices[:, head:].reshape(-1), topk_vals[:, head:].reshape(-1))

    if normalize:
        qy = qy / N

    return qy
|
OnZeta/MAPLS/mapls.py
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import numpy as np
|
| 2 |
+
from .common import normalized, Topk_qy
|
| 3 |
+
import logging
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def mapls(test_probs,
          pz: np.ndarray,
          qy_mode: str = 'soft',
          max_iter: int = 100,
          init_mode: str = 'identical',
          lam: float = None,
          dvg_name='kl'):
    r"""
    Maximum A Posteriori Label Shift (MAPLS).

    Given source-domain posteriors P(Y_s=i|X=x) and the source label
    distribution P(Y_s=i), estimate the target label distribution P(Y_t=i)
    on the test set via an EM loop with a MAP prior.

    Args:
        test_probs: [N, C] softmax probabilities; torch tensor (CPU/CUDA) or
            np.ndarray.
        pz: source label distribution (length C).
        qy_mode: 'soft' | 'hard' | 'topk' M-step estimator.
        max_iter: non-negative number of EM iterations.
        init_mode: 'uniform' or 'identical' initialization of qz.
        lam: MAP mixing weight between the data estimate and the uniform
            prior. Must be given: automatic estimation (get_lamda) is
            currently disabled.
        dvg_name: 'kl' or 'js' divergence (only used by the disabled
            lambda estimation).

    Returns:
        np.ndarray [C]: estimated target label distribution.
    """
    # Accept torch tensors (CPU or CUDA) as well as numpy arrays.
    if hasattr(test_probs, 'detach'):
        test_probs = test_probs.detach().cpu().numpy()

    # Sanity Check
    cls_num = len(pz)
    assert test_probs.shape[-1] == cls_num
    if not isinstance(max_iter, int) or max_iter < 0:
        raise ValueError('max_iter should be a positive integer, not ' + str(max_iter))

    # Setup d(p,q) measure (only relevant for the disabled lambda estimation)
    if dvg_name == 'kl':
        dvg = kl_div
    elif dvg_name == 'js':
        dvg = js_div
    else:
        raise ValueError('Unsupported distribution distance measure, expect kl or js.')

    # Prior of the target label distribution: uniform
    q_prior = np.ones(cls_num) / cls_num

    # BUG FIX: the original left `lam = lam` (a no-op) when lam was None and
    # then crashed in the logging format below. Fail fast with a clear error.
    if lam is None:
        raise ValueError('lam must be provided explicitly; automatic '
                         'estimation via get_lamda is currently disabled.')
    logging.info("lam is %.4f" % lam)

    # EM Algorithm Computation
    qz = mapls_EM(test_probs, pz, lam, q_prior, cls_num,
                  init_mode=init_mode, max_iter=max_iter, qy_mode=qy_mode)

    return qz
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def mapls_EM(probs, pz, lam, q_prior, cls_num, init_mode='identical', max_iter=100, qy_mode='soft'):
    """EM loop of MAPLS.

    Args:
        probs: [N, C] source-classifier softmax probabilities on the test set.
        pz: source label distribution (any positive weights; normalized below).
        lam: MAP mixing weight between the data-driven estimate and q_prior.
        q_prior: prior target label distribution (e.g. uniform).
        cls_num: number of classes C.
        init_mode: 'uniform' or 'identical' (start from pz).
        max_iter: number of EM iterations.
        qy_mode: 'soft' | 'hard' | 'topk' M-step estimator.

    Returns:
        The estimated target label distribution qz (length C, sums to 1).
    """
    # Normalize Source Label Distribution pz
    pz = np.array(pz) / np.sum(pz)
    # Initialize Target Label Distribution qz
    if init_mode == 'uniform':
        qz = np.ones(cls_num) / cls_num
    elif init_mode == 'identical':
        qz = pz.copy()
    else:
        raise ValueError('init_mode should be either "uniform" or "identical"')

    # Initialize importance weights w = q(y) / p(y)
    w = (np.array(qz) / np.array(pz))
    # EM algorithm with MAP estimation----------------------------------------#
    for i in range(max_iter):
        # E-Step: re-weight the posteriors by w and L1-renormalize per sample
        mapls_probs = normalized(probs * w, axis=-1, order=1)

        # M-Step: re-estimate the target marginal from the posteriors
        if qy_mode == 'hard':
            pred = np.argmax(mapls_probs, axis=-1)
            # NOTE(review): these are raw counts (summing to N) while q_prior
            # sums to 1, so the lam-mixture below nearly ignores the prior for
            # large N until the final renormalization — confirm intended.
            qz_new = np.bincount(pred.reshape(-1), minlength=cls_num)
        elif qy_mode == 'soft':
            qz_new = np.mean(mapls_probs, axis=0)
        elif qy_mode == 'topk':
            qz_new = Topk_qy(mapls_probs, cls_num, topk_ratio=0.9, head=0)
        else:
            raise Exception('MAPLS mode should be either "soft" or "hard". ')

        # MAP update: mix the data estimate with the prior, then renormalize
        qz = lam * qz_new + (1 - lam) * q_prior
        qz /= qz.sum()
        w = qz / pz

    return qz
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def get_lamda(test_probs, pz, q_prior, dvg, max_iter=50):
    """Heuristically estimate the MAP mixing weight `lam`.

    Runs an MLLS-style EM (lam=1, no prior) to get a preliminary target
    distribution, then combines divergences between target/source/prior
    through the lam_forward / lam_inv squashing functions.

    Args:
        test_probs: [N, C] softmax probabilities on the test set.
        pz: source label distribution.
        q_prior: prior target label distribution.
        dvg: divergence function d(p, q), e.g. kl_div or js_div.
        max_iter: EM iterations for the internal MLLS run.

    Returns:
        float: estimated lambda.
    """
    K = len(pz)

    # MLLS estimation of the target label distribution (lam=1 disables the prior)
    qz_pred = mapls_EM(test_probs, pz, 1, 0, K, max_iter=max_iter)

    TU_div = dvg(qz_pred, q_prior)   # target vs uniform prior
    TS_div = dvg(qz_pred, pz)        # target vs source
    SU_div = dvg(pz, q_prior)        # source vs uniform prior
    print('weights are, TU_div %.4f, TS_div %.4f, SU_div %.4f' % (TU_div, TS_div, SU_div))

    SU_conf = 1 - lam_forward(SU_div, lam_inv(dpq=0.5, lam=0.2))
    TU_conf = lam_forward(TU_div, lam_inv(dpq=0.5, lam=SU_conf))
    TS_conf = lam_forward(TS_div, lam_inv(dpq=0.5, lam=SU_conf))
    print('weights are, unviform_weight %.4f, differ_weight %.4f, regularize weight %.4f'
          % (TU_conf, TS_conf, SU_conf))

    confs = np.array([TU_conf, 1 - TS_conf])
    w = np.array([0.9, 0.1])
    lam = np.sum(w * confs)

    # BUG FIX: the original used print('... %.4f', lam) — a comma instead of
    # the % operator — printing the raw format string followed by the value.
    print('Estimated lambda is: %.4f' % lam)

    return lam
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def lam_inv(dpq, lam):
    """Inverse of lam_forward: solve gamma from lam = gamma*dpq/(1+gamma*dpq).

    NOTE(review): when lam is within 1e-3 of 1 it is replaced by 1e-3 — the
    comment in the original says "clip for small lam", which does not match;
    presumably this guards the 1/(1-lam) pole. Confirm the intended clip value.
    """
    if abs(lam - 1) < 1e-3:
        lam = 1e-3
    return (1 / (1 - lam) - 1) / dpq
|
| 136 |
+
|
| 137 |
+
def lam_forward(dpq, gamma):
    """Squash a divergence into (0, 1): gamma*dpq / (1 + gamma*dpq)."""
    scaled = gamma * dpq
    return scaled / (1 + scaled)
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def kl_div(p, q):
    """KL divergence KL(p || q) = sum p * log(p / q), with 0 * log 0 := 0.

    Fixes over the original:
      * float64 instead of float16 (float16 introduced large rounding error
        and could overflow for unnormalized inputs);
      * terms where p == 0 are never evaluated, so no log(0)/0*inf runtime
        warnings from np.where's eager evaluation.
    """
    p = np.asarray(p, dtype=np.float64)
    q = np.asarray(q, dtype=np.float64) + 1e-8  # epsilon keeps log finite for q == 0
    mask = p != 0
    terms = np.zeros_like(p)
    terms[mask] = p[mask] * np.log(p[mask] / q[mask])
    return np.sum(terms)
|
| 149 |
+
|
| 150 |
+
def js_div(p, q):
    """Jensen-Shannon divergence: symmetrized KL against the midpoint.

    Both p and q must be normalized distributions (checked).
    """
    assert (np.abs(np.sum(p) - 1) < 1e-6) and (np.abs(np.sum(q) - 1) < 1e-6)
    midpoint = (p + q) / 2
    return kl_div(p, midpoint) / 2 + kl_div(q, midpoint) / 2
|
OnZeta/MAPLS/mapls_cuda.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.nn.functional as F
|
| 3 |
+
|
| 4 |
+
def mapls_torch(test_probs: torch.Tensor,
                pz: torch.Tensor,
                qy_mode: str = 'soft',
                max_iter: int = 100,
                init_mode: str = 'identical',
                lam: float = None,
                dvg_name='kl') -> torch.Tensor:
    """
    GPU-compatible MAP Label Shift (MAPLS) using PyTorch.

    Args:
        test_probs: [N, C] softmax probabilities.
        pz: source label distribution (tensor, array, or list).
        qy_mode / max_iter / init_mode / lam: see mapls_EM_torch.
        dvg_name: 'kl' or 'js' (divergence selection; currently not used by
            the EM itself).

    Returns:
        [C] estimated target label distribution on test_probs.device.
    """
    device = test_probs.device
    # BUG FIX: pz was forced onto device='cuda', which crashed on CPU-only
    # machines and could mismatch test_probs' device. Use test_probs.device,
    # and as_tensor to avoid a needless copy/warning for tensor inputs.
    pz = torch.as_tensor(pz, dtype=torch.float32, device=device)
    cls_num = pz.numel()
    assert test_probs.shape[-1] == cls_num

    if dvg_name == 'kl':
        dvg = kl_div_torch
    elif dvg_name == 'js':
        dvg = js_div_torch
    else:
        raise ValueError('Unsupported divergence type')

    # Prior: uniform
    q_prior = torch.ones(cls_num, device=device) / cls_num

    # EM
    qz = mapls_EM_torch(test_probs, pz, lam, q_prior, cls_num,
                        init_mode=init_mode, max_iter=max_iter, qy_mode=qy_mode)
    return qz
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def mapls_EM_torch(probs, pz, lam, q_prior, cls_num, init_mode='identical', max_iter=100, qy_mode='soft'):
    """Torch EM loop of MAPLS; mirrors mapls_EM in MAPLS/mapls.py.

    Args:
        probs: [N, C] softmax probabilities.
        pz: source label distribution tensor (normalized below).
        lam: MAP mixing weight between the data estimate and q_prior.
        q_prior: prior target label distribution.
        cls_num: number of classes C.
        init_mode: 'uniform' or 'identical' (start from pz).
        max_iter: number of EM iterations.
        qy_mode: 'soft' or 'hard' M-step estimator.
    """
    pz = pz / pz.sum()  # normalize the source label distribution
    if init_mode == 'uniform':
        qz = torch.ones(cls_num, device=probs.device) / cls_num
    elif init_mode == 'identical':
        qz = pz.clone()
    else:
        raise ValueError('init_mode must be "uniform" or "identical"')

    # importance weights w = q(y) / p(y)
    w = qz / pz

    for _ in range(max_iter):
        # E-step: re-weight and L1-renormalize the posteriors
        mapls_probs = normalize_torch(probs * w, dim=-1)

        # M-step: re-estimate the target marginal
        if qy_mode == 'hard':
            pred = torch.argmax(mapls_probs, dim=-1)
            # NOTE(review): raw counts (summing to N) are mixed with a prior
            # summing to 1, same as the numpy version — confirm intended.
            qz_new = torch.bincount(pred, minlength=cls_num).float().to(probs.device)
        elif qy_mode == 'soft':
            qz_new = mapls_probs.mean(dim=0)
        else:
            raise ValueError('qy_mode must be "soft" or "hard"')

        # MAP update and renormalization
        qz = lam * qz_new + (1 - lam) * q_prior
        qz = qz / qz.sum()
        w = qz / pz

    return qz
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def normalize_torch(x, dim=-1, eps=1e-8):
    """Divide by the (eps-stabilized) sum along `dim` (L1-style normalization)."""
    denom = x.sum(dim=dim, keepdim=True) + eps
    return x / denom
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def kl_div_torch(p, q, eps=1e-8):
    """KL(p || q) in float32, with 0*log0 treated as 0 and q stabilized by eps."""
    p32 = p.to(torch.float32)
    q32 = (q + eps).to(torch.float32)
    terms = torch.where(p32 != 0, p32 * torch.log(p32 / q32), torch.zeros_like(p32))
    return terms.sum()
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def js_div_torch(p, q):
    """Jensen-Shannon divergence: average of the two KL terms vs the midpoint."""
    midpoint = (p + q) / 2
    return (kl_div_torch(p, midpoint) + kl_div_torch(q, midpoint)) / 2
|
OnZeta/README.md
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# OnZeta
|
| 2 |
+
PyTorch Implementation for Our ECCV'24 Paper: "Online Zero-Shot Classification with CLIP"
|
| 3 |
+
|
| 4 |
+
## Requirements
|
| 5 |
+
* Python 3.9
|
| 6 |
+
* PyTorch 1.12
|
| 7 |
+
* [CLIP](https://github.com/openai/CLIP)
|
| 8 |
+
|
| 9 |
+
## Usage:
|
| 10 |
+
OnZeta with pre-trained ResNet-50
|
| 11 |
+
```
|
| 12 |
+
python main_online.py -a RN50 --data_path /path/to/imagenet
|
| 13 |
+
```
|
| 14 |
+
|
| 15 |
+
## Citation
|
| 16 |
+
If you use the package in your research, please cite our paper:
|
| 17 |
+
```
|
| 18 |
+
@inproceedings{qian2024onzeta,
|
| 19 |
+
author = {Qi Qian and
|
| 20 |
+
Juhua Hu},
|
| 21 |
+
title = {Online Zero-Shot Classification with CLIP},
|
| 22 |
+
booktitle = {The 18th European Conference on Computer Vision, {ECCV} 2024},
|
| 23 |
+
  year      = {2024}
}
```
|
OnZeta/clip_cifar10.py
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import clip
|
| 3 |
+
import os
|
| 4 |
+
from torchvision.datasets import MNIST, CIFAR10
|
| 5 |
+
import numpy as np
|
| 6 |
+
|
| 7 |
+
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 8 |
+
model, preprocess = clip.load('RN50', device)
|
| 9 |
+
|
| 10 |
+
# from https://github.com/openai/CLIP/blob/main/data/prompts.md
|
| 11 |
+
mnist_classes = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ]
|
| 12 |
+
mnist_templates = ['a photo of the number: "{}".', ]
|
| 13 |
+
cifar10_classes = ['airplane',
|
| 14 |
+
'automobile',
|
| 15 |
+
'bird',
|
| 16 |
+
'cat',
|
| 17 |
+
'deer',
|
| 18 |
+
'dog',
|
| 19 |
+
'frog',
|
| 20 |
+
'horse',
|
| 21 |
+
'ship',
|
| 22 |
+
'truck', ]
|
| 23 |
+
cifar10_templates = [
|
| 24 |
+
'a photo of a {}.',
|
| 25 |
+
'a blurry photo of a {}.',
|
| 26 |
+
'a black and white photo of a {}.',
|
| 27 |
+
'a low contrast photo of a {}.',
|
| 28 |
+
'a high contrast photo of a {}.',
|
| 29 |
+
'a bad photo of a {}.',
|
| 30 |
+
'a good photo of a {}.',
|
| 31 |
+
'a photo of a small {}.',
|
| 32 |
+
'a photo of a big {}.',
|
| 33 |
+
'a photo of the {}.',
|
| 34 |
+
'a blurry photo of the {}.',
|
| 35 |
+
'a black and white photo of the {}.',
|
| 36 |
+
'a low contrast photo of the {}.',
|
| 37 |
+
'a high contrast photo of the {}.',
|
| 38 |
+
'a bad photo of the {}.',
|
| 39 |
+
'a good photo of the {}.',
|
| 40 |
+
'a photo of the small {}.',
|
| 41 |
+
'a photo of the big {}.',
|
| 42 |
+
]
|
| 43 |
+
|
| 44 |
+
class_map = {'MNIST': mnist_classes, 'CIFAR10': cifar10_classes}
|
| 45 |
+
template_map = {'MNIST': mnist_templates, 'CIFAR10': cifar10_templates}
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
@torch.no_grad()
def accuracy(output, target, topk=(1,)):
    """Return top-k accuracy percentages (one float per k in `topk`).

    output: [N, C] scores; target: [N] ground-truth class indices.
    """
    k_max = max(topk)
    num_samples = target.size(0)

    top_pred = output.topk(k_max, 1, True, True)[1].t()  # [k_max, N]
    hits = top_pred.eq(target.reshape(1, -1).expand_as(top_pred))

    return [
        hits[:k].reshape(-1).float().sum(0, keepdim=True).mul_(100.0 / num_samples).item()
        for k in topk
    ]
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
@torch.no_grad()
def extract_text_features(dataset_name):
    """Build zero-shot classifier weights: one prompt-averaged, L2-normalized
    text embedding per class, stacked as columns ([D, num_classes]).

    Recipe from the official CLIP prompt-engineering notebook:
    https://github.com/openai/CLIP/blob/fcab8b6eb92af684e7ff0a904464be7b99b49b88/notebooks/Prompt_Engineering_for_ImageNet.ipynb
    """
    prompts = template_map[dataset_name]
    model.to(device)
    model.eval()

    weights = []
    for name in class_map[dataset_name]:
        tokenized = clip.tokenize([t.format(name) for t in prompts]).to(device)
        embeddings = model.encode_text(tokenized)
        embeddings = embeddings / embeddings.norm(dim=-1, keepdim=True)
        mean_embedding = embeddings.mean(dim=0)
        weights.append(mean_embedding / mean_embedding.norm())
    return torch.stack(weights, dim=1).to(device)
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
# Zero-shot CLIP evaluation on the *test* splits of MNIST and CIFAR10.
# Note: datasets are downloaded to ~/.cache on first run.
mnist = MNIST(root=os.path.expanduser("~/.cache"), download=True, train=False)
cifar10 = CIFAR10(root=os.path.expanduser("~/.cache"), download=True, train=False)

for dataset in [mnist, cifar10]:
    # extract image feature, code borrowed from: https://github.com/openai/CLIP#zero-shot-prediction
    image_features = []
    image_labels = []
    for image, class_id in dataset:
        image_input = preprocess(image).unsqueeze(0).to(device)
        with torch.no_grad():
            # encode and L2-normalize each image embedding
            image_feature = model.encode_image(image_input)
            image_feature /= image_feature.norm()
        image_features.append(image_feature)
        image_labels.append(class_id)
    # stack [1, D] features then squeeze to [N, D]
    image_features = torch.stack(image_features, dim=1).to(device)
    image_features = image_features.squeeze()

    # extract text feature (identity check: `dataset == mnist` relies on
    # default object identity semantics of torchvision datasets)
    dataset_name = 'MNIST' if dataset == mnist else 'CIFAR10'
    text_features = extract_text_features(dataset_name)

    # compute top-1 accuracy (100x scaling matches CLIP's logit scale recipe)
    logits = (100. * image_features @ text_features).softmax(dim=-1)
    image_labels = torch.tensor(image_labels).unsqueeze(dim=1).to(device)
    top1_acc = accuracy(logits, image_labels, (1,))
    print(f'top-1 accuracy for {dataset_name} dataset: {top1_acc[0]:.3f}')
|
OnZeta/code_draft.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Scratch file: quick sanity check of list length.
my_list = list("abcdefghij")

print(len(my_list))
|
OnZeta/lame/__pycache__/lame.cpython-39.pyc
ADDED
|
Binary file (4.67 kB). View file
|
|
|
OnZeta/lame/lame.py
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.jit
|
| 3 |
+
import logging
|
| 4 |
+
from typing import List, Dict
|
| 5 |
+
|
| 6 |
+
import time
|
| 7 |
+
import torch.nn.functional as F
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
__all__ = ["LAME"]
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class AffinityMatrix:
    """Base class for pairwise affinity (kernel) matrix builders."""

    def __init__(self, **kwargs):
        pass

    def __call__(self, X, **kwargs):
        # BUG FIX: the original signature was __call__(X, **kwargs) — missing
        # `self` — so the instance itself was bound to X on every call.
        raise NotImplementedError

    def is_psd(self, mat):
        """Return (eigenvalues sorted descending, 1.0 if symmetric PSD else 0.0)."""
        # BUG FIX: torch.eig was deprecated and removed from recent PyTorch;
        # torch.linalg.eigvals(...).real reproduces the old real-part output.
        eigenvalues = torch.linalg.eigvals(mat).real.sort(descending=True)[0]
        return eigenvalues, float((mat == mat.t()).all() and (eigenvalues >= 0).all())

    def symmetrize(self, mat):
        """Return the symmetric part (mat + mat^T) / 2."""
        return 1 / 2 * (mat + mat.t())
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class kNN_affinity(AffinityMatrix):
|
| 31 |
+
def __init__(self, knn: int, **kwargs):
|
| 32 |
+
self.knn = knn
|
| 33 |
+
|
| 34 |
+
def __call__(self, X):
|
| 35 |
+
N = X.size(0)
|
| 36 |
+
dist = torch.norm(X.unsqueeze(0) - X.unsqueeze(1), dim=-1, p=2) # [N, N]
|
| 37 |
+
n_neighbors = min(self.knn + 1, N)
|
| 38 |
+
|
| 39 |
+
knn_index = dist.topk(n_neighbors, -1, largest=False).indices[:, 1:] # [N, knn]
|
| 40 |
+
|
| 41 |
+
W = torch.zeros(N, N, device=X.device)
|
| 42 |
+
W.scatter_(dim=-1, index=knn_index, value=1.0)
|
| 43 |
+
|
| 44 |
+
return W
|
| 45 |
+
|
| 46 |
+
# def rbf_affinity_blockwise(X, sigma=1.0, batch_size=1000):
|
| 47 |
+
# N = X.size(0)
|
| 48 |
+
# kernel = torch.zeros(N, N, device=X.device)
|
| 49 |
+
#
|
| 50 |
+
# for i in range(0, N, batch_size):
|
| 51 |
+
# x_batch = X[i:i+batch_size] # [B, D]
|
| 52 |
+
# dist = torch.cdist(x_batch, X, p=2) # [B, N]
|
| 53 |
+
# sim = torch.exp(-dist ** 2 / (2 * sigma ** 2)) # [B, N]
|
| 54 |
+
# kernel[i:i+batch_size] = sim
|
| 55 |
+
#
|
| 56 |
+
# return kernel
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class rbf_affinity(AffinityMatrix):
|
| 60 |
+
def __init__(self, sigma: float, **kwargs):
|
| 61 |
+
self.sigma = sigma
|
| 62 |
+
self.k = kwargs['knn']
|
| 63 |
+
|
| 64 |
+
def __call__(self, X):
|
| 65 |
+
|
| 66 |
+
N = X.size(0)
|
| 67 |
+
dist = torch.norm(X.unsqueeze(0) - X.unsqueeze(1), dim=-1, p=2) # [N, N]
|
| 68 |
+
n_neighbors = min(self.k, N)
|
| 69 |
+
kth_dist = dist.topk(k=n_neighbors, dim=-1, largest=False).values[:, -1] # compute k^th distance for each point, [N, knn + 1]
|
| 70 |
+
sigma = kth_dist.mean()
|
| 71 |
+
rbf = torch.exp(- dist ** 2 / (2 * sigma ** 2))
|
| 72 |
+
# mask = torch.eye(X.size(0)).to(X.device)
|
| 73 |
+
# rbf = rbf * (1 - mask)
|
| 74 |
+
return rbf
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class linear_affinity(AffinityMatrix):
|
| 78 |
+
|
| 79 |
+
def __call__(self, X: torch.Tensor):
|
| 80 |
+
"""
|
| 81 |
+
X: [N, d]
|
| 82 |
+
"""
|
| 83 |
+
return torch.matmul(X, X.t())
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
class LAME:
    """
    Test-time adaptation via Laplacian regularization of softmax predictions.
    """
    def __init__(self):
        """Hard-coded configuration: 5-NN, rbf affinity, no forced symmetry."""
        self.knn = 5
        self.sigma = 1.0
        self.LAME_AFFINITY = 'rbf'
        # FIX: replace the original eval(f'{name}_affinity') lookup with an
        # explicit registry — no eval, and 'knn' now correctly maps to the
        # kNN_affinity class (eval would have failed on that name).
        affinity_registry = {
            'rbf': rbf_affinity,
            'knn': kNN_affinity,
            'linear': linear_affinity,
        }
        self.affinity = affinity_registry[self.LAME_AFFINITY](sigma=self.sigma, knn=self.knn)
        self.force_symmetry = False

    def run_step(self, logits, feats):
        """One LAME step: return Laplacian-refined probabilities Y [N, K].

        Args:
            logits: [N, K] classifier logits.
            feats: [N, d] features used to build the affinity kernel.
        """
        with torch.no_grad():
            probas = logits.softmax(1)  # [N, K]

            # --- Get unary term and kernel ---
            unary = - torch.log(probas + 1e-10)  # [N, K] negative log-likelihood
            feats = F.normalize(feats, p=2, dim=-1)  # [N, d]
            kernel = self.affinity(feats)  # [N, N]
            if self.force_symmetry:
                kernel = 1/2 * (kernel + kernel.t())
            # --- Perform optim ---
            Y = laplacian_optimization(unary, kernel)

        return Y
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def laplacian_optimization(unary, kernel, bound_lambda=1, max_steps=100):
    """Fixed-point iteration minimising the LAME entropy energy.

    Iterates ``Y <- softmax(-unary + bound_lambda * kernel @ Y)`` until the
    energy is relatively stationary (change <= 1e-8 * |E|) or ``max_steps``
    is reached.

    Args:
        unary:  [N, K] negative log-probabilities (data term).
        kernel: [N, N] affinity matrix (pairwise term).
        bound_lambda: weight of the pairwise term.
        max_steps: maximum number of iterations.

    Returns:
        Y: [N, K] refined soft assignments (rows sum to 1).
    """
    E_list = []
    oldE = float('inf')
    Y = (-unary).softmax(-1)  # [N, K]
    device = kernel.device
    Y = Y.to(device)
    unary = unary.to(device)
    # FIX: the half-precision cast of the [N, N] kernel is loop-invariant;
    # the original redid it on every iteration. Cast once up front.
    kernel = kernel.to(torch.float16)
    for i in range(max_steps):
        # ``softmax`` returns Y in the dtype of ``exponent`` (float32 when
        # ``unary`` is float32), so Y is recast each iteration to keep the
        # large matmul in half precision.
        Y = Y.to(torch.float16)
        # half @ half already yields float16, so no extra cast is needed
        # on ``pairwise`` (the original ``pairwise.to(torch.float16)`` was
        # a no-op).
        pairwise = bound_lambda * kernel.matmul(Y)  # [N, K]
        exponent = -unary + pairwise
        Y = exponent.softmax(-1)
        E = entropy_energy(Y, unary, pairwise, bound_lambda).item()
        E_list.append(E)

        # Stop once the energy change is negligible relative to |E|.
        if i > 1 and (abs(E - oldE) <= 1e-8 * abs(oldE)):
            break
        else:
            oldE = E

    return Y
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def entropy_energy(Y, unary, pairwise, bound_lambda):
    """Scalar LAME energy: data term minus pairwise term plus negative entropy."""
    data_term = unary * Y
    pairwise_term = bound_lambda * pairwise * Y
    # Clip before log so zero entries of Y do not produce -inf.
    neg_entropy = Y * torch.log(Y.clip(1e-20))
    return (data_term - pairwise_term + neg_entropy).sum()
|
OnZeta/lame/lame_px.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.nn.functional as F
|
| 3 |
+
|
| 4 |
+
class AffinityMatrix:
    """Base class for pairwise affinity builders.

    Subclasses implement ``__call__`` mapping features [N, d] to an
    affinity matrix [N, N].
    """

    def __init__(self, **kwargs):
        pass

    def __call__(self, X, **kwargs):
        # BUG FIX: the original signature ``__call__(X, **kwargs)`` was
        # missing ``self``, so ``instance(features)`` raised TypeError
        # ("too many positional arguments") instead of the intended
        # NotImplementedError.
        raise NotImplementedError

    def is_psd(self, mat):
        """Return (eigenvalues sorted descending, 1.0 if symmetric PSD else 0.0)."""
        # FIX: ``torch.eig`` was removed from PyTorch; ``torch.linalg.eigvals``
        # is the supported replacement. Taking ``.real`` matches the old
        # ``torch.eig(mat)[0][:, 0]`` (real parts of the eigenvalues).
        eigenvalues = torch.linalg.eigvals(mat).real.sort(descending=True)[0]
        return eigenvalues, float((mat == mat.t()).all() and (eigenvalues >= 0).all())

    def symmetrize(self, mat):
        """Return the symmetric part (mat + mat^T) / 2."""
        return 1 / 2 * (mat + mat.t())
|
| 18 |
+
|
| 19 |
+
class kNN_affinity(AffinityMatrix):
    """Binary k-nearest-neighbour affinity.

    W[i, j] = 1 iff j is among the ``knn`` closest points to i
    (i itself excluded), else 0. The result is generally asymmetric.
    """

    def __init__(self, knn: int, **kwargs):
        self.knn = knn

    def __call__(self, X):
        num_points = X.size(0)
        # Dense pairwise Euclidean distances via broadcasting, [N, N].
        dist = torch.norm(X.unsqueeze(0) - X.unsqueeze(1), dim=-1, p=2)
        n_neighbors = min(self.knn + 1, num_points)
        # Column 0 of the smallest-distance top-k is each point itself
        # (distance zero), so it is dropped -> [N, knn].
        nn_idx = dist.topk(n_neighbors, -1, largest=False).indices[:, 1:]
        W = torch.zeros(num_points, num_points, device=X.device)
        W.scatter_(dim=-1, index=nn_idx, value=1.0)
        return W
|
| 34 |
+
|
| 35 |
+
def laplacian_optimization(unary, kernel, bound_lambda=1, max_steps=100):
    """Fixed-point iteration minimising the LAME entropy energy.

    Repeats ``Y <- softmax(bound_lambda * K @ Y - unary)`` until the
    energy is relatively stationary (change <= 1e-8 * |E|) or
    ``max_steps`` is reached.

    Args:
        unary:  [N, K] negative log-probabilities (data term).
        kernel: [N, N] affinity matrix.
        bound_lambda: weight of the pairwise term.
        max_steps: iteration cap.

    Returns:
        [N, K] refined soft assignments.
    """
    energies = []
    prev_energy = float('inf')
    Y = torch.softmax(-unary, dim=-1)  # [N, K] initial soft assignment
    for step in range(max_steps):
        pairwise = bound_lambda * (kernel @ Y)  # [N, K]
        Y = torch.softmax(pairwise - unary, dim=-1)
        energy = entropy_energy(Y, unary, pairwise, bound_lambda).item()
        energies.append(energy)

        converged = step > 1 and abs(energy - prev_energy) <= 1e-8 * abs(prev_energy)
        if converged:
            break
        prev_energy = energy

    return Y
|
| 54 |
+
|
| 55 |
+
def entropy_energy(Y, unary, pairwise, bound_lambda):
    """LAME objective: sum over entries of Y * (unary - lambda*pairwise + log Y)."""
    # Clip keeps log finite where Y has (near-)zero entries.
    log_Y = torch.log(Y.clip(1e-20))
    return ((unary - bound_lambda * pairwise + log_Y) * Y).sum()
|
| 58 |
+
|
| 59 |
+
def lame(probas, feats, knn=1):
    """Refine class probabilities with LAME Laplacian regularisation.

    Args:
        probas: [N, K] class probabilities from the model.
        feats:  [N, d] features for the same N samples.
        knn: neighbourhood size for the kNN affinity graph. Defaults to 1,
            matching the previously hard-coded ``kNN_affinity(1)``.

    Returns:
        [N, K] refined soft assignments from ``laplacian_optimization``.
    """
    # Unary (data) term: negative log-probabilities, clamped away from log(0).
    unary = -torch.log(probas + 1e-10)  # [N, K]

    feats = F.normalize(feats, p=2, dim=-1)  # [N, d], unit-norm rows
    kernel = kNN_affinity(knn)(feats)  # [N, N]
    # Removed the dead ``if False:`` symmetrisation branch; kNN affinity
    # is generally asymmetric — symmetrise explicitly if ever required.
    return laplacian_optimization(unary, kernel)
|
OnZeta/logs/debug_onzeta_eval_2025-06-06_22-11-27.log
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.8, repeat=5)
|
| 2 |
+
load pre-trained model
|
| 3 |
+
load data
|
OnZeta/logs/debug_onzeta_eval_2025-06-07_00-13-37.log
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 2 |
+
load pre-trained model
|
| 3 |
+
load data
|
| 4 |
+
obtain text proxy
|
| 5 |
+
accuracy with text proxy: 90.77
|
| 6 |
+
online zero-shot transfer: repeat 5 times
|
| 7 |
+
lam is 1.0000
|
| 8 |
+
lam is 1.0000
|
| 9 |
+
lam is 1.0000
|
| 10 |
+
lam is 1.0000
|
| 11 |
+
lam is 1.0000
|
| 12 |
+
mean acc of onlab is: 90.77
|
| 13 |
+
mean acc of onzeta is: 91.03
|
| 14 |
+
mean acc of MAPLS is: 91.81
|
| 15 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 16 |
+
load pre-trained model
|
| 17 |
+
load data
|
| 18 |
+
obtain text proxy
|
| 19 |
+
accuracy with text proxy: 90.77
|
| 20 |
+
online zero-shot transfer: repeat 5 times
|
| 21 |
+
lam is 0.9000
|
| 22 |
+
lam is 0.9000
|
| 23 |
+
lam is 0.9000
|
| 24 |
+
lam is 0.9000
|
| 25 |
+
lam is 0.9000
|
| 26 |
+
mean acc of onlab is: 90.77
|
| 27 |
+
mean acc of onzeta is: 90.94
|
| 28 |
+
mean acc of MAPLS is: 91.68
|
| 29 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 30 |
+
load pre-trained model
|
| 31 |
+
load data
|
| 32 |
+
obtain text proxy
|
| 33 |
+
accuracy with text proxy: 90.77
|
| 34 |
+
online zero-shot transfer: repeat 5 times
|
| 35 |
+
lam is 0.8000
|
| 36 |
+
lam is 0.8000
|
| 37 |
+
lam is 0.8000
|
| 38 |
+
lam is 0.8000
|
| 39 |
+
lam is 0.8000
|
| 40 |
+
mean acc of onlab is: 90.77
|
| 41 |
+
mean acc of onzeta is: 91.01
|
| 42 |
+
mean acc of MAPLS is: 91.57
|
| 43 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 44 |
+
load pre-trained model
|
| 45 |
+
load data
|
| 46 |
+
obtain text proxy
|
| 47 |
+
accuracy with text proxy: 90.77
|
| 48 |
+
online zero-shot transfer: repeat 5 times
|
| 49 |
+
lam is 0.7000
|
| 50 |
+
lam is 0.7000
|
| 51 |
+
lam is 0.7000
|
| 52 |
+
lam is 0.7000
|
| 53 |
+
lam is 0.7000
|
| 54 |
+
mean acc of onlab is: 90.77
|
| 55 |
+
mean acc of onzeta is: 90.98
|
| 56 |
+
mean acc of MAPLS is: 91.48
|
| 57 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 58 |
+
load pre-trained model
|
| 59 |
+
load data
|
| 60 |
+
obtain text proxy
|
| 61 |
+
accuracy with text proxy: 90.77
|
| 62 |
+
online zero-shot transfer: repeat 5 times
|
| 63 |
+
lam is 0.6000
|
| 64 |
+
lam is 0.6000
|
| 65 |
+
lam is 0.6000
|
| 66 |
+
lam is 0.6000
|
| 67 |
+
lam is 0.6000
|
| 68 |
+
mean acc of onlab is: 90.77
|
| 69 |
+
mean acc of onzeta is: 91.02
|
| 70 |
+
mean acc of MAPLS is: 91.41
|
| 71 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 72 |
+
load pre-trained model
|
| 73 |
+
load data
|
| 74 |
+
obtain text proxy
|
| 75 |
+
accuracy with text proxy: 90.77
|
| 76 |
+
online zero-shot transfer: repeat 5 times
|
| 77 |
+
lam is 0.5000
|
| 78 |
+
lam is 0.5000
|
| 79 |
+
lam is 0.5000
|
| 80 |
+
lam is 0.5000
|
| 81 |
+
lam is 0.5000
|
| 82 |
+
mean acc of onlab is: 90.77
|
| 83 |
+
mean acc of onzeta is: 91.01
|
| 84 |
+
mean acc of MAPLS is: 91.36
|
| 85 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 86 |
+
load pre-trained model
|
| 87 |
+
load data
|
| 88 |
+
obtain text proxy
|
| 89 |
+
accuracy with text proxy: 90.77
|
| 90 |
+
online zero-shot transfer: repeat 5 times
|
| 91 |
+
lam is 0.4000
|
| 92 |
+
lam is 0.4000
|
| 93 |
+
lam is 0.4000
|
| 94 |
+
lam is 0.4000
|
| 95 |
+
lam is 0.4000
|
| 96 |
+
mean acc of onlab is: 90.77
|
| 97 |
+
mean acc of onzeta is: 90.99
|
| 98 |
+
mean acc of MAPLS is: 91.26
|
| 99 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 100 |
+
load pre-trained model
|
| 101 |
+
load data
|
| 102 |
+
obtain text proxy
|
| 103 |
+
accuracy with text proxy: 90.77
|
| 104 |
+
online zero-shot transfer: repeat 5 times
|
| 105 |
+
lam is 0.3000
|
| 106 |
+
lam is 0.3000
|
| 107 |
+
lam is 0.3000
|
| 108 |
+
lam is 0.3000
|
| 109 |
+
lam is 0.3000
|
| 110 |
+
mean acc of onlab is: 90.77
|
| 111 |
+
mean acc of onzeta is: 91.00
|
| 112 |
+
mean acc of MAPLS is: 91.19
|
| 113 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 114 |
+
load pre-trained model
|
| 115 |
+
load data
|
| 116 |
+
obtain text proxy
|
| 117 |
+
accuracy with text proxy: 90.77
|
| 118 |
+
online zero-shot transfer: repeat 5 times
|
| 119 |
+
lam is 0.2000
|
| 120 |
+
lam is 0.2000
|
| 121 |
+
lam is 0.2000
|
| 122 |
+
lam is 0.2000
|
| 123 |
+
lam is 0.2000
|
| 124 |
+
mean acc of onlab is: 90.77
|
| 125 |
+
mean acc of onzeta is: 90.95
|
| 126 |
+
mean acc of MAPLS is: 91.08
|
| 127 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 128 |
+
load pre-trained model
|
| 129 |
+
load data
|
| 130 |
+
obtain text proxy
|
| 131 |
+
accuracy with text proxy: 90.77
|
| 132 |
+
online zero-shot transfer: repeat 5 times
|
| 133 |
+
lam is 0.1000
|
| 134 |
+
lam is 0.1000
|
| 135 |
+
lam is 0.1000
|
| 136 |
+
lam is 0.1000
|
| 137 |
+
lam is 0.1000
|
| 138 |
+
mean acc of onlab is: 90.77
|
| 139 |
+
mean acc of onzeta is: 90.98
|
| 140 |
+
mean acc of MAPLS is: 91.05
|
OnZeta/logs/debug_onzeta_eval_2025-06-11_22-30-48.log
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 2 |
+
the alpha is 1.0
|
| 3 |
+
load pre-trained model
|
| 4 |
+
load data
|
| 5 |
+
obtain text proxy
|
| 6 |
+
accuracy with text proxy: 41.91
|
| 7 |
+
online zero-shot transfer: repeat 5 times
|
| 8 |
+
lam is 0.7000
|
| 9 |
+
lam is 0.7000
|
| 10 |
+
lam is 0.7000
|
| 11 |
+
lam is 0.7000
|
| 12 |
+
lam is 0.7000
|
| 13 |
+
mean acc of onlab is: 47.22
|
| 14 |
+
mean acc of onzeta is: 47.75
|
| 15 |
+
mean acc of MAPLS is: 47.92
|
| 16 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 17 |
+
the alpha is 0.9
|
| 18 |
+
load pre-trained model
|
| 19 |
+
load data
|
| 20 |
+
obtain text proxy
|
| 21 |
+
accuracy with text proxy: 41.91
|
| 22 |
+
online zero-shot transfer: repeat 5 times
|
| 23 |
+
lam is 0.7000
|
| 24 |
+
lam is 0.7000
|
| 25 |
+
lam is 0.7000
|
| 26 |
+
lam is 0.7000
|
| 27 |
+
lam is 0.7000
|
| 28 |
+
mean acc of onlab is: 46.34
|
| 29 |
+
mean acc of onzeta is: 46.80
|
| 30 |
+
mean acc of MAPLS is: 48.02
|
| 31 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 32 |
+
the alpha is 0.8
|
| 33 |
+
load pre-trained model
|
| 34 |
+
load data
|
| 35 |
+
obtain text proxy
|
| 36 |
+
accuracy with text proxy: 41.91
|
| 37 |
+
online zero-shot transfer: repeat 5 times
|
| 38 |
+
lam is 0.7000
|
| 39 |
+
lam is 0.7000
|
| 40 |
+
lam is 0.7000
|
| 41 |
+
lam is 0.7000
|
| 42 |
+
lam is 0.7000
|
| 43 |
+
mean acc of onlab is: 45.14
|
| 44 |
+
mean acc of onzeta is: 45.29
|
| 45 |
+
mean acc of MAPLS is: 47.54
|
| 46 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 47 |
+
the alpha is 0.7
|
| 48 |
+
load pre-trained model
|
| 49 |
+
load data
|
| 50 |
+
obtain text proxy
|
| 51 |
+
accuracy with text proxy: 41.91
|
| 52 |
+
online zero-shot transfer: repeat 5 times
|
| 53 |
+
lam is 0.7000
|
| 54 |
+
lam is 0.7000
|
| 55 |
+
lam is 0.7000
|
| 56 |
+
lam is 0.7000
|
| 57 |
+
lam is 0.7000
|
| 58 |
+
mean acc of onlab is: 44.03
|
| 59 |
+
mean acc of onzeta is: 44.15
|
| 60 |
+
mean acc of MAPLS is: 47.49
|
| 61 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 62 |
+
the alpha is 0.6
|
| 63 |
+
load pre-trained model
|
| 64 |
+
load data
|
| 65 |
+
obtain text proxy
|
| 66 |
+
accuracy with text proxy: 41.91
|
| 67 |
+
online zero-shot transfer: repeat 5 times
|
| 68 |
+
lam is 0.7000
|
| 69 |
+
lam is 0.7000
|
| 70 |
+
lam is 0.7000
|
| 71 |
+
lam is 0.7000
|
| 72 |
+
lam is 0.7000
|
| 73 |
+
mean acc of onlab is: 43.26
|
| 74 |
+
mean acc of onzeta is: 43.33
|
| 75 |
+
mean acc of MAPLS is: 47.49
|
| 76 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 77 |
+
the alpha is 0.5
|
| 78 |
+
load pre-trained model
|
| 79 |
+
load data
|
| 80 |
+
obtain text proxy
|
| 81 |
+
accuracy with text proxy: 41.91
|
| 82 |
+
online zero-shot transfer: repeat 5 times
|
| 83 |
+
lam is 0.7000
|
| 84 |
+
lam is 0.7000
|
| 85 |
+
lam is 0.7000
|
| 86 |
+
lam is 0.7000
|
| 87 |
+
lam is 0.7000
|
| 88 |
+
mean acc of onlab is: 42.73
|
| 89 |
+
mean acc of onzeta is: 42.65
|
| 90 |
+
mean acc of MAPLS is: 47.27
|
| 91 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 92 |
+
the alpha is 0.4
|
| 93 |
+
load pre-trained model
|
| 94 |
+
load data
|
| 95 |
+
obtain text proxy
|
| 96 |
+
accuracy with text proxy: 41.91
|
| 97 |
+
online zero-shot transfer: repeat 5 times
|
| 98 |
+
lam is 0.7000
|
| 99 |
+
lam is 0.7000
|
| 100 |
+
lam is 0.7000
|
| 101 |
+
lam is 0.7000
|
| 102 |
+
lam is 0.7000
|
| 103 |
+
mean acc of onlab is: 42.27
|
| 104 |
+
mean acc of onzeta is: 42.17
|
| 105 |
+
mean acc of MAPLS is: 47.02
|
| 106 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 107 |
+
the alpha is 0.3
|
| 108 |
+
load pre-trained model
|
| 109 |
+
load data
|
| 110 |
+
obtain text proxy
|
| 111 |
+
accuracy with text proxy: 41.91
|
| 112 |
+
online zero-shot transfer: repeat 5 times
|
| 113 |
+
lam is 0.7000
|
| 114 |
+
lam is 0.7000
|
| 115 |
+
lam is 0.7000
|
| 116 |
+
lam is 0.7000
|
| 117 |
+
lam is 0.7000
|
| 118 |
+
mean acc of onlab is: 42.01
|
| 119 |
+
mean acc of onzeta is: 41.70
|
| 120 |
+
mean acc of MAPLS is: 46.97
|
| 121 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 122 |
+
the alpha is 0.2
|
| 123 |
+
load pre-trained model
|
| 124 |
+
load data
|
| 125 |
+
obtain text proxy
|
| 126 |
+
accuracy with text proxy: 41.91
|
| 127 |
+
online zero-shot transfer: repeat 5 times
|
| 128 |
+
lam is 0.7000
|
| 129 |
+
lam is 0.7000
|
| 130 |
+
lam is 0.7000
|
| 131 |
+
lam is 0.7000
|
| 132 |
+
lam is 0.7000
|
| 133 |
+
mean acc of onlab is: 41.94
|
| 134 |
+
mean acc of onzeta is: 41.62
|
| 135 |
+
mean acc of MAPLS is: 46.95
|
| 136 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 137 |
+
the alpha is 0.1
|
| 138 |
+
load pre-trained model
|
| 139 |
+
load data
|
| 140 |
+
obtain text proxy
|
| 141 |
+
accuracy with text proxy: 41.91
|
| 142 |
+
online zero-shot transfer: repeat 5 times
|
| 143 |
+
lam is 0.7000
|
| 144 |
+
lam is 0.7000
|
| 145 |
+
lam is 0.7000
|
| 146 |
+
lam is 0.7000
|
| 147 |
+
lam is 0.7000
|
| 148 |
+
mean acc of onlab is: 41.92
|
| 149 |
+
mean acc of onzeta is: 41.70
|
| 150 |
+
mean acc of MAPLS is: 46.98
|
OnZeta/logs/debug_onzeta_eval_2025-06-11_22-37-19.log
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 2 |
+
the alpha is 1.0
|
| 3 |
+
load pre-trained model
|
| 4 |
+
load data
|
| 5 |
+
obtain text proxy
|
| 6 |
+
accuracy with text proxy: 68.27
|
| 7 |
+
online zero-shot transfer: repeat 5 times
|
| 8 |
+
lam is 0.7000
|
| 9 |
+
lam is 0.7000
|
| 10 |
+
lam is 0.7000
|
| 11 |
+
lam is 0.7000
|
| 12 |
+
lam is 0.7000
|
| 13 |
+
mean acc of onlab is: 70.59
|
| 14 |
+
mean acc of onzeta is: 71.10
|
| 15 |
+
mean acc of MAPLS is: 71.15
|
| 16 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 17 |
+
the alpha is 0.9
|
| 18 |
+
load pre-trained model
|
| 19 |
+
load data
|
| 20 |
+
obtain text proxy
|
| 21 |
+
accuracy with text proxy: 68.27
|
| 22 |
+
online zero-shot transfer: repeat 5 times
|
| 23 |
+
lam is 0.7000
|
| 24 |
+
lam is 0.7000
|
| 25 |
+
lam is 0.7000
|
| 26 |
+
lam is 0.7000
|
| 27 |
+
lam is 0.7000
|
| 28 |
+
mean acc of onlab is: 70.31
|
| 29 |
+
mean acc of onzeta is: 70.88
|
| 30 |
+
mean acc of MAPLS is: 71.22
|
| 31 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 32 |
+
the alpha is 0.8
|
| 33 |
+
load pre-trained model
|
| 34 |
+
load data
|
| 35 |
+
obtain text proxy
|
| 36 |
+
accuracy with text proxy: 68.27
|
| 37 |
+
online zero-shot transfer: repeat 5 times
|
| 38 |
+
lam is 0.7000
|
| 39 |
+
lam is 0.7000
|
| 40 |
+
lam is 0.7000
|
| 41 |
+
lam is 0.7000
|
| 42 |
+
lam is 0.7000
|
| 43 |
+
mean acc of onlab is: 70.00
|
| 44 |
+
mean acc of onzeta is: 70.34
|
| 45 |
+
mean acc of MAPLS is: 71.51
|
| 46 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 47 |
+
the alpha is 0.7
|
| 48 |
+
load pre-trained model
|
| 49 |
+
load data
|
| 50 |
+
obtain text proxy
|
| 51 |
+
accuracy with text proxy: 68.27
|
| 52 |
+
online zero-shot transfer: repeat 5 times
|
| 53 |
+
lam is 0.7000
|
| 54 |
+
lam is 0.7000
|
| 55 |
+
lam is 0.7000
|
| 56 |
+
lam is 0.7000
|
| 57 |
+
lam is 0.7000
|
| 58 |
+
mean acc of onlab is: 69.62
|
| 59 |
+
mean acc of onzeta is: 69.92
|
| 60 |
+
mean acc of MAPLS is: 71.36
|
| 61 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 62 |
+
the alpha is 0.6
|
| 63 |
+
load pre-trained model
|
| 64 |
+
load data
|
| 65 |
+
obtain text proxy
|
| 66 |
+
accuracy with text proxy: 68.27
|
| 67 |
+
online zero-shot transfer: repeat 5 times
|
| 68 |
+
lam is 0.7000
|
| 69 |
+
lam is 0.7000
|
| 70 |
+
lam is 0.7000
|
| 71 |
+
lam is 0.7000
|
| 72 |
+
lam is 0.7000
|
| 73 |
+
mean acc of onlab is: 69.18
|
| 74 |
+
mean acc of onzeta is: 69.56
|
| 75 |
+
mean acc of MAPLS is: 71.50
|
| 76 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 77 |
+
the alpha is 0.5
|
| 78 |
+
load pre-trained model
|
| 79 |
+
load data
|
| 80 |
+
obtain text proxy
|
| 81 |
+
accuracy with text proxy: 68.27
|
| 82 |
+
online zero-shot transfer: repeat 5 times
|
| 83 |
+
lam is 0.7000
|
| 84 |
+
lam is 0.7000
|
| 85 |
+
lam is 0.7000
|
| 86 |
+
lam is 0.7000
|
| 87 |
+
lam is 0.7000
|
| 88 |
+
mean acc of onlab is: 68.79
|
| 89 |
+
mean acc of onzeta is: 69.11
|
| 90 |
+
mean acc of MAPLS is: 71.25
|
| 91 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 92 |
+
the alpha is 0.4
|
| 93 |
+
load pre-trained model
|
| 94 |
+
load data
|
| 95 |
+
obtain text proxy
|
| 96 |
+
accuracy with text proxy: 68.27
|
| 97 |
+
online zero-shot transfer: repeat 5 times
|
| 98 |
+
lam is 0.7000
|
| 99 |
+
lam is 0.7000
|
| 100 |
+
lam is 0.7000
|
| 101 |
+
lam is 0.7000
|
| 102 |
+
lam is 0.7000
|
| 103 |
+
mean acc of onlab is: 68.61
|
| 104 |
+
mean acc of onzeta is: 68.86
|
| 105 |
+
mean acc of MAPLS is: 71.20
|
| 106 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 107 |
+
the alpha is 0.3
|
| 108 |
+
load pre-trained model
|
| 109 |
+
load data
|
| 110 |
+
obtain text proxy
|
| 111 |
+
accuracy with text proxy: 68.27
|
| 112 |
+
online zero-shot transfer: repeat 5 times
|
| 113 |
+
lam is 0.7000
|
| 114 |
+
lam is 0.7000
|
| 115 |
+
lam is 0.7000
|
| 116 |
+
lam is 0.7000
|
| 117 |
+
lam is 0.7000
|
| 118 |
+
mean acc of onlab is: 68.38
|
| 119 |
+
mean acc of onzeta is: 68.65
|
| 120 |
+
mean acc of MAPLS is: 71.21
|
| 121 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 122 |
+
the alpha is 0.2
|
| 123 |
+
load pre-trained model
|
| 124 |
+
load data
|
| 125 |
+
obtain text proxy
|
| 126 |
+
accuracy with text proxy: 68.27
|
| 127 |
+
online zero-shot transfer: repeat 5 times
|
| 128 |
+
lam is 0.7000
|
| 129 |
+
lam is 0.7000
|
| 130 |
+
lam is 0.7000
|
| 131 |
+
lam is 0.7000
|
| 132 |
+
lam is 0.7000
|
| 133 |
+
mean acc of onlab is: 68.29
|
| 134 |
+
mean acc of onzeta is: 68.48
|
| 135 |
+
mean acc of MAPLS is: 71.13
|
| 136 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 137 |
+
the alpha is 0.1
|
| 138 |
+
load pre-trained model
|
| 139 |
+
load data
|
| 140 |
+
obtain text proxy
|
| 141 |
+
accuracy with text proxy: 68.27
|
| 142 |
+
online zero-shot transfer: repeat 5 times
|
| 143 |
+
lam is 0.7000
|
| 144 |
+
lam is 0.7000
|
| 145 |
+
lam is 0.7000
|
| 146 |
+
lam is 0.7000
|
| 147 |
+
lam is 0.7000
|
| 148 |
+
mean acc of onlab is: 68.27
|
| 149 |
+
mean acc of onzeta is: 68.41
|
| 150 |
+
mean acc of MAPLS is: 71.19
|
OnZeta/logs/debug_onzeta_eval_2025-06-11_22-52-28.log
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 2 |
+
the beta is 1.0
|
| 3 |
+
load pre-trained model
|
| 4 |
+
load data
|
| 5 |
+
obtain text proxy
|
| 6 |
+
accuracy with text proxy: 68.27
|
| 7 |
+
online zero-shot transfer: repeat 5 times
|
| 8 |
+
lam is 0.7000
|
| 9 |
+
lam is 0.7000
|
| 10 |
+
lam is 0.7000
|
| 11 |
+
lam is 0.7000
|
| 12 |
+
lam is 0.7000
|
| 13 |
+
mean acc of onlab is: 70.79
|
| 14 |
+
mean acc of onzeta is: 70.42
|
| 15 |
+
mean acc of MAPLS is: 70.47
|
| 16 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 17 |
+
the beta is 0.9
|
| 18 |
+
load pre-trained model
|
| 19 |
+
load data
|
| 20 |
+
obtain text proxy
|
| 21 |
+
accuracy with text proxy: 68.27
|
| 22 |
+
online zero-shot transfer: repeat 5 times
|
| 23 |
+
lam is 0.7000
|
| 24 |
+
lam is 0.7000
|
| 25 |
+
lam is 0.7000
|
| 26 |
+
lam is 0.7000
|
| 27 |
+
lam is 0.7000
|
| 28 |
+
mean acc of onlab is: 70.68
|
| 29 |
+
mean acc of onzeta is: 70.86
|
| 30 |
+
mean acc of MAPLS is: 70.96
|
| 31 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 32 |
+
the beta is 0.8
|
| 33 |
+
load pre-trained model
|
| 34 |
+
load data
|
| 35 |
+
obtain text proxy
|
| 36 |
+
accuracy with text proxy: 68.27
|
| 37 |
+
online zero-shot transfer: repeat 5 times
|
| 38 |
+
lam is 0.7000
|
| 39 |
+
lam is 0.7000
|
| 40 |
+
lam is 0.7000
|
| 41 |
+
lam is 0.7000
|
| 42 |
+
lam is 0.7000
|
| 43 |
+
mean acc of onlab is: 70.82
|
| 44 |
+
mean acc of onzeta is: 71.23
|
| 45 |
+
mean acc of MAPLS is: 71.28
|
| 46 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 47 |
+
the beta is 0.7
|
| 48 |
+
load pre-trained model
|
| 49 |
+
load data
|
| 50 |
+
obtain text proxy
|
| 51 |
+
accuracy with text proxy: 68.27
|
| 52 |
+
online zero-shot transfer: repeat 5 times
|
| 53 |
+
lam is 0.7000
|
| 54 |
+
lam is 0.7000
|
| 55 |
+
lam is 0.7000
|
| 56 |
+
lam is 0.7000
|
| 57 |
+
lam is 0.7000
|
| 58 |
+
mean acc of onlab is: 70.60
|
| 59 |
+
mean acc of onzeta is: 71.14
|
| 60 |
+
mean acc of MAPLS is: 71.15
|
| 61 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 62 |
+
the beta is 0.6
|
| 63 |
+
load pre-trained model
|
| 64 |
+
load data
|
| 65 |
+
obtain text proxy
|
| 66 |
+
accuracy with text proxy: 68.27
|
| 67 |
+
online zero-shot transfer: repeat 5 times
|
| 68 |
+
lam is 0.7000
|
| 69 |
+
lam is 0.7000
|
| 70 |
+
lam is 0.7000
|
| 71 |
+
lam is 0.7000
|
| 72 |
+
lam is 0.7000
|
| 73 |
+
mean acc of onlab is: 70.70
|
| 74 |
+
mean acc of onzeta is: 71.37
|
| 75 |
+
mean acc of MAPLS is: 71.38
|
| 76 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 77 |
+
the beta is 0.5
|
| 78 |
+
load pre-trained model
|
| 79 |
+
load data
|
| 80 |
+
obtain text proxy
|
| 81 |
+
accuracy with text proxy: 68.27
|
| 82 |
+
online zero-shot transfer: repeat 5 times
|
| 83 |
+
lam is 0.7000
|
| 84 |
+
lam is 0.7000
|
| 85 |
+
lam is 0.7000
|
| 86 |
+
lam is 0.7000
|
| 87 |
+
lam is 0.7000
|
| 88 |
+
mean acc of onlab is: 70.76
|
| 89 |
+
mean acc of onzeta is: 71.35
|
| 90 |
+
mean acc of MAPLS is: 71.42
|
| 91 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 92 |
+
the beta is 0.4
|
| 93 |
+
load pre-trained model
|
| 94 |
+
load data
|
| 95 |
+
obtain text proxy
|
| 96 |
+
accuracy with text proxy: 68.27
|
| 97 |
+
online zero-shot transfer: repeat 5 times
|
| 98 |
+
lam is 0.7000
|
| 99 |
+
lam is 0.7000
|
| 100 |
+
lam is 0.7000
|
| 101 |
+
lam is 0.7000
|
| 102 |
+
lam is 0.7000
|
| 103 |
+
mean acc of onlab is: 70.58
|
| 104 |
+
mean acc of onzeta is: 71.13
|
| 105 |
+
mean acc of MAPLS is: 71.16
|
| 106 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 107 |
+
the beta is 0.3
|
| 108 |
+
load pre-trained model
|
| 109 |
+
load data
|
| 110 |
+
obtain text proxy
|
| 111 |
+
accuracy with text proxy: 68.27
|
| 112 |
+
online zero-shot transfer: repeat 5 times
|
| 113 |
+
lam is 0.7000
|
| 114 |
+
lam is 0.7000
|
| 115 |
+
lam is 0.7000
|
| 116 |
+
lam is 0.7000
|
| 117 |
+
lam is 0.7000
|
| 118 |
+
mean acc of onlab is: 70.76
|
| 119 |
+
mean acc of onzeta is: 71.27
|
| 120 |
+
mean acc of MAPLS is: 71.29
|
| 121 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 122 |
+
the beta is 0.2
|
| 123 |
+
load pre-trained model
|
| 124 |
+
load data
|
| 125 |
+
obtain text proxy
|
| 126 |
+
accuracy with text proxy: 68.27
|
| 127 |
+
online zero-shot transfer: repeat 5 times
|
| 128 |
+
lam is 0.7000
|
| 129 |
+
lam is 0.7000
|
| 130 |
+
lam is 0.7000
|
| 131 |
+
lam is 0.7000
|
| 132 |
+
lam is 0.7000
|
| 133 |
+
mean acc of onlab is: 70.68
|
| 134 |
+
mean acc of onzeta is: 70.98
|
| 135 |
+
mean acc of MAPLS is: 71.05
|
| 136 |
+
Namespace(data_path='./CIFAR100_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 137 |
+
the beta is 0.1
|
| 138 |
+
load pre-trained model
|
| 139 |
+
load data
|
| 140 |
+
obtain text proxy
|
| 141 |
+
accuracy with text proxy: 68.27
|
| 142 |
+
online zero-shot transfer: repeat 5 times
|
| 143 |
+
lam is 0.7000
|
| 144 |
+
lam is 0.7000
|
| 145 |
+
lam is 0.7000
|
| 146 |
+
lam is 0.7000
|
| 147 |
+
lam is 0.7000
|
| 148 |
+
mean acc of onlab is: 70.70
|
| 149 |
+
mean acc of onzeta is: 70.82
|
| 150 |
+
mean acc of MAPLS is: 70.83
|
OnZeta/logs/debug_onzeta_eval_2025-06-11_23-00-32.log
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 2 |
+
the beta is 1.0
|
| 3 |
+
load pre-trained model
|
| 4 |
+
load data
|
| 5 |
+
obtain text proxy
|
| 6 |
+
accuracy with text proxy: 41.91
|
| 7 |
+
online zero-shot transfer: repeat 5 times
|
| 8 |
+
lam is 0.7000
|
| 9 |
+
lam is 0.7000
|
| 10 |
+
lam is 0.7000
|
| 11 |
+
lam is 0.7000
|
| 12 |
+
lam is 0.7000
|
| 13 |
+
mean acc of onlab is: 47.15
|
| 14 |
+
mean acc of onzeta is: 47.21
|
| 15 |
+
mean acc of MAPLS is: 47.43
|
| 16 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 17 |
+
the beta is 0.9
|
| 18 |
+
load pre-trained model
|
| 19 |
+
load data
|
| 20 |
+
obtain text proxy
|
| 21 |
+
accuracy with text proxy: 41.91
|
| 22 |
+
online zero-shot transfer: repeat 5 times
|
| 23 |
+
lam is 0.7000
|
| 24 |
+
lam is 0.7000
|
| 25 |
+
lam is 0.7000
|
| 26 |
+
lam is 0.7000
|
| 27 |
+
lam is 0.7000
|
| 28 |
+
mean acc of onlab is: 47.26
|
| 29 |
+
mean acc of onzeta is: 47.50
|
| 30 |
+
mean acc of MAPLS is: 47.76
|
| 31 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 32 |
+
the beta is 0.8
|
| 33 |
+
load pre-trained model
|
| 34 |
+
load data
|
| 35 |
+
obtain text proxy
|
| 36 |
+
accuracy with text proxy: 41.91
|
| 37 |
+
online zero-shot transfer: repeat 5 times
|
| 38 |
+
lam is 0.7000
|
| 39 |
+
lam is 0.7000
|
| 40 |
+
lam is 0.7000
|
| 41 |
+
lam is 0.7000
|
| 42 |
+
lam is 0.7000
|
| 43 |
+
mean acc of onlab is: 47.10
|
| 44 |
+
mean acc of onzeta is: 47.42
|
| 45 |
+
mean acc of MAPLS is: 47.66
|
| 46 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 47 |
+
the beta is 0.7
|
| 48 |
+
load pre-trained model
|
| 49 |
+
load data
|
| 50 |
+
obtain text proxy
|
| 51 |
+
accuracy with text proxy: 41.91
|
| 52 |
+
online zero-shot transfer: repeat 5 times
|
| 53 |
+
lam is 0.7000
|
| 54 |
+
lam is 0.7000
|
| 55 |
+
lam is 0.7000
|
| 56 |
+
lam is 0.7000
|
| 57 |
+
lam is 0.7000
|
| 58 |
+
mean acc of onlab is: 47.25
|
| 59 |
+
mean acc of onzeta is: 47.74
|
| 60 |
+
mean acc of MAPLS is: 48.04
|
| 61 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 62 |
+
the beta is 0.6
|
| 63 |
+
load pre-trained model
|
| 64 |
+
load data
|
| 65 |
+
obtain text proxy
|
| 66 |
+
accuracy with text proxy: 41.91
|
| 67 |
+
online zero-shot transfer: repeat 5 times
|
| 68 |
+
lam is 0.7000
|
| 69 |
+
lam is 0.7000
|
| 70 |
+
lam is 0.7000
|
| 71 |
+
lam is 0.7000
|
| 72 |
+
lam is 0.7000
|
| 73 |
+
mean acc of onlab is: 47.16
|
| 74 |
+
mean acc of onzeta is: 47.85
|
| 75 |
+
mean acc of MAPLS is: 48.14
|
| 76 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 77 |
+
the beta is 0.5
|
| 78 |
+
load pre-trained model
|
| 79 |
+
load data
|
| 80 |
+
obtain text proxy
|
| 81 |
+
accuracy with text proxy: 41.91
|
| 82 |
+
online zero-shot transfer: repeat 5 times
|
| 83 |
+
lam is 0.7000
|
| 84 |
+
lam is 0.7000
|
| 85 |
+
lam is 0.7000
|
| 86 |
+
lam is 0.7000
|
| 87 |
+
lam is 0.7000
|
| 88 |
+
mean acc of onlab is: 47.35
|
| 89 |
+
mean acc of onzeta is: 47.93
|
| 90 |
+
mean acc of MAPLS is: 48.12
|
| 91 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 92 |
+
the beta is 0.4
|
| 93 |
+
load pre-trained model
|
| 94 |
+
load data
|
| 95 |
+
obtain text proxy
|
| 96 |
+
accuracy with text proxy: 41.91
|
| 97 |
+
online zero-shot transfer: repeat 5 times
|
| 98 |
+
lam is 0.7000
|
| 99 |
+
lam is 0.7000
|
| 100 |
+
lam is 0.7000
|
| 101 |
+
lam is 0.7000
|
| 102 |
+
lam is 0.7000
|
| 103 |
+
mean acc of onlab is: 47.28
|
| 104 |
+
mean acc of onzeta is: 47.90
|
| 105 |
+
mean acc of MAPLS is: 48.06
|
| 106 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 107 |
+
the beta is 0.3
|
| 108 |
+
load pre-trained model
|
| 109 |
+
load data
|
| 110 |
+
obtain text proxy
|
| 111 |
+
accuracy with text proxy: 41.91
|
| 112 |
+
online zero-shot transfer: repeat 5 times
|
| 113 |
+
lam is 0.7000
|
| 114 |
+
lam is 0.7000
|
| 115 |
+
lam is 0.7000
|
| 116 |
+
lam is 0.7000
|
| 117 |
+
lam is 0.7000
|
| 118 |
+
mean acc of onlab is: 47.28
|
| 119 |
+
mean acc of onzeta is: 47.76
|
| 120 |
+
mean acc of MAPLS is: 47.94
|
| 121 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 122 |
+
the beta is 0.2
|
| 123 |
+
load pre-trained model
|
| 124 |
+
load data
|
| 125 |
+
obtain text proxy
|
| 126 |
+
accuracy with text proxy: 41.91
|
| 127 |
+
online zero-shot transfer: repeat 5 times
|
| 128 |
+
lam is 0.7000
|
| 129 |
+
lam is 0.7000
|
| 130 |
+
lam is 0.7000
|
| 131 |
+
lam is 0.7000
|
| 132 |
+
lam is 0.7000
|
| 133 |
+
mean acc of onlab is: 47.22
|
| 134 |
+
mean acc of onzeta is: 47.54
|
| 135 |
+
mean acc of MAPLS is: 47.73
|
| 136 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 137 |
+
the beta is 0.1
|
| 138 |
+
load pre-trained model
|
| 139 |
+
load data
|
| 140 |
+
obtain text proxy
|
| 141 |
+
accuracy with text proxy: 41.91
|
| 142 |
+
online zero-shot transfer: repeat 5 times
|
| 143 |
+
lam is 0.7000
|
| 144 |
+
lam is 0.7000
|
| 145 |
+
lam is 0.7000
|
| 146 |
+
lam is 0.7000
|
| 147 |
+
lam is 0.7000
|
| 148 |
+
mean acc of onlab is: 47.22
|
| 149 |
+
mean acc of onzeta is: 47.39
|
| 150 |
+
mean acc of MAPLS is: 47.58
|
OnZeta/logs/debug_onzeta_eval_2025-07-22_13-00-45.log
ADDED
|
File without changes
|
OnZeta/logs/debug_onzeta_eval_2025-07-22_13-01-26.log
ADDED
|
File without changes
|
OnZeta/logs/debug_onzeta_eval_2025-07-22_13-03-56.log
ADDED
|
File without changes
|
OnZeta/logs/debug_onzeta_eval_2025-07-22_13-04-08.log
ADDED
|
File without changes
|
OnZeta/logs/debug_onzeta_eval_2025-07-22_13-04-24.log
ADDED
|
File without changes
|
OnZeta/logs/mapls_inloop_mapls_only_RN50.log
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
mean acc of MAPLS only in-loop with lambda 0.95 is: 59.89
|
| 2 |
+
mean acc of MAPLS only in-loop with lambda 0.90 is: 61.17
|
| 3 |
+
mean acc of MAPLS only in-loop with lambda 0.80 is: 61.77
|
| 4 |
+
mean acc of MAPLS only in-loop with lambda 0.70 is: 61.77
|
| 5 |
+
mean acc of MAPLS only in-loop with lambda 0.60 is: 61.73
|
| 6 |
+
mean acc of MAPLS only in-loop with lambda 0.50 is: 61.57
|
| 7 |
+
mean acc of MAPLS only in-loop with lambda 0.40 is: 61.36
|
| 8 |
+
mean acc of MAPLS only in-loop with lambda 0.30 is: 61.12
|
| 9 |
+
mean acc of MAPLS only in-loop with lambda 0.20 is: 60.82
|
| 10 |
+
mean acc of MAPLS only in-loop with lambda 0.10 is: 60.57
|
OnZeta/logs/mapls_inloop_mapls_only_vitb16.log
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
mean acc of MAPLS only in-loop with lambda 0.95 is: 68.66
|
| 2 |
+
mean acc of MAPLS only in-loop with lambda 0.90 is: 69.61
|
| 3 |
+
mean acc of MAPLS only in-loop with lambda 0.80 is: 70.07
|
| 4 |
+
mean acc of MAPLS only in-loop with lambda 0.70 is: 70.13
|
| 5 |
+
mean acc of MAPLS only in-loop with lambda 0.60 is: 70.02
|
| 6 |
+
mean acc of MAPLS only in-loop with lambda 0.50 is: 69.78
|
| 7 |
+
mean acc of MAPLS only in-loop with lambda 0.40 is: 69.59
|
| 8 |
+
mean acc of MAPLS only in-loop with lambda 0.30 is: 69.35
|
| 9 |
+
mean acc of MAPLS only in-loop with lambda 0.20 is: 69.12
|
| 10 |
+
mean acc of MAPLS only in-loop with lambda 0.10 is: 68.95
|
OnZeta/logs/onzeta_eval.log
ADDED
|
@@ -0,0 +1,191 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
2025-06-06 13:07:46,802 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 2 |
+
2025-06-06 13:07:46,802 - INFO - load pre-trained model
|
| 3 |
+
2025-06-06 13:07:48,023 - INFO - load data
|
| 4 |
+
2025-06-06 13:07:53,313 - INFO - obtain text proxy
|
| 5 |
+
2025-06-06 13:07:53,743 - INFO - accuracy with text proxy: 41.91
|
| 6 |
+
2025-06-06 13:07:53,743 - INFO - online zero-shot transfer: repeat 5 times
|
| 7 |
+
2025-06-06 13:08:00,391 - INFO - mean acc of onlab is: 47.16
|
| 8 |
+
2025-06-06 13:08:00,392 - INFO - mean acc of onzeta is: 47.71
|
| 9 |
+
2025-06-06 13:08:00,392 - INFO - mean acc of MAPLS is: 47.22
|
| 10 |
+
2025-06-06 13:08:00,393 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 11 |
+
2025-06-06 13:08:00,393 - INFO - load pre-trained model
|
| 12 |
+
2025-06-06 13:08:01,549 - INFO - load data
|
| 13 |
+
2025-06-06 13:08:06,638 - INFO - obtain text proxy
|
| 14 |
+
2025-06-06 13:08:06,978 - INFO - accuracy with text proxy: 41.91
|
| 15 |
+
2025-06-06 13:08:06,978 - INFO - online zero-shot transfer: repeat 5 times
|
| 16 |
+
2025-06-06 13:08:13,675 - INFO - mean acc of onlab is: 47.13
|
| 17 |
+
2025-06-06 13:08:13,676 - INFO - mean acc of onzeta is: 47.75
|
| 18 |
+
2025-06-06 13:08:13,676 - INFO - mean acc of MAPLS is: 47.99
|
| 19 |
+
2025-06-06 13:08:13,677 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 20 |
+
2025-06-06 13:08:13,677 - INFO - load pre-trained model
|
| 21 |
+
2025-06-06 13:08:14,840 - INFO - load data
|
| 22 |
+
2025-06-06 13:08:19,957 - INFO - obtain text proxy
|
| 23 |
+
2025-06-06 13:08:20,296 - INFO - accuracy with text proxy: 41.91
|
| 24 |
+
2025-06-06 13:08:20,296 - INFO - online zero-shot transfer: repeat 5 times
|
| 25 |
+
2025-06-06 13:08:27,187 - INFO - mean acc of onlab is: 47.25
|
| 26 |
+
2025-06-06 13:08:27,187 - INFO - mean acc of onzeta is: 47.80
|
| 27 |
+
2025-06-06 13:08:27,187 - INFO - mean acc of MAPLS is: 48.03
|
| 28 |
+
2025-06-06 13:08:27,188 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 29 |
+
2025-06-06 13:08:27,188 - INFO - load pre-trained model
|
| 30 |
+
2025-06-06 13:08:28,303 - INFO - load data
|
| 31 |
+
2025-06-06 13:08:33,628 - INFO - obtain text proxy
|
| 32 |
+
2025-06-06 13:08:34,055 - INFO - accuracy with text proxy: 41.91
|
| 33 |
+
2025-06-06 13:08:34,056 - INFO - online zero-shot transfer: repeat 5 times
|
| 34 |
+
2025-06-06 13:08:40,606 - INFO - mean acc of onlab is: 47.19
|
| 35 |
+
2025-06-06 13:08:40,606 - INFO - mean acc of onzeta is: 47.84
|
| 36 |
+
2025-06-06 13:08:40,606 - INFO - mean acc of MAPLS is: 48.05
|
| 37 |
+
2025-06-06 13:08:40,608 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 38 |
+
2025-06-06 13:08:40,608 - INFO - load pre-trained model
|
| 39 |
+
2025-06-06 13:08:41,717 - INFO - load data
|
| 40 |
+
2025-06-06 13:08:46,808 - INFO - obtain text proxy
|
| 41 |
+
2025-06-06 13:08:47,165 - INFO - accuracy with text proxy: 41.91
|
| 42 |
+
2025-06-06 13:08:47,165 - INFO - online zero-shot transfer: repeat 5 times
|
| 43 |
+
2025-06-06 13:08:53,823 - INFO - mean acc of onlab is: 47.24
|
| 44 |
+
2025-06-06 13:08:53,823 - INFO - mean acc of onzeta is: 47.78
|
| 45 |
+
2025-06-06 13:08:53,823 - INFO - mean acc of MAPLS is: 47.97
|
| 46 |
+
2025-06-06 13:08:53,825 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 47 |
+
2025-06-06 13:08:53,825 - INFO - load pre-trained model
|
| 48 |
+
2025-06-06 13:08:54,935 - INFO - load data
|
| 49 |
+
2025-06-06 13:09:00,059 - INFO - obtain text proxy
|
| 50 |
+
2025-06-06 13:09:00,403 - INFO - accuracy with text proxy: 41.91
|
| 51 |
+
2025-06-06 13:09:00,403 - INFO - online zero-shot transfer: repeat 5 times
|
| 52 |
+
2025-06-06 13:09:43,969 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 53 |
+
2025-06-06 13:09:43,970 - INFO - load pre-trained model
|
| 54 |
+
2025-06-06 13:09:45,268 - INFO - load data
|
| 55 |
+
2025-06-06 13:09:50,604 - INFO - obtain text proxy
|
| 56 |
+
2025-06-06 13:09:51,035 - INFO - accuracy with text proxy: 41.91
|
| 57 |
+
2025-06-06 13:09:51,035 - INFO - online zero-shot transfer: repeat 5 times
|
| 58 |
+
2025-06-06 13:09:52,108 - INFO - lam is 1.0000
|
| 59 |
+
2025-06-06 13:09:53,381 - INFO - lam is 1.0000
|
| 60 |
+
2025-06-06 13:09:54,673 - INFO - lam is 1.0000
|
| 61 |
+
2025-06-06 13:09:55,950 - INFO - lam is 1.0000
|
| 62 |
+
2025-06-06 13:09:57,265 - INFO - lam is 1.0000
|
| 63 |
+
2025-06-06 13:09:57,514 - INFO - mean acc of onlab is: 47.30
|
| 64 |
+
2025-06-06 13:09:57,514 - INFO - mean acc of onzeta is: 47.94
|
| 65 |
+
2025-06-06 13:09:57,514 - INFO - mean acc of MAPLS is: 46.68
|
| 66 |
+
2025-06-06 13:09:57,516 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 67 |
+
2025-06-06 13:09:57,516 - INFO - load pre-trained model
|
| 68 |
+
2025-06-06 13:09:58,668 - INFO - load data
|
| 69 |
+
2025-06-06 13:10:03,793 - INFO - obtain text proxy
|
| 70 |
+
2025-06-06 13:10:04,134 - INFO - accuracy with text proxy: 41.91
|
| 71 |
+
2025-06-06 13:10:04,134 - INFO - online zero-shot transfer: repeat 5 times
|
| 72 |
+
2025-06-06 13:10:05,219 - INFO - lam is 0.9000
|
| 73 |
+
2025-06-06 13:10:06,732 - INFO - lam is 0.9000
|
| 74 |
+
2025-06-06 13:10:08,073 - INFO - lam is 0.9000
|
| 75 |
+
2025-06-06 13:10:09,377 - INFO - lam is 0.9000
|
| 76 |
+
2025-06-06 13:10:10,674 - INFO - lam is 0.9000
|
| 77 |
+
2025-06-06 13:10:10,905 - INFO - mean acc of onlab is: 47.14
|
| 78 |
+
2025-06-06 13:10:10,905 - INFO - mean acc of onzeta is: 47.76
|
| 79 |
+
2025-06-06 13:10:10,905 - INFO - mean acc of MAPLS is: 47.99
|
| 80 |
+
2025-06-06 13:10:10,907 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 81 |
+
2025-06-06 13:10:10,907 - INFO - load pre-trained model
|
| 82 |
+
2025-06-06 13:10:12,048 - INFO - load data
|
| 83 |
+
2025-06-06 13:10:17,166 - INFO - obtain text proxy
|
| 84 |
+
2025-06-06 13:10:17,507 - INFO - accuracy with text proxy: 41.91
|
| 85 |
+
2025-06-06 13:10:17,507 - INFO - online zero-shot transfer: repeat 5 times
|
| 86 |
+
2025-06-06 13:10:18,564 - INFO - lam is 0.8000
|
| 87 |
+
2025-06-06 13:10:19,846 - INFO - lam is 0.8000
|
| 88 |
+
2025-06-06 13:10:21,148 - INFO - lam is 0.8000
|
| 89 |
+
2025-06-06 13:10:22,449 - INFO - lam is 0.8000
|
| 90 |
+
2025-06-06 13:10:23,749 - INFO - lam is 0.8000
|
| 91 |
+
2025-06-06 13:10:23,988 - INFO - mean acc of onlab is: 47.26
|
| 92 |
+
2025-06-06 13:10:23,988 - INFO - mean acc of onzeta is: 47.89
|
| 93 |
+
2025-06-06 13:10:23,988 - INFO - mean acc of MAPLS is: 48.11
|
| 94 |
+
2025-06-06 13:10:23,989 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 95 |
+
2025-06-06 13:10:23,989 - INFO - load pre-trained model
|
| 96 |
+
2025-06-06 13:10:25,097 - INFO - load data
|
| 97 |
+
2025-06-06 13:10:30,237 - INFO - obtain text proxy
|
| 98 |
+
2025-06-06 13:10:30,577 - INFO - accuracy with text proxy: 41.91
|
| 99 |
+
2025-06-06 13:10:30,577 - INFO - online zero-shot transfer: repeat 5 times
|
| 100 |
+
2025-06-06 13:10:31,634 - INFO - lam is 0.7000
|
| 101 |
+
2025-06-06 13:10:32,915 - INFO - lam is 0.7000
|
| 102 |
+
2025-06-06 13:10:34,212 - INFO - lam is 0.7000
|
| 103 |
+
2025-06-06 13:10:35,508 - INFO - lam is 0.7000
|
| 104 |
+
2025-06-06 13:10:36,791 - INFO - lam is 0.7000
|
| 105 |
+
2025-06-06 13:10:37,026 - INFO - mean acc of onlab is: 47.29
|
| 106 |
+
2025-06-06 13:10:37,026 - INFO - mean acc of onzeta is: 47.93
|
| 107 |
+
2025-06-06 13:10:37,027 - INFO - mean acc of MAPLS is: 48.16
|
| 108 |
+
2025-06-06 13:10:37,028 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 109 |
+
2025-06-06 13:10:37,028 - INFO - load pre-trained model
|
| 110 |
+
2025-06-06 13:10:38,132 - INFO - load data
|
| 111 |
+
2025-06-06 13:10:43,278 - INFO - obtain text proxy
|
| 112 |
+
2025-06-06 13:10:43,617 - INFO - accuracy with text proxy: 41.91
|
| 113 |
+
2025-06-06 13:10:43,617 - INFO - online zero-shot transfer: repeat 5 times
|
| 114 |
+
2025-06-06 13:10:44,672 - INFO - lam is 0.6000
|
| 115 |
+
2025-06-06 13:10:45,942 - INFO - lam is 0.6000
|
| 116 |
+
2025-06-06 13:10:47,208 - INFO - lam is 0.6000
|
| 117 |
+
2025-06-06 13:10:48,522 - INFO - lam is 0.6000
|
| 118 |
+
2025-06-06 13:10:49,795 - INFO - lam is 0.6000
|
| 119 |
+
2025-06-06 13:10:50,025 - INFO - mean acc of onlab is: 47.23
|
| 120 |
+
2025-06-06 13:10:50,025 - INFO - mean acc of onzeta is: 47.85
|
| 121 |
+
2025-06-06 13:10:50,025 - INFO - mean acc of MAPLS is: 48.05
|
| 122 |
+
2025-06-06 13:10:50,026 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 123 |
+
2025-06-06 13:10:50,026 - INFO - load pre-trained model
|
| 124 |
+
2025-06-06 13:10:51,131 - INFO - load data
|
| 125 |
+
2025-06-06 13:10:56,399 - INFO - obtain text proxy
|
| 126 |
+
2025-06-06 13:10:56,743 - INFO - accuracy with text proxy: 41.91
|
| 127 |
+
2025-06-06 13:10:56,743 - INFO - online zero-shot transfer: repeat 5 times
|
| 128 |
+
2025-06-06 13:10:57,813 - INFO - lam is 0.5000
|
| 129 |
+
2025-06-06 13:10:59,158 - INFO - lam is 0.5000
|
| 130 |
+
2025-06-06 13:11:00,524 - INFO - lam is 0.5000
|
| 131 |
+
2025-06-06 13:11:01,805 - INFO - lam is 0.5000
|
| 132 |
+
2025-06-06 13:11:03,098 - INFO - lam is 0.5000
|
| 133 |
+
2025-06-06 13:11:03,323 - INFO - mean acc of onlab is: 47.14
|
| 134 |
+
2025-06-06 13:11:03,323 - INFO - mean acc of onzeta is: 47.66
|
| 135 |
+
2025-06-06 13:11:03,323 - INFO - mean acc of MAPLS is: 47.76
|
| 136 |
+
2025-06-06 13:11:03,324 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 137 |
+
2025-06-06 13:11:03,324 - INFO - load pre-trained model
|
| 138 |
+
2025-06-06 13:11:04,430 - INFO - load data
|
| 139 |
+
2025-06-06 13:11:09,583 - INFO - obtain text proxy
|
| 140 |
+
2025-06-06 13:11:09,928 - INFO - accuracy with text proxy: 41.91
|
| 141 |
+
2025-06-06 13:11:09,928 - INFO - online zero-shot transfer: repeat 5 times
|
| 142 |
+
2025-06-06 13:11:11,029 - INFO - lam is 0.4000
|
| 143 |
+
2025-06-06 13:11:12,338 - INFO - lam is 0.4000
|
| 144 |
+
2025-06-06 13:11:13,668 - INFO - lam is 0.4000
|
| 145 |
+
2025-06-06 13:11:14,940 - INFO - lam is 0.4000
|
| 146 |
+
2025-06-06 13:11:16,244 - INFO - lam is 0.4000
|
| 147 |
+
2025-06-06 13:11:16,479 - INFO - mean acc of onlab is: 47.24
|
| 148 |
+
2025-06-06 13:11:16,479 - INFO - mean acc of onzeta is: 47.80
|
| 149 |
+
2025-06-06 13:11:16,479 - INFO - mean acc of MAPLS is: 47.93
|
| 150 |
+
2025-06-06 13:11:16,480 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 151 |
+
2025-06-06 13:11:16,480 - INFO - load pre-trained model
|
| 152 |
+
2025-06-06 13:11:17,643 - INFO - load data
|
| 153 |
+
2025-06-06 13:11:22,919 - INFO - obtain text proxy
|
| 154 |
+
2025-06-06 13:11:23,264 - INFO - accuracy with text proxy: 41.91
|
| 155 |
+
2025-06-06 13:11:23,264 - INFO - online zero-shot transfer: repeat 5 times
|
| 156 |
+
2025-06-06 13:11:24,336 - INFO - lam is 0.3000
|
| 157 |
+
2025-06-06 13:11:25,793 - INFO - lam is 0.3000
|
| 158 |
+
2025-06-06 13:11:27,266 - INFO - lam is 0.3000
|
| 159 |
+
2025-06-06 13:11:28,757 - INFO - lam is 0.3000
|
| 160 |
+
2025-06-06 13:11:30,128 - INFO - lam is 0.3000
|
| 161 |
+
2025-06-06 13:11:30,367 - INFO - mean acc of onlab is: 47.19
|
| 162 |
+
2025-06-06 13:11:30,367 - INFO - mean acc of onzeta is: 47.79
|
| 163 |
+
2025-06-06 13:11:30,367 - INFO - mean acc of MAPLS is: 47.90
|
| 164 |
+
2025-06-06 13:11:30,368 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 165 |
+
2025-06-06 13:11:30,368 - INFO - load pre-trained model
|
| 166 |
+
2025-06-06 13:11:31,495 - INFO - load data
|
| 167 |
+
2025-06-06 13:11:36,668 - INFO - obtain text proxy
|
| 168 |
+
2025-06-06 13:11:37,008 - INFO - accuracy with text proxy: 41.91
|
| 169 |
+
2025-06-06 13:11:37,008 - INFO - online zero-shot transfer: repeat 5 times
|
| 170 |
+
2025-06-06 13:11:38,074 - INFO - lam is 0.2000
|
| 171 |
+
2025-06-06 13:11:39,405 - INFO - lam is 0.2000
|
| 172 |
+
2025-06-06 13:11:40,805 - INFO - lam is 0.2000
|
| 173 |
+
2025-06-06 13:11:42,294 - INFO - lam is 0.2000
|
| 174 |
+
2025-06-06 13:11:43,671 - INFO - lam is 0.2000
|
| 175 |
+
2025-06-06 13:11:43,910 - INFO - mean acc of onlab is: 47.40
|
| 176 |
+
2025-06-06 13:11:43,910 - INFO - mean acc of onzeta is: 47.88
|
| 177 |
+
2025-06-06 13:11:43,910 - INFO - mean acc of MAPLS is: 47.95
|
| 178 |
+
2025-06-06 13:11:43,912 - INFO - Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.4, repeat=5)
|
| 179 |
+
2025-06-06 13:11:43,912 - INFO - load pre-trained model
|
| 180 |
+
2025-06-06 13:11:45,020 - INFO - load data
|
| 181 |
+
2025-06-06 13:11:50,158 - INFO - obtain text proxy
|
| 182 |
+
2025-06-06 13:11:50,503 - INFO - accuracy with text proxy: 41.91
|
| 183 |
+
2025-06-06 13:11:50,503 - INFO - online zero-shot transfer: repeat 5 times
|
| 184 |
+
2025-06-06 13:11:51,571 - INFO - lam is 0.1000
|
| 185 |
+
2025-06-06 13:11:52,891 - INFO - lam is 0.1000
|
| 186 |
+
2025-06-06 13:11:54,162 - INFO - lam is 0.1000
|
| 187 |
+
2025-06-06 13:11:55,477 - INFO - lam is 0.1000
|
| 188 |
+
2025-06-06 13:11:56,752 - INFO - lam is 0.1000
|
| 189 |
+
2025-06-06 13:11:57,004 - INFO - mean acc of onlab is: 47.21
|
| 190 |
+
2025-06-06 13:11:57,004 - INFO - mean acc of onzeta is: 47.72
|
| 191 |
+
2025-06-06 13:11:57,004 - INFO - mean acc of MAPLS is: 47.76
|
OnZeta/logs/onzeta_eval_2025-06-06_13-25-22.log
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.8, repeat=5)
|
| 2 |
+
load pre-trained model
|
| 3 |
+
load data
|
| 4 |
+
obtain text proxy
|
| 5 |
+
accuracy with text proxy: 41.91
|
| 6 |
+
online zero-shot transfer: repeat 5 times
|
| 7 |
+
lam is 1.0000
|
| 8 |
+
lam is 1.0000
|
| 9 |
+
lam is 1.0000
|
| 10 |
+
lam is 1.0000
|
| 11 |
+
lam is 1.0000
|
| 12 |
+
mean acc of onlab is: 47.24
|
| 13 |
+
mean acc of onzeta is: 47.65
|
| 14 |
+
mean acc of MAPLS is: 46.42
|
| 15 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.8, repeat=5)
|
| 16 |
+
load pre-trained model
|
| 17 |
+
load data
|
| 18 |
+
obtain text proxy
|
| 19 |
+
accuracy with text proxy: 41.91
|
| 20 |
+
online zero-shot transfer: repeat 5 times
|
| 21 |
+
lam is 0.9000
|
| 22 |
+
lam is 0.9000
|
| 23 |
+
lam is 0.9000
|
| 24 |
+
lam is 0.9000
|
| 25 |
+
lam is 0.9000
|
| 26 |
+
mean acc of onlab is: 47.28
|
| 27 |
+
mean acc of onzeta is: 47.53
|
| 28 |
+
mean acc of MAPLS is: 47.94
|
| 29 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.8, repeat=5)
|
| 30 |
+
load pre-trained model
|
| 31 |
+
load data
|
| 32 |
+
obtain text proxy
|
| 33 |
+
accuracy with text proxy: 41.91
|
| 34 |
+
online zero-shot transfer: repeat 5 times
|
| 35 |
+
lam is 0.8000
|
| 36 |
+
lam is 0.8000
|
| 37 |
+
lam is 0.8000
|
| 38 |
+
lam is 0.8000
|
| 39 |
+
lam is 0.8000
|
| 40 |
+
mean acc of onlab is: 47.22
|
| 41 |
+
mean acc of onzeta is: 47.69
|
| 42 |
+
mean acc of MAPLS is: 47.97
|
| 43 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.8, repeat=5)
|
| 44 |
+
load pre-trained model
|
| 45 |
+
load data
|
| 46 |
+
obtain text proxy
|
| 47 |
+
accuracy with text proxy: 41.91
|
| 48 |
+
online zero-shot transfer: repeat 5 times
|
| 49 |
+
lam is 0.7000
|
| 50 |
+
lam is 0.7000
|
| 51 |
+
lam is 0.7000
|
| 52 |
+
lam is 0.7000
|
| 53 |
+
lam is 0.7000
|
| 54 |
+
mean acc of onlab is: 47.31
|
| 55 |
+
mean acc of onzeta is: 47.68
|
| 56 |
+
mean acc of MAPLS is: 47.92
|
| 57 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.8, repeat=5)
|
| 58 |
+
load pre-trained model
|
| 59 |
+
load data
|
| 60 |
+
obtain text proxy
|
| 61 |
+
accuracy with text proxy: 41.91
|
| 62 |
+
online zero-shot transfer: repeat 5 times
|
| 63 |
+
lam is 0.6000
|
| 64 |
+
lam is 0.6000
|
| 65 |
+
lam is 0.6000
|
| 66 |
+
lam is 0.6000
|
| 67 |
+
lam is 0.6000
|
| 68 |
+
mean acc of onlab is: 47.22
|
| 69 |
+
mean acc of onzeta is: 47.51
|
| 70 |
+
mean acc of MAPLS is: 47.74
|
| 71 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.8, repeat=5)
|
| 72 |
+
load pre-trained model
|
| 73 |
+
load data
|
| 74 |
+
obtain text proxy
|
| 75 |
+
accuracy with text proxy: 41.91
|
| 76 |
+
online zero-shot transfer: repeat 5 times
|
| 77 |
+
lam is 0.5000
|
| 78 |
+
lam is 0.5000
|
| 79 |
+
lam is 0.5000
|
| 80 |
+
lam is 0.5000
|
| 81 |
+
lam is 0.5000
|
| 82 |
+
mean acc of onlab is: 47.23
|
| 83 |
+
mean acc of onzeta is: 47.80
|
| 84 |
+
mean acc of MAPLS is: 47.93
|
| 85 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.8, repeat=5)
|
| 86 |
+
load pre-trained model
|
| 87 |
+
load data
|
| 88 |
+
obtain text proxy
|
| 89 |
+
accuracy with text proxy: 41.91
|
| 90 |
+
online zero-shot transfer: repeat 5 times
|
| 91 |
+
lam is 0.4000
|
| 92 |
+
lam is 0.4000
|
| 93 |
+
lam is 0.4000
|
| 94 |
+
lam is 0.4000
|
| 95 |
+
lam is 0.4000
|
| 96 |
+
mean acc of onlab is: 47.20
|
| 97 |
+
mean acc of onzeta is: 47.51
|
| 98 |
+
mean acc of MAPLS is: 47.63
|
| 99 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.8, repeat=5)
|
| 100 |
+
load pre-trained model
|
| 101 |
+
load data
|
| 102 |
+
obtain text proxy
|
| 103 |
+
accuracy with text proxy: 41.91
|
| 104 |
+
online zero-shot transfer: repeat 5 times
|
| 105 |
+
lam is 0.3000
|
| 106 |
+
lam is 0.3000
|
| 107 |
+
lam is 0.3000
|
| 108 |
+
lam is 0.3000
|
| 109 |
+
lam is 0.3000
|
| 110 |
+
mean acc of onlab is: 47.19
|
| 111 |
+
mean acc of onzeta is: 47.48
|
| 112 |
+
mean acc of MAPLS is: 47.55
|
| 113 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.8, repeat=5)
|
| 114 |
+
load pre-trained model
|
| 115 |
+
load data
|
| 116 |
+
obtain text proxy
|
| 117 |
+
accuracy with text proxy: 41.91
|
| 118 |
+
online zero-shot transfer: repeat 5 times
|
| 119 |
+
lam is 0.2000
|
| 120 |
+
lam is 0.2000
|
| 121 |
+
lam is 0.2000
|
| 122 |
+
lam is 0.2000
|
| 123 |
+
lam is 0.2000
|
| 124 |
+
mean acc of onlab is: 47.29
|
| 125 |
+
mean acc of onzeta is: 47.55
|
| 126 |
+
mean acc of MAPLS is: 47.61
|
| 127 |
+
Namespace(data_path='./CIFAR100_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=1, beta=0.8, repeat=5)
|
| 128 |
+
load pre-trained model
|
| 129 |
+
load data
|
| 130 |
+
obtain text proxy
|
| 131 |
+
accuracy with text proxy: 41.91
|
| 132 |
+
online zero-shot transfer: repeat 5 times
|
| 133 |
+
lam is 0.1000
|
| 134 |
+
lam is 0.1000
|
| 135 |
+
lam is 0.1000
|
| 136 |
+
lam is 0.1000
|
| 137 |
+
lam is 0.1000
|
| 138 |
+
mean acc of onlab is: 47.07
|
| 139 |
+
mean acc of onzeta is: 47.52
|
| 140 |
+
mean acc of MAPLS is: 47.55
|
OnZeta/logs/onzeta_eval_2025-06-06_21-54-46.log
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/home/han321/anaconda3/envs/OnZeta/bin/python /home/han321/projects/OnZeta/main_online_cifar10.py
|
| 2 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 3 |
+
load pre-trained model
|
| 4 |
+
load data
|
| 5 |
+
Files already downloaded and verified
|
| 6 |
+
obtain text proxy
|
| 7 |
+
accuracy with text proxy: 71.58
|
| 8 |
+
online zero-shot transfer: repeat 5 times
|
| 9 |
+
/home/han321/projects/OnZeta/main_online_cifar10.py:184: DeprecationWarning: Conversion of an array with ndim > 0 to a scalar is deprecated, and will error in future. Ensure you extract a single element from your array before performing this operation. (Deprecated NumPy 1.25.)
|
| 10 |
+
return [float(correct[:k].reshape(-1).float().sum(0, keepdim=True).cpu().numpy()) for k in topk]
|
| 11 |
+
Assigned lambda is 1.0000
|
| 12 |
+
Assigned lambda is 1.0000
|
| 13 |
+
Assigned lambda is 1.0000
|
| 14 |
+
Assigned lambda is 1.0000
|
| 15 |
+
Assigned lambda is 1.0000
|
| 16 |
+
mean acc of onlab is: 71.58
|
| 17 |
+
mean acc of onzeta is: 71.58
|
| 18 |
+
mean acc of MAPLS is: 10.00
|
| 19 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 20 |
+
load pre-trained model
|
| 21 |
+
load data
|
| 22 |
+
Files already downloaded and verified
|
| 23 |
+
obtain text proxy
|
| 24 |
+
accuracy with text proxy: 71.58
|
| 25 |
+
online zero-shot transfer: repeat 5 times
|
| 26 |
+
Assigned lambda is 0.9000
|
| 27 |
+
Assigned lambda is 0.9000
|
| 28 |
+
Assigned lambda is 0.9000
|
| 29 |
+
Assigned lambda is 0.9000
|
| 30 |
+
Assigned lambda is 0.9000
|
| 31 |
+
mean acc of onlab is: 71.58
|
| 32 |
+
mean acc of onzeta is: 71.62
|
| 33 |
+
mean acc of MAPLS is: 76.45
|
| 34 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 35 |
+
load pre-trained model
|
| 36 |
+
load data
|
| 37 |
+
Files already downloaded and verified
|
| 38 |
+
obtain text proxy
|
| 39 |
+
accuracy with text proxy: 71.58
|
| 40 |
+
online zero-shot transfer: repeat 5 times
|
| 41 |
+
Assigned lambda is 0.8000
|
| 42 |
+
Assigned lambda is 0.8000
|
| 43 |
+
Assigned lambda is 0.8000
|
| 44 |
+
Assigned lambda is 0.8000
|
| 45 |
+
Assigned lambda is 0.8000
|
| 46 |
+
mean acc of onlab is: 71.58
|
| 47 |
+
mean acc of onzeta is: 71.66
|
| 48 |
+
mean acc of MAPLS is: 77.57
|
| 49 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 50 |
+
load pre-trained model
|
| 51 |
+
load data
|
| 52 |
+
Files already downloaded and verified
|
| 53 |
+
obtain text proxy
|
| 54 |
+
accuracy with text proxy: 71.58
|
| 55 |
+
online zero-shot transfer: repeat 5 times
|
| 56 |
+
Assigned lambda is 0.7000
|
| 57 |
+
Assigned lambda is 0.7000
|
| 58 |
+
Assigned lambda is 0.7000
|
| 59 |
+
Assigned lambda is 0.7000
|
| 60 |
+
Assigned lambda is 0.7000
|
| 61 |
+
mean acc of onlab is: 71.58
|
| 62 |
+
mean acc of onzeta is: 71.63
|
| 63 |
+
mean acc of MAPLS is: 77.22
|
| 64 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 65 |
+
load pre-trained model
|
| 66 |
+
load data
|
| 67 |
+
Files already downloaded and verified
|
| 68 |
+
obtain text proxy
|
| 69 |
+
accuracy with text proxy: 71.58
|
| 70 |
+
online zero-shot transfer: repeat 5 times
|
| 71 |
+
Assigned lambda is 0.6000
|
| 72 |
+
Assigned lambda is 0.6000
|
| 73 |
+
Assigned lambda is 0.6000
|
| 74 |
+
Assigned lambda is 0.6000
|
| 75 |
+
Assigned lambda is 0.6000
|
| 76 |
+
mean acc of onlab is: 71.58
|
| 77 |
+
mean acc of onzeta is: 71.65
|
| 78 |
+
mean acc of MAPLS is: 76.49
|
| 79 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 80 |
+
load pre-trained model
|
| 81 |
+
load data
|
| 82 |
+
Files already downloaded and verified
|
| 83 |
+
obtain text proxy
|
| 84 |
+
accuracy with text proxy: 71.58
|
| 85 |
+
online zero-shot transfer: repeat 5 times
|
| 86 |
+
Assigned lambda is 0.5000
|
| 87 |
+
Assigned lambda is 0.5000
|
| 88 |
+
Assigned lambda is 0.5000
|
| 89 |
+
Assigned lambda is 0.5000
|
| 90 |
+
Assigned lambda is 0.5000
|
| 91 |
+
mean acc of onlab is: 71.58
|
| 92 |
+
mean acc of onzeta is: 71.62
|
| 93 |
+
mean acc of MAPLS is: 75.84
|
| 94 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 95 |
+
load pre-trained model
|
| 96 |
+
load data
|
| 97 |
+
Files already downloaded and verified
|
| 98 |
+
obtain text proxy
|
| 99 |
+
accuracy with text proxy: 71.58
|
| 100 |
+
online zero-shot transfer: repeat 5 times
|
| 101 |
+
Assigned lambda is 0.4000
|
| 102 |
+
Assigned lambda is 0.4000
|
| 103 |
+
Assigned lambda is 0.4000
|
| 104 |
+
Assigned lambda is 0.4000
|
| 105 |
+
Assigned lambda is 0.4000
|
| 106 |
+
mean acc of onlab is: 71.58
|
| 107 |
+
mean acc of onzeta is: 71.58
|
| 108 |
+
mean acc of MAPLS is: 75.02
|
| 109 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 110 |
+
load pre-trained model
|
| 111 |
+
load data
|
| 112 |
+
Files already downloaded and verified
|
| 113 |
+
obtain text proxy
|
| 114 |
+
accuracy with text proxy: 71.58
|
| 115 |
+
online zero-shot transfer: repeat 5 times
|
| 116 |
+
Assigned lambda is 0.3000
|
| 117 |
+
Assigned lambda is 0.3000
|
| 118 |
+
Assigned lambda is 0.3000
|
| 119 |
+
Assigned lambda is 0.3000
|
| 120 |
+
Assigned lambda is 0.3000
|
| 121 |
+
mean acc of onlab is: 71.58
|
| 122 |
+
mean acc of onzeta is: 71.60
|
| 123 |
+
mean acc of MAPLS is: 74.05
|
| 124 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 125 |
+
load pre-trained model
|
| 126 |
+
load data
|
| 127 |
+
Files already downloaded and verified
|
| 128 |
+
obtain text proxy
|
| 129 |
+
accuracy with text proxy: 71.58
|
| 130 |
+
online zero-shot transfer: repeat 5 times
|
| 131 |
+
Assigned lambda is 0.2000
|
| 132 |
+
Assigned lambda is 0.2000
|
| 133 |
+
Assigned lambda is 0.2000
|
| 134 |
+
Assigned lambda is 0.2000
|
| 135 |
+
Assigned lambda is 0.2000
|
| 136 |
+
mean acc of onlab is: 71.58
|
| 137 |
+
mean acc of onzeta is: 71.59
|
| 138 |
+
mean acc of MAPLS is: 73.21
|
| 139 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 140 |
+
load pre-trained model
|
| 141 |
+
load data
|
| 142 |
+
Files already downloaded and verified
|
| 143 |
+
obtain text proxy
|
| 144 |
+
accuracy with text proxy: 71.58
|
| 145 |
+
online zero-shot transfer: repeat 5 times
|
| 146 |
+
Assigned lambda is 0.1000
|
| 147 |
+
Assigned lambda is 0.1000
|
| 148 |
+
Assigned lambda is 0.1000
|
| 149 |
+
Assigned lambda is 0.1000
|
| 150 |
+
Assigned lambda is 0.1000
|
| 151 |
+
mean acc of onlab is: 71.58
|
| 152 |
+
mean acc of onzeta is: 71.64
|
| 153 |
+
mean acc of MAPLS is: 72.43
|
| 154 |
+
|
| 155 |
+
Process finished with exit code 0
|
OnZeta/logs/onzeta_eval_2025-06-11_20-29-37.log
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 2 |
+
the alpha is 1.0
|
| 3 |
+
load pre-trained model
|
| 4 |
+
load data
|
| 5 |
+
obtain text proxy
|
| 6 |
+
accuracy with text proxy: 90.77
|
| 7 |
+
online zero-shot transfer: repeat 5 times
|
| 8 |
+
lam is 1.0000
|
| 9 |
+
lam is 1.0000
|
| 10 |
+
lam is 1.0000
|
| 11 |
+
lam is 1.0000
|
| 12 |
+
lam is 1.0000
|
| 13 |
+
mean acc of onlab is: 90.46
|
| 14 |
+
mean acc of onzeta is: 90.71
|
| 15 |
+
mean acc of MAPLS is: 90.72
|
| 16 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 17 |
+
the alpha is 0.9
|
| 18 |
+
load pre-trained model
|
| 19 |
+
load data
|
| 20 |
+
obtain text proxy
|
| 21 |
+
accuracy with text proxy: 90.77
|
| 22 |
+
online zero-shot transfer: repeat 5 times
|
| 23 |
+
lam is 1.0000
|
| 24 |
+
lam is 1.0000
|
| 25 |
+
lam is 1.0000
|
| 26 |
+
lam is 1.0000
|
| 27 |
+
lam is 1.0000
|
| 28 |
+
mean acc of onlab is: 90.74
|
| 29 |
+
mean acc of onzeta is: 91.00
|
| 30 |
+
mean acc of MAPLS is: 91.11
|
| 31 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 32 |
+
the alpha is 0.8
|
| 33 |
+
load pre-trained model
|
| 34 |
+
load data
|
| 35 |
+
obtain text proxy
|
| 36 |
+
accuracy with text proxy: 90.77
|
| 37 |
+
online zero-shot transfer: repeat 5 times
|
| 38 |
+
lam is 1.0000
|
| 39 |
+
lam is 1.0000
|
| 40 |
+
lam is 1.0000
|
| 41 |
+
lam is 1.0000
|
| 42 |
+
lam is 1.0000
|
| 43 |
+
mean acc of onlab is: 90.90
|
| 44 |
+
mean acc of onzeta is: 91.11
|
| 45 |
+
mean acc of MAPLS is: 91.51
|
| 46 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 47 |
+
the alpha is 0.7
|
| 48 |
+
load pre-trained model
|
| 49 |
+
load data
|
| 50 |
+
obtain text proxy
|
| 51 |
+
accuracy with text proxy: 90.77
|
| 52 |
+
online zero-shot transfer: repeat 5 times
|
| 53 |
+
lam is 1.0000
|
| 54 |
+
lam is 1.0000
|
| 55 |
+
lam is 1.0000
|
| 56 |
+
lam is 1.0000
|
| 57 |
+
lam is 1.0000
|
| 58 |
+
mean acc of onlab is: 90.88
|
| 59 |
+
mean acc of onzeta is: 91.08
|
| 60 |
+
mean acc of MAPLS is: 91.56
|
| 61 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 62 |
+
the alpha is 0.6
|
| 63 |
+
load pre-trained model
|
| 64 |
+
load data
|
| 65 |
+
obtain text proxy
|
| 66 |
+
accuracy with text proxy: 90.77
|
| 67 |
+
online zero-shot transfer: repeat 5 times
|
| 68 |
+
lam is 1.0000
|
| 69 |
+
lam is 1.0000
|
| 70 |
+
lam is 1.0000
|
| 71 |
+
lam is 1.0000
|
| 72 |
+
lam is 1.0000
|
| 73 |
+
mean acc of onlab is: 90.99
|
| 74 |
+
mean acc of onzeta is: 91.16
|
| 75 |
+
mean acc of MAPLS is: 91.74
|
| 76 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 77 |
+
the alpha is 0.5
|
| 78 |
+
load pre-trained model
|
| 79 |
+
load data
|
| 80 |
+
obtain text proxy
|
| 81 |
+
accuracy with text proxy: 90.77
|
| 82 |
+
online zero-shot transfer: repeat 5 times
|
| 83 |
+
lam is 1.0000
|
| 84 |
+
lam is 1.0000
|
| 85 |
+
lam is 1.0000
|
| 86 |
+
lam is 1.0000
|
| 87 |
+
lam is 1.0000
|
| 88 |
+
mean acc of onlab is: 90.86
|
| 89 |
+
mean acc of onzeta is: 91.06
|
| 90 |
+
mean acc of MAPLS is: 91.70
|
| 91 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 92 |
+
the alpha is 0.4
|
| 93 |
+
load pre-trained model
|
| 94 |
+
load data
|
| 95 |
+
obtain text proxy
|
| 96 |
+
accuracy with text proxy: 90.77
|
| 97 |
+
online zero-shot transfer: repeat 5 times
|
| 98 |
+
lam is 1.0000
|
| 99 |
+
lam is 1.0000
|
| 100 |
+
lam is 1.0000
|
| 101 |
+
lam is 1.0000
|
| 102 |
+
lam is 1.0000
|
| 103 |
+
mean acc of onlab is: 90.94
|
| 104 |
+
mean acc of onzeta is: 91.10
|
| 105 |
+
mean acc of MAPLS is: 91.74
|
| 106 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 107 |
+
the alpha is 0.3
|
| 108 |
+
load pre-trained model
|
| 109 |
+
load data
|
| 110 |
+
obtain text proxy
|
| 111 |
+
accuracy with text proxy: 90.77
|
| 112 |
+
online zero-shot transfer: repeat 5 times
|
| 113 |
+
lam is 1.0000
|
| 114 |
+
lam is 1.0000
|
| 115 |
+
lam is 1.0000
|
| 116 |
+
lam is 1.0000
|
| 117 |
+
lam is 1.0000
|
| 118 |
+
mean acc of onlab is: 90.89
|
| 119 |
+
mean acc of onzeta is: 91.12
|
| 120 |
+
mean acc of MAPLS is: 91.72
|
| 121 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 122 |
+
the alpha is 0.2
|
| 123 |
+
load pre-trained model
|
| 124 |
+
load data
|
| 125 |
+
obtain text proxy
|
| 126 |
+
accuracy with text proxy: 90.77
|
| 127 |
+
online zero-shot transfer: repeat 5 times
|
| 128 |
+
lam is 1.0000
|
| 129 |
+
lam is 1.0000
|
| 130 |
+
lam is 1.0000
|
| 131 |
+
lam is 1.0000
|
| 132 |
+
lam is 1.0000
|
| 133 |
+
mean acc of onlab is: 90.83
|
| 134 |
+
mean acc of onzeta is: 91.01
|
| 135 |
+
mean acc of MAPLS is: 91.80
|
| 136 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 137 |
+
the alpha is 0.1
|
| 138 |
+
load pre-trained model
|
| 139 |
+
load data
|
| 140 |
+
obtain text proxy
|
| 141 |
+
accuracy with text proxy: 90.77
|
| 142 |
+
online zero-shot transfer: repeat 5 times
|
| 143 |
+
lam is 1.0000
|
| 144 |
+
lam is 1.0000
|
| 145 |
+
lam is 1.0000
|
| 146 |
+
lam is 1.0000
|
| 147 |
+
lam is 1.0000
|
| 148 |
+
mean acc of onlab is: 90.78
|
| 149 |
+
mean acc of onzeta is: 90.94
|
| 150 |
+
mean acc of MAPLS is: 91.76
|
OnZeta/logs/onzeta_eval_2025-06-11_21-19-15.log
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 2 |
+
the alpha is 1.0
|
| 3 |
+
load pre-trained model
|
| 4 |
+
load data
|
| 5 |
+
obtain text proxy
|
| 6 |
+
accuracy with text proxy: 71.58
|
| 7 |
+
online zero-shot transfer: repeat 5 times
|
| 8 |
+
lam is 0.8000
|
| 9 |
+
lam is 0.8000
|
| 10 |
+
lam is 0.8000
|
| 11 |
+
lam is 0.8000
|
| 12 |
+
lam is 0.8000
|
| 13 |
+
mean acc of onlab is: 75.26
|
| 14 |
+
mean acc of onzeta is: 75.39
|
| 15 |
+
mean acc of MAPLS is: 75.42
|
| 16 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 17 |
+
the alpha is 0.9
|
| 18 |
+
load pre-trained model
|
| 19 |
+
load data
|
| 20 |
+
obtain text proxy
|
| 21 |
+
accuracy with text proxy: 71.58
|
| 22 |
+
online zero-shot transfer: repeat 5 times
|
| 23 |
+
lam is 0.8000
|
| 24 |
+
lam is 0.8000
|
| 25 |
+
lam is 0.8000
|
| 26 |
+
lam is 0.8000
|
| 27 |
+
lam is 0.8000
|
| 28 |
+
mean acc of onlab is: 75.82
|
| 29 |
+
mean acc of onzeta is: 75.90
|
| 30 |
+
mean acc of MAPLS is: 76.36
|
| 31 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 32 |
+
the alpha is 0.8
|
| 33 |
+
load pre-trained model
|
| 34 |
+
load data
|
| 35 |
+
obtain text proxy
|
| 36 |
+
accuracy with text proxy: 71.58
|
| 37 |
+
online zero-shot transfer: repeat 5 times
|
| 38 |
+
lam is 0.8000
|
| 39 |
+
lam is 0.8000
|
| 40 |
+
lam is 0.8000
|
| 41 |
+
lam is 0.8000
|
| 42 |
+
lam is 0.8000
|
| 43 |
+
mean acc of onlab is: 75.14
|
| 44 |
+
mean acc of onzeta is: 75.17
|
| 45 |
+
mean acc of MAPLS is: 76.65
|
| 46 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 47 |
+
the alpha is 0.7
|
| 48 |
+
load pre-trained model
|
| 49 |
+
load data
|
| 50 |
+
obtain text proxy
|
| 51 |
+
accuracy with text proxy: 71.58
|
| 52 |
+
online zero-shot transfer: repeat 5 times
|
| 53 |
+
lam is 0.8000
|
| 54 |
+
lam is 0.8000
|
| 55 |
+
lam is 0.8000
|
| 56 |
+
lam is 0.8000
|
| 57 |
+
lam is 0.8000
|
| 58 |
+
mean acc of onlab is: 74.39
|
| 59 |
+
mean acc of onzeta is: 74.41
|
| 60 |
+
mean acc of MAPLS is: 77.04
|
| 61 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 62 |
+
the alpha is 0.6
|
| 63 |
+
load pre-trained model
|
| 64 |
+
load data
|
| 65 |
+
obtain text proxy
|
| 66 |
+
accuracy with text proxy: 71.58
|
| 67 |
+
online zero-shot transfer: repeat 5 times
|
| 68 |
+
lam is 0.8000
|
| 69 |
+
lam is 0.8000
|
| 70 |
+
lam is 0.8000
|
| 71 |
+
lam is 0.8000
|
| 72 |
+
lam is 0.8000
|
| 73 |
+
mean acc of onlab is: 73.60
|
| 74 |
+
mean acc of onzeta is: 73.69
|
| 75 |
+
mean acc of MAPLS is: 77.21
|
| 76 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 77 |
+
the alpha is 0.5
|
| 78 |
+
load pre-trained model
|
| 79 |
+
load data
|
| 80 |
+
obtain text proxy
|
| 81 |
+
accuracy with text proxy: 71.58
|
| 82 |
+
online zero-shot transfer: repeat 5 times
|
| 83 |
+
lam is 0.8000
|
| 84 |
+
lam is 0.8000
|
| 85 |
+
lam is 0.8000
|
| 86 |
+
lam is 0.8000
|
| 87 |
+
lam is 0.8000
|
| 88 |
+
mean acc of onlab is: 72.79
|
| 89 |
+
mean acc of onzeta is: 72.76
|
| 90 |
+
mean acc of MAPLS is: 77.44
|
| 91 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 92 |
+
the alpha is 0.4
|
| 93 |
+
load pre-trained model
|
| 94 |
+
load data
|
| 95 |
+
obtain text proxy
|
| 96 |
+
accuracy with text proxy: 71.58
|
| 97 |
+
online zero-shot transfer: repeat 5 times
|
| 98 |
+
lam is 0.8000
|
| 99 |
+
lam is 0.8000
|
| 100 |
+
lam is 0.8000
|
| 101 |
+
lam is 0.8000
|
| 102 |
+
lam is 0.8000
|
| 103 |
+
mean acc of onlab is: 72.33
|
| 104 |
+
mean acc of onzeta is: 72.37
|
| 105 |
+
mean acc of MAPLS is: 77.46
|
| 106 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 107 |
+
the alpha is 0.3
|
| 108 |
+
load pre-trained model
|
| 109 |
+
load data
|
| 110 |
+
obtain text proxy
|
| 111 |
+
accuracy with text proxy: 71.58
|
| 112 |
+
online zero-shot transfer: repeat 5 times
|
| 113 |
+
lam is 0.8000
|
| 114 |
+
lam is 0.8000
|
| 115 |
+
lam is 0.8000
|
| 116 |
+
lam is 0.8000
|
| 117 |
+
lam is 0.8000
|
| 118 |
+
mean acc of onlab is: 72.00
|
| 119 |
+
mean acc of onzeta is: 72.02
|
| 120 |
+
mean acc of MAPLS is: 77.54
|
| 121 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 122 |
+
the alpha is 0.2
|
| 123 |
+
load pre-trained model
|
| 124 |
+
load data
|
| 125 |
+
obtain text proxy
|
| 126 |
+
accuracy with text proxy: 71.58
|
| 127 |
+
online zero-shot transfer: repeat 5 times
|
| 128 |
+
lam is 0.8000
|
| 129 |
+
lam is 0.8000
|
| 130 |
+
lam is 0.8000
|
| 131 |
+
lam is 0.8000
|
| 132 |
+
lam is 0.8000
|
| 133 |
+
mean acc of onlab is: 71.77
|
| 134 |
+
mean acc of onzeta is: 71.79
|
| 135 |
+
mean acc of MAPLS is: 77.56
|
| 136 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 137 |
+
the alpha is 0.1
|
| 138 |
+
load pre-trained model
|
| 139 |
+
load data
|
| 140 |
+
obtain text proxy
|
| 141 |
+
accuracy with text proxy: 71.58
|
| 142 |
+
online zero-shot transfer: repeat 5 times
|
| 143 |
+
lam is 0.8000
|
| 144 |
+
lam is 0.8000
|
| 145 |
+
lam is 0.8000
|
| 146 |
+
lam is 0.8000
|
| 147 |
+
lam is 0.8000
|
| 148 |
+
mean acc of onlab is: 71.64
|
| 149 |
+
mean acc of onzeta is: 71.64
|
| 150 |
+
mean acc of MAPLS is: 77.60
|
OnZeta/logs/onzeta_eval_2025-06-11_21-44-32.log
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 2 |
+
the beta is 1.0
|
| 3 |
+
load pre-trained model
|
| 4 |
+
load data
|
| 5 |
+
obtain text proxy
|
| 6 |
+
accuracy with text proxy: 71.58
|
| 7 |
+
online zero-shot transfer: repeat 5 times
|
| 8 |
+
lam is 0.8000
|
| 9 |
+
lam is 0.8000
|
| 10 |
+
lam is 0.8000
|
| 11 |
+
lam is 0.8000
|
| 12 |
+
lam is 0.8000
|
| 13 |
+
mean acc of onlab is: 71.58
|
| 14 |
+
mean acc of onzeta is: 71.09
|
| 15 |
+
mean acc of MAPLS is: 76.99
|
| 16 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 17 |
+
the beta is 0.9
|
| 18 |
+
load pre-trained model
|
| 19 |
+
load data
|
| 20 |
+
obtain text proxy
|
| 21 |
+
accuracy with text proxy: 71.58
|
| 22 |
+
online zero-shot transfer: repeat 5 times
|
| 23 |
+
lam is 0.8000
|
| 24 |
+
lam is 0.8000
|
| 25 |
+
lam is 0.8000
|
| 26 |
+
lam is 0.8000
|
| 27 |
+
lam is 0.8000
|
| 28 |
+
mean acc of onlab is: 71.58
|
| 29 |
+
mean acc of onzeta is: 71.31
|
| 30 |
+
mean acc of MAPLS is: 77.12
|
| 31 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 32 |
+
the beta is 0.8
|
| 33 |
+
load pre-trained model
|
| 34 |
+
load data
|
| 35 |
+
obtain text proxy
|
| 36 |
+
accuracy with text proxy: 71.58
|
| 37 |
+
online zero-shot transfer: repeat 5 times
|
| 38 |
+
lam is 0.8000
|
| 39 |
+
lam is 0.8000
|
| 40 |
+
lam is 0.8000
|
| 41 |
+
lam is 0.8000
|
| 42 |
+
lam is 0.8000
|
| 43 |
+
mean acc of onlab is: 71.58
|
| 44 |
+
mean acc of onzeta is: 71.29
|
| 45 |
+
mean acc of MAPLS is: 77.15
|
| 46 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 47 |
+
the beta is 0.7
|
| 48 |
+
load pre-trained model
|
| 49 |
+
load data
|
| 50 |
+
obtain text proxy
|
| 51 |
+
accuracy with text proxy: 71.58
|
| 52 |
+
online zero-shot transfer: repeat 5 times
|
| 53 |
+
lam is 0.8000
|
| 54 |
+
lam is 0.8000
|
| 55 |
+
lam is 0.8000
|
| 56 |
+
lam is 0.8000
|
| 57 |
+
lam is 0.8000
|
| 58 |
+
mean acc of onlab is: 71.58
|
| 59 |
+
mean acc of onzeta is: 71.50
|
| 60 |
+
mean acc of MAPLS is: 77.31
|
| 61 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 62 |
+
the beta is 0.6
|
| 63 |
+
load pre-trained model
|
| 64 |
+
load data
|
| 65 |
+
obtain text proxy
|
| 66 |
+
accuracy with text proxy: 71.58
|
| 67 |
+
online zero-shot transfer: repeat 5 times
|
| 68 |
+
lam is 0.8000
|
| 69 |
+
lam is 0.8000
|
| 70 |
+
lam is 0.8000
|
| 71 |
+
lam is 0.8000
|
| 72 |
+
lam is 0.8000
|
| 73 |
+
mean acc of onlab is: 71.58
|
| 74 |
+
mean acc of onzeta is: 71.46
|
| 75 |
+
mean acc of MAPLS is: 77.29
|
| 76 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 77 |
+
the beta is 0.5
|
| 78 |
+
load pre-trained model
|
| 79 |
+
load data
|
| 80 |
+
obtain text proxy
|
| 81 |
+
accuracy with text proxy: 71.58
|
| 82 |
+
online zero-shot transfer: repeat 5 times
|
| 83 |
+
lam is 0.8000
|
| 84 |
+
lam is 0.8000
|
| 85 |
+
lam is 0.8000
|
| 86 |
+
lam is 0.8000
|
| 87 |
+
lam is 0.8000
|
| 88 |
+
mean acc of onlab is: 71.58
|
| 89 |
+
mean acc of onzeta is: 71.59
|
| 90 |
+
mean acc of MAPLS is: 77.42
|
| 91 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 92 |
+
the beta is 0.4
|
| 93 |
+
load pre-trained model
|
| 94 |
+
load data
|
| 95 |
+
obtain text proxy
|
| 96 |
+
accuracy with text proxy: 71.58
|
| 97 |
+
online zero-shot transfer: repeat 5 times
|
| 98 |
+
lam is 0.8000
|
| 99 |
+
lam is 0.8000
|
| 100 |
+
lam is 0.8000
|
| 101 |
+
lam is 0.8000
|
| 102 |
+
lam is 0.8000
|
| 103 |
+
mean acc of onlab is: 71.58
|
| 104 |
+
mean acc of onzeta is: 71.60
|
| 105 |
+
mean acc of MAPLS is: 77.55
|
| 106 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 107 |
+
the beta is 0.3
|
| 108 |
+
load pre-trained model
|
| 109 |
+
load data
|
| 110 |
+
obtain text proxy
|
| 111 |
+
accuracy with text proxy: 71.58
|
| 112 |
+
online zero-shot transfer: repeat 5 times
|
| 113 |
+
lam is 0.8000
|
| 114 |
+
lam is 0.8000
|
| 115 |
+
lam is 0.8000
|
| 116 |
+
lam is 0.8000
|
| 117 |
+
lam is 0.8000
|
| 118 |
+
mean acc of onlab is: 71.58
|
| 119 |
+
mean acc of onzeta is: 71.59
|
| 120 |
+
mean acc of MAPLS is: 77.58
|
| 121 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 122 |
+
the beta is 0.2
|
| 123 |
+
load pre-trained model
|
| 124 |
+
load data
|
| 125 |
+
obtain text proxy
|
| 126 |
+
accuracy with text proxy: 71.58
|
| 127 |
+
online zero-shot transfer: repeat 5 times
|
| 128 |
+
lam is 0.8000
|
| 129 |
+
lam is 0.8000
|
| 130 |
+
lam is 0.8000
|
| 131 |
+
lam is 0.8000
|
| 132 |
+
lam is 0.8000
|
| 133 |
+
mean acc of onlab is: 71.58
|
| 134 |
+
mean acc of onzeta is: 71.69
|
| 135 |
+
mean acc of MAPLS is: 77.52
|
| 136 |
+
Namespace(data_path='./CIFAR10_TEST', arch='RN50', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 137 |
+
the beta is 0.1
|
| 138 |
+
load pre-trained model
|
| 139 |
+
load data
|
| 140 |
+
obtain text proxy
|
| 141 |
+
accuracy with text proxy: 71.58
|
| 142 |
+
online zero-shot transfer: repeat 5 times
|
| 143 |
+
lam is 0.8000
|
| 144 |
+
lam is 0.8000
|
| 145 |
+
lam is 0.8000
|
| 146 |
+
lam is 0.8000
|
| 147 |
+
lam is 0.8000
|
| 148 |
+
mean acc of onlab is: 71.58
|
| 149 |
+
mean acc of onzeta is: 71.65
|
| 150 |
+
mean acc of MAPLS is: 77.58
|
OnZeta/logs/onzeta_eval_2025-06-11_22-09-19.log
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 2 |
+
the beta is 1.0
|
| 3 |
+
load pre-trained model
|
| 4 |
+
load data
|
| 5 |
+
obtain text proxy
|
| 6 |
+
accuracy with text proxy: 90.77
|
| 7 |
+
online zero-shot transfer: repeat 5 times
|
| 8 |
+
lam is 1.0000
|
| 9 |
+
lam is 1.0000
|
| 10 |
+
lam is 1.0000
|
| 11 |
+
lam is 1.0000
|
| 12 |
+
lam is 1.0000
|
| 13 |
+
mean acc of onlab is: 90.77
|
| 14 |
+
mean acc of onzeta is: 91.09
|
| 15 |
+
mean acc of MAPLS is: 91.78
|
| 16 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 17 |
+
the beta is 0.9
|
| 18 |
+
load pre-trained model
|
| 19 |
+
load data
|
| 20 |
+
obtain text proxy
|
| 21 |
+
accuracy with text proxy: 90.77
|
| 22 |
+
online zero-shot transfer: repeat 5 times
|
| 23 |
+
lam is 1.0000
|
| 24 |
+
lam is 1.0000
|
| 25 |
+
lam is 1.0000
|
| 26 |
+
lam is 1.0000
|
| 27 |
+
lam is 1.0000
|
| 28 |
+
mean acc of onlab is: 90.77
|
| 29 |
+
mean acc of onzeta is: 91.09
|
| 30 |
+
mean acc of MAPLS is: 91.83
|
| 31 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 32 |
+
the beta is 0.8
|
| 33 |
+
load pre-trained model
|
| 34 |
+
load data
|
| 35 |
+
obtain text proxy
|
| 36 |
+
accuracy with text proxy: 90.77
|
| 37 |
+
online zero-shot transfer: repeat 5 times
|
| 38 |
+
lam is 1.0000
|
| 39 |
+
lam is 1.0000
|
| 40 |
+
lam is 1.0000
|
| 41 |
+
lam is 1.0000
|
| 42 |
+
lam is 1.0000
|
| 43 |
+
mean acc of onlab is: 90.77
|
| 44 |
+
mean acc of onzeta is: 91.03
|
| 45 |
+
mean acc of MAPLS is: 91.82
|
| 46 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 47 |
+
the beta is 0.7
|
| 48 |
+
load pre-trained model
|
| 49 |
+
load data
|
| 50 |
+
obtain text proxy
|
| 51 |
+
accuracy with text proxy: 90.77
|
| 52 |
+
online zero-shot transfer: repeat 5 times
|
| 53 |
+
lam is 1.0000
|
| 54 |
+
lam is 1.0000
|
| 55 |
+
lam is 1.0000
|
| 56 |
+
lam is 1.0000
|
| 57 |
+
lam is 1.0000
|
| 58 |
+
mean acc of onlab is: 90.77
|
| 59 |
+
mean acc of onzeta is: 91.00
|
| 60 |
+
mean acc of MAPLS is: 91.77
|
| 61 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 62 |
+
the beta is 0.6
|
| 63 |
+
load pre-trained model
|
| 64 |
+
load data
|
| 65 |
+
obtain text proxy
|
| 66 |
+
accuracy with text proxy: 90.77
|
| 67 |
+
online zero-shot transfer: repeat 5 times
|
| 68 |
+
lam is 1.0000
|
| 69 |
+
lam is 1.0000
|
| 70 |
+
lam is 1.0000
|
| 71 |
+
lam is 1.0000
|
| 72 |
+
lam is 1.0000
|
| 73 |
+
mean acc of onlab is: 90.77
|
| 74 |
+
mean acc of onzeta is: 91.04
|
| 75 |
+
mean acc of MAPLS is: 91.78
|
| 76 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 77 |
+
the beta is 0.5
|
| 78 |
+
load pre-trained model
|
| 79 |
+
load data
|
| 80 |
+
obtain text proxy
|
| 81 |
+
accuracy with text proxy: 90.77
|
| 82 |
+
online zero-shot transfer: repeat 5 times
|
| 83 |
+
lam is 1.0000
|
| 84 |
+
lam is 1.0000
|
| 85 |
+
lam is 1.0000
|
| 86 |
+
lam is 1.0000
|
| 87 |
+
lam is 1.0000
|
| 88 |
+
mean acc of onlab is: 90.77
|
| 89 |
+
mean acc of onzeta is: 90.98
|
| 90 |
+
mean acc of MAPLS is: 91.74
|
| 91 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 92 |
+
the beta is 0.4
|
| 93 |
+
load pre-trained model
|
| 94 |
+
load data
|
| 95 |
+
obtain text proxy
|
| 96 |
+
accuracy with text proxy: 90.77
|
| 97 |
+
online zero-shot transfer: repeat 5 times
|
| 98 |
+
lam is 1.0000
|
| 99 |
+
lam is 1.0000
|
| 100 |
+
lam is 1.0000
|
| 101 |
+
lam is 1.0000
|
| 102 |
+
lam is 1.0000
|
| 103 |
+
mean acc of onlab is: 90.77
|
| 104 |
+
mean acc of onzeta is: 90.96
|
| 105 |
+
mean acc of MAPLS is: 91.78
|
| 106 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 107 |
+
the beta is 0.3
|
| 108 |
+
load pre-trained model
|
| 109 |
+
load data
|
| 110 |
+
obtain text proxy
|
| 111 |
+
accuracy with text proxy: 90.77
|
| 112 |
+
online zero-shot transfer: repeat 5 times
|
| 113 |
+
lam is 1.0000
|
| 114 |
+
lam is 1.0000
|
| 115 |
+
lam is 1.0000
|
| 116 |
+
lam is 1.0000
|
| 117 |
+
lam is 1.0000
|
| 118 |
+
mean acc of onlab is: 90.77
|
| 119 |
+
mean acc of onzeta is: 90.94
|
| 120 |
+
mean acc of MAPLS is: 91.75
|
| 121 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 122 |
+
the beta is 0.2
|
| 123 |
+
load pre-trained model
|
| 124 |
+
load data
|
| 125 |
+
obtain text proxy
|
| 126 |
+
accuracy with text proxy: 90.77
|
| 127 |
+
online zero-shot transfer: repeat 5 times
|
| 128 |
+
lam is 1.0000
|
| 129 |
+
lam is 1.0000
|
| 130 |
+
lam is 1.0000
|
| 131 |
+
lam is 1.0000
|
| 132 |
+
lam is 1.0000
|
| 133 |
+
mean acc of onlab is: 90.77
|
| 134 |
+
mean acc of onzeta is: 90.89
|
| 135 |
+
mean acc of MAPLS is: 91.68
|
| 136 |
+
Namespace(data_path='./CIFAR10_TEST', arch='ViT-B/16', workers=8, batch_size=256, tau_t=0.01, tau_i=0.04, cw=0.5, cr=20, alpha=0, beta=0.4, repeat=5)
|
| 137 |
+
the beta is 0.1
|
| 138 |
+
load pre-trained model
|
| 139 |
+
load data
|
| 140 |
+
obtain text proxy
|
| 141 |
+
accuracy with text proxy: 90.77
|
| 142 |
+
online zero-shot transfer: repeat 5 times
|
| 143 |
+
lam is 1.0000
|
| 144 |
+
lam is 1.0000
|
| 145 |
+
lam is 1.0000
|
| 146 |
+
lam is 1.0000
|
| 147 |
+
lam is 1.0000
|
| 148 |
+
mean acc of onlab is: 90.77
|
| 149 |
+
mean acc of onzeta is: 90.89
|
| 150 |
+
mean acc of MAPLS is: 91.62
|
OnZeta/main_online_cifar10.py
ADDED
|
@@ -0,0 +1,209 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) Alibaba Group
|
| 2 |
+
import argparse
|
| 3 |
+
import torch
|
| 4 |
+
import torchvision.datasets as datasets
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
import clip
|
| 7 |
+
import os
|
| 8 |
+
import math
|
| 9 |
+
import numpy as np
|
| 10 |
+
from torchvision.datasets import MNIST, CIFAR10
|
| 11 |
+
from datetime import datetime
|
| 12 |
+
import logging
|
| 13 |
+
|
| 14 |
+
from MAPLS.mapls import mapls
|
| 15 |
+
from MAPLS.common import lsc
|
| 16 |
+
|
| 17 |
+
log_filename = os.path.join("logs", f"onzeta_eval_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.log")
|
| 18 |
+
logging.basicConfig(
|
| 19 |
+
level=logging.INFO,
|
| 20 |
+
format='%(message)s',
|
| 21 |
+
handlers=[
|
| 22 |
+
logging.FileHandler(log_filename),
|
| 23 |
+
logging.StreamHandler()
|
| 24 |
+
]
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
model_names = ['RN50', 'ViT-B/32', 'ViT-B/16', 'ViT-L/14', 'ViT-L/14@336px']
|
| 28 |
+
parser = argparse.ArgumentParser(description='OnZeta for ImageNet')
|
| 29 |
+
parser.add_argument('--data_path', default='./CIFAR10_TEST', type=str,
|
| 30 |
+
help='dataset path')
|
| 31 |
+
parser.add_argument('-a', '--arch', metavar='ARCH', default='ViT-B/16',
|
| 32 |
+
choices=model_names,
|
| 33 |
+
help='model architecture: ' +
|
| 34 |
+
' | '.join(model_names) +
|
| 35 |
+
' (default: RN50)')
|
| 36 |
+
parser.add_argument('-j', '--workers', default=8, type=int, metavar='N',
|
| 37 |
+
help='number of data loading workers (default: 8)')
|
| 38 |
+
parser.add_argument('-b', '--batch-size', default=256, type=int,
|
| 39 |
+
metavar='N',
|
| 40 |
+
help='mini-batch size (default: 256)')
|
| 41 |
+
parser.add_argument('--tau_t', default=0.01, type=float)
|
| 42 |
+
parser.add_argument('--tau_i', default=0.04, type=float)
|
| 43 |
+
parser.add_argument('--cw', default=0.5, type=float)
|
| 44 |
+
parser.add_argument('--cr', default=20, type=float)
|
| 45 |
+
parser.add_argument('--alpha', default=0, type=float)
|
| 46 |
+
parser.add_argument('--beta', default=0.4, type=float)
|
| 47 |
+
parser.add_argument('--repeat', default=5, type=int)
|
| 48 |
+
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 49 |
+
|
| 50 |
+
def main(beta):
    """Run one online zero-shot transfer experiment on the CIFAR-10 test set.

    Encodes the CIFAR-10 test images with a pre-trained CLIP model, builds a
    text-proxy classifier from a prompt ensemble, then repeats the online
    transfer procedure ``args.repeat`` times and logs the mean accuracy of
    the text-only labels (onlab), the combined online labels (onzeta), and
    the MAPLS-corrected labels.

    Args:
        beta: weight of the vision pseudo-label in the combined label;
            overrides the ``--beta`` command-line default.
    """
    args = parser.parse_args()
    logging.info(args)

    lam = 1  # MAPLS prior-regularization weight used for this dataset
    args.beta = beta
    logging.info("the beta is {}".format(beta))

    cifar10_classes = [
        'airplane',
        'automobile',
        'bird',
        'cat',
        'deer',
        'dog',
        'frog',
        'horse',
        'ship',
        'truck',
    ]

    # Prompt ensemble; text embeddings are averaged over these templates.
    cifar10_templates = [
        'a photo of a {}.',
        'a blurry photo of a {}.',
        'a black and white photo of a {}.',
        'a low contrast photo of a {}.',
        'a high contrast photo of a {}.',
        'a bad photo of a {}.',
        'a good photo of a {}.',
        'a photo of a small {}.',
        'a photo of a big {}.',
        'a photo of the {}.',
        'a blurry photo of the {}.',
        'a black and white photo of the {}.',
        'a low contrast photo of the {}.',
        'a high contrast photo of the {}.',
        'a bad photo of the {}.',
        'a good photo of the {}.',
        'a photo of the small {}.',
        'a photo of the big {}.',
    ]

    logging.info('load pre-trained model')
    model, preprocess = clip.load(args.arch)
    model = model.cuda()
    model.eval()

    logging.info('load data')
    cifar10 = CIFAR10(root=os.path.expanduser("~/.cache"), download=True, train=False)
    # Single no_grad scope for the whole encoding pass (the original nested a
    # second, redundant no_grad inside the loop).
    with torch.no_grad():
        image_feat = []
        image_label = []
        for images, target in cifar10:
            images = preprocess(images).unsqueeze(0).to(device)
            images = model.encode_image(images)
            images /= images.norm()  # L2-normalize each image embedding
            image_feat.append(images)
            image_label.append(target)
    # Each element is (1, D); concatenate into an (n, D) feature matrix.
    image_feat = torch.cat(image_feat, dim=0).to(device).float()
    image_label = torch.tensor(image_label, dtype=torch.long).to(device)
    n = len(image_label)

    logging.info('obtain text proxy')
    text_classifier = zeroshot_classifier(clip, model, cifar10_classes, cifar10_templates)
    text_classifier = text_classifier.float()
    logits_t = image_feat @ text_classifier
    acc1, _ = accuracy(logits_t, image_label, topk=(1, 5))
    top1 = (acc1 / n) * 100
    logging.info(f'accuracy with text proxy: {top1:.2f}')

    logging.info('online zero-shot transfer: repeat {} times'.format(args.repeat))
    num_class = len(torch.unique(image_label))
    acc_onzeta = torch.zeros(args.repeat).cuda()
    acc_onlab = torch.zeros(args.repeat).cuda()
    acc_ls = torch.zeros(args.repeat).cuda()
    for rep in range(args.repeat):  # 'rep' avoids shadowing the builtin 'iter'
        idx = torch.randperm(n).cuda()
        combo_label = torch.zeros(n, num_class).cuda()
        text_label = torch.zeros(n, num_class).cuda()
        w = text_classifier.clone()
        rho = torch.zeros(num_class).cuda()  # dual variable for class balance
        for i in range(n):
            lr = args.cw / math.sqrt(i + 1)
            rlr = args.cr / math.sqrt(i + 1)
            # Anneal the vision weight upward as more samples are observed;
            # 'cur_beta' avoids shadowing the function argument.
            cur_beta = args.beta * math.sqrt((i + 1) / n)
            x = image_feat[idx[i], :]
            tlabel = F.softmax(x @ text_classifier / args.tau_t, dim=0)
            tlabel = tlabel * torch.exp(rho)
            tlabel /= torch.sum(tlabel)
            rho -= rlr * (tlabel - args.alpha / num_class)
            rho[rho < 0] = 0  # project dual variable onto the non-negative orthant
            text_label[i, :] = tlabel
            vision_label = F.softmax(x @ w / args.tau_i, dim=0)
            combo_label[i, :] = cur_beta * vision_label + (1 - cur_beta) * tlabel
            grad = torch.outer(x, vision_label - tlabel)
            w -= (lr / args.tau_i) * grad
            w = F.normalize(w, dim=0)  # keep vision proxies unit-norm
        acc1, _ = accuracy(text_label, image_label[idx], topk=(1, 5))
        acc_onlab[rep] = (acc1 / n) * 100
        acc1, _ = accuracy(combo_label, image_label[idx], topk=(1, 5))

        # MAPLS - EM algorithm on top of the online combined labels.
        pz = np.full(len(cifar10_classes), 1.0 / len(cifar10_classes))
        qy = mapls(combo_label, pz=pz, qy_mode="soft", max_iter=100, lam=lam)  # FIXME why return nan

        class_ratio = np.array(qy) / np.array(pz)
        # BUG FIX: always move to CPU before calling lsc. The original only
        # assigned combo_label_cpu inside `if combo_label.is_cuda:` and then
        # used it unconditionally, raising NameError on a CPU-only run.
        combo_label_cpu = combo_label.detach().cpu()
        qy_probs = lsc(combo_label_cpu, 1.0 / class_ratio)
        acc1_ls, _ = accuracy(qy_probs, image_label[idx], topk=(1, 5))

        acc_onzeta[rep] = (acc1 / n) * 100
        acc_ls[rep] = (acc1_ls / n) * 100
    logging.info('mean acc of onlab is: {:.2f}'.format(torch.mean(acc_onlab)))
    logging.info('mean acc of onzeta is: {:.2f}'.format(torch.mean(acc_onzeta)))
    logging.info('mean acc of MAPLS is: {:.2f}'.format(torch.mean(acc_ls)))
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
def zeroshot_classifier(clip, model, classnames, templates):
    """Build a zero-shot classifier matrix from class-name prompt ensembles.

    For every class name, all templates are instantiated, encoded with the
    CLIP text encoder, L2-normalized, averaged, and re-normalized. The
    resulting per-class embeddings are stacked column-wise.

    Returns:
        A (embed_dim, num_classes) CUDA tensor of unit-norm class proxies.
    """
    with torch.no_grad():
        per_class_weights = []
        for name in classnames:
            prompts = [tpl.format(name) for tpl in templates]
            tokens = clip.tokenize(prompts).cuda()
            embeddings = model.encode_text(tokens)
            embeddings = embeddings / embeddings.norm(dim=-1, keepdim=True)
            mean_embedding = embeddings.mean(dim=0)
            mean_embedding = mean_embedding / mean_embedding.norm()
            per_class_weights.append(mean_embedding)
        return torch.stack(per_class_weights, dim=1).cuda()
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def accuracy(output, target, topk=(1,)):
|
| 197 |
+
pred = output.topk(max(topk), 1, True, True)[1].t()
|
| 198 |
+
pred, target = pred.cpu(), target.cpu()
|
| 199 |
+
correct = pred.eq(target.view(1, -1).expand_as(pred))
|
| 200 |
+
return [float(correct[:k].reshape(-1).float().sum(0, keepdim=True).cpu().numpy()) for k in topk]
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
if __name__ == '__main__':
    # Sweep the vision/text mixing weight beta from 1.0 down to 0.1.
    beta_grid = [1.0, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1]
    for b in beta_grid:
        main(b)
|
| 209 |
+
|
OnZeta/main_online_cifar100.py
ADDED
|
@@ -0,0 +1,292 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) Alibaba Group
|
| 2 |
+
import argparse
|
| 3 |
+
import torch
|
| 4 |
+
import torchvision.datasets as datasets
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
import clip
|
| 7 |
+
import os
|
| 8 |
+
import math
|
| 9 |
+
import numpy as np
|
| 10 |
+
from datetime import datetime
|
| 11 |
+
import logging
|
| 12 |
+
|
| 13 |
+
log_filename = os.path.join("logs", f"debug_onzeta_eval_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.log")
|
| 14 |
+
logging.basicConfig(
|
| 15 |
+
level=logging.INFO,
|
| 16 |
+
format='%(message)s',
|
| 17 |
+
handlers=[
|
| 18 |
+
logging.FileHandler(log_filename),
|
| 19 |
+
logging.StreamHandler()
|
| 20 |
+
]
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
from MAPLS.mapls import mapls
|
| 25 |
+
from MAPLS.common import lsc
|
| 26 |
+
|
| 27 |
+
model_names = ['RN50', 'ViT-B/32', 'ViT-B/16', 'ViT-L/14', 'ViT-L/14@336px']
|
| 28 |
+
parser = argparse.ArgumentParser(description='OnZeta for ImageNet')
|
| 29 |
+
parser.add_argument('--data_path', default='./CIFAR100_TEST', type=str,
|
| 30 |
+
help='dataset path')
|
| 31 |
+
parser.add_argument('-a', '--arch', metavar='ARCH', default='RN50',
|
| 32 |
+
choices=model_names,
|
| 33 |
+
help='model architecture: ' +
|
| 34 |
+
' | '.join(model_names) +
|
| 35 |
+
' (default: RN50)')
|
| 36 |
+
parser.add_argument('-j', '--workers', default=8, type=int, metavar='N',
|
| 37 |
+
help='number of data loading workers (default: 8)')
|
| 38 |
+
parser.add_argument('-b', '--batch-size', default=256, type=int,
|
| 39 |
+
metavar='N',
|
| 40 |
+
help='mini-batch size (default: 256)')
|
| 41 |
+
parser.add_argument('--tau_t', default=0.01, type=float)
|
| 42 |
+
parser.add_argument('--tau_i', default=0.04, type=float)
|
| 43 |
+
parser.add_argument('--cw', default=0.5, type=float)
|
| 44 |
+
parser.add_argument('--cr', default=20, type=float)
|
| 45 |
+
parser.add_argument('--alpha', default=1, type=float)
|
| 46 |
+
parser.add_argument('--beta', default=0.4, type=float)
|
| 47 |
+
parser.add_argument('--repeat', default=5, type=int)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def main(beta):
    """Run one online zero-shot transfer experiment on the CIFAR-100 test set.

    Encodes the test images (served via an ImageFolder under
    ``args.data_path``) with a pre-trained CLIP model, builds a text-proxy
    classifier from a prompt ensemble, then repeats the online transfer
    procedure ``args.repeat`` times and logs the mean accuracy of the
    text-only labels (onlab), the combined online labels (onzeta), and the
    MAPLS-corrected labels.

    Args:
        beta: weight of the vision pseudo-label in the combined label;
            overrides the ``--beta`` command-line default.
    """
    args = parser.parse_args()
    logging.info(args)

    args.beta = beta
    lam = 0.7  # MAPLS prior-regularization weight used for this dataset
    logging.info("the beta is {}".format(beta))

    cifar100_classes = [
        'apple', 'aquarium fish', 'baby', 'bear', 'beaver',
        'bed', 'bee', 'beetle', 'bicycle', 'bottle',
        'bowl', 'boy', 'bridge', 'bus', 'butterfly',
        'camel', 'can', 'castle', 'caterpillar', 'cattle',
        'chair', 'chimpanzee', 'clock', 'cloud', 'cockroach',
        'couch', 'crab', 'crocodile', 'cup', 'dinosaur',
        'dolphin', 'elephant', 'flatfish', 'forest', 'fox',
        'girl', 'hamster', 'house', 'kangaroo', 'keyboard',
        'lamp', 'lawn mower', 'leopard', 'lion', 'lizard',
        'lobster', 'man', 'maple tree', 'motorcycle', 'mountain',
        'mouse', 'mushroom', 'oak tree', 'orange', 'orchid',
        'otter', 'palm tree', 'pear', 'pickup truck', 'pine tree',
        'plain', 'plate', 'poppy', 'porcupine', 'possum',
        'rabbit', 'raccoon', 'ray', 'road', 'rocket',
        'rose', 'sea', 'seal', 'shark', 'shrew',
        'skunk', 'skyscraper', 'snail', 'snake', 'spider',
        'squirrel', 'streetcar', 'sunflower', 'sweet pepper', 'table',
        'tank', 'telephone', 'television', 'tiger', 'tractor',
        'train', 'trout', 'tulip', 'turtle', 'wardrobe',
        'whale', 'willow tree', 'wolf', 'woman', 'worm',
    ]

    # Prompt ensemble; text embeddings are averaged over these templates.
    cifar100_templates = [
        'a photo of a {}.',
        'a blurry photo of a {}.',
        'a black and white photo of a {}.',
        'a low contrast photo of a {}.',
        'a high contrast photo of a {}.',
        'a bad photo of a {}.',
        'a good photo of a {}.',
        'a photo of a small {}.',
        'a photo of a big {}.',
        'a photo of the {}.',
        'a blurry photo of the {}.',
        'a black and white photo of the {}.',
        'a low contrast photo of the {}.',
        'a high contrast photo of the {}.',
        'a bad photo of the {}.',
        'a good photo of the {}.',
        'a photo of the small {}.',
        'a photo of the big {}.',
    ]

    logging.info('load pre-trained model')
    model, preprocess = clip.load(args.arch)
    model = model.cuda()
    model.eval()

    logging.info('load data')
    valdir = os.path.join(args.data_path, '')
    val_set = datasets.ImageFolder(valdir, transform=preprocess)
    loader = torch.utils.data.DataLoader(val_set, batch_size=args.batch_size, num_workers=args.workers)
    with torch.no_grad():
        image_feat = []
        image_label = []
        # The original enumerate() index was never used.
        for images, target in loader:
            images = images.cuda()
            target = target.cuda()
            image_features = model.encode_image(images)
            image_feat.append(F.normalize(image_features, dim=1))
            image_label.append(target)
    image_feat = torch.cat(image_feat, dim=0).float()
    image_label = torch.cat(image_label, dim=0)
    n = len(image_label)

    logging.info('obtain text proxy')
    text_classifier = zeroshot_classifier(clip, model, cifar100_classes, cifar100_templates)
    text_classifier = text_classifier.float()
    logits_t = image_feat @ text_classifier
    acc1, _ = accuracy(logits_t, image_label, topk=(1, 5))
    top1 = (acc1 / n) * 100
    logging.info(f'accuracy with text proxy: {top1:.2f}')

    logging.info('online zero-shot transfer: repeat {} times'.format(args.repeat))
    num_class = len(torch.unique(image_label))
    acc_onzeta = torch.zeros(args.repeat).cuda()
    acc_onlab = torch.zeros(args.repeat).cuda()
    acc_ls = torch.zeros(args.repeat).cuda()
    for rep in range(args.repeat):  # 'rep' avoids shadowing the builtin 'iter'
        idx = torch.randperm(n).cuda()
        combo_label = torch.zeros(n, num_class).cuda()
        text_label = torch.zeros(n, num_class).cuda()
        w = text_classifier.clone()
        rho = torch.zeros(num_class).cuda()  # dual variable for class balance
        for i in range(n):
            lr = args.cw / math.sqrt(i + 1)
            rlr = args.cr / math.sqrt(i + 1)
            # Anneal the vision weight upward as more samples are observed;
            # 'cur_beta' avoids shadowing the function argument.
            cur_beta = args.beta * math.sqrt((i + 1) / n)
            x = image_feat[idx[i], :]
            tlabel = F.softmax(x @ text_classifier / args.tau_t, dim=0)
            tlabel = tlabel * torch.exp(rho)
            tlabel /= torch.sum(tlabel)
            rho -= rlr * (tlabel - args.alpha / num_class)
            rho[rho < 0] = 0  # project dual variable onto the non-negative orthant
            text_label[i, :] = tlabel
            vision_label = F.softmax(x @ w / args.tau_i, dim=0)
            combo_label[i, :] = cur_beta * vision_label + (1 - cur_beta) * tlabel
            grad = torch.outer(x, vision_label - tlabel)
            w -= (lr / args.tau_i) * grad
            w = F.normalize(w, dim=0)  # keep vision proxies unit-norm
        acc1, _ = accuracy(text_label, image_label[idx], topk=(1, 5))
        acc_onlab[rep] = (acc1 / n) * 100
        acc1, _ = accuracy(combo_label, image_label[idx], topk=(1, 5))

        # MAPLS - EM algorithm on top of the online combined labels.
        pz = np.full(len(cifar100_classes), 1.0 / len(cifar100_classes))
        qy = mapls(combo_label, pz=pz, qy_mode="soft", max_iter=100, lam=lam)  # FIXME why return nan

        class_ratio = np.array(qy) / np.array(pz)
        # BUG FIX: always move to CPU before calling lsc. The original only
        # assigned combo_label_cpu inside `if combo_label.is_cuda:` and then
        # used it unconditionally, raising NameError on a CPU-only run.
        combo_label_cpu = combo_label.detach().cpu()
        qy_probs = lsc(combo_label_cpu, 1.0 / class_ratio)
        acc1_ls, _ = accuracy(qy_probs, image_label[idx], topk=(1, 5))

        acc_onzeta[rep] = (acc1 / n) * 100
        acc_ls[rep] = (acc1_ls / n) * 100
    logging.info('mean acc of onlab is: {:.2f}'.format(torch.mean(acc_onlab)))
    logging.info('mean acc of onzeta is: {:.2f}'.format(torch.mean(acc_onzeta)))
    logging.info('mean acc of MAPLS is: {:.2f}'.format(torch.mean(acc_ls)))
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def zeroshot_classifier(clip, model, classnames, templates):
    """Construct a zero-shot classifier from prompt-ensemble text embeddings.

    Each class name is expanded through every template, encoded by the CLIP
    text encoder, L2-normalized, averaged across templates, and re-normalized
    to unit length. The class proxies are stacked as columns.

    Returns:
        A (embed_dim, num_classes) CUDA tensor of unit-norm class proxies.
    """
    with torch.no_grad():
        columns = []
        for classname in classnames:
            phrases = [template.format(classname) for template in templates]
            token_batch = clip.tokenize(phrases).cuda()
            text_emb = model.encode_text(token_batch)
            text_emb = text_emb / text_emb.norm(dim=-1, keepdim=True)
            proxy = text_emb.mean(dim=0)
            proxy = proxy / proxy.norm()
            columns.append(proxy)
        classifier = torch.stack(columns, dim=1).cuda()
    return classifier
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
def accuracy(output, target, topk=(1,)):
|
| 280 |
+
pred = output.topk(max(topk), 1, True, True)[1].t()
|
| 281 |
+
pred, target = pred.cpu(), target.cpu()
|
| 282 |
+
correct = pred.eq(target.view(1, -1).expand_as(pred))
|
| 283 |
+
return [float(correct[:k].reshape(-1).float().sum(0, keepdim=True).cpu().numpy()) for k in topk]
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
if __name__ == '__main__':
    # Sweep the vision/text mixing weight beta from 1.0 down to 0.1.
    beta_grid = [1.0, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1]
    for b in beta_grid:
        main(b)
|
| 292 |
+
|
OnZeta/main_online_imagenet_adap_freq.py
ADDED
|
@@ -0,0 +1,353 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) Alibaba Group
|
| 2 |
+
import argparse
|
| 3 |
+
import torch
|
| 4 |
+
import torchvision.datasets as datasets
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
import clip
|
| 7 |
+
import os
|
| 8 |
+
import math
|
| 9 |
+
import numpy as np
|
| 10 |
+
|
| 11 |
+
from MAPLS.mapls import mapls
|
| 12 |
+
from MAPLS.common import lsc
|
| 13 |
+
|
| 14 |
+
model_names = ['RN50', 'ViT-B/32', 'ViT-B/16', 'ViT-L/14', 'ViT-L/14@336px']
|
| 15 |
+
parser = argparse.ArgumentParser(description='OnZeta for ImageNet')
|
| 16 |
+
parser.add_argument('--data_path', default='/home/lt_test/projects/datasets/ImageNet/', type=str,
|
| 17 |
+
help='dataset path')
|
| 18 |
+
parser.add_argument('-a', '--arch', metavar='ARCH', default='ViT-L/14@336px',
|
| 19 |
+
choices=model_names,
|
| 20 |
+
help='model architecture: ' +
|
| 21 |
+
' | '.join(model_names) +
|
| 22 |
+
' (default: RN50)')
|
| 23 |
+
parser.add_argument('-j', '--workers', default=8, type=int, metavar='N',
|
| 24 |
+
help='number of data loading workers (default: 8)')
|
| 25 |
+
parser.add_argument('-b', '--batch-size', default=512, type=int,
|
| 26 |
+
metavar='N',
|
| 27 |
+
help='mini-batch size (default: 256)')
|
| 28 |
+
parser.add_argument('--tau_t', default=0.01, type=float)
|
| 29 |
+
parser.add_argument('--tau_i', default=0.04, type=float)
|
| 30 |
+
parser.add_argument('--cw', default=0.5, type=float)
|
| 31 |
+
parser.add_argument('--cr', default=20, type=float)
|
| 32 |
+
parser.add_argument('--alpha', default=1, type=float)
|
| 33 |
+
parser.add_argument('--beta', default=0.8, type=float)
|
| 34 |
+
parser.add_argument('--repeat', default=5, type=int)
|
| 35 |
+
parser.add_argument('--adpt_weight', default=0.05, type=float)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def main(adpt_weight):
    """Run OnZeta online zero-shot transfer on ImageNet with an adaptive class prior.

    adpt_weight mixes a uniform class prior with the running frequency of the
    online vision predictions: pi_t = (1 - adpt_weight) * uniform + adpt_weight * freq.
    Prints accuracy of the plain text proxy, then -- averaged over args.repeat
    random sample orders -- the online text labels ("onlab"), the combined
    vision+text labels ("onzeta"), and a MAPLS label-shift corrected variant.
    """
    args = parser.parse_args()
    print(args)
    print("adpt_weight: ", adpt_weight)
    # OpenAI's human-readable ImageNet-1k class names (order matches the folder order).
    imagenet_classes = [
        "tench", "goldfish", "great white shark", "tiger shark", "hammerhead shark", "electric ray",
        "stingray", "rooster", "hen", "ostrich", "brambling", "goldfinch", "house finch", "junco",
        "indigo bunting", "American robin", "bulbul", "jay", "magpie", "chickadee", "American dipper",
        "kite (bird of prey)", "bald eagle", "vulture", "great grey owl", "fire salamander",
        "smooth newt", "newt", "spotted salamander", "axolotl", "American bullfrog", "tree frog",
        "tailed frog", "loggerhead sea turtle", "leatherback sea turtle", "mud turtle", "terrapin",
        "box turtle", "banded gecko", "green iguana", "Carolina anole",
        "desert grassland whiptail lizard", "agama", "frilled-necked lizard", "alligator lizard",
        "Gila monster", "European green lizard", "chameleon", "Komodo dragon", "Nile crocodile",
        "American alligator", "triceratops", "worm snake", "ring-necked snake",
        "eastern hog-nosed snake", "smooth green snake", "kingsnake", "garter snake", "water snake",
        "vine snake", "night snake", "boa constrictor", "African rock python", "Indian cobra",
        "green mamba", "sea snake", "Saharan horned viper", "eastern diamondback rattlesnake",
        "sidewinder rattlesnake", "trilobite", "harvestman", "scorpion", "yellow garden spider",
        "barn spider", "European garden spider", "southern black widow", "tarantula", "wolf spider",
        "tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse", "prairie grouse", "peafowl",
        "quail", "partridge", "african grey parrot", "macaw", "sulphur-crested cockatoo", "lorikeet",
        "coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "duck",
        "red-breasted merganser", "goose", "black swan", "tusker", "echidna", "platypus", "wallaby",
        "koala", "wombat", "jellyfish", "sea anemone", "brain coral", "flatworm", "nematode", "conch",
        "snail", "slug", "sea slug", "chiton", "chambered nautilus", "Dungeness crab", "rock crab",
        "fiddler crab", "red king crab", "American lobster", "spiny lobster", "crayfish", "hermit crab",
        "isopod", "white stork", "black stork", "spoonbill", "flamingo", "little blue heron",
        "great egret", "bittern bird", "crane bird", "limpkin", "common gallinule", "American coot",
        "bustard", "ruddy turnstone", "dunlin", "common redshank", "dowitcher", "oystercatcher",
        "pelican", "king penguin", "albatross", "grey whale", "killer whale", "dugong", "sea lion",
        "Chihuahua", "Japanese Chin", "Maltese", "Pekingese", "Shih Tzu", "King Charles Spaniel",
        "Papillon", "toy terrier", "Rhodesian Ridgeback", "Afghan Hound", "Basset Hound", "Beagle",
        "Bloodhound", "Bluetick Coonhound", "Black and Tan Coonhound", "Treeing Walker Coonhound",
        "English foxhound", "Redbone Coonhound", "borzoi", "Irish Wolfhound", "Italian Greyhound",
        "Whippet", "Ibizan Hound", "Norwegian Elkhound", "Otterhound", "Saluki", "Scottish Deerhound",
        "Weimaraner", "Staffordshire Bull Terrier", "American Staffordshire Terrier",
        "Bedlington Terrier", "Border Terrier", "Kerry Blue Terrier", "Irish Terrier",
        "Norfolk Terrier", "Norwich Terrier", "Yorkshire Terrier", "Wire Fox Terrier",
        "Lakeland Terrier", "Sealyham Terrier", "Airedale Terrier", "Cairn Terrier",
        "Australian Terrier", "Dandie Dinmont Terrier", "Boston Terrier", "Miniature Schnauzer",
        "Giant Schnauzer", "Standard Schnauzer", "Scottish Terrier", "Tibetan Terrier",
        "Australian Silky Terrier", "Soft-coated Wheaten Terrier", "West Highland White Terrier",
        "Lhasa Apso", "Flat-Coated Retriever", "Curly-coated Retriever", "Golden Retriever",
        "Labrador Retriever", "Chesapeake Bay Retriever", "German Shorthaired Pointer", "Vizsla",
        "English Setter", "Irish Setter", "Gordon Setter", "Brittany dog", "Clumber Spaniel",
        "English Springer Spaniel", "Welsh Springer Spaniel", "Cocker Spaniel", "Sussex Spaniel",
        "Irish Water Spaniel", "Kuvasz", "Schipperke", "Groenendael dog", "Malinois", "Briard",
        "Australian Kelpie", "Komondor", "Old English Sheepdog", "Shetland Sheepdog", "collie",
        "Border Collie", "Bouvier des Flandres dog", "Rottweiler", "German Shepherd Dog", "Dobermann",
        "Miniature Pinscher", "Greater Swiss Mountain Dog", "Bernese Mountain Dog",
        "Appenzeller Sennenhund", "Entlebucher Sennenhund", "Boxer", "Bullmastiff", "Tibetan Mastiff",
        "French Bulldog", "Great Dane", "St. Bernard", "husky", "Alaskan Malamute", "Siberian Husky",
        "Dalmatian", "Affenpinscher", "Basenji", "pug", "Leonberger", "Newfoundland dog",
        "Great Pyrenees dog", "Samoyed", "Pomeranian", "Chow Chow", "Keeshond", "brussels griffon",
        "Pembroke Welsh Corgi", "Cardigan Welsh Corgi", "Toy Poodle", "Miniature Poodle",
        "Standard Poodle", "Mexican hairless dog (xoloitzcuintli)", "grey wolf", "Alaskan tundra wolf",
        "red wolf or maned wolf", "coyote", "dingo", "dhole", "African wild dog", "hyena", "red fox",
        "kit fox", "Arctic fox", "grey fox", "tabby cat", "tiger cat", "Persian cat", "Siamese cat",
        "Egyptian Mau", "cougar", "lynx", "leopard", "snow leopard", "jaguar", "lion", "tiger",
        "cheetah", "brown bear", "American black bear", "polar bear", "sloth bear", "mongoose",
        "meerkat", "tiger beetle", "ladybug", "ground beetle", "longhorn beetle", "leaf beetle",
        "dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant", "grasshopper",
        "cricket insect", "stick insect", "cockroach", "praying mantis", "cicada", "leafhopper",
        "lacewing", "dragonfly", "damselfly", "red admiral butterfly", "ringlet butterfly",
        "monarch butterfly", "small white butterfly", "sulphur butterfly", "gossamer-winged butterfly",
        "starfish", "sea urchin", "sea cucumber", "cottontail rabbit", "hare", "Angora rabbit",
        "hamster", "porcupine", "fox squirrel", "marmot", "beaver", "guinea pig", "common sorrel horse",
        "zebra", "pig", "wild boar", "warthog", "hippopotamus", "ox", "water buffalo", "bison",
        "ram (adult male sheep)", "bighorn sheep", "Alpine ibex", "hartebeest", "impala (antelope)",
        "gazelle", "arabian camel", "llama", "weasel", "mink", "European polecat",
        "black-footed ferret", "otter", "skunk", "badger", "armadillo", "three-toed sloth", "orangutan",
        "gorilla", "chimpanzee", "gibbon", "siamang", "guenon", "patas monkey", "baboon", "macaque",
        "langur", "black-and-white colobus", "proboscis monkey", "marmoset", "white-headed capuchin",
        "howler monkey", "titi monkey", "Geoffroy's spider monkey", "common squirrel monkey",
        "ring-tailed lemur", "indri", "Asian elephant", "African bush elephant", "red panda",
        "giant panda", "snoek fish", "eel", "silver salmon", "rock beauty fish", "clownfish",
        "sturgeon", "gar fish", "lionfish", "pufferfish", "abacus", "abaya", "academic gown",
        "accordion", "acoustic guitar", "aircraft carrier", "airliner", "airship", "altar", "ambulance",
        "amphibious vehicle", "analog clock", "apiary", "apron", "trash can", "assault rifle",
        "backpack", "bakery", "balance beam", "balloon", "ballpoint pen", "Band-Aid", "banjo",
        "baluster / handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel",
        "wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "swimming cap", "bath towel",
        "bathtub", "station wagon", "lighthouse", "beaker", "military hat (bearskin or shako)",
        "beer bottle", "beer glass", "bell tower", "baby bib", "tandem bicycle", "bikini",
        "ring binder", "binoculars", "birdhouse", "boathouse", "bobsleigh", "bolo tie", "poke bonnet",
        "bookcase", "bookstore", "bottle cap", "hunting bow", "bow tie", "brass memorial plaque", "bra",
        "breakwater", "breastplate", "broom", "bucket", "buckle", "bulletproof vest",
        "high-speed train", "butcher shop", "taxicab", "cauldron", "candle", "cannon", "canoe",
        "can opener", "cardigan", "car mirror", "carousel", "tool kit", "cardboard box / carton",
        "car wheel", "automated teller machine", "cassette", "cassette player", "castle", "catamaran",
        "CD player", "cello", "mobile phone", "chain", "chain-link fence", "chain mail", "chainsaw",
        "storage chest", "chiffonier", "bell or wind chime", "china cabinet", "Christmas stocking",
        "church", "movie theater", "cleaver", "cliff dwelling", "cloak", "clogs", "cocktail shaker",
        "coffee mug", "coffeemaker", "spiral or coil", "combination lock", "computer keyboard",
        "candy store", "container ship", "convertible", "corkscrew", "cornet", "cowboy boot",
        "cowboy hat", "cradle", "construction crane", "crash helmet", "crate", "infant bed",
        "Crock Pot", "croquet ball", "crutch", "cuirass", "dam", "desk", "desktop computer",
        "rotary dial telephone", "diaper", "digital clock", "digital watch", "dining table",
        "dishcloth", "dishwasher", "disc brake", "dock", "dog sled", "dome", "doormat", "drilling rig",
        "drum", "drumstick", "dumbbell", "Dutch oven", "electric fan", "electric guitar",
        "electric locomotive", "entertainment center", "envelope", "espresso machine", "face powder",
        "feather boa", "filing cabinet", "fireboat", "fire truck", "fire screen", "flagpole", "flute",
        "folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster bed",
        "freight car", "French horn", "frying pan", "fur coat", "garbage truck",
        "gas mask or respirator", "gas pump", "goblet", "go-kart", "golf ball", "golf cart", "gondola",
        "gong", "gown", "grand piano", "greenhouse", "radiator grille", "grocery store", "guillotine",
        "hair clip", "hair spray", "half-track", "hammer", "hamper", "hair dryer", "hand-held computer",
        "handkerchief", "hard disk drive", "harmonica", "harp", "combine harvester", "hatchet",
        "holster", "home theater", "honeycomb", "hook", "hoop skirt", "gymnastic horizontal bar",
        "horse-drawn vehicle", "hourglass", "iPod", "clothes iron", "carved pumpkin", "jeans", "jeep",
        "T-shirt", "jigsaw puzzle", "rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat",
        "ladle", "lampshade", "laptop computer", "lawn mower", "lens cap", "letter opener", "library",
        "lifeboat", "lighter", "limousine", "ocean liner", "lipstick", "slip-on shoe", "lotion",
        "music speaker", "loupe magnifying glass", "sawmill", "magnetic compass", "messenger bag",
        "mailbox", "tights", "one-piece bathing suit", "manhole cover", "maraca", "marimba", "mask",
        "matchstick", "maypole", "maze", "measuring cup", "medicine cabinet", "megalith", "microphone",
        "microwave oven", "military uniform", "milk can", "minibus", "miniskirt", "minivan", "missile",
        "mitten", "mixing bowl", "mobile home", "ford model t", "modem", "monastery", "monitor",
        "moped", "mortar and pestle", "graduation cap", "mosque", "mosquito net", "vespa",
        "mountain bike", "tent", "computer mouse", "mousetrap", "moving van", "muzzle", "metal nail",
        "neck brace", "necklace", "baby pacifier", "notebook computer", "obelisk", "oboe", "ocarina",
        "odometer", "oil filter", "pipe organ", "oscilloscope", "overskirt", "bullock cart",
        "oxygen mask", "product packet / packaging", "paddle", "paddle wheel", "padlock", "paintbrush",
        "pajamas", "palace", "pan flute", "paper towel", "parachute", "parallel bars", "park bench",
        "parking meter", "railroad car", "patio", "payphone", "pedestal", "pencil case",
        "pencil sharpener", "perfume", "Petri dish", "photocopier", "plectrum", "Pickelhaube",
        "picket fence", "pickup truck", "pier", "piggy bank", "pill bottle", "pillow", "ping-pong ball",
        "pinwheel", "pirate ship", "drink pitcher", "block plane", "planetarium", "plastic bag",
        "plate rack", "farm plow", "plunger", "Polaroid camera", "pole", "police van", "poncho",
        "pool table", "soda bottle", "plant pot", "potter's wheel", "power drill", "prayer rug",
        "printer", "prison", "missile", "projector", "hockey puck", "punching bag", "purse", "quill",
        "quilt", "race car", "racket", "radiator", "radio", "radio telescope", "rain barrel",
        "recreational vehicle", "fishing casting reel", "reflex camera", "refrigerator",
        "remote control", "restaurant", "revolver", "rifle", "rocking chair", "rotisserie", "eraser",
        "rugby ball", "ruler measuring stick", "sneaker", "safe", "safety pin", "salt shaker", "sandal",
        "sarong", "saxophone", "scabbard", "weighing scale", "school bus", "schooner", "scoreboard",
        "CRT monitor", "screw", "screwdriver", "seat belt", "sewing machine", "shield", "shoe store",
        "shoji screen / room divider", "shopping basket", "shopping cart", "shovel", "shower cap",
        "shower curtain", "ski", "balaclava ski mask", "sleeping bag", "slide rule", "sliding door",
        "slot machine", "snorkel", "snowmobile", "snowplow", "soap dispenser", "soccer ball", "sock",
        "solar thermal collector", "sombrero", "soup bowl", "keyboard space bar", "space heater",
        "space shuttle", "spatula", "motorboat", "spider web", "spindle", "sports car", "spotlight",
        "stage", "steam locomotive", "through arch bridge", "steel drum", "stethoscope", "scarf",
        "stone wall", "stopwatch", "stove", "strainer", "tram", "stretcher", "couch", "stupa",
        "submarine", "suit", "sundial", "sunglasses", "sunglasses", "sunscreen", "suspension bridge",
        "mop", "sweatshirt", "swim trunks / shorts", "swing", "electrical switch", "syringe",
        "table lamp", "tank", "tape player", "teapot", "teddy bear", "television", "tennis ball",
        "thatched roof", "front curtain", "thimble", "threshing machine", "throne", "tile roof",
        "toaster", "tobacco shop", "toilet seat", "torch", "totem pole", "tow truck", "toy store",
        "tractor", "semi-trailer truck", "tray", "trench coat", "tricycle", "trimaran", "tripod",
        "triumphal arch", "trolleybus", "trombone", "hot tub", "turnstile", "typewriter keyboard",
        "umbrella", "unicycle", "upright piano", "vacuum cleaner", "vase", "vaulted or arched ceiling",
        "velvet fabric", "vending machine", "vestment", "viaduct", "violin", "volleyball",
        "waffle iron", "wall clock", "wallet", "wardrobe", "military aircraft", "sink",
        "washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle",
        "hair wig", "window screen", "window shade", "Windsor tie", "wine bottle", "airplane wing",
        "wok", "wooden spoon", "wool", "split-rail fence", "shipwreck", "sailboat", "yurt", "website",
        "comic book", "crossword", "traffic or street sign", "traffic light", "dust jacket", "menu",
        "plate", "guacamole", "consomme", "hot pot", "trifle", "ice cream", "popsicle", "baguette",
        "bagel", "pretzel", "cheeseburger", "hot dog", "mashed potatoes", "cabbage", "broccoli",
        "cauliflower", "zucchini", "spaghetti squash", "acorn squash", "butternut squash", "cucumber",
        "artichoke", "bell pepper", "cardoon", "mushroom", "Granny Smith apple", "strawberry", "orange",
        "lemon", "fig", "pineapple", "banana", "jackfruit", "cherimoya (custard apple)", "pomegranate",
        "hay", "carbonara", "chocolate syrup", "dough", "meatloaf", "pizza", "pot pie", "burrito",
        "red wine", "espresso", "tea cup", "eggnog", "mountain", "bubble", "cliff", "coral reef",
        "geyser", "lakeshore", "promontory", "sandbar", "beach", "valley", "volcano", "baseball player",
        "bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper", "corn", "acorn",
        "rose hip", "horse chestnut seed", "coral fungus", "agaric", "gyromitra", "stinkhorn mushroom",
        "earth star fungus", "hen of the woods mushroom", "bolete", "corn cob", "toilet paper"]

    # Single-template alternative kept for reference (unused; the 7-template set below is used).
    imagenet_single_template = [
        'a photo of a {}.',
    ]

    imagenet_7_templates = [
        'itap of a {}.',
        'a origami {}.',
        'a bad photo of the {}.',
        'a photo of the large {}.',
        'a {} in a video game.',
        'art of the {}.',
        'a photo of the small {}.',
    ]

    print('load pre-trained model')
    model, preprocess = clip.load(args.arch)
    model = model.cuda()
    model.eval()

    # Pre-extract L2-normalized image features once; the online loop then
    # only replays them in a random order.
    print('load data')
    valdir = os.path.join(args.data_path, 'val')
    val_set = datasets.ImageFolder(valdir, transform=preprocess)
    loader = torch.utils.data.DataLoader(val_set, batch_size=args.batch_size, num_workers=args.workers)
    with torch.no_grad():
        image_feat = []
        image_label = []
        for i, (images, target) in enumerate(loader):
            images = images.cuda()
            target = target.cuda()
            image_features = model.encode_image(images)
            image_feat.append(F.normalize(image_features, dim=1))
            image_label.append(target)
    image_feat = torch.cat(image_feat, dim=0)
    image_label = torch.cat(image_label, dim=0)
    n = len(image_label)
    image_feat = image_feat.float()

    # Baseline: zero-shot accuracy of the frozen text classifier.
    print('obtain text proxy')
    text_classifier = zeroshot_classifier(clip, model, imagenet_classes, imagenet_7_templates)
    text_classifier = text_classifier.float()
    logits_t = image_feat @ text_classifier
    acc1, acc5 = accuracy(logits_t, image_label, topk=(1, 5))
    top1 = (acc1 / n) * 100
    print(f'accuracy with text proxy: {top1:.2f}')

    print('online zero-shot transfer: repeat {} times'.format(args.repeat))
    num_class = len(torch.unique(image_label))
    acc_onzeta = torch.zeros(args.repeat).cuda()
    acc_onlab = torch.zeros(args.repeat).cuda()
    acc_ls = torch.zeros(args.repeat).cuda()

    # Running count of predicted classes, used as the adaptive prior.
    # NOTE(review): this accumulates ACROSS repeats, so later repeats inherit
    # the statistics of earlier ones -- confirm the carry-over is intended.
    class_freq = torch.zeros(num_class).cuda()

    for rep in range(args.repeat):  # renamed from `iter`, which shadowed the builtin
        idx = torch.randperm(n).cuda()
        combo_label = torch.zeros(n, num_class).cuda()
        text_label = torch.zeros(n, num_class).cuda()
        w = text_classifier.clone()
        rho = torch.zeros(num_class).cuda()
        for i in range(n):
            # Decaying step sizes: lr for the vision proxy, rlr for the dual variable.
            lr = args.cw / math.sqrt(i + 1)
            rlr = args.cr / math.sqrt(i + 1)
            # The vision branch's weight grows as more samples are observed.
            beta = args.beta * math.sqrt((i + 1) / n)

            x = image_feat[idx[i], :]

            vision_label = F.softmax(x @ w / args.tau_i, dim=0)

            pred_class = torch.argmax(vision_label).item()
            class_freq[pred_class] += 1

            # Mix the uniform prior with the online class-frequency prior
            # (class_freq.sum() > 0 always holds here, but guard anyway).
            freq_prior = class_freq / class_freq.sum() if class_freq.sum() > 0 else torch.ones_like(class_freq) / num_class
            pi_t = (1 - adpt_weight) * (args.alpha / num_class) + adpt_weight * freq_prior

            # Text prediction, reweighted by the non-negative dual variable rho.
            tlabel = F.softmax(x @ text_classifier / args.tau_t, dim=0)
            tlabel = tlabel * torch.exp(rho)
            tlabel /= torch.sum(tlabel)

            # Online dual update, projected back onto rho >= 0.
            rho -= rlr * (tlabel - pi_t)
            rho[rho < 0] = 0

            text_label[i, :] = tlabel
            combo_label[i, :] = beta * vision_label + (1 - beta) * tlabel
            # Gradient step on the vision proxy, then re-normalize its columns.
            grad = torch.outer(x, vision_label - tlabel)
            w -= (lr / args.tau_i) * grad
            w = F.normalize(w, dim=0)
        acc1, acc5 = accuracy(text_label, image_label[idx], topk=(1, 5))
        acc_onlab[rep] = (acc1 / n) * 100
        acc1, acc5 = accuracy(combo_label, image_label[idx], topk=(1, 5))

        # MAPLS: EM-based label-shift correction of the combined predictions.
        pz = np.full(len(imagenet_classes), 1.0 / len(imagenet_classes))
        qy = mapls(combo_label, pz=pz, qy_mode="soft", max_iter=100, lam=0.8)

        # Ratio of estimated target prior to source prior (renamed from `w`,
        # which clobbered the vision proxy above).
        prior_ratio = np.array(qy) / np.array(pz)
        # fix: the original only assigned combo_label_cpu inside `if combo_label.is_cuda:`,
        # leaving it undefined (NameError) for CPU tensors.
        combo_label_cpu = combo_label.cpu() if combo_label.is_cuda else combo_label
        qy_probs = lsc(combo_label_cpu, 1.0 / prior_ratio)
        acc1_ls, acc5_ls = accuracy(qy_probs, image_label[idx], topk=(1, 5))

        acc_onzeta[rep] = (acc1 / n) * 100
        acc_ls[rep] = (acc1_ls / n) * 100
    print('mean acc of onlab is: {:.2f}'.format(torch.mean(acc_onlab)))
    print('mean acc of onzeta is: {:.2f}'.format(torch.mean(acc_onzeta)))
    print('mean acc of MAPLS is: {:.2f}'.format(torch.mean(acc_ls)))
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
def zeroshot_classifier(clip, model, classnames, templates):
    """Build a zero-shot text classifier from class names and prompt templates.

    For every class, each template is filled in with the class name, encoded by
    the CLIP text encoder, and L2-normalized; the per-template embeddings are
    averaged and re-normalized into one prototype. Returns a tensor whose
    columns are the class prototypes (shape: embed_dim x num_classes).
    """
    def _class_proxy(name):
        # One normalized prototype per class, averaged over all templates.
        prompts = clip.tokenize([t.format(name) for t in templates]).cuda()
        emb = model.encode_text(prompts)
        emb = emb / emb.norm(dim=-1, keepdim=True)
        proxy = emb.mean(dim=0)
        return proxy / proxy.norm()

    with torch.no_grad():
        return torch.stack([_class_proxy(c) for c in classnames], dim=1).cuda()
|
| 337 |
+
|
| 338 |
+
|
| 339 |
+
def accuracy(output, target, topk=(1,)):
    """Count correct predictions within the top-k scores, for each k in topk.

    output: (N, C) score matrix; target: (N,) integer labels.
    Returns a list of floats (raw hit counts, not percentages), one per k.
    """
    k_max = max(topk)
    # (k_max, N) matrix of predicted class indices, highest score first.
    ranked = output.topk(k_max, 1, True, True)[1].t().cpu()
    labels = target.cpu().view(1, -1).expand_as(ranked)
    hits = ranked.eq(labels)
    return [float(hits[:k].reshape(-1).float().sum().item()) for k in topk]
|
| 344 |
+
|
| 345 |
+
|
| 346 |
+
if __name__ == '__main__':
    # main()

    # lams = [1.0, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1]
    # Sweep the mixing weight between the uniform prior and the online
    # class-frequency prior, rerunning the full evaluation for each value.
    adpt_weights = [0.0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4]
    for adpt_weight in adpt_weights:
        main(adpt_weight)
|
OnZeta/main_online_imagenet_inloop_online_MAPLS_only.py
ADDED
|
@@ -0,0 +1,326 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) Alibaba Group
import argparse
import torch
import torchvision.datasets as datasets
import torch.nn.functional as F
import clip
import os
import math
import numpy as np
import logging
from datetime import datetime

# Timestamped log file under logs/, mirrored to stdout.
# fix: logging.FileHandler raises FileNotFoundError if the directory is missing.
os.makedirs("logs", exist_ok=True)
log_filename = os.path.join("logs", f"debug_onzeta_eval_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.log")
logging.basicConfig(
    level=logging.INFO,
    format='%(message)s',
    handlers=[
        logging.FileHandler(log_filename),
        logging.StreamHandler()
    ]
)

from MAPLS.mapls_cuda import mapls_torch
from MAPLS.common_cuda import lsc_torch

# CLIP backbones supported by this script.
model_names = ['RN50', 'ViT-B/32', 'ViT-B/16', 'ViT-L/14', 'ViT-L/14@336px']
parser = argparse.ArgumentParser(description='OnZeta for ImageNet')
parser.add_argument('--data_path', default='/home/li325/space_mlai/pengxiao_space/dataset/ImageNet/', type=str,
                    help='dataset path')
parser.add_argument('-a', '--arch', metavar='ARCH', default='RN50',
                    choices=model_names,
                    help='model architecture: ' +
                         ' | '.join(model_names) +
                         ' (default: RN50)')
parser.add_argument('-j', '--workers', default=8, type=int, metavar='N',
                    help='number of data loading workers (default: 8)')
parser.add_argument('-b', '--batch-size', default=256, type=int,
                    metavar='N',
                    help='mini-batch size (default: 256)')
# Softmax temperatures for the text (tau_t) and vision (tau_i) branches.
parser.add_argument('--tau_t', default=0.01, type=float)
parser.add_argument('--tau_i', default=0.04, type=float)
# Step-size constants: cw for the vision proxy update, cr for the dual variable.
parser.add_argument('--cw', default=0.5, type=float)
parser.add_argument('--cr', default=20, type=float)
parser.add_argument('--alpha', default=1, type=float)
parser.add_argument('--beta', default=0.8, type=float)
parser.add_argument('--repeat', default=5, type=int)

# parser.add_argument('--lam', default=0.6, type=float)
| 51 |
+
def main(lam):
    """Evaluate MAPLS label-shift correction on ImageNet for one lambda.

    Extracts CLIP image features for the whole validation set, builds a
    prompt-ensemble zero-shot text classifier, then repeats ``args.repeat``
    times: shuffle the samples, compute soft text labels, run the MAPLS EM
    algorithm plus label-shift correction, and log the mean top-1 accuracy.

    Args:
        lam: mixing weight (lambda) forwarded to ``mapls_torch``.
    """
    args = parser.parse_args()
    print(args)
    imagenet_classes = ["tench", "goldfish", "great white shark", "tiger shark", "hammerhead shark", "electric ray",
                        "stingray", "rooster", "hen", "ostrich", "brambling", "goldfinch", "house finch", "junco",
                        "indigo bunting", "American robin", "bulbul", "jay", "magpie", "chickadee", "American dipper",
                        "kite (bird of prey)", "bald eagle", "vulture", "great grey owl", "fire salamander",
                        "smooth newt", "newt", "spotted salamander", "axolotl", "American bullfrog", "tree frog",
                        "tailed frog", "loggerhead sea turtle", "leatherback sea turtle", "mud turtle", "terrapin",
                        "box turtle", "banded gecko", "green iguana", "Carolina anole",
                        "desert grassland whiptail lizard", "agama", "frilled-necked lizard", "alligator lizard",
                        "Gila monster", "European green lizard", "chameleon", "Komodo dragon", "Nile crocodile",
                        "American alligator", "triceratops", "worm snake", "ring-necked snake",
                        "eastern hog-nosed snake", "smooth green snake", "kingsnake", "garter snake", "water snake",
                        "vine snake", "night snake", "boa constrictor", "African rock python", "Indian cobra",
                        "green mamba", "sea snake", "Saharan horned viper", "eastern diamondback rattlesnake",
                        "sidewinder rattlesnake", "trilobite", "harvestman", "scorpion", "yellow garden spider",
                        "barn spider", "European garden spider", "southern black widow", "tarantula", "wolf spider",
                        "tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse", "prairie grouse", "peafowl",
                        "quail", "partridge", "african grey parrot", "macaw", "sulphur-crested cockatoo", "lorikeet",
                        "coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "duck",
                        "red-breasted merganser", "goose", "black swan", "tusker", "echidna", "platypus", "wallaby",
                        "koala", "wombat", "jellyfish", "sea anemone", "brain coral", "flatworm", "nematode", "conch",
                        "snail", "slug", "sea slug", "chiton", "chambered nautilus", "Dungeness crab", "rock crab",
                        "fiddler crab", "red king crab", "American lobster", "spiny lobster", "crayfish", "hermit crab",
                        "isopod", "white stork", "black stork", "spoonbill", "flamingo", "little blue heron",
                        "great egret", "bittern bird", "crane bird", "limpkin", "common gallinule", "American coot",
                        "bustard", "ruddy turnstone", "dunlin", "common redshank", "dowitcher", "oystercatcher",
                        "pelican", "king penguin", "albatross", "grey whale", "killer whale", "dugong", "sea lion",
                        "Chihuahua", "Japanese Chin", "Maltese", "Pekingese", "Shih Tzu", "King Charles Spaniel",
                        "Papillon", "toy terrier", "Rhodesian Ridgeback", "Afghan Hound", "Basset Hound", "Beagle",
                        "Bloodhound", "Bluetick Coonhound", "Black and Tan Coonhound", "Treeing Walker Coonhound",
                        "English foxhound", "Redbone Coonhound", "borzoi", "Irish Wolfhound", "Italian Greyhound",
                        "Whippet", "Ibizan Hound", "Norwegian Elkhound", "Otterhound", "Saluki", "Scottish Deerhound",
                        "Weimaraner", "Staffordshire Bull Terrier", "American Staffordshire Terrier",
                        "Bedlington Terrier", "Border Terrier", "Kerry Blue Terrier", "Irish Terrier",
                        "Norfolk Terrier", "Norwich Terrier", "Yorkshire Terrier", "Wire Fox Terrier",
                        "Lakeland Terrier", "Sealyham Terrier", "Airedale Terrier", "Cairn Terrier",
                        "Australian Terrier", "Dandie Dinmont Terrier", "Boston Terrier", "Miniature Schnauzer",
                        "Giant Schnauzer", "Standard Schnauzer", "Scottish Terrier", "Tibetan Terrier",
                        "Australian Silky Terrier", "Soft-coated Wheaten Terrier", "West Highland White Terrier",
                        "Lhasa Apso", "Flat-Coated Retriever", "Curly-coated Retriever", "Golden Retriever",
                        "Labrador Retriever", "Chesapeake Bay Retriever", "German Shorthaired Pointer", "Vizsla",
                        "English Setter", "Irish Setter", "Gordon Setter", "Brittany dog", "Clumber Spaniel",
                        "English Springer Spaniel", "Welsh Springer Spaniel", "Cocker Spaniel", "Sussex Spaniel",
                        "Irish Water Spaniel", "Kuvasz", "Schipperke", "Groenendael dog", "Malinois", "Briard",
                        "Australian Kelpie", "Komondor", "Old English Sheepdog", "Shetland Sheepdog", "collie",
                        "Border Collie", "Bouvier des Flandres dog", "Rottweiler", "German Shepherd Dog", "Dobermann",
                        "Miniature Pinscher", "Greater Swiss Mountain Dog", "Bernese Mountain Dog",
                        "Appenzeller Sennenhund", "Entlebucher Sennenhund", "Boxer", "Bullmastiff", "Tibetan Mastiff",
                        "French Bulldog", "Great Dane", "St. Bernard", "husky", "Alaskan Malamute", "Siberian Husky",
                        "Dalmatian", "Affenpinscher", "Basenji", "pug", "Leonberger", "Newfoundland dog",
                        "Great Pyrenees dog", "Samoyed", "Pomeranian", "Chow Chow", "Keeshond", "brussels griffon",
                        "Pembroke Welsh Corgi", "Cardigan Welsh Corgi", "Toy Poodle", "Miniature Poodle",
                        "Standard Poodle", "Mexican hairless dog (xoloitzcuintli)", "grey wolf", "Alaskan tundra wolf",
                        "red wolf or maned wolf", "coyote", "dingo", "dhole", "African wild dog", "hyena", "red fox",
                        "kit fox", "Arctic fox", "grey fox", "tabby cat", "tiger cat", "Persian cat", "Siamese cat",
                        "Egyptian Mau", "cougar", "lynx", "leopard", "snow leopard", "jaguar", "lion", "tiger",
                        "cheetah", "brown bear", "American black bear", "polar bear", "sloth bear", "mongoose",
                        "meerkat", "tiger beetle", "ladybug", "ground beetle", "longhorn beetle", "leaf beetle",
                        "dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant", "grasshopper",
                        "cricket insect", "stick insect", "cockroach", "praying mantis", "cicada", "leafhopper",
                        "lacewing", "dragonfly", "damselfly", "red admiral butterfly", "ringlet butterfly",
                        "monarch butterfly", "small white butterfly", "sulphur butterfly", "gossamer-winged butterfly",
                        "starfish", "sea urchin", "sea cucumber", "cottontail rabbit", "hare", "Angora rabbit",
                        "hamster", "porcupine", "fox squirrel", "marmot", "beaver", "guinea pig", "common sorrel horse",
                        "zebra", "pig", "wild boar", "warthog", "hippopotamus", "ox", "water buffalo", "bison",
                        "ram (adult male sheep)", "bighorn sheep", "Alpine ibex", "hartebeest", "impala (antelope)",
                        "gazelle", "arabian camel", "llama", "weasel", "mink", "European polecat",
                        "black-footed ferret", "otter", "skunk", "badger", "armadillo", "three-toed sloth", "orangutan",
                        "gorilla", "chimpanzee", "gibbon", "siamang", "guenon", "patas monkey", "baboon", "macaque",
                        "langur", "black-and-white colobus", "proboscis monkey", "marmoset", "white-headed capuchin",
                        "howler monkey", "titi monkey", "Geoffroy's spider monkey", "common squirrel monkey",
                        "ring-tailed lemur", "indri", "Asian elephant", "African bush elephant", "red panda",
                        "giant panda", "snoek fish", "eel", "silver salmon", "rock beauty fish", "clownfish",
                        "sturgeon", "gar fish", "lionfish", "pufferfish", "abacus", "abaya", "academic gown",
                        "accordion", "acoustic guitar", "aircraft carrier", "airliner", "airship", "altar", "ambulance",
                        "amphibious vehicle", "analog clock", "apiary", "apron", "trash can", "assault rifle",
                        "backpack", "bakery", "balance beam", "balloon", "ballpoint pen", "Band-Aid", "banjo",
                        "baluster / handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel",
                        "wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "swimming cap", "bath towel",
                        "bathtub", "station wagon", "lighthouse", "beaker", "military hat (bearskin or shako)",
                        "beer bottle", "beer glass", "bell tower", "baby bib", "tandem bicycle", "bikini",
                        "ring binder", "binoculars", "birdhouse", "boathouse", "bobsleigh", "bolo tie", "poke bonnet",
                        "bookcase", "bookstore", "bottle cap", "hunting bow", "bow tie", "brass memorial plaque", "bra",
                        "breakwater", "breastplate", "broom", "bucket", "buckle", "bulletproof vest",
                        "high-speed train", "butcher shop", "taxicab", "cauldron", "candle", "cannon", "canoe",
                        "can opener", "cardigan", "car mirror", "carousel", "tool kit", "cardboard box / carton",
                        "car wheel", "automated teller machine", "cassette", "cassette player", "castle", "catamaran",
                        "CD player", "cello", "mobile phone", "chain", "chain-link fence", "chain mail", "chainsaw",
                        "storage chest", "chiffonier", "bell or wind chime", "china cabinet", "Christmas stocking",
                        "church", "movie theater", "cleaver", "cliff dwelling", "cloak", "clogs", "cocktail shaker",
                        "coffee mug", "coffeemaker", "spiral or coil", "combination lock", "computer keyboard",
                        "candy store", "container ship", "convertible", "corkscrew", "cornet", "cowboy boot",
                        "cowboy hat", "cradle", "construction crane", "crash helmet", "crate", "infant bed",
                        "Crock Pot", "croquet ball", "crutch", "cuirass", "dam", "desk", "desktop computer",
                        "rotary dial telephone", "diaper", "digital clock", "digital watch", "dining table",
                        "dishcloth", "dishwasher", "disc brake", "dock", "dog sled", "dome", "doormat", "drilling rig",
                        "drum", "drumstick", "dumbbell", "Dutch oven", "electric fan", "electric guitar",
                        "electric locomotive", "entertainment center", "envelope", "espresso machine", "face powder",
                        "feather boa", "filing cabinet", "fireboat", "fire truck", "fire screen", "flagpole", "flute",
                        "folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster bed",
                        "freight car", "French horn", "frying pan", "fur coat", "garbage truck",
                        "gas mask or respirator", "gas pump", "goblet", "go-kart", "golf ball", "golf cart", "gondola",
                        "gong", "gown", "grand piano", "greenhouse", "radiator grille", "grocery store", "guillotine",
                        "hair clip", "hair spray", "half-track", "hammer", "hamper", "hair dryer", "hand-held computer",
                        "handkerchief", "hard disk drive", "harmonica", "harp", "combine harvester", "hatchet",
                        "holster", "home theater", "honeycomb", "hook", "hoop skirt", "gymnastic horizontal bar",
                        "horse-drawn vehicle", "hourglass", "iPod", "clothes iron", "carved pumpkin", "jeans", "jeep",
                        "T-shirt", "jigsaw puzzle", "rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat",
                        "ladle", "lampshade", "laptop computer", "lawn mower", "lens cap", "letter opener", "library",
                        "lifeboat", "lighter", "limousine", "ocean liner", "lipstick", "slip-on shoe", "lotion",
                        "music speaker", "loupe magnifying glass", "sawmill", "magnetic compass", "messenger bag",
                        "mailbox", "tights", "one-piece bathing suit", "manhole cover", "maraca", "marimba", "mask",
                        "matchstick", "maypole", "maze", "measuring cup", "medicine cabinet", "megalith", "microphone",
                        "microwave oven", "military uniform", "milk can", "minibus", "miniskirt", "minivan", "missile",
                        "mitten", "mixing bowl", "mobile home", "ford model t", "modem", "monastery", "monitor",
                        "moped", "mortar and pestle", "graduation cap", "mosque", "mosquito net", "vespa",
                        "mountain bike", "tent", "computer mouse", "mousetrap", "moving van", "muzzle", "metal nail",
                        "neck brace", "necklace", "baby pacifier", "notebook computer", "obelisk", "oboe", "ocarina",
                        "odometer", "oil filter", "pipe organ", "oscilloscope", "overskirt", "bullock cart",
                        "oxygen mask", "product packet / packaging", "paddle", "paddle wheel", "padlock", "paintbrush",
                        "pajamas", "palace", "pan flute", "paper towel", "parachute", "parallel bars", "park bench",
                        "parking meter", "railroad car", "patio", "payphone", "pedestal", "pencil case",
                        "pencil sharpener", "perfume", "Petri dish", "photocopier", "plectrum", "Pickelhaube",
                        "picket fence", "pickup truck", "pier", "piggy bank", "pill bottle", "pillow", "ping-pong ball",
                        "pinwheel", "pirate ship", "drink pitcher", "block plane", "planetarium", "plastic bag",
                        "plate rack", "farm plow", "plunger", "Polaroid camera", "pole", "police van", "poncho",
                        "pool table", "soda bottle", "plant pot", "potter's wheel", "power drill", "prayer rug",
                        "printer", "prison", "missile", "projector", "hockey puck", "punching bag", "purse", "quill",
                        "quilt", "race car", "racket", "radiator", "radio", "radio telescope", "rain barrel",
                        "recreational vehicle", "fishing casting reel", "reflex camera", "refrigerator",
                        "remote control", "restaurant", "revolver", "rifle", "rocking chair", "rotisserie", "eraser",
                        "rugby ball", "ruler measuring stick", "sneaker", "safe", "safety pin", "salt shaker", "sandal",
                        "sarong", "saxophone", "scabbard", "weighing scale", "school bus", "schooner", "scoreboard",
                        "CRT monitor", "screw", "screwdriver", "seat belt", "sewing machine", "shield", "shoe store",
                        "shoji screen / room divider", "shopping basket", "shopping cart", "shovel", "shower cap",
                        "shower curtain", "ski", "balaclava ski mask", "sleeping bag", "slide rule", "sliding door",
                        "slot machine", "snorkel", "snowmobile", "snowplow", "soap dispenser", "soccer ball", "sock",
                        "solar thermal collector", "sombrero", "soup bowl", "keyboard space bar", "space heater",
                        "space shuttle", "spatula", "motorboat", "spider web", "spindle", "sports car", "spotlight",
                        "stage", "steam locomotive", "through arch bridge", "steel drum", "stethoscope", "scarf",
                        "stone wall", "stopwatch", "stove", "strainer", "tram", "stretcher", "couch", "stupa",
                        "submarine", "suit", "sundial", "sunglasses", "sunglasses", "sunscreen", "suspension bridge",
                        "mop", "sweatshirt", "swim trunks / shorts", "swing", "electrical switch", "syringe",
                        "table lamp", "tank", "tape player", "teapot", "teddy bear", "television", "tennis ball",
                        "thatched roof", "front curtain", "thimble", "threshing machine", "throne", "tile roof",
                        "toaster", "tobacco shop", "toilet seat", "torch", "totem pole", "tow truck", "toy store",
                        "tractor", "semi-trailer truck", "tray", "trench coat", "tricycle", "trimaran", "tripod",
                        "triumphal arch", "trolleybus", "trombone", "hot tub", "turnstile", "typewriter keyboard",
                        "umbrella", "unicycle", "upright piano", "vacuum cleaner", "vase", "vaulted or arched ceiling",
                        "velvet fabric", "vending machine", "vestment", "viaduct", "violin", "volleyball",
                        "waffle iron", "wall clock", "wallet", "wardrobe", "military aircraft", "sink",
                        "washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle",
                        "hair wig", "window screen", "window shade", "Windsor tie", "wine bottle", "airplane wing",
                        "wok", "wooden spoon", "wool", "split-rail fence", "shipwreck", "sailboat", "yurt", "website",
                        "comic book", "crossword", "traffic or street sign", "traffic light", "dust jacket", "menu",
                        "plate", "guacamole", "consomme", "hot pot", "trifle", "ice cream", "popsicle", "baguette",
                        "bagel", "pretzel", "cheeseburger", "hot dog", "mashed potatoes", "cabbage", "broccoli",
                        "cauliflower", "zucchini", "spaghetti squash", "acorn squash", "butternut squash", "cucumber",
                        "artichoke", "bell pepper", "cardoon", "mushroom", "Granny Smith apple", "strawberry", "orange",
                        "lemon", "fig", "pineapple", "banana", "jackfruit", "cherimoya (custard apple)", "pomegranate",
                        "hay", "carbonara", "chocolate syrup", "dough", "meatloaf", "pizza", "pot pie", "burrito",
                        "red wine", "espresso", "tea cup", "eggnog", "mountain", "bubble", "cliff", "coral reef",
                        "geyser", "lakeshore", "promontory", "sandbar", "beach", "valley", "volcano", "baseball player",
                        "bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper", "corn", "acorn",
                        "rose hip", "horse chestnut seed", "coral fungus", "agaric", "gyromitra", "stinkhorn mushroom",
                        "earth star fungus", "hen of the woods mushroom", "bolete", "corn cob", "toilet paper"]

    # Seven prompt templates whose embeddings are averaged per class.
    imagenet_7_templates = [
        'itap of a {}.',
        'a origami {}.',
        'a bad photo of the {}.',
        'a photo of the large {}.',
        'a {} in a video game.',
        'art of the {}.',
        'a photo of the small {}.',
    ]

    print('load pre-trained model')
    model, preprocess = clip.load(args.arch)
    model = model.cuda()
    model.eval()

    print('load data')
    valdir = os.path.join(args.data_path, 'val')
    val_set = datasets.ImageFolder(valdir, transform=preprocess)
    loader = torch.utils.data.DataLoader(val_set, batch_size=args.batch_size, num_workers=args.workers)

    # Pre-extract L2-normalized image features for the whole validation set.
    with torch.no_grad():
        image_feat = []
        image_label = []
        for images, target in loader:
            images = images.cuda()
            target = target.cuda()
            image_features = model.encode_image(images)
            image_feat.append(F.normalize(image_features, dim=1))
            image_label.append(target)
    image_feat = torch.cat(image_feat, dim=0)
    image_label = torch.cat(image_label, dim=0)
    n = len(image_label)
    image_feat = image_feat.float()

    print('obtain text proxy')
    text_classifier = zeroshot_classifier(clip, model, imagenet_classes, imagenet_7_templates)
    text_classifier = text_classifier.float()
    logits_t = image_feat @ text_classifier
    acc1, _ = accuracy(logits_t, image_label, topk=(1, 5))
    top1 = (acc1 / n) * 100
    print(f'accuracy with text proxy: {top1:.2f}')

    print('online zero-shot transfer: repeat {} times'.format(args.repeat))
    acc_ls = torch.zeros(args.repeat).cuda()
    for rep in range(args.repeat):  # renamed from `iter`, which shadowed the builtin
        idx = torch.randperm(n).cuda()
        # Soft text labels for the shuffled stream, computed in one batched
        # matmul + softmax (replaces the original per-sample Python loop;
        # identical values, far faster).
        text_label = F.softmax(image_feat[idx] @ text_classifier / args.tau_t, dim=1)
        ####################################################
        # MAPLS - EM Algorithm
        # Uniform class prior; built directly on GPU (was np.full + transfer).
        pz = torch.full((len(imagenet_classes),), 1.0 / len(imagenet_classes),
                        dtype=torch.float32, device='cuda')
        qy = mapls_torch(text_label, pz=pz, qy_mode="soft", max_iter=50, lam=lam)
        w_mapls = qy.to("cuda") / pz
        qy_probs = lsc_torch(text_label, 1.0 / w_mapls)
        # MAPLS - EM Algorithm
        ####################################################
        # Direct tensor conversion (the original round-tripped through numpy).
        qy_probs = qy_probs.cpu().float()
        acc1_ls, _ = accuracy(qy_probs, image_label[idx], topk=(1, 5))
        acc_ls[rep] = (acc1_ls / n) * 100
    # NOTE(review): logged once per lambda after all repeats; indentation of
    # the original rendering was ambiguous — confirm against the log files.
    logging.info('mean acc of MAPLS only in-loop with lambda {:.2f} is: {:.2f}'.format(lam, torch.mean(acc_ls)))
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
def zeroshot_classifier(clip, model, classnames, templates):
    """Build a zero-shot text classifier from prompt ensembles.

    For each class name, every template is filled in, tokenized and encoded;
    the per-prompt embeddings are L2-normalized, averaged, and the mean is
    renormalized. Returns a (dim, num_classes) tensor of class weights on GPU.
    """
    with torch.no_grad():
        class_weights = []
        for name in classnames:
            prompts = clip.tokenize([t.format(name) for t in templates]).cuda()
            emb = model.encode_text(prompts)
            # Normalize each prompt embedding before averaging.
            emb = emb / emb.norm(dim=-1, keepdim=True)
            mean_emb = emb.mean(dim=0)
            # Renormalize the ensemble mean back onto the unit sphere.
            class_weights.append(mean_emb / mean_emb.norm())
        return torch.stack(class_weights, dim=1).cuda()
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
def accuracy(output, target, topk=(1,)):
    """Count correct top-k predictions.

    Returns a list of floats, one per k in `topk`: the number of samples
    whose true label appears among the k highest-scoring classes.
    """
    kmax = max(topk)
    # (kmax, batch) matrix of predicted class indices, moved to CPU once.
    top_pred = output.topk(kmax, 1, True, True)[1].t().cpu()
    labels = target.cpu().view(1, -1).expand_as(top_pred)
    hits = top_pred.eq(labels)
    return [float(hits[:k].flatten().sum().item()) for k in topk]
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
if __name__ == '__main__':
    # Sweep the MAPLS mixing weight lambda over a fixed grid, one full
    # evaluation per value.
    for lam in (0.95, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1):
        main(lam)
|
| 326 |
+
|
OnZeta/main_online_imagenet_mapls.py
ADDED
|
@@ -0,0 +1,417 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) Alibaba Group
|
| 2 |
+
import argparse
|
| 3 |
+
import torch
|
| 4 |
+
import torchvision.datasets as datasets
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
import clip
|
| 7 |
+
import os
|
| 8 |
+
import math
|
| 9 |
+
import numpy as np
|
| 10 |
+
|
| 11 |
+
from MAPLS.mapls import mapls
|
| 12 |
+
from MAPLS.common import lsc
|
| 13 |
+
|
| 14 |
+
# CLIP backbones supported by this script.
model_names = ['RN50', 'ViT-B/32', 'ViT-B/16', 'ViT-L/14', 'ViT-L/14@336px']

# Command-line interface: dataset location, backbone, loader settings, and
# OnZeta/MAPLS hyper-parameters.
parser = argparse.ArgumentParser(description='OnZeta for ImageNet')
parser.add_argument('--data_path', default='/home/li325/space_mlai/pengxiao_space/dataset/ImageNet/', type=str,
                    help='dataset path')
parser.add_argument('-a', '--arch', metavar='ARCH', default='RN50',
                    choices=model_names,
                    help='model architecture: ' +
                         ' | '.join(model_names) +
                         ' (default: RN50)')
parser.add_argument('-j', '--workers', default=8, type=int, metavar='N',
                    help='number of data loading workers (default: 8)')
# fix: help text claimed "(default: 256)" while the actual default is 1
# (this variant streams one sample at a time).
parser.add_argument('-b', '--batch-size', default=1, type=int,
                    metavar='N',
                    help='mini-batch size (default: 1)')
# Softmax temperatures for text/image sides and online-update hyper-parameters.
parser.add_argument('--tau_t', default=0.01, type=float)
parser.add_argument('--tau_i', default=0.04, type=float)
parser.add_argument('--cw', default=0.5, type=float)
parser.add_argument('--cr', default=20, type=float)
parser.add_argument('--alpha', default=1, type=float)
parser.add_argument('--beta', default=0.8, type=float)
# Number of random shuffles of the evaluation stream to average over.
parser.add_argument('--repeat', default=5, type=int)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def main(lam):
    """Evaluate online zero-shot transfer (OnZeta) with a MAPLS refinement on
    the ImageNet validation set.

    Pipeline: encode all validation images with CLIP once, build text proxies
    from prompt templates, then stream the images in a random order while
    adapting vision proxies ``w`` and a per-class dual variable ``rho`` online;
    finally the accumulated online predictions are refined with the MAPLS EM
    procedure and a label-shift correction (``lsc``).

    Args:
        lam: weight forwarded to ``mapls`` as its ``lam`` argument.
    """
    args = parser.parse_args()
    print(args)
    # The 1000 ImageNet class names (CLIP's cleaned-up naming).
    imagenet_classes = ["tench", "goldfish", "great white shark", "tiger shark", "hammerhead shark", "electric ray",
                        "stingray", "rooster", "hen", "ostrich", "brambling", "goldfinch", "house finch", "junco",
                        "indigo bunting", "American robin", "bulbul", "jay", "magpie", "chickadee", "American dipper",
                        "kite (bird of prey)", "bald eagle", "vulture", "great grey owl", "fire salamander",
                        "smooth newt", "newt", "spotted salamander", "axolotl", "American bullfrog", "tree frog",
                        "tailed frog", "loggerhead sea turtle", "leatherback sea turtle", "mud turtle", "terrapin",
                        "box turtle", "banded gecko", "green iguana", "Carolina anole",
                        "desert grassland whiptail lizard", "agama", "frilled-necked lizard", "alligator lizard",
                        "Gila monster", "European green lizard", "chameleon", "Komodo dragon", "Nile crocodile",
                        "American alligator", "triceratops", "worm snake", "ring-necked snake",
                        "eastern hog-nosed snake", "smooth green snake", "kingsnake", "garter snake", "water snake",
                        "vine snake", "night snake", "boa constrictor", "African rock python", "Indian cobra",
                        "green mamba", "sea snake", "Saharan horned viper", "eastern diamondback rattlesnake",
                        "sidewinder rattlesnake", "trilobite", "harvestman", "scorpion", "yellow garden spider",
                        "barn spider", "European garden spider", "southern black widow", "tarantula", "wolf spider",
                        "tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse", "prairie grouse", "peafowl",
                        "quail", "partridge", "african grey parrot", "macaw", "sulphur-crested cockatoo", "lorikeet",
                        "coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "duck",
                        "red-breasted merganser", "goose", "black swan", "tusker", "echidna", "platypus", "wallaby",
                        "koala", "wombat", "jellyfish", "sea anemone", "brain coral", "flatworm", "nematode", "conch",
                        "snail", "slug", "sea slug", "chiton", "chambered nautilus", "Dungeness crab", "rock crab",
                        "fiddler crab", "red king crab", "American lobster", "spiny lobster", "crayfish", "hermit crab",
                        "isopod", "white stork", "black stork", "spoonbill", "flamingo", "little blue heron",
                        "great egret", "bittern bird", "crane bird", "limpkin", "common gallinule", "American coot",
                        "bustard", "ruddy turnstone", "dunlin", "common redshank", "dowitcher", "oystercatcher",
                        "pelican", "king penguin", "albatross", "grey whale", "killer whale", "dugong", "sea lion",
                        "Chihuahua", "Japanese Chin", "Maltese", "Pekingese", "Shih Tzu", "King Charles Spaniel",
                        "Papillon", "toy terrier", "Rhodesian Ridgeback", "Afghan Hound", "Basset Hound", "Beagle",
                        "Bloodhound", "Bluetick Coonhound", "Black and Tan Coonhound", "Treeing Walker Coonhound",
                        "English foxhound", "Redbone Coonhound", "borzoi", "Irish Wolfhound", "Italian Greyhound",
                        "Whippet", "Ibizan Hound", "Norwegian Elkhound", "Otterhound", "Saluki", "Scottish Deerhound",
                        "Weimaraner", "Staffordshire Bull Terrier", "American Staffordshire Terrier",
                        "Bedlington Terrier", "Border Terrier", "Kerry Blue Terrier", "Irish Terrier",
                        "Norfolk Terrier", "Norwich Terrier", "Yorkshire Terrier", "Wire Fox Terrier",
                        "Lakeland Terrier", "Sealyham Terrier", "Airedale Terrier", "Cairn Terrier",
                        "Australian Terrier", "Dandie Dinmont Terrier", "Boston Terrier", "Miniature Schnauzer",
                        "Giant Schnauzer", "Standard Schnauzer", "Scottish Terrier", "Tibetan Terrier",
                        "Australian Silky Terrier", "Soft-coated Wheaten Terrier", "West Highland White Terrier",
                        "Lhasa Apso", "Flat-Coated Retriever", "Curly-coated Retriever", "Golden Retriever",
                        "Labrador Retriever", "Chesapeake Bay Retriever", "German Shorthaired Pointer", "Vizsla",
                        "English Setter", "Irish Setter", "Gordon Setter", "Brittany dog", "Clumber Spaniel",
                        "English Springer Spaniel", "Welsh Springer Spaniel", "Cocker Spaniel", "Sussex Spaniel",
                        "Irish Water Spaniel", "Kuvasz", "Schipperke", "Groenendael dog", "Malinois", "Briard",
                        "Australian Kelpie", "Komondor", "Old English Sheepdog", "Shetland Sheepdog", "collie",
                        "Border Collie", "Bouvier des Flandres dog", "Rottweiler", "German Shepherd Dog", "Dobermann",
                        "Miniature Pinscher", "Greater Swiss Mountain Dog", "Bernese Mountain Dog",
                        "Appenzeller Sennenhund", "Entlebucher Sennenhund", "Boxer", "Bullmastiff", "Tibetan Mastiff",
                        "French Bulldog", "Great Dane", "St. Bernard", "husky", "Alaskan Malamute", "Siberian Husky",
                        "Dalmatian", "Affenpinscher", "Basenji", "pug", "Leonberger", "Newfoundland dog",
                        "Great Pyrenees dog", "Samoyed", "Pomeranian", "Chow Chow", "Keeshond", "brussels griffon",
                        "Pembroke Welsh Corgi", "Cardigan Welsh Corgi", "Toy Poodle", "Miniature Poodle",
                        "Standard Poodle", "Mexican hairless dog (xoloitzcuintli)", "grey wolf", "Alaskan tundra wolf",
                        "red wolf or maned wolf", "coyote", "dingo", "dhole", "African wild dog", "hyena", "red fox",
                        "kit fox", "Arctic fox", "grey fox", "tabby cat", "tiger cat", "Persian cat", "Siamese cat",
                        "Egyptian Mau", "cougar", "lynx", "leopard", "snow leopard", "jaguar", "lion", "tiger",
                        "cheetah", "brown bear", "American black bear", "polar bear", "sloth bear", "mongoose",
                        "meerkat", "tiger beetle", "ladybug", "ground beetle", "longhorn beetle", "leaf beetle",
                        "dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant", "grasshopper",
                        "cricket insect", "stick insect", "cockroach", "praying mantis", "cicada", "leafhopper",
                        "lacewing", "dragonfly", "damselfly", "red admiral butterfly", "ringlet butterfly",
                        "monarch butterfly", "small white butterfly", "sulphur butterfly", "gossamer-winged butterfly",
                        "starfish", "sea urchin", "sea cucumber", "cottontail rabbit", "hare", "Angora rabbit",
                        "hamster", "porcupine", "fox squirrel", "marmot", "beaver", "guinea pig", "common sorrel horse",
                        "zebra", "pig", "wild boar", "warthog", "hippopotamus", "ox", "water buffalo", "bison",
                        "ram (adult male sheep)", "bighorn sheep", "Alpine ibex", "hartebeest", "impala (antelope)",
                        "gazelle", "arabian camel", "llama", "weasel", "mink", "European polecat",
                        "black-footed ferret", "otter", "skunk", "badger", "armadillo", "three-toed sloth", "orangutan",
                        "gorilla", "chimpanzee", "gibbon", "siamang", "guenon", "patas monkey", "baboon", "macaque",
                        "langur", "black-and-white colobus", "proboscis monkey", "marmoset", "white-headed capuchin",
                        "howler monkey", "titi monkey", "Geoffroy's spider monkey", "common squirrel monkey",
                        "ring-tailed lemur", "indri", "Asian elephant", "African bush elephant", "red panda",
                        "giant panda", "snoek fish", "eel", "silver salmon", "rock beauty fish", "clownfish",
                        "sturgeon", "gar fish", "lionfish", "pufferfish", "abacus", "abaya", "academic gown",
                        "accordion", "acoustic guitar", "aircraft carrier", "airliner", "airship", "altar", "ambulance",
                        "amphibious vehicle", "analog clock", "apiary", "apron", "trash can", "assault rifle",
                        "backpack", "bakery", "balance beam", "balloon", "ballpoint pen", "Band-Aid", "banjo",
                        "baluster / handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel",
                        "wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "swimming cap", "bath towel",
                        "bathtub", "station wagon", "lighthouse", "beaker", "military hat (bearskin or shako)",
                        "beer bottle", "beer glass", "bell tower", "baby bib", "tandem bicycle", "bikini",
                        "ring binder", "binoculars", "birdhouse", "boathouse", "bobsleigh", "bolo tie", "poke bonnet",
                        "bookcase", "bookstore", "bottle cap", "hunting bow", "bow tie", "brass memorial plaque", "bra",
                        "breakwater", "breastplate", "broom", "bucket", "buckle", "bulletproof vest",
                        "high-speed train", "butcher shop", "taxicab", "cauldron", "candle", "cannon", "canoe",
                        "can opener", "cardigan", "car mirror", "carousel", "tool kit", "cardboard box / carton",
                        "car wheel", "automated teller machine", "cassette", "cassette player", "castle", "catamaran",
                        "CD player", "cello", "mobile phone", "chain", "chain-link fence", "chain mail", "chainsaw",
                        "storage chest", "chiffonier", "bell or wind chime", "china cabinet", "Christmas stocking",
                        "church", "movie theater", "cleaver", "cliff dwelling", "cloak", "clogs", "cocktail shaker",
                        "coffee mug", "coffeemaker", "spiral or coil", "combination lock", "computer keyboard",
                        "candy store", "container ship", "convertible", "corkscrew", "cornet", "cowboy boot",
                        "cowboy hat", "cradle", "construction crane", "crash helmet", "crate", "infant bed",
                        "Crock Pot", "croquet ball", "crutch", "cuirass", "dam", "desk", "desktop computer",
                        "rotary dial telephone", "diaper", "digital clock", "digital watch", "dining table",
                        "dishcloth", "dishwasher", "disc brake", "dock", "dog sled", "dome", "doormat", "drilling rig",
                        "drum", "drumstick", "dumbbell", "Dutch oven", "electric fan", "electric guitar",
                        "electric locomotive", "entertainment center", "envelope", "espresso machine", "face powder",
                        "feather boa", "filing cabinet", "fireboat", "fire truck", "fire screen", "flagpole", "flute",
                        "folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster bed",
                        "freight car", "French horn", "frying pan", "fur coat", "garbage truck",
                        "gas mask or respirator", "gas pump", "goblet", "go-kart", "golf ball", "golf cart", "gondola",
                        "gong", "gown", "grand piano", "greenhouse", "radiator grille", "grocery store", "guillotine",
                        "hair clip", "hair spray", "half-track", "hammer", "hamper", "hair dryer", "hand-held computer",
                        "handkerchief", "hard disk drive", "harmonica", "harp", "combine harvester", "hatchet",
                        "holster", "home theater", "honeycomb", "hook", "hoop skirt", "gymnastic horizontal bar",
                        "horse-drawn vehicle", "hourglass", "iPod", "clothes iron", "carved pumpkin", "jeans", "jeep",
                        "T-shirt", "jigsaw puzzle", "rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat",
                        "ladle", "lampshade", "laptop computer", "lawn mower", "lens cap", "letter opener", "library",
                        "lifeboat", "lighter", "limousine", "ocean liner", "lipstick", "slip-on shoe", "lotion",
                        "music speaker", "loupe magnifying glass", "sawmill", "magnetic compass", "messenger bag",
                        "mailbox", "tights", "one-piece bathing suit", "manhole cover", "maraca", "marimba", "mask",
                        "matchstick", "maypole", "maze", "measuring cup", "medicine cabinet", "megalith", "microphone",
                        "microwave oven", "military uniform", "milk can", "minibus", "miniskirt", "minivan", "missile",
                        "mitten", "mixing bowl", "mobile home", "ford model t", "modem", "monastery", "monitor",
                        "moped", "mortar and pestle", "graduation cap", "mosque", "mosquito net", "vespa",
                        "mountain bike", "tent", "computer mouse", "mousetrap", "moving van", "muzzle", "metal nail",
                        "neck brace", "necklace", "baby pacifier", "notebook computer", "obelisk", "oboe", "ocarina",
                        "odometer", "oil filter", "pipe organ", "oscilloscope", "overskirt", "bullock cart",
                        "oxygen mask", "product packet / packaging", "paddle", "paddle wheel", "padlock", "paintbrush",
                        "pajamas", "palace", "pan flute", "paper towel", "parachute", "parallel bars", "park bench",
                        "parking meter", "railroad car", "patio", "payphone", "pedestal", "pencil case",
                        "pencil sharpener", "perfume", "Petri dish", "photocopier", "plectrum", "Pickelhaube",
                        "picket fence", "pickup truck", "pier", "piggy bank", "pill bottle", "pillow", "ping-pong ball",
                        "pinwheel", "pirate ship", "drink pitcher", "block plane", "planetarium", "plastic bag",
                        "plate rack", "farm plow", "plunger", "Polaroid camera", "pole", "police van", "poncho",
                        "pool table", "soda bottle", "plant pot", "potter's wheel", "power drill", "prayer rug",
                        "printer", "prison", "missile", "projector", "hockey puck", "punching bag", "purse", "quill",
                        "quilt", "race car", "racket", "radiator", "radio", "radio telescope", "rain barrel",
                        "recreational vehicle", "fishing casting reel", "reflex camera", "refrigerator",
                        "remote control", "restaurant", "revolver", "rifle", "rocking chair", "rotisserie", "eraser",
                        "rugby ball", "ruler measuring stick", "sneaker", "safe", "safety pin", "salt shaker", "sandal",
                        "sarong", "saxophone", "scabbard", "weighing scale", "school bus", "schooner", "scoreboard",
                        "CRT monitor", "screw", "screwdriver", "seat belt", "sewing machine", "shield", "shoe store",
                        "shoji screen / room divider", "shopping basket", "shopping cart", "shovel", "shower cap",
                        "shower curtain", "ski", "balaclava ski mask", "sleeping bag", "slide rule", "sliding door",
                        "slot machine", "snorkel", "snowmobile", "snowplow", "soap dispenser", "soccer ball", "sock",
                        "solar thermal collector", "sombrero", "soup bowl", "keyboard space bar", "space heater",
                        "space shuttle", "spatula", "motorboat", "spider web", "spindle", "sports car", "spotlight",
                        "stage", "steam locomotive", "through arch bridge", "steel drum", "stethoscope", "scarf",
                        "stone wall", "stopwatch", "stove", "strainer", "tram", "stretcher", "couch", "stupa",
                        "submarine", "suit", "sundial", "sunglasses", "sunglasses", "sunscreen", "suspension bridge",
                        "mop", "sweatshirt", "swim trunks / shorts", "swing", "electrical switch", "syringe",
                        "table lamp", "tank", "tape player", "teapot", "teddy bear", "television", "tennis ball",
                        "thatched roof", "front curtain", "thimble", "threshing machine", "throne", "tile roof",
                        "toaster", "tobacco shop", "toilet seat", "torch", "totem pole", "tow truck", "toy store",
                        "tractor", "semi-trailer truck", "tray", "trench coat", "tricycle", "trimaran", "tripod",
                        "triumphal arch", "trolleybus", "trombone", "hot tub", "turnstile", "typewriter keyboard",
                        "umbrella", "unicycle", "upright piano", "vacuum cleaner", "vase", "vaulted or arched ceiling",
                        "velvet fabric", "vending machine", "vestment", "viaduct", "violin", "volleyball",
                        "waffle iron", "wall clock", "wallet", "wardrobe", "military aircraft", "sink",
                        "washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle",
                        "hair wig", "window screen", "window shade", "Windsor tie", "wine bottle", "airplane wing",
                        "wok", "wooden spoon", "wool", "split-rail fence", "shipwreck", "sailboat", "yurt", "website",
                        "comic book", "crossword", "traffic or street sign", "traffic light", "dust jacket", "menu",
                        "plate", "guacamole", "consomme", "hot pot", "trifle", "ice cream", "popsicle", "baguette",
                        "bagel", "pretzel", "cheeseburger", "hot dog", "mashed potatoes", "cabbage", "broccoli",
                        "cauliflower", "zucchini", "spaghetti squash", "acorn squash", "butternut squash", "cucumber",
                        "artichoke", "bell pepper", "cardoon", "mushroom", "Granny Smith apple", "strawberry", "orange",
                        "lemon", "fig", "pineapple", "banana", "jackfruit", "cherimoya (custard apple)", "pomegranate",
                        "hay", "carbonara", "chocolate syrup", "dough", "meatloaf", "pizza", "pot pie", "burrito",
                        "red wine", "espresso", "tea cup", "eggnog", "mountain", "bubble", "cliff", "coral reef",
                        "geyser", "lakeshore", "promontory", "sandbar", "beach", "valley", "volcano", "baseball player",
                        "bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper", "corn", "acorn",
                        "rose hip", "horse chestnut seed", "coral fungus", "agaric", "gyromitra", "stinkhorn mushroom",
                        "earth star fungus", "hen of the woods mushroom", "bolete", "corn cob", "toilet paper"]

    # Prompt template sets; only the 7-template ensemble is used below.
    # NOTE: imagenet_single_template is defined but unused here.  (The original
    # source also carried CLIP's full 80-template set and a CIFAR-100 class
    # listing as commented-out code; both elided.)
    imagenet_single_template = [
        'a photo of a {}.',
    ]

    imagenet_7_templates = [
        'itap of a {}.',
        'a origami {}.',
        'a bad photo of the {}.',
        'a photo of the large {}.',
        'a {} in a video game.',
        'art of the {}.',
        'a photo of the small {}.',
    ]

    print('load pre-trained model')
    model, preprocess = clip.load(args.arch)
    model = model.cuda()
    model.eval()

    print('load data')
    valdir = os.path.join(args.data_path, 'val')
    val_set = datasets.ImageFolder(valdir, transform=preprocess)
    loader = torch.utils.data.DataLoader(val_set, batch_size=args.batch_size, num_workers=args.workers)
    # Pre-extract L2-normalized image features for the whole validation set so
    # the online loop below only performs cheap vector operations.
    with torch.no_grad():
        image_feat = []
        image_label = []
        for i, (images, target) in enumerate(loader):
            images = images.cuda()
            target = target.cuda()
            image_features = model.encode_image(images)
            image_feat.append(F.normalize(image_features, dim=1))
            image_label.append(target)
        image_feat = torch.cat(image_feat, dim=0)
        image_label = torch.cat(image_label, dim=0)
        n = len(image_label)
        image_feat = image_feat.float()

    print('obtain text proxy')
    text_classifier = zeroshot_classifier(clip, model, imagenet_classes, imagenet_7_templates)
    text_classifier = text_classifier.float()
    # Plain zero-shot baseline: cosine similarity against the text proxies.
    logits_t = image_feat @ text_classifier
    acc1, acc5 = accuracy(logits_t, image_label, topk=(1, 5))
    top1 = (acc1 / n) * 100
    print(f'accuracy with text proxy: {top1:.2f}')

    print('online zero-shot transfer: repeat {} times'.format(args.repeat))
    num_class = len(torch.unique(image_label))
    acc_onzeta = torch.zeros(args.repeat).cuda()
    acc_onlab = torch.zeros(args.repeat).cuda()
    acc_ls = torch.zeros(args.repeat).cuda()
    # NOTE(review): `iter` shadows the builtin of the same name.
    for iter in range(args.repeat):
        idx = torch.randperm(n).cuda()  # random arrival order of the stream
        combo_label = torch.zeros(n, num_class).cuda()
        text_label = torch.zeros(n, num_class).cuda()
        w = text_classifier.clone()  # online vision proxies, seeded from the text proxies
        rho = torch.zeros(num_class).cuda()  # per-class nonnegative dual variable
        for i in range(n):
            # Decaying step sizes for the proxy (lr) and dual (rlr) updates.
            lr = args.cw / math.sqrt(i + 1)
            rlr = args.cr / math.sqrt(i + 1)
            # Vision/text mixing weight, ramped up as more samples arrive.
            beta = args.beta * math.sqrt((i + 1) / n)
            x = image_feat[idx[i], :]
            # Text-side prediction, reweighted by exp(rho) toward balanced classes.
            tlabel = F.softmax(x @ text_classifier / args.tau_t, dim=0)
            tlabel = tlabel * torch.exp(rho)
            tlabel /= torch.sum(tlabel)
            # Dual step toward a marginal of alpha / num_class per class, then
            # projection onto rho >= 0.
            rho -= rlr * (tlabel - args.alpha / num_class)
            rho[rho < 0] = 0
            text_label[i, :] = tlabel
            vision_label = F.softmax(x @ w / args.tau_i, dim=0)
            combo_label[i, :] = beta * vision_label + (1 - beta) * tlabel
            # Gradient step on w with the text prediction as target, then
            # re-normalize every proxy column to unit norm.
            grad = torch.outer(x, vision_label - tlabel)
            w -= (lr / args.tau_i) * grad
            w = F.normalize(w, dim=0)
        acc1, acc5 = accuracy(text_label, image_label[idx], topk=(1, 5))
        acc_onlab[iter] = (acc1 / n) * 100
        acc1, acc5 = accuracy(combo_label, image_label[idx], topk=(1, 5))

        # MAPLS - EM Algorithm: refine the accumulated online predictions
        # starting from a uniform class prior pz.
        pz = np.full(len(imagenet_classes), 1.0 / len(imagenet_classes))
        qy = mapls(combo_label, pz = pz, qy_mode = "soft", max_iter = 100, lam = lam)

        # Ratio of estimated to prior class marginals, used to rescale scores.
        # (This `w` reuses and overwrites the proxy variable name above.)
        w = np.array(qy) / np.array(pz)
        # NOTE(review): if combo_label were ever on CPU, combo_label_cpu would
        # be undefined below -- the diff this file was recovered from does not
        # show the original indentation; here every tensor is created on GPU.
        if combo_label.is_cuda:
            combo_label_cpu = combo_label.cpu()
        qy_probs = lsc(combo_label_cpu, 1.0/w)
        qy_probs = torch.from_numpy(qy_probs)
        acc1_ls, acc5_ls = accuracy(qy_probs, image_label[idx], topk=(1, 5))

        acc_onzeta[iter] = (acc1 / n) * 100
        acc_ls[iter] = (acc1_ls / n) * 100
    print('mean acc of onlab is: {:.2f}'.format(torch.mean(acc_onlab)))
    print('mean acc of onzeta is: {:.2f}'.format(torch.mean(acc_onzeta)))
    print('mean acc of MAPLS is: {:.2f}'.format(torch.mean(acc_ls)))
|
| 386 |
+
|
| 387 |
+
|
| 388 |
+
def zeroshot_classifier(clip, model, classnames, templates):
    """Build a zero-shot text classifier from class names and prompt templates.

    Every template is filled with each class name, tokenized and encoded by
    the CLIP text encoder; the L2-normalized embeddings are averaged and
    re-normalized, yielding one unit-norm prototype per class.

    Args:
        clip: the ``clip`` module (provides ``tokenize``).
        model: a loaded CLIP model (provides ``encode_text``).
        classnames: iterable of class-name strings.
        templates: iterable of format strings with one ``{}`` placeholder.

    Returns:
        Tensor of shape (embed_dim, num_classes) on the GPU; one column per class.
    """
    with torch.no_grad():
        prototypes = []
        for name in classnames:
            prompts = [template.format(name) for template in templates]
            tokens = clip.tokenize(prompts).cuda()
            embeddings = model.encode_text(tokens)
            embeddings = embeddings / embeddings.norm(dim=-1, keepdim=True)
            mean_embedding = embeddings.mean(dim=0)
            prototypes.append(mean_embedding / mean_embedding.norm())
        return torch.stack(prototypes, dim=1).cuda()
|
| 401 |
+
|
| 402 |
+
|
| 403 |
+
def accuracy(output, target, topk=(1,)):
    """Count correct predictions within the top-k scores.

    Args:
        output: (N, C) score matrix.
        target: (N,) ground-truth class indices.
        topk: tuple of k values to evaluate.

    Returns:
        List with one float per k: how many samples have their target among
        that sample's k highest-scoring classes (a count, not a rate).
    """
    maxk = max(topk)
    # Indices of the maxk highest scores per row, transposed to (maxk, N).
    pred = output.topk(maxk, 1, True, True)[1].t().cpu()
    target = target.cpu()
    # hits[j, i] is True when sample i's target equals its rank-j prediction.
    hits = pred.eq(target.view(1, -1).expand_as(pred))
    counts = []
    for k in topk:
        counts.append(float(hits[:k].reshape(-1).float().sum().item()))
    return counts
|
| 408 |
+
|
| 409 |
+
|
| 410 |
+
if __name__ == '__main__':
    # Sweep candidates for the MAPLS weight `lam`; the full sweep is kept for
    # reference, a single value is used by default.
    # lams = [1.0, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1]
    lams = [0.6]
    for lam in lams:
        main(lam)
|
| 417 |
+
|
OnZeta/main_online_imagenet_mapls_aug.py
ADDED
|
@@ -0,0 +1,434 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) Alibaba Group
|
| 2 |
+
import argparse
|
| 3 |
+
import torch
|
| 4 |
+
import torchvision.datasets as datasets
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
import clip
|
| 7 |
+
import os
|
| 8 |
+
import math
|
| 9 |
+
from torchvision.transforms.functional import to_pil_image
|
| 10 |
+
import numpy as np
|
| 11 |
+
|
| 12 |
+
from MAPLS.mapls import mapls
|
| 13 |
+
from MAPLS.common import lsc
|
| 14 |
+
|
| 15 |
+
# CLIP backbones selectable via --arch (augmented-variant script).
model_names = ['RN50', 'ViT-B/32', 'ViT-B/16', 'ViT-L/14', 'ViT-L/14@336px']
parser = argparse.ArgumentParser(description='OnZeta for ImageNet')
parser.add_argument('--data_path', default='/home/lt_test/projects/datasets/ImageNet/', type=str,
                    help='dataset path')
parser.add_argument('-a', '--arch', metavar='ARCH', default='RN50',
                    choices=model_names,
                    help='model architecture: ' +
                         ' | '.join(model_names) +
                         ' (default: RN50)')
parser.add_argument('-j', '--workers', default=8, type=int, metavar='N',
                    help='number of data loading workers (default: 8)')
# NOTE(review): the help text says "default: 256" but the actual default is 512.
parser.add_argument('-b', '--batch-size', default=512, type=int,
                    metavar='N',
                    help='mini-batch size (default: 256)')
# Softmax temperatures: tau_t for text-proxy logits, tau_i for vision-proxy logits.
parser.add_argument('--tau_t', default=0.01, type=float)
parser.add_argument('--tau_i', default=0.04, type=float)
# Step-size constants for the proxy (cw) and dual-variable (cr) updates,
# applied as c / sqrt(t) decaying schedules.
parser.add_argument('--cw', default=0.5, type=float)
parser.add_argument('--cr', default=20, type=float)
# alpha: target per-class marginal is alpha / num_class in the dual update.
parser.add_argument('--alpha', default=1, type=float)
# beta: upper bound of the vision/text mixing weight, ramped up over the stream.
parser.add_argument('--beta', default=0.8, type=float)
# Number of random-order passes over the validation stream.
parser.add_argument('--repeat', default=5, type=int)
|
| 36 |
+
from torchvision import transforms
|
| 37 |
+
|
| 38 |
+
def main(lam):
    """Run one OnZeta + MAPLS evaluation pass over the ImageNet val split.

    lam: smoothing parameter forwarded to the MAPLS EM algorithm.
    All other settings (backbone, data path, OnZeta hyper-parameters) come
    from the module-level argparse parser.
    """
    # Candidate input augmentations; each entry triggers one full extra
    # encoding pass over the validation set below.
    augmentations = [
        transforms.Compose([
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
        ]),
        transforms.Compose([
            transforms.RandomRotation(15),
            transforms.ColorJitter(brightness=0.3, contrast=0.3),
            transforms.ToTensor(),
        ]),
        transforms.Compose([
            transforms.RandomAffine(degrees=20, scale=(0.8, 1.2)),
            transforms.ToTensor(),
        ]),
        transforms.Compose([
            transforms.RandomGrayscale(p=0.2),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
        ]),
        # Identity "augmentation": tensor conversion only.
        transforms.Compose([
            transforms.ToTensor(),
        ]),
    ]

    args = parser.parse_args()
    print(args)
    # The 1000 (prompt-friendly) ImageNet class names, in label order.
    imagenet_classes = ["tench", "goldfish", "great white shark", "tiger shark", "hammerhead shark", "electric ray",
                        "stingray", "rooster", "hen", "ostrich", "brambling", "goldfinch", "house finch", "junco",
                        "indigo bunting", "American robin", "bulbul", "jay", "magpie", "chickadee", "American dipper",
                        "kite (bird of prey)", "bald eagle", "vulture", "great grey owl", "fire salamander",
                        "smooth newt", "newt", "spotted salamander", "axolotl", "American bullfrog", "tree frog",
                        "tailed frog", "loggerhead sea turtle", "leatherback sea turtle", "mud turtle", "terrapin",
                        "box turtle", "banded gecko", "green iguana", "Carolina anole",
                        "desert grassland whiptail lizard", "agama", "frilled-necked lizard", "alligator lizard",
                        "Gila monster", "European green lizard", "chameleon", "Komodo dragon", "Nile crocodile",
                        "American alligator", "triceratops", "worm snake", "ring-necked snake",
                        "eastern hog-nosed snake", "smooth green snake", "kingsnake", "garter snake", "water snake",
                        "vine snake", "night snake", "boa constrictor", "African rock python", "Indian cobra",
                        "green mamba", "sea snake", "Saharan horned viper", "eastern diamondback rattlesnake",
                        "sidewinder rattlesnake", "trilobite", "harvestman", "scorpion", "yellow garden spider",
                        "barn spider", "European garden spider", "southern black widow", "tarantula", "wolf spider",
                        "tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse", "prairie grouse", "peafowl",
                        "quail", "partridge", "african grey parrot", "macaw", "sulphur-crested cockatoo", "lorikeet",
                        "coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "duck",
                        "red-breasted merganser", "goose", "black swan", "tusker", "echidna", "platypus", "wallaby",
                        "koala", "wombat", "jellyfish", "sea anemone", "brain coral", "flatworm", "nematode", "conch",
                        "snail", "slug", "sea slug", "chiton", "chambered nautilus", "Dungeness crab", "rock crab",
                        "fiddler crab", "red king crab", "American lobster", "spiny lobster", "crayfish", "hermit crab",
                        "isopod", "white stork", "black stork", "spoonbill", "flamingo", "little blue heron",
                        "great egret", "bittern bird", "crane bird", "limpkin", "common gallinule", "American coot",
                        "bustard", "ruddy turnstone", "dunlin", "common redshank", "dowitcher", "oystercatcher",
                        "pelican", "king penguin", "albatross", "grey whale", "killer whale", "dugong", "sea lion",
                        "Chihuahua", "Japanese Chin", "Maltese", "Pekingese", "Shih Tzu", "King Charles Spaniel",
                        "Papillon", "toy terrier", "Rhodesian Ridgeback", "Afghan Hound", "Basset Hound", "Beagle",
                        "Bloodhound", "Bluetick Coonhound", "Black and Tan Coonhound", "Treeing Walker Coonhound",
                        "English foxhound", "Redbone Coonhound", "borzoi", "Irish Wolfhound", "Italian Greyhound",
                        "Whippet", "Ibizan Hound", "Norwegian Elkhound", "Otterhound", "Saluki", "Scottish Deerhound",
                        "Weimaraner", "Staffordshire Bull Terrier", "American Staffordshire Terrier",
                        "Bedlington Terrier", "Border Terrier", "Kerry Blue Terrier", "Irish Terrier",
                        "Norfolk Terrier", "Norwich Terrier", "Yorkshire Terrier", "Wire Fox Terrier",
                        "Lakeland Terrier", "Sealyham Terrier", "Airedale Terrier", "Cairn Terrier",
                        "Australian Terrier", "Dandie Dinmont Terrier", "Boston Terrier", "Miniature Schnauzer",
                        "Giant Schnauzer", "Standard Schnauzer", "Scottish Terrier", "Tibetan Terrier",
                        "Australian Silky Terrier", "Soft-coated Wheaten Terrier", "West Highland White Terrier",
                        "Lhasa Apso", "Flat-Coated Retriever", "Curly-coated Retriever", "Golden Retriever",
                        "Labrador Retriever", "Chesapeake Bay Retriever", "German Shorthaired Pointer", "Vizsla",
                        "English Setter", "Irish Setter", "Gordon Setter", "Brittany dog", "Clumber Spaniel",
                        "English Springer Spaniel", "Welsh Springer Spaniel", "Cocker Spaniel", "Sussex Spaniel",
                        "Irish Water Spaniel", "Kuvasz", "Schipperke", "Groenendael dog", "Malinois", "Briard",
                        "Australian Kelpie", "Komondor", "Old English Sheepdog", "Shetland Sheepdog", "collie",
                        "Border Collie", "Bouvier des Flandres dog", "Rottweiler", "German Shepherd Dog", "Dobermann",
                        "Miniature Pinscher", "Greater Swiss Mountain Dog", "Bernese Mountain Dog",
                        "Appenzeller Sennenhund", "Entlebucher Sennenhund", "Boxer", "Bullmastiff", "Tibetan Mastiff",
                        "French Bulldog", "Great Dane", "St. Bernard", "husky", "Alaskan Malamute", "Siberian Husky",
                        "Dalmatian", "Affenpinscher", "Basenji", "pug", "Leonberger", "Newfoundland dog",
                        "Great Pyrenees dog", "Samoyed", "Pomeranian", "Chow Chow", "Keeshond", "brussels griffon",
                        "Pembroke Welsh Corgi", "Cardigan Welsh Corgi", "Toy Poodle", "Miniature Poodle",
                        "Standard Poodle", "Mexican hairless dog (xoloitzcuintli)", "grey wolf", "Alaskan tundra wolf",
                        "red wolf or maned wolf", "coyote", "dingo", "dhole", "African wild dog", "hyena", "red fox",
                        "kit fox", "Arctic fox", "grey fox", "tabby cat", "tiger cat", "Persian cat", "Siamese cat",
                        "Egyptian Mau", "cougar", "lynx", "leopard", "snow leopard", "jaguar", "lion", "tiger",
                        "cheetah", "brown bear", "American black bear", "polar bear", "sloth bear", "mongoose",
                        "meerkat", "tiger beetle", "ladybug", "ground beetle", "longhorn beetle", "leaf beetle",
                        "dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant", "grasshopper",
                        "cricket insect", "stick insect", "cockroach", "praying mantis", "cicada", "leafhopper",
                        "lacewing", "dragonfly", "damselfly", "red admiral butterfly", "ringlet butterfly",
                        "monarch butterfly", "small white butterfly", "sulphur butterfly", "gossamer-winged butterfly",
                        "starfish", "sea urchin", "sea cucumber", "cottontail rabbit", "hare", "Angora rabbit",
                        "hamster", "porcupine", "fox squirrel", "marmot", "beaver", "guinea pig", "common sorrel horse",
                        "zebra", "pig", "wild boar", "warthog", "hippopotamus", "ox", "water buffalo", "bison",
                        "ram (adult male sheep)", "bighorn sheep", "Alpine ibex", "hartebeest", "impala (antelope)",
                        "gazelle", "arabian camel", "llama", "weasel", "mink", "European polecat",
                        "black-footed ferret", "otter", "skunk", "badger", "armadillo", "three-toed sloth", "orangutan",
                        "gorilla", "chimpanzee", "gibbon", "siamang", "guenon", "patas monkey", "baboon", "macaque",
                        "langur", "black-and-white colobus", "proboscis monkey", "marmoset", "white-headed capuchin",
                        "howler monkey", "titi monkey", "Geoffroy's spider monkey", "common squirrel monkey",
                        "ring-tailed lemur", "indri", "Asian elephant", "African bush elephant", "red panda",
                        "giant panda", "snoek fish", "eel", "silver salmon", "rock beauty fish", "clownfish",
                        "sturgeon", "gar fish", "lionfish", "pufferfish", "abacus", "abaya", "academic gown",
                        "accordion", "acoustic guitar", "aircraft carrier", "airliner", "airship", "altar", "ambulance",
                        "amphibious vehicle", "analog clock", "apiary", "apron", "trash can", "assault rifle",
                        "backpack", "bakery", "balance beam", "balloon", "ballpoint pen", "Band-Aid", "banjo",
                        "baluster / handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel",
                        "wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "swimming cap", "bath towel",
                        "bathtub", "station wagon", "lighthouse", "beaker", "military hat (bearskin or shako)",
                        "beer bottle", "beer glass", "bell tower", "baby bib", "tandem bicycle", "bikini",
                        "ring binder", "binoculars", "birdhouse", "boathouse", "bobsleigh", "bolo tie", "poke bonnet",
                        "bookcase", "bookstore", "bottle cap", "hunting bow", "bow tie", "brass memorial plaque", "bra",
                        "breakwater", "breastplate", "broom", "bucket", "buckle", "bulletproof vest",
                        "high-speed train", "butcher shop", "taxicab", "cauldron", "candle", "cannon", "canoe",
                        "can opener", "cardigan", "car mirror", "carousel", "tool kit", "cardboard box / carton",
                        "car wheel", "automated teller machine", "cassette", "cassette player", "castle", "catamaran",
                        "CD player", "cello", "mobile phone", "chain", "chain-link fence", "chain mail", "chainsaw",
                        "storage chest", "chiffonier", "bell or wind chime", "china cabinet", "Christmas stocking",
                        "church", "movie theater", "cleaver", "cliff dwelling", "cloak", "clogs", "cocktail shaker",
                        "coffee mug", "coffeemaker", "spiral or coil", "combination lock", "computer keyboard",
                        "candy store", "container ship", "convertible", "corkscrew", "cornet", "cowboy boot",
                        "cowboy hat", "cradle", "construction crane", "crash helmet", "crate", "infant bed",
                        "Crock Pot", "croquet ball", "crutch", "cuirass", "dam", "desk", "desktop computer",
                        "rotary dial telephone", "diaper", "digital clock", "digital watch", "dining table",
                        "dishcloth", "dishwasher", "disc brake", "dock", "dog sled", "dome", "doormat", "drilling rig",
                        "drum", "drumstick", "dumbbell", "Dutch oven", "electric fan", "electric guitar",
                        "electric locomotive", "entertainment center", "envelope", "espresso machine", "face powder",
                        "feather boa", "filing cabinet", "fireboat", "fire truck", "fire screen", "flagpole", "flute",
                        "folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster bed",
                        "freight car", "French horn", "frying pan", "fur coat", "garbage truck",
                        "gas mask or respirator", "gas pump", "goblet", "go-kart", "golf ball", "golf cart", "gondola",
                        "gong", "gown", "grand piano", "greenhouse", "radiator grille", "grocery store", "guillotine",
                        "hair clip", "hair spray", "half-track", "hammer", "hamper", "hair dryer", "hand-held computer",
                        "handkerchief", "hard disk drive", "harmonica", "harp", "combine harvester", "hatchet",
                        "holster", "home theater", "honeycomb", "hook", "hoop skirt", "gymnastic horizontal bar",
                        "horse-drawn vehicle", "hourglass", "iPod", "clothes iron", "carved pumpkin", "jeans", "jeep",
                        "T-shirt", "jigsaw puzzle", "rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat",
                        "ladle", "lampshade", "laptop computer", "lawn mower", "lens cap", "letter opener", "library",
                        "lifeboat", "lighter", "limousine", "ocean liner", "lipstick", "slip-on shoe", "lotion",
                        "music speaker", "loupe magnifying glass", "sawmill", "magnetic compass", "messenger bag",
                        "mailbox", "tights", "one-piece bathing suit", "manhole cover", "maraca", "marimba", "mask",
                        "matchstick", "maypole", "maze", "measuring cup", "medicine cabinet", "megalith", "microphone",
                        "microwave oven", "military uniform", "milk can", "minibus", "miniskirt", "minivan", "missile",
                        "mitten", "mixing bowl", "mobile home", "ford model t", "modem", "monastery", "monitor",
                        "moped", "mortar and pestle", "graduation cap", "mosque", "mosquito net", "vespa",
                        "mountain bike", "tent", "computer mouse", "mousetrap", "moving van", "muzzle", "metal nail",
                        "neck brace", "necklace", "baby pacifier", "notebook computer", "obelisk", "oboe", "ocarina",
                        "odometer", "oil filter", "pipe organ", "oscilloscope", "overskirt", "bullock cart",
                        "oxygen mask", "product packet / packaging", "paddle", "paddle wheel", "padlock", "paintbrush",
                        "pajamas", "palace", "pan flute", "paper towel", "parachute", "parallel bars", "park bench",
                        "parking meter", "railroad car", "patio", "payphone", "pedestal", "pencil case",
                        "pencil sharpener", "perfume", "Petri dish", "photocopier", "plectrum", "Pickelhaube",
                        "picket fence", "pickup truck", "pier", "piggy bank", "pill bottle", "pillow", "ping-pong ball",
                        "pinwheel", "pirate ship", "drink pitcher", "block plane", "planetarium", "plastic bag",
                        "plate rack", "farm plow", "plunger", "Polaroid camera", "pole", "police van", "poncho",
                        "pool table", "soda bottle", "plant pot", "potter's wheel", "power drill", "prayer rug",
                        "printer", "prison", "missile", "projector", "hockey puck", "punching bag", "purse", "quill",
                        "quilt", "race car", "racket", "radiator", "radio", "radio telescope", "rain barrel",
                        "recreational vehicle", "fishing casting reel", "reflex camera", "refrigerator",
                        "remote control", "restaurant", "revolver", "rifle", "rocking chair", "rotisserie", "eraser",
                        "rugby ball", "ruler measuring stick", "sneaker", "safe", "safety pin", "salt shaker", "sandal",
                        "sarong", "saxophone", "scabbard", "weighing scale", "school bus", "schooner", "scoreboard",
                        "CRT monitor", "screw", "screwdriver", "seat belt", "sewing machine", "shield", "shoe store",
                        "shoji screen / room divider", "shopping basket", "shopping cart", "shovel", "shower cap",
                        "shower curtain", "ski", "balaclava ski mask", "sleeping bag", "slide rule", "sliding door",
                        "slot machine", "snorkel", "snowmobile", "snowplow", "soap dispenser", "soccer ball", "sock",
                        "solar thermal collector", "sombrero", "soup bowl", "keyboard space bar", "space heater",
                        "space shuttle", "spatula", "motorboat", "spider web", "spindle", "sports car", "spotlight",
                        "stage", "steam locomotive", "through arch bridge", "steel drum", "stethoscope", "scarf",
                        "stone wall", "stopwatch", "stove", "strainer", "tram", "stretcher", "couch", "stupa",
                        "submarine", "suit", "sundial", "sunglasses", "sunglasses", "sunscreen", "suspension bridge",
                        "mop", "sweatshirt", "swim trunks / shorts", "swing", "electrical switch", "syringe",
                        "table lamp", "tank", "tape player", "teapot", "teddy bear", "television", "tennis ball",
                        "thatched roof", "front curtain", "thimble", "threshing machine", "throne", "tile roof",
                        "toaster", "tobacco shop", "toilet seat", "torch", "totem pole", "tow truck", "toy store",
                        "tractor", "semi-trailer truck", "tray", "trench coat", "tricycle", "trimaran", "tripod",
                        "triumphal arch", "trolleybus", "trombone", "hot tub", "turnstile", "typewriter keyboard",
                        "umbrella", "unicycle", "upright piano", "vacuum cleaner", "vase", "vaulted or arched ceiling",
                        "velvet fabric", "vending machine", "vestment", "viaduct", "violin", "volleyball",
                        "waffle iron", "wall clock", "wallet", "wardrobe", "military aircraft", "sink",
                        "washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle",
                        "hair wig", "window screen", "window shade", "Windsor tie", "wine bottle", "airplane wing",
                        "wok", "wooden spoon", "wool", "split-rail fence", "shipwreck", "sailboat", "yurt", "website",
                        "comic book", "crossword", "traffic or street sign", "traffic light", "dust jacket", "menu",
                        "plate", "guacamole", "consomme", "hot pot", "trifle", "ice cream", "popsicle", "baguette",
                        "bagel", "pretzel", "cheeseburger", "hot dog", "mashed potatoes", "cabbage", "broccoli",
                        "cauliflower", "zucchini", "spaghetti squash", "acorn squash", "butternut squash", "cucumber",
                        "artichoke", "bell pepper", "cardoon", "mushroom", "Granny Smith apple", "strawberry", "orange",
                        "lemon", "fig", "pineapple", "banana", "jackfruit", "cherimoya (custard apple)", "pomegranate",
                        "hay", "carbonara", "chocolate syrup", "dough", "meatloaf", "pizza", "pot pie", "burrito",
                        "red wine", "espresso", "tea cup", "eggnog", "mountain", "bubble", "cliff", "coral reef",
                        "geyser", "lakeshore", "promontory", "sandbar", "beach", "valley", "volcano", "baseball player",
                        "bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper", "corn", "acorn",
                        "rose hip", "horse chestnut seed", "coral fungus", "agaric", "gyromitra", "stinkhorn mushroom",
                        "earth star fungus", "hen of the woods mushroom", "bolete", "corn cob", "toilet paper"]

    # Kept for parity with the original script (currently unused).
    imagenet_single_template = [
        'a photo of a {}.',
    ]

    # Prompt ensemble used to build the zero-shot text classifier.
    imagenet_7_templates = [
        'itap of a {}.',
        'a origami {}.',
        'a bad photo of the {}.',
        'a photo of the large {}.',
        'a {} in a video game.',
        'art of the {}.',
        'a photo of the small {}.',
    ]

    print('load pre-trained model')
    model, preprocess = clip.load(args.arch)
    model = model.cuda()
    model.eval()

    print('load data')
    valdir = os.path.join(args.data_path, 'val')
    val_set = datasets.ImageFolder(valdir, transform=preprocess)
    loader = torch.utils.data.DataLoader(val_set, batch_size=args.batch_size,
                                         num_workers=args.workers)

    # Pass 1: encode the preprocessed images and collect the ground truth.
    image_feat = []
    image_label = []
    with torch.no_grad():
        for images, target in loader:
            images, target = images.cuda(), target.cuda()
            image_feat.append(F.normalize(model.encode_image(images), dim=1))
            image_label.append(target)
    image_feat = torch.cat(image_feat, dim=0)

    # One extra encoding pass per augmentation.
    # NOTE(review): images from `loader` are already normalized by CLIP's
    # `preprocess`; converting back to PIL and re-applying only ToTensor
    # drops that normalization — presumably intentional, but worth confirming.
    aug_feats = []
    with torch.no_grad():
        for aug in augmentations:
            feats = []
            for images, _ in loader:
                images = images.cuda()
                batch = torch.stack(
                    [aug(to_pil_image(img.cpu())) for img in images]).cuda()
                feats.append(F.normalize(model.encode_image(batch), dim=1))
            aug_feats.append(torch.cat(feats, dim=0))

    # NOTE(review): only the LAST augmentation (the identity one) feeds the
    # final features; the other four passes are computed but never used —
    # confirm whether an average over all of them was intended.
    image_feat = torch.mean(torch.stack([aug_feats[-1]], dim=0), dim=0)

    image_label = torch.cat(image_label, dim=0)
    n = len(image_label)
    image_feat = image_feat.float()

    print('obtain text proxy')
    text_classifier = zeroshot_classifier(clip, model, imagenet_classes,
                                          imagenet_7_templates)
    text_classifier = text_classifier.float()
    logits_t = image_feat @ text_classifier
    acc1, acc5 = accuracy(logits_t, image_label, topk=(1, 5))
    top1 = (acc1 / n) * 100
    print(f'accuracy with text proxy: {top1:.2f}')

    print('online zero-shot transfer: repeat {} times'.format(args.repeat))
    num_class = len(torch.unique(image_label))
    acc_onzeta = torch.zeros(args.repeat).cuda()
    acc_onlab = torch.zeros(args.repeat).cuda()
    acc_ls = torch.zeros(args.repeat).cuda()
    for rep in range(args.repeat):
        idx = torch.randperm(n).cuda()        # random streaming order
        combo_label = torch.zeros(n, num_class).cuda()
        text_label = torch.zeros(n, num_class).cuda()
        w = text_classifier.clone()           # online vision proxy
        rho = torch.zeros(num_class).cuda()   # dual variable for balancing
        for i in range(n):
            lr = args.cw / math.sqrt(i + 1)
            rlr = args.cr / math.sqrt(i + 1)
            beta = args.beta * math.sqrt((i + 1) / n)
            x = image_feat[idx[i], :]
            # Text-side prediction, reweighted by the dual variable.
            tlabel = F.softmax(x @ text_classifier / args.tau_t, dim=0)
            tlabel = tlabel * torch.exp(rho)
            tlabel /= torch.sum(tlabel)
            # Projected gradient step on the dual variable (kept >= 0).
            rho -= rlr * (tlabel - args.alpha / num_class)
            rho[rho < 0] = 0
            text_label[i, :] = tlabel
            # Vision-side prediction and convex combination of both views.
            vision_label = F.softmax(x @ w / args.tau_i, dim=0)
            combo_label[i, :] = beta * vision_label + (1 - beta) * tlabel
            # Gradient step on the vision proxy, then re-normalize columns.
            grad = torch.outer(x, vision_label - tlabel)
            w -= (lr / args.tau_i) * grad
            w = F.normalize(w, dim=0)
        acc1, acc5 = accuracy(text_label, image_label[idx], topk=(1, 5))
        acc_onlab[rep] = (acc1 / n) * 100
        acc1, acc5 = accuracy(combo_label, image_label[idx], topk=(1, 5))

        # MAPLS - EM Algorithm over the online predictions.
        pz = np.full(len(imagenet_classes), 1.0 / len(imagenet_classes))
        qy = mapls(combo_label, pz=pz, qy_mode="soft", max_iter=100, lam=lam)

        ratio = np.array(qy) / np.array(pz)
        if combo_label.is_cuda:
            combo_label_cpu = combo_label.cpu()
        # NOTE(review): `combo_label_cpu` is only bound on the CUDA path;
        # `combo_label` is always created on the GPU above, so this holds,
        # but a CPU-only run would raise NameError here — verify.
        qy_probs = torch.from_numpy(lsc(combo_label_cpu, 1.0 / ratio))
        acc1_ls, acc5_ls = accuracy(qy_probs, image_label[idx], topk=(1, 5))

        acc_onzeta[rep] = (acc1 / n) * 100
        acc_ls[rep] = (acc1_ls / n) * 100
    print('mean acc of onlab is: {:.2f}'.format(torch.mean(acc_onlab)))
    print('mean acc of onzeta is: {:.2f}'.format(torch.mean(acc_onzeta)))
    print('mean acc of MAPLS is: {:.2f}'.format(torch.mean(acc_ls)))
| 405 |
+
|
| 406 |
+
def zeroshot_classifier(clip, model, classnames, templates):
    """Build a zero-shot text classifier from a prompt ensemble.

    For every class name, all templates are tokenized and encoded; the
    per-prompt embeddings are L2-normalized, averaged, and the mean is
    normalized again. Returns the class embeddings stacked as columns
    (embedding_dim x num_classes) on the GPU.
    """
    weights = []
    with torch.no_grad():
        for name in classnames:
            prompts = clip.tokenize(
                [template.format(name) for template in templates]).cuda()
            embeddings = model.encode_text(prompts)
            embeddings = embeddings / embeddings.norm(dim=-1, keepdim=True)
            mean_embedding = embeddings.mean(dim=0)
            weights.append(mean_embedding / mean_embedding.norm())
    return torch.stack(weights, dim=1).cuda()
|
| 419 |
+
|
| 420 |
+
|
| 421 |
+
def accuracy(output, target, topk=(1,)):
    """Count top-k correct predictions.

    output: (N, C) score tensor; target: (N,) integer labels.
    Returns one float per k in `topk`: the number of samples whose true
    label is among that sample's k highest-scoring classes.
    """
    maxk = max(topk)
    # Indices of the top-maxk classes per sample, transposed to (maxk, N).
    pred = output.topk(maxk, 1, True, True)[1].t().cpu()
    hits = pred.eq(target.cpu().view(1, -1).expand_as(pred))
    return [hits[:k].reshape(-1).float().sum().item() for k in topk]
|
| 426 |
+
|
| 427 |
+
|
| 428 |
+
if __name__ == '__main__':
    # Sweep the MAPLS smoothing parameter lambda; one full run per value.
    lams = [0.6]
    for lam in lams:
        main(lam)
|
| 434 |
+
|
OnZeta/main_online_imagenet_mapls_inloop.py
ADDED
|
@@ -0,0 +1,347 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) Alibaba Group
|
| 2 |
+
import argparse
|
| 3 |
+
import torch
|
| 4 |
+
import torchvision.datasets as datasets
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
import clip
|
| 7 |
+
import os
|
| 8 |
+
import math
|
| 9 |
+
import numpy as np
|
| 10 |
+
|
| 11 |
+
from MAPLS.mapls import mapls
|
| 12 |
+
from MAPLS.common import lsc
|
| 13 |
+
|
| 14 |
+
# CLIP backbones accepted by --arch.
model_names = ['RN50', 'ViT-B/32', 'ViT-B/16', 'ViT-L/14', 'ViT-L/14@336px']

parser = argparse.ArgumentParser(description='OnZeta for ImageNet')
parser.add_argument(
    '--data_path',
    default='/home/li325/space_mlai/pengxiao_space/dataset/ImageNet/',
    type=str, help='dataset path')
parser.add_argument(
    '-a', '--arch', metavar='ARCH', default='RN50', choices=model_names,
    help='model architecture: ' + ' | '.join(model_names) + ' (default: RN50)')
parser.add_argument(
    '-j', '--workers', default=8, type=int, metavar='N',
    help='number of data loading workers (default: 8)')
parser.add_argument(
    '-b', '--batch-size', default=256, type=int, metavar='N',
    help='mini-batch size (default: 256)')
# OnZeta hyper-parameters: temperatures, step-size constants, mixing weights.
parser.add_argument('--tau_t', default=0.01, type=float)
parser.add_argument('--tau_i', default=0.04, type=float)
parser.add_argument('--cw', default=0.5, type=float)
parser.add_argument('--cr', default=20, type=float)
parser.add_argument('--alpha', default=1, type=float)
parser.add_argument('--beta', default=0.8, type=float)
parser.add_argument('--repeat', default=1, type=int)

# The MAPLS lambda is passed to main() directly rather than via the CLI.
# parser.add_argument('--lam', default=0.6, type=float)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def main(lam):
    """Run online zero-shot transfer (OnZeta) on ImageNet with a CLIP backbone,
    augmented with a MAPLS (label-shift) EM correction, and print top-1 accuracies.

    Args:
        lam: lambda weight forwarded to the MAPLS EM algorithm
             (semantics defined in MAPLS.mapls — not visible here).
    """

    args = parser.parse_args()
    print(args)
    print("lambda = ", lam)
    # OpenAI's canonical readable names for the 1000 ImageNet classes, in
    # ImageFolder (alphabetical-wnid) order — order must match the dataset labels.
    imagenet_classes = ["tench", "goldfish", "great white shark", "tiger shark", "hammerhead shark", "electric ray",
        "stingray", "rooster", "hen", "ostrich", "brambling", "goldfinch", "house finch", "junco",
        "indigo bunting", "American robin", "bulbul", "jay", "magpie", "chickadee", "American dipper",
        "kite (bird of prey)", "bald eagle", "vulture", "great grey owl", "fire salamander",
        "smooth newt", "newt", "spotted salamander", "axolotl", "American bullfrog", "tree frog",
        "tailed frog", "loggerhead sea turtle", "leatherback sea turtle", "mud turtle", "terrapin",
        "box turtle", "banded gecko", "green iguana", "Carolina anole",
        "desert grassland whiptail lizard", "agama", "frilled-necked lizard", "alligator lizard",
        "Gila monster", "European green lizard", "chameleon", "Komodo dragon", "Nile crocodile",
        "American alligator", "triceratops", "worm snake", "ring-necked snake",
        "eastern hog-nosed snake", "smooth green snake", "kingsnake", "garter snake", "water snake",
        "vine snake", "night snake", "boa constrictor", "African rock python", "Indian cobra",
        "green mamba", "sea snake", "Saharan horned viper", "eastern diamondback rattlesnake",
        "sidewinder rattlesnake", "trilobite", "harvestman", "scorpion", "yellow garden spider",
        "barn spider", "European garden spider", "southern black widow", "tarantula", "wolf spider",
        "tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse", "prairie grouse", "peafowl",
        "quail", "partridge", "african grey parrot", "macaw", "sulphur-crested cockatoo", "lorikeet",
        "coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "duck",
        "red-breasted merganser", "goose", "black swan", "tusker", "echidna", "platypus", "wallaby",
        "koala", "wombat", "jellyfish", "sea anemone", "brain coral", "flatworm", "nematode", "conch",
        "snail", "slug", "sea slug", "chiton", "chambered nautilus", "Dungeness crab", "rock crab",
        "fiddler crab", "red king crab", "American lobster", "spiny lobster", "crayfish", "hermit crab",
        "isopod", "white stork", "black stork", "spoonbill", "flamingo", "little blue heron",
        "great egret", "bittern bird", "crane bird", "limpkin", "common gallinule", "American coot",
        "bustard", "ruddy turnstone", "dunlin", "common redshank", "dowitcher", "oystercatcher",
        "pelican", "king penguin", "albatross", "grey whale", "killer whale", "dugong", "sea lion",
        "Chihuahua", "Japanese Chin", "Maltese", "Pekingese", "Shih Tzu", "King Charles Spaniel",
        "Papillon", "toy terrier", "Rhodesian Ridgeback", "Afghan Hound", "Basset Hound", "Beagle",
        "Bloodhound", "Bluetick Coonhound", "Black and Tan Coonhound", "Treeing Walker Coonhound",
        "English foxhound", "Redbone Coonhound", "borzoi", "Irish Wolfhound", "Italian Greyhound",
        "Whippet", "Ibizan Hound", "Norwegian Elkhound", "Otterhound", "Saluki", "Scottish Deerhound",
        "Weimaraner", "Staffordshire Bull Terrier", "American Staffordshire Terrier",
        "Bedlington Terrier", "Border Terrier", "Kerry Blue Terrier", "Irish Terrier",
        "Norfolk Terrier", "Norwich Terrier", "Yorkshire Terrier", "Wire Fox Terrier",
        "Lakeland Terrier", "Sealyham Terrier", "Airedale Terrier", "Cairn Terrier",
        "Australian Terrier", "Dandie Dinmont Terrier", "Boston Terrier", "Miniature Schnauzer",
        "Giant Schnauzer", "Standard Schnauzer", "Scottish Terrier", "Tibetan Terrier",
        "Australian Silky Terrier", "Soft-coated Wheaten Terrier", "West Highland White Terrier",
        "Lhasa Apso", "Flat-Coated Retriever", "Curly-coated Retriever", "Golden Retriever",
        "Labrador Retriever", "Chesapeake Bay Retriever", "German Shorthaired Pointer", "Vizsla",
        "English Setter", "Irish Setter", "Gordon Setter", "Brittany dog", "Clumber Spaniel",
        "English Springer Spaniel", "Welsh Springer Spaniel", "Cocker Spaniel", "Sussex Spaniel",
        "Irish Water Spaniel", "Kuvasz", "Schipperke", "Groenendael dog", "Malinois", "Briard",
        "Australian Kelpie", "Komondor", "Old English Sheepdog", "Shetland Sheepdog", "collie",
        "Border Collie", "Bouvier des Flandres dog", "Rottweiler", "German Shepherd Dog", "Dobermann",
        "Miniature Pinscher", "Greater Swiss Mountain Dog", "Bernese Mountain Dog",
        "Appenzeller Sennenhund", "Entlebucher Sennenhund", "Boxer", "Bullmastiff", "Tibetan Mastiff",
        "French Bulldog", "Great Dane", "St. Bernard", "husky", "Alaskan Malamute", "Siberian Husky",
        "Dalmatian", "Affenpinscher", "Basenji", "pug", "Leonberger", "Newfoundland dog",
        "Great Pyrenees dog", "Samoyed", "Pomeranian", "Chow Chow", "Keeshond", "brussels griffon",
        "Pembroke Welsh Corgi", "Cardigan Welsh Corgi", "Toy Poodle", "Miniature Poodle",
        "Standard Poodle", "Mexican hairless dog (xoloitzcuintli)", "grey wolf", "Alaskan tundra wolf",
        "red wolf or maned wolf", "coyote", "dingo", "dhole", "African wild dog", "hyena", "red fox",
        "kit fox", "Arctic fox", "grey fox", "tabby cat", "tiger cat", "Persian cat", "Siamese cat",
        "Egyptian Mau", "cougar", "lynx", "leopard", "snow leopard", "jaguar", "lion", "tiger",
        "cheetah", "brown bear", "American black bear", "polar bear", "sloth bear", "mongoose",
        "meerkat", "tiger beetle", "ladybug", "ground beetle", "longhorn beetle", "leaf beetle",
        "dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant", "grasshopper",
        "cricket insect", "stick insect", "cockroach", "praying mantis", "cicada", "leafhopper",
        "lacewing", "dragonfly", "damselfly", "red admiral butterfly", "ringlet butterfly",
        "monarch butterfly", "small white butterfly", "sulphur butterfly", "gossamer-winged butterfly",
        "starfish", "sea urchin", "sea cucumber", "cottontail rabbit", "hare", "Angora rabbit",
        "hamster", "porcupine", "fox squirrel", "marmot", "beaver", "guinea pig", "common sorrel horse",
        "zebra", "pig", "wild boar", "warthog", "hippopotamus", "ox", "water buffalo", "bison",
        "ram (adult male sheep)", "bighorn sheep", "Alpine ibex", "hartebeest", "impala (antelope)",
        "gazelle", "arabian camel", "llama", "weasel", "mink", "European polecat",
        "black-footed ferret", "otter", "skunk", "badger", "armadillo", "three-toed sloth", "orangutan",
        "gorilla", "chimpanzee", "gibbon", "siamang", "guenon", "patas monkey", "baboon", "macaque",
        "langur", "black-and-white colobus", "proboscis monkey", "marmoset", "white-headed capuchin",
        "howler monkey", "titi monkey", "Geoffroy's spider monkey", "common squirrel monkey",
        "ring-tailed lemur", "indri", "Asian elephant", "African bush elephant", "red panda",
        "giant panda", "snoek fish", "eel", "silver salmon", "rock beauty fish", "clownfish",
        "sturgeon", "gar fish", "lionfish", "pufferfish", "abacus", "abaya", "academic gown",
        "accordion", "acoustic guitar", "aircraft carrier", "airliner", "airship", "altar", "ambulance",
        "amphibious vehicle", "analog clock", "apiary", "apron", "trash can", "assault rifle",
        "backpack", "bakery", "balance beam", "balloon", "ballpoint pen", "Band-Aid", "banjo",
        "baluster / handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel",
        "wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "swimming cap", "bath towel",
        "bathtub", "station wagon", "lighthouse", "beaker", "military hat (bearskin or shako)",
        "beer bottle", "beer glass", "bell tower", "baby bib", "tandem bicycle", "bikini",
        "ring binder", "binoculars", "birdhouse", "boathouse", "bobsleigh", "bolo tie", "poke bonnet",
        "bookcase", "bookstore", "bottle cap", "hunting bow", "bow tie", "brass memorial plaque", "bra",
        "breakwater", "breastplate", "broom", "bucket", "buckle", "bulletproof vest",
        "high-speed train", "butcher shop", "taxicab", "cauldron", "candle", "cannon", "canoe",
        "can opener", "cardigan", "car mirror", "carousel", "tool kit", "cardboard box / carton",
        "car wheel", "automated teller machine", "cassette", "cassette player", "castle", "catamaran",
        "CD player", "cello", "mobile phone", "chain", "chain-link fence", "chain mail", "chainsaw",
        "storage chest", "chiffonier", "bell or wind chime", "china cabinet", "Christmas stocking",
        "church", "movie theater", "cleaver", "cliff dwelling", "cloak", "clogs", "cocktail shaker",
        "coffee mug", "coffeemaker", "spiral or coil", "combination lock", "computer keyboard",
        "candy store", "container ship", "convertible", "corkscrew", "cornet", "cowboy boot",
        "cowboy hat", "cradle", "construction crane", "crash helmet", "crate", "infant bed",
        "Crock Pot", "croquet ball", "crutch", "cuirass", "dam", "desk", "desktop computer",
        "rotary dial telephone", "diaper", "digital clock", "digital watch", "dining table",
        "dishcloth", "dishwasher", "disc brake", "dock", "dog sled", "dome", "doormat", "drilling rig",
        "drum", "drumstick", "dumbbell", "Dutch oven", "electric fan", "electric guitar",
        "electric locomotive", "entertainment center", "envelope", "espresso machine", "face powder",
        "feather boa", "filing cabinet", "fireboat", "fire truck", "fire screen", "flagpole", "flute",
        "folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster bed",
        "freight car", "French horn", "frying pan", "fur coat", "garbage truck",
        "gas mask or respirator", "gas pump", "goblet", "go-kart", "golf ball", "golf cart", "gondola",
        "gong", "gown", "grand piano", "greenhouse", "radiator grille", "grocery store", "guillotine",
        "hair clip", "hair spray", "half-track", "hammer", "hamper", "hair dryer", "hand-held computer",
        "handkerchief", "hard disk drive", "harmonica", "harp", "combine harvester", "hatchet",
        "holster", "home theater", "honeycomb", "hook", "hoop skirt", "gymnastic horizontal bar",
        "horse-drawn vehicle", "hourglass", "iPod", "clothes iron", "carved pumpkin", "jeans", "jeep",
        "T-shirt", "jigsaw puzzle", "rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat",
        "ladle", "lampshade", "laptop computer", "lawn mower", "lens cap", "letter opener", "library",
        "lifeboat", "lighter", "limousine", "ocean liner", "lipstick", "slip-on shoe", "lotion",
        "music speaker", "loupe magnifying glass", "sawmill", "magnetic compass", "messenger bag",
        "mailbox", "tights", "one-piece bathing suit", "manhole cover", "maraca", "marimba", "mask",
        "matchstick", "maypole", "maze", "measuring cup", "medicine cabinet", "megalith", "microphone",
        "microwave oven", "military uniform", "milk can", "minibus", "miniskirt", "minivan", "missile",
        "mitten", "mixing bowl", "mobile home", "ford model t", "modem", "monastery", "monitor",
        "moped", "mortar and pestle", "graduation cap", "mosque", "mosquito net", "vespa",
        "mountain bike", "tent", "computer mouse", "mousetrap", "moving van", "muzzle", "metal nail",
        "neck brace", "necklace", "baby pacifier", "notebook computer", "obelisk", "oboe", "ocarina",
        "odometer", "oil filter", "pipe organ", "oscilloscope", "overskirt", "bullock cart",
        "oxygen mask", "product packet / packaging", "paddle", "paddle wheel", "padlock", "paintbrush",
        "pajamas", "palace", "pan flute", "paper towel", "parachute", "parallel bars", "park bench",
        "parking meter", "railroad car", "patio", "payphone", "pedestal", "pencil case",
        "pencil sharpener", "perfume", "Petri dish", "photocopier", "plectrum", "Pickelhaube",
        "picket fence", "pickup truck", "pier", "piggy bank", "pill bottle", "pillow", "ping-pong ball",
        "pinwheel", "pirate ship", "drink pitcher", "block plane", "planetarium", "plastic bag",
        "plate rack", "farm plow", "plunger", "Polaroid camera", "pole", "police van", "poncho",
        "pool table", "soda bottle", "plant pot", "potter's wheel", "power drill", "prayer rug",
        "printer", "prison", "missile", "projector", "hockey puck", "punching bag", "purse", "quill",
        "quilt", "race car", "racket", "radiator", "radio", "radio telescope", "rain barrel",
        "recreational vehicle", "fishing casting reel", "reflex camera", "refrigerator",
        "remote control", "restaurant", "revolver", "rifle", "rocking chair", "rotisserie", "eraser",
        "rugby ball", "ruler measuring stick", "sneaker", "safe", "safety pin", "salt shaker", "sandal",
        "sarong", "saxophone", "scabbard", "weighing scale", "school bus", "schooner", "scoreboard",
        "CRT monitor", "screw", "screwdriver", "seat belt", "sewing machine", "shield", "shoe store",
        "shoji screen / room divider", "shopping basket", "shopping cart", "shovel", "shower cap",
        "shower curtain", "ski", "balaclava ski mask", "sleeping bag", "slide rule", "sliding door",
        "slot machine", "snorkel", "snowmobile", "snowplow", "soap dispenser", "soccer ball", "sock",
        "solar thermal collector", "sombrero", "soup bowl", "keyboard space bar", "space heater",
        "space shuttle", "spatula", "motorboat", "spider web", "spindle", "sports car", "spotlight",
        "stage", "steam locomotive", "through arch bridge", "steel drum", "stethoscope", "scarf",
        "stone wall", "stopwatch", "stove", "strainer", "tram", "stretcher", "couch", "stupa",
        "submarine", "suit", "sundial", "sunglasses", "sunglasses", "sunscreen", "suspension bridge",
        "mop", "sweatshirt", "swim trunks / shorts", "swing", "electrical switch", "syringe",
        "table lamp", "tank", "tape player", "teapot", "teddy bear", "television", "tennis ball",
        "thatched roof", "front curtain", "thimble", "threshing machine", "throne", "tile roof",
        "toaster", "tobacco shop", "toilet seat", "torch", "totem pole", "tow truck", "toy store",
        "tractor", "semi-trailer truck", "tray", "trench coat", "tricycle", "trimaran", "tripod",
        "triumphal arch", "trolleybus", "trombone", "hot tub", "turnstile", "typewriter keyboard",
        "umbrella", "unicycle", "upright piano", "vacuum cleaner", "vase", "vaulted or arched ceiling",
        "velvet fabric", "vending machine", "vestment", "viaduct", "violin", "volleyball",
        "waffle iron", "wall clock", "wallet", "wardrobe", "military aircraft", "sink",
        "washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle",
        "hair wig", "window screen", "window shade", "Windsor tie", "wine bottle", "airplane wing",
        "wok", "wooden spoon", "wool", "split-rail fence", "shipwreck", "sailboat", "yurt", "website",
        "comic book", "crossword", "traffic or street sign", "traffic light", "dust jacket", "menu",
        "plate", "guacamole", "consomme", "hot pot", "trifle", "ice cream", "popsicle", "baguette",
        "bagel", "pretzel", "cheeseburger", "hot dog", "mashed potatoes", "cabbage", "broccoli",
        "cauliflower", "zucchini", "spaghetti squash", "acorn squash", "butternut squash", "cucumber",
        "artichoke", "bell pepper", "cardoon", "mushroom", "Granny Smith apple", "strawberry", "orange",
        "lemon", "fig", "pineapple", "banana", "jackfruit", "cherimoya (custard apple)", "pomegranate",
        "hay", "carbonara", "chocolate syrup", "dough", "meatloaf", "pizza", "pot pie", "burrito",
        "red wine", "espresso", "tea cup", "eggnog", "mountain", "bubble", "cliff", "coral reef",
        "geyser", "lakeshore", "promontory", "sandbar", "beach", "valley", "volcano", "baseball player",
        "bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper", "corn", "acorn",
        "rose hip", "horse chestnut seed", "coral fungus", "agaric", "gyromitra", "stinkhorn mushroom",
        "earth star fungus", "hen of the woods mushroom", "bolete", "corn cob", "toilet paper"]

    # cifar100_classes = [name for name in os.listdir(args.data_path)
    #                     if os.path.isdir(os.path.join(args.data_path, name))]

    # Single-prompt template (unused below; the 7-template ensemble is used instead).
    imagenet_single_template = [
        'a photo of a {}.',
    ]

    # The 7-prompt ensemble from the CLIP prompt-engineering notebook.
    imagenet_7_templates = [
        'itap of a {}.',
        'a origami {}.',
        'a bad photo of the {}.',
        'a photo of the large {}.',
        'a {} in a video game.',
        'art of the {}.',
        'a photo of the small {}.',
    ]

    print('load pre-trained model')
    model, preprocess = clip.load(args.arch)
    model = model.cuda()
    model.eval()

    print('load data')
    valdir = os.path.join(args.data_path, 'val')
    # valdir = os.path.join(args.data_path, '')
    val_set = datasets.ImageFolder(valdir, transform=preprocess)
    loader = torch.utils.data.DataLoader(val_set, batch_size=args.batch_size, num_workers=args.workers)
    # Pre-extract L2-normalized CLIP image features for the entire validation set
    # so the online loop below is pure feature arithmetic (no re-encoding).
    with torch.no_grad():
        image_feat = []
        image_label = []
        for i, (images, target) in enumerate(loader):
            images = images.cuda()
            target = target.cuda()
            image_features = model.encode_image(images)
            image_feat.append(F.normalize(image_features, dim=1))
            image_label.append(target)
        image_feat = torch.cat(image_feat, dim=0)
        image_label = torch.cat(image_label, dim=0)
        n = len(image_label)
        image_feat = image_feat.float()

    print('obtain text proxy')
    # text_classifier: (feat_dim, num_classes) matrix of prompt-ensemble text embeddings.
    text_classifier = zeroshot_classifier(clip, model, imagenet_classes, imagenet_7_templates)
    text_classifier = text_classifier.float()
    logits_t = image_feat @ text_classifier
    # Baseline: plain zero-shot accuracy with the fixed text proxy.
    acc1, acc5 = accuracy(logits_t, image_label, topk=(1, 5))
    top1 = (acc1 / n) * 100
    print(f'accuracy with text proxy: {top1:.2f}')

    print('online zero-shot transfer: repeat {} times'.format(args.repeat))
    num_class = len(torch.unique(image_label))
    acc_onzeta = torch.zeros(args.repeat).cuda()
    acc_onlab = torch.zeros(args.repeat).cuda()
    acc_ls = torch.zeros(args.repeat).cuda()
    # NOTE(review): `iter` shadows the builtin of the same name inside this function.
    for iter in range(args.repeat):
        # Random arrival order: samples are processed one at a time as a stream.
        idx = torch.randperm(n).cuda()
        combo_label = torch.zeros(n, num_class).cuda()   # mixed vision+text predictions, row t = t-th arrival
        text_label = torch.zeros(n, num_class).cuda()    # rho-adjusted text predictions, row t = t-th arrival
        w = text_classifier.clone()                      # online vision proxy, initialized from the text proxy
        rho = torch.zeros(num_class).cuda()              # per-class dual variable steering the label distribution
        for i in range(n):
            # Decaying step sizes (1/sqrt(t)) for proxy and dual updates.
            lr = args.cw / math.sqrt(i + 1)
            rlr = args.cr / math.sqrt(i + 1)
            # Mixing weight ramps from ~0 toward args.beta as more samples arrive.
            beta = args.beta * math.sqrt((i + 1) / n)
            x = image_feat[idx[i], :]
            # Text prediction, reweighted by exp(rho) and renormalized.
            tlabel = F.softmax(x @ text_classifier / args.tau_t, dim=0)
            tlabel = tlabel * torch.exp(rho)
            tlabel /= torch.sum(tlabel)
            # Projected dual ascent: pull class usage toward alpha / num_class, clip at 0.
            rho -= rlr * (tlabel - args.alpha / num_class)
            rho[rho < 0] = 0
            text_label[i, :] = tlabel
            vision_label = F.softmax(x @ w / args.tau_i, dim=0)
            combo_label[i, :] = beta * vision_label + (1 - beta) * tlabel
            # MAPLS - EM Algorithm
            # NOTE(review): this EM pass runs on EVERY sample over the full
            # (mostly still-zero) combo_label matrix — O(n) EM runs total; looks
            # like debug placement rather than the intended once-per-repeat call
            # (cf. the commented-out post-loop copy below) — confirm.
            pz = np.full(len(imagenet_classes), 1.0 / len(imagenet_classes))
            qy = mapls(combo_label, pz=pz, qy_mode="soft", max_iter=50, lam=lam)

            w_mapls = np.array(qy) / np.array(pz)
            # NOTE(review): after the first iteration this permanently moves
            # combo_label to CPU, so later row writes copy CUDA tensors across
            # devices each step — verify this is intentional.
            if combo_label.is_cuda:
                combo_label = combo_label.cpu()
            qy_probs = lsc(combo_label, 1.0 / w_mapls)
            # MAPLS - EM Algorithm
            # Online proxy update: rank-1 gradient pulling vision toward text predictions.
            grad = torch.outer(x, vision_label - tlabel)
            # v = beta_momentum * v + (1 - beta_momentum) * grad
            w -= (lr / args.tau_i) * grad
            w = F.normalize(w, dim=0)  # if normalization is desired
        # Accuracies over the streamed order (labels permuted by idx to match rows).
        acc1, acc5 = accuracy(text_label, image_label[idx], topk=(1, 5))
        acc_onlab[iter] = (acc1 / n) * 100
        acc1, acc5 = accuracy(combo_label, image_label[idx], topk=(1, 5))
        # # MAPLS - EM Algorithm
        # pz = np.full(len(imagenet_classes), 1.0 / len(imagenet_classes))
        # qy = mapls(combo_label, pz = pz, qy_mode = "soft", max_iter = 100, lam = lam) # FIXME why return nan
        #
        # w = np.array(qy) / np.array(pz)
        # if combo_label.is_cuda:
        #     combo_label_cpu = combo_label.cpu()
        # qy_probs = lsc(combo_label_cpu, 1.0/w)
        # # MAPLS - EM Algorithm
        # qy_probs holds the last in-loop lsc() result; assumed to be a numpy array here.
        qy_probs = torch.from_numpy(qy_probs)
        acc1_ls, acc5_ls = accuracy(qy_probs, image_label[idx], topk=(1, 5))

        acc_onzeta[iter] = (acc1 / n) * 100
        acc_ls[iter] = (acc1_ls / n) * 100
    print('mean acc of onlab is: {:.2f}'.format(torch.mean(acc_onlab)))
    print('mean acc of onzeta is: {:.2f}'.format(torch.mean(acc_onzeta)))
    print('mean acc of MAPLS is: {:.2f}'.format(torch.mean(acc_ls)))
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def zeroshot_classifier(clip, model, classnames, templates):
    """Build a zero-shot text classifier from prompt ensembles.

    For each class name, every template is filled in, tokenized, and encoded
    with the CLIP text encoder; the per-prompt embeddings are L2-normalized,
    averaged, and the mean is normalized again. Returns a (feat_dim,
    num_classes) CUDA tensor whose columns are the class embeddings.
    """
    with torch.no_grad():
        columns = []
        for name in classnames:
            prompts = [tpl.format(name) for tpl in templates]
            tokens = clip.tokenize(prompts).cuda()
            embeddings = model.encode_text(tokens)
            embeddings /= embeddings.norm(dim=-1, keepdim=True)
            mean_embedding = embeddings.mean(dim=0)
            mean_embedding /= mean_embedding.norm()
            columns.append(mean_embedding)
        return torch.stack(columns, dim=1).cuda()
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
def accuracy(output, target, topk=(1,)):
    """Count correct predictions at each cutoff in `topk`.

    Args:
        output: (num_samples, num_classes) score tensor.
        target: (num_samples,) integer label tensor.
        topk: cutoffs k; for each, a sample counts as correct when its true
            label appears among the k highest-scoring classes.

    Returns:
        List of floats, one raw correct-count per k (not normalized by n).
    """
    kmax = max(topk)
    # (kmax, num_samples) indices of the top-kmax classes per sample.
    top_idx = output.topk(kmax, 1, True, True)[1].t().cpu()
    hits = top_idx.eq(target.cpu().view(1, -1).expand_as(top_idx))
    return [float(hits[:k].reshape(-1).float().sum().item()) for k in topk]
|
| 338 |
+
|
| 339 |
+
|
| 340 |
+
if __name__ == '__main__':
    # main()

    # Candidate values for the MAPLS lambda sweep; currently a single value.
    # lams = [0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1]
    lams = [0.7]
    for lam in lams:
        main(lam)
|
| 347 |
+
|
OnZeta/main_online_imagenet_mapls_lame.py
ADDED
|
@@ -0,0 +1,366 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) Alibaba Group
|
| 2 |
+
import argparse
|
| 3 |
+
import torch
|
| 4 |
+
import torchvision.datasets as datasets
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
import clip
|
| 7 |
+
import os
|
| 8 |
+
import math
|
| 9 |
+
import numpy as np
|
| 10 |
+
|
| 11 |
+
from MAPLS.mapls import mapls
|
| 12 |
+
from MAPLS.common import lsc
|
| 13 |
+
from lame.lame import LAME
|
| 14 |
+
|
| 15 |
+
# Supported CLIP backbones; the chosen one is passed to clip.load() in main().
model_names = ['RN50', 'ViT-B/32', 'ViT-B/16', 'ViT-L/14', 'ViT-L/14@336px']
parser = argparse.ArgumentParser(description='OnZeta for ImageNet')
parser.add_argument('--data_path', default='/home/lt_test/projects/datasets/ImageNet/', type=str,
                    help='dataset path')
parser.add_argument('-a', '--arch', metavar='ARCH', default='RN50',
                    choices=model_names,
                    help='model architecture: ' +
                         ' | '.join(model_names) +
                         ' (default: RN50)')
parser.add_argument('-j', '--workers', default=8, type=int, metavar='N',
                    help='number of data loading workers (default: 8)')
parser.add_argument('-b', '--batch-size', default=256, type=int,
                    metavar='N',
                    help='mini-batch size (default: 256)')
# tau_t / tau_i: softmax temperatures for the text proxy and the online vision proxy.
parser.add_argument('--tau_t', default=0.01, type=float)
parser.add_argument('--tau_i', default=0.04, type=float)
# cw / cr: step-size constants for the vision-proxy and dual-variable updates.
parser.add_argument('--cw', default=0.5, type=float)
parser.add_argument('--cr', default=20, type=float)
# alpha: target class-proportion mass; beta: cap of the vision/text mixing weight
# (NOTE: main() overwrites args.beta with its `lam` argument in this script).
parser.add_argument('--alpha', default=1, type=float)
parser.add_argument('--beta', default=0.8, type=float)
# repeat: number of random arrival orders to average accuracy over.
parser.add_argument('--repeat', default=5, type=int)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def main(lam):
    """Run OnZeta online zero-shot transfer on ImageNet-val with a CLIP backbone,
    then post-process the accumulated online predictions with MAPLS and LAME.

    Parameters
    ----------
    lam : float
        Value assigned to ``args.beta`` (the vision/text mixing weight sweep value);
        it overrides the command-line ``--beta``.

    Side effects: loads the CLIP model and the ImageNet val split, runs entirely
    on CUDA, and prints accuracies; returns nothing.
    """
    args = parser.parse_args()
    args.beta = lam  # the sweep value wins over the command-line --beta
    print(args)
    # OpenAI's cleaned-up ImageNet-1k class names (order must match ImageFolder's class order).
    imagenet_classes = ["tench", "goldfish", "great white shark", "tiger shark", "hammerhead shark", "electric ray",
                        "stingray", "rooster", "hen", "ostrich", "brambling", "goldfinch", "house finch", "junco",
                        "indigo bunting", "American robin", "bulbul", "jay", "magpie", "chickadee", "American dipper",
                        "kite (bird of prey)", "bald eagle", "vulture", "great grey owl", "fire salamander",
                        "smooth newt", "newt", "spotted salamander", "axolotl", "American bullfrog", "tree frog",
                        "tailed frog", "loggerhead sea turtle", "leatherback sea turtle", "mud turtle", "terrapin",
                        "box turtle", "banded gecko", "green iguana", "Carolina anole",
                        "desert grassland whiptail lizard", "agama", "frilled-necked lizard", "alligator lizard",
                        "Gila monster", "European green lizard", "chameleon", "Komodo dragon", "Nile crocodile",
                        "American alligator", "triceratops", "worm snake", "ring-necked snake",
                        "eastern hog-nosed snake", "smooth green snake", "kingsnake", "garter snake", "water snake",
                        "vine snake", "night snake", "boa constrictor", "African rock python", "Indian cobra",
                        "green mamba", "sea snake", "Saharan horned viper", "eastern diamondback rattlesnake",
                        "sidewinder rattlesnake", "trilobite", "harvestman", "scorpion", "yellow garden spider",
                        "barn spider", "European garden spider", "southern black widow", "tarantula", "wolf spider",
                        "tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse", "prairie grouse", "peafowl",
                        "quail", "partridge", "african grey parrot", "macaw", "sulphur-crested cockatoo", "lorikeet",
                        "coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "duck",
                        "red-breasted merganser", "goose", "black swan", "tusker", "echidna", "platypus", "wallaby",
                        "koala", "wombat", "jellyfish", "sea anemone", "brain coral", "flatworm", "nematode", "conch",
                        "snail", "slug", "sea slug", "chiton", "chambered nautilus", "Dungeness crab", "rock crab",
                        "fiddler crab", "red king crab", "American lobster", "spiny lobster", "crayfish", "hermit crab",
                        "isopod", "white stork", "black stork", "spoonbill", "flamingo", "little blue heron",
                        "great egret", "bittern bird", "crane bird", "limpkin", "common gallinule", "American coot",
                        "bustard", "ruddy turnstone", "dunlin", "common redshank", "dowitcher", "oystercatcher",
                        "pelican", "king penguin", "albatross", "grey whale", "killer whale", "dugong", "sea lion",
                        "Chihuahua", "Japanese Chin", "Maltese", "Pekingese", "Shih Tzu", "King Charles Spaniel",
                        "Papillon", "toy terrier", "Rhodesian Ridgeback", "Afghan Hound", "Basset Hound", "Beagle",
                        "Bloodhound", "Bluetick Coonhound", "Black and Tan Coonhound", "Treeing Walker Coonhound",
                        "English foxhound", "Redbone Coonhound", "borzoi", "Irish Wolfhound", "Italian Greyhound",
                        "Whippet", "Ibizan Hound", "Norwegian Elkhound", "Otterhound", "Saluki", "Scottish Deerhound",
                        "Weimaraner", "Staffordshire Bull Terrier", "American Staffordshire Terrier",
                        "Bedlington Terrier", "Border Terrier", "Kerry Blue Terrier", "Irish Terrier",
                        "Norfolk Terrier", "Norwich Terrier", "Yorkshire Terrier", "Wire Fox Terrier",
                        "Lakeland Terrier", "Sealyham Terrier", "Airedale Terrier", "Cairn Terrier",
                        "Australian Terrier", "Dandie Dinmont Terrier", "Boston Terrier", "Miniature Schnauzer",
                        "Giant Schnauzer", "Standard Schnauzer", "Scottish Terrier", "Tibetan Terrier",
                        "Australian Silky Terrier", "Soft-coated Wheaten Terrier", "West Highland White Terrier",
                        "Lhasa Apso", "Flat-Coated Retriever", "Curly-coated Retriever", "Golden Retriever",
                        "Labrador Retriever", "Chesapeake Bay Retriever", "German Shorthaired Pointer", "Vizsla",
                        "English Setter", "Irish Setter", "Gordon Setter", "Brittany dog", "Clumber Spaniel",
                        "English Springer Spaniel", "Welsh Springer Spaniel", "Cocker Spaniel", "Sussex Spaniel",
                        "Irish Water Spaniel", "Kuvasz", "Schipperke", "Groenendael dog", "Malinois", "Briard",
                        "Australian Kelpie", "Komondor", "Old English Sheepdog", "Shetland Sheepdog", "collie",
                        "Border Collie", "Bouvier des Flandres dog", "Rottweiler", "German Shepherd Dog", "Dobermann",
                        "Miniature Pinscher", "Greater Swiss Mountain Dog", "Bernese Mountain Dog",
                        "Appenzeller Sennenhund", "Entlebucher Sennenhund", "Boxer", "Bullmastiff", "Tibetan Mastiff",
                        "French Bulldog", "Great Dane", "St. Bernard", "husky", "Alaskan Malamute", "Siberian Husky",
                        "Dalmatian", "Affenpinscher", "Basenji", "pug", "Leonberger", "Newfoundland dog",
                        "Great Pyrenees dog", "Samoyed", "Pomeranian", "Chow Chow", "Keeshond", "brussels griffon",
                        "Pembroke Welsh Corgi", "Cardigan Welsh Corgi", "Toy Poodle", "Miniature Poodle",
                        "Standard Poodle", "Mexican hairless dog (xoloitzcuintli)", "grey wolf", "Alaskan tundra wolf",
                        "red wolf or maned wolf", "coyote", "dingo", "dhole", "African wild dog", "hyena", "red fox",
                        "kit fox", "Arctic fox", "grey fox", "tabby cat", "tiger cat", "Persian cat", "Siamese cat",
                        "Egyptian Mau", "cougar", "lynx", "leopard", "snow leopard", "jaguar", "lion", "tiger",
                        "cheetah", "brown bear", "American black bear", "polar bear", "sloth bear", "mongoose",
                        "meerkat", "tiger beetle", "ladybug", "ground beetle", "longhorn beetle", "leaf beetle",
                        "dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant", "grasshopper",
                        "cricket insect", "stick insect", "cockroach", "praying mantis", "cicada", "leafhopper",
                        "lacewing", "dragonfly", "damselfly", "red admiral butterfly", "ringlet butterfly",
                        "monarch butterfly", "small white butterfly", "sulphur butterfly", "gossamer-winged butterfly",
                        "starfish", "sea urchin", "sea cucumber", "cottontail rabbit", "hare", "Angora rabbit",
                        "hamster", "porcupine", "fox squirrel", "marmot", "beaver", "guinea pig", "common sorrel horse",
                        "zebra", "pig", "wild boar", "warthog", "hippopotamus", "ox", "water buffalo", "bison",
                        "ram (adult male sheep)", "bighorn sheep", "Alpine ibex", "hartebeest", "impala (antelope)",
                        "gazelle", "arabian camel", "llama", "weasel", "mink", "European polecat",
                        "black-footed ferret", "otter", "skunk", "badger", "armadillo", "three-toed sloth", "orangutan",
                        "gorilla", "chimpanzee", "gibbon", "siamang", "guenon", "patas monkey", "baboon", "macaque",
                        "langur", "black-and-white colobus", "proboscis monkey", "marmoset", "white-headed capuchin",
                        "howler monkey", "titi monkey", "Geoffroy's spider monkey", "common squirrel monkey",
                        "ring-tailed lemur", "indri", "Asian elephant", "African bush elephant", "red panda",
                        "giant panda", "snoek fish", "eel", "silver salmon", "rock beauty fish", "clownfish",
                        "sturgeon", "gar fish", "lionfish", "pufferfish", "abacus", "abaya", "academic gown",
                        "accordion", "acoustic guitar", "aircraft carrier", "airliner", "airship", "altar", "ambulance",
                        "amphibious vehicle", "analog clock", "apiary", "apron", "trash can", "assault rifle",
                        "backpack", "bakery", "balance beam", "balloon", "ballpoint pen", "Band-Aid", "banjo",
                        "baluster / handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel",
                        "wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "swimming cap", "bath towel",
                        "bathtub", "station wagon", "lighthouse", "beaker", "military hat (bearskin or shako)",
                        "beer bottle", "beer glass", "bell tower", "baby bib", "tandem bicycle", "bikini",
                        "ring binder", "binoculars", "birdhouse", "boathouse", "bobsleigh", "bolo tie", "poke bonnet",
                        "bookcase", "bookstore", "bottle cap", "hunting bow", "bow tie", "brass memorial plaque", "bra",
                        "breakwater", "breastplate", "broom", "bucket", "buckle", "bulletproof vest",
                        "high-speed train", "butcher shop", "taxicab", "cauldron", "candle", "cannon", "canoe",
                        "can opener", "cardigan", "car mirror", "carousel", "tool kit", "cardboard box / carton",
                        "car wheel", "automated teller machine", "cassette", "cassette player", "castle", "catamaran",
                        "CD player", "cello", "mobile phone", "chain", "chain-link fence", "chain mail", "chainsaw",
                        "storage chest", "chiffonier", "bell or wind chime", "china cabinet", "Christmas stocking",
                        "church", "movie theater", "cleaver", "cliff dwelling", "cloak", "clogs", "cocktail shaker",
                        "coffee mug", "coffeemaker", "spiral or coil", "combination lock", "computer keyboard",
                        "candy store", "container ship", "convertible", "corkscrew", "cornet", "cowboy boot",
                        "cowboy hat", "cradle", "construction crane", "crash helmet", "crate", "infant bed",
                        "Crock Pot", "croquet ball", "crutch", "cuirass", "dam", "desk", "desktop computer",
                        "rotary dial telephone", "diaper", "digital clock", "digital watch", "dining table",
                        "dishcloth", "dishwasher", "disc brake", "dock", "dog sled", "dome", "doormat", "drilling rig",
                        "drum", "drumstick", "dumbbell", "Dutch oven", "electric fan", "electric guitar",
                        "electric locomotive", "entertainment center", "envelope", "espresso machine", "face powder",
                        "feather boa", "filing cabinet", "fireboat", "fire truck", "fire screen", "flagpole", "flute",
                        "folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster bed",
                        "freight car", "French horn", "frying pan", "fur coat", "garbage truck",
                        "gas mask or respirator", "gas pump", "goblet", "go-kart", "golf ball", "golf cart", "gondola",
                        "gong", "gown", "grand piano", "greenhouse", "radiator grille", "grocery store", "guillotine",
                        "hair clip", "hair spray", "half-track", "hammer", "hamper", "hair dryer", "hand-held computer",
                        "handkerchief", "hard disk drive", "harmonica", "harp", "combine harvester", "hatchet",
                        "holster", "home theater", "honeycomb", "hook", "hoop skirt", "gymnastic horizontal bar",
                        "horse-drawn vehicle", "hourglass", "iPod", "clothes iron", "carved pumpkin", "jeans", "jeep",
                        "T-shirt", "jigsaw puzzle", "rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat",
                        "ladle", "lampshade", "laptop computer", "lawn mower", "lens cap", "letter opener", "library",
                        "lifeboat", "lighter", "limousine", "ocean liner", "lipstick", "slip-on shoe", "lotion",
                        "music speaker", "loupe magnifying glass", "sawmill", "magnetic compass", "messenger bag",
                        "mailbox", "tights", "one-piece bathing suit", "manhole cover", "maraca", "marimba", "mask",
                        "matchstick", "maypole", "maze", "measuring cup", "medicine cabinet", "megalith", "microphone",
                        "microwave oven", "military uniform", "milk can", "minibus", "miniskirt", "minivan", "missile",
                        "mitten", "mixing bowl", "mobile home", "ford model t", "modem", "monastery", "monitor",
                        "moped", "mortar and pestle", "graduation cap", "mosque", "mosquito net", "vespa",
                        "mountain bike", "tent", "computer mouse", "mousetrap", "moving van", "muzzle", "metal nail",
                        "neck brace", "necklace", "baby pacifier", "notebook computer", "obelisk", "oboe", "ocarina",
                        "odometer", "oil filter", "pipe organ", "oscilloscope", "overskirt", "bullock cart",
                        "oxygen mask", "product packet / packaging", "paddle", "paddle wheel", "padlock", "paintbrush",
                        "pajamas", "palace", "pan flute", "paper towel", "parachute", "parallel bars", "park bench",
                        "parking meter", "railroad car", "patio", "payphone", "pedestal", "pencil case",
                        "pencil sharpener", "perfume", "Petri dish", "photocopier", "plectrum", "Pickelhaube",
                        "picket fence", "pickup truck", "pier", "piggy bank", "pill bottle", "pillow", "ping-pong ball",
                        "pinwheel", "pirate ship", "drink pitcher", "block plane", "planetarium", "plastic bag",
                        "plate rack", "farm plow", "plunger", "Polaroid camera", "pole", "police van", "poncho",
                        "pool table", "soda bottle", "plant pot", "potter's wheel", "power drill", "prayer rug",
                        "printer", "prison", "missile", "projector", "hockey puck", "punching bag", "purse", "quill",
                        "quilt", "race car", "racket", "radiator", "radio", "radio telescope", "rain barrel",
                        "recreational vehicle", "fishing casting reel", "reflex camera", "refrigerator",
                        "remote control", "restaurant", "revolver", "rifle", "rocking chair", "rotisserie", "eraser",
                        "rugby ball", "ruler measuring stick", "sneaker", "safe", "safety pin", "salt shaker", "sandal",
                        "sarong", "saxophone", "scabbard", "weighing scale", "school bus", "schooner", "scoreboard",
                        "CRT monitor", "screw", "screwdriver", "seat belt", "sewing machine", "shield", "shoe store",
                        "shoji screen / room divider", "shopping basket", "shopping cart", "shovel", "shower cap",
                        "shower curtain", "ski", "balaclava ski mask", "sleeping bag", "slide rule", "sliding door",
                        "slot machine", "snorkel", "snowmobile", "snowplow", "soap dispenser", "soccer ball", "sock",
                        "solar thermal collector", "sombrero", "soup bowl", "keyboard space bar", "space heater",
                        "space shuttle", "spatula", "motorboat", "spider web", "spindle", "sports car", "spotlight",
                        "stage", "steam locomotive", "through arch bridge", "steel drum", "stethoscope", "scarf",
                        "stone wall", "stopwatch", "stove", "strainer", "tram", "stretcher", "couch", "stupa",
                        "submarine", "suit", "sundial", "sunglasses", "sunglasses", "sunscreen", "suspension bridge",
                        "mop", "sweatshirt", "swim trunks / shorts", "swing", "electrical switch", "syringe",
                        "table lamp", "tank", "tape player", "teapot", "teddy bear", "television", "tennis ball",
                        "thatched roof", "front curtain", "thimble", "threshing machine", "throne", "tile roof",
                        "toaster", "tobacco shop", "toilet seat", "torch", "totem pole", "tow truck", "toy store",
                        "tractor", "semi-trailer truck", "tray", "trench coat", "tricycle", "trimaran", "tripod",
                        "triumphal arch", "trolleybus", "trombone", "hot tub", "turnstile", "typewriter keyboard",
                        "umbrella", "unicycle", "upright piano", "vacuum cleaner", "vase", "vaulted or arched ceiling",
                        "velvet fabric", "vending machine", "vestment", "viaduct", "violin", "volleyball",
                        "waffle iron", "wall clock", "wallet", "wardrobe", "military aircraft", "sink",
                        "washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle",
                        "hair wig", "window screen", "window shade", "Windsor tie", "wine bottle", "airplane wing",
                        "wok", "wooden spoon", "wool", "split-rail fence", "shipwreck", "sailboat", "yurt", "website",
                        "comic book", "crossword", "traffic or street sign", "traffic light", "dust jacket", "menu",
                        "plate", "guacamole", "consomme", "hot pot", "trifle", "ice cream", "popsicle", "baguette",
                        "bagel", "pretzel", "cheeseburger", "hot dog", "mashed potatoes", "cabbage", "broccoli",
                        "cauliflower", "zucchini", "spaghetti squash", "acorn squash", "butternut squash", "cucumber",
                        "artichoke", "bell pepper", "cardoon", "mushroom", "Granny Smith apple", "strawberry", "orange",
                        "lemon", "fig", "pineapple", "banana", "jackfruit", "cherimoya (custard apple)", "pomegranate",
                        "hay", "carbonara", "chocolate syrup", "dough", "meatloaf", "pizza", "pot pie", "burrito",
                        "red wine", "espresso", "tea cup", "eggnog", "mountain", "bubble", "cliff", "coral reef",
                        "geyser", "lakeshore", "promontory", "sandbar", "beach", "valley", "volcano", "baseball player",
                        "bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper", "corn", "acorn",
                        "rose hip", "horse chestnut seed", "coral fungus", "agaric", "gyromitra", "stinkhorn mushroom",
                        "earth star fungus", "hen of the woods mushroom", "bolete", "corn cob", "toilet paper"]

    # Kept for reference; not used below (the 7-template ensemble is used instead).
    imagenet_single_template = [
        'a photo of a {}.',
    ]

    # Prompt ensemble used to build the text proxies.
    imagenet_7_templates = [
        'itap of a {}.',
        'a origami {}.',
        'a bad photo of the {}.',
        'a photo of the large {}.',
        'a {} in a video game.',
        'art of the {}.',
        'a photo of the small {}.',
    ]

    print('load pre-trained model')
    model, preprocess = clip.load(args.arch)
    model = model.cuda()
    model.eval()

    print('load data')
    valdir = os.path.join(args.data_path, 'val')
    val_set = datasets.ImageFolder(valdir, transform=preprocess)
    loader = torch.utils.data.DataLoader(val_set, batch_size=args.batch_size, num_workers=args.workers)
    # Extract L2-normalized image features for the whole val split once up front.
    with torch.no_grad():
        image_feat = []
        image_label = []
        for i, (images, target) in enumerate(loader):
            images = images.cuda()
            target = target.cuda()
            image_features = model.encode_image(images)
            image_feat.append(F.normalize(image_features, dim=1))
            image_label.append(target)
    image_feat = torch.cat(image_feat, dim=0)
    image_label = torch.cat(image_label, dim=0)
    n = len(image_label)
    image_feat = image_feat.float()

    print('obtain text proxy')
    text_classifier = zeroshot_classifier(clip, model, imagenet_classes, imagenet_7_templates)
    text_classifier = text_classifier.float()
    logits_t = image_feat @ text_classifier
    acc1, acc5 = accuracy(logits_t, image_label, topk=(1, 5))
    top1 = (acc1 / n) * 100
    print(f'accuracy with text proxy: {top1:.2f}')

    print('online zero-shot transfer: repeat {} times'.format(args.repeat))
    num_class = len(torch.unique(image_label))
    acc_onzeta = torch.zeros(args.repeat).cuda()
    acc_onlab = torch.zeros(args.repeat).cuda()
    acc_ls = torch.zeros(args.repeat).cuda()
    acc_lame = torch.zeros(args.repeat).cuda()
    # `rep` instead of `iter`: avoid shadowing the builtin.
    for rep in range(args.repeat):
        idx = torch.randperm(n).cuda()
        combo_label = torch.zeros(n, num_class).cuda()
        text_label = torch.zeros(n, num_class).cuda()
        w = text_classifier.clone()  # online vision proxy, updated per sample
        rho = torch.zeros(num_class).cuda()  # dual variable enforcing the class-prior constraint
        for i in range(n):
            lr = args.cw / math.sqrt(i + 1)
            rlr = args.cr / math.sqrt(i + 1)
            # Trust in the vision proxy grows with the number of samples seen.
            beta = args.beta * math.sqrt((i + 1) / n)
            x = image_feat[idx[i], :]
            tlabel = F.softmax(x @ text_classifier / args.tau_t, dim=0)
            tlabel = tlabel * torch.exp(rho)
            tlabel /= torch.sum(tlabel)
            rho -= rlr * (tlabel - args.alpha / num_class)
            rho[rho < 0] = 0  # project the dual variable back to the non-negative orthant
            text_label[i, :] = tlabel
            vision_label = F.softmax(x @ w / args.tau_i, dim=0)
            combo_label[i, :] = beta * vision_label + (1 - beta) * tlabel
            grad = torch.outer(x, vision_label - tlabel)
            w -= (lr / args.tau_i) * grad
            w = F.normalize(w, dim=0)
        acc1, acc5 = accuracy(text_label, image_label[idx], topk=(1, 5))
        acc_onlab[rep] = (acc1 / n) * 100
        acc1, acc5 = accuracy(combo_label, image_label[idx], topk=(1, 5))

        # MAPLS - EM algorithm on the accumulated online predictions.
        pz = np.full(len(imagenet_classes), 1.0 / len(imagenet_classes))
        qy = mapls(combo_label, pz=pz, qy_mode="soft", max_iter=100, lam=0.6)

        # Prior ratio q(y)/p(z); renamed from `w` so it no longer shadows the proxy matrix.
        prior_ratio = np.array(qy) / np.array(pz)
        # BUGFIX: always take a CPU copy (the old code only defined it when
        # combo_label was on CUDA, so the CPU path raised NameError).
        combo_label_cpu = combo_label.cpu()
        qy_probs = lsc(combo_label_cpu, 1.0 / prior_ratio)
        acc1_ls, acc5_ls = accuracy(qy_probs, image_label[idx], topk=(1, 5))

        # LAME, processed in mini-batches to avoid OOM on the affinity matrix.
        lame = LAME()
        batch_size = 64
        num_batches = (combo_label.shape[0] + batch_size - 1) // batch_size
        # BUGFIX: combo_label rows follow the permuted stream order (row i belongs to
        # image idx[i]); the features handed to LAME must follow the same order,
        # otherwise probabilities and features are misaligned.
        feat_perm = image_feat[idx]
        all_lame_probs = []
        for batch_idx in range(num_batches):
            start_idx = batch_idx * batch_size
            end_idx = min((batch_idx + 1) * batch_size, combo_label.shape[0])
            combo_label_batch = combo_label[start_idx:end_idx].to("cuda:0")
            image_feat_batch = feat_perm[start_idx:end_idx].to("cuda:0")
            lame_prob = lame.run_step(combo_label_batch, image_feat_batch)
            all_lame_probs.append(lame_prob)
        lame_prob = torch.cat(all_lame_probs, dim=0)
        acc1_lame, acc5_lame = accuracy(lame_prob, image_label[idx], topk=(1, 5))

        acc_onzeta[rep] = (acc1 / n) * 100
        acc_ls[rep] = (acc1_ls / n) * 100
        acc_lame[rep] = (acc1_lame / n) * 100
    print('mean acc of onlab is: {:.2f}'.format(torch.mean(acc_onlab)))
    print('mean acc of onzeta is: {:.2f}'.format(torch.mean(acc_onzeta)))
    print('mean acc of MAPLS is: {:.2f}'.format(torch.mean(acc_ls)))
    print('mean acc of lame is: {:.2f}'.format(torch.mean(acc_lame)))
| 338 |
+
def zeroshot_classifier(clip, model, classnames, templates):
    """Build the text-proxy matrix for zero-shot classification.

    For every class name, format all prompt templates, encode them with the
    CLIP text encoder, average the L2-normalized embeddings, and renormalize.
    Returns a CUDA tensor of shape (embed_dim, num_classes): one column per class.
    """
    with torch.no_grad():
        columns = []
        for name in classnames:
            prompts = clip.tokenize([t.format(name) for t in templates]).cuda()
            embeddings = model.encode_text(prompts)
            embeddings = embeddings / embeddings.norm(dim=-1, keepdim=True)
            proxy = embeddings.mean(dim=0)
            columns.append(proxy / proxy.norm())
        weights = torch.stack(columns, dim=1).cuda()
    return weights
| 352 |
+
|
| 353 |
+
def accuracy(output, target, topk=(1,)):
    """Return the number of correct predictions for each k in ``topk``.

    ``output`` holds per-sample class scores, ``target`` the true class indices;
    the result is a list of float counts (not percentages), one per k.
    """
    largest_k = max(topk)
    # (largest_k, batch) matrix of predicted class indices, best first.
    ranked = output.topk(largest_k, 1, True, True)[1].t()
    ranked = ranked.cpu()
    target = target.cpu()
    hit_mask = ranked.eq(target.view(1, -1).expand_as(ranked))
    counts = []
    for k in topk:
        counts.append(float(hit_mask[:k].reshape(-1).float().sum(0, keepdim=True).cpu().numpy()))
    return counts
| 358 |
+
|
| 359 |
+
|
| 360 |
+
if __name__ == '__main__':
    # Sweep over mixing-weight values; currently a single setting.
    for beta_value in [0.6]:
        main(beta_value)
OnZeta/main_online_imagenet_mapls_nonlinear.py
ADDED
|
@@ -0,0 +1,368 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Copyright (c) Alibaba Group
import argparse
import torch
import torchvision.datasets as datasets
import torch.nn.functional as F
import clip
import os
import math
import numpy as np

# Project-local modules: MAPLS label-shift estimation/correction.
from MAPLS.mapls import mapls
from MAPLS.common import lsc

# CLIP backbones accepted by --arch.
model_names = ['RN50', 'ViT-B/32', 'ViT-B/16', 'ViT-L/14', 'ViT-L/14@336px']
parser = argparse.ArgumentParser(description='OnZeta for ImageNet')
parser.add_argument('--data_path', default='/home/lt_test/projects/datasets/ImageNet/', type=str,
                    help='dataset path')
parser.add_argument('-a', '--arch', metavar='ARCH', default='RN50',
                    choices=model_names,
                    help='model architecture: ' +
                         ' | '.join(model_names) +
                         ' (default: RN50)')
parser.add_argument('-j', '--workers', default=8, type=int, metavar='N',
                    help='number of data loading workers (default: 8)')
# NOTE(review): the default is 512 but the help text says 256 — confirm which is intended.
parser.add_argument('-b', '--batch-size', default=512, type=int,
                    metavar='N',
                    help='mini-batch size (default: 256)')
# OnZeta hyper-parameters (same roles as in the linear variant of this script).
parser.add_argument('--tau_t', default=0.01, type=float)  # softmax temperature for the text proxy
parser.add_argument('--tau_i', default=0.04, type=float)  # softmax temperature for the vision proxy
parser.add_argument('--cw', default=0.5, type=float)      # step-size constant for the proxy update
parser.add_argument('--cr', default=20, type=float)       # step-size constant for the dual variable rho
parser.add_argument('--alpha', default=1, type=float)     # target class-prior mass
parser.add_argument('--beta', default=0.8, type=float)    # vision/text mixing weight
parser.add_argument('--repeat', default=5, type=int)      # number of random stream orders to average
+
class VisionProxyMLP(torch.nn.Module):
    """Two-layer ReLU MLP used as a non-linear vision-proxy head."""

    def __init__(self, input_dim, hidden_dim, output_dim):
        super().__init__()
        # Submodule names kept identical (fc1/relu/fc2) so state_dict keys stay stable.
        self.fc1 = torch.nn.Linear(input_dim, hidden_dim)
        self.relu = torch.nn.ReLU()
        self.fc2 = torch.nn.Linear(hidden_dim, output_dim)

    def forward(self, x):
        """Map features of shape (..., input_dim) to logits of shape (..., output_dim)."""
        hidden = self.relu(self.fc1(x))
        return self.fc2(hidden)
+
|
| 49 |
+
def main(lam):
|
| 50 |
+
|
| 51 |
+
args = parser.parse_args()
|
| 52 |
+
print(args)
|
| 53 |
+
imagenet_classes = ["tench", "goldfish", "great white shark", "tiger shark", "hammerhead shark", "electric ray",
|
| 54 |
+
"stingray", "rooster", "hen", "ostrich", "brambling", "goldfinch", "house finch", "junco",
|
| 55 |
+
"indigo bunting", "American robin", "bulbul", "jay", "magpie", "chickadee", "American dipper",
|
| 56 |
+
"kite (bird of prey)", "bald eagle", "vulture", "great grey owl", "fire salamander",
|
| 57 |
+
"smooth newt", "newt", "spotted salamander", "axolotl", "American bullfrog", "tree frog",
|
| 58 |
+
"tailed frog", "loggerhead sea turtle", "leatherback sea turtle", "mud turtle", "terrapin",
|
| 59 |
+
"box turtle", "banded gecko", "green iguana", "Carolina anole",
|
| 60 |
+
"desert grassland whiptail lizard", "agama", "frilled-necked lizard", "alligator lizard",
|
| 61 |
+
"Gila monster", "European green lizard", "chameleon", "Komodo dragon", "Nile crocodile",
|
| 62 |
+
"American alligator", "triceratops", "worm snake", "ring-necked snake",
|
| 63 |
+
"eastern hog-nosed snake", "smooth green snake", "kingsnake", "garter snake", "water snake",
|
| 64 |
+
"vine snake", "night snake", "boa constrictor", "African rock python", "Indian cobra",
|
| 65 |
+
"green mamba", "sea snake", "Saharan horned viper", "eastern diamondback rattlesnake",
|
| 66 |
+
"sidewinder rattlesnake", "trilobite", "harvestman", "scorpion", "yellow garden spider",
|
| 67 |
+
"barn spider", "European garden spider", "southern black widow", "tarantula", "wolf spider",
|
| 68 |
+
"tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse", "prairie grouse", "peafowl",
|
| 69 |
+
"quail", "partridge", "african grey parrot", "macaw", "sulphur-crested cockatoo", "lorikeet",
|
| 70 |
+
"coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "duck",
|
| 71 |
+
"red-breasted merganser", "goose", "black swan", "tusker", "echidna", "platypus", "wallaby",
|
| 72 |
+
"koala", "wombat", "jellyfish", "sea anemone", "brain coral", "flatworm", "nematode", "conch",
|
| 73 |
+
"snail", "slug", "sea slug", "chiton", "chambered nautilus", "Dungeness crab", "rock crab",
|
| 74 |
+
"fiddler crab", "red king crab", "American lobster", "spiny lobster", "crayfish", "hermit crab",
|
| 75 |
+
"isopod", "white stork", "black stork", "spoonbill", "flamingo", "little blue heron",
|
| 76 |
+
"great egret", "bittern bird", "crane bird", "limpkin", "common gallinule", "American coot",
|
| 77 |
+
"bustard", "ruddy turnstone", "dunlin", "common redshank", "dowitcher", "oystercatcher",
|
| 78 |
+
"pelican", "king penguin", "albatross", "grey whale", "killer whale", "dugong", "sea lion",
|
| 79 |
+
"Chihuahua", "Japanese Chin", "Maltese", "Pekingese", "Shih Tzu", "King Charles Spaniel",
|
| 80 |
+
"Papillon", "toy terrier", "Rhodesian Ridgeback", "Afghan Hound", "Basset Hound", "Beagle",
|
| 81 |
+
"Bloodhound", "Bluetick Coonhound", "Black and Tan Coonhound", "Treeing Walker Coonhound",
|
| 82 |
+
"English foxhound", "Redbone Coonhound", "borzoi", "Irish Wolfhound", "Italian Greyhound",
|
| 83 |
+
"Whippet", "Ibizan Hound", "Norwegian Elkhound", "Otterhound", "Saluki", "Scottish Deerhound",
|
| 84 |
+
"Weimaraner", "Staffordshire Bull Terrier", "American Staffordshire Terrier",
|
| 85 |
+
"Bedlington Terrier", "Border Terrier", "Kerry Blue Terrier", "Irish Terrier",
|
| 86 |
+
"Norfolk Terrier", "Norwich Terrier", "Yorkshire Terrier", "Wire Fox Terrier",
|
| 87 |
+
"Lakeland Terrier", "Sealyham Terrier", "Airedale Terrier", "Cairn Terrier",
|
| 88 |
+
"Australian Terrier", "Dandie Dinmont Terrier", "Boston Terrier", "Miniature Schnauzer",
|
| 89 |
+
"Giant Schnauzer", "Standard Schnauzer", "Scottish Terrier", "Tibetan Terrier",
|
| 90 |
+
"Australian Silky Terrier", "Soft-coated Wheaten Terrier", "West Highland White Terrier",
|
| 91 |
+
"Lhasa Apso", "Flat-Coated Retriever", "Curly-coated Retriever", "Golden Retriever",
|
| 92 |
+
"Labrador Retriever", "Chesapeake Bay Retriever", "German Shorthaired Pointer", "Vizsla",
|
| 93 |
+
"English Setter", "Irish Setter", "Gordon Setter", "Brittany dog", "Clumber Spaniel",
|
| 94 |
+
"English Springer Spaniel", "Welsh Springer Spaniel", "Cocker Spaniel", "Sussex Spaniel",
|
| 95 |
+
"Irish Water Spaniel", "Kuvasz", "Schipperke", "Groenendael dog", "Malinois", "Briard",
|
| 96 |
+
"Australian Kelpie", "Komondor", "Old English Sheepdog", "Shetland Sheepdog", "collie",
|
| 97 |
+
"Border Collie", "Bouvier des Flandres dog", "Rottweiler", "German Shepherd Dog", "Dobermann",
|
| 98 |
+
"Miniature Pinscher", "Greater Swiss Mountain Dog", "Bernese Mountain Dog",
|
| 99 |
+
"Appenzeller Sennenhund", "Entlebucher Sennenhund", "Boxer", "Bullmastiff", "Tibetan Mastiff",
|
| 100 |
+
"French Bulldog", "Great Dane", "St. Bernard", "husky", "Alaskan Malamute", "Siberian Husky",
|
| 101 |
+
"Dalmatian", "Affenpinscher", "Basenji", "pug", "Leonberger", "Newfoundland dog",
|
| 102 |
+
"Great Pyrenees dog", "Samoyed", "Pomeranian", "Chow Chow", "Keeshond", "brussels griffon",
|
| 103 |
+
"Pembroke Welsh Corgi", "Cardigan Welsh Corgi", "Toy Poodle", "Miniature Poodle",
|
| 104 |
+
"Standard Poodle", "Mexican hairless dog (xoloitzcuintli)", "grey wolf", "Alaskan tundra wolf",
|
| 105 |
+
"red wolf or maned wolf", "coyote", "dingo", "dhole", "African wild dog", "hyena", "red fox",
|
| 106 |
+
"kit fox", "Arctic fox", "grey fox", "tabby cat", "tiger cat", "Persian cat", "Siamese cat",
|
| 107 |
+
"Egyptian Mau", "cougar", "lynx", "leopard", "snow leopard", "jaguar", "lion", "tiger",
|
| 108 |
+
"cheetah", "brown bear", "American black bear", "polar bear", "sloth bear", "mongoose",
|
| 109 |
+
"meerkat", "tiger beetle", "ladybug", "ground beetle", "longhorn beetle", "leaf beetle",
|
| 110 |
+
"dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant", "grasshopper",
|
| 111 |
+
"cricket insect", "stick insect", "cockroach", "praying mantis", "cicada", "leafhopper",
|
| 112 |
+
"lacewing", "dragonfly", "damselfly", "red admiral butterfly", "ringlet butterfly",
|
| 113 |
+
"monarch butterfly", "small white butterfly", "sulphur butterfly", "gossamer-winged butterfly",
|
| 114 |
+
"starfish", "sea urchin", "sea cucumber", "cottontail rabbit", "hare", "Angora rabbit",
|
| 115 |
+
"hamster", "porcupine", "fox squirrel", "marmot", "beaver", "guinea pig", "common sorrel horse",
|
| 116 |
+
"zebra", "pig", "wild boar", "warthog", "hippopotamus", "ox", "water buffalo", "bison",
|
| 117 |
+
"ram (adult male sheep)", "bighorn sheep", "Alpine ibex", "hartebeest", "impala (antelope)",
|
| 118 |
+
"gazelle", "arabian camel", "llama", "weasel", "mink", "European polecat",
|
| 119 |
+
"black-footed ferret", "otter", "skunk", "badger", "armadillo", "three-toed sloth", "orangutan",
|
| 120 |
+
"gorilla", "chimpanzee", "gibbon", "siamang", "guenon", "patas monkey", "baboon", "macaque",
|
| 121 |
+
"langur", "black-and-white colobus", "proboscis monkey", "marmoset", "white-headed capuchin",
|
| 122 |
+
"howler monkey", "titi monkey", "Geoffroy's spider monkey", "common squirrel monkey",
|
| 123 |
+
"ring-tailed lemur", "indri", "Asian elephant", "African bush elephant", "red panda",
|
| 124 |
+
"giant panda", "snoek fish", "eel", "silver salmon", "rock beauty fish", "clownfish",
|
| 125 |
+
"sturgeon", "gar fish", "lionfish", "pufferfish", "abacus", "abaya", "academic gown",
|
| 126 |
+
"accordion", "acoustic guitar", "aircraft carrier", "airliner", "airship", "altar", "ambulance",
|
| 127 |
+
"amphibious vehicle", "analog clock", "apiary", "apron", "trash can", "assault rifle",
|
| 128 |
+
"backpack", "bakery", "balance beam", "balloon", "ballpoint pen", "Band-Aid", "banjo",
|
| 129 |
+
"baluster / handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel",
|
| 130 |
+
"wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "swimming cap", "bath towel",
|
| 131 |
+
"bathtub", "station wagon", "lighthouse", "beaker", "military hat (bearskin or shako)",
|
| 132 |
+
"beer bottle", "beer glass", "bell tower", "baby bib", "tandem bicycle", "bikini",
|
| 133 |
+
"ring binder", "binoculars", "birdhouse", "boathouse", "bobsleigh", "bolo tie", "poke bonnet",
|
| 134 |
+
"bookcase", "bookstore", "bottle cap", "hunting bow", "bow tie", "brass memorial plaque", "bra",
|
| 135 |
+
"breakwater", "breastplate", "broom", "bucket", "buckle", "bulletproof vest",
|
| 136 |
+
"high-speed train", "butcher shop", "taxicab", "cauldron", "candle", "cannon", "canoe",
|
| 137 |
+
"can opener", "cardigan", "car mirror", "carousel", "tool kit", "cardboard box / carton",
|
| 138 |
+
"car wheel", "automated teller machine", "cassette", "cassette player", "castle", "catamaran",
|
| 139 |
+
"CD player", "cello", "mobile phone", "chain", "chain-link fence", "chain mail", "chainsaw",
|
| 140 |
+
"storage chest", "chiffonier", "bell or wind chime", "china cabinet", "Christmas stocking",
|
| 141 |
+
"church", "movie theater", "cleaver", "cliff dwelling", "cloak", "clogs", "cocktail shaker",
|
| 142 |
+
"coffee mug", "coffeemaker", "spiral or coil", "combination lock", "computer keyboard",
|
| 143 |
+
"candy store", "container ship", "convertible", "corkscrew", "cornet", "cowboy boot",
|
| 144 |
+
"cowboy hat", "cradle", "construction crane", "crash helmet", "crate", "infant bed",
|
| 145 |
+
"Crock Pot", "croquet ball", "crutch", "cuirass", "dam", "desk", "desktop computer",
|
| 146 |
+
"rotary dial telephone", "diaper", "digital clock", "digital watch", "dining table",
|
| 147 |
+
"dishcloth", "dishwasher", "disc brake", "dock", "dog sled", "dome", "doormat", "drilling rig",
|
| 148 |
+
"drum", "drumstick", "dumbbell", "Dutch oven", "electric fan", "electric guitar",
|
| 149 |
+
"electric locomotive", "entertainment center", "envelope", "espresso machine", "face powder",
|
| 150 |
+
"feather boa", "filing cabinet", "fireboat", "fire truck", "fire screen", "flagpole", "flute",
|
| 151 |
+
"folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster bed",
|
| 152 |
+
"freight car", "French horn", "frying pan", "fur coat", "garbage truck",
|
| 153 |
+
"gas mask or respirator", "gas pump", "goblet", "go-kart", "golf ball", "golf cart", "gondola",
|
| 154 |
+
"gong", "gown", "grand piano", "greenhouse", "radiator grille", "grocery store", "guillotine",
|
| 155 |
+
"hair clip", "hair spray", "half-track", "hammer", "hamper", "hair dryer", "hand-held computer",
|
| 156 |
+
"handkerchief", "hard disk drive", "harmonica", "harp", "combine harvester", "hatchet",
|
| 157 |
+
"holster", "home theater", "honeycomb", "hook", "hoop skirt", "gymnastic horizontal bar",
|
| 158 |
+
"horse-drawn vehicle", "hourglass", "iPod", "clothes iron", "carved pumpkin", "jeans", "jeep",
|
| 159 |
+
"T-shirt", "jigsaw puzzle", "rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat",
|
| 160 |
+
"ladle", "lampshade", "laptop computer", "lawn mower", "lens cap", "letter opener", "library",
|
| 161 |
+
"lifeboat", "lighter", "limousine", "ocean liner", "lipstick", "slip-on shoe", "lotion",
|
| 162 |
+
"music speaker", "loupe magnifying glass", "sawmill", "magnetic compass", "messenger bag",
|
| 163 |
+
"mailbox", "tights", "one-piece bathing suit", "manhole cover", "maraca", "marimba", "mask",
|
| 164 |
+
"matchstick", "maypole", "maze", "measuring cup", "medicine cabinet", "megalith", "microphone",
|
| 165 |
+
"microwave oven", "military uniform", "milk can", "minibus", "miniskirt", "minivan", "missile",
|
| 166 |
+
"mitten", "mixing bowl", "mobile home", "ford model t", "modem", "monastery", "monitor",
|
| 167 |
+
"moped", "mortar and pestle", "graduation cap", "mosque", "mosquito net", "vespa",
|
| 168 |
+
"mountain bike", "tent", "computer mouse", "mousetrap", "moving van", "muzzle", "metal nail",
|
| 169 |
+
"neck brace", "necklace", "baby pacifier", "notebook computer", "obelisk", "oboe", "ocarina",
|
| 170 |
+
"odometer", "oil filter", "pipe organ", "oscilloscope", "overskirt", "bullock cart",
|
| 171 |
+
"oxygen mask", "product packet / packaging", "paddle", "paddle wheel", "padlock", "paintbrush",
|
| 172 |
+
"pajamas", "palace", "pan flute", "paper towel", "parachute", "parallel bars", "park bench",
|
| 173 |
+
"parking meter", "railroad car", "patio", "payphone", "pedestal", "pencil case",
|
| 174 |
+
"pencil sharpener", "perfume", "Petri dish", "photocopier", "plectrum", "Pickelhaube",
|
| 175 |
+
"picket fence", "pickup truck", "pier", "piggy bank", "pill bottle", "pillow", "ping-pong ball",
|
| 176 |
+
"pinwheel", "pirate ship", "drink pitcher", "block plane", "planetarium", "plastic bag",
|
| 177 |
+
"plate rack", "farm plow", "plunger", "Polaroid camera", "pole", "police van", "poncho",
|
| 178 |
+
"pool table", "soda bottle", "plant pot", "potter's wheel", "power drill", "prayer rug",
|
| 179 |
+
"printer", "prison", "missile", "projector", "hockey puck", "punching bag", "purse", "quill",
|
| 180 |
+
"quilt", "race car", "racket", "radiator", "radio", "radio telescope", "rain barrel",
|
| 181 |
+
"recreational vehicle", "fishing casting reel", "reflex camera", "refrigerator",
|
| 182 |
+
"remote control", "restaurant", "revolver", "rifle", "rocking chair", "rotisserie", "eraser",
|
| 183 |
+
"rugby ball", "ruler measuring stick", "sneaker", "safe", "safety pin", "salt shaker", "sandal",
|
| 184 |
+
"sarong", "saxophone", "scabbard", "weighing scale", "school bus", "schooner", "scoreboard",
|
| 185 |
+
"CRT monitor", "screw", "screwdriver", "seat belt", "sewing machine", "shield", "shoe store",
|
| 186 |
+
"shoji screen / room divider", "shopping basket", "shopping cart", "shovel", "shower cap",
|
| 187 |
+
"shower curtain", "ski", "balaclava ski mask", "sleeping bag", "slide rule", "sliding door",
|
| 188 |
+
"slot machine", "snorkel", "snowmobile", "snowplow", "soap dispenser", "soccer ball", "sock",
|
| 189 |
+
"solar thermal collector", "sombrero", "soup bowl", "keyboard space bar", "space heater",
|
| 190 |
+
"space shuttle", "spatula", "motorboat", "spider web", "spindle", "sports car", "spotlight",
|
| 191 |
+
"stage", "steam locomotive", "through arch bridge", "steel drum", "stethoscope", "scarf",
|
| 192 |
+
"stone wall", "stopwatch", "stove", "strainer", "tram", "stretcher", "couch", "stupa",
|
| 193 |
+
"submarine", "suit", "sundial", "sunglasses", "sunglasses", "sunscreen", "suspension bridge",
|
| 194 |
+
"mop", "sweatshirt", "swim trunks / shorts", "swing", "electrical switch", "syringe",
|
| 195 |
+
"table lamp", "tank", "tape player", "teapot", "teddy bear", "television", "tennis ball",
|
| 196 |
+
"thatched roof", "front curtain", "thimble", "threshing machine", "throne", "tile roof",
|
| 197 |
+
"toaster", "tobacco shop", "toilet seat", "torch", "totem pole", "tow truck", "toy store",
|
| 198 |
+
"tractor", "semi-trailer truck", "tray", "trench coat", "tricycle", "trimaran", "tripod",
|
| 199 |
+
"triumphal arch", "trolleybus", "trombone", "hot tub", "turnstile", "typewriter keyboard",
|
| 200 |
+
"umbrella", "unicycle", "upright piano", "vacuum cleaner", "vase", "vaulted or arched ceiling",
|
| 201 |
+
"velvet fabric", "vending machine", "vestment", "viaduct", "violin", "volleyball",
|
| 202 |
+
"waffle iron", "wall clock", "wallet", "wardrobe", "military aircraft", "sink",
|
| 203 |
+
"washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle",
|
| 204 |
+
"hair wig", "window screen", "window shade", "Windsor tie", "wine bottle", "airplane wing",
|
| 205 |
+
"wok", "wooden spoon", "wool", "split-rail fence", "shipwreck", "sailboat", "yurt", "website",
|
| 206 |
+
"comic book", "crossword", "traffic or street sign", "traffic light", "dust jacket", "menu",
|
| 207 |
+
"plate", "guacamole", "consomme", "hot pot", "trifle", "ice cream", "popsicle", "baguette",
|
| 208 |
+
"bagel", "pretzel", "cheeseburger", "hot dog", "mashed potatoes", "cabbage", "broccoli",
|
| 209 |
+
"cauliflower", "zucchini", "spaghetti squash", "acorn squash", "butternut squash", "cucumber",
|
| 210 |
+
"artichoke", "bell pepper", "cardoon", "mushroom", "Granny Smith apple", "strawberry", "orange",
|
| 211 |
+
"lemon", "fig", "pineapple", "banana", "jackfruit", "cherimoya (custard apple)", "pomegranate",
|
| 212 |
+
"hay", "carbonara", "chocolate syrup", "dough", "meatloaf", "pizza", "pot pie", "burrito",
|
| 213 |
+
"red wine", "espresso", "tea cup", "eggnog", "mountain", "bubble", "cliff", "coral reef",
|
| 214 |
+
"geyser", "lakeshore", "promontory", "sandbar", "beach", "valley", "volcano", "baseball player",
|
| 215 |
+
"bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper", "corn", "acorn",
|
| 216 |
+
"rose hip", "horse chestnut seed", "coral fungus", "agaric", "gyromitra", "stinkhorn mushroom",
|
| 217 |
+
"earth star fungus", "hen of the woods mushroom", "bolete", "corn cob", "toilet paper"]
|
| 218 |
+
|
| 219 |
+
# cifar100_classes = [name for name in os.listdir(args.data_path)
|
| 220 |
+
# if os.path.isdir(os.path.join(args.data_path, name))]
|
| 221 |
+
|
| 222 |
+
imagenet_single_template = [
|
| 223 |
+
'a photo of a {}.',
|
| 224 |
+
]
|
| 225 |
+
|
| 226 |
+
imagenet_7_templates = [
|
| 227 |
+
'itap of a {}.',
|
| 228 |
+
'a origami {}.',
|
| 229 |
+
'a bad photo of the {}.',
|
| 230 |
+
'a photo of the large {}.',
|
| 231 |
+
'a {} in a video game.',
|
| 232 |
+
'art of the {}.',
|
| 233 |
+
'a photo of the small {}.',
|
| 234 |
+
]
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
print('load pre-trained model')
|
| 238 |
+
model, preprocess = clip.load(args.arch)
|
| 239 |
+
model = model.cuda()
|
| 240 |
+
model.eval()
|
| 241 |
+
|
| 242 |
+
print('load data')
|
| 243 |
+
valdir = os.path.join(args.data_path, 'val')
|
| 244 |
+
# valdir = os.path.join(args.data_path, '')
|
| 245 |
+
val_set = datasets.ImageFolder(valdir, transform=preprocess)
|
| 246 |
+
loader = torch.utils.data.DataLoader(val_set, batch_size=args.batch_size, num_workers=args.workers)
|
| 247 |
+
with torch.no_grad():
|
| 248 |
+
image_feat = []
|
| 249 |
+
image_label = []
|
| 250 |
+
for i, (images, target) in enumerate(loader):
|
| 251 |
+
images = images.cuda()
|
| 252 |
+
target = target.cuda()
|
| 253 |
+
image_features = model.encode_image(images)
|
| 254 |
+
image_feat.append(F.normalize(image_features, dim=1))
|
| 255 |
+
image_label.append(target)
|
| 256 |
+
image_feat = torch.cat(image_feat, dim=0)
|
| 257 |
+
image_label = torch.cat(image_label, dim=0)
|
| 258 |
+
n = len(image_label)
|
| 259 |
+
image_feat = image_feat.float()
|
| 260 |
+
|
| 261 |
+
print('obtain text proxy')
|
| 262 |
+
text_classifier = zeroshot_classifier(clip, model, imagenet_classes, imagenet_7_templates)
|
| 263 |
+
text_classifier = text_classifier.float()
|
| 264 |
+
logits_t = image_feat @ text_classifier
|
| 265 |
+
acc1, acc5 = accuracy(logits_t, image_label, topk=(1, 5))
|
| 266 |
+
top1 = (acc1 / n) * 100
|
| 267 |
+
print(f'accuracy with text proxy: {top1:.2f}')
|
| 268 |
+
|
| 269 |
+
print('online zero-shot transfer: repeat {} times'.format(args.repeat))
|
| 270 |
+
num_class = len(torch.unique(image_label))
|
| 271 |
+
feat_dim = image_feat.shape[1]
|
| 272 |
+
acc_onzeta = torch.zeros(args.repeat).cuda()
|
| 273 |
+
acc_onlab = torch.zeros(args.repeat).cuda()
|
| 274 |
+
acc_ls = torch.zeros(args.repeat).cuda()
|
| 275 |
+
for iter in range(args.repeat):
|
| 276 |
+
idx = torch.randperm(n).cuda()
|
| 277 |
+
combo_label = torch.zeros(n, num_class).cuda()
|
| 278 |
+
text_label = torch.zeros(n, num_class).cuda()
|
| 279 |
+
mlp = VisionProxyMLP(feat_dim, hidden_dim=512, output_dim=num_class).cuda()
|
| 280 |
+
optimizer = torch.optim.SGD(mlp.parameters(), lr=args.cw)
|
| 281 |
+
rho = torch.zeros(num_class).cuda()
|
| 282 |
+
for i in range(n):
|
| 283 |
+
lr = args.cw / math.sqrt(i + 1)
|
| 284 |
+
rlr = args.cr / math.sqrt(i + 1)
|
| 285 |
+
|
| 286 |
+
for param_group in optimizer.param_groups:
|
| 287 |
+
param_group['lr'] = lr
|
| 288 |
+
|
| 289 |
+
beta = args.beta * math.sqrt((i + 1) / n)
|
| 290 |
+
|
| 291 |
+
x = image_feat[idx[i], :]
|
| 292 |
+
tlabel = F.softmax(x @ text_classifier / args.tau_t, dim=0)
|
| 293 |
+
tlabel = tlabel * torch.exp(rho)
|
| 294 |
+
tlabel /= torch.sum(tlabel)
|
| 295 |
+
rho -= rlr * (tlabel - args.alpha / num_class)
|
| 296 |
+
rho[rho < 0] = 0
|
| 297 |
+
text_label[i, :] = tlabel
|
| 298 |
+
|
| 299 |
+
|
| 300 |
+
mlp.train()
|
| 301 |
+
vision_logits = mlp(x) / args.tau_i # [1, num_class]
|
| 302 |
+
vision_label = F.softmax(vision_logits, dim=0)
|
| 303 |
+
|
| 304 |
+
# --- 组合标签 ---
|
| 305 |
+
combo_label[i, :] = (beta * vision_label + (1 - beta) * tlabel).detach()
|
| 306 |
+
|
| 307 |
+
# --- 计算 loss 并优化 MLP ---
|
| 308 |
+
# grad_vector = torch.outer(x, vision_label - tlabel) # shape: [1, C]
|
| 309 |
+
# vision_logits.backward(gradient=grad_vector)
|
| 310 |
+
grad_vector = vision_label - tlabel # shape: [1000]
|
| 311 |
+
vision_logits.backward(gradient=grad_vector)
|
| 312 |
+
optimizer.step()
|
| 313 |
+
# vision_label = F.softmax(x @ w / args.tau_i, dim=0)
|
| 314 |
+
# combo_label[i, :] = beta * vision_label + (1 - beta) * tlabel
|
| 315 |
+
# grad = torch.outer(x, vision_label - tlabel)
|
| 316 |
+
# w -= (lr / args.tau_i) * grad
|
| 317 |
+
# w = F.normalize(w, dim=0)
|
| 318 |
+
acc1, acc5 = accuracy(text_label, image_label[idx], topk=(1, 5))
|
| 319 |
+
acc_onlab[iter] = (acc1 / n) * 100
|
| 320 |
+
acc1, acc5 = accuracy(combo_label, image_label[idx], topk=(1, 5))
|
| 321 |
+
|
| 322 |
+
# MAPLS - EM Algorithm
|
| 323 |
+
pz = np.full(len(imagenet_classes), 1.0 / len(imagenet_classes))
|
| 324 |
+
qy = mapls(combo_label, pz = pz, qy_mode = "soft", max_iter = 100, lam = lam)
|
| 325 |
+
|
| 326 |
+
w = np.array(qy) / np.array(pz)
|
| 327 |
+
if combo_label.is_cuda:
|
| 328 |
+
combo_label_cpu = combo_label.cpu()
|
| 329 |
+
qy_probs = lsc(combo_label_cpu, 1.0/w)
|
| 330 |
+
qy_probs = torch.from_numpy(qy_probs)
|
| 331 |
+
acc1_ls, acc5_ls = accuracy(qy_probs, image_label[idx], topk=(1, 5))
|
| 332 |
+
|
| 333 |
+
acc_onzeta[iter] = (acc1 / n) * 100
|
| 334 |
+
acc_ls[iter] = (acc1_ls / n) * 100
|
| 335 |
+
print('mean acc of onlab is: {:.2f}'.format(torch.mean(acc_onlab)))
|
| 336 |
+
print('mean acc of onzeta is: {:.2f}'.format(torch.mean(acc_onzeta)))
|
| 337 |
+
print('mean acc of MAPLS is: {:.2f}'.format(torch.mean(acc_ls)))
|
| 338 |
+
|
| 339 |
+
|
| 340 |
+
def zeroshot_classifier(clip, model, classnames, templates):
|
| 341 |
+
with torch.no_grad():
|
| 342 |
+
zeroshot_weights = []
|
| 343 |
+
for classname in classnames:
|
| 344 |
+
texts = [template.format(classname) for template in templates]
|
| 345 |
+
texts = clip.tokenize(texts).cuda()
|
| 346 |
+
class_embeddings = model.encode_text(texts)
|
| 347 |
+
class_embeddings /= class_embeddings.norm(dim=-1, keepdim=True)
|
| 348 |
+
class_embedding = class_embeddings.mean(dim=0)
|
| 349 |
+
class_embedding /= class_embedding.norm()
|
| 350 |
+
zeroshot_weights.append(class_embedding)
|
| 351 |
+
zeroshot_weights = torch.stack(zeroshot_weights, dim=1).cuda()
|
| 352 |
+
return zeroshot_weights
|
| 353 |
+
|
| 354 |
+
|
| 355 |
+
def accuracy(output, target, topk=(1,)):
|
| 356 |
+
pred = output.topk(max(topk), 1, True, True)[1].t()
|
| 357 |
+
pred, target = pred.cpu(), target.cpu()
|
| 358 |
+
correct = pred.eq(target.view(1, -1).expand_as(pred))
|
| 359 |
+
return [float(correct[:k].reshape(-1).float().sum(0, keepdim=True).cpu().numpy()) for k in topk]
|
| 360 |
+
|
| 361 |
+
|
| 362 |
+
if __name__ == '__main__':
|
| 363 |
+
# main()
|
| 364 |
+
|
| 365 |
+
lams = [0.6]
|
| 366 |
+
for lam in lams:
|
| 367 |
+
main(lam)
|
| 368 |
+
|
OnZeta/main_online_imagenet_margin_softmax.py
ADDED
|
@@ -0,0 +1,346 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) Alibaba Group
|
| 2 |
+
import argparse
|
| 3 |
+
import torch
|
| 4 |
+
import torchvision.datasets as datasets
|
| 5 |
+
import torch.nn.functional as F
|
| 6 |
+
import clip
|
| 7 |
+
import os
|
| 8 |
+
import math
|
| 9 |
+
import numpy as np
|
| 10 |
+
|
| 11 |
+
from MAPLS.mapls import mapls
|
| 12 |
+
from MAPLS.common import lsc
|
| 13 |
+
|
| 14 |
+
model_names = ['RN50', 'ViT-B/32', 'ViT-B/16', 'ViT-L/14', 'ViT-L/14@336px']
|
| 15 |
+
parser = argparse.ArgumentParser(description='OnZeta for ImageNet')
|
| 16 |
+
parser.add_argument('--data_path', default='/home/lt_test/projects/datasets/ImageNet/', type=str,
|
| 17 |
+
help='dataset path')
|
| 18 |
+
parser.add_argument('-a', '--arch', metavar='ARCH', default='ViT-B/16',
|
| 19 |
+
choices=model_names,
|
| 20 |
+
help='model architecture: ' +
|
| 21 |
+
' | '.join(model_names) +
|
| 22 |
+
' (default: RN50)')
|
| 23 |
+
parser.add_argument('-j', '--workers', default=8, type=int, metavar='N',
|
| 24 |
+
help='number of data loading workers (default: 8)')
|
| 25 |
+
parser.add_argument('-b', '--batch-size', default=256, type=int,
|
| 26 |
+
metavar='N',
|
| 27 |
+
help='mini-batch size (default: 256)')
|
| 28 |
+
parser.add_argument('--tau_t', default=0.01, type=float)
|
| 29 |
+
parser.add_argument('--tau_i', default=0.04, type=float)
|
| 30 |
+
parser.add_argument('--cw', default=0.5, type=float)
|
| 31 |
+
parser.add_argument('--cr', default=20, type=float)
|
| 32 |
+
parser.add_argument('--alpha', default=1, type=float)
|
| 33 |
+
parser.add_argument('--beta', default=0.8, type=float)
|
| 34 |
+
parser.add_argument('--repeat', default=5, type=int)
|
| 35 |
+
|
| 36 |
+
parser.add_argument('--lam', default=0.6, type=float)
|
| 37 |
+
parser.add_argument('--margin', default=0.2, type=float, help='margin for margin softmax')
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def main(margin):
    """Run OnZeta online zero-shot transfer on the ImageNet validation set.

    Pipeline: extract L2-normalized CLIP image features once, build a text
    proxy classifier from prompt ensembles, then run the OnZeta online
    algorithm (text label with class-balance dual variable rho + online
    vision classifier with margin softmax), and finally re-calibrate the
    combined labels with the MAPLS EM label-shift correction.

    Args:
        margin: value subtracted from the pseudo-label logit before the
            vision softmax (margin softmax).

    Side effects: loads CLIP and the dataset onto the GPU and prints top-1
    accuracies for the text proxy, online labels, OnZeta and MAPLS.
    """
    args = parser.parse_args()
    print(args)
    # The 1000 ImageNet class names (OpenAI CLIP prompt-engineering spelling).
    imagenet_classes = ["tench", "goldfish", "great white shark", "tiger shark", "hammerhead shark", "electric ray",
                        "stingray", "rooster", "hen", "ostrich", "brambling", "goldfinch", "house finch", "junco",
                        "indigo bunting", "American robin", "bulbul", "jay", "magpie", "chickadee", "American dipper",
                        "kite (bird of prey)", "bald eagle", "vulture", "great grey owl", "fire salamander",
                        "smooth newt", "newt", "spotted salamander", "axolotl", "American bullfrog", "tree frog",
                        "tailed frog", "loggerhead sea turtle", "leatherback sea turtle", "mud turtle", "terrapin",
                        "box turtle", "banded gecko", "green iguana", "Carolina anole",
                        "desert grassland whiptail lizard", "agama", "frilled-necked lizard", "alligator lizard",
                        "Gila monster", "European green lizard", "chameleon", "Komodo dragon", "Nile crocodile",
                        "American alligator", "triceratops", "worm snake", "ring-necked snake",
                        "eastern hog-nosed snake", "smooth green snake", "kingsnake", "garter snake", "water snake",
                        "vine snake", "night snake", "boa constrictor", "African rock python", "Indian cobra",
                        "green mamba", "sea snake", "Saharan horned viper", "eastern diamondback rattlesnake",
                        "sidewinder rattlesnake", "trilobite", "harvestman", "scorpion", "yellow garden spider",
                        "barn spider", "European garden spider", "southern black widow", "tarantula", "wolf spider",
                        "tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse", "prairie grouse", "peafowl",
                        "quail", "partridge", "african grey parrot", "macaw", "sulphur-crested cockatoo", "lorikeet",
                        "coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "duck",
                        "red-breasted merganser", "goose", "black swan", "tusker", "echidna", "platypus", "wallaby",
                        "koala", "wombat", "jellyfish", "sea anemone", "brain coral", "flatworm", "nematode", "conch",
                        "snail", "slug", "sea slug", "chiton", "chambered nautilus", "Dungeness crab", "rock crab",
                        "fiddler crab", "red king crab", "American lobster", "spiny lobster", "crayfish", "hermit crab",
                        "isopod", "white stork", "black stork", "spoonbill", "flamingo", "little blue heron",
                        "great egret", "bittern bird", "crane bird", "limpkin", "common gallinule", "American coot",
                        "bustard", "ruddy turnstone", "dunlin", "common redshank", "dowitcher", "oystercatcher",
                        "pelican", "king penguin", "albatross", "grey whale", "killer whale", "dugong", "sea lion",
                        "Chihuahua", "Japanese Chin", "Maltese", "Pekingese", "Shih Tzu", "King Charles Spaniel",
                        "Papillon", "toy terrier", "Rhodesian Ridgeback", "Afghan Hound", "Basset Hound", "Beagle",
                        "Bloodhound", "Bluetick Coonhound", "Black and Tan Coonhound", "Treeing Walker Coonhound",
                        "English foxhound", "Redbone Coonhound", "borzoi", "Irish Wolfhound", "Italian Greyhound",
                        "Whippet", "Ibizan Hound", "Norwegian Elkhound", "Otterhound", "Saluki", "Scottish Deerhound",
                        "Weimaraner", "Staffordshire Bull Terrier", "American Staffordshire Terrier",
                        "Bedlington Terrier", "Border Terrier", "Kerry Blue Terrier", "Irish Terrier",
                        "Norfolk Terrier", "Norwich Terrier", "Yorkshire Terrier", "Wire Fox Terrier",
                        "Lakeland Terrier", "Sealyham Terrier", "Airedale Terrier", "Cairn Terrier",
                        "Australian Terrier", "Dandie Dinmont Terrier", "Boston Terrier", "Miniature Schnauzer",
                        "Giant Schnauzer", "Standard Schnauzer", "Scottish Terrier", "Tibetan Terrier",
                        "Australian Silky Terrier", "Soft-coated Wheaten Terrier", "West Highland White Terrier",
                        "Lhasa Apso", "Flat-Coated Retriever", "Curly-coated Retriever", "Golden Retriever",
                        "Labrador Retriever", "Chesapeake Bay Retriever", "German Shorthaired Pointer", "Vizsla",
                        "English Setter", "Irish Setter", "Gordon Setter", "Brittany dog", "Clumber Spaniel",
                        "English Springer Spaniel", "Welsh Springer Spaniel", "Cocker Spaniel", "Sussex Spaniel",
                        "Irish Water Spaniel", "Kuvasz", "Schipperke", "Groenendael dog", "Malinois", "Briard",
                        "Australian Kelpie", "Komondor", "Old English Sheepdog", "Shetland Sheepdog", "collie",
                        "Border Collie", "Bouvier des Flandres dog", "Rottweiler", "German Shepherd Dog", "Dobermann",
                        "Miniature Pinscher", "Greater Swiss Mountain Dog", "Bernese Mountain Dog",
                        "Appenzeller Sennenhund", "Entlebucher Sennenhund", "Boxer", "Bullmastiff", "Tibetan Mastiff",
                        "French Bulldog", "Great Dane", "St. Bernard", "husky", "Alaskan Malamute", "Siberian Husky",
                        "Dalmatian", "Affenpinscher", "Basenji", "pug", "Leonberger", "Newfoundland dog",
                        "Great Pyrenees dog", "Samoyed", "Pomeranian", "Chow Chow", "Keeshond", "brussels griffon",
                        "Pembroke Welsh Corgi", "Cardigan Welsh Corgi", "Toy Poodle", "Miniature Poodle",
                        "Standard Poodle", "Mexican hairless dog (xoloitzcuintli)", "grey wolf", "Alaskan tundra wolf",
                        "red wolf or maned wolf", "coyote", "dingo", "dhole", "African wild dog", "hyena", "red fox",
                        "kit fox", "Arctic fox", "grey fox", "tabby cat", "tiger cat", "Persian cat", "Siamese cat",
                        "Egyptian Mau", "cougar", "lynx", "leopard", "snow leopard", "jaguar", "lion", "tiger",
                        "cheetah", "brown bear", "American black bear", "polar bear", "sloth bear", "mongoose",
                        "meerkat", "tiger beetle", "ladybug", "ground beetle", "longhorn beetle", "leaf beetle",
                        "dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant", "grasshopper",
                        "cricket insect", "stick insect", "cockroach", "praying mantis", "cicada", "leafhopper",
                        "lacewing", "dragonfly", "damselfly", "red admiral butterfly", "ringlet butterfly",
                        "monarch butterfly", "small white butterfly", "sulphur butterfly", "gossamer-winged butterfly",
                        "starfish", "sea urchin", "sea cucumber", "cottontail rabbit", "hare", "Angora rabbit",
                        "hamster", "porcupine", "fox squirrel", "marmot", "beaver", "guinea pig", "common sorrel horse",
                        "zebra", "pig", "wild boar", "warthog", "hippopotamus", "ox", "water buffalo", "bison",
                        "ram (adult male sheep)", "bighorn sheep", "Alpine ibex", "hartebeest", "impala (antelope)",
                        "gazelle", "arabian camel", "llama", "weasel", "mink", "European polecat",
                        "black-footed ferret", "otter", "skunk", "badger", "armadillo", "three-toed sloth", "orangutan",
                        "gorilla", "chimpanzee", "gibbon", "siamang", "guenon", "patas monkey", "baboon", "macaque",
                        "langur", "black-and-white colobus", "proboscis monkey", "marmoset", "white-headed capuchin",
                        "howler monkey", "titi monkey", "Geoffroy's spider monkey", "common squirrel monkey",
                        "ring-tailed lemur", "indri", "Asian elephant", "African bush elephant", "red panda",
                        "giant panda", "snoek fish", "eel", "silver salmon", "rock beauty fish", "clownfish",
                        "sturgeon", "gar fish", "lionfish", "pufferfish", "abacus", "abaya", "academic gown",
                        "accordion", "acoustic guitar", "aircraft carrier", "airliner", "airship", "altar", "ambulance",
                        "amphibious vehicle", "analog clock", "apiary", "apron", "trash can", "assault rifle",
                        "backpack", "bakery", "balance beam", "balloon", "ballpoint pen", "Band-Aid", "banjo",
                        "baluster / handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel",
                        "wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "swimming cap", "bath towel",
                        "bathtub", "station wagon", "lighthouse", "beaker", "military hat (bearskin or shako)",
                        "beer bottle", "beer glass", "bell tower", "baby bib", "tandem bicycle", "bikini",
                        "ring binder", "binoculars", "birdhouse", "boathouse", "bobsleigh", "bolo tie", "poke bonnet",
                        "bookcase", "bookstore", "bottle cap", "hunting bow", "bow tie", "brass memorial plaque", "bra",
                        "breakwater", "breastplate", "broom", "bucket", "buckle", "bulletproof vest",
                        "high-speed train", "butcher shop", "taxicab", "cauldron", "candle", "cannon", "canoe",
                        "can opener", "cardigan", "car mirror", "carousel", "tool kit", "cardboard box / carton",
                        "car wheel", "automated teller machine", "cassette", "cassette player", "castle", "catamaran",
                        "CD player", "cello", "mobile phone", "chain", "chain-link fence", "chain mail", "chainsaw",
                        "storage chest", "chiffonier", "bell or wind chime", "china cabinet", "Christmas stocking",
                        "church", "movie theater", "cleaver", "cliff dwelling", "cloak", "clogs", "cocktail shaker",
                        "coffee mug", "coffeemaker", "spiral or coil", "combination lock", "computer keyboard",
                        "candy store", "container ship", "convertible", "corkscrew", "cornet", "cowboy boot",
                        "cowboy hat", "cradle", "construction crane", "crash helmet", "crate", "infant bed",
                        "Crock Pot", "croquet ball", "crutch", "cuirass", "dam", "desk", "desktop computer",
                        "rotary dial telephone", "diaper", "digital clock", "digital watch", "dining table",
                        "dishcloth", "dishwasher", "disc brake", "dock", "dog sled", "dome", "doormat", "drilling rig",
                        "drum", "drumstick", "dumbbell", "Dutch oven", "electric fan", "electric guitar",
                        "electric locomotive", "entertainment center", "envelope", "espresso machine", "face powder",
                        "feather boa", "filing cabinet", "fireboat", "fire truck", "fire screen", "flagpole", "flute",
                        "folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster bed",
                        "freight car", "French horn", "frying pan", "fur coat", "garbage truck",
                        "gas mask or respirator", "gas pump", "goblet", "go-kart", "golf ball", "golf cart", "gondola",
                        "gong", "gown", "grand piano", "greenhouse", "radiator grille", "grocery store", "guillotine",
                        "hair clip", "hair spray", "half-track", "hammer", "hamper", "hair dryer", "hand-held computer",
                        "handkerchief", "hard disk drive", "harmonica", "harp", "combine harvester", "hatchet",
                        "holster", "home theater", "honeycomb", "hook", "hoop skirt", "gymnastic horizontal bar",
                        "horse-drawn vehicle", "hourglass", "iPod", "clothes iron", "carved pumpkin", "jeans", "jeep",
                        "T-shirt", "jigsaw puzzle", "rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat",
                        "ladle", "lampshade", "laptop computer", "lawn mower", "lens cap", "letter opener", "library",
                        "lifeboat", "lighter", "limousine", "ocean liner", "lipstick", "slip-on shoe", "lotion",
                        "music speaker", "loupe magnifying glass", "sawmill", "magnetic compass", "messenger bag",
                        "mailbox", "tights", "one-piece bathing suit", "manhole cover", "maraca", "marimba", "mask",
                        "matchstick", "maypole", "maze", "measuring cup", "medicine cabinet", "megalith", "microphone",
                        "microwave oven", "military uniform", "milk can", "minibus", "miniskirt", "minivan", "missile",
                        "mitten", "mixing bowl", "mobile home", "ford model t", "modem", "monastery", "monitor",
                        "moped", "mortar and pestle", "graduation cap", "mosque", "mosquito net", "vespa",
                        "mountain bike", "tent", "computer mouse", "mousetrap", "moving van", "muzzle", "metal nail",
                        "neck brace", "necklace", "baby pacifier", "notebook computer", "obelisk", "oboe", "ocarina",
                        "odometer", "oil filter", "pipe organ", "oscilloscope", "overskirt", "bullock cart",
                        "oxygen mask", "product packet / packaging", "paddle", "paddle wheel", "padlock", "paintbrush",
                        "pajamas", "palace", "pan flute", "paper towel", "parachute", "parallel bars", "park bench",
                        "parking meter", "railroad car", "patio", "payphone", "pedestal", "pencil case",
                        "pencil sharpener", "perfume", "Petri dish", "photocopier", "plectrum", "Pickelhaube",
                        "picket fence", "pickup truck", "pier", "piggy bank", "pill bottle", "pillow", "ping-pong ball",
                        "pinwheel", "pirate ship", "drink pitcher", "block plane", "planetarium", "plastic bag",
                        "plate rack", "farm plow", "plunger", "Polaroid camera", "pole", "police van", "poncho",
                        "pool table", "soda bottle", "plant pot", "potter's wheel", "power drill", "prayer rug",
                        "printer", "prison", "missile", "projector", "hockey puck", "punching bag", "purse", "quill",
                        "quilt", "race car", "racket", "radiator", "radio", "radio telescope", "rain barrel",
                        "recreational vehicle", "fishing casting reel", "reflex camera", "refrigerator",
                        "remote control", "restaurant", "revolver", "rifle", "rocking chair", "rotisserie", "eraser",
                        "rugby ball", "ruler measuring stick", "sneaker", "safe", "safety pin", "salt shaker", "sandal",
                        "sarong", "saxophone", "scabbard", "weighing scale", "school bus", "schooner", "scoreboard",
                        "CRT monitor", "screw", "screwdriver", "seat belt", "sewing machine", "shield", "shoe store",
                        "shoji screen / room divider", "shopping basket", "shopping cart", "shovel", "shower cap",
                        "shower curtain", "ski", "balaclava ski mask", "sleeping bag", "slide rule", "sliding door",
                        "slot machine", "snorkel", "snowmobile", "snowplow", "soap dispenser", "soccer ball", "sock",
                        "solar thermal collector", "sombrero", "soup bowl", "keyboard space bar", "space heater",
                        "space shuttle", "spatula", "motorboat", "spider web", "spindle", "sports car", "spotlight",
                        "stage", "steam locomotive", "through arch bridge", "steel drum", "stethoscope", "scarf",
                        "stone wall", "stopwatch", "stove", "strainer", "tram", "stretcher", "couch", "stupa",
                        "submarine", "suit", "sundial", "sunglasses", "sunglasses", "sunscreen", "suspension bridge",
                        "mop", "sweatshirt", "swim trunks / shorts", "swing", "electrical switch", "syringe",
                        "table lamp", "tank", "tape player", "teapot", "teddy bear", "television", "tennis ball",
                        "thatched roof", "front curtain", "thimble", "threshing machine", "throne", "tile roof",
                        "toaster", "tobacco shop", "toilet seat", "torch", "totem pole", "tow truck", "toy store",
                        "tractor", "semi-trailer truck", "tray", "trench coat", "tricycle", "trimaran", "tripod",
                        "triumphal arch", "trolleybus", "trombone", "hot tub", "turnstile", "typewriter keyboard",
                        "umbrella", "unicycle", "upright piano", "vacuum cleaner", "vase", "vaulted or arched ceiling",
                        "velvet fabric", "vending machine", "vestment", "viaduct", "violin", "volleyball",
                        "waffle iron", "wall clock", "wallet", "wardrobe", "military aircraft", "sink",
                        "washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle",
                        "hair wig", "window screen", "window shade", "Windsor tie", "wine bottle", "airplane wing",
                        "wok", "wooden spoon", "wool", "split-rail fence", "shipwreck", "sailboat", "yurt", "website",
                        "comic book", "crossword", "traffic or street sign", "traffic light", "dust jacket", "menu",
                        "plate", "guacamole", "consomme", "hot pot", "trifle", "ice cream", "popsicle", "baguette",
                        "bagel", "pretzel", "cheeseburger", "hot dog", "mashed potatoes", "cabbage", "broccoli",
                        "cauliflower", "zucchini", "spaghetti squash", "acorn squash", "butternut squash", "cucumber",
                        "artichoke", "bell pepper", "cardoon", "mushroom", "Granny Smith apple", "strawberry", "orange",
                        "lemon", "fig", "pineapple", "banana", "jackfruit", "cherimoya (custard apple)", "pomegranate",
                        "hay", "carbonara", "chocolate syrup", "dough", "meatloaf", "pizza", "pot pie", "burrito",
                        "red wine", "espresso", "tea cup", "eggnog", "mountain", "bubble", "cliff", "coral reef",
                        "geyser", "lakeshore", "promontory", "sandbar", "beach", "valley", "volcano", "baseball player",
                        "bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper", "corn", "acorn",
                        "rose hip", "horse chestnut seed", "coral fungus", "agaric", "gyromitra", "stinkhorn mushroom",
                        "earth star fungus", "hen of the woods mushroom", "bolete", "corn cob", "toilet paper"]

    # Prompt templates. The single-template variant is kept for reference but
    # is currently unused; the 7-template ensemble is what gets evaluated.
    imagenet_single_template = [
        'a photo of a {}.',
    ]

    imagenet_7_templates = [
        'itap of a {}.',
        'a origami {}.',
        'a bad photo of the {}.',
        'a photo of the large {}.',
        'a {} in a video game.',
        'art of the {}.',
        'a photo of the small {}.',
    ]

    print('load pre-trained model')
    model, preprocess = clip.load(args.arch)
    model = model.cuda()
    model.eval()

    print('load data')
    print("margin is ", margin)
    valdir = os.path.join(args.data_path, 'val')
    val_set = datasets.ImageFolder(valdir, transform=preprocess)
    loader = torch.utils.data.DataLoader(val_set, batch_size=args.batch_size, num_workers=args.workers)

    # Extract L2-normalized image features for the whole set once up front.
    with torch.no_grad():
        image_feat = []
        image_label = []
        for images, target in loader:
            images = images.cuda()
            target = target.cuda()
            image_features = model.encode_image(images)
            image_feat.append(F.normalize(image_features, dim=1))
            image_label.append(target)
    image_feat = torch.cat(image_feat, dim=0)
    image_label = torch.cat(image_label, dim=0)
    n = len(image_label)
    image_feat = image_feat.float()

    print('obtain text proxy')
    text_classifier = zeroshot_classifier(clip, model, imagenet_classes, imagenet_7_templates)
    text_classifier = text_classifier.float()
    logits_t = image_feat @ text_classifier
    acc1, acc5 = accuracy(logits_t, image_label, topk=(1, 5))
    top1 = (acc1 / n) * 100
    print(f'accuracy with text proxy: {top1:.2f}')

    print('online zero-shot transfer: repeat {} times'.format(args.repeat))
    num_class = len(torch.unique(image_label))
    acc_onzeta = torch.zeros(args.repeat).cuda()
    acc_onlab = torch.zeros(args.repeat).cuda()
    acc_ls = torch.zeros(args.repeat).cuda()
    # Renamed loop variable from `iter` (shadowed the builtin) to `rep`.
    for rep in range(args.repeat):
        idx = torch.randperm(n).cuda()
        combo_label = torch.zeros(n, num_class).cuda()
        text_label = torch.zeros(n, num_class).cuda()
        w = text_classifier.clone()          # online vision classifier (columns = classes)
        rho = torch.zeros(num_class).cuda()  # dual variable enforcing class balance
        for i in range(n):
            # Decaying step sizes and a mixing weight that grows with i.
            lr = args.cw / math.sqrt(i + 1)
            rlr = args.cr / math.sqrt(i + 1)
            beta = args.beta * math.sqrt((i + 1) / n)
            x = image_feat[idx[i], :]

            # Text-side label, re-weighted by exp(rho) for class balance.
            tlabel = F.softmax(x @ text_classifier / args.tau_t, dim=0)
            tlabel = tlabel * torch.exp(rho)
            tlabel /= torch.sum(tlabel)
            rho -= rlr * (tlabel - args.alpha / num_class)
            rho[rho < 0] = 0  # project rho onto the non-negative orthant
            text_label[i, :] = tlabel

            # Margin softmax: subtract the margin from the pseudo-label logit
            # before normalizing, sharpening the decision boundary.
            logits = x @ w / args.tau_i
            pseudo_y = torch.argmax(tlabel).item()
            logits[pseudo_y] -= margin
            vision_label = F.softmax(logits, dim=0)

            combo_label[i, :] = beta * vision_label + (1 - beta) * tlabel
            # Online gradient step on w, then re-normalize its columns.
            grad = torch.outer(x, vision_label - tlabel)
            w -= (lr / args.tau_i) * grad
            w = F.normalize(w, dim=0)

        acc1, acc5 = accuracy(text_label, image_label[idx], topk=(1, 5))
        acc_onlab[rep] = (acc1 / n) * 100
        acc1, acc5 = accuracy(combo_label, image_label[idx], topk=(1, 5))
        acc_onzeta[rep] = (acc1 / n) * 100

        # MAPLS: EM estimate of the label marginal, then label-shift correction.
        # pz is sized by num_class (matches combo_label's columns) rather than
        # len(imagenet_classes), so a partial dataset cannot cause a mismatch.
        pz = np.full(num_class, 1.0 / num_class)
        qy = mapls(combo_label, pz=pz, qy_mode="soft", max_iter=100, lam=args.lam)

        # Renamed from `w` to avoid clobbering the classifier variable above.
        prior_ratio = np.array(qy) / np.array(pz)
        # BUG FIX: combo_label_cpu was only bound when combo_label.is_cuda,
        # raising NameError for CPU tensors; now it is always defined.
        combo_label_cpu = combo_label.cpu() if combo_label.is_cuda else combo_label
        qy_probs = lsc(combo_label_cpu, 1.0 / prior_ratio)
        acc1_ls, acc5_ls = accuracy(qy_probs, image_label[idx], topk=(1, 5))
        acc_ls[rep] = (acc1_ls / n) * 100

    print('mean acc of onlab is: {:.2f}'.format(torch.mean(acc_onlab)))
    print('mean acc of onzeta is: {:.2f}'.format(torch.mean(acc_onzeta)))
    print('mean acc of MAPLS is: {:.2f}'.format(torch.mean(acc_ls)))
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
def zeroshot_classifier(clip, model, classnames, templates):
    """Build a zero-shot text classifier from prompt ensembles.

    For each class name, every template is filled in and encoded; the
    per-prompt embeddings are L2-normalized, averaged, and the average is
    normalized again. Returns a tensor whose columns are the per-class
    proxy vectors (embedding_dim x num_classes), on the GPU.
    """
    with torch.no_grad():
        class_proxies = []
        for name in classnames:
            prompts = [tpl.format(name) for tpl in templates]
            tokens = clip.tokenize(prompts).cuda()
            embeddings = model.encode_text(tokens)
            # Normalize each prompt embedding before averaging the ensemble.
            embeddings = embeddings / embeddings.norm(dim=-1, keepdim=True)
            proxy = embeddings.mean(dim=0)
            proxy = proxy / proxy.norm()
            class_proxies.append(proxy)
        return torch.stack(class_proxies, dim=1).cuda()
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
def accuracy(output, target, topk=(1,)):
    """Count correct top-k predictions.

    Args:
        output: (N, C) score matrix.
        target: (N,) ground-truth class indices.
        topk: tuple of k values to evaluate.

    Returns:
        A list with, for each k in topk, the number of samples (as a float)
        whose target appears among the k highest-scoring classes.
    """
    maxk = max(topk)
    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t().cpu()
    # hits[j, i] is True iff sample i's target is prediction rank j.
    hits = pred.eq(target.cpu().view(1, -1).expand_as(pred))
    return [hits[:k].reshape(-1).float().sum().item() for k in topk]
|
| 337 |
+
|
| 338 |
+
|
| 339 |
+
if __name__ == '__main__':
    # Sweep the margin hyper-parameter; each value triggers a full
    # evaluation run of main().
    margin_grid = [1.0, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1, 0.01, 0.001, 0.0001, 0]
    for margin_value in margin_grid:
        main(margin_value)
|
| 346 |
+
|