AnonymousUser20 committed on
Commit
178d33b
·
verified ·
1 Parent(s): e85e699

Upload 1314 files

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. OpenOOD/.gitignore +169 -0
  2. OpenOOD/.pre-commit-config.yaml +32 -0
  3. OpenOOD/CODE_OF_CONDUCT.md +128 -0
  4. OpenOOD/CONTRIBUTING.md +68 -0
  5. OpenOOD/LICENSE +21 -0
  6. OpenOOD/README.md +367 -0
  7. OpenOOD/bash_allocation.slurm +16 -0
  8. OpenOOD/bash_allocation2.slurm +16 -0
  9. OpenOOD/batch_file_deal2.py +120 -0
  10. OpenOOD/batch_file_deal3_train_method.sh +32 -0
  11. OpenOOD/batch_file_deal_post_method_Ours_Notline.py +128 -0
  12. OpenOOD/batch_file_deal_post_method_p2pNet.py +128 -0
  13. OpenOOD/batch_file_deal_vim_ablation.py +26 -0
  14. OpenOOD/codespell_ignored.txt +5 -0
  15. OpenOOD/configs/datasets/aircraft/aircraft.yml +33 -0
  16. OpenOOD/configs/datasets/aircraft/aircraft_oe.yml +12 -0
  17. OpenOOD/configs/datasets/aircraft/aircraft_ood.yml +28 -0
  18. OpenOOD/configs/datasets/bronze2/bronze2.yml +36 -0
  19. OpenOOD/configs/datasets/bronze2/bronze2_ood.yml +62 -0
  20. OpenOOD/configs/datasets/cifar10/cifar10.yml +33 -0
  21. OpenOOD/configs/datasets/cifar10/cifar10_double_label.yml +32 -0
  22. OpenOOD/configs/datasets/cifar10/cifar10_extra.yml +37 -0
  23. OpenOOD/configs/datasets/cifar10/cifar10_fsood.yml +43 -0
  24. OpenOOD/configs/datasets/cifar10/cifar10_oe.yml +12 -0
  25. OpenOOD/configs/datasets/cifar10/cifar10_ood.yml +38 -0
  26. OpenOOD/configs/datasets/cifar100/cifar100.yml +33 -0
  27. OpenOOD/configs/datasets/cifar100/cifar100_double_label.yml +32 -0
  28. OpenOOD/configs/datasets/cifar100/cifar100_extra.yml +37 -0
  29. OpenOOD/configs/datasets/cifar100/cifar100_fsood.yml +43 -0
  30. OpenOOD/configs/datasets/cifar100/cifar100_oe.yml +12 -0
  31. OpenOOD/configs/datasets/cifar100/cifar100_ood.yml +38 -0
  32. OpenOOD/configs/datasets/covid/covid.yml +29 -0
  33. OpenOOD/configs/datasets/covid/covid_fsood.yml +47 -0
  34. OpenOOD/configs/datasets/covid/covid_ood.yml +39 -0
  35. OpenOOD/configs/datasets/imagenet/imagenet.yml +33 -0
  36. OpenOOD/configs/datasets/imagenet/imagenet_double_label.yml +32 -0
  37. OpenOOD/configs/datasets/imagenet/imagenet_double_label_fsood.yml +48 -0
  38. OpenOOD/configs/datasets/imagenet/imagenet_fsood.yml +48 -0
  39. OpenOOD/configs/datasets/imagenet/imagenet_ood.yml +37 -0
  40. OpenOOD/configs/datasets/imagenet200/imagenet200.yml +33 -0
  41. OpenOOD/configs/datasets/imagenet200/imagenet200_double_label.yml +32 -0
  42. OpenOOD/configs/datasets/imagenet200/imagenet200_double_label_fsood.yml +48 -0
  43. OpenOOD/configs/datasets/imagenet200/imagenet200_fsood.yml +48 -0
  44. OpenOOD/configs/datasets/imagenet200/imagenet200_oe.yml +12 -0
  45. OpenOOD/configs/datasets/imagenet200/imagenet200_ood.yml +37 -0
  46. OpenOOD/configs/datasets/mnist/mnist.yml +33 -0
  47. OpenOOD/configs/datasets/mnist/mnist_fsood.yml +43 -0
  48. OpenOOD/configs/datasets/mnist/mnist_ood.yml +38 -0
  49. OpenOOD/configs/datasets/mvtec/bottle.yml +52 -0
  50. OpenOOD/configs/datasets/mvtec/cable.yml +52 -0
OpenOOD/.gitignore ADDED
@@ -0,0 +1,169 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # ignore some temp/test files
2
+ _test_*
3
+ *-backup*
4
+
5
+ # ignore data and output directory
6
+ data
7
+ data/
8
+ results/
9
+ checkpoints/
10
+ ipynb_checkpoints/
11
+
12
+ # Byte-compiled / optimized / DLL files
13
+ __pycache__/
14
+ *.py[cod]
15
+ *$py.class
16
+
17
+ # C extensions
18
+ *.so
19
+
20
+ # Distribution / packaging
21
+ .Python
22
+ build/
23
+ develop-eggs/
24
+ dist/
25
+ downloads/
26
+ eggs/
27
+ .eggs/
28
+ lib/
29
+ lib64/
30
+ parts/
31
+ sdist/
32
+ var/
33
+ .vs/
34
+ wheels/
35
+ pip-wheel-metadata/
36
+ share/python-wheels/
37
+ *.egg-info/
38
+ .installed.cfg
39
+ *.egg
40
+ MANIFEST
41
+
42
+ # PyInstaller
43
+ # Usually these files are written by a python script from a template
44
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
45
+ *.manifest
46
+ *.spec
47
+
48
+ # Installer logs
49
+ pip-log.txt
50
+ pip-delete-this-directory.txt
51
+
52
+ # Unit test / coverage reports
53
+ htmlcov/
54
+ .tox/
55
+ .nox/
56
+ .coverage
57
+ .coverage.*
58
+ .cache
59
+ nosetests.xml
60
+ coverage.xml
61
+ *.cover
62
+ *.py,cover
63
+ .hypothesis/
64
+ .pytest_cache/
65
+
66
+ # Translations
67
+ *.mo
68
+ *.pot
69
+
70
+ # Django stuff:
71
+ *.log
72
+ local_settings.py
73
+ db.sqlite3
74
+ db.sqlite3-journal
75
+
76
+ # Flask stuff:
77
+ instance/
78
+ .webassets-cache
79
+
80
+ # Scrapy stuff:
81
+ .scrapy
82
+
83
+ # Sphinx documentation
84
+ docs/
85
+ OpenOOD.wiki/
86
+
87
+ # PyBuilder
88
+ target/
89
+
90
+ # Jupyter Notebook
91
+ .ipynb
92
+ .ipynb_checkpoints
93
+
94
+ # IPython
95
+ profile_default/
96
+ ipython_config.py
97
+
98
+ # pyenv
99
+ .python-version
100
+
101
+ # pipenv
102
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
103
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
104
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
105
+ # install all needed dependencies.
106
+ #Pipfile.lock
107
+
108
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow
109
+ __pypackages__/
110
+
111
+ # Celery stuff
112
+ celerybeat-schedule
113
+ celerybeat.pid
114
+
115
+ # SageMath parsed files
116
+ *.sage.py
117
+
118
+ # Environments
119
+ .env
120
+ .venv
121
+ env/
122
+ venv/
123
+ ENV/
124
+ env.bak/
125
+ venv.bak/
126
+
127
+ # Spyder project settings
128
+ .spyderproject
129
+ .spyproject
130
+
131
+ # Rope project settings
132
+ .ropeproject
133
+
134
+ # mkdocs documentation
135
+ /site
136
+
137
+ # mypy
138
+ .mypy_cache/
139
+ .dmypy.json
140
+ dmypy.json
141
+
142
+ # Pyre type checker
143
+ .pyre/
144
+
145
+ # vscode debug
146
+ .vscode/
147
+
148
+ # macos files
149
+ .DS_Store
150
+
151
+ # check format
152
+ .isort.cfg
153
+
154
+ # no jupyter notebook
155
+ *.ipynb_checkpoints
156
+ *.ipynb
157
+
158
+ # ignore custom config and scripts
159
+ config/*/_*/
160
+ scripts/_*/
161
+ tools/mytools/
162
+
163
+ # ignore pretrained bit model
164
+ bit_pretrained_models/
165
+ group_config/
166
+
167
+ # local dev
168
+ local/
169
+ *legacy*
OpenOOD/.pre-commit-config.yaml ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ exclude: ^tests/data/
2
+ repos:
3
+ - repo: https://github.com/PyCQA/flake8.git
4
+ rev: 3.8.3
5
+ hooks:
6
+ - id: flake8
7
+ - repo: https://github.com/pre-commit/mirrors-yapf
8
+ rev: v0.30.0
9
+ hooks:
10
+ - id: yapf
11
+ - repo: https://github.com/pre-commit/pre-commit-hooks
12
+ rev: v3.1.0
13
+ hooks:
14
+ - id: trailing-whitespace
15
+ - id: check-yaml
16
+ - id: end-of-file-fixer
17
+ - id: double-quote-string-fixer
18
+ - id: check-merge-conflict
19
+ - id: fix-encoding-pragma
20
+ args: ["--remove"]
21
+ - id: mixed-line-ending
22
+ args: ["--fix=lf"]
23
+ - repo: https://github.com/codespell-project/codespell
24
+ rev: v2.1.0
25
+ hooks:
26
+ - id: codespell
27
+ args: ["--ignore-words=codespell_ignored.txt"]
28
+ - repo: https://github.com/myint/docformatter
29
+ rev: v1.3.1
30
+ hooks:
31
+ - id: docformatter
32
+ args: ["--in-place", "--wrap-descriptions", "79"]
OpenOOD/CODE_OF_CONDUCT.md ADDED
@@ -0,0 +1,128 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Contributor Covenant Code of Conduct
2
+
3
+ ## Our Pledge
4
+
5
+ We as members, contributors, and leaders pledge to make participation in our
6
+ community a harassment-free experience for everyone, regardless of age, body
7
+ size, visible or invisible disability, ethnicity, sex characteristics, gender
8
+ identity and expression, level of experience, education, socio-economic status,
9
+ nationality, personal appearance, race, religion, or sexual identity
10
+ and orientation.
11
+
12
+ We pledge to act and interact in ways that contribute to an open, welcoming,
13
+ diverse, inclusive, and healthy community.
14
+
15
+ ## Our Standards
16
+
17
+ Examples of behavior that contributes to a positive environment for our
18
+ community include:
19
+
20
+ * Demonstrating empathy and kindness toward other people
21
+ * Being respectful of differing opinions, viewpoints, and experiences
22
+ * Giving and gracefully accepting constructive feedback
23
+ * Accepting responsibility and apologizing to those affected by our mistakes,
24
+ and learning from the experience
25
+ * Focusing on what is best not just for us as individuals, but for the
26
+ overall community
27
+
28
+ Examples of unacceptable behavior include:
29
+
30
+ * The use of sexualized language or imagery, and sexual attention or
31
+ advances of any kind
32
+ * Trolling, insulting or derogatory comments, and personal or political attacks
33
+ * Public or private harassment
34
+ * Publishing others' private information, such as a physical or email
35
+ address, without their explicit permission
36
+ * Other conduct which could reasonably be considered inappropriate in a
37
+ professional setting
38
+
39
+ ## Enforcement Responsibilities
40
+
41
+ Community leaders are responsible for clarifying and enforcing our standards of
42
+ acceptable behavior and will take appropriate and fair corrective action in
43
+ response to any behavior that they deem inappropriate, threatening, offensive,
44
+ or harmful.
45
+
46
+ Community leaders have the right and responsibility to remove, edit, or reject
47
+ comments, commits, code, wiki edits, issues, and other contributions that are
48
+ not aligned to this Code of Conduct, and will communicate reasons for moderation
49
+ decisions when appropriate.
50
+
51
+ ## Scope
52
+
53
+ This Code of Conduct applies within all community spaces, and also applies when
54
+ an individual is officially representing the community in public spaces.
55
+ Examples of representing our community include using an official e-mail address,
56
+ posting via an official social media account, or acting as an appointed
57
+ representative at an online or offline event.
58
+
59
+ ## Enforcement
60
+
61
+ Instances of abusive, harassing, or otherwise unacceptable behavior may be
62
+ reported to the community leaders responsible for enforcement at
63
+ yangjingkang001@gmail.com.
64
+ All complaints will be reviewed and investigated promptly and fairly.
65
+
66
+ All community leaders are obligated to respect the privacy and security of the
67
+ reporter of any incident.
68
+
69
+ ## Enforcement Guidelines
70
+
71
+ Community leaders will follow these Community Impact Guidelines in determining
72
+ the consequences for any action they deem in violation of this Code of Conduct:
73
+
74
+ ### 1. Correction
75
+
76
+ **Community Impact**: Use of inappropriate language or other behavior deemed
77
+ unprofessional or unwelcome in the community.
78
+
79
+ **Consequence**: A private, written warning from community leaders, providing
80
+ clarity around the nature of the violation and an explanation of why the
81
+ behavior was inappropriate. A public apology may be requested.
82
+
83
+ ### 2. Warning
84
+
85
+ **Community Impact**: A violation through a single incident or series
86
+ of actions.
87
+
88
+ **Consequence**: A warning with consequences for continued behavior. No
89
+ interaction with the people involved, including unsolicited interaction with
90
+ those enforcing the Code of Conduct, for a specified period of time. This
91
+ includes avoiding interactions in community spaces as well as external channels
92
+ like social media. Violating these terms may lead to a temporary or
93
+ permanent ban.
94
+
95
+ ### 3. Temporary Ban
96
+
97
+ **Community Impact**: A serious violation of community standards, including
98
+ sustained inappropriate behavior.
99
+
100
+ **Consequence**: A temporary ban from any sort of interaction or public
101
+ communication with the community for a specified period of time. No public or
102
+ private interaction with the people involved, including unsolicited interaction
103
+ with those enforcing the Code of Conduct, is allowed during this period.
104
+ Violating these terms may lead to a permanent ban.
105
+
106
+ ### 4. Permanent Ban
107
+
108
+ **Community Impact**: Demonstrating a pattern of violation of community
109
+ standards, including sustained inappropriate behavior, harassment of an
110
+ individual, or aggression toward or disparagement of classes of individuals.
111
+
112
+ **Consequence**: A permanent ban from any sort of public interaction within
113
+ the community.
114
+
115
+ ## Attribution
116
+
117
+ This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118
+ version 2.0, available at
119
+ https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120
+
121
+ Community Impact Guidelines were inspired by [Mozilla's code of conduct
122
+ enforcement ladder](https://github.com/mozilla/diversity).
123
+
124
+ [homepage]: https://www.contributor-covenant.org
125
+
126
+ For answers to common questions about this code of conduct, see the FAQ at
127
+ https://www.contributor-covenant.org/faq. Translations are available at
128
+ https://www.contributor-covenant.org/translations.
OpenOOD/CONTRIBUTING.md ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ## Contributing to OpenOOD
2
+
3
+ All kinds of contributions are welcome, including but not limited to the following.
4
+
5
+ - Integrate more methods under generalized OOD detection
6
+ - Fix typo or bugs
7
+ - Add new features and components
8
+
9
+ ### Workflow
10
+
11
+ 1. fork and pull the latest OpenOOD repository
12
+ 2. checkout a new branch (do not use master branch for PRs)
13
+ 3. commit your changes
14
+ 4. create a PR
15
+
16
+ ```{note}
17
+ If you plan to add some new features that involve large changes, it is encouraged to open an issue for discussion first.
18
+ ```
19
+ ### Code style
20
+
21
+ #### Python
22
+
23
+ We adopt [PEP8](https://www.python.org/dev/peps/pep-0008/) as the preferred code style.
24
+
25
+ We use the following tools for linting and formatting:
26
+
27
+ - [flake8](http://flake8.pycqa.org/en/latest/): A wrapper around some linter tools.
28
+ - [yapf](https://github.com/google/yapf): A formatter for Python files.
29
+ - [isort](https://github.com/timothycrosley/isort): A Python utility to sort imports.
30
+ - [markdownlint](https://github.com/markdownlint/markdownlint): A linter to check markdown files and flag style issues.
31
+ - [docformatter](https://github.com/myint/docformatter): A formatter to format docstring.
32
+
33
+ Style configurations of yapf and isort can be found in [setup.cfg](./setup.cfg).
34
+
35
+ We use [pre-commit hook](https://pre-commit.com/) that checks and formats for `flake8`, `yapf`, `isort`, `trailing whitespaces`, `markdown files`,
36
+ fixes `end-of-files`, `double-quoted-strings`, `python-encoding-pragma`, `mixed-line-ending`, sorts `requirements.txt` automatically on every commit.
37
+ The config for a pre-commit hook is stored in [.pre-commit-config](./.pre-commit-config.yaml).
38
+
39
+ After you clone the repository, you will need to install and initialize the pre-commit hook.
40
+
41
+ ```shell
42
+ pip install -U pre-commit
43
+ ```
44
+
45
+ From the repository folder
46
+
47
+ ```shell
48
+ pre-commit install
49
+ ```
50
+
51
+ ## Contributing to OpenOOD leaderboard
52
+
53
+ We welcome new entries submitted to the leaderboard. Please follow the instructions below to submit your results.
54
+
55
+ 1. Evaluate your model/method with OpenOOD's benchmark and evaluator such that the comparison is fair.
56
+
57
+ 2. Report your new results by opening an issue. Remember to specify the following information:
58
+
59
+ - **`Training`**: The training method of your model, e.g., `CrossEntropy`.
60
+ - **`Postprocessor`**: The postprocessor of your model, e.g., `MSP`, `ReAct`, etc.
61
+ - **`Near-OOD AUROC`**: The AUROC score of your model on the near-OOD split.
62
+ - **`Far-OOD AUROC`**: The AUROC score of your model on the far-OOD split.
63
+ - **`ID Accuracy`**: The accuracy of your model on the ID test data.
64
+ - **`Outlier Data`**: Whether your model uses the outlier data for training.
65
+ - **`Model Arch.`**: The architecture of your base classifier, e.g., `ResNet18`.
66
+ - **`Additional Description`**: Any additional description of your model, e.g., `100 epochs`, `torchvision pretrained`, etc.
67
+
68
+ 3. Ideally, send us a copy of your model checkpoint so that we can verify your results on our end. You can either upload the checkpoint to a cloud storage and share the link in the issue, or send us an email at [jz288@duke.edu](mailto:jz288@duke.edu).
OpenOOD/LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2021 Jingkang Yang
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
OpenOOD/README.md ADDED
@@ -0,0 +1,367 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # OpenOOD: Benchmarking Generalized OOD Detection
2
+
3
+ <!--
4
+ | :exclamation: We are looking forward to further extending the scope and building OpenOOD v2.0. Specifically, we are interested in 1) incorporating more modalities (e.g., text/language), 2) OOD in vision-language models, multi-modal foundation models, and large language models. If you want to join us or have any other ideas/thoughts, please don't hesitate to contact [jingkang001@e.ntu.edu.sg](mailto:jingkang001@e.ntu.edu.sg)! |
5
+ |-----------------------------------------|
6
+ --->
7
+
8
+
9
+ | :exclamation: When using OpenOOD in your research, it is vital to cite both the OpenOOD benchmark (versions 1 and 1.5) and the individual works that have contributed to your research. Accurate citation acknowledges the efforts and contributions of all researchers involved. For example, if your work involves the NINCO benchmark within OpenOOD, please include a citation for NINCO in addition to OpenOOD.|
10
+ |-----------------------------------------|
11
+
12
+
13
+ [![paper](https://img.shields.io/badge/Paper-OpenReview%20(v1.0)-b31b1b?style=for-the-badge)](https://openreview.net/pdf?id=gT6j4_tskUt)
14
+ &nbsp;&nbsp;&nbsp;
15
+ [![paper](https://img.shields.io/badge/PAPER-arXiv%20(v1.5)-yellowgreen?style=for-the-badge)](https://arxiv.org/abs/2306.09301)
16
+ &nbsp;&nbsp;&nbsp;
17
+
18
+
19
+
20
+ [![paper](https://img.shields.io/badge/leaderboard-35%2B%20Methods-228c22?style=for-the-badge)](https://zjysteven.github.io/OpenOOD/)
21
+ &nbsp;&nbsp;&nbsp;
22
+ [![paper](https://img.shields.io/badge/colab-tutorial-orange?style=for-the-badge)](https://colab.research.google.com/drive/1tvTpCM1_ju82Yygu40fy7Lc0L1YrlkQF?usp=sharing)
23
+ &nbsp;&nbsp;&nbsp;
24
+ [![paper](https://img.shields.io/badge/Forum-SLACK-797ef6?style=for-the-badge)](https://openood.slack.com/)
25
+
26
+
27
+ <img src="https://live.staticflickr.com/65535/52145428300_78fd595193_k.jpg" width="800">
28
+
29
+
30
+ This repository reproduces representative methods within the [`Generalized Out-of-Distribution Detection Framework`](https://arxiv.org/abs/2110.11334),
31
+ aiming to make a fair comparison across methods that were initially developed for anomaly detection, novelty detection, open set recognition, and out-of-distribution detection.
32
+ This codebase is still under construction.
33
+ Comments, issues, contributions, and collaborations are all welcomed!
34
+
35
+ | ![timeline.jpg](https://live.staticflickr.com/65535/52144751937_95282e7de3_k.jpg) |
36
+ |:--:|
37
+ | <b>Timeline of the methods that OpenOOD supports. More methods are included as OpenOOD iterates.</b>|
38
+
39
+
40
+ ## Updates
41
+ - **27 Oct, 2023**: A short version of OpenOOD `v1.5` is accepted to [NeurIPS 2023 Workshop on Distribution Shifts](https://sites.google.com/view/distshift2023/home?authuser=0) as an oral presentation. You may want to check out our [presentation slides](https://drive.google.com/file/d/1qlLQxWpYqFMwjgAHayV_ly2MSGbQ8b18/view?usp=drive_link) and [video recording](https://youtu.be/l58qYmY9NVw).
42
+ - **25 Sept, 2023**: OpenOOD now supports OOD detection with foundation models including zero-shot CLIP and DINOv2 linear probe. Check out the example evaluation script [here](https://github.com/Jingkang50/OpenOOD/blob/main/scripts/eval_ood_imagenet_foundation_models.py).
43
+ - **16 June, 2023**: :boom::boom: We are releasing OpenOOD `v1.5`, which includes the following exciting updates. A detailed changelog is provided in the [Wiki](https://github.com/Jingkang50/OpenOOD/wiki/OpenOOD-v1.5-change-log). An overview of the supported methods and benchmarks (with paper links) is available [here](https://github.com/Jingkang50/OpenOOD/wiki/OpenOOD-v1.5-methods-&-benchmarks-overview).
44
+ - A new [report](https://arxiv.org/abs/2306.09301) which provides benchmarking results on ImageNet and for full-spectrum detection.
45
+ - A unified, easy-to-use evaluator that allows evaluation by simply creating an evaluator instance and calling its functions. Check out this [colab tutorial](https://colab.research.google.com/drive/1tvTpCM1_ju82Yygu40fy7Lc0L1YrlkQF?usp=sharing)!
46
+ - A live [leaderboard](https://zjysteven.github.io/OpenOOD/) that tracks the state-of-the-art of this field.
47
+ - **14 October, 2022**: OpenOOD `v1.0` is accepted to NeurIPS 2022. Check the report [here](https://arxiv.org/abs/2210.07242).
48
+ - **14 June, 2022**: We release `v0.5`.
49
+ - **12 April, 2022**: Primary release to support [Full-Spectrum OOD Detection](https://arxiv.org/abs/2204.05306).
50
+
51
+ ## FAQ
52
+ - `APS_mode` means Automatic (hyper)Parameter Searching mode, which enables the model to validate all the hyperparameters in the sweep list based on the validation ID/OOD set. The default value is False. Check [here](https://github.com/Jingkang50/OpenOOD/blob/main/configs/postprocessors/dice.yml) for example.
53
+
54
+
55
+ ## Get Started
56
+
57
+ ### v1.5 (up-to-date)
58
+ #### Installation
59
+ OpenOOD now supports installation via pip.
60
+ ```
61
+ pip install git+https://github.com/Jingkang50/OpenOOD
62
+ # optional, if you want to use CLIP
63
+ # pip install git+https://github.com/openai/CLIP.git
64
+ ```
65
+
66
+ #### Data
67
+ If you only use our evaluator, the benchmarks for evaluation will be automatically downloaded by the evaluator (again check out this [tutorial](https://colab.research.google.com/drive/1tvTpCM1_ju82Yygu40fy7Lc0L1YrlkQF?usp=sharing)). If you would like to also use OpenOOD for training, you can get all data with our [downloading script](https://github.com/Jingkang50/OpenOOD/tree/main/scripts/download). Note that ImageNet-1K training images should be downloaded from its official website.
68
+
69
+ #### Pre-trained checkpoints
70
+ OpenOOD v1.5 focuses on 4 ID datasets, and we release pre-trained models accordingly.
71
+ - CIFAR-10 [[Google Drive]](https://drive.google.com/file/d/1byGeYxM_PlLjT72wZsMQvP6popJeWBgt/view?usp=drive_link): ResNet-18 classifiers trained with cross-entropy loss from 3 training runs.
72
+ - CIFAR-100 [[Google Drive]](https://drive.google.com/file/d/1s-1oNrRtmA0pGefxXJOUVRYpaoAML0C-/view?usp=drive_link): ResNet-18 classifiers trained with cross-entropy loss from 3 training runs.
73
+ - ImageNet-200 [[Google Drive]](https://drive.google.com/file/d/1ddVmwc8zmzSjdLUO84EuV4Gz1c7vhIAs/view?usp=drive_link): ResNet-18 classifiers trained with cross-entropy loss from 3 training runs.
74
+ - ImageNet-1K [[Google Drive]](https://drive.google.com/file/d/15PdDMNRfnJ7f2oxW6lI-Ge4QJJH3Z0Fy/view?usp=drive_link): ResNet-50 classifiers including 1) the one from torchvision, 2) the ones that are trained by us with specific methods such as MOS, CIDER, and 3) the official checkpoints of data augmentation methods such as AugMix, PixMix.
75
+
76
+ Again, these checkpoints can be downloaded with the downloading script [here](https://github.com/Jingkang50/OpenOOD/tree/main/scripts/download).
77
+
78
+
79
+ Our codebase accesses the datasets from `./data/` and pretrained models from `./results/checkpoints/` by default.
80
+ ```
81
+ ├── ...
82
+ ├── data
83
+ │ ├── benchmark_imglist
84
+ │ ├── images_classic
85
+ │ └── images_largescale
86
+ ├── openood
87
+ ├── results
88
+ │ ├── checkpoints
89
+ │ └── ...
90
+ ├── scripts
91
+ ├── main.py
92
+ ├── ...
93
+ ```
94
+
95
+ #### Training and evaluation scripts
96
+ We provide training and evaluation scripts for all the methods we support in [scripts folder](https://github.com/Jingkang50/OpenOOD/tree/main/scripts).
97
+
98
+ ---
99
+ ## Supported Benchmarks (10)
100
+ This part lists all the benchmarks we support. Feel free to include more.
101
+
102
+ <img src="https://live.staticflickr.com/65535/52146310895_7458dd8cbc_k.jpg" width="800">
103
+
104
+ <details open>
105
+ <summary><b>Anomaly Detection (1)</b></summary>
106
+
107
+ > - [x] [MVTec-AD](https://www.mvtec.com/company/research/datasets/mvtec-ad)
108
+ </details>
109
+
110
+ <details open>
111
+ <summary><b>Open Set Recognition (4)</b></summary>
112
+
113
+ > - [x] [MNIST-4/6]()
114
+ > - [x] [CIFAR-4/6]()
115
+ > - [x] [CIFAR-40/60]()
116
+ > - [x] [TinyImageNet-20/180]()
117
+ </details>
118
+
119
+ <details open>
120
+ <summary><b>Out-of-Distribution Detection (6)</b></summary>
121
+
122
+ > - [x] [BIMCV (A COVID X-Ray Dataset)]()
123
+ > > Near-OOD: `CT-SCAN`, `X-Ray-Bone`;<br>
124
+ > > Far-OOD: `MNIST`, `CIFAR-10`, `Texture`, `Tiny-ImageNet`;<br>
125
+ > - [x] [MNIST]()
126
+ > > Near-OOD: `NotMNIST`, `FashionMNIST`;<br>
127
+ > > Far-OOD: `Texture`, `CIFAR-10`, `TinyImageNet`, `Places365`;<br>
128
+ > - [x] [CIFAR-10]()
129
+ > > Near-OOD: `CIFAR-100`, `TinyImageNet`;<br>
130
+ > > Far-OOD: `MNIST`, `SVHN`, `Texture`, `Places365`;<br>
131
+ > - [x] [CIFAR-100]()
132
+ > > Near-OOD: `CIFAR-10`, `TinyImageNet`;<br>
133
+ > > Far-OOD: `MNIST`, `SVHN`, `Texture`, `Places365`;<br>
134
+ > - [x] [ImageNet-200]()
135
+ > > Near-OOD: `SSB-hard`, `NINCO`;<br>
136
+ > > Far-OOD: `iNaturalist`, `Texture`, `OpenImage-O`;<br>
137
+ > > Covariate-Shifted ID: `ImageNet-C`, `ImageNet-R`, `ImageNet-v2`;
138
+ > - [x] [ImageNet-1K]()
139
+ > > Near-OOD: `SSB-hard`, `NINCO`;<br>
140
+ > > Far-OOD: `iNaturalist`, `Texture`, `OpenImage-O`;<br>
141
+ > > Covariate-Shifted ID: `ImageNet-C`, `ImageNet-R`, `ImageNet-v2`;
142
+ </details>
143
+
144
+ Note that OpenOOD v1.5 emphasizes and focuses on the last 4 benchmarks for OOD detection.
145
+
146
+ ---
147
+ ## Supported Backbones (6)
148
+ This part lists all the backbones we will support in our codebase, including CNN-based and Transformer-based models. Backbones like ResNet-50 and Transformer have ImageNet-1K/22K pretrained models.
149
+
150
+ <details open>
151
+ <summary><b>CNN-based Backbones (4)</b></summary>
152
+
153
+ > - [x] [LeNet-5](http://yann.lecun.com/exdb/lenet/)
154
+ > - [x] [ResNet-18](https://openaccess.thecvf.com/content_cvpr_2016/html/He_Deep_Residual_Learning_CVPR_2016_paper.html)
155
+ > - [x] [WideResNet-28](https://arxiv.org/abs/1605.07146)
156
+ > - [x] [ResNet-50](https://openaccess.thecvf.com/content_cvpr_2016/html/He_Deep_Residual_Learning_CVPR_2016_paper.html) ([BiT](https://github.com/google-research/big_transfer))
157
+ </details>
158
+
159
+
160
+ <details open>
161
+ <summary><b>Transformer-based Architectures (2)</b></summary>
162
+
163
+ > - [x] [ViT](https://github.com/google-research/vision_transformer) ([DeiT](https://github.com/facebookresearch/deit))
164
+ > - [x] [Swin Transformer](https://openaccess.thecvf.com/content/ICCV2021/html/Liu_Swin_Transformer_Hierarchical_Vision_Transformer_Using_Shifted_Windows_ICCV_2021_paper.html)
165
+ </details>
166
+
167
+ ---
168
+ ## Supported Methods (50+)
169
+ This part lists all the methods we include in this codebase. Up to `v1.5`, we totally support **more than 50 popular methods** for generalized OOD detection.
170
+
171
+ All the supported methodologies can be placed in the following four categories.
172
+
173
+ ![density] &nbsp; ![reconstruction] &nbsp; ![classification] &nbsp; ![distance]
174
+
175
+ We also note our supported methodologies with the following tags if they have special designs in the corresponding steps, compared to the standard classifier training process.
176
+
177
+ ![preprocess] &nbsp; ![extradata] &nbsp; ![training] &nbsp; ![postprocess]
178
+
179
+ <!--
180
+ density: d0e9ff,
181
+ reconstruction: c2e2de,
182
+ classification: fdd7e6,
183
+ distance: f4d5b3 -->
184
+
185
+ <details open>
186
+ <summary><b>Anomaly Detection (5)</b></summary>
187
+
188
+ > - [x] [![](https://img.shields.io/badge/ICML'18-Deep&#8211;SVDD-f4d5b3?style=for-the-badge)](https://github.com/lukasruff/Deep-SVDD-PyTorch) ![training] ![postprocess]
189
+ > - [x] [![](https://img.shields.io/badge/arXiv'20-KDAD-f4d5b3?style=for-the-badge)]()
190
+ ![training] ![postprocess]
191
+ > - [x] [![](https://img.shields.io/badge/CVPR'21-CutPaste-d0e9ff?style=for-the-badge)](https://github.com/lukasruff/Deep-SVDD-PyTorch)
192
+ ![training] ![postprocess]
193
+ > - [x] [![](https://img.shields.io/badge/arXiv'21-PatchCore-f4d5b3?style=for-the-badge)](https://github.com/lukasruff/Deep-SVDD-PyTorch) ![training] ![postprocess]
194
+ > - [x] [![](https://img.shields.io/badge/ICCV'21-DRÆM-c2e2de?style=for-the-badge)](https://github.com/lukasruff/Deep-SVDD-PyTorch) ![training] ![postprocess]
195
+ </details>
196
+
197
+
198
+ <details open>
199
+ <summary><b>Open Set Recognition (3)</b></summary>
200
+
201
+ > Post-Hoc Methods (1):
202
+ > - [x] [![](https://img.shields.io/badge/CVPR'16-OpenMax-d0e9ff?style=for-the-badge)](https://github.com/13952522076/Open-Set-Recognition) ![postprocess]
203
+ > - [x] [![](https://img.shields.io/badge/ICCV'21-OpenGAN-fdd7e6?style=for-the-badge)](https://github.com/aimerykong/OpenGAN/tree/main/utils) ![postprocess]
204
+
205
+ > Training Methods (1):
206
+ > - [x] [![](https://img.shields.io/badge/TPAMI'21-ARPL-f4d5b3?style=for-the-badge)](https://github.com/iCGY96/ARPL) ![training] ![postprocess]
207
+ </details>
208
+
209
+
210
+ <details open>
211
+ <summary><b>Out-of-Distribution Detection (38)</b></summary>
212
+
213
+ <!--
214
+ density: d0e9ff,
215
+ reconstruction: c2e2de,
216
+ classification: fdd7e6,
217
+ distance: f4d5b3 -->
218
+
219
+ > Post-Hoc Methods (22):
220
+ > - [x] [![msp](https://img.shields.io/badge/ICLR'17-MSP-fdd7e6?style=for-the-badge)](https://openreview.net/forum?id=Hkg4TI9xl)
221
+ > - [x] [![odin](https://img.shields.io/badge/ICLR'18-ODIN-fdd7e6?style=for-the-badge)](https://openreview.net/forum?id=H1VGkIxRZ) &nbsp;&nbsp; ![postprocess]
222
+ > - [x] [![mds](https://img.shields.io/badge/NeurIPS'18-MDS-f4d5b3?style=for-the-badge)](https://papers.nips.cc/paper/2018/hash/abdeb6f575ac5c6676b747bca8d09cc2-Abstract.html) &nbsp;&nbsp; ![postprocess]
223
+ > - [x] [![mdsensemble](https://img.shields.io/badge/NeurIPS'18-MDSEns-f4d5b3?style=for-the-badge)](https://papers.nips.cc/paper/2018/hash/abdeb6f575ac5c6676b747bca8d09cc2-Abstract.html) &nbsp;&nbsp; ![postprocess]
224
+ > - [x] [![gram](https://img.shields.io/badge/ICML'20-Gram-f4d5b3?style=for-the-badge)](https://github.com/VectorInstitute/gram-ood-detection) &nbsp;&nbsp; ![postprocess]
225
+ > - [x] [![ebo](https://img.shields.io/badge/NeurIPS'20-EBO-d0e9ff?style=for-the-badge)](https://github.com/wetliu/energy_ood) &nbsp;&nbsp; ![postprocess]
226
+ > - [x] [![rmds](https://img.shields.io/badge/ARXIV'21-RMDS-f4d5b3?style=for-the-badge)](https://arxiv.org/abs/2106.09022) &nbsp;&nbsp; ![postprocess]
227
+ > - [x] [![gradnorm](https://img.shields.io/badge/NeurIPS'21-GradNorm-fdd7e6?style=for-the-badge)](https://github.com/deeplearning-wisc/gradnorm_ood) &nbsp;&nbsp; ![postprocess]
228
+ > - [x] [![react](https://img.shields.io/badge/NeurIPS'21-ReAct-fdd7e6?style=for-the-badge)](https://github.com/deeplearning-wisc/react) &nbsp;&nbsp; ![postprocess]
229
+ > - [x] [![mls](https://img.shields.io/badge/ICML'22-MLS-fdd7e6?style=for-the-badge)](https://github.com/hendrycks/anomaly-seg) &nbsp;&nbsp; ![postprocess]
230
+ > - [x] [![klm](https://img.shields.io/badge/ICML'22-KL&#8211;Matching-fdd7e6?style=for-the-badge)](https://github.com/hendrycks/anomaly-seg) &nbsp;&nbsp; ![postprocess]
231
+ > - [x] [![sem](https://img.shields.io/badge/arXiv'22-SEM-fdd7e6?style=for-the-badge)]() &nbsp;&nbsp; ![postprocess]
232
+ > - [x] [![vim](https://img.shields.io/badge/CVPR'22-VIM-fdd7e6?style=for-the-badge)](https://ooddetection.github.io/) &nbsp;&nbsp; ![postprocess]
233
+ > - [x] [![knn](https://img.shields.io/badge/ICML'22-KNN-fdd7e6?style=for-the-badge)](https://github.com/deeplearning-wisc/knn-ood) &nbsp;&nbsp; ![postprocess]
234
+ > - [x] [![dice](https://img.shields.io/badge/ECCV'22-DICE-d0e9ff?style=for-the-badge)](https://github.com/deeplearning-wisc/dice) &nbsp;&nbsp; ![postprocess]
235
+ > - [x] [![rankfeat](https://img.shields.io/badge/NEURIPS'22-RANKFEAT-fdd7e6?style=for-the-badge)](https://github.com/KingJamesSong/RankFeat) &nbsp;&nbsp; ![postprocess]
236
+ > - [x] [![ash](https://img.shields.io/badge/ICLR'23-ASH-fdd7e6?style=for-the-badge)](https://andrijazz.github.io/ash) &nbsp;&nbsp; ![postprocess]
237
+ > - [x] [![she](https://img.shields.io/badge/ICLR'23-SHE-fdd7e6?style=for-the-badge)](https://github.com/zjs975584714/SHE) &nbsp;&nbsp; ![postprocess]
238
+ > - [x] [![gen](https://img.shields.io/badge/CVPR'23-GEN-fdd7e6?style=for-the-badge)](https://openaccess.thecvf.com/content/CVPR2023/papers/Liu_GEN_Pushing_the_Limits_of_Softmax-Based_Out-of-Distribution_Detection_CVPR_2023_paper.pdf) &nbsp;&nbsp; ![postprocess]
239
+ > - [x] [![nnguide](https://img.shields.io/badge/ICCV'23-NNGuide-fdd7e6?style=for-the-badge)](https://arxiv.org/abs/2309.14888) &nbsp;&nbsp; ![postprocess]
240
+ > - [x] [![relation](https://img.shields.io/badge/NEURIPS'23-Relation-fdd7e6?style=for-the-badge)](https://arxiv.org/abs/2301.12321) &nbsp;&nbsp; ![postprocess]
241
+ > - [x] [![scale](https://img.shields.io/badge/ICLR'24-Scale-fdd7e6?style=for-the-badge)](https://github.com/kai422/SCALE) &nbsp;&nbsp; ![postprocess]
242
+
243
+ > Training Methods (12):
244
+ > - [x] [![confbranch](https://img.shields.io/badge/arXiv'18-ConfBranch-fdd7e6?style=for-the-badge)](https://github.com/uoguelph-mlrg/confidence_estimation) &nbsp;&nbsp; ![preprocess] &nbsp; ![training]
245
+ > - [x] [![rotpred](https://img.shields.io/badge/neurips'19-RotPred-fdd7e6?style=for-the-badge)](https://github.com/hendrycks/ss-ood) &nbsp;&nbsp; ![preprocess] &nbsp; ![training]
246
+ > - [x] [![godin](https://img.shields.io/badge/CVPR'20-G&#8211;ODIN-fdd7e6?style=for-the-badge)](https://github.com/guyera/Generalized-ODIN-Implementation) &nbsp;&nbsp; ![training] &nbsp; ![postprocess]
247
+ > - [x] [![csi](https://img.shields.io/badge/NeurIPS'20-CSI-fdd7e6?style=for-the-badge)](https://github.com/alinlab/CSI) &nbsp;&nbsp; ![preprocess] &nbsp; ![training] &nbsp; ![postprocess]
248
+ > - [x] [![ssd](https://img.shields.io/badge/ICLR'21-SSD-fdd7e6?style=for-the-badge)](https://github.com/inspire-group/SSD) &nbsp;&nbsp; ![training] &nbsp; ![postprocess]
249
+ > - [x] [![mos](https://img.shields.io/badge/CVPR'21-MOS-fdd7e6?style=for-the-badge)](https://github.com/deeplearning-wisc/large_scale_ood) &nbsp;&nbsp; ![training]
250
+ > - [x] [![vos](https://img.shields.io/badge/ICLR'22-VOS-d0e9ff?style=for-the-badge)](https://github.com/deeplearning-wisc/vos) &nbsp;&nbsp; ![training] &nbsp; ![postprocess]
251
+ > - [x] [![logitnorm](https://img.shields.io/badge/ICML'22-LogitNorm-fdd7e6?style=for-the-badge)](https://github.com/hongxin001/logitnorm_ood) &nbsp;&nbsp; ![training] &nbsp; ![preprocess]
252
+ > - [x] [![cider](https://img.shields.io/badge/ICLR'23-CIDER-f4d5b3?style=for-the-badge)](https://github.com/deeplearning-wisc/cider) &nbsp;&nbsp; ![training] &nbsp; ![postprocess]
253
+ > - [x] [![npos](https://img.shields.io/badge/ICLR'23-NPOS-f4d5b3?style=for-the-badge)](https://github.com/deeplearning-wisc/npos) &nbsp;&nbsp; ![training] &nbsp; ![postprocess]
254
+ > - [x] [![t2fnorm](https://img.shields.io/badge/arXiv'23-T2FNorm-f4d5b3?style=for-the-badge)](https://arxiv.org/abs/2305.17797) &nbsp;&nbsp; ![training]
255
+ > - [x] [![ish](https://img.shields.io/badge/ICLR'24-ish-fdd7e6?style=for-the-badge)](https://github.com/kai422/SCALE) &nbsp;&nbsp; ![training]
256
+
257
+
258
+ > Training With Extra Data (4):
259
+ > - [x] [![oe](https://img.shields.io/badge/ICLR'19-OE-fdd7e6?style=for-the-badge)](https://openreview.net/forum?id=HyxCxhRcY7) &nbsp;&nbsp; ![extradata] &nbsp; ![training]
260
+ > - [x] [![mcd](https://img.shields.io/badge/ICCV'19-MCD-fdd7e6?style=for-the-badge)](https://openaccess.thecvf.com/content_ICCV_2019/papers/Yu_Unsupervised_Out-of-Distribution_Detection_by_Maximum_Classifier_Discrepancy_ICCV_2019_paper.pdf) &nbsp;&nbsp; ![extradata] &nbsp; ![training]
261
+ > - [x] [![udg](https://img.shields.io/badge/ICCV'21-UDG-fdd7e6?style=for-the-badge)](https://openaccess.thecvf.com/content/ICCV2021/html/Yang_Semantically_Coherent_Out-of-Distribution_Detection_ICCV_2021_paper.html) &nbsp;&nbsp; ![extradata] &nbsp; ![training]
262
+ > - [x] [![mixoe](https://img.shields.io/badge/WACV'23-MixOE-fdd7e6?style=for-the-badge)](https://openaccess.thecvf.com/content/WACV2023/html/Zhang_Mixture_Outlier_Exposure_Towards_Out-of-Distribution_Detection_in_Fine-Grained_Environments_WACV_2023_paper.html) &nbsp;&nbsp; ![extradata] &nbsp; ![training]
263
+ </details>
264
+
265
+
266
+ <details open>
267
+ <summary><b>Method Uncertainty (4)</b></summary>
268
+
269
+ > - [x] [![mcdropout](https://img.shields.io/badge/ICML'16-MC&#8211;Dropout-fdd7e6?style=for-the-badge)]() &nbsp;&nbsp; ![training] &nbsp; ![postprocess]
270
+ > - [x] [![deepensemble](https://img.shields.io/badge/NeurIPS'17-Deep&#8211;Ensemble-fdd7e6?style=for-the-badge)]() &nbsp;&nbsp; ![training]
271
+ > - [x] [![tempscale](https://img.shields.io/badge/ICML'17-Temp&#8211;Scaling-fdd7e6?style=for-the-badge)](https://proceedings.mlr.press/v70/guo17a.html) &nbsp;&nbsp; ![postprocess]
272
+ > - [x] [![rts](https://img.shields.io/badge/AAAI'23-RTS-fdd7e6?style=for-the-badge)]() &nbsp;&nbsp; ![training] &nbsp; ![postprocess]
273
+ </details>
274
+
275
+
276
+ <details open>
277
+ <summary><b>Data Augmentation (8)</b></summary>
278
+
279
+ > - [x] [![mixup](https://img.shields.io/badge/ICLR'18-Mixup-fdd7e6?style=for-the-badge)]() &nbsp;&nbsp; ![preprocess]
280
+ > - [x] [![cutmix](https://img.shields.io/badge/ICCV'19-CutMix-fdd7e6?style=for-the-badge)]() &nbsp;&nbsp; ![preprocess]
281
+ > - [x] [![styleaugment](https://img.shields.io/badge/ICLR'19-StyleAugment-fdd7e6?style=for-the-badge)](https://openreview.net/forum?id=Bygh9j09KX) &nbsp;&nbsp; ![preprocess]
282
+ > - [x] [![randaugment](https://img.shields.io/badge/CVPRW'20-RandAugment-fdd7e6?style=for-the-badge)](https://openaccess.thecvf.com/content_CVPRW_2020/html/w40/Cubuk_Randaugment_Practical_Automated_Data_Augmentation_With_a_Reduced_Search_Space_CVPRW_2020_paper.html) &nbsp;&nbsp; ![preprocess]
283
+ > - [x] [![augmix](https://img.shields.io/badge/ICLR'20-AugMix-fdd7e6?style=for-the-badge)](https://github.com/google-research/augmix) &nbsp;&nbsp; ![preprocess]
284
+ > - [x] [![deepaugment](https://img.shields.io/badge/ICCV'21-DeepAugment-fdd7e6?style=for-the-badge)](https://github.com/hendrycks/imagenet-r) &nbsp;&nbsp; ![preprocess]
285
+ > - [x] [![pixmix](https://img.shields.io/badge/CVPR'21-PixMix-fdd7e6?style=for-the-badge)](https://openaccess.thecvf.com/content/CVPR2022/html/Hendrycks_PixMix_Dreamlike_Pictures_Comprehensively_Improve_Safety_Measures_CVPR_2022_paper.html) &nbsp;&nbsp; ![preprocess]
286
+ > - [x] [![regmixup](https://img.shields.io/badge/ICLR'23-RegMixup-fdd7e6?style=for-the-badge)](https://github.com/FrancescoPinto/RegMixup) &nbsp;&nbsp; ![preprocess]
287
+ </details>
288
+
289
+ ---
290
+ ## Contributing
291
+ We appreciate all contributions to improve OpenOOD.
292
+ We sincerely welcome community users to participate in these projects. Please refer to [CONTRIBUTING.md](https://github.com/Jingkang50/OpenOOD/blob/main/CONTRIBUTING.md) for the contributing guideline.
293
+
294
+ ## Contributors
295
+ <a href="https://github.com/jingkang50/openood/graphs/contributors">
296
+ <img src="https://contrib.rocks/image?repo=jingkang50/openood" />
297
+ </a>
298
+
299
+
300
+ ## Citation
301
+ If you find our repository useful for your research, please consider citing our paper:
302
+ ```bibtex
303
+ # v1.5 report
304
+ @article{zhang2023openood,
305
+ title={OpenOOD v1.5: Enhanced Benchmark for Out-of-Distribution Detection},
306
+ author={Zhang, Jingyang and Yang, Jingkang and Wang, Pengyun and Wang, Haoqi and Lin, Yueqian and Zhang, Haoran and Sun, Yiyou and Du, Xuefeng and Zhou, Kaiyang and Zhang, Wayne and Li, Yixuan and Liu, Ziwei and Chen, Yiran and Li, Hai},
307
+ journal={arXiv preprint arXiv:2306.09301},
308
+ year={2023}
309
+ }
310
+
311
+ # v1.0 report
312
+ @article{yang2022openood,
313
+ author = {Yang, Jingkang and Wang, Pengyun and Zou, Dejian and Zhou, Zitang and Ding, Kunyuan and Peng, Wenxuan and Wang, Haoqi and Chen, Guangyao and Li, Bo and Sun, Yiyou and Du, Xuefeng and Zhou, Kaiyang and Zhang, Wayne and Hendrycks, Dan and Li, Yixuan and Liu, Ziwei},
314
+ title = {OpenOOD: Benchmarking Generalized Out-of-Distribution Detection},
315
+ year = {2022}
316
+ }
317
+
318
+ @article{yang2022fsood,
319
+ title = {Full-Spectrum Out-of-Distribution Detection},
320
+ author = {Yang, Jingkang and Zhou, Kaiyang and Liu, Ziwei},
321
+ journal={arXiv preprint arXiv:2204.05306},
322
+ year = {2022}
323
+ }
324
+
325
+ @article{yang2021oodsurvey,
326
+ title={Generalized Out-of-Distribution Detection: A Survey},
327
+ author={Yang, Jingkang and Zhou, Kaiyang and Li, Yixuan and Liu, Ziwei},
328
+ journal={arXiv preprint arXiv:2110.11334},
329
+ year={2021}
330
+ }
331
+
332
+ @inproceedings{bitterwolf2023ninco,
333
+ title={In or Out? Fixing ImageNet Out-of-Distribution Detection Evaluation},
334
+ author={Julian Bitterwolf and Maximilian Mueller and Matthias Hein},
335
+ booktitle={ICML},
336
+ year={2023},
337
+ url={https://proceedings.mlr.press/v202/bitterwolf23a.html}
338
+ }
339
+ ```
340
+
341
+
342
+
343
+
344
+
345
+
346
+
347
+ [density]: https://img.shields.io/badge/Density-d0e9ff?style=for-the-badge&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADIAAAAyCAYAAAAeP4ixAAAABmJLR0QA/wD/AP+gvaeTAAACuElEQVRoge2Zu2tUQRSHv9UomO0SFKNBrJQkFnY2Ij6iJig+EMRKbIUk+B9YWxn/gdSCIIIkgmiChY3BwkZN8IVoIgQECw0KSSxmxj1z9e7eOTObXeR+cGEm95wz58e8zt1AScm68AR4a58DLc4liI5Mfxew27a3rHMuUWxodQKpKIW0G6WQdqMU0m5k75GUdAJHgb3AGjAHTAPLTRzzD+/toGvAYWWMCjAGLIlY7lkCRq1NU4kVUgEm+FtA9pmgyWJihdzCT/gbMAU8sG357maCfHOJETKEn+hjoEe877F/kzYnIvPNRSukM+P7DKj+w64KzAq7dzSpONUKGRV+P4GBOrb7rI2zv6rKtAEaIZuAD8JvvIDPOP6sJL8GNEIuCJ9fmG+aRvTiz8rZ4EwzpLjZL4v2HeBjAZ9PwF3Rv5IgD4/QGenGzILzOR4w1kn8fdUVlGmG2BkZxuwRgAXM8VqUR8AX296MOb7VxAo5Jdr3gdUA3xVgMidWMDFCNuIvpck8wzpInyESnl4he+SQsP2BuRRDqWKqYRfnoCIGEDcjw6I9gxETynfMb2kO9fKKESLrpKmIOHJ5Jau9ii6tbsxmdbZ7IsbsE3FWgK2aINoZGRS+n4F5ZRyAV5gL0uVzRBMkRojjoTKGZFq0j2kCpBAScgnmIWMM5loFUGSPDAibVfyPJy07bCwXtz80gGZGzov2LLCoiJFlAXgu+udCA2iEyJL7nsI/DxkruqxvtLR24i+BvtgBBf34S7Y3xDl0Ri5R+xlnHnN0puIltWO8AlwMcQ4R0gGMiP7tkIEKImOOYApTFfWW1jXxbhnYrh2kDtswNZsbZ6yoY72y+TS10qMLuC7evQbOhOVYmDlgv23fwFTVX21/EfPd0xA5I+34zOQl/t/+W+Ep8KYViRTkRasTKCkpKUnDb6XM8jMAxEX4AAAAAElFTkSuQmCC
348
+
349
+ [reconstruction]: https://img.shields.io/badge/Reconstruction-c2e2de?style=for-the-badge&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADIAAAAyCAYAAAAeP4ixAAAABmJLR0QA/wD/AP+gvaeTAAADj0lEQVRoge3aS2tdVRQH8F80bS5Y2yYx1qEFrYpSMxBHKjrRFEV8oFB1Wuqotv0A+h3EfBQpitLGRGhsrQ9Ek0adVIUKWvARoxWvg7U35yTcpLnnnnvuLfQPhwX7+V/7sfbaax9uYLgw0kAf83gYf+IyvsfXWMBHuNIAh1owj/Ym3794D69hbFAEu8EOjOM+PIc3cRprCqV+xEm06uhwq9Hb+NWBvTiC86V2v8WhXhtuWpEyZvBZqf1ZNc1OGVM4q7+KwM14A6upj3PYV1fjd2IlNfyd/iqScRDLqZ8V7O+1wduwlBr8RIxOE4oQxmFBMYB3VG2oJaY2T/GtKb0pReAWxZI+p+KeeUcxtVOl9CYVgUnFMpvttvLT+E/Y+ekNeU0rQuyZbABmtltpp2Jzn+iQPwhF4LhihWxriZ1IFb7CaIf8QSkyis9T38evVXgHfkqFNztd54WzNwgcEtx+cA3f7HAq+EUDpKpgBJ8Kjq9sVfCDVOhIA6Sq4qjgeGqzArvxD66Kw2hYMY6/Bc89nQq8KDQ90yCpqjgjuD6bE24qZT6U5OkmGVVEHuxHckJZkYNJftkYnerIxujenFBW5O4kv2mMTnVcTPJATigrsjfJXxqjUx2Xk5zolJnvz9dDEGBMcF3LCeUZyaGhQbgfPaOsyB9J7hoEkS6xO8nfckJZkbw3arsj9xG3J/lrTigrspLkAcOPe5LM1mudItnsPtgYnerIZ95STigrspDk443RqY4nkux4nRgXsdg1xZkyjJhQOI1506+bkSv4UNjolxql1h1eFtfx95Ws1ka8Ks6R8w2R6hYjuCA4Ht6q4Ji4RrbxVP95dY1nBLdLtuGBnEyFL4hY7LBgVHi9bRzbToWWCO23RUB5WJAHeFkX/mCOVqwqbPYgMY2/BKcnu608qxiByXp5dYUpRcDw7SoNlIPYZ0VAuWnswmLisKiHK8aU9c8KU1sXrxUT+FjxHNezM7tfMbXLmvHFpkt9XhSPTbVgn2KZrYrYa6e4cK8YFdYpb+xFhcteG1oKA9AWAeWeX18TRsRTRj4n8sbu67V7RjHtbRGLPapadHICryvcjryUujaxVdESyyu7M23hkc7hLTyP+4XZ3pm+STyAF1KZuVQn178kTuyBBD/GRFT8lLgCbPeNPn9X8a5wAHtSoM6favbgMTwqftW4S5jr/Ij6O34WpnRJvLPM2cIVv4HrGf8Dfs0JOaMPQmgAAAAASUVORK5CYII=
350
+
351
+ [classification]: https://img.shields.io/badge/Classification-fdd7e6?style=for-the-badge&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADIAAAAyCAYAAAAeP4ixAAAABmJLR0QA/wD/AP+gvaeTAAAD3klEQVRogcWZu09UQRTGfwILhTw0cQFRG0tQEwsrG4ydiQExFsQEY6h4KJLYU6M22qGxImpjYvwHLBQBDSq6KqBg1MSOGOWlaHAtztnMZbl79965j/2SCRvuzPnm3Jlz5ptzwaAKuABMAivaJoB+oJLoECvPHmAayBZoL4GmsCRx81Q5jL8D2oAabe3AjD57Qbg3FjvPRTXwHqhzeV7nIOmzIUiK55kObvPoc0r7TNgQJMWzrINrPPrUap8lG4IkeMp0YDFsc/S3xZ8APIFRhuxZgOMe/XLPMrZEyP73y/PWhqAfWZUZ3INwBzCnfbpsCBTdAXh6bQgqkfydBWaRgKvV1uEwvg402hAoUsCYD54p7WuFJowzbm1d/2aAelsSoAF44sEzBewOYR+QlekDxpEMs4qkwS5kJTJE48w+tbOhHMvAU2Q7Wa9EEKSBN5h9bvvmLquNBxHNywppzMrMYufMax3fHuG8rFDPZmeCiLzDOm4R0V4lRwMi/HLayZnNUkAncBdYQOJgDZhHzocscCPJyRZDA+KE05l24AuFs1KuLQAnk59yYTRinFnETDSDqN1mYLu2FmAAkzD+AcOEkCVRoxHjxG8kjXppsjKgR/tmgStxT9Av2jFOtAYY14pxxkvaJ4IUJiZ6LMb36thPRFsXCIxOTEzYSPxyTCo/E2YiYe4XYDLPLSR4g2IDuK2/T4ecSygsIG+zOYSNFkxKdkMKuQKMIUklljLVik6iOoSNGrWx6vIsjSjiuMtUrFH8Hl4MOUd+5f2/AnhF/GUqIJqtdQBzQE4DI8B5YIhkylQA3FNDAyFsDOItZ+IuUwGSabKI7LBNvzkheQ44ClwC7mAcibtMBcjenMf+QMwVPj6z9Xa4RHFH6ojIkQrgPnYS5RimFuCmhCcpfgHr0D7jAXi3IA08wgSqUzSWe4wrR1Yi58RwgX5BylRW5SOAI8BXNfIN2dtDGIcyyF5vQc6YaiQ7DWJiIgtcpbCM91umsi4fdWNU62M23w5PKGmxi5Xf4kOxMpVr+cjtS9IkssRV2kYcRq7j/iZSyNsbRc6AZbX1Abn+9gF/ka2134cz+WUqz/KRny9JueBbBc76mIAXbqqt0ZB2NsHvl6QskmoPRcC5F5EkG8DBCOwBwb4kWWcHF1xTmw+jMhjkS9JYVKTALuAnJmhXCSnRg3xJ+h7UuAfSSAKITKInKgUUFcBztfkRORdqCSnRE5MCDvSovTnkhM6HlURPRArkYVztdXj0CSzRY5cCLvihNnd69LGS6FZSIARijctAUiAkShGXsaAUcRkLShGXsSHpuIwVkcXlf7gl3GNHJu+DAAAAAElFTkSuQmCC
352
+
353
+ [distance]: https://img.shields.io/badge/Distance-f4d5b3?style=for-the-badge&&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADIAAAAyCAYAAAAeP4ixAAAABmJLR0QA/wD/AP+gvaeTAAAB3ElEQVRoge2aO07DQBRFDzT5LABFNLRIFIiSgoYC0SGxAgoaPoVXQ4OggR2AxH8XSBErgB4huoTCfnjk2HHi+fiNlNMkhT2ak+vnGzmBBc7ZAo6ApbY3YsMO8A2MgUtgud3tNGMX+CGVGGWvF0SWjJnENbAP/BJZMkUJ2fQeEcmYl9MNk5s1k1F7mZlJjIEPYLXkONXJmEncAsPs/RAYlByvMhlT4or0Ex6Qy5xXnKdKZtpMDICzmvNVyMw6E3W0OjPzzkQdrSRT1hMrwDsRJVM3E7bJHJBfqodWO52CKVG1WRuZLvCQnfsJrFnut5R5ZmKWW2+RLvBILrFuv+VJmvTELLdeIbhEk56oI4iEq56oogPcZWt/ARsO1/7HdU8UCZ6Ey54QOsA9AZPw0RNd4AkFM2GTTPAkfPREjwBJ+O6J4BI+eqIHPKNgJmxQNxNNCJ6Ej57oAS9E3hNqZsImmeBJ+OoJkYi6J7xLbDIpIbjqCe8SkD6ZkJk4drx2MAkhIf2hZQScOFozyGCX4VKmNQnBlDltuEbrEoKNjBoJoYlMH2USwjwyfeAVhRKCKVPVJ+olhGky0UgIZTLRSQgJ+T8UEuCNgI3tGklGvtJElUQRkYlaQtjG7UOIBQB/hf9HJ+Iv7O8AAAAASUVORK5CYII=
354
+
355
+
356
+
357
+
358
+ [preprocess]: https://img.shields.io/badge/PreProcess-f4d5b3?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABmJLR0QA/wD/AP+gvaeTAAABDUlEQVQ4jc3TPy8EURQF8N8uS/wJGxuh0tH7CBKthk/gk6iIQiFRSEhEFEQhGoSQbERUEo2SGp1CwTa7infJZE3sbuckr5j75p5z7pk7/BesoIZGm6eG5SxBDSMdCFbwmS002mi6xU1zT7ED1fpfQtmLAexhtAVhI++hGyd4wD3KUS/jUJr9G8P8HmETBUzjGqdSuMeYwno4PMMjZrMOlnCHwagVsI23UC9iHNWoz+AlS/CEsSZHXTjABvpwiZ0YdSsc/hBMykcJEziXwi0FSTXGQVqkSl43ekNpHz1BcoV+YQXW8BwvZLGKVymPRexKoc7hQ1y0whHepXzqWJBZ41abWJA+3xAuMK/pH/gCPJhBnIabIDQAAAAASUVORK5CYII=
359
+
360
+ [training]: https://img.shields.io/badge/Training-f4d5b3?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAABmJLR0QA/wD/AP+gvaeTAAAH5UlEQVR4nO2dW2wVRRjHf6WVwgNVlKCERJDihUiQxhvGFjCgFEXig8QgiSHGywOCD14wmpgYiakEEyMGNUEgImBQ4+VBJdqICMGgKAhYvIBGgaIhEAVLKbTHhzkbjqczszOzs2d3YX/JvJye/c+337dnZvabSyEnJycnJycnJycd1AILgQNAwbIcKF7bt+JWn0EsxN7x5aWl4lafQbQTPQDtFbfakaqkDZBQ8KSTxnvrRZ+kDTjbyQOQMDUxas8EpgCHgSXALxH1VE1K1CarHngIGAh8Cqz2oJkoNZy+iaD8CzQbXFuPumNVofr+KIP6JgHHyq5bTbwPZqzUAG8hd8hx5EEYDMwFNiuucw1AAfgWeAQYKrluEtChuG4NUG10xylC53xZEIYjmqbOkGsKwClNvacMru8CliJ+YaB3fukvITNBMHF+aRDeRjjFdFz/nabu7RY6J4F3CHd+poJg43yX0onozFU0AydirH8VKQ5CnM5vAxYDYwzsGFP87q6YbHmTFAbBxPnfY9bGB2ULMAcYEsGuC4EHgC+BHsN6O4u2ZiYIJs7fDNQBU9EHoRtYC4yNwc7LgWXo+5vOoo11hI/EVpKCIFTRe5yvcn6AKgitwJUVsHk48K6k/sD5ASZBWFEBe7XMxM75ATcCWxHNwjbgrkoYW0YzsBE4AmwCxkm+YxKEGZUwVsUKhVFBmy9zftaoQ98nLPNVkUsy7rDmb5cBTY62pIkmxL2oOFQpQ2SMROR2dJ3atMSsi8409IOGo8AliVlXZAr6t8kTwITErHOnEf1LXQcijZEKJqP/JWxLzjRnviIjzg+4id7p3NLxfSamBUv4hww5P2A5cqN3JGmUIxuR38vaJI3SMQR1XzDLg35fYCLwHKJ5+IvTv64/i5+9BIzHz/RqM/J76UK8zKWORcgNXk+05qc/8DB2i7N+B54E+kWoF+A9hf7LEXW9MxB5+98DXBVB91bcVsUFZQ9we4T66xHzBrJ+4IIIut65H7kD3nfUqwLmI5oXV+eXlhbcf4WqXNeDjnqx0IrcSFl+JYwqxOu9D8eXltdx6xsaFHqfO2jFwhDkc7G7HPWelWj5Ks842iSb6uxGPsEfCy6rlB93qGe6ge5HwGxECmBA0bZRxc/WoZ946cZsSUw5TxjYFZRYVmW7rFIeaVlHP0SnqdJrA6410GkEdmp09mE/Ohql0VMVr6uybUcifzjU8ZRG7zPE025KHep+qQDMs7StCjio0ZOV/ZZ1aLGN/nJL/T6IsbtMawdu8woDgN0Kzf3AOZZ6qxRauuINm0pPAtdb6k9QaPUgZs9cGaexc7ylVhNmC78SC8ApxNM63UF/sULzk6iGFzVk2gsctO5EjO5M30+8EXcFqtTvPR6071Vob/KgHZD5AASJtfJyhQftkQrtgx60AzIfAFXb2t+D9gCFdpcH7QAv/klyh4zKWB9r81WrqVO3IyhJg44oPh/sQVuVu090NYOMJAOwR/G5j2UtqiGxqs7EcAlA+TB0O265FlVm0ceKubsVn29x0LoDMQwt77Nix+bl4wRmy8hLUb2IFYDrItjdhDo5Z/uCdw3yyZlE3wNUxXbKri/qfNNu7PJAAecCPyg0f8V+guZVhVZFAmCbjGtzqGOeRq8Vu3xQWDJuvoN9ezV6suI1Gfe8ZeUF7DdW9EMYrdLbiUg1h3ED8KNG52fEHIINIzR6quI1HV2LCILOQeVljkM9zejzLD2ISZfZiBx9LeJpvxSRclgXYlMPYqLflsdCdMuf/BYqeExOPfKOzmWUAWLa0PZpMy1PO9okW57eQ4rWB6k2MTQ4aPVBTKD7dv5Sp
ztTp7TXO+rFgqoDjbKMbz7mG+vCmp0W3PdzfajQvc9RLxYGIzZdlxvZDYyOoHsbotN0df4+4JYI9TcgfwiOAedF0I2FJcid0BpRtxbxa/hNoa9y/FyiLU2sAjYo9BdF0I2N4ai3gPpIKVQhhqEvIiZT2jk9YmovfrYAMdVoO98rYxbye+kALvKgHwtrkRu9PUmjHGlDfi/eNuX5ZgryfqCASF5laYNGDeoBwDFSuOXqZvT7xLYmZ5ozupNXUhWEyeid30m0pSVJ0Yh+h2QqgjAC9b6wwPlTlVenn7CzLf4GhiVmHfCCxKgzxfkBYUFY6Ksilxkx3S6Rn/C79iYpNiHuRcWgShkiw/WwjkbE3uECYopvFpUfJc1AHH/WDXxDRg/rqCJ80arpcTVfEM85QeWMRj5Z08H/0xaZOK4GxJh5DWZBCGtPe4APcNvaFEYD4pA+3XxDcJJjZg5sCqgm/OAm2yPLdgCPIkZargxF5IS+tqj3OOFHlqXK+QEmQXAte4HXgKsN7BiLWBSg2hsQtaTS+QHVuG1kMC0n0S9/vxW7M0jPKOcH2AShA3ECoW3TpCKs6SgtXYjkoSp/lUnnB1QjHBvm/ODUkaGINLPuyJugRD26uBMxdxHM54blsTLn/IBqhOFhzi/lfMQO9A3oRysqdE7cjOiQZQt+J6EOfiadH1BN74P9jmJ23s7F+AtAveaagIn0PiNoBRV0fpxvojMQ7wCHgFcQSwNNUDnb9h84mN7bMMR6pkHAx4iDxStGGidMdE+7DWm8t16kbsfI2UYegIRJYwB8/BM2l2MTEiGNAVjpQeMNDxpnLS6rsoOyH3HAX/7PPHNycnJycnJycnJycnJycnJyUsd/Xk5Gaglg9FgAAAAASUVORK5CYII=
361
+
362
+
363
+ [extradata]: https://img.shields.io/badge/ExtraData-f4d5b3?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAGYElEQVR4nO2d228VRRzHP6fl2ipFvECN1gpoQR6MVEXFN28hkQdS0FdFRf0P8E8QjXh5ML6aiAFvKD6p9U2lGkuJxEtiI1ClRWy5lYrc6sNvT/acPbN7dk9nd6dnf59ksifl7G9+85uzM7PznWFKuEcL0A3cCtwE3OJdrweu8dISoN37/kJggff5PPCv9/kcMAGc9K7/AH8CR4AR4A/gMDCdamkSUso5/4XAWuBe77raS20Z5T8F/AL8DBwAvgcG8Ss1c7KukPnAeuAR4GHgTmBuxj7U4yIwBPQDXwDfABdy9cgy7cCTwIfAJNJEzKY06fn+BBk8uWk+IeuBF4FNJC/IaeA3/Pb+CDAGjCP9wQRwxvtuZb9R2Z904Pc31wLLkP7oZu+6CliU0K8p4GPgbeDbhPfmwhzgKeAg8X+Bx4BPgO3AY0BXhv52eXlu93w4lsDvg0hZ52Tob2xagWeAYeoXZBzY7X2/Ow9n69ANPAvsQXytV55hYCsSAyfoBQaoXwnvAhtxrxOPogV4EHgDGCW6jEPAA/m4KSwA3gKuYHbwCvAV0Iejj3VC5gKbkdFXWJkvA28io8lMuQ0Zr5ucugS8B6zJ2qkMWYOU8RLmGAwCK7Nyphc4EeLIZ8DtWTniAD3APsyx+Bt52U2V+4BThsxHgMfTztxhNiIxCMblFBKzVLgR+MuQ6afIOL/odCAjs2B8xpB3H6u0IvM8wcxeI//5MJcoATupjdMAlofFLxgyedlmBk3GDmrj9bwt421IBxVsplpsZdCEtCADnMqYHUemdmbMloDhM0CnDcNNzlJkTq4ydn02DO8OGN1hw2hBeIXq2L1vw+ihgNF7bBgtCOuojt1PNowGH7urbRgtCIuobe4jidMxBzUDHebGJxjfuj/mRkZKPQ3cU1QSx6qRCtnSwD1FZXMaRoMvOGeRaRQlmmXU9r/TNgybZjL3oS+GUbQQPgs8Y8IUMn0fCcc0dZJ6hUwjiqE+KT4l4FWiYzZjooxPI4vJltrIaJZzHeHNVKoVYuqoRhCRpqiECVSZdOprEdElrLNfZSPTWcJq4HPMsRgF7jL8fcaYDK4gfJHDZWAXzb/IYRdSVlMMfgSWe9/NpEJAlrq8TvQyoH7kRXI2rcUKYx6yvvdrosu8k+plQJlVSJm7gf0hDpbTBLNzoVwr/kK540SXcRDzQrnMK6Ts+FbiLSWdAD4AnsN/rF1iObANWfF+kvrlGQaeJnz4n0uFlCkvth4y3BeWRoG9wEvABmRtbRYzyiUvrw1e3nsJH6yY0hDxFlsnqpA4BQ8aiRus+5HFEX3428/iMom/HeGodx3F36I2jqx5AtlMc8773I609eBvRVjife5EKqC8HaEHuKoBvz4C3gG+i3lPo/GLNDiTR64NmfXcg0xMxv0FupLOIjJ2H40tUnDmCTExD3lyHgUeQsbp8yLvyJ4LSAfdD3yJbMy5OAN7ieKXdYUEWYBUyjrvegfyYpm0KWmUSeBX/E2fA0hl/Gcxj1lVISZKyM6mbu/ahWyLvoHqbdFlOXQ+/pa5KfxgnqV6W/QJZFv0US8dRvqmtEm9D0m6L6/ILCZhH6Kaerqopu4Yqqk7hGrqDqGaumOopu4Iqqk7hGrqDqGauiOopu4Iqqk7gGrqDqCaugOopo5q6lVO1SNoRDV11dRzTaqp54xq6qimHu8fGzFoAdXUYxisTKqpx0c1dcdQTd0xVFN3CNXUHUI1dcdQTd0RVFN3CNXUHUI1dUdQTd0RVFN3ANXUHUA1dQdQTR3V1KucqkfQiGrqqqnnmlRTzxnnNfXTVKuEHcT4L7Nj0uyaegd+0wo
Sy8VRN8Q5zm6E6pe8HuCHxK6ZOY+0xZXtcTNp6sFZi5F6N8SpkENUV8gW7FWIiWkkUFksQEiboGJo5XSEzdR2cqoY1sekGG6yYXghtW+pqhhGY1IMrR15BHKgVXD4pophOCbFcJvNDFoxH0C8Ez1PpJISMuEajNN+UjhNuhPzwZKqGAphiuEYMkpMhaijV1UxrI1LqkevlllL7bmGlZ29KoaSMjmcuMxKRBUzOaKKocRmRdZOzUcOcw9zqoiKYW4H3FfSi3kEVpmKoBgewKwY5kKRFcPfEcXQ+rDWBqoYWnAqLVQxdJSiK4aJyHrao4iKYSLynodqdsUwMXlXiIlmUgwVRbHK/zvve/uixi6eAAAAAElFTkSuQmCC
364
+
365
+
366
+
367
+ [postprocess]: https://img.shields.io/badge/PostProcess-f4d5b3?style=social&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGAAAABgCAYAAADimHc4AAAABmJLR0QA/wD/AP+gvaeTAAAFXElEQVR4nO2dW2gdRRjHf7bRIiKiVNSK2IqkCV4ajdfUqEWrSOKt1KpVUi+NN0SliD5oVfClCj4Vi/oo6oNI38UHxUbEB29QwUtVvEWbqmlMTc31+DCNrbDz7ew5szu7O98Pljxk58+3//85s7O7s3NAURRFURRFURRFURRFUVpnAbAGeAP4FpgAGgG3F4HDcj3iEnEh8BlhDY82hJuA/YQ3O8oQVgL/EN7ktO0VTBdZK44AdhHe3GhDuJ/wpkbdHb1H8kH+AQwAxxZYy3JLLbUNoQ2YJvkA+wPUkyWAWoRwMskHtpcwB5Y1gMqfE2wH/GXJ6ildCJVNvEmGgc+F/w8CWynwWxtbAOPAKuBjYZ8HgJcpyJvYAgAYBVYjhzAIvEQB/sQYAJQohFgDgJKEEHMAUIIQYg8AAoegARiChdDmU6wCzF+gNcvggb/3AXOtl6PfgGYYBF7wJaYBNMc1voQ0gMBoAIHRAAJT1wC+wtzRbHXryLvQugZQGTSAwGgAgdEAAqMBBCa2e0FZmR9N5YZ+AwKjAQRGAwiMBhAYDSAwGoAfjgc2Ax8Bu4ExYCdmlt1ZRRRQtrmhRXInxnDbfNNZYBvmBZbciDWAZ3Cf+PsucFRehcQYwNNkn309BBydRzGxBfAozU1/zy2EmAJ4jObNP7Q78npOiCWATcjG7gJWAIuB7Sn7bvNZWAwBPISZjGUz9Adg6SH7LwReF/afBbp8FVf3ADYim/8jsCyhXVoIW30VWOcABkk3/zShfRv27minryLrGsBdmK7CZv7PwOkOOost7cd8FVrHAAaQzR/GHLcLZ1g09vgqtm4BrAdmsJv/G9DpqHUi8IVF50NfBdcpgHXI5o8AZzpqLcF4YNN6ylfRdQlgDTCF3bBRoNtR6xTga0FrHDjBV+F1COB6ZPP/BM5x1DoVs0ybTavBwZc9vFD1APqBSeRP/nmOWsuA7wWtBvCcx9qBagdwFfISa2OYNfBcWAp8J2g1gOc91v4fVQ1gNbL5+4BLHbXagZ8ErQawxWPt/6OKAfRiDLaZ9TdwuaPWcuAXQStX8+cLqFIAKzGjEMn8VY5aHZiLMsn8zR5rT6RKAfQAf2E3awK4wlGrE/hV0GoAT3is3UpVArgI2fxJoM9RqwtzK8GmNQc84rF2kSoEcC5mLC+Zf20Grd8FrTnM84PCKHsAXZjVG22GTQHXOWp1p2jNAQ96rN2JMgewAvnTOgPc7KjVgzwHaA6z4lbhlDWAs5H76RngVketS5DPH7PAHR5rz8QSS1FjhFuPswN5hDID3Oao1Yts/gywwWPtmSnbwq2dmHv2kmG3O2pdibkusGlNA7d4rL1ppKWLNwDHFVRHO/KFUZau4mrkH56YAtZ6rL0l7sVeqI/NhWMwz2mlk+Tdjlp9yEvxTwI3OmoVwuHAN4QNoE9on2WE0od8k24SuMFRq1B6yO8HHFzYYmmbZWzen3IMk7hfMwRhLfn8hIkLQ5a2zzq2X4f8VGwC8/yg9JwPfEKxARyJ/ZPrMoNhPfaRXAMzEnK9SVcKFmD6ydcw54ZWf8Yqjcss7UZIvxYZQJ4JMX5AXxF4kmTztqe0S5v9tg/3ZwNR8zbJBm4S2mxENn8vcHF+JdeHhdhvkl2QsH8HZlKUNOl21NJWSaAbe/exCDM38x7gVcxs5rTzjZqfkYdJNnI/8k20pG0PHl+ciIW38DPUHcHcwlYykjYrwWXbTUFvtdeNdlo3f5gClqhMog4rZvU20WYas1T9ELADeB8z5CycWAKYAD7FGP4BxnBvrwfFTtI08DHgHeBxzLPcRcGqqzkncbAPfxMzHO2
mQsvw/Atv0E+fkVDOBwAAAABJRU5ErkJggg==
OpenOOD/bash_allocation.slurm ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/bin/bash
2
+ #SBATCH --job-name=zzzz2
3
+ #SBATCH --output=output2.txt
4
+ #SBATCH --error=error2.txt
5
+ #SBATCH --cpus-per-task=5
6
+ #SBATCH --ntasks=4
7
+ #SBATCH --gres=gpu:1
8
+ #SBATCH --mem=100000
9
+ #SBATCH -N 1
10
+
11
+
12
+ ./batch_file_deal3_train_method.sh
13
+
14
+
15
+ # 取消当前作业以释放节点
16
+ scancel $SLURM_JOB_ID
OpenOOD/bash_allocation2.slurm ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/bin/bash
2
+ #SBATCH --job-name=OOD_post_method
3
+ #SBATCH --output=output_OOD_post_method.txt
4
+ #SBATCH --error=error_OOD_post_method.txt
5
+ #SBATCH --cpus-per-task=5
6
+ #SBATCH --ntasks=4
7
+ #SBATCH --gres=gpu:1
8
+ #SBATCH --mem=100000
9
+ #SBATCH -N 1
10
+
11
+
12
+ python batch_file_deal_post_method_Ours_Notline.py
13
+
14
+
15
+ # 取消当前作业以释放节点
16
+ scancel $SLURM_JOB_ID
OpenOOD/batch_file_deal2.py ADDED
@@ -0,0 +1,120 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import subprocess
2
+ import os
3
+
4
+ # 设置 PYTHONPATH 环境变量
5
+ pythonpath = '.'
6
+ if 'PYTHONPATH' in os.environ:
7
+ pythonpath += ':' + os.environ['PYTHONPATH']
8
+ os.environ['PYTHONPATH'] = pythonpath
9
+
10
+ ROOT = "/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD"
11
+
12
+
13
+ run_file = ROOT+"/eval_ood.py"
14
+
15
+
16
+ subprocess.run(["python", run_file, "--id-data=bronze2", \
17
+ "--root=/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/results/bronze_2_p2pnet_415", \
18
+ "--postprocessor=gram",\
19
+ "--batch-size=20",\
20
+ "--save-score",\
21
+ "--save-csv",\
22
+ ])
23
+
24
+ # subprocess.run(["python", run_file, "--id-data=bronze2", \
25
+ # "--root=/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/results/bronze_2_p2pnet_415", \
26
+ # "--postprocessor=gradnorm",\
27
+ # "--batch-size=20",\
28
+ # "--save-score",\
29
+ # "--save-csv",\
30
+ # ])
31
+
32
+
33
+
34
+ # subprocess.run(["python", run_file, "--id-data=bronze2", \
35
+ # "--root=/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/results/bronze_2_p2pnet_415", \
36
+ # "--postprocessor=react",\
37
+ # "--batch-size=100",\
38
+ # "--save-score",\
39
+ # "--save-csv",\
40
+ # ])
41
+
42
+ # subprocess.run(["python", run_file, "--id-data=bronze2", \
43
+ # "--root=/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/results/bronze_2_p2pnet_415", \
44
+ # "--postprocessor=mls",\
45
+ # "--batch-size=100",\
46
+ # "--save-score",\
47
+ # "--save-csv",\
48
+ # ])
49
+
50
+ # subprocess.run(["python", run_file, "--id-data=bronze2", \
51
+ # "--root=/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/results/bronze_2_p2pnet_415", \
52
+ # "--postprocessor=klm",\
53
+ # "--batch-size=100",\
54
+ # "--save-score",\
55
+ # "--save-csv",\
56
+ # ])
57
+
58
+ # subprocess.run(["python", run_file, "--id-data=bronze2", \
59
+ # "--root=/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/results/bronze_2_p2pnet_415", \
60
+ # "--postprocessor=vim",\
61
+ # "--batch-size=100",\
62
+ # "--save-score",\
63
+ # "--save-csv",\
64
+ # ])
65
+
66
+ # subprocess.run(["python", run_file, "--id-data=bronze2", \
67
+ # "--root=/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/results/bronze_2_p2pnet_415", \
68
+ # "--postprocessor=knn",\
69
+ # "--batch-size=100",\
70
+ # "--save-score",\
71
+ # "--save-csv",\
72
+ # ])
73
+
74
+ # subprocess.run(["python", run_file, "--id-data=bronze2", \
75
+ # "--root=/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/results/bronze_2_p2pnet_415", \
76
+ # "--postprocessor=dice",\
77
+ # "--batch-size=100",\
78
+ # "--save-score",\
79
+ # "--save-csv",\
80
+ # ])
81
+
82
+
83
+
84
+
85
+
86
+
87
+ # run_file = ROOT+"/main.py"
88
+ # subprocess.run(["python", run_file, "--config configs/datasets/cifar10/cifar10.yml \
89
+ # configs/datasets/cifar10/cifar10_ood.yml \
90
+ # configs/networks/resnet18_32x32.yml \
91
+ # configs/pipelines/test/test_ood.yml \
92
+ # configs/preprocessors/base_preprocessor.yml \
93
+ # configs/postprocessors/knn.yml ", "--num_workers=8",
94
+ # "--network.checkpoint='results/cifar10_resnet18_32x32_base_e100_lr0.1_default/s0/best.ckpt'",
95
+ # "--mark=0"])
96
+
97
+
98
+ # cmd = "python main.py \
99
+ # --config configs/datasets/cifar10/cifar10.yml \
100
+ # configs/datasets/cifar10/cifar10_ood.yml \
101
+ # configs/networks/resnet18_32x32.yml \
102
+ # configs/pipelines/test/test_ood.yml \
103
+ # configs/preprocessors/base_preprocessor.yml \
104
+ # configs/postprocessors/knn.yml \
105
+ # --num_workers 8 \
106
+ # --network.checkpoint 'results/cifar10_resnet18_32x32_base_e100_lr0.1_default/s0/best.ckpt' \
107
+ # --mark 0"
108
+
109
+ # subprocess.run(cmd, shell=True, cwd=ROOT)
110
+
111
+ # # subprocess.run(["python", run_file, "--model_type=test"])
112
+ # # subprocess.run(["python", run_file, "--model_type=stage1"])
113
+ # # subprocess.run(["python", run_file, "--model_type=stage2"])
114
+ # # subprocess.run(["python", run_file, "--model_type=stage2_searching"])
115
+
116
+ # path = ROOT
117
+ # # cmd = 'python -m torch.distributed.launch --nproc_per_node 4 texture_countour_double_GCN.py --model_type=stage2'
118
+ # # subprocess.run(cmd, shell=True, cwd=path)
119
+ # cmd = 'python texture_countour_double_GCN.py --model_type=stage2_searching'
120
+ # subprocess.run(cmd, shell=True, cwd=path)
OpenOOD/batch_file_deal3_train_method.sh ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ SEED=0
3
+ # train
4
+ # python main.py \
5
+ # --config configs/datasets/bronze2/bronze2.yml \
6
+ # configs/networks/opengan.yml \
7
+ # configs/pipelines/train/train_opengan.yml \
8
+ # configs/preprocessors/base_preprocessor.yml \
9
+ # configs/postprocessors/opengan.yml \
10
+ # --dataset.feat_root ./results/bronze2_OursBronze2_feat_extract_opengan_default/s${SEED} \
11
+ # --network.backbone.pretrained True \
12
+ # --network.backbone.checkpoint ./results/pretrained_weights/resnet50_imagenet1k_v1.pth \
13
+ # --optimizer.num_epochs 90 \
14
+ # --seed ${SEED} \
15
+ # --proj_ROOT /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD
16
+
17
+ # test
18
+ SCHEME="ood" # "ood" or "fsood"
19
+ python main.py \
20
+ --config configs/datasets/bronze2/bronze2.yml \
21
+ configs/datasets/bronze2/bronze2_ood.yml \
22
+ configs/networks/opengan.yml \
23
+ configs/pipelines/test/test_opengan.yml \
24
+ configs/preprocessors/base_preprocessor.yml \
25
+ configs/postprocessors/opengan.yml \
26
+ --num_workers 8 \
27
+ --network.backbone.name OursBronze2 \
28
+ --network.backbone.pretrained True \
29
+ --network.backbone.checkpoint ./results/bronze2_ours_resnet50_415_NotLine_train/s0/model_state_dict_epoch90.pth \
30
+ --evaluator.ood_scheme ${SCHEME} \
31
+ --seed ${SEED} \
32
+ --proj_ROOT /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD
OpenOOD/batch_file_deal_post_method_Ours_Notline.py ADDED
@@ -0,0 +1,128 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import subprocess
import os

# Prepend the repo root to PYTHONPATH so the spawned evaluation
# processes can import the local openood package.
pythonpath = '.'
if 'PYTHONPATH' in os.environ:
    pythonpath += ':' + os.environ['PYTHONPATH']
os.environ['PYTHONPATH'] = pythonpath

ROOT = "/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD"

run_file = ROOT + "/eval_ood.py"

# Checkpoint/result directory of the model being evaluated (--root).
RESULT_ROOT = (
    "/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD"
    "/results/bronze2_ours_resnet50_415_NotLine_train"
)

# (postprocessor, batch size) pairs, run in this order.
# None means "let eval_ood.py use its default batch size".
# NOTE(review): the smaller batches (odin/gram/gradnorm) presumably
# work around memory use of those methods — confirm.
POSTPROCESSORS = [
    ("openmax", None),
    ("msp", 200),
    ("odin", 10),
    ("mds", 100),
    ("ebo", 100),
    ("gram", 20),
    ("gradnorm", 20),
    ("react", 100),
    ("mls", 100),
    ("klm", 100),
    ("vim", 100),
    ("knn", 100),
    ("dice", 100),
]


def build_cmd(postprocessor, batch_size=None, root=RESULT_ROOT):
    """Build the eval_ood.py argv for one postprocessor.

    Args:
        postprocessor: OOD scoring method name (e.g. "msp").
        batch_size: evaluation batch size; omitted from argv when None.
        root: checkpoint/result directory passed as --root.

    Returns:
        List of argv tokens suitable for subprocess.run.
    """
    cmd = [
        "python", run_file,
        "--id-data=bronze2",
        "--root=" + root,
        "--postprocessor=" + postprocessor,
    ]
    if batch_size is not None:
        cmd.append("--batch-size=%d" % batch_size)
    cmd += ["--save-score", "--save-csv"]
    return cmd


def main():
    """Run every postprocessor evaluation sequentially."""
    for name, bs in POSTPROCESSORS:
        subprocess.run(build_cmd(name, bs))


if __name__ == "__main__":
    main()
OpenOOD/batch_file_deal_post_method_p2pNet.py ADDED
@@ -0,0 +1,128 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import subprocess
import os

# Prepend the repo root to PYTHONPATH so the spawned evaluation
# processes can import the local openood package.
pythonpath = '.'
if 'PYTHONPATH' in os.environ:
    pythonpath += ':' + os.environ['PYTHONPATH']
os.environ['PYTHONPATH'] = pythonpath

ROOT = "/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD"

run_file = ROOT + "/eval_ood.py"

# Checkpoint/result directory of the p2pnet model being evaluated (--root).
RESULT_ROOT = (
    "/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD"
    "/results/bronze_2_p2pnet_415"
)

# (postprocessor, batch size) pairs, run in this order.
# None means "let eval_ood.py use its default batch size".
# NOTE(review): the smaller batches (odin/gram/gradnorm) presumably
# work around memory use of those methods — confirm.
POSTPROCESSORS = [
    ("openmax", None),
    ("msp", 200),
    ("odin", 10),
    ("mds", 100),
    ("ebo", 100),
    ("gram", 20),
    ("gradnorm", 20),
    ("react", 100),
    ("mls", 100),
    ("klm", 100),
    ("vim", 100),
    ("knn", 100),
    ("dice", 100),
]


def build_cmd(postprocessor, batch_size=None, root=RESULT_ROOT):
    """Build the eval_ood.py argv for one postprocessor.

    Args:
        postprocessor: OOD scoring method name (e.g. "msp").
        batch_size: evaluation batch size; omitted from argv when None.
        root: checkpoint/result directory passed as --root.

    Returns:
        List of argv tokens suitable for subprocess.run.
    """
    cmd = [
        "python", run_file,
        "--id-data=bronze2",
        "--root=" + root,
        "--postprocessor=" + postprocessor,
    ]
    if batch_size is not None:
        cmd.append("--batch-size=%d" % batch_size)
    cmd += ["--save-score", "--save-csv"]
    return cmd


def main():
    """Run every postprocessor evaluation sequentially."""
    for name, bs in POSTPROCESSORS:
        subprocess.run(build_cmd(name, bs))


if __name__ == "__main__":
    main()
OpenOOD/batch_file_deal_vim_ablation.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import subprocess
import os

# Prepend the repo root to PYTHONPATH so the spawned evaluation
# process can import the local openood package.
pythonpath = '.'
if 'PYTHONPATH' in os.environ:
    pythonpath += ':' + os.environ['PYTHONPATH']
os.environ['PYTHONPATH'] = pythonpath

ROOT = "/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD"

run_file = ROOT + "/eval_ood.py"

# Single ablation run: VIM postprocessor on the NotLine-trained model.
CMD = [
    "python", run_file,
    "--id-data=bronze2",
    "--root=/home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code"
    "/OpenOOD/results/bronze2_ours_resnet50_415_NotLine_train",
    "--postprocessor=vim",
    "--batch-size=100",
    "--save-score",
    "--save-csv",
]


def main():
    """Run the VIM-postprocessor ablation evaluation."""
    subprocess.run(CMD)


if __name__ == "__main__":
    main()
OpenOOD/codespell_ignored.txt ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ ans
2
+ fpr
3
+ als
4
+ hist
5
+ tha
OpenOOD/configs/datasets/aircraft/aircraft.yml ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: aircraft
3
+ num_classes: 50
4
+ pre_size: 512
5
+ image_size: 448
6
+
7
+ interpolation: bilinear
8
+ normalization_type: aircraft
9
+
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [train, val, test]
15
+
16
+ train:
17
+ dataset_class: ImglistDataset
18
+ data_dir: ./data/images_largescale/
19
+ imglist_pth: ./data/benchmark_imglist/aircraft/train_id.txt
20
+ batch_size: 32
21
+ shuffle: True
22
+ val:
23
+ dataset_class: ImglistDataset
24
+ data_dir: ./data/images_largescale/
25
+ imglist_pth: ./data/benchmark_imglist/aircraft/val_id.txt
26
+ batch_size: 200
27
+ shuffle: False
28
+ test:
29
+ dataset_class: ImglistDataset
30
+ data_dir: ./data/images_largescale/
31
+ imglist_pth: ./data/benchmark_imglist/aircraft/test_id.txt
32
+ batch_size: 200
33
+ shuffle: False
OpenOOD/configs/datasets/aircraft/aircraft_oe.yml ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: aircraft_oe
2
+
3
+ dataset:
4
+ name: aircraft_oe
5
+ split_names: [train, oe, val, test]
6
+ oe:
7
+ dataset_class: ImglistDataset
8
+ data_dir: ./data/images_largescale/
9
+ imglist_pth: ./data/benchmark_imglist/aircraft/train_oe.txt
10
+ batch_size: 32
11
+ shuffle: True
12
+ interpolation: bilinear
OpenOOD/configs/datasets/aircraft/aircraft_ood.yml ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: aircraft_ood
3
+ num_classes: 50
4
+
5
+ dataset_class: ImglistDataset
6
+ interpolation: bilinear
7
+ batch_size: 64
8
+ shuffle: False
9
+
10
+ pre_size: 512
11
+ image_size: 448
12
+ num_workers: '@{num_workers}'
13
+ num_gpus: '@{num_gpus}'
14
+ num_machines: '@{num_machines}'
15
+ split_names: [val, nearood, farood]
16
+ val:
17
+ data_dir: ./data/images_largescale/
18
+ imglist_pth: ./data/benchmark_imglist/aircraft/val_ood.txt
19
+ nearood:
20
+ datasets: [hardood]
21
+ hard:
22
+ data_dir: ./data/images_largescale/
23
+ imglist_pth: ./data/benchmark_imglist/aircraft/test_ood_hard.txt
24
+ farood:
25
+ datasets: [easyood]
26
+ easy:
27
+ data_dir: ./data/images_largescale/
28
+ imglist_pth: ./data/benchmark_imglist/aircraft/test_ood_easy.txt
OpenOOD/configs/datasets/bronze2/bronze2.yml ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: bronze2
3
+ num_classes: 11
4
+ pre_size: 420
5
+ image_size: 400
6
+
7
+ interpolation: bilinear
8
+ normalization_type: imagenet
9
+
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [train, val, test]
15
+
16
+ train:
17
+ dataset_class: Bronze2ExcelDataset
18
+ data_dir: /data/bronze_ID_and_OOD/bronze2NotLine/image_not_line
19
+ imglist_pth: /data/bronze_ID_and_OOD/bronze2NotLine/not_line_ding_gui_train_val_test/ding_gui_not_line_train.xlsx
20
+ xml_path: /data/bronze_ID_and_OOD/bronze2NotLine/xmls
21
+ batch_size: 128
22
+ shuffle: True
23
+ val:
24
+ dataset_class: Bronze2ExcelDataset
25
+ data_dir: /data/bronze_ID_and_OOD/bronze2NotLine/image_not_line
26
+ imglist_pth: /data/bronze_ID_and_OOD/bronze2NotLine/not_line_ding_gui_train_val_test/ding_gui_not_line_val.xlsx
27
+ xml_path: /data/bronze_ID_and_OOD/bronze2NotLine/xmls
28
+ batch_size: 128
29
+ shuffle: False
30
+ test:
31
+ dataset_class: Bronze2ExcelDataset
32
+ data_dir: /data/bronze_ID_and_OOD/bronze2NotLine/image_not_line
33
+ imglist_pth: /data/bronze_ID_and_OOD/bronze2NotLine/not_line_ding_gui_train_val_test/ding_gui_not_line_test.xlsx
34
+ xml_path: /data/bronze_ID_and_OOD/bronze2NotLine/xmls
35
+ batch_size: 128
36
+ shuffle: False
OpenOOD/configs/datasets/bronze2/bronze2_ood.yml ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: bronze2_ood
3
+ num_classes: 11
4
+
5
+ dataset_class: ImglistDataset
6
+ interpolation: bilinear
7
+ batch_size: 32
8
+ shuffle: False
9
+
10
+ pre_size: 256
11
+ image_size: 224
12
+ num_workers: '@{num_workers}'
13
+ num_gpus: '@{num_gpus}'
14
+ num_machines: '@{num_machines}'
15
+ split_names: [val, nearood, midood, farood]
16
+ val:
17
+ data_dir: /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/data/images_largescale/
18
+ imglist_pth: /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/data/benchmark_imglist/imagenet/val_openimage_o.txt
19
+
20
+ nearood:
21
+ datasets: [imagenet21k_container, imagenet21k_container_refine, bronzeS_containerM, bronzeM_containerS, bronze_Line]
22
+
23
+ imagenet21k_container:
24
+ data_dir: ./data/images_largescale
25
+ imglist_pth: /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/data/benchmark_imglist/imagenet21k_container/imagenet21k_container_file-list.txt
26
+ imagenet21k_container_refine:
27
+ data_dir: ./data/images_largescale
28
+ imglist_pth: /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/data/benchmark_imglist/imagenet21k_container_refine/imagenet21k_container_file-list-refine.txt
29
+ bronzeS_containerM:
30
+ data_dir: ./data/images_largescale
31
+ imglist_pth: /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/data/images_largescale/transfer_dataset/bronze_structure_container_material/bronze_structure_container_material_test.txt
32
+ bronzeM_containerS:
33
+ data_dir: ./data/images_largescale
34
+ imglist_pth: /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/data/images_largescale/transfer_dataset/container_structure_bronze_material/container_structure_bronze_material_test.txt
35
+ bronze_Line:
36
+ data_dir: ./data/images_largescale
37
+ imglist_pth: /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/data/images_largescale/bronze_line/bronze2_Line_OOD_list.txt
38
+
39
+ midood:
40
+ datasets: [ssb_hard, ninco]
41
+
42
+ ssb_hard:
43
+ data_dir: ./data/images_largescale
44
+ imglist_pth: /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/data/benchmark_imglist/imagenet/test_ssb_hard.txt
45
+ ninco:
46
+ data_dir: ./data/images_largescale
47
+ imglist_pth: /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/data/benchmark_imglist/imagenet/test_ninco.txt
48
+
49
+
50
+ farood:
51
+ datasets: [inaturalist, textures, openimageo]
52
+
53
+ textures:
54
+ data_dir: ./data/images_classic
55
+ imglist_pth: /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/data/benchmark_imglist/imagenet/test_textures.txt
56
+ inaturalist:
57
+ data_dir: ./data/images_largescale
58
+ imglist_pth: /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/data/benchmark_imglist/imagenet/test_inaturalist.txt
59
+ openimageo:
60
+ data_dir: ./data/images_largescale
61
+ imglist_pth: /home/zhourixin/OOD_Folder/CODE/other_methods/openOOD_code/OpenOOD/data/benchmark_imglist/imagenet/test_openimage_o.txt
62
+
OpenOOD/configs/datasets/cifar10/cifar10.yml ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: cifar10
3
+ num_classes: 10
4
+ pre_size: 32
5
+ image_size: 32
6
+
7
+ interpolation: bilinear
8
+ normalization_type: cifar10
9
+
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [train, val, test]
15
+
16
+ train:
17
+ dataset_class: ImglistDataset
18
+ data_dir: ./data/images_classic/
19
+ imglist_pth: ./data/benchmark_imglist/cifar10/train_cifar10.txt
20
+ batch_size: 128
21
+ shuffle: True
22
+ val:
23
+ dataset_class: ImglistDataset
24
+ data_dir: ./data/images_classic/
25
+ imglist_pth: ./data/benchmark_imglist/cifar10/val_cifar10.txt
26
+ batch_size: 200
27
+ shuffle: False
28
+ test:
29
+ dataset_class: ImglistDataset
30
+ data_dir: ./data/images_classic/
31
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_cifar10.txt
32
+ batch_size: 200
33
+ shuffle: False
OpenOOD/configs/datasets/cifar10/cifar10_double_label.yml ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: cifar10_double_label
3
+ interpolation: bilinear
4
+ normalization_type: cifar10
5
+ split_names: [train, val, test]
6
+ num_classes: 12 # actually it's 10 classes but it has 2 groups
7
+ image_size: 32
8
+ pre_size: 32
9
+ num_workers: '@{num_workers}'
10
+ num_gpus: '@{num_gpus}'
11
+ num_machines: '@{num_machines}'
12
+ train:
13
+ dataset_class: ImglistDataset
14
+ data_dir: ./data/images_classic/
15
+ imglist_pth: ./data/benchmark_imglist/cifar10/train_cifar10_mos.txt
16
+ batch_size: 128
17
+ shuffle: True
18
+ interpolation: bilinear
19
+ val:
20
+ dataset_class: ImglistDataset
21
+ data_dir: ./data/images_classic/
22
+ imglist_pth: ./data/benchmark_imglist/cifar10/val_cifar10_mos.txt
23
+ batch_size: 128
24
+ shuffle: False
25
+ interpolation: bilinear
26
+ test:
27
+ dataset_class: ImglistDataset
28
+ data_dir: ./data/images_classic/
29
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_cifar10_mos.txt
30
+ batch_size: 128
31
+ shuffle: False
32
+ interpolation: bilinear
OpenOOD/configs/datasets/cifar10/cifar10_extra.yml ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: cifar10
3
+ num_classes: 10
4
+ pre_size: 32
5
+ image_size: 32
6
+
7
+ interpolation: bilinear
8
+ normalization_type: cifar10
9
+
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [train, val, test]
15
+
16
+ train:
17
+ dataset_class: ImglistExtraDataDataset
18
+ data_dir: ./data/images_classic/
19
+ imglist_pth: ./data/benchmark_imglist/cifar10/train_cifar10.txt
20
+ batch_size: 128
21
+ shuffle: True
22
+ extra_data_pth: ./data/images_classic/cifar10_extra/stylegan_images.npy
23
+ extra_label_pth: ./data/images_classic/cifar10_extra/stylegan_labels.npy
24
+ extra_percent: 100
25
+ orig_ratio: 0.8
26
+ val:
27
+ dataset_class: ImglistDataset
28
+ data_dir: ./data/images_classic/
29
+ imglist_pth: ./data/benchmark_imglist/cifar10/val_cifar10.txt
30
+ batch_size: 200
31
+ shuffle: False
32
+ test:
33
+ dataset_class: ImglistDataset
34
+ data_dir: ./data/images_classic/
35
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_cifar10.txt
36
+ batch_size: 200
37
+ shuffle: False
OpenOOD/configs/datasets/cifar10/cifar10_fsood.yml ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: cifar10_fsood
3
+ num_classes: 10
4
+
5
+ num_workers: '@{num_workers}'
6
+ num_gpus: '@{num_gpus}'
7
+ num_machines: '@{num_machines}'
8
+
9
+ dataset_class: ImglistDataset
10
+ batch_size: 128
11
+ shuffle: False
12
+
13
+ split_names: [val, nearood, farood, csid]
14
+ val:
15
+ data_dir: ./data/images_classic/
16
+ imglist_pth: ./data/benchmark_imglist/cifar10/val_cifar100.txt
17
+ nearood:
18
+ datasets: [cifar100, tin]
19
+ cifar100:
20
+ data_dir: ./data/images_classic/
21
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_cifar100.txt
22
+ tin:
23
+ data_dir: ./data/images_classic/
24
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_tin.txt
25
+ farood:
26
+ datasets: [mnist, svhn, texture, place365]  # NOTE(review): 'place365' here vs 'places365' in the cifar100 configs — confirm which spelling the loader expects
27
+ mnist:
28
+ data_dir: ./data/images_classic/
29
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_mnist.txt
30
+ svhn:
31
+ data_dir: ./data/images_classic/
32
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_svhn.txt
33
+ texture:
34
+ data_dir: ./data/images_classic/
35
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_texture.txt
36
+ place365:
37
+ data_dir: ./data/images_classic/
38
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_places365.txt
39
+ csid:
40
+ datasets: [cinic10]
41
+ cinic10:
42
+ data_dir: ./data/images_classic/
43
+ imglist_pth: ./data/benchmark_imglist/cifar10/val_cinic10.txt
OpenOOD/configs/datasets/cifar10/cifar10_oe.yml ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: cifar10_oe
2
+
3
+ dataset:
4
+ name: cifar10_oe
5
+ split_names: [train, oe, val, test]
6
+ oe:
7
+ dataset_class: ImglistDataset
8
+ data_dir: ./data/images_classic/
9
+ imglist_pth: ./data/benchmark_imglist/cifar10/train_tin597.txt
10
+ batch_size: 256
11
+ shuffle: True
12
+ interpolation: bilinear
OpenOOD/configs/datasets/cifar10/cifar10_ood.yml ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: cifar10_ood
3
+ num_classes: 10
4
+
5
+ num_workers: '@{num_workers}'
6
+ num_gpus: '@{num_gpus}'
7
+ num_machines: '@{num_machines}'
8
+
9
+ dataset_class: ImglistDataset
10
+ batch_size: 128
11
+ shuffle: False
12
+
13
+ split_names: [val, nearood, farood]
14
+ val:
15
+ data_dir: ./data/images_classic/
16
+ imglist_pth: ./data/benchmark_imglist/cifar10/val_tin.txt
17
+ nearood:
18
+ datasets: [cifar100, tin]
19
+ cifar100:
20
+ data_dir: ./data/images_classic/
21
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_cifar100.txt
22
+ tin:
23
+ data_dir: ./data/images_classic/
24
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_tin.txt
25
+ farood:
26
+ datasets: [mnist, svhn, texture, place365]  # NOTE(review): 'place365' here vs 'places365' in the cifar100 configs — confirm which spelling the loader expects
27
+ mnist:
28
+ data_dir: ./data/images_classic/
29
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_mnist.txt
30
+ svhn:
31
+ data_dir: ./data/images_classic/
32
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_svhn.txt
33
+ texture:
34
+ data_dir: ./data/images_classic/
35
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_texture.txt
36
+ place365:
37
+ data_dir: ./data/images_classic/
38
+ imglist_pth: ./data/benchmark_imglist/cifar10/test_places365.txt
OpenOOD/configs/datasets/cifar100/cifar100.yml ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: cifar100
3
+ num_classes: 100
4
+ image_size: 32
5
+ pre_size: 32
6
+
7
+ interpolation: bilinear
8
+ normalization_type: cifar100
9
+
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [train, val, test]
15
+
16
+ train:
17
+ dataset_class: ImglistDataset
18
+ data_dir: ./data/images_classic/
19
+ imglist_pth: ./data/benchmark_imglist/cifar100/train_cifar100.txt
20
+ batch_size: 128
21
+ shuffle: True
22
+ val:
23
+ dataset_class: ImglistDataset
24
+ data_dir: ./data/images_classic/
25
+ imglist_pth: ./data/benchmark_imglist/cifar100/val_cifar100.txt
26
+ batch_size: 200
27
+ shuffle: False
28
+ test:
29
+ dataset_class: ImglistDataset
30
+ data_dir: ./data/images_classic/
31
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_cifar100.txt
32
+ batch_size: 200
33
+ shuffle: False
OpenOOD/configs/datasets/cifar100/cifar100_double_label.yml ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: cifar100_double_label
3
+ interpolation: bilinear
4
+ normalization_type: cifar100
5
+ split_names: [train, val, test]
6
+ num_classes: 120 # actually it's 100 classes but it has 20 groups
7
+ image_size: 32
8
+ pre_size: 32
9
+ num_workers: '@{num_workers}'
10
+ num_gpus: '@{num_gpus}'
11
+ num_machines: '@{num_machines}'
12
+ train:
13
+ dataset_class: ImglistDataset
14
+ data_dir: ./data/images_classic/
15
+ imglist_pth: ./data/benchmark_imglist/cifar100/train_cifar100_mos.txt
16
+ batch_size: 128
17
+ shuffle: True
18
+ interpolation: bilinear
19
+ val:
20
+ dataset_class: ImglistDataset
21
+ data_dir: ./data/images_classic/
22
+ imglist_pth: ./data/benchmark_imglist/cifar100/val_cifar100_mos.txt
23
+ batch_size: 128
24
+ shuffle: False
25
+ interpolation: bilinear
26
+ test:
27
+ dataset_class: ImglistDataset
28
+ data_dir: ./data/images_classic/
29
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_cifar100_mos.txt
30
+ batch_size: 128
31
+ shuffle: False
32
+ interpolation: bilinear
OpenOOD/configs/datasets/cifar100/cifar100_extra.yml ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: cifar100
3
+ num_classes: 100
4
+ pre_size: 32
5
+ image_size: 32
6
+
7
+ interpolation: bilinear
8
+ normalization_type: cifar100
9
+
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [train, val, test]
15
+
16
+ train:
17
+ dataset_class: ImglistExtraDataDataset
18
+ data_dir: ./data/images_classic/
19
+ imglist_pth: ./data/benchmark_imglist/cifar100/train_cifar100.txt
20
+ batch_size: 128
21
+ shuffle: True
22
+ extra_data_pth: ./data/images_classic/cifar100_extra/stylegan_images.npy
23
+ extra_label_pth: ./data/images_classic/cifar100_extra/stylegan_labels.npy
24
+ extra_percent: 100
25
+ orig_ratio: 0.8
26
+ val:
27
+ dataset_class: ImglistDataset
28
+ data_dir: ./data/images_classic/
29
+ imglist_pth: ./data/benchmark_imglist/cifar100/val_cifar100.txt
30
+ batch_size: 200
31
+ shuffle: False
32
+ test:
33
+ dataset_class: ImglistDataset
34
+ data_dir: ./data/images_classic/
35
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_cifar100.txt
36
+ batch_size: 200
37
+ shuffle: False
OpenOOD/configs/datasets/cifar100/cifar100_fsood.yml ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: cifar100_ood  # NOTE(review): this is the fsood config, but the name says _ood (cifar10_fsood.yml uses cifar10_fsood) — likely a copy-paste slip; confirm and rename
3
+ num_classes: 100
4
+
5
+ num_workers: '@{num_workers}'
6
+ num_gpus: '@{num_gpus}'
7
+ num_machines: '@{num_machines}'
8
+
9
+ dataset_class: ImglistDataset
10
+ batch_size: 128
11
+ shuffle: False
12
+
13
+ split_names: [val, nearood, farood, csid]
14
+ val:
15
+ data_dir: ./data/images_classic/
16
+ imglist_pth: ./data/benchmark_imglist/cifar100/val_cifar10.txt
17
+ nearood:
18
+ datasets: [cifar10, tin]
19
+ cifar10:
20
+ data_dir: ./data/images_classic/
21
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_cifar10.txt
22
+ tin:
23
+ data_dir: ./data/images_classic/
24
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_tin.txt
25
+ farood:
26
+ datasets: [mnist, svhn, texture, places365]
27
+ mnist:
28
+ data_dir: ./data/images_classic/
29
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_mnist.txt
30
+ svhn:
31
+ data_dir: ./data/images_classic/
32
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_svhn.txt
33
+ texture:
34
+ data_dir: ./data/images_classic/
35
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_texture.txt
36
+ places365:
37
+ data_dir: ./data/images_classic/
38
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_places365.txt
39
+ csid:
40
+ datasets: [cifar100c]
41
+ cifar100c:
42
+ data_dir: ./data/images_classic/
43
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_cifar100c.txt
OpenOOD/configs/datasets/cifar100/cifar100_oe.yml ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: cifar100_oe
2
+
3
+ dataset:
4
+ name: cifar100_oe
5
+ split_names: [train, oe, val, test]
6
+ oe:
7
+ dataset_class: ImglistDataset
8
+ data_dir: ./data/images_classic/
9
+ imglist_pth: ./data/benchmark_imglist/cifar100/train_tin597.txt
10
+ batch_size: 256
11
+ shuffle: True
12
+ interpolation: bilinear
OpenOOD/configs/datasets/cifar100/cifar100_ood.yml ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: cifar100_ood
3
+ num_classes: 100
4
+
5
+ num_workers: '@{num_workers}'
6
+ num_gpus: '@{num_gpus}'
7
+ num_machines: '@{num_machines}'
8
+
9
+ dataset_class: ImglistDataset
10
+ batch_size: 128
11
+ shuffle: False
12
+
13
+ split_names: [val, nearood, farood]
14
+ val:
15
+ data_dir: ./data/images_classic/
16
+ imglist_pth: ./data/benchmark_imglist/cifar100/val_tin.txt
17
+ nearood:
18
+ datasets: [cifar10, tin]
19
+ cifar10:
20
+ data_dir: ./data/images_classic/
21
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_cifar10.txt
22
+ tin:
23
+ data_dir: ./data/images_classic/
24
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_tin.txt
25
+ farood:
26
+ datasets: [mnist, svhn, texture, places365]
27
+ mnist:
28
+ data_dir: ./data/images_classic/
29
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_mnist.txt
30
+ svhn:
31
+ data_dir: ./data/images_classic/
32
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_svhn.txt
33
+ texture:
34
+ data_dir: ./data/images_classic/
35
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_texture.txt
36
+ places365:
37
+ data_dir: ./data/images_classic/
38
+ imglist_pth: ./data/benchmark_imglist/cifar100/test_places365.txt
OpenOOD/configs/datasets/covid/covid.yml ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: covid
3
+ split_names: [train, val, test]
4
+ num_classes: 2
5
+ image_size: 224
6
+ num_workers: '@{num_workers}'
7
+ num_gpus: '@{num_gpus}'
8
+ num_machines: '@{num_machines}'
9
+ train:
10
+ dataset_class: ImglistDataset
11
+ data_dir: ./data/covid_images/
12
+ imglist_pth: ./data/imglist/covid/train_bimcv.txt
13
+ batch_size: 128
14
+ shuffle: True
15
+ interpolation: bilinear
16
+ val:
17
+ dataset_class: ImglistDataset
18
+ data_dir: ./data/covid_images/
19
+ imglist_pth: ./data/imglist/covid/val_bimcv.txt
20
+ batch_size: 200
21
+ shuffle: False
22
+ interpolation: bilinear
23
+ test:
24
+ dataset_class: ImglistDataset
25
+ data_dir: ./data/covid_images/
26
+ imglist_pth: ./data/imglist/covid/test_bimcv.txt
27
+ batch_size: 200
28
+ shuffle: False
29
+ interpolation: bilinear
OpenOOD/configs/datasets/covid/covid_fsood.yml ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: covid_fsood
3
+ dataset_class: ImglistDataset
4
+ interpolation: bilinear
5
+
6
+ batch_size: 20
7
+ shuffle: False
8
+ num_classes: 2
9
+ image_size: 224
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [val, csid, nearood, farood]
15
+ val:
16
+ data_dir: ./data/covid_images/
17
+ imglist_pth: ./data/imglist/covid/val_ct.txt
18
+ csid:
19
+ datasets: [actmed, hannover]
20
+ actmed:
21
+ data_dir: ./data/covid_images/
22
+ imglist_pth: ./data/imglist/covid/test_actmed.txt
23
+ hannover:
24
+ data_dir: ./data/covid_images/
25
+ imglist_pth: ./data/imglist/covid/test_hannover.txt
26
+ nearood:
27
+ datasets: [ct, xraybone]
28
+ ct:
29
+ data_dir: ./data/covid_images/
30
+ imglist_pth: ./data/imglist/covid/test_ct.txt
31
+ xraybone:
32
+ data_dir: ./data/covid_images/
33
+ imglist_pth: ./data/imglist/covid/test_xraybone.txt
34
+ farood:
35
+ datasets: [mnist, cifar10, texture, tin]
36
+ mnist:
37
+ data_dir: ./data/images/
38
+ imglist_pth: ./data/imglist/covid/test_mnist.txt
39
+ cifar10:
40
+ data_dir: ./data/images/
41
+ imglist_pth: ./data/imglist/covid/test_cifar10.txt
42
+ texture:
43
+ data_dir: ./data/images/
44
+ imglist_pth: ./data/imglist/covid/test_texture.txt
45
+ tin:
46
+ data_dir: ./data/images/
47
+ imglist_pth: ./data/imglist/covid/test_tin.txt
OpenOOD/configs/datasets/covid/covid_ood.yml ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: covid_ood
3
+ dataset_class: ImglistDataset
4
+ interpolation: bilinear
5
+
6
+ batch_size: 128
7
+ shuffle: False
8
+ num_classes: 2
9
+ image_size: 224
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [val, nearood, farood]
15
+ val:
16
+ data_dir: ./data/covid_images/
17
+ imglist_pth: ./data/imglist/covid/val_ct.txt
18
+ nearood:
19
+ datasets: [ct, xraybone]
20
+ ct:
21
+ data_dir: ./data/covid_images/
22
+ imglist_pth: ./data/imglist/covid/test_ct.txt
23
+ xraybone:
24
+ data_dir: ./data/covid_images/
25
+ imglist_pth: ./data/imglist/covid/test_xraybone.txt
26
+ farood:
27
+ datasets: [mnist, cifar10, texture, tin]
28
+ mnist:
29
+ data_dir: ./data/images/
30
+ imglist_pth: ./data/imglist/covid/test_mnist.txt
31
+ cifar10:
32
+ data_dir: ./data/images/
33
+ imglist_pth: ./data/imglist/covid/test_cifar10.txt
34
+ texture:
35
+ data_dir: ./data/images/
36
+ imglist_pth: ./data/imglist/covid/test_texture.txt
37
+ tin:
38
+ data_dir: ./data/images/
39
+ imglist_pth: ./data/imglist/covid/test_tin.txt
OpenOOD/configs/datasets/imagenet/imagenet.yml ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: imagenet
3
+ num_classes: 1000
4
+ pre_size: 256
5
+ image_size: 224
6
+
7
+ interpolation: bilinear
8
+ normalization_type: imagenet
9
+
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [train, val, test]
15
+
16
+ train:
17
+ dataset_class: ImglistDataset
18
+ data_dir: ./data/images_largescale/
19
+ imglist_pth: ./data/benchmark_imglist/imagenet/train_imagenet.txt
20
+ batch_size: 128
21
+ shuffle: True
22
+ val:
23
+ dataset_class: ImglistDataset
24
+ data_dir: ./data/images_largescale/
25
+ imglist_pth: ./data/benchmark_imglist/imagenet/val_imagenet.txt
26
+ batch_size: 128
27
+ shuffle: False
28
+ test:
29
+ dataset_class: ImglistDataset
30
+ data_dir: ./data/images_largescale/
31
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_imagenet.txt
32
+ batch_size: 128
33
+ shuffle: False
OpenOOD/configs/datasets/imagenet/imagenet_double_label.yml ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: imagenet_double_label
3
+ interpolation: bilinear
4
+ normalization_type: imagenet
5
+ split_names: [train, val, test]
6
+ num_classes: 1008 # actually it's 1000 classes but it has 8 groups
7
+ image_size: 224
8
+ pre_size: 256
9
+ num_workers: '@{num_workers}'
10
+ num_gpus: '@{num_gpus}'
11
+ num_machines: '@{num_machines}'
12
+ train:
13
+ dataset_class: ImglistDataset
14
+ data_dir: ./data/images_largescale/
15
+ imglist_pth: ./data/benchmark_imglist/imagenet/train_imagenet_mos.txt
16
+ batch_size: 256
17
+ shuffle: True
18
+ interpolation: bilinear
19
+ val:
20
+ dataset_class: ImglistDataset
21
+ data_dir: ./data/images_largescale/
22
+ imglist_pth: ./data/benchmark_imglist/imagenet/val_imagenet_mos.txt
23
+ batch_size: 256
24
+ shuffle: False
25
+ interpolation: bilinear
26
+ test:
27
+ dataset_class: ImglistDataset
28
+ data_dir: ./data/images_largescale/
29
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_imagenet_mos.txt
30
+ batch_size: 256
31
+ shuffle: False
32
+ interpolation: bilinear
OpenOOD/configs/datasets/imagenet/imagenet_double_label_fsood.yml ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: imagenet_fsood
3
+ num_classes: 1000
4
+
5
+ dataset_class: ImglistDataset
6
+ interpolation: bilinear
7
+ batch_size: 256
8
+ shuffle: False
9
+
10
+ pre_size: 256
11
+ image_size: 224
12
+ num_workers: '@{num_workers}'
13
+ num_gpus: '@{num_gpus}'
14
+ num_machines: '@{num_machines}'
15
+ split_names: [val, nearood, farood, csid]
16
+ val:
17
+ data_dir: ./data/images_largescale/
18
+ imglist_pth: ./data/benchmark_imglist/imagenet/val_openimage_o.txt
19
+ nearood:
20
+ datasets: [ssb_hard, ninco]
21
+ ssb_hard:
22
+ data_dir: ./data/images_largescale/
23
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_ssb_hard.txt
24
+ ninco:
25
+ data_dir: ./data/images_largescale/
26
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_ninco.txt
27
+ farood:
28
+ datasets: [inaturalist, textures, openimageo]
29
+ textures:
30
+ data_dir: ./data/images_classic/
31
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_textures.txt
32
+ inaturalist:
33
+ data_dir: ./data/images_largescale/
34
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_inaturalist.txt
35
+ openimageo:
36
+ data_dir: ./data/images_largescale/
37
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_openimage_o.txt
38
+ csid:
39
+ datasets: [imagenetv2, imagenetc, imagenetr]
40
+ imagenetv2:
41
+ data_dir: ./data/images_largescale/
42
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_imagenet_v2_mos.txt
43
+ imagenetc:
44
+ data_dir: ./data/images_largescale/
45
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_imagenet_c_mos.txt
46
+ imagenetr:
47
+ data_dir: ./data/images_largescale/
48
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_imagenet_r_mos.txt
OpenOOD/configs/datasets/imagenet/imagenet_fsood.yml ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: imagenet_fsood
3
+ num_classes: 1000
4
+
5
+ dataset_class: ImglistDataset
6
+ interpolation: bilinear
7
+ batch_size: 32
8
+ shuffle: False
9
+
10
+ pre_size: 256
11
+ image_size: 224
12
+ num_workers: '@{num_workers}'
13
+ num_gpus: '@{num_gpus}'
14
+ num_machines: '@{num_machines}'
15
+ split_names: [val, nearood, farood, csid]
16
+ val:
17
+ data_dir: ./data/images_largescale/
18
+ imglist_pth: ./data/benchmark_imglist/imagenet/val_openimage_o.txt
19
+ nearood:
20
+ datasets: [ssb_hard, ninco]
21
+ ssb_hard:
22
+ data_dir: ./data/images_largescale/
23
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_ssb_hard.txt
24
+ ninco:
25
+ data_dir: ./data/images_largescale/
26
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_ninco.txt
27
+ farood:
28
+ datasets: [inaturalist, textures, openimageo]
29
+ textures:
30
+ data_dir: ./data/images_classic/
31
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_textures.txt
32
+ inaturalist:
33
+ data_dir: ./data/images_largescale/
34
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_inaturalist.txt
35
+ openimageo:
36
+ data_dir: ./data/images_largescale/
37
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_openimage_o.txt
38
+ csid:
39
+ datasets: [imagenetv2, imagenetc, imagenetr]
40
+ imagenetv2:
41
+ data_dir: ./data/images_largescale/
42
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_imagenet_v2.txt
43
+ imagenetc:
44
+ data_dir: ./data/images_largescale/
45
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_imagenet_c.txt
46
+ imagenetr:
47
+ data_dir: ./data/images_largescale/
48
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_imagenet_r.txt
OpenOOD/configs/datasets/imagenet/imagenet_ood.yml ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: imagenet_ood
3
+ num_classes: 1000
4
+
5
+ dataset_class: ImglistDataset
6
+ interpolation: bilinear
7
+ batch_size: 32
8
+ shuffle: False
9
+
10
+ pre_size: 256
11
+ image_size: 224
12
+ num_workers: '@{num_workers}'
13
+ num_gpus: '@{num_gpus}'
14
+ num_machines: '@{num_machines}'
15
+ split_names: [val, nearood, farood]
16
+ val:
17
+ data_dir: ./data/images_largescale/
18
+ imglist_pth: ./data/benchmark_imglist/imagenet/val_openimage_o.txt
19
+ nearood:
20
+ datasets: [ssb_hard, ninco]
21
+ ssb_hard:
22
+ data_dir: ./data/images_largescale/
23
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_ssb_hard.txt
24
+ ninco:
25
+ data_dir: ./data/images_largescale/
26
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_ninco.txt
27
+ farood:
28
+ datasets: [inaturalist, textures, openimageo]
29
+ textures:
30
+ data_dir: ./data/images_classic/
31
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_textures.txt
32
+ inaturalist:
33
+ data_dir: ./data/images_largescale/
34
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_inaturalist.txt
35
+ openimageo:
36
+ data_dir: ./data/images_largescale/
37
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_openimage_o.txt
OpenOOD/configs/datasets/imagenet200/imagenet200.yml ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: imagenet200
3
+ num_classes: 200
4
+ pre_size: 256
5
+ image_size: 224
6
+
7
+ interpolation: bilinear
8
+ normalization_type: imagenet
9
+
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [train, val, test]
15
+
16
+ train:
17
+ dataset_class: ImglistDataset
18
+ data_dir: ./data/images_largescale/
19
+ imglist_pth: ./data/benchmark_imglist/imagenet200/train_imagenet200.txt
20
+ batch_size: 256
21
+ shuffle: True
22
+ val:
23
+ dataset_class: ImglistDataset
24
+ data_dir: ./data/images_largescale/
25
+ imglist_pth: ./data/benchmark_imglist/imagenet200/val_imagenet200.txt
26
+ batch_size: 256
27
+ shuffle: False
28
+ test:
29
+ dataset_class: ImglistDataset
30
+ data_dir: ./data/images_largescale/
31
+ imglist_pth: ./data/benchmark_imglist/imagenet200/test_imagenet200.txt
32
+ batch_size: 256
33
+ shuffle: False
OpenOOD/configs/datasets/imagenet200/imagenet200_double_label.yml ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: imagenet200_double_label
3
+ interpolation: bilinear
4
+ normalization_type: imagenet
5
+ split_names: [train, val, test]
6
+ num_classes: 206 # actually it's 200 classes but it has 6 groups
7
+ image_size: 224
8
+ pre_size: 256
9
+ num_workers: '@{num_workers}'
10
+ num_gpus: '@{num_gpus}'
11
+ num_machines: '@{num_machines}'
12
+ train:
13
+ dataset_class: ImglistDataset
14
+ data_dir: ./data/images_largescale/
15
+ imglist_pth: ./data/benchmark_imglist/imagenet200/train_imagenet200_mos.txt
16
+ batch_size: 256
17
+ shuffle: True
18
+ interpolation: bilinear
19
+ val:
20
+ dataset_class: ImglistDataset
21
+ data_dir: ./data/images_largescale/
22
+ imglist_pth: ./data/benchmark_imglist/imagenet200/val_imagenet200_mos.txt
23
+ batch_size: 256
24
+ shuffle: False
25
+ interpolation: bilinear
26
+ test:
27
+ dataset_class: ImglistDataset
28
+ data_dir: ./data/images_largescale/
29
+ imglist_pth: ./data/benchmark_imglist/imagenet200/test_imagenet200_mos.txt
30
+ batch_size: 256
31
+ shuffle: False
32
+ interpolation: bilinear
OpenOOD/configs/datasets/imagenet200/imagenet200_double_label_fsood.yml ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: imagenet200_fsood
3
+ num_classes: 200
4
+
5
+ dataset_class: ImglistDataset
6
+ interpolation: bilinear
7
+ batch_size: 256
8
+ shuffle: False
9
+
10
+ pre_size: 256
11
+ image_size: 224
12
+ num_workers: '@{num_workers}'
13
+ num_gpus: '@{num_gpus}'
14
+ num_machines: '@{num_machines}'
15
+ split_names: [val, nearood, farood, csid]
16
+ val:
17
+ data_dir: ./data/images_largescale/
18
+ imglist_pth: ./data/benchmark_imglist/imagenet200/val_openimage_o.txt
19
+ nearood:
20
+ datasets: [ssb_hard, ninco]
21
+ ssb_hard:
22
+ data_dir: ./data/images_largescale/
23
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_ssb_hard.txt
24
+ ninco:
25
+ data_dir: ./data/images_largescale/
26
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_ninco.txt
27
+ farood:
28
+ datasets: [inaturalist, textures, openimageo]
29
+ textures:
30
+ data_dir: ./data/images_classic/
31
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_textures.txt
32
+ inaturalist:
33
+ data_dir: ./data/images_largescale/
34
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_inaturalist.txt
35
+ openimageo:
36
+ data_dir: ./data/images_largescale/
37
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_openimage_o.txt
38
+ csid:
39
+ datasets: [imagenetv2, imagenetc, imagenetr]
40
+ imagenetv2:
41
+ data_dir: ./data/images_largescale/
42
+ imglist_pth: ./data/benchmark_imglist/imagenet200/test_imagenet200_v2_mos.txt
43
+ imagenetc:
44
+ data_dir: ./data/images_largescale/
45
+ imglist_pth: ./data/benchmark_imglist/imagenet200/test_imagenet200_c_mos.txt
46
+ imagenetr:
47
+ data_dir: ./data/images_largescale/
48
+ imglist_pth: ./data/benchmark_imglist/imagenet200/test_imagenet200_r_mos.txt
OpenOOD/configs/datasets/imagenet200/imagenet200_fsood.yml ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: imagenet200_fsood
3
+ num_classes: 200
4
+
5
+ dataset_class: ImglistDataset
6
+ interpolation: bilinear
7
+ batch_size: 256
8
+ shuffle: False
9
+
10
+ pre_size: 256
11
+ image_size: 224
12
+ num_workers: '@{num_workers}'
13
+ num_gpus: '@{num_gpus}'
14
+ num_machines: '@{num_machines}'
15
+ split_names: [val, nearood, farood, csid]
16
+ val:
17
+ data_dir: ./data/images_largescale/
18
+ imglist_pth: ./data/benchmark_imglist/imagenet200/val_openimage_o.txt
19
+ nearood:
20
+ datasets: [ssb_hard, ninco]
21
+ ssb_hard:
22
+ data_dir: ./data/images_largescale/
23
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_ssb_hard.txt
24
+ ninco:
25
+ data_dir: ./data/images_largescale/
26
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_ninco.txt
27
+ farood:
28
+ datasets: [inaturalist, textures, openimageo]
29
+ textures:
30
+ data_dir: ./data/images_classic/
31
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_textures.txt
32
+ inaturalist:
33
+ data_dir: ./data/images_largescale/
34
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_inaturalist.txt
35
+ openimageo:
36
+ data_dir: ./data/images_largescale/
37
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_openimage_o.txt
38
+ csid:
39
+ datasets: [imagenetv2, imagenetc, imagenetr]
40
+ imagenetv2:
41
+ data_dir: ./data/images_largescale/
42
+ imglist_pth: ./data/benchmark_imglist/imagenet200/test_imagenet200_v2.txt
43
+ imagenetc:
44
+ data_dir: ./data/images_largescale/
45
+ imglist_pth: ./data/benchmark_imglist/imagenet200/test_imagenet200_c.txt
46
+ imagenetr:
47
+ data_dir: ./data/images_largescale/
48
+ imglist_pth: ./data/benchmark_imglist/imagenet200/test_imagenet200_r.txt
OpenOOD/configs/datasets/imagenet200/imagenet200_oe.yml ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: imagenet200_oe
2
+
3
+ dataset:
4
+ name: imagenet200_oe
5
+ split_names: [train, oe, val, test]
6
+ oe:
7
+ dataset_class: ImglistDataset
8
+ data_dir: ./data/images_largescale/
9
+ imglist_pth: ./data/benchmark_imglist/imagenet200/train_imagenet800.txt
10
+ batch_size: 256
11
+ shuffle: True
12
+ interpolation: bilinear
OpenOOD/configs/datasets/imagenet200/imagenet200_ood.yml ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: imagenet200_ood
3
+ num_classes: 200
4
+
5
+ dataset_class: ImglistDataset
6
+ interpolation: bilinear
7
+ batch_size: 256
8
+ shuffle: False
9
+
10
+ pre_size: 256
11
+ image_size: 224
12
+ num_workers: '@{num_workers}'
13
+ num_gpus: '@{num_gpus}'
14
+ num_machines: '@{num_machines}'
15
+ split_names: [val, nearood, farood]
16
+ val:
17
+ data_dir: ./data/images_largescale/
18
+ imglist_pth: ./data/benchmark_imglist/imagenet200/val_openimage_o.txt
19
+ nearood:
20
+ datasets: [ssb_hard, ninco]
21
+ ssb_hard:
22
+ data_dir: ./data/images_largescale/
23
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_ssb_hard.txt
24
+ ninco:
25
+ data_dir: ./data/images_largescale/
26
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_ninco.txt
27
+ farood:
28
+ datasets: [inaturalist, textures, openimageo]
29
+ textures:
30
+ data_dir: ./data/images_classic/
31
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_textures.txt
32
+ inaturalist:
33
+ data_dir: ./data/images_largescale/
34
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_inaturalist.txt
35
+ openimageo:
36
+ data_dir: ./data/images_largescale/
37
+ imglist_pth: ./data/benchmark_imglist/imagenet/test_openimage_o.txt
OpenOOD/configs/datasets/mnist/mnist.yml ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: mnist
3
+ num_classes: 10
4
+ image_size: 28
5
+ pre_size: 28
6
+
7
+ interpolation: bilinear
8
+ normalization_type: mnist
9
+
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [train, val, test]
15
+
16
+ train:
17
+ dataset_class: ImglistDataset
18
+ data_dir: ./data/images_classic/
19
+ imglist_pth: ./data/benchmark_imglist/mnist/train_mnist.txt
20
+ batch_size: 128
21
+ shuffle: True
22
+ val:
23
+ dataset_class: ImglistDataset
24
+ data_dir: ./data/images_classic/
25
+ imglist_pth: ./data/benchmark_imglist/mnist/val_mnist.txt
26
+ batch_size: 200
27
+ shuffle: False
28
+ test:
29
+ dataset_class: ImglistDataset
30
+ data_dir: ./data/images_classic/
31
+ imglist_pth: ./data/benchmark_imglist/mnist/test_mnist.txt
32
+ batch_size: 200
33
+ shuffle: False
OpenOOD/configs/datasets/mnist/mnist_fsood.yml ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: mnist_fsood
3
+ num_classes: 10
4
+
5
+ num_workers: '@{num_workers}'
6
+ num_gpus: '@{num_gpus}'
7
+ num_machines: '@{num_machines}'
8
+
9
+ dataset_class: ImglistDataset
10
+ batch_size: 128
11
+ shuffle: False
12
+
13
+ split_names: [val, nearood, farood, csid]
14
+ val:
15
+ data_dir: ./data/images_classic/
16
+ imglist_pth: ./data/benchmark_imglist/mnist/val_mnist.txt
17
+ nearood:
18
+ datasets: [notmnist, fashionmnist]
19
+ notmnist:
20
+ data_dir: ./data/images_classic/
21
+ imglist_pth: ./data/benchmark_imglist/mnist/test_notmnist.txt
22
+ fashionmnist:
23
+ data_dir: ./data/images_classic/
24
+ imglist_pth: ./data/benchmark_imglist/mnist/test_fashionmnist.txt
25
+ farood:
26
+ datasets: [texture, cifar10, tin, places365]
27
+ texture:
28
+ data_dir: ./data/images_classic/
29
+ imglist_pth: ./data/benchmark_imglist/mnist/test_texture.txt
30
+ cifar10:
31
+ data_dir: ./data/images_classic/
32
+ imglist_pth: ./data/benchmark_imglist/mnist/test_cifar10.txt
33
+ tin:
34
+ data_dir: ./data/images_classic/
35
+ imglist_pth: ./data/benchmark_imglist/mnist/test_tin.txt
36
+ places365:
37
+ data_dir: ./data/images_classic/
38
+ imglist_pth: ./data/benchmark_imglist/mnist/test_places365.txt
39
+ csid:
40
+ datasets: [svhn]
41
+ svhn:
42
+ data_dir: ./data/images_classic/
43
+ imglist_pth: ./data/benchmark_imglist/mnist/test_svhn.txt
OpenOOD/configs/datasets/mnist/mnist_ood.yml ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ood_dataset:
2
+ name: mnist_ood
3
+ num_classes: 10
4
+
5
+ num_workers: '@{num_workers}'
6
+ num_gpus: '@{num_gpus}'
7
+ num_machines: '@{num_machines}'
8
+
9
+ dataset_class: ImglistDataset
10
+ batch_size: 128
11
+ shuffle: False
12
+
13
+ split_names: [val, nearood, farood]
14
+ val:
15
+ data_dir: ./data/images_classic/
16
+ imglist_pth: ./data/benchmark_imglist/mnist/val_notmnist.txt
17
+ nearood:
18
+ datasets: [notmnist, fashionmnist]
19
+ notmnist:
20
+ data_dir: ./data/images_classic/
21
+ imglist_pth: ./data/benchmark_imglist/mnist/test_notmnist.txt
22
+ fashionmnist:
23
+ data_dir: ./data/images_classic/
24
+ imglist_pth: ./data/benchmark_imglist/mnist/test_fashionmnist.txt
25
+ farood:
26
+ datasets: [texture, cifar10, tin, places365]
27
+ texture:
28
+ data_dir: ./data/images_classic/
29
+ imglist_pth: ./data/benchmark_imglist/mnist/test_texture.txt
30
+ cifar10:
31
+ data_dir: ./data/images_classic/
32
+ imglist_pth: ./data/benchmark_imglist/mnist/test_cifar10.txt
33
+ tin:
34
+ data_dir: ./data/images_classic/
35
+ imglist_pth: ./data/benchmark_imglist/mnist/test_tin.txt
36
+ places365:
37
+ data_dir: ./data/images_classic/
38
+ imglist_pth: ./data/benchmark_imglist/mnist/test_places365.txt
OpenOOD/configs/datasets/mvtec/bottle.yml ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: bottle
3
+ num_classes: 2
4
+ pre_size: 256
5
+ image_size: 256
6
+
7
+ interpolation: bilinear
8
+ normalization_type: cifar10
9
+
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [train, test, val]
15
+
16
+ train:
17
+ dataset_class: ImglistDataset
18
+ data_dir: ./data/images/
19
+ interpolation: bilinear
20
+ imglist_pth: ./data/benchmark_imglist/mvtecList/bottle_train_good.txt
21
+ batch_size: 2
22
+ shuffle: True
23
+ test:
24
+ dataset_class: ImglistDataset
25
+ data_dir: ./data/images/
26
+ interpolation: bilinear
27
+ imglist_pth: ./data/benchmark_imglist/mvtecList/bottle_test_id.txt
28
+ batch_size: 1
29
+ shuffle: False
30
+ val:
31
+ dataset_class: ImglistDataset
32
+ data_dir: ./data/images/
33
+ interpolation: bilinear
34
+ imglist_pth: ./data/benchmark_imglist/mvtecList/bottle_test_id.txt
35
+ batch_size: 1
36
+ shuffle: False
37
+
38
+ ood_dataset:
39
+ name: bottle_ood
40
+ num_classes: 2
41
+ image_size: 256
42
+ num_workers: 4
43
+
44
+ dataset_class: ImglistDataset
45
+ interpolation: bilinear
46
+ batch_size: 1
47
+ shuffle: False
48
+
49
+ split_names: [val]
50
+ val:
51
+ data_dir: ./data/images/
52
+ imglist_pth: ./data/benchmark_imglist/mvtecList/bottle_test.txt
OpenOOD/configs/datasets/mvtec/cable.yml ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ name: cable
3
+ num_classes: 2
4
+ pre_size: 256
5
+ image_size: 256
6
+
7
+ interpolation: bilinear
8
+ normalization_type: cifar10
9
+
10
+ num_workers: '@{num_workers}'
11
+ num_gpus: '@{num_gpus}'
12
+ num_machines: '@{num_machines}'
13
+
14
+ split_names: [train, test, val]
15
+
16
+ train:
17
+ dataset_class: ImglistDataset
18
+ data_dir: ./data/images/
19
+ interpolation: bilinear
20
+ imglist_pth: ./data/benchmark_imglist/mvtecList/cable_train_good.txt
21
+ batch_size: 2
22
+ shuffle: True
23
+ test:
24
+ dataset_class: ImglistDataset
25
+ data_dir: ./data/images/
26
+ interpolation: bilinear
27
+ imglist_pth: ./data/benchmark_imglist/mvtecList/cable_test_id.txt
28
+ batch_size: 1
29
+ shuffle: False
30
+ val:
31
+ dataset_class: ImglistDataset
32
+ data_dir: ./data/images/
33
+ interpolation: bilinear
34
+ imglist_pth: ./data/benchmark_imglist/mvtecList/cable_test_id.txt
35
+ batch_size: 1
36
+ shuffle: False
37
+
38
+ ood_dataset:
39
+ name: cable_ood
40
+ num_classes: 2
41
+ image_size: 256
42
+ num_workers: 4
43
+
44
+ dataset_class: ImglistDataset
45
+ interpolation: bilinear
46
+ batch_size: 1
47
+ shuffle: False
48
+
49
+ split_names: [val]
50
+ val:
51
+ data_dir: ./data/images/
52
+ imglist_pth: ./data/benchmark_imglist/mvtecList/cable_test.txt