dlxj commited on
Commit ·
a7c2243
1
Parent(s): 996beb7
init
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .coveragerc +36 -0
- .dockerignore +19 -0
- .flake8 +9 -0
- .flake8.other +9 -0
- .flake8.speech +9 -0
- .gitattributes +1 -0
- .github/CODEOWNERS +0 -0
- .github/ISSUE_TEMPLATE/bug_report.md +42 -0
- .github/ISSUE_TEMPLATE/config.yml +2 -0
- .github/ISSUE_TEMPLATE/dev_container_bug_report.md +35 -0
- .github/ISSUE_TEMPLATE/feature_request.md +25 -0
- .github/PULL_REQUEST_TEMPLATE.md +57 -0
- .github/actions/cancel-workflow/action.yml +25 -0
- .github/actions/test-template/action.yml +231 -0
- .github/labeler.yml +41 -0
- .github/scripts/__init__.py +0 -0
- .github/scripts/notify.py +79 -0
- .github/workflows/_build_container.yml +89 -0
- .github/workflows/_bump_mcore_tag.yml +56 -0
- .github/workflows/build-docs.yml +76 -0
- .github/workflows/build-test-publish-wheel.yml +38 -0
- .github/workflows/cherry-pick-release-commit.yml +14 -0
- .github/workflows/cicd-approve-test-queue.yml +175 -0
- .github/workflows/cicd-main-speech.yml +483 -0
- .github/workflows/cicd-main-unit-tests.yml +105 -0
- .github/workflows/cicd-main.yml +395 -0
- .github/workflows/cicd-relabel-bot.yml +36 -0
- .github/workflows/claude-answer.yml +65 -0
- .github/workflows/claude-fix.yml +114 -0
- .github/workflows/claude-review.yml +58 -0
- .github/workflows/close-inactive-issue-pr.yml +25 -0
- .github/workflows/code-formatting.yml +73 -0
- .github/workflows/code-init-file-checker.yml +23 -0
- .github/workflows/code-linting.yml +160 -0
- .github/workflows/codeql.yml +75 -0
- .github/workflows/community-bot.yml +15 -0
- .github/workflows/config/changelog-config.json +134 -0
- .github/workflows/config/codeql.yml +9 -0
- .github/workflows/copyright-check.yml +22 -0
- .github/workflows/install-test.yml +283 -0
- .github/workflows/labeler.yml +14 -0
- .github/workflows/mcore-tag-bump-bot.yml +62 -0
- .github/workflows/monitor-single-vm.yml +54 -0
- .github/workflows/monitor-vms.yml +54 -0
- .github/workflows/release-docs.yml +123 -0
- .github/workflows/release-freeze.yml +85 -0
- .github/workflows/release-nightly-docs.yml +29 -0
- .github/workflows/release.yml +83 -0
- .github/workflows/secrets-detector.yml +43 -0
- .github/workflows/update-buildcache.yml +110 -0
.coveragerc
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[run]
|
| 2 |
+
concurrency = thread,multiprocessing
|
| 3 |
+
omit =
|
| 4 |
+
/tmp/*
|
| 5 |
+
/home/TestData/*
|
| 6 |
+
/workspace/Megatron-LM/*
|
| 7 |
+
nemo/collections/multimodal/*
|
| 8 |
+
nemo/collections/multimodal_autoregressive/*
|
| 9 |
+
nemo/collections/vision/*
|
| 10 |
+
nemo/collections/diffusion/*
|
| 11 |
+
nemo/collections/nlp/*
|
| 12 |
+
|
| 13 |
+
nemo/collections/asr/*
|
| 14 |
+
nemo/collections/speechlm/*
|
| 15 |
+
nemo/collections/tts/*
|
| 16 |
+
|
| 17 |
+
# omit from audio
|
| 18 |
+
nemo/collections/audio/data/data_simulation.py
|
| 19 |
+
nemo/collections/audio/metrics/squim.py
|
| 20 |
+
nemo/collections/audio/losses/maxine/*
|
| 21 |
+
nemo/collections/audio/models/maxine/*
|
| 22 |
+
nemo/collections/audio/parts/utils/maxine.py
|
| 23 |
+
|
| 24 |
+
nemo/core/*
|
| 25 |
+
nemo/collections/common/*
|
| 26 |
+
|
| 27 |
+
/workspace/config-3.12.py
|
| 28 |
+
/workspace/config-3.py
|
| 29 |
+
/workspace/config.py
|
| 30 |
+
|
| 31 |
+
[paths]
|
| 32 |
+
source =
|
| 33 |
+
nemo/
|
| 34 |
+
/home/runner/work/NeMo/NeMo/nemo
|
| 35 |
+
/workspace/nemo
|
| 36 |
+
|
.dockerignore
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__pycache__
|
| 2 |
+
*.pyc
|
| 3 |
+
*.pyo
|
| 4 |
+
*.pyd
|
| 5 |
+
.Python
|
| 6 |
+
env
|
| 7 |
+
pip-log.txt
|
| 8 |
+
pip-delete-this-directory.txt
|
| 9 |
+
.tox
|
| 10 |
+
.coverage
|
| 11 |
+
.coverage.*
|
| 12 |
+
.cache
|
| 13 |
+
nosetests.xml
|
| 14 |
+
coverage.xml
|
| 15 |
+
*,cover
|
| 16 |
+
*.log
|
| 17 |
+
.git
|
| 18 |
+
**/*.nemo
|
| 19 |
+
**/*.ckpt
|
.flake8
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[flake8]
|
| 2 |
+
max-line-length = 119
|
| 3 |
+
select =
|
| 4 |
+
F541, # f-string without any placeholders
|
| 5 |
+
F841, # local variable 'x' is assigned to but never used
|
| 6 |
+
F401, # 'x' imported but unused
|
| 7 |
+
E741, # ambiguous variable name 'l'
|
| 8 |
+
F821, # undefined name 'x'
|
| 9 |
+
E266, # too many leading '#' for block comment
|
.flake8.other
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[flake8]
|
| 2 |
+
max-line-length = 119
|
| 3 |
+
select =
|
| 4 |
+
F541, # f-string without any placeholders
|
| 5 |
+
F841, # local variable 'x' is assigned to but never used
|
| 6 |
+
F401, # 'x' imported but unused
|
| 7 |
+
E741, # ambiguous variable name 'l'
|
| 8 |
+
F821, # undefined name 'x'
|
| 9 |
+
E266, # too many leading '#' for block comment
|
.flake8.speech
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[flake8]
|
| 2 |
+
max-line-length = 119
|
| 3 |
+
select =
|
| 4 |
+
F541, # f-string without any placeholders
|
| 5 |
+
F841, # local variable 'x' is assigned to but never used
|
| 6 |
+
F401, # 'x' imported but unused
|
| 7 |
+
E741, # ambiguous variable name 'l'
|
| 8 |
+
F821, # undefined name 'x'
|
| 9 |
+
E266, # too many leading '#' for block comment
|
.gitattributes
CHANGED
|
@@ -1,4 +1,5 @@
|
|
| 1 |
*.7z filter=lfs diff=lfs merge=lfs -text
|
|
|
|
| 2 |
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
*.avro filter=lfs diff=lfs merge=lfs -text
|
| 4 |
*.bin filter=lfs diff=lfs merge=lfs -text
|
|
|
|
| 1 |
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.ipynb filter=lfs diff=lfs merge=lfs -text
|
| 3 |
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 4 |
*.avro filter=lfs diff=lfs merge=lfs -text
|
| 5 |
*.bin filter=lfs diff=lfs merge=lfs -text
|
.github/CODEOWNERS
ADDED
|
File without changes
|
.github/ISSUE_TEMPLATE/bug_report.md
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: Bug report
|
| 3 |
+
about: Create a report to help us improve
|
| 4 |
+
title: ''
|
| 5 |
+
labels: bug
|
| 6 |
+
assignees: ''
|
| 7 |
+
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
**Describe the bug**
|
| 11 |
+
|
| 12 |
+
A clear and concise description of what the bug is.
|
| 13 |
+
|
| 14 |
+
**Steps/Code to reproduce bug**
|
| 15 |
+
|
| 16 |
+
Please list *minimal* steps or code snippet for us to be able to reproduce the bug.
|
| 17 |
+
|
| 18 |
+
A helpful guide on on how to craft a minimal bug report http://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports.
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
**Expected behavior**
|
| 22 |
+
|
| 23 |
+
A clear and concise description of what you expected to happen.
|
| 24 |
+
|
| 25 |
+
**Environment overview (please complete the following information)**
|
| 26 |
+
|
| 27 |
+
- Environment location: [Bare-metal, Docker, Cloud(specify cloud provider - AWS, Azure, GCP, Collab)]
|
| 28 |
+
- Method of NeMo install: [pip install or from source]. Please specify exact commands you used to install.
|
| 29 |
+
- If method of install is [Docker], provide `docker pull` & `docker run` commands used
|
| 30 |
+
|
| 31 |
+
**Environment details**
|
| 32 |
+
|
| 33 |
+
If NVIDIA docker image is used you don't need to specify these.
|
| 34 |
+
Otherwise, please provide:
|
| 35 |
+
- OS version
|
| 36 |
+
- PyTorch version
|
| 37 |
+
- Python version
|
| 38 |
+
|
| 39 |
+
**Additional context**
|
| 40 |
+
|
| 41 |
+
Add any other context about the problem here.
|
| 42 |
+
Example: GPU model
|
.github/ISSUE_TEMPLATE/config.yml
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
blank_issues_enabled: false
|
| 2 |
+
|
.github/ISSUE_TEMPLATE/dev_container_bug_report.md
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
container pulled on date: mm/dd/yyyy
|
| 3 |
+
name: Dev container - Bug report
|
| 4 |
+
about: Create a report to help us improve
|
| 5 |
+
title: ''
|
| 6 |
+
labels: bug
|
| 7 |
+
assignees: ''
|
| 8 |
+
|
| 9 |
+
---
|
| 10 |
+
|
| 11 |
+
**Describe the bug**
|
| 12 |
+
|
| 13 |
+
A clear and concise description of what the bug is.
|
| 14 |
+
|
| 15 |
+
**Steps/Code to reproduce bug**
|
| 16 |
+
|
| 17 |
+
Please list *minimal* steps or code snippet for us to be able to reproduce the bug.
|
| 18 |
+
|
| 19 |
+
A helpful guide on on how to craft a minimal bug report http://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports.
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
**Expected behavior**
|
| 23 |
+
|
| 24 |
+
A clear and concise description of what you expected to happen.
|
| 25 |
+
|
| 26 |
+
**Environment overview (please complete the following information)**
|
| 27 |
+
|
| 28 |
+
- Environment location: Docker
|
| 29 |
+
- Method of install: Please specify exact commands you used to install.
|
| 30 |
+
- If method of install is [Docker], provide `docker pull` & `docker run` commands used
|
| 31 |
+
|
| 32 |
+
**Additional context**
|
| 33 |
+
|
| 34 |
+
Add any other context about the problem here.
|
| 35 |
+
Example: GPU model
|
.github/ISSUE_TEMPLATE/feature_request.md
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: Feature request
|
| 3 |
+
about: Suggest an idea for this project
|
| 4 |
+
title: ''
|
| 5 |
+
labels: feature request
|
| 6 |
+
assignees: ''
|
| 7 |
+
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
**Is your feature request related to a problem? Please describe.**
|
| 11 |
+
|
| 12 |
+
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
| 13 |
+
|
| 14 |
+
**Describe the solution you'd like**
|
| 15 |
+
|
| 16 |
+
A clear and concise description of what you want to happen.
|
| 17 |
+
Provide a code snippet on how new APIs/changes would be used by others.
|
| 18 |
+
|
| 19 |
+
**Describe alternatives you've considered**
|
| 20 |
+
|
| 21 |
+
A clear and concise description of any alternative solutions or features you've considered.
|
| 22 |
+
|
| 23 |
+
**Additional context**
|
| 24 |
+
|
| 25 |
+
Add any other context or screenshots about the feature request here.
|
.github/PULL_REQUEST_TEMPLATE.md
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
> [!IMPORTANT]
|
| 2 |
+
> The `Update branch` button must only be pressed in very rare occassions.
|
| 3 |
+
> An outdated branch is never blocking the merge of a PR.
|
| 4 |
+
> Please reach out to the automation team before pressing that button.
|
| 5 |
+
|
| 6 |
+
# What does this PR do ?
|
| 7 |
+
|
| 8 |
+
Add a one line overview of what this PR aims to accomplish.
|
| 9 |
+
|
| 10 |
+
**Collection**: [Note which collection this PR will affect]
|
| 11 |
+
|
| 12 |
+
# Changelog
|
| 13 |
+
|
| 14 |
+
- Add specific line by line info of high level changes in this PR.
|
| 15 |
+
|
| 16 |
+
# Usage
|
| 17 |
+
|
| 18 |
+
- You can potentially add a usage example below
|
| 19 |
+
|
| 20 |
+
```python
|
| 21 |
+
# Add a code snippet demonstrating how to use this
|
| 22 |
+
```
|
| 23 |
+
|
| 24 |
+
# GitHub Actions CI
|
| 25 |
+
|
| 26 |
+
The Jenkins CI system has been replaced by GitHub Actions self-hosted runners.
|
| 27 |
+
|
| 28 |
+
The GitHub Actions CI will run automatically when the "Run CICD" label is added to the PR.
|
| 29 |
+
To re-run CI remove and add the label again.
|
| 30 |
+
To run CI on an untrusted fork, a NeMo user with write access must first click "Approve and run".
|
| 31 |
+
|
| 32 |
+
# Before your PR is "Ready for review"
|
| 33 |
+
|
| 34 |
+
**Pre checks**:
|
| 35 |
+
|
| 36 |
+
- [ ] Make sure you read and followed [Contributor guidelines](https://github.com/NVIDIA/NeMo/blob/main/CONTRIBUTING.md)
|
| 37 |
+
- [ ] Did you write any new necessary tests?
|
| 38 |
+
- [ ] Did you add or update any necessary documentation?
|
| 39 |
+
- [ ] Does the PR affect components that are optional to install? (Ex: Numba, Pynini, Apex etc)
|
| 40 |
+
- [ ] Reviewer: Does the PR have correct import guards for all optional libraries?
|
| 41 |
+
|
| 42 |
+
**PR Type**:
|
| 43 |
+
|
| 44 |
+
- [ ] New Feature
|
| 45 |
+
- [ ] Bugfix
|
| 46 |
+
- [ ] Documentation
|
| 47 |
+
|
| 48 |
+
If you haven't finished some of the above items you can still open "Draft" PR.
|
| 49 |
+
|
| 50 |
+
## Who can review?
|
| 51 |
+
|
| 52 |
+
Anyone in the community is free to review the PR once the checks have passed.
|
| 53 |
+
[Contributor guidelines](https://github.com/NVIDIA/NeMo/blob/main/CONTRIBUTING.md) contains specific people who can review PRs to various areas.
|
| 54 |
+
|
| 55 |
+
# Additional Information
|
| 56 |
+
|
| 57 |
+
- Related to # (issue)
|
.github/actions/cancel-workflow/action.yml
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Cancel Workflow
|
| 2 |
+
description: >
|
| 3 |
+
Cancels the current workflow run, i.e. all jobs. Useful if you want to cancel the rest of the workflow when one job
|
| 4 |
+
fails. Note that this will cause the workflow to appear cancelled, not failed.
|
| 5 |
+
|
| 6 |
+
# Cancelling the workflow in a post-script (like this:
|
| 7 |
+
# https://docs.github.com/en/actions/creating-actions/metadata-syntax-for-github-actions#runspost; can also be done with
|
| 8 |
+
# this action: https://github.com/webiny/action-post-run, see Git history of this file) wouldn't help the status, it
|
| 9 |
+
# would still be cancelled. It actually indeed is, but it would be nicer to set it to failed, but there seems to be no
|
| 10 |
+
# way to do this.
|
| 11 |
+
|
| 12 |
+
runs:
|
| 13 |
+
using: "composite"
|
| 14 |
+
steps:
|
| 15 |
+
- name: Cancel Workflow
|
| 16 |
+
# # Fork PRs won't have a token with write access to Actions, thus won't be able to cancel the workflow.
|
| 17 |
+
# if: github.event.pull_request == '' || github.event.pull_request.head.repo.fork == false
|
| 18 |
+
shell: bash
|
| 19 |
+
run: |
|
| 20 |
+
curl --verbose \
|
| 21 |
+
-X POST \
|
| 22 |
+
-H "Accept: application/vnd.github+json" \
|
| 23 |
+
-H "Authorization: Bearer ${{ github.token }}" \
|
| 24 |
+
-H "X-GitHub-Api-Version: 2022-11-28" \
|
| 25 |
+
https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/cancel
|
.github/actions/test-template/action.yml
ADDED
|
@@ -0,0 +1,231 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) 2025, NVIDIA CORPORATION.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
name: "Test Template"
|
| 15 |
+
description: "Template for running NeMo tests in a containerized environment"
|
| 16 |
+
|
| 17 |
+
inputs:
|
| 18 |
+
runner:
|
| 19 |
+
description: "Runner to use for test"
|
| 20 |
+
required: true
|
| 21 |
+
timeout:
|
| 22 |
+
description: "Max runtime of test in minutes"
|
| 23 |
+
required: false
|
| 24 |
+
default: "10"
|
| 25 |
+
script:
|
| 26 |
+
description: "Test script to execute"
|
| 27 |
+
required: true
|
| 28 |
+
after_script:
|
| 29 |
+
description: "Script to run after main test"
|
| 30 |
+
required: false
|
| 31 |
+
default: ":"
|
| 32 |
+
is_optional:
|
| 33 |
+
description: "Failure will cancel all other tests if set to true"
|
| 34 |
+
required: false
|
| 35 |
+
default: "false"
|
| 36 |
+
is_unit_test:
|
| 37 |
+
description: "Upload coverage as unit test"
|
| 38 |
+
required: false
|
| 39 |
+
default: "false"
|
| 40 |
+
tests_to_run:
|
| 41 |
+
description: "Tests to run"
|
| 42 |
+
required: false
|
| 43 |
+
default: '["all"]'
|
| 44 |
+
image:
|
| 45 |
+
description: "Image to use for test"
|
| 46 |
+
required: false
|
| 47 |
+
default: "nemo_container"
|
| 48 |
+
cpu-only:
|
| 49 |
+
description: "Run tests on CPU only"
|
| 50 |
+
required: false
|
| 51 |
+
default: "false"
|
| 52 |
+
test_dir:
|
| 53 |
+
description: "Directory under tests/ containing the test scripts"
|
| 54 |
+
required: false
|
| 55 |
+
default: "functional_tests"
|
| 56 |
+
runs:
|
| 57 |
+
using: "composite"
|
| 58 |
+
steps:
|
| 59 |
+
- name: Noop
|
| 60 |
+
shell: bash
|
| 61 |
+
run: |
|
| 62 |
+
chmod -R u+rwX ${{ github.run_id }}
|
| 63 |
+
echo "noop"
|
| 64 |
+
|
| 65 |
+
- name: Docker system cleanup
|
| 66 |
+
shell: bash
|
| 67 |
+
run: |
|
| 68 |
+
docker system prune -af --filter "until=24h" --filter "label!=nemo.pr_number=${{ github.event.pull_request.number || 0 }}" --force || true
|
| 69 |
+
|
| 70 |
+
- name: Docker pull image
|
| 71 |
+
shell: bash
|
| 72 |
+
run: |
|
| 73 |
+
docker pull nemoci.azurecr.io/${{ inputs.image }}:${{ github.run_id }}
|
| 74 |
+
|
| 75 |
+
- name: Clean repos
|
| 76 |
+
shell: bash
|
| 77 |
+
run: |
|
| 78 |
+
|
| 79 |
+
- name: Create UUID
|
| 80 |
+
id: uuid
|
| 81 |
+
shell: bash
|
| 82 |
+
run: |
|
| 83 |
+
echo "id=$(uuidgen)" >> "$GITHUB_OUTPUT"
|
| 84 |
+
|
| 85 |
+
- name: Checkout NeMo
|
| 86 |
+
uses: actions/checkout@v6
|
| 87 |
+
env:
|
| 88 |
+
DIR: ${{ github.run_id }}
|
| 89 |
+
with:
|
| 90 |
+
path: ${{ github.run_id }}/${{steps.uuid.outputs.id }}/NeMo
|
| 91 |
+
|
| 92 |
+
- name: Start container
|
| 93 |
+
shell: bash
|
| 94 |
+
env:
|
| 95 |
+
DIR: ${{ github.run_id }}
|
| 96 |
+
run: |
|
| 97 |
+
mkdir -p $DIR
|
| 98 |
+
|
| 99 |
+
# Map of runner names to GPU device configurations
|
| 100 |
+
declare -A GPU_CONFIGS=(
|
| 101 |
+
["myVm-01"]="0,1"
|
| 102 |
+
["myVm-02"]="2,3"
|
| 103 |
+
["myVm-03"]="4,5"
|
| 104 |
+
["myVm-04"]="6,7"
|
| 105 |
+
)
|
| 106 |
+
|
| 107 |
+
ARG=("")
|
| 108 |
+
if [[ "${{ inputs.cpu-only }}" == "false" ]]; then
|
| 109 |
+
ARG=("--runtime=nvidia --gpus all")
|
| 110 |
+
fi
|
| 111 |
+
|
| 112 |
+
cmd=$(cat <<RUN_TEST_EOF
|
| 113 |
+
#!/bin/bash
|
| 114 |
+
docker container rm -f nemo_container_${{ github.run_id }}_${{ inputs.runner }} || true
|
| 115 |
+
docker run \
|
| 116 |
+
--rm \
|
| 117 |
+
-d \
|
| 118 |
+
--name nemo_container_${{ github.run_id }}_${{ inputs.runner }} ${ARG[@]} \
|
| 119 |
+
--shm-size=64g \
|
| 120 |
+
--env TRANSFORMERS_OFFLINE=0 \
|
| 121 |
+
--env HYDRA_FULL_ERROR=1 \
|
| 122 |
+
--env HF_HOME=/home/TestData/HF_HOME \
|
| 123 |
+
--env RUN_ID=${{ github.run_id }} \
|
| 124 |
+
--volume $(pwd)/${{ github.run_id }}/${{steps.uuid.outputs.id }}/NeMo:/workspace \
|
| 125 |
+
--volume /mnt/datadrive/TestData:/home/TestData nemoci.azurecr.io/${{ inputs.image }}:${{ github.run_id }} \
|
| 126 |
+
bash -c "sleep $(( ${{ inputs.timeout }} * 60 + 60 ))"
|
| 127 |
+
RUN_TEST_EOF
|
| 128 |
+
)
|
| 129 |
+
|
| 130 |
+
echo "$cmd" | tee "$DIR/retry_job.sh"
|
| 131 |
+
bash $DIR/retry_job.sh
|
| 132 |
+
|
| 133 |
+
- name: Create run-script
|
| 134 |
+
id: create
|
| 135 |
+
env:
|
| 136 |
+
DIR: ${{ github.run_id }}
|
| 137 |
+
shell: bash
|
| 138 |
+
run: |
|
| 139 |
+
COVERAGE_PREFIX=$([[ "${{ inputs.is_unit_test }}" == "true" ]] && echo "unit-test" || echo "e2e")
|
| 140 |
+
echo "coverage-prefix=$COVERAGE_PREFIX" | tee -a "$GITHUB_OUTPUT"
|
| 141 |
+
|
| 142 |
+
mkdir -p $DIR
|
| 143 |
+
rm $DIR/.coverage || true
|
| 144 |
+
rm $DIR/err.log || true
|
| 145 |
+
|
| 146 |
+
cmd=$(cat <<RUN_TEST_EOF
|
| 147 |
+
#!/bin/bash
|
| 148 |
+
|
| 149 |
+
(
|
| 150 |
+
set -e
|
| 151 |
+
|
| 152 |
+
docker exec -t nemo_container_${{ github.run_id }}_${{ inputs.runner }} bash -c '\
|
| 153 |
+
cp -r /opt/Megatron-LM/ /workspace/ && \
|
| 154 |
+
bash tests/${{ inputs.test_dir }}/${{ inputs.script }}.sh && \
|
| 155 |
+
echo "Finished successfully." || echo "Did not finish."'
|
| 156 |
+
) 2>&1 | tee $DIR/err.log
|
| 157 |
+
|
| 158 |
+
RUN_TEST_EOF
|
| 159 |
+
)
|
| 160 |
+
|
| 161 |
+
echo "timeout_in_seconds=$(( ${{ inputs.timeout }} * 60 ))" | tee -a "$GITHUB_OUTPUT"
|
| 162 |
+
echo "$cmd" | tee "$DIR/job.sh"
|
| 163 |
+
|
| 164 |
+
- name: Run main script
|
| 165 |
+
uses: nick-fields/retry@v3
|
| 166 |
+
with:
|
| 167 |
+
timeout_seconds: ${{ steps.create.outputs.timeout_in_seconds }}
|
| 168 |
+
max_attempts: 3
|
| 169 |
+
shell: bash
|
| 170 |
+
retry_on: timeout
|
| 171 |
+
command: /bin/bash ${{ github.run_id }}/job.sh
|
| 172 |
+
on_retry_command: /bin/bash ${{ github.run_id }}/retry_job.sh
|
| 173 |
+
|
| 174 |
+
- name: Check result
|
| 175 |
+
id: check
|
| 176 |
+
shell: bash
|
| 177 |
+
env:
|
| 178 |
+
DIR: ${{ github.run_id }}
|
| 179 |
+
run: |
|
| 180 |
+
cat $DIR/err.log
|
| 181 |
+
|
| 182 |
+
log=$(tail -c 2000 $DIR/err.log | base64 -w 0)
|
| 183 |
+
echo "log=$log" >> "$GITHUB_OUTPUT"
|
| 184 |
+
|
| 185 |
+
potential_infra_failure=$(cat $DIR/err.log | grep -Eqiw "device" && echo true || echo false)
|
| 186 |
+
echo "potential_infra_failure=$potential_infra_failure" >> "$GITHUB_OUTPUT"
|
| 187 |
+
|
| 188 |
+
docker exec nemo_container_${{ github.run_id }}_${{ inputs.runner }} coverage combine
|
| 189 |
+
docker exec nemo_container_${{ github.run_id }}_${{ inputs.runner }} coverage xml
|
| 190 |
+
docker cp nemo_container_${{ github.run_id }}_${{ inputs.runner }}:/workspace/.coverage $DIR/.coverage
|
| 191 |
+
docker cp nemo_container_${{ github.run_id }}_${{ inputs.runner }}:/workspace/coverage.xml $DIR/coverage.xml
|
| 192 |
+
|
| 193 |
+
coverage_report=coverage-${{ steps.create.outputs.coverage-prefix }}-${{ github.run_id }}-$(uuidgen)
|
| 194 |
+
echo "coverage_report=$coverage_report" >> "$GITHUB_OUTPUT"
|
| 195 |
+
|
| 196 |
+
IS_SUCCESS=$(tail -n 1 $DIR/err.log | grep -q "Finished successfully." && echo "true" || echo "false")
|
| 197 |
+
|
| 198 |
+
if [[ "$IS_SUCCESS" == "false" && "${{ inputs.is_optional }}" == "true" ]]; then
|
| 199 |
+
echo "::warning:: Test failed, but displayed as successful because it is marked as optional."
|
| 200 |
+
IS_SUCCESS=true
|
| 201 |
+
fi
|
| 202 |
+
|
| 203 |
+
if [[ "$IS_SUCCESS" == "false" ]]; then
|
| 204 |
+
echo Test did not finish successfully.
|
| 205 |
+
exit 1
|
| 206 |
+
fi
|
| 207 |
+
|
| 208 |
+
exit $EXIT_CODE
|
| 209 |
+
|
| 210 |
+
- name: Test coverage
|
| 211 |
+
shell: bash -x -e -u -o pipefail {0}
|
| 212 |
+
run: |
|
| 213 |
+
docker exec -t nemo_container_${{ github.run_id }}_${{ inputs.runner }} coverage report -i
|
| 214 |
+
|
| 215 |
+
- name: Upload artifacts
|
| 216 |
+
uses: actions/upload-artifact@v6
|
| 217 |
+
if: ${{ steps.check.outputs.coverage_report != 'none' }}
|
| 218 |
+
with:
|
| 219 |
+
name: ${{ steps.check.outputs.coverage_report }}
|
| 220 |
+
path: |
|
| 221 |
+
${{ github.run_id }}/coverage.xml
|
| 222 |
+
${{ github.run_id }}/.coverage
|
| 223 |
+
include-hidden-files: true
|
| 224 |
+
|
| 225 |
+
- name: Container shutdown
|
| 226 |
+
if: always()
|
| 227 |
+
shell: bash
|
| 228 |
+
run: |
|
| 229 |
+
docker exec nemo_container_${{ github.run_id }}_${{ inputs.runner }} bash -c "chown -R $(id -u):$(id -g) /workspace"
|
| 230 |
+
rm -rf $(pwd)/${{ github.run_id }}/${{steps.uuid.outputs.id }} || true
|
| 231 |
+
docker container rm -f nemo_container_${{ github.run_id }}_${{ inputs.runner }} || true
|
.github/labeler.yml
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
ASR:
|
| 2 |
+
- nemo/collections/asr/**/*
|
| 3 |
+
- examples/asr/**/*
|
| 4 |
+
- tutorials/asr/**/*
|
| 5 |
+
- docs/source/asr/**/*
|
| 6 |
+
- tests/collections/asr/**
|
| 7 |
+
|
| 8 |
+
Speaker Tasks:
|
| 9 |
+
- examples/speaker_tasks/**/*
|
| 10 |
+
- tutorials/speaker_tasks/**/*
|
| 11 |
+
|
| 12 |
+
TTS:
|
| 13 |
+
- nemo/collections/tts/**/*
|
| 14 |
+
- nemo/collections/common/tokenizers/text_to_speech/**
|
| 15 |
+
- examples/tts/**/*
|
| 16 |
+
- tutorials/tts/**/*
|
| 17 |
+
- docs/source/tts/**/*
|
| 18 |
+
- scripts/dataset_processing/tts/**
|
| 19 |
+
- scripts/tts_dataset_files/**
|
| 20 |
+
- tests/collections/tts/**
|
| 21 |
+
- tests/collections/common/tokenizers/text_to_speech/**
|
| 22 |
+
|
| 23 |
+
Audio:
|
| 24 |
+
- nemo/collections/audio/**/*
|
| 25 |
+
- examples/audio/**/*
|
| 26 |
+
- tutorials/audio/**/*
|
| 27 |
+
- docs/source/audio/**/*
|
| 28 |
+
- tests/collections/audio/**
|
| 29 |
+
|
| 30 |
+
core:
|
| 31 |
+
- nemo/core/**/*
|
| 32 |
+
- tests/core/**
|
| 33 |
+
|
| 34 |
+
common:
|
| 35 |
+
- nemo/collections/common/**/*
|
| 36 |
+
|
| 37 |
+
CI:
|
| 38 |
+
- .github/**/*
|
| 39 |
+
- Jenkinsfile
|
| 40 |
+
- Dockerfile
|
| 41 |
+
- ci.groovy
|
.github/scripts/__init__.py
ADDED
|
File without changes
|
.github/scripts/notify.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) 2025, NVIDIA CORPORATION.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
import os
|
| 15 |
+
|
| 16 |
+
import requests
|
| 17 |
+
from github import Github
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def send_slack_notification():
|
| 21 |
+
# Get environment variables
|
| 22 |
+
gh_token = os.environ.get('GH_TOKEN')
|
| 23 |
+
webhook_url = os.environ.get('SLACK_WEBHOOK')
|
| 24 |
+
repository = os.environ.get('REPOSITORY')
|
| 25 |
+
run_id = os.environ.get('RUN_ID')
|
| 26 |
+
server_url = os.environ.get('SERVER_URL', 'https://github.com')
|
| 27 |
+
pr_number = int(os.environ.get('PR_NUMBER', 0))
|
| 28 |
+
branch_name = os.environ.get('BRANCH_NAME')
|
| 29 |
+
|
| 30 |
+
# Get failure info from GitHub API
|
| 31 |
+
gh = Github(gh_token)
|
| 32 |
+
repo = gh.get_repo(repository)
|
| 33 |
+
|
| 34 |
+
# Get failed jobs
|
| 35 |
+
failed_jobs = [job.name for job in repo.get_workflow_run(int(run_id)).jobs() if job.conclusion == 'failure']
|
| 36 |
+
|
| 37 |
+
if pr_number != 0:
|
| 38 |
+
pr = repo.get_pull(pr_number)
|
| 39 |
+
|
| 40 |
+
title = f"*<{server_url}/{repository}/pull/{pr_number}|PR#{pr_number}>: {pr.title.replace('`', '')}*"
|
| 41 |
+
author = f"<{server_url}/{pr.user.login}|{pr.user.login}>"
|
| 42 |
+
branch = f"<{server_url}/{pr.head.repo.full_name}/tree/{pr.head.ref}|{pr.head.ref}>"
|
| 43 |
+
else:
|
| 44 |
+
title = f"*Run on <{server_url}/{repository}/tree/{branch_name}|{branch_name}>*"
|
| 45 |
+
author = "No author"
|
| 46 |
+
branch = f"<{server_url}/{repository}/tree/{branch_name}|{branch_name}>"
|
| 47 |
+
|
| 48 |
+
blocks = [
|
| 49 |
+
{
|
| 50 |
+
"type": "section",
|
| 51 |
+
"text": {
|
| 52 |
+
"type": "mrkdwn",
|
| 53 |
+
"text": (
|
| 54 |
+
f"{title}\n"
|
| 55 |
+
f"• Author: {author}\n"
|
| 56 |
+
f"• Branch: {branch}\n"
|
| 57 |
+
f"• Pipeline: <{server_url}/{repository}/actions/runs/{run_id}|View Run>\n"
|
| 58 |
+
f"• Failed Jobs:\n"
|
| 59 |
+
+ "\n".join(
|
| 60 |
+
[
|
| 61 |
+
f" • <{server_url}/{repository}/actions/runs/{run_id}|{job.split('/')[-1]}>"
|
| 62 |
+
for job in failed_jobs
|
| 63 |
+
if job.split('/')[-1] != 'Nemo_CICD_Test'
|
| 64 |
+
]
|
| 65 |
+
)
|
| 66 |
+
),
|
| 67 |
+
},
|
| 68 |
+
}
|
| 69 |
+
]
|
| 70 |
+
|
| 71 |
+
print({"blocks": blocks})
|
| 72 |
+
|
| 73 |
+
# Send to Slack
|
| 74 |
+
response = requests.post(webhook_url, json={"blocks": blocks})
|
| 75 |
+
response.raise_for_status()
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
if __name__ == "__main__":
|
| 79 |
+
send_slack_notification()
|
.github/workflows/_build_container.yml
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: ~Build container template
|
| 2 |
+
on:
|
| 3 |
+
workflow_call:
|
| 4 |
+
inputs:
|
| 5 |
+
image-name:
|
| 6 |
+
required: true
|
| 7 |
+
type: string
|
| 8 |
+
description: "The name of the image to build"
|
| 9 |
+
dockerfile:
|
| 10 |
+
required: true
|
| 11 |
+
type: string
|
| 12 |
+
runner:
|
| 13 |
+
required: false
|
| 14 |
+
default: self-hosted-azure-builder
|
| 15 |
+
type: string
|
| 16 |
+
description: "The runner to use for the build"
|
| 17 |
+
|
| 18 |
+
jobs:
|
| 19 |
+
pre-flight:
|
| 20 |
+
runs-on: ubuntu-latest
|
| 21 |
+
outputs:
|
| 22 |
+
build_args: ${{ steps.manifest.outputs.BUILD_ARGS }}
|
| 23 |
+
cache-from: ${{ steps.cache_from.outputs.LAST_PRS }}
|
| 24 |
+
steps:
|
| 25 |
+
- name: Checkout repository
|
| 26 |
+
uses: actions/checkout@v6
|
| 27 |
+
|
| 28 |
+
- name: Parse manifest.json
|
| 29 |
+
id: manifest
|
| 30 |
+
run: |
|
| 31 |
+
BUILD_ARGS=$(cat << EOF
|
| 32 |
+
BASE_IMAGE=$(cat requirements/manifest.json | jq -r '."ngc-pytorch"')
|
| 33 |
+
TRTLLM_REPO=$(cat requirements/manifest.json | jq -r '."vcs-dependencies"."trt-llm".repo')
|
| 34 |
+
TRTLLM_TAG=$(cat requirements/manifest.json | jq -r '."vcs-dependencies"."trt-llm".ref')
|
| 35 |
+
MLM_REPO=$(cat requirements/manifest.json | jq -r '."vcs-dependencies"."megatron-lm".repo')
|
| 36 |
+
MLM_TAG=$(cat requirements/manifest.json | jq -r '."vcs-dependencies"."megatron-lm".ref')
|
| 37 |
+
TE_REPO=$(cat requirements/manifest.json | jq -r '."vcs-dependencies".transformer_engine.repo')
|
| 38 |
+
TE_TAG=$(cat requirements/manifest.json | jq -r '."vcs-dependencies".transformer_engine.ref')
|
| 39 |
+
APEX_REPO=$(cat requirements/manifest.json | jq -r '."vcs-dependencies".apex.repo')
|
| 40 |
+
APEX_TAG=$(cat requirements/manifest.json | jq -r '."vcs-dependencies".apex.ref')
|
| 41 |
+
EOF
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
echo "BUILD_ARGS<<EOF" >> $GITHUB_OUTPUT
|
| 45 |
+
echo "$BUILD_ARGS" >> $GITHUB_OUTPUT
|
| 46 |
+
echo "EOF" >> $GITHUB_OUTPUT
|
| 47 |
+
|
| 48 |
+
- name: Get last merged PR
|
| 49 |
+
id: cache_from
|
| 50 |
+
env:
|
| 51 |
+
GH_TOKEN: ${{ github.token }}
|
| 52 |
+
run: |
|
| 53 |
+
LAST_PRS=$(gh api graphql -f query='
|
| 54 |
+
query {
|
| 55 |
+
repository(owner: "NVIDIA", name: "NeMo") {
|
| 56 |
+
pullRequests(states: MERGED, first: 100, orderBy: {field: UPDATED_AT, direction: DESC}) {
|
| 57 |
+
nodes {
|
| 58 |
+
number
|
| 59 |
+
}
|
| 60 |
+
}
|
| 61 |
+
}
|
| 62 |
+
}' | jq -r '.data.repository.pullRequests.nodes[].number' | while read -r number; do
|
| 63 |
+
echo "nemoci.azurecr.io/${{ inputs.image-name }}-buildcache:$number"
|
| 64 |
+
done)
|
| 65 |
+
|
| 66 |
+
echo "LAST_PRS<<EOF" >> $GITHUB_OUTPUT
|
| 67 |
+
echo "$LAST_PRS" >> $GITHUB_OUTPUT
|
| 68 |
+
echo "EOF" >> $GITHUB_OUTPUT
|
| 69 |
+
|
| 70 |
+
build:
|
| 71 |
+
uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_build_container.yml@v0.27.0
|
| 72 |
+
needs: [pre-flight]
|
| 73 |
+
with:
|
| 74 |
+
image-name: ${{ inputs.image-name }}
|
| 75 |
+
dockerfile: ${{ inputs.dockerfile }}
|
| 76 |
+
image-label: nemo-core
|
| 77 |
+
build-args: |
|
| 78 |
+
IMAGE_LABEL=nemo-core
|
| 79 |
+
NEMO_TAG=${{ github.sha }}
|
| 80 |
+
NEMO_REPO=https://github.com/NVIDIA/NeMo
|
| 81 |
+
PR_NUMBER=${{ github.event.pull_request.number || 0 }}
|
| 82 |
+
${{ needs.pre-flight.outputs.build_args }}
|
| 83 |
+
prune-filter-timerange: 24h
|
| 84 |
+
use-inline-cache: false
|
| 85 |
+
cache-from: |
|
| 86 |
+
nemoci.azurecr.io/${{ inputs.image-name }}-buildcache:main
|
| 87 |
+
nemoci.azurecr.io/${{ inputs.image-name }}-buildcache:${{ github.event.pull_request.number || 0 }}
|
| 88 |
+
${{ needs.pre-flight.outputs.cache-from }}
|
| 89 |
+
runner: ${{ inputs.runner }}
|
.github/workflows/_bump_mcore_tag.yml
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: ~Bump Megatron Tag template
|
| 2 |
+
on:
|
| 3 |
+
workflow_call:
|
| 4 |
+
inputs:
|
| 5 |
+
nemo-target-branch:
|
| 6 |
+
required: true
|
| 7 |
+
type: string
|
| 8 |
+
description: "The target branch to bump"
|
| 9 |
+
mcore-target-branch:
|
| 10 |
+
required: true
|
| 11 |
+
type: string
|
| 12 |
+
description: "The target branch to bump"
|
| 13 |
+
secrets:
|
| 14 |
+
PAT:
|
| 15 |
+
required: true
|
| 16 |
+
|
| 17 |
+
jobs:
|
| 18 |
+
update-branch:
|
| 19 |
+
runs-on: ubuntu-latest
|
| 20 |
+
steps:
|
| 21 |
+
- uses: actions/checkout@v6
|
| 22 |
+
with:
|
| 23 |
+
ref: ${{ inputs.nemo-target-branch }}
|
| 24 |
+
|
| 25 |
+
- name: Set Git config
|
| 26 |
+
run: |
|
| 27 |
+
git config --local user.email "actions@github.com"
|
| 28 |
+
git config --local user.name "Github Actions"
|
| 29 |
+
- name: Merge weekly-bump-${{ inputs.nemo-target-branch }} back to base branch
|
| 30 |
+
env:
|
| 31 |
+
SOURCE_BRANCH: weekly-bump-${{ inputs.nemo-target-branch }}
|
| 32 |
+
TARGET_BRANCH: ${{ inputs.nemo-target-branch }}
|
| 33 |
+
run: |
|
| 34 |
+
if git ls-remote --exit-code origin $SOURCE_BRANCH; then
|
| 35 |
+
git fetch --unshallow
|
| 36 |
+
git checkout $SOURCE_BRANCH
|
| 37 |
+
git pull
|
| 38 |
+
git merge --no-ff $TARGET_BRANCH -m "chore: Auto-merge $TARGET_BRANCH into $SOURCE_BRANCH"
|
| 39 |
+
else
|
| 40 |
+
git checkout -b $SOURCE_BRANCH $TARGET_BRANCH
|
| 41 |
+
fi
|
| 42 |
+
git push -u origin $SOURCE_BRANCH
|
| 43 |
+
|
| 44 |
+
mcore:
|
| 45 |
+
uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_bump_yamlfile.yml@v0.27.1
|
| 46 |
+
needs: [update-branch]
|
| 47 |
+
with:
|
| 48 |
+
source-repository: NVIDIA/Megatron-LM
|
| 49 |
+
source-ref: ${{ inputs.mcore-target-branch }}
|
| 50 |
+
yaml-path: '."vcs-dependencies"."megatron-lm".ref'
|
| 51 |
+
file: requirements/manifest.json
|
| 52 |
+
base-branch: weekly-bump-${{ inputs.nemo-target-branch }}
|
| 53 |
+
cicd-labels: Run CICD,no-fail-fast
|
| 54 |
+
pr-reviewers: ${{ inputs.pr-reviewers }}
|
| 55 |
+
secrets:
|
| 56 |
+
PAT: ${{ secrets.PAT }}
|
.github/workflows/build-docs.yml
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) 2026, NVIDIA CORPORATION.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
name: Build docs
|
| 16 |
+
|
| 17 |
+
on:
|
| 18 |
+
pull_request:
|
| 19 |
+
branches:
|
| 20 |
+
- main
|
| 21 |
+
- r**
|
| 22 |
+
paths:
|
| 23 |
+
- "docs/**"
|
| 24 |
+
push:
|
| 25 |
+
branches:
|
| 26 |
+
- main
|
| 27 |
+
- r**
|
| 28 |
+
paths:
|
| 29 |
+
- "docs/**"
|
| 30 |
+
|
| 31 |
+
concurrency:
|
| 32 |
+
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-${{ github.event.label.name || 'main' }}-${{ github.event_name }}
|
| 33 |
+
cancel-in-progress: true
|
| 34 |
+
|
| 35 |
+
jobs:
|
| 36 |
+
pre-flight:
|
| 37 |
+
uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_cicd_preflight.yml@v0.74.0
|
| 38 |
+
|
| 39 |
+
build-docs:
|
| 40 |
+
needs: [pre-flight]
|
| 41 |
+
if: needs.pre-flight.outputs.is_deployment_workflow != 'true'
|
| 42 |
+
uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_build_docs.yml@v0.83.0
|
| 43 |
+
with:
|
| 44 |
+
docs-directory: docs/source
|
| 45 |
+
sync-all: true
|
| 46 |
+
no-extras: "--no-extra cu12"
|
| 47 |
+
|
| 48 |
+
build-docs-summary:
|
| 49 |
+
needs: [pre-flight, build-docs]
|
| 50 |
+
if: |
|
| 51 |
+
(
|
| 52 |
+
needs.pre-flight.outputs.is_deployment_workflow == 'true'
|
| 53 |
+
|| always()
|
| 54 |
+
)
|
| 55 |
+
&& !cancelled()
|
| 56 |
+
runs-on: ubuntu-latest
|
| 57 |
+
steps:
|
| 58 |
+
- name: Get workflow result
|
| 59 |
+
id: result
|
| 60 |
+
shell: bash -x -e -u -o pipefail {0}
|
| 61 |
+
env:
|
| 62 |
+
GH_TOKEN: ${{ github.token }}
|
| 63 |
+
RUN_ID: ${{ github.run_id }}
|
| 64 |
+
SKIPPING_IS_ALLOWED: ${{ needs.pre-flight.outputs.docs_only == 'true' || needs.pre-flight.outputs.is_deployment_workflow == 'true' }}
|
| 65 |
+
run: |
|
| 66 |
+
FAILED_JOBS=$(gh run view $GITHUB_RUN_ID --json jobs --jq '[.jobs[] | select(.status == "completed" and .conclusion != "success")] | length') || echo 0
|
| 67 |
+
|
| 68 |
+
if [ "${FAILED_JOBS:-0}" -eq 0 ] || [ "$SKIPPING_IS_ALLOWED" == "true" ]; then
|
| 69 |
+
echo "✅ All previous jobs completed successfully"
|
| 70 |
+
exit 0
|
| 71 |
+
else
|
| 72 |
+
echo "❌ Found $FAILED_JOBS failed job(s)"
|
| 73 |
+
# Show which jobs failed
|
| 74 |
+
gh run view $GITHUB_RUN_ID --json jobs --jq '.jobs[] | select(.status == "completed" and .conclusion != "success") | .name'
|
| 75 |
+
exit 1
|
| 76 |
+
fi
|
.github/workflows/build-test-publish-wheel.yml
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) 2020-2021, NVIDIA CORPORATION.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
name: Build, test, and publish a PyPi wheel (to testpypi).
|
| 16 |
+
|
| 17 |
+
on:
|
| 18 |
+
push:
|
| 19 |
+
branches:
|
| 20 |
+
- main
|
| 21 |
+
- "r**"
|
| 22 |
+
|
| 23 |
+
defaults:
|
| 24 |
+
run:
|
| 25 |
+
shell: bash -x -e -u -o pipefail {0}
|
| 26 |
+
|
| 27 |
+
jobs:
|
| 28 |
+
build-test-publish-wheel:
|
| 29 |
+
uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_build_test_publish_wheel.yml@v0.22.3
|
| 30 |
+
with:
|
| 31 |
+
dry-run: true
|
| 32 |
+
python-package: nemo
|
| 33 |
+
python-version: "3.10"
|
| 34 |
+
secrets:
|
| 35 |
+
TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
|
| 36 |
+
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
|
| 37 |
+
SLACK_WEBHOOK: ${{ secrets.SLACK_RELEASE_ENDPOINT }}
|
| 38 |
+
SLACK_WEBHOOK_ADMIN: ${{ secrets.SLACK_WEBHOOK_ADMIN }}
|
.github/workflows/cherry-pick-release-commit.yml
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Create PR to main with cherry-pick from release
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
push:
|
| 5 |
+
branches:
|
| 6 |
+
- main
|
| 7 |
+
|
| 8 |
+
jobs:
|
| 9 |
+
cherry-pick:
|
| 10 |
+
uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_cherry_pick.yml@v0.63.0
|
| 11 |
+
secrets:
|
| 12 |
+
PAT: ${{ secrets.PAT }}
|
| 13 |
+
SLACK_WEBHOOK_ADMIN: ${{ secrets.SLACK_WEBHOOK_ADMIN }}
|
| 14 |
+
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
.github/workflows/cicd-approve-test-queue.yml
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
name: Approve Test Queue
|
| 16 |
+
|
| 17 |
+
on:
|
| 18 |
+
schedule:
|
| 19 |
+
- cron: '*/5 * * * *' # Runs every 5 minutes
|
| 20 |
+
workflow_dispatch: # Allows manual triggering
|
| 21 |
+
|
| 22 |
+
jobs:
|
| 23 |
+
approve-queue:
|
| 24 |
+
runs-on: ubuntu-latest
|
| 25 |
+
environment: main
|
| 26 |
+
steps:
|
| 27 |
+
- name: Checkout repository
|
| 28 |
+
uses: actions/checkout@v6
|
| 29 |
+
|
| 30 |
+
- name: Set up Python
|
| 31 |
+
uses: actions/setup-python@v6
|
| 32 |
+
with:
|
| 33 |
+
python-version: "3.12"
|
| 34 |
+
|
| 35 |
+
- name: Install dependencies
|
| 36 |
+
run: |
|
| 37 |
+
python -m pip install --upgrade pip
|
| 38 |
+
pip install requests
|
| 39 |
+
|
| 40 |
+
- name: Approve waiting deployments
|
| 41 |
+
env:
|
| 42 |
+
GITHUB_TOKEN: ${{ secrets.PAT }}
|
| 43 |
+
MAX_CONCURRENCY: ${{ vars.MAX_CONCURRENCY || 1 }}
|
| 44 |
+
run: |
|
| 45 |
+
python - <<EOF
|
| 46 |
+
import os
|
| 47 |
+
import requests
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
# GitHub API configuration
|
| 51 |
+
GITHUB_TOKEN = os.environ["GITHUB_TOKEN"]
|
| 52 |
+
REPO = os.environ["GITHUB_REPOSITORY"]
|
| 53 |
+
MAX_CONCURRENCY = int(os.environ["MAX_CONCURRENCY"])
|
| 54 |
+
API_BASE = f"https://api.github.com/repos/{REPO}"
|
| 55 |
+
|
| 56 |
+
# Headers for GitHub API
|
| 57 |
+
headers = {
|
| 58 |
+
"Authorization": f"token {GITHUB_TOKEN}",
|
| 59 |
+
"Accept": "application/vnd.github.v3+json",
|
| 60 |
+
"X-GitHub-Api-Version": "2022-11-28",
|
| 61 |
+
}
|
| 62 |
+
|
| 63 |
+
def make_request(endpoint, method="GET", data=None):
|
| 64 |
+
"""Make a request to the GitHub API with error handling."""
|
| 65 |
+
url = f"{API_BASE}/{endpoint}"
|
| 66 |
+
try:
|
| 67 |
+
if method == "GET":
|
| 68 |
+
response = requests.get(url, headers=headers)
|
| 69 |
+
else:
|
| 70 |
+
response = requests.post(url, headers=headers, json=data)
|
| 71 |
+
response.raise_for_status()
|
| 72 |
+
response_json = response.json()
|
| 73 |
+
if hasattr(response, "links") and "actions/runs?status" in endpoint:
|
| 74 |
+
response_json["next"] = response.links.get("next", {}).get("url")
|
| 75 |
+
|
| 76 |
+
return response_json
|
| 77 |
+
except requests.exceptions.RequestException as e:
|
| 78 |
+
print(f"Error making request to {endpoint}: {str(e)}")
|
| 79 |
+
if hasattr(e.response, 'text'):
|
| 80 |
+
print(f"Response: {e.response.text}")
|
| 81 |
+
return None
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def get_workflow_runs(status):
|
| 85 |
+
"""Get all workflow runs for a given status."""
|
| 86 |
+
all_results = []
|
| 87 |
+
endpoint = f"actions/runs?status={status}"
|
| 88 |
+
while endpoint:
|
| 89 |
+
response = make_request(endpoint)
|
| 90 |
+
if not response:
|
| 91 |
+
break
|
| 92 |
+
|
| 93 |
+
all_results.extend(response.get("workflow_runs", []))
|
| 94 |
+
endpoint = None
|
| 95 |
+
next_url = response.get("next")
|
| 96 |
+
if next_url:
|
| 97 |
+
endpoint = f"actions/runs?{next_url.split('?')[1]}"
|
| 98 |
+
|
| 99 |
+
return all_results
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
# Get current running and queued workflows
|
| 103 |
+
print("Fetching workflow runs...")
|
| 104 |
+
queued_workflow_runs = get_workflow_runs("queued")
|
| 105 |
+
in_progress_workflow_runs = get_workflow_runs("in_progress")
|
| 106 |
+
|
| 107 |
+
# Count running and queued workflows
|
| 108 |
+
queued_workflows = sum(1 for run in queued_workflow_runs if run["name"] == "CICD NeMo")
|
| 109 |
+
in_progress_workflows = sum(1 for run in in_progress_workflow_runs if run["name"] == "CICD NeMo")
|
| 110 |
+
|
| 111 |
+
total_workflows = queued_workflows + in_progress_workflows
|
| 112 |
+
print(f"Current queued workflows: {queued_workflows}")
|
| 113 |
+
print(f"Current running workflows: {in_progress_workflows}")
|
| 114 |
+
print(f"Total workflows: {total_workflows}")
|
| 115 |
+
print(f"Max concurrency: {MAX_CONCURRENCY}")
|
| 116 |
+
|
| 117 |
+
if total_workflows >= MAX_CONCURRENCY:
|
| 118 |
+
print("Maximum concurrency reached, no new approvals will be made")
|
| 119 |
+
exit(0)
|
| 120 |
+
|
| 121 |
+
# Get waiting CI workflows for test environment
|
| 122 |
+
print("Fetching deployments...")
|
| 123 |
+
pending_workflows = get_workflow_runs("waiting")
|
| 124 |
+
pending_workflows = [run for run in pending_workflows if run["name"] == "CICD NeMo"]
|
| 125 |
+
|
| 126 |
+
# Sort deployments by creation date (oldest first)
|
| 127 |
+
print("Sorting workflows...")
|
| 128 |
+
pending_workflows = sorted(pending_workflows, key=lambda x: x["created_at"])
|
| 129 |
+
|
| 130 |
+
# Process each deployment
|
| 131 |
+
print("Processing ...")
|
| 132 |
+
for workflow in pending_workflows:
|
| 133 |
+
if total_workflows >= MAX_CONCURRENCY:
|
| 134 |
+
print("Maximum concurrency reached, stopping approvals")
|
| 135 |
+
break
|
| 136 |
+
|
| 137 |
+
workflow_id = workflow["id"]
|
| 138 |
+
workflow_name = workflow["display_title"]
|
| 139 |
+
print(f"Approving workflow {workflow_name} with Run Id: {workflow_id}")
|
| 140 |
+
|
| 141 |
+
deployment_url = f"actions/runs/{workflow_id}/pending_deployments"
|
| 142 |
+
deployment = make_request(deployment_url)[0]
|
| 143 |
+
environment_id = deployment["environment"]["id"]
|
| 144 |
+
|
| 145 |
+
# Approve the deployment
|
| 146 |
+
status_data = {
|
| 147 |
+
"environment_ids": [environment_id],
|
| 148 |
+
"state": "approved",
|
| 149 |
+
"comment": "Automatically approved by queue manager"
|
| 150 |
+
}
|
| 151 |
+
result = make_request(deployment_url, method="POST", data=status_data)
|
| 152 |
+
|
| 153 |
+
if result:
|
| 154 |
+
total_workflows += 1
|
| 155 |
+
else:
|
| 156 |
+
print(f"Failed to approve deployment {deployment['id']}")
|
| 157 |
+
exit(1)
|
| 158 |
+
|
| 159 |
+
EOF
|
| 160 |
+
notify:
|
| 161 |
+
if: failure()
|
| 162 |
+
runs-on: ubuntu-latest
|
| 163 |
+
needs: [approve-queue]
|
| 164 |
+
steps:
|
| 165 |
+
- name: Notify
|
| 166 |
+
env:
|
| 167 |
+
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
| 168 |
+
SLACK_WEBHOOK_ADMIN: <!subteam^${{ secrets.SLACK_WEBHOOK_ADMIN }}>
|
| 169 |
+
GITHUB_RUN_ID: ${{ github.run_id }}
|
| 170 |
+
GITHUB_REPOSITORY: ${{ github.repository }}
|
| 171 |
+
run: |
|
| 172 |
+
curl -X POST \
|
| 173 |
+
-H 'Content-type: application/json' \
|
| 174 |
+
--data "{\"text\":\":robot_joy: <https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}|Test-queue-approval-bot workflow> failed. Please review manually.\n\ncc ${SLACK_WEBHOOK_ADMIN}\"}" \
|
| 175 |
+
$SLACK_WEBHOOK
|
.github/workflows/cicd-main-speech.yml
ADDED
|
@@ -0,0 +1,483 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) 2025, NVIDIA CORPORATION.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
name: NeMo E2E Speech Tests
|
| 15 |
+
on:
|
| 16 |
+
workflow_call:
|
| 17 |
+
inputs:
|
| 18 |
+
test_to_run:
|
| 19 |
+
required: true
|
| 20 |
+
type: string
|
| 21 |
+
image-name:
|
| 22 |
+
required: false
|
| 23 |
+
default: nemo_container_speech
|
| 24 |
+
type: string
|
| 25 |
+
|
| 26 |
+
jobs:
|
| 27 |
+
build:
|
| 28 |
+
uses: ./.github/workflows/_build_container.yml
|
| 29 |
+
with:
|
| 30 |
+
image-name: ${{ inputs.image-name }}
|
| 31 |
+
dockerfile: docker/Dockerfile.ci
|
| 32 |
+
|
| 33 |
+
unit-tests:
|
| 34 |
+
strategy:
|
| 35 |
+
fail-fast: false
|
| 36 |
+
matrix:
|
| 37 |
+
include:
|
| 38 |
+
- script: L0_Unit_Tests_GPU_ASR
|
| 39 |
+
runner: self-hosted-azure-gpus-1
|
| 40 |
+
timeout: 30
|
| 41 |
+
- script: L0_Unit_Tests_CPU_ASR
|
| 42 |
+
runner: azure-gpu-vm-runner1-cpu
|
| 43 |
+
cpu-only: true
|
| 44 |
+
timeout: 30
|
| 45 |
+
- script: L0_Unit_Tests_GPU_TTS
|
| 46 |
+
runner: self-hosted-azure-gpus-1
|
| 47 |
+
- script: L0_Unit_Tests_CPU_TTS
|
| 48 |
+
runner: self-hosted-azure-cpu
|
| 49 |
+
cpu-only: true
|
| 50 |
+
- script: L0_Unit_Tests_GPU_Audio
|
| 51 |
+
runner: self-hosted-azure-gpus-1
|
| 52 |
+
- script: L0_Unit_Tests_CPU_Audio
|
| 53 |
+
runner: self-hosted-azure-cpu
|
| 54 |
+
cpu-only: true
|
| 55 |
+
- script: L0_Unit_Tests_GPU_SpeechLM2
|
| 56 |
+
runner: self-hosted-azure-gpus-1
|
| 57 |
+
timeout: 20
|
| 58 |
+
- script: L0_Unit_Tests_CPU_SpeechLM2
|
| 59 |
+
runner: self-hosted-azure-cpu
|
| 60 |
+
cpu-only: true
|
| 61 |
+
timeout: 20
|
| 62 |
+
needs: [build]
|
| 63 |
+
runs-on: ${{ matrix.runner }}
|
| 64 |
+
name: ${{ matrix.script }}
|
| 65 |
+
steps:
|
| 66 |
+
- name: Checkout
|
| 67 |
+
uses: actions/checkout@v6
|
| 68 |
+
with:
|
| 69 |
+
path: ${{ github.run_id }}
|
| 70 |
+
- name: main
|
| 71 |
+
uses: NVIDIA/NeMo/.github/actions/test-template@main
|
| 72 |
+
with:
|
| 73 |
+
runner: ${{ runner.name }}
|
| 74 |
+
script: ${{ matrix.script }}
|
| 75 |
+
is_unit_test: true
|
| 76 |
+
tests_to_run: ${{ inputs.test_to_run }}
|
| 77 |
+
image: ${{ inputs.image-name }}
|
| 78 |
+
timeout: ${{ matrix.timeout || 10 }}
|
| 79 |
+
cpu-only: ${{ matrix.cpu-only || false }}
|
| 80 |
+
is_optional: ${{ matrix.is-optional || false }}
|
| 81 |
+
|
| 82 |
+
e2e-tests:
|
| 83 |
+
strategy:
|
| 84 |
+
fail-fast: false
|
| 85 |
+
matrix:
|
| 86 |
+
include:
|
| 87 |
+
- runner: self-hosted-azure-gpus-1
|
| 88 |
+
script: ASR_dev_run_Speech_to_Text
|
| 89 |
+
- runner: self-hosted-azure-gpus-1
|
| 90 |
+
script: Optional_ASR_dev_run_Speech_To_Text_Finetuning
|
| 91 |
+
is-optional: true
|
| 92 |
+
- runner: self-hosted-azure-gpus-1
|
| 93 |
+
script: Optional_ASR_dev_run_Speech_To_Text_HF_Finetuning
|
| 94 |
+
is-optional: true
|
| 95 |
+
- runner: self-hosted-azure-gpus-1
|
| 96 |
+
script: ASR_dev_run_Speech_to_Text_WPE_-_Conformer
|
| 97 |
+
- runner: self-hosted-azure-gpus-1
|
| 98 |
+
script: ASR_dev_run_Speech_to_Text_Hybrid_RNNT_CTC_Prompt
|
| 99 |
+
- runner: self-hosted-azure-gpus-1
|
| 100 |
+
script: L2_ASR_Multi-dataloader_dev_run_Speech_to_Text_multi-dataloader
|
| 101 |
+
- runner: self-hosted-azure-gpus-1
|
| 102 |
+
script: L2_ASR_Multi-dataloader_dev_run_Speech_to_Label_multi-dataloader
|
| 103 |
+
- runner: self-hosted-azure-gpus-1
|
| 104 |
+
script: L2_ASR_Adapters_Linear_Adapters
|
| 105 |
+
- runner: self-hosted-azure-gpus-1
|
| 106 |
+
script: L2_ASR_Adapters_RelPos_MHA_Adapters
|
| 107 |
+
- runner: self-hosted-azure
|
| 108 |
+
script: L2_Speech_to_Text_EMA
|
| 109 |
+
- runner: self-hosted-azure-gpus-1
|
| 110 |
+
script: L2_Speech_to_Text_AED
|
| 111 |
+
- runner: self-hosted-azure-gpus-1
|
| 112 |
+
script: L2_Speaker_dev_run_Speech_to_Label
|
| 113 |
+
- runner: self-hosted-azure
|
| 114 |
+
script: L2_Speech_Estimate_Duration_Bins
|
| 115 |
+
- runner: self-hosted-azure
|
| 116 |
+
script: L2_Speech_Batch_Size_OOMptimizer
|
| 117 |
+
- runner: self-hosted-azure
|
| 118 |
+
script: Optional_L2_Speech_Batch_Size_OOMptimizer_Canary
|
| 119 |
+
is-optional: true
|
| 120 |
+
- runner: self-hosted-azure
|
| 121 |
+
script: L2_Speech_Transcription_Speech_to_Text_Transcribe
|
| 122 |
+
- runner: self-hosted-azure
|
| 123 |
+
script: L2_Speech_Transcription_Speech_to_Text_Streaming_Infer
|
| 124 |
+
- runner: self-hosted-azure
|
| 125 |
+
script: L2_Speech_Transcription_Speech_to_Text_Cache_Aware_Infer
|
| 126 |
+
- runner: self-hosted-azure
|
| 127 |
+
script: L2_Speech_Transcription_Streaming_Inference
|
| 128 |
+
- runner: self-hosted-azure
|
| 129 |
+
script: L2_Speech_Transcription_Speech_to_Text_Inference_Boost_GT
|
| 130 |
+
- runner: self-hosted-azure
|
| 131 |
+
script: L2_Speech_Transcription_Speech_to_Text_Transcribe_Boost_GT
|
| 132 |
+
- runner: self-hosted-azure
|
| 133 |
+
script: L2_Speech_Transcription_Canary_Transcribe_Full_Manifest
|
| 134 |
+
- runner: self-hosted-azure
|
| 135 |
+
script: L2_Speech_Transcription_Canary_Transcribe_With_Prompt
|
| 136 |
+
- runner: self-hosted-azure
|
| 137 |
+
script: L2_Speech_Transcription_Canary_Transcribe_Audio_Dir
|
| 138 |
+
- runner: self-hosted-azure
|
| 139 |
+
script: L2_Speech_Transcription_Canary_Streaming_Full_Manifest
|
| 140 |
+
- runner: self-hosted-azure
|
| 141 |
+
script: L2_Longform_Speech_Transcription_Canary_Chunked_Infer_from_Audio_Dir
|
| 142 |
+
- runner: self-hosted-azure
|
| 143 |
+
script: L2_Longform_Speech_Transcription_with_TimeStamps_Canary_Chunked_Infer_from_Audio_Dir
|
| 144 |
+
- runner: self-hosted-azure
|
| 145 |
+
script: L2_Longform_Speech_Transcription_with_TimeStamps_Canary_Chunked_Infer_from_Manifest
|
| 146 |
+
- runner: self-hosted-azure-gpus-1
|
| 147 |
+
script: Speech_Checkpoints_tests
|
| 148 |
+
timeout: 20
|
| 149 |
+
- runner: self-hosted-azure-gpus-1
|
| 150 |
+
script: L2_Speaker_dev_run_Speaker_Recognition
|
| 151 |
+
- runner: self-hosted-azure-gpus-1
|
| 152 |
+
script: L2_Speaker_dev_run_EndtoEnd_Speaker_Diarization_Sortformer
|
| 153 |
+
- runner: self-hosted-azure
|
| 154 |
+
script: L2_Speaker_dev_run_EndtoEnd_Diarizer_Inference
|
| 155 |
+
- runner: self-hosted-azure
|
| 156 |
+
script: L2_Speaker_dev_run_Speaker_Diarization_with_ASR_Inference
|
| 157 |
+
- runner: self-hosted-azure
|
| 158 |
+
script: L2_Speaker_dev_run_Clustering_Diarizer_Inference
|
| 159 |
+
- runner: self-hosted-azure
|
| 160 |
+
script: L2_Speaker_dev_run_Multispeaker_ASR_Data_Simulation
|
| 161 |
+
- runner: self-hosted-azure
|
| 162 |
+
script: L2_TTS_Fast_dev_runs_1_FastPitch
|
| 163 |
+
- runner: self-hosted-azure
|
| 164 |
+
script: L2_TTS_Fast_dev_runs_1_Hifigan
|
| 165 |
+
- runner: self-hosted-azure
|
| 166 |
+
script: L2_G2P_Models_G2P_Conformer_training_evaluation_and_inference
|
| 167 |
+
- runner: self-hosted-azure
|
| 168 |
+
script: SPEECHLM_HF_Training_DuplexS2S
|
| 169 |
+
- runner: self-hosted-azure
|
| 170 |
+
script: SPEECHLM_HF_Training_DuplexS2SSpeechDecoder
|
| 171 |
+
- runner: self-hosted-azure
|
| 172 |
+
script: SPEECHLM_HF_Training_SALM
|
| 173 |
+
timeout: 20
|
| 174 |
+
- runner: self-hosted-azure
|
| 175 |
+
script: L2_TTS_Fast_dev_runs_Magpietts_DecoderContext
|
| 176 |
+
- runner: self-hosted-azure
|
| 177 |
+
script: L2_TTS_Fast_dev_runs_Magpietts_MultiEncoder
|
| 178 |
+
- runner: self-hosted-azure
|
| 179 |
+
script: L2_TTS_Fast_dev_runs_Magpietts_MoE
|
| 180 |
+
- runner: self-hosted-azure
|
| 181 |
+
script: L2_TTS_Fast_dev_runs_Magpietts_OnlinePO
|
| 182 |
+
- runner: self-hosted-azure
|
| 183 |
+
script: L2_TTS_InferEvaluate_Magpietts_ZeroShot
|
| 184 |
+
- runner: self-hosted-azure
|
| 185 |
+
script: L2_TTS_InferEvaluate_Magpietts_SeenSpeakers
|
| 186 |
+
- runner: self-hosted-azure
|
| 187 |
+
script: L2_TTS_InferEvaluatelongform_Magpietts_ZeroShot
|
| 188 |
+
- runner: self-hosted-azure
|
| 189 |
+
script: L2_TTS_InferEvaluate_Magpietts_MoE_ZeroShot
|
| 190 |
+
- runner: self-hosted-azure
|
| 191 |
+
script: L2_TTS_InferEvaluatelongform_Magpietts_MoE_ZeroShot
|
| 192 |
+
- runner: self-hosted-azure
|
| 193 |
+
script: L2_TTS_InferEvaluate_Magpietts_FrameStacking
|
| 194 |
+
- runner: self-hosted-azure
|
| 195 |
+
script: L2_TTS_Fast_dev_runs_EasyMagpietts_Qwen
|
| 196 |
+
- runner: self-hosted-azure
|
| 197 |
+
script: L2_TTS_Fast_dev_runs_EasyMagpietts_Nemotron
|
| 198 |
+
- runner: self-hosted-azure
|
| 199 |
+
script: L2_TTS_Fast_dev_runs_EasyMagpietts_OnlinePO
|
| 200 |
+
timeout: 20
|
| 201 |
+
- runner: self-hosted-azure
|
| 202 |
+
script: L2_TTS_InferEvaluate_EasyMagpietts
|
| 203 |
+
needs: [unit-tests]
|
| 204 |
+
runs-on: ${{ matrix.runner }}
|
| 205 |
+
name: ${{ matrix.is-optional && 'PLEASEFIXME_' || '' }}${{ matrix.script }}
|
| 206 |
+
steps:
|
| 207 |
+
- name: Checkout
|
| 208 |
+
uses: actions/checkout@v6
|
| 209 |
+
with:
|
| 210 |
+
path: ${{ github.run_id }}
|
| 211 |
+
- name: main
|
| 212 |
+
uses: NVIDIA/NeMo/.github/actions/test-template@main
|
| 213 |
+
with:
|
| 214 |
+
runner: ${{ runner.name }}
|
| 215 |
+
script: ${{ matrix.script }}
|
| 216 |
+
tests_to_run: ${{ inputs.test_to_run }}
|
| 217 |
+
image: ${{ inputs.image-name }}
|
| 218 |
+
timeout: ${{ matrix.timeout || 10 }}
|
| 219 |
+
is_optional: ${{ matrix.is-optional || false }}
|
| 220 |
+
|
| 221 |
+
e2e-nightly:
|
| 222 |
+
if: ${{ github.event_name == 'schedule' || contains(github.event.pull_request.labels.*.name, 'Run e2e nightly') }}
|
| 223 |
+
strategy:
|
| 224 |
+
fail-fast: false
|
| 225 |
+
matrix:
|
| 226 |
+
include:
|
| 227 |
+
- runner: self-hosted-azure
|
| 228 |
+
script: L2_Model_Support_nvidia__stt_de_fastconformer_hybrid_large_pc
|
| 229 |
+
timeout: 15
|
| 230 |
+
- runner: self-hosted-azure
|
| 231 |
+
script: L2_Model_Support_nvidia__stt_en_fastconformer_hybrid_large_pc
|
| 232 |
+
timeout: 15
|
| 233 |
+
- runner: self-hosted-azure
|
| 234 |
+
script: L2_Model_Support_nvidia__stt_es_fastconformer_hybrid_large_pc
|
| 235 |
+
timeout: 15
|
| 236 |
+
- runner: self-hosted-azure
|
| 237 |
+
script: L2_Model_Support_nvidia__stt_it_fastconformer_hybrid_large_pc
|
| 238 |
+
timeout: 15
|
| 239 |
+
- runner: self-hosted-azure
|
| 240 |
+
script: L2_Model_Support_nvidia__stt_ua_fastconformer_hybrid_large_pc
|
| 241 |
+
timeout: 15
|
| 242 |
+
- runner: self-hosted-azure
|
| 243 |
+
script: L2_Model_Support_nvidia__stt_pl_fastconformer_hybrid_large_pc
|
| 244 |
+
timeout: 15
|
| 245 |
+
- runner: self-hosted-azure
|
| 246 |
+
script: L2_Model_Support_nvidia__stt_hr_fastconformer_hybrid_large_pc
|
| 247 |
+
timeout: 15
|
| 248 |
+
- runner: self-hosted-azure
|
| 249 |
+
script: L2_Model_Support_nvidia__stt_be_fastconformer_hybrid_large_pc
|
| 250 |
+
timeout: 15
|
| 251 |
+
- runner: self-hosted-azure
|
| 252 |
+
script: L2_Model_Support_nvidia__stt_fr_fastconformer_hybrid_large_pc
|
| 253 |
+
timeout: 15
|
| 254 |
+
- runner: self-hosted-azure
|
| 255 |
+
script: L2_Model_Support_nvidia__stt_ru_fastconformer_hybrid_large_pc
|
| 256 |
+
timeout: 15
|
| 257 |
+
- runner: self-hosted-azure
|
| 258 |
+
script: L2_Model_Support_nvidia__stt_nl_fastconformer_hybrid_large_pc
|
| 259 |
+
timeout: 15
|
| 260 |
+
- runner: self-hosted-azure
|
| 261 |
+
script: L2_Model_Support_nvidia__stt_fa_fastconformer_hybrid_large
|
| 262 |
+
timeout: 15
|
| 263 |
+
- runner: self-hosted-azure
|
| 264 |
+
script: L2_Model_Support_nvidia__stt_ka_fastconformer_hybrid_large_pc
|
| 265 |
+
timeout: 15
|
| 266 |
+
- runner: self-hosted-azure
|
| 267 |
+
script: L2_Model_Support_nvidia__stt_kk_ru_fastconformer_hybrid_large
|
| 268 |
+
timeout: 15
|
| 269 |
+
- runner: self-hosted-azure
|
| 270 |
+
script: L2_Model_Support_nvidia__stt_ka_fastconformer_hybrid_transducer_ctc_large_streaming_80ms_pc
|
| 271 |
+
timeout: 15
|
| 272 |
+
- runner: self-hosted-azure
|
| 273 |
+
script: L2_Model_Support_nvidia__stt_uz_fastconformer_hybrid_large_pc
|
| 274 |
+
timeout: 15
|
| 275 |
+
- runner: self-hosted-azure
|
| 276 |
+
script: L2_Model_Support_nvidia__stt_ar_fastconformer_hybrid_large_pc_v1_0
|
| 277 |
+
timeout: 15
|
| 278 |
+
- runner: self-hosted-azure
|
| 279 |
+
script: L2_Model_Support_nvidia__stt_hy_fastconformer_hybrid_large_pc
|
| 280 |
+
timeout: 15
|
| 281 |
+
- runner: self-hosted-azure
|
| 282 |
+
script: L2_Model_Support_nvidia__stt_en_fastconformer_hybrid_medium_streaming_80ms_pc
|
| 283 |
+
timeout: 15
|
| 284 |
+
- runner: self-hosted-azure
|
| 285 |
+
script: L2_Model_Support_nvidia__stt_en_fastconformer_hybrid_medium_streaming_80ms
|
| 286 |
+
timeout: 15
|
| 287 |
+
- runner: self-hosted-azure
|
| 288 |
+
script: L2_Model_Support_nvidia__stt_pt_fastconformer_hybrid_large_pc
|
| 289 |
+
timeout: 15
|
| 290 |
+
- runner: self-hosted-azure
|
| 291 |
+
script: L2_Model_Support_nvidia__stt_es_fastconformer_hybrid_large_pc_nc
|
| 292 |
+
timeout: 15
|
| 293 |
+
- runner: self-hosted-azure
|
| 294 |
+
script: L2_Model_Support_nvidia__stt_ar_fastconformer_hybrid_large_pcd_v1_0
|
| 295 |
+
timeout: 15
|
| 296 |
+
- runner: self-hosted-azure
|
| 297 |
+
script: L2_Model_Support_nvidia__stt_en_fastconformer_hybrid_large_streaming_multi
|
| 298 |
+
timeout: 15
|
| 299 |
+
- runner: self-hosted-azure
|
| 300 |
+
script: L2_Model_Support_nvidia__stt_en_fastconformer_ctc_large
|
| 301 |
+
timeout: 15
|
| 302 |
+
- runner: self-hosted-azure
|
| 303 |
+
script: L2_Model_Support_nvidia__stt_en_fastconformer_transducer_large
|
| 304 |
+
timeout: 15
|
| 305 |
+
- runner: self-hosted-azure
|
| 306 |
+
script: L2_Model_Support_nvidia__stt_en_fastconformer_ctc_xlarge
|
| 307 |
+
timeout: 15
|
| 308 |
+
- runner: self-hosted-azure
|
| 309 |
+
script: L2_Model_Support_nvidia__stt_en_fastconformer_transducer_xlarge
|
| 310 |
+
timeout: 15
|
| 311 |
+
- runner: self-hosted-azure
|
| 312 |
+
script: L2_Model_Support_nvidia__stt_en_fastconformer_transducer_xxlarge
|
| 313 |
+
timeout: 15
|
| 314 |
+
- runner: self-hosted-azure
|
| 315 |
+
script: L2_Model_Support_nvidia__stt_en_fastconformer_ctc_xxlarge
|
| 316 |
+
timeout: 15
|
| 317 |
+
- runner: self-hosted-azure
|
| 318 |
+
script: L2_Model_Support_nvidia__stt_en_fastconformer_tdt_large
|
| 319 |
+
timeout: 15
|
| 320 |
+
- runner: self-hosted-azure
|
| 321 |
+
script: L2_Model_Support_stt_en_fastconformer_hybrid_large_streaming_1040ms
|
| 322 |
+
timeout: 15
|
| 323 |
+
- runner: self-hosted-azure
|
| 324 |
+
script: L2_Model_Support_stt_multilingual_fastconformer_hybrid_large_pc_blend_eu
|
| 325 |
+
timeout: 15
|
| 326 |
+
- runner: self-hosted-azure
|
| 327 |
+
script: L2_Model_Support_nvidia__parakeet_rnnt_1_1b
|
| 328 |
+
timeout: 15
|
| 329 |
+
- runner: self-hosted-azure
|
| 330 |
+
script: L2_Model_Support_nvidia__parakeet_ctc_1_1b
|
| 331 |
+
timeout: 15
|
| 332 |
+
- runner: self-hosted-azure
|
| 333 |
+
script: L2_Model_Support_nvidia__parakeet_rnnt_0_6b
|
| 334 |
+
timeout: 15
|
| 335 |
+
- runner: self-hosted-azure
|
| 336 |
+
script: L2_Model_Support_nvidia__parakeet_ctc_0_6b
|
| 337 |
+
timeout: 15
|
| 338 |
+
- runner: self-hosted-azure
|
| 339 |
+
script: L2_Model_Support_nvidia__parakeet_tdt_1_1b
|
| 340 |
+
timeout: 15
|
| 341 |
+
- runner: self-hosted-azure
|
| 342 |
+
script: L2_Model_Support_nvidia__parakeet_tdt_ctc_1_1b
|
| 343 |
+
timeout: 15
|
| 344 |
+
- runner: self-hosted-azure
|
| 345 |
+
script: L2_Model_Support_nvidia__parakeet_tdt_ctc_0_6b_ja
|
| 346 |
+
timeout: 15
|
| 347 |
+
- runner: self-hosted-azure
|
| 348 |
+
script: L2_Model_Support_nvidia__parakeet_tdt_ctc_110m
|
| 349 |
+
timeout: 15
|
| 350 |
+
- runner: self-hosted-azure
|
| 351 |
+
script: L2_Model_Support_nvidia__parakeet_tdt_0_6b_v2
|
| 352 |
+
timeout: 15
|
| 353 |
+
- runner: self-hosted-azure
|
| 354 |
+
script: L2_Model_Support_nvidia__parakeet_rnnt_110m_da_dk
|
| 355 |
+
timeout: 15
|
| 356 |
+
- runner: self-hosted-azure
|
| 357 |
+
script: L2_Model_Support_nvidia__parakeet_tdt_0_6b_v3
|
| 358 |
+
timeout: 15
|
| 359 |
+
- runner: self-hosted-azure
|
| 360 |
+
script: L2_Model_Support_nvidia__parakeet_ctc_0_6b_Vietnamese
|
| 361 |
+
timeout: 15
|
| 362 |
+
- runner: self-hosted-azure
|
| 363 |
+
script: L2_Model_Support_nvidia__canary_1b
|
| 364 |
+
timeout: 15
|
| 365 |
+
- runner: self-hosted-azure
|
| 366 |
+
script: L2_Model_Support_nvidia__canary_1b_flash
|
| 367 |
+
timeout: 15
|
| 368 |
+
- runner: self-hosted-azure
|
| 369 |
+
script: L2_Model_Support_nvidia__canary_180m_flash
|
| 370 |
+
timeout: 15
|
| 371 |
+
- runner: self-hosted-azure
|
| 372 |
+
script: L2_Model_Support_nvidia__canary_1b_v2
|
| 373 |
+
timeout: 15
|
| 374 |
+
- runner: self-hosted-azure
|
| 375 |
+
script: L2_Model_Support_nvidia__parakeet_realtime_eou_120m_v1
|
| 376 |
+
timeout: 15
|
| 377 |
+
- runner: self-hosted-azure
|
| 378 |
+
script: L2_Model_Support_nvidia__multitalker_parakeet_streaming_0_6b_v1
|
| 379 |
+
timeout: 15
|
| 380 |
+
- runner: self-hosted-azure
|
| 381 |
+
script: L2_Model_Support_nvidia__nemotron_speech_streaming_en_0_6b
|
| 382 |
+
timeout: 15
|
| 383 |
+
- runner: self-hosted-azure
|
| 384 |
+
script: L2_Model_Support_nvidia__canary_qwen_2_5b
|
| 385 |
+
timeout: 15
|
| 386 |
+
- runner: self-hosted-azure
|
| 387 |
+
script: L2_Model_Support_nvidia__diar_sortformer_4spk_v1
|
| 388 |
+
timeout: 15
|
| 389 |
+
- runner: self-hosted-azure
|
| 390 |
+
script: L2_Model_Support_nvidia__diar_streaming_sortformer_4spk_v2
|
| 391 |
+
timeout: 15
|
| 392 |
+
- runner: self-hosted-azure
|
| 393 |
+
script: L2_Model_Support_nvidia__diar_streaming_sortformer_4spk_v2_1
|
| 394 |
+
timeout: 15
|
| 395 |
+
- runner: self-hosted-azure
|
| 396 |
+
script: L2_Model_Support_titanet_large
|
| 397 |
+
timeout: 15
|
| 398 |
+
- runner: self-hosted-azure
|
| 399 |
+
script: L2_Model_Support_nvidia__speakerverification_en_titanet_large
|
| 400 |
+
timeout: 15
|
| 401 |
+
- runner: self-hosted-azure
|
| 402 |
+
script: L2_Model_Support_nvidia__ssl_en_nest_large_v1_0
|
| 403 |
+
timeout: 15
|
| 404 |
+
- runner: self-hosted-azure
|
| 405 |
+
script: L2_Model_Support_nvidia__ssl_en_nest_xlarge_v1_0
|
| 406 |
+
timeout: 15
|
| 407 |
+
- runner: self-hosted-azure
|
| 408 |
+
script: L2_Model_Support_vad_multilingual_marblenet
|
| 409 |
+
timeout: 15
|
| 410 |
+
- runner: self-hosted-azure
|
| 411 |
+
script: L2_Model_Support_vad_multilingual_frame_marblenet
|
| 412 |
+
timeout: 15
|
| 413 |
+
- runner: self-hosted-azure
|
| 414 |
+
script: L2_Model_Support_nvidia__Frame_VAD_Multilingual_MarbleNet_v2_0
|
| 415 |
+
timeout: 15
|
| 416 |
+
- runner: self-hosted-azure
|
| 417 |
+
script: L2_Model_Support_nvidia__se_den_sb_16k_small
|
| 418 |
+
timeout: 15
|
| 419 |
+
- runner: self-hosted-azure
|
| 420 |
+
script: L2_Model_Support_nvidia__se_der_sb_16k_small
|
| 421 |
+
timeout: 15
|
| 422 |
+
- runner: self-hosted-azure
|
| 423 |
+
script: L2_Model_Support_nvidia__sr_ssl_flowmatching_16k_430m
|
| 424 |
+
timeout: 15
|
| 425 |
+
- runner: self-hosted-azure
|
| 426 |
+
script: L2_Model_Support_mel_codec_44khz_medium
|
| 427 |
+
timeout: 15
|
| 428 |
+
- runner: self-hosted-azure
|
| 429 |
+
script: L2_Model_Support_mel_codec_22khz_fullband_medium
|
| 430 |
+
timeout: 15
|
| 431 |
+
- runner: self-hosted-azure
|
| 432 |
+
script: L2_Model_Support_nvidia__low_frame_rate_speech_codec_22khz
|
| 433 |
+
timeout: 15
|
| 434 |
+
- runner: self-hosted-azure
|
| 435 |
+
script: L2_Model_Support_nvidia__audio_codec_22khz
|
| 436 |
+
timeout: 15
|
| 437 |
+
- runner: self-hosted-azure
|
| 438 |
+
script: L2_Model_Support_nvidia__audio_codec_44khz
|
| 439 |
+
timeout: 15
|
| 440 |
+
- runner: self-hosted-azure
|
| 441 |
+
script: L2_Model_Support_nvidia__mel_codec_22khz
|
| 442 |
+
timeout: 15
|
| 443 |
+
- runner: self-hosted-azure
|
| 444 |
+
script: L2_Model_Support_nvidia__mel_codec_44khz
|
| 445 |
+
timeout: 15
|
| 446 |
+
- runner: self-hosted-azure
|
| 447 |
+
script: L2_Model_Support_nvidia__nemo_nano_codec_22khz_1_78kbps_12_5fps
|
| 448 |
+
timeout: 15
|
| 449 |
+
- runner: self-hosted-azure
|
| 450 |
+
script: L2_Model_Support_nvidia__nemo_nano_codec_22khz_1_89kbps_21_5fps
|
| 451 |
+
timeout: 15
|
| 452 |
+
- runner: self-hosted-azure
|
| 453 |
+
script: L2_Model_Support_nvidia__nemo_nano_codec_22khz_0_6kbps_12_5fps
|
| 454 |
+
timeout: 15
|
| 455 |
+
- runner: self-hosted-azure
|
| 456 |
+
script: L2_Model_Support_nvidia__tts_en_fastpitch
|
| 457 |
+
timeout: 15
|
| 458 |
+
- runner: self-hosted-azure
|
| 459 |
+
script: L2_Model_Support_nvidia__tts_hifigan
|
| 460 |
+
timeout: 15
|
| 461 |
+
- runner: self-hosted-azure
|
| 462 |
+
script: L2_Model_Support_nvidia__magpie_tts_multilingual_357m
|
| 463 |
+
timeout: 15
|
| 464 |
+
- runner: self-hosted-azure
|
| 465 |
+
script: L2_Model_Support_tts_en_e2e_fastspeech2hifigan
|
| 466 |
+
timeout: 15
|
| 467 |
+
needs: [unit-tests]
|
| 468 |
+
runs-on: ${{ matrix.runner }}
|
| 469 |
+
name: ${{ matrix.script }}
|
| 470 |
+
steps:
|
| 471 |
+
- name: Checkout
|
| 472 |
+
uses: actions/checkout@v6
|
| 473 |
+
with:
|
| 474 |
+
path: ${{ github.run_id }}
|
| 475 |
+
- name: main
|
| 476 |
+
uses: NVIDIA/NeMo/.github/actions/test-template@main
|
| 477 |
+
with:
|
| 478 |
+
runner: ${{ runner.name }}
|
| 479 |
+
script: ${{ matrix.script }}
|
| 480 |
+
tests_to_run: ${{ inputs.test_to_run }}
|
| 481 |
+
image: ${{ inputs.image-name }}
|
| 482 |
+
timeout: ${{ matrix.timeout || 10 }}
|
| 483 |
+
test_dir: e2e_nightly
|
.github/workflows/cicd-main-unit-tests.yml
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) 2025, NVIDIA CORPORATION.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
name: NeMo Unit Tests
|
| 15 |
+
on:
|
| 16 |
+
workflow_call:
|
| 17 |
+
inputs:
|
| 18 |
+
test_to_run:
|
| 19 |
+
required: true
|
| 20 |
+
type: string
|
| 21 |
+
|
| 22 |
+
jobs:
|
| 23 |
+
collections-common-tests:
|
| 24 |
+
strategy:
|
| 25 |
+
fail-fast: false
|
| 26 |
+
matrix:
|
| 27 |
+
include:
|
| 28 |
+
- script: L0_Unit_Tests_GPU_Common
|
| 29 |
+
runner: self-hosted-azure-gpus-1
|
| 30 |
+
- script: L0_Unit_Tests_CPU_Common
|
| 31 |
+
runner: self-hosted-azure-cpu
|
| 32 |
+
cpu-only: true
|
| 33 |
+
runs-on: ${{ matrix.runner }}
|
| 34 |
+
name: ${{ matrix.script }}
|
| 35 |
+
steps:
|
| 36 |
+
- name: Checkout
|
| 37 |
+
uses: actions/checkout@v6
|
| 38 |
+
with:
|
| 39 |
+
path: ${{ github.run_id }}
|
| 40 |
+
- name: main
|
| 41 |
+
uses: NVIDIA/NeMo/.github/actions/test-template@main
|
| 42 |
+
with:
|
| 43 |
+
runner: ${{ runner.name }}
|
| 44 |
+
script: ${{ matrix.script }}
|
| 45 |
+
is_unit_test: true
|
| 46 |
+
tests_to_run: ${{ inputs.test_to_run }}
|
| 47 |
+
cpu-only: ${{ matrix.cpu-only || false }}
|
| 48 |
+
|
| 49 |
+
core-tests:
|
| 50 |
+
strategy:
|
| 51 |
+
fail-fast: false
|
| 52 |
+
matrix:
|
| 53 |
+
include:
|
| 54 |
+
- script: L0_Unit_Tests_GPU_Core
|
| 55 |
+
runner: self-hosted-azure-gpus-1
|
| 56 |
+
- script: L0_Unit_Tests_CPU_Core
|
| 57 |
+
runner: self-hosted-azure-cpu
|
| 58 |
+
cpu-only: true
|
| 59 |
+
- script: L0_Unit_Tests_GPU_Hydra
|
| 60 |
+
runner: self-hosted-azure-gpus-1
|
| 61 |
+
- script: L0_Unit_Tests_CPU_Hydra
|
| 62 |
+
runner: self-hosted-azure-cpu
|
| 63 |
+
cpu-only: true
|
| 64 |
+
runs-on: ${{ matrix.runner }}
|
| 65 |
+
name: ${{ matrix.script }}
|
| 66 |
+
steps:
|
| 67 |
+
- name: Checkout
|
| 68 |
+
uses: actions/checkout@v6
|
| 69 |
+
with:
|
| 70 |
+
path: ${{ github.run_id }}
|
| 71 |
+
- name: main
|
| 72 |
+
uses: NVIDIA/NeMo/.github/actions/test-template@main
|
| 73 |
+
with:
|
| 74 |
+
runner: ${{ runner.name }}
|
| 75 |
+
script: ${{ matrix.script }}
|
| 76 |
+
is_unit_test: true
|
| 77 |
+
tests_to_run: ${{ inputs.test_to_run }}
|
| 78 |
+
cpu-only: ${{ matrix.cpu-only || false }}
|
| 79 |
+
|
| 80 |
+
other-tests:
|
| 81 |
+
strategy:
|
| 82 |
+
fail-fast: false
|
| 83 |
+
matrix:
|
| 84 |
+
include:
|
| 85 |
+
- script: L0_Unit_Tests_GPU_Others
|
| 86 |
+
runner: self-hosted-azure-gpus-1
|
| 87 |
+
- script: L0_Unit_Tests_CPU_Others
|
| 88 |
+
runner: self-hosted-azure-cpu
|
| 89 |
+
cpu-only: true
|
| 90 |
+
runs-on: ${{ matrix.runner }}
|
| 91 |
+
name: ${{ matrix.script }}
|
| 92 |
+
steps:
|
| 93 |
+
- name: Checkout
|
| 94 |
+
uses: actions/checkout@v6
|
| 95 |
+
with:
|
| 96 |
+
path: ${{ github.run_id }}
|
| 97 |
+
- name: main
|
| 98 |
+
uses: NVIDIA/NeMo/.github/actions/test-template@main
|
| 99 |
+
with:
|
| 100 |
+
runner: ${{ runner.name }}
|
| 101 |
+
script: ${{ matrix.script }}
|
| 102 |
+
is_unit_test: true
|
| 103 |
+
tests_to_run: ${{ inputs.test_to_run }}
|
| 104 |
+
cpu-only: ${{ matrix.cpu-only || false }}
|
| 105 |
+
is_optional: ${{ matrix.is-optional || false }}
|
.github/workflows/cicd-main.yml
ADDED
|
@@ -0,0 +1,395 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) 2025, NVIDIA CORPORATION.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
name: CICD NeMo
|
| 15 |
+
on:
|
| 16 |
+
schedule:
|
| 17 |
+
- cron: 0 0 * * *
|
| 18 |
+
pull_request:
|
| 19 |
+
branches:
|
| 20 |
+
- main
|
| 21 |
+
- r**
|
| 22 |
+
- weekly-bump*
|
| 23 |
+
types: [labeled]
|
| 24 |
+
workflow_dispatch:
|
| 25 |
+
inputs:
|
| 26 |
+
test_to_run:
|
| 27 |
+
required: false
|
| 28 |
+
default: all
|
| 29 |
+
type: string
|
| 30 |
+
description: Comma-separated list of tests to run. Use "all" to run the full test suite.
|
| 31 |
+
|
| 32 |
+
concurrency:
|
| 33 |
+
# group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/main' && github.run_id || github.event.pull_request.number || github.ref }}-${{ github.event_name }}
|
| 34 |
+
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-${{ github.event.label.name || 'main' }}-${{ github.event_name }}
|
| 35 |
+
cancel-in-progress: true
|
| 36 |
+
|
| 37 |
+
jobs:
|
| 38 |
+
pre-flight:
|
| 39 |
+
runs-on: ubuntu-latest
|
| 40 |
+
outputs:
|
| 41 |
+
test_to_run: ${{ steps.test_to_run.outputs.main }}
|
| 42 |
+
is_ci_workload: ${{ steps.is_ci_workload.outputs.main }}
|
| 43 |
+
no_fail_fast: ${{ steps.no_fail_fast.outputs.main }}
|
| 44 |
+
components_to_run: ${{ steps.components_to_run.outputs.main }}
|
| 45 |
+
env:
|
| 46 |
+
TESTS_TO_RUN: ${{ inputs.test_to_run }}
|
| 47 |
+
EVENT_NAME: ${{ github.event_name }}
|
| 48 |
+
HAS_LABEL: ${{ github.event.label.name == 'Run CICD' }}
|
| 49 |
+
steps:
|
| 50 |
+
- name: Checkout branch
|
| 51 |
+
uses: actions/checkout@v6
|
| 52 |
+
with:
|
| 53 |
+
fetch-depth: 0
|
| 54 |
+
|
| 55 |
+
- name: Select components to run
|
| 56 |
+
id: components_to_run
|
| 57 |
+
run: |
|
| 58 |
+
echo "main=[\"speech\"]" | tee -a "$GITHUB_OUTPUT"
|
| 59 |
+
|
| 60 |
+
- name: Select tests to run
|
| 61 |
+
id: test_to_run
|
| 62 |
+
run: |
|
| 63 |
+
# For manual dispatch, we replace `all` with the actual job names
|
| 64 |
+
if [[ "$EVENT_NAME" == "workflow_dispatch" ]]; then
|
| 65 |
+
TESTS_TO_RUN=$TESTS_TO_RUN
|
| 66 |
+
|
| 67 |
+
# For correctly labeled PR, we replace `all` with the actual job names
|
| 68 |
+
elif [[ "$EVENT_NAME" == "pull_request" && "$HAS_LABEL" == "true" ]]; then
|
| 69 |
+
TESTS_TO_RUN=all
|
| 70 |
+
|
| 71 |
+
# For incorrectly labeled PR, run no tests
|
| 72 |
+
elif [[ "$EVENT_NAME" == "pull_request" && "$HAS_LABEL" != "true" ]]; then
|
| 73 |
+
TESTS_TO_RUN=""
|
| 74 |
+
|
| 75 |
+
# For push events, run all tests. This is so that we can generate coverage
|
| 76 |
+
# on branch `main`.
|
| 77 |
+
elif [[ "$EVENT_NAME" == "push" || "$EVENT_NAME" == "schedule" ]]; then
|
| 78 |
+
TESTS_TO_RUN=all
|
| 79 |
+
|
| 80 |
+
else
|
| 81 |
+
echo "Unsupported event_name $EVENT_NAME provided".
|
| 82 |
+
exit 1
|
| 83 |
+
fi
|
| 84 |
+
|
| 85 |
+
parsed_string=$(echo "$TESTS_TO_RUN" | jq -c --raw-input 'split(",")')
|
| 86 |
+
echo "main=${parsed_string}" | tee -a "$GITHUB_OUTPUT"
|
| 87 |
+
|
| 88 |
+
- name: Check if this is a CI workload
|
| 89 |
+
shell: bash
|
| 90 |
+
id: is_ci_workload
|
| 91 |
+
run: |
|
| 92 |
+
branch_name=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}
|
| 93 |
+
|
| 94 |
+
if [[ "$branch_name" =~ ^bump-ci-container || "$EVENT_NAME" == "schedule" ]]; then
|
| 95 |
+
is_ci_workload=true
|
| 96 |
+
echo "main=true" | tee -a "$GITHUB_OUTPUT"
|
| 97 |
+
else
|
| 98 |
+
is_ci_workload=false
|
| 99 |
+
fi
|
| 100 |
+
|
| 101 |
+
echo "main=$is_ci_workload" | tee -a "$GITHUB_OUTPUT"
|
| 102 |
+
|
| 103 |
+
- name: Check if no-fail-fast is set
|
| 104 |
+
shell: bash
|
| 105 |
+
id: no_fail_fast
|
| 106 |
+
env:
|
| 107 |
+
HAS_FAIL_FAST_LABEL: ${{ contains(github.event.pull_request.labels.*.name, 'no-fail-fast') }}
|
| 108 |
+
run: |
|
| 109 |
+
if [[ "$HAS_FAIL_FAST_LABEL" == "true" || "$EVENT_NAME" == "schedule" ]]; then
|
| 110 |
+
no_fail_fast=true
|
| 111 |
+
else
|
| 112 |
+
no_fail_fast=false
|
| 113 |
+
fi
|
| 114 |
+
|
| 115 |
+
echo "main=$no_fail_fast" | tee -a "$GITHUB_OUTPUT"
|
| 116 |
+
|
| 117 |
+
code-linting:
|
| 118 |
+
if: needs.pre-flight.outputs.test_to_run != '[]'
|
| 119 |
+
needs: [pre-flight]
|
| 120 |
+
uses: ./.github/workflows/code-linting.yml
|
| 121 |
+
|
| 122 |
+
cicd-wait-in-queue:
|
| 123 |
+
needs: [pre-flight, code-linting]
|
| 124 |
+
runs-on: ubuntu-latest
|
| 125 |
+
environment: test
|
| 126 |
+
if: |
|
| 127 |
+
needs.pre-flight.outputs.test_to_run != '[]'
|
| 128 |
+
&& needs.pre-flight.outputs.components_to_run != '[]'
|
| 129 |
+
&& needs.pre-flight.outputs.is_ci_workload == 'false'
|
| 130 |
+
steps:
|
| 131 |
+
- name: Running CI tests
|
| 132 |
+
run: |
|
| 133 |
+
echo "Running CI tests"
|
| 134 |
+
|
| 135 |
+
cicd-test-container-build:
|
| 136 |
+
uses: ./.github/workflows/_build_container.yml
|
| 137 |
+
needs: [pre-flight, code-linting, cicd-wait-in-queue]
|
| 138 |
+
if: |
|
| 139 |
+
needs.pre-flight.outputs.test_to_run != '[]'
|
| 140 |
+
&& needs.pre-flight.outputs.components_to_run != '[]'
|
| 141 |
+
&& (
|
| 142 |
+
success()
|
| 143 |
+
|| (
|
| 144 |
+
needs.cicd-wait-in-queue.result == 'skipped'
|
| 145 |
+
&& needs.pre-flight.outputs.is_ci_workload == 'true'
|
| 146 |
+
)
|
| 147 |
+
)
|
| 148 |
+
&& !cancelled()
|
| 149 |
+
with:
|
| 150 |
+
image-name: nemo_container
|
| 151 |
+
dockerfile: docker/Dockerfile.ci
|
| 152 |
+
|
| 153 |
+
cicd-import-tests:
|
| 154 |
+
if: |
|
| 155 |
+
needs.pre-flight.outputs.test_to_run != '[]'
|
| 156 |
+
&& needs.pre-flight.outputs.components_to_run != '[]'
|
| 157 |
+
&& (
|
| 158 |
+
success()
|
| 159 |
+
|| (
|
| 160 |
+
needs.cicd-wait-in-queue.result == 'skipped'
|
| 161 |
+
&& needs.pre-flight.outputs.is_ci_workload == 'true'
|
| 162 |
+
)
|
| 163 |
+
)
|
| 164 |
+
&& !cancelled()
|
| 165 |
+
needs: [cicd-test-container-build, pre-flight]
|
| 166 |
+
runs-on: self-hosted-azure-gpus-1
|
| 167 |
+
steps:
|
| 168 |
+
- name: Create UUID
|
| 169 |
+
id: uuid
|
| 170 |
+
run: |
|
| 171 |
+
echo "id=$(uuidgen)" >> "$GITHUB_OUTPUT"
|
| 172 |
+
|
| 173 |
+
- name: Checkout NeMo
|
| 174 |
+
uses: actions/checkout@v6
|
| 175 |
+
with:
|
| 176 |
+
path: ${{ github.run_id }}/${{steps.uuid.outputs.id }}/NeMo
|
| 177 |
+
|
| 178 |
+
- name: Run some checks
|
| 179 |
+
run: |
|
| 180 |
+
docker run \
|
| 181 |
+
--rm \
|
| 182 |
+
--device=/dev/nvidia0 \
|
| 183 |
+
--gpus all \
|
| 184 |
+
--shm-size=8g \
|
| 185 |
+
--volume $(pwd)/${{ github.run_id }}/${{steps.uuid.outputs.id }}/NeMo:/workspace \
|
| 186 |
+
--env TRANSFORMERS_OFFLINE=0 \
|
| 187 |
+
--env HYDRA_FULL_ERROR=1 --env PYTHONUNBUFFERED=1 nemoci.azurecr.io/nemo_container:${{ github.run_id }} bash -c '\
|
| 188 |
+
# PyTorch Lightning version
|
| 189 |
+
python -c "import lightning.pytorch; print(lightning.pytorch.__version__)"
|
| 190 |
+
|
| 191 |
+
# PyTorch Lightning DDP Checks
|
| 192 |
+
CUDA_VISIBLE_DEVICES="0,1" python "tests/core_ptl/check_for_ranks.py"
|
| 193 |
+
|
| 194 |
+
# Basic Import Checks
|
| 195 |
+
python tests/core_ptl/check_imports.py --domain asr
|
| 196 |
+
python tests/core_ptl/check_imports.py --domain tts
|
| 197 |
+
'
|
| 198 |
+
|
| 199 |
+
L0_Setup_Test_Data_And_Models:
|
| 200 |
+
needs: [pre-flight, cicd-test-container-build, cicd-wait-in-queue]
|
| 201 |
+
runs-on: self-hosted-azure
|
| 202 |
+
if: |
|
| 203 |
+
needs.pre-flight.outputs.test_to_run != '[]'
|
| 204 |
+
&& needs.pre-flight.outputs.components_to_run != '[]'
|
| 205 |
+
&& (
|
| 206 |
+
success()
|
| 207 |
+
|| (
|
| 208 |
+
needs.cicd-wait-in-queue.result == 'skipped'
|
| 209 |
+
&& needs.pre-flight.outputs.is_ci_workload == 'true'
|
| 210 |
+
)
|
| 211 |
+
)
|
| 212 |
+
&& !cancelled()
|
| 213 |
+
steps:
|
| 214 |
+
- name: Checkout
|
| 215 |
+
uses: actions/checkout@v6
|
| 216 |
+
with:
|
| 217 |
+
path: ${{ github.run_id }}
|
| 218 |
+
|
| 219 |
+
- name: main
|
| 220 |
+
uses: NVIDIA/NeMo/.github/actions/test-template@main
|
| 221 |
+
with:
|
| 222 |
+
runner: ${{ runner.name }}
|
| 223 |
+
script: L0_Setup_Test_Data_And_Models
|
| 224 |
+
tests_to_run: '["L0_Setup_Test_Data_And_Models"]'
|
| 225 |
+
|
| 226 |
+
cicd-main-unit-tests:
|
| 227 |
+
needs: [pre-flight, cicd-test-container-build]
|
| 228 |
+
uses: ./.github/workflows/cicd-main-unit-tests.yml
|
| 229 |
+
if: |
|
| 230 |
+
needs.pre-flight.outputs.test_to_run != '[]'
|
| 231 |
+
&& needs.pre-flight.outputs.components_to_run != '[]'
|
| 232 |
+
&& (
|
| 233 |
+
success()
|
| 234 |
+
|| (
|
| 235 |
+
needs.cicd-wait-in-queue.result == 'skipped'
|
| 236 |
+
&& needs.pre-flight.outputs.is_ci_workload == 'true'
|
| 237 |
+
)
|
| 238 |
+
)
|
| 239 |
+
&& !cancelled()
|
| 240 |
+
with:
|
| 241 |
+
test_to_run: ${{ needs.pre-flight.outputs.test_to_run }}
|
| 242 |
+
|
| 243 |
+
cicd-main-speech:
|
| 244 |
+
needs: [pre-flight, cicd-test-container-build, cicd-main-unit-tests]
|
| 245 |
+
uses: ./.github/workflows/cicd-main-speech.yml
|
| 246 |
+
if: |
|
| 247 |
+
(
|
| 248 |
+
needs.pre-flight.outputs.test_to_run != '[]'
|
| 249 |
+
&& (
|
| 250 |
+
contains(fromJson(needs.pre-flight.outputs.components_to_run), 'speech')
|
| 251 |
+
)
|
| 252 |
+
)
|
| 253 |
+
&& (
|
| 254 |
+
success()
|
| 255 |
+
|| (
|
| 256 |
+
needs.cicd-wait-in-queue.result == 'skipped'
|
| 257 |
+
&& needs.pre-flight.outputs.is_ci_workload == 'true'
|
| 258 |
+
)
|
| 259 |
+
)
|
| 260 |
+
&& !cancelled()
|
| 261 |
+
with:
|
| 262 |
+
test_to_run: ${{ needs.pre-flight.outputs.test_to_run }}
|
| 263 |
+
|
| 264 |
+
Nemo_CICD_Test:
|
| 265 |
+
needs:
|
| 266 |
+
- pre-flight
|
| 267 |
+
- cicd-test-container-build
|
| 268 |
+
- cicd-import-tests
|
| 269 |
+
- L0_Setup_Test_Data_And_Models
|
| 270 |
+
- cicd-main-unit-tests
|
| 271 |
+
- cicd-main-speech
|
| 272 |
+
if: always()
|
| 273 |
+
runs-on: ubuntu-latest
|
| 274 |
+
permissions: write-all
|
| 275 |
+
steps:
|
| 276 |
+
- name: Checkout
|
| 277 |
+
uses: actions/checkout@v6
|
| 278 |
+
|
| 279 |
+
- name: Get workflow result
|
| 280 |
+
id: result
|
| 281 |
+
env:
|
| 282 |
+
GH_TOKEN: ${{ github.token }}
|
| 283 |
+
RUN_ID: ${{ github.run_id }}
|
| 284 |
+
HAS_LABEL: ${{ github.event.label.name == 'Run CICD' }}
|
| 285 |
+
IS_SCHEDULED: ${{ github.event_name == 'schedule' }}
|
| 286 |
+
run: |
|
| 287 |
+
# Get workflow run details and check job conclusions
|
| 288 |
+
LATEST_ATTEMPT=$(gh run view $RUN_ID --json jobs -q '[.jobs[] | select(.conclusion != null) | .conclusion] | last')
|
| 289 |
+
NUM_FAILED=$(gh run view $RUN_ID --json jobs -q '[.jobs[] | select(.conclusion == "failure") | .name] | length')
|
| 290 |
+
NUM_CANCELLED=$(gh run view $RUN_ID --json jobs -q '[.jobs[] | select(.conclusion == "cancelled") | .name] | length')
|
| 291 |
+
|
| 292 |
+
if [[ $NUM_FAILED -eq 0 && $NUM_CANCELLED -eq 0 && ("$HAS_LABEL" == "true" || "$IS_SCHEDULED" == "true") ]]; then
|
| 293 |
+
RESULT="success"
|
| 294 |
+
elif [[ $NUM_CANCELLED -gt 0 ]]; then
|
| 295 |
+
RESULT="cancelled"
|
| 296 |
+
else
|
| 297 |
+
RESULT="failure"
|
| 298 |
+
fi
|
| 299 |
+
|
| 300 |
+
# Output the final status
|
| 301 |
+
echo "code=$RESULT" | tee -a $GITHUB_OUTPUT
|
| 302 |
+
|
| 303 |
+
- name: Checkout for GH CLI
|
| 304 |
+
uses: actions/checkout@v6
|
| 305 |
+
|
| 306 |
+
- name: Remove label if not cancelled
|
| 307 |
+
if: |
|
| 308 |
+
steps.result.outputs.code != 'cancelled'
|
| 309 |
+
&& github.event.label.name == 'Run CICD'
|
| 310 |
+
&& github.event.pull_request.head.repo.full_name == github.repository
|
| 311 |
+
env:
|
| 312 |
+
GH_TOKEN: ${{ github.token }}
|
| 313 |
+
PR_NUMBER: ${{ github.event.number }}
|
| 314 |
+
run: gh pr edit "$PR_NUMBER" --remove-label "Run CICD"
|
| 315 |
+
|
| 316 |
+
- name: Pipeline successful, add PR comment
|
| 317 |
+
if: |
|
| 318 |
+
steps.result.outputs.code == 'success'
|
| 319 |
+
&& github.event_name == 'pull_request'
|
| 320 |
+
&& env.SLACK_WEBHOOK != ''
|
| 321 |
+
uses: peter-evans/create-or-update-comment@v4
|
| 322 |
+
env:
|
| 323 |
+
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
| 324 |
+
REPOSITORY: ${{ github.repository }}
|
| 325 |
+
RUN_ID: ${{ github.run_id }}
|
| 326 |
+
with:
|
| 327 |
+
issue-number: ${{ github.event.number }}
|
| 328 |
+
body: |
|
| 329 |
+
[🤖]: Hi @${{ github.event.pull_request.user.login }} 👋,
|
| 330 |
+
|
| 331 |
+
We wanted to let you know that a [CICD pipeline](https://github.com/${{ env.REPOSITORY }}/actions/runs/${{ env.RUN_ID }}) for this PR just finished successfully.
|
| 332 |
+
|
| 333 |
+
So it might be time to merge this PR or get some approvals.
|
| 334 |
+
|
| 335 |
+
- name: Exit
|
| 336 |
+
if: ${{ always() }}
|
| 337 |
+
env:
|
| 338 |
+
RESULT: ${{ steps.result.outputs.code }}
|
| 339 |
+
run: |
|
| 340 |
+
if [ $RESULT == "success" ]; then
|
| 341 |
+
exit 0
|
| 342 |
+
else
|
| 343 |
+
exit 1
|
| 344 |
+
fi
|
| 345 |
+
|
| 346 |
+
Coverage:
|
| 347 |
+
runs-on: ubuntu-latest
|
| 348 |
+
needs: [pre-flight, Nemo_CICD_Test]
|
| 349 |
+
if: |
|
| 350 |
+
needs.pre-flight.outputs.test_to_run != '[]'
|
| 351 |
+
&& needs.pre-flight.outputs.components_to_run != '[]'
|
| 352 |
+
&& (
|
| 353 |
+
success()
|
| 354 |
+
|| needs.Nemo_CICD_Test.result == 'success'
|
| 355 |
+
)
|
| 356 |
+
&& !cancelled()
|
| 357 |
+
strategy:
|
| 358 |
+
matrix:
|
| 359 |
+
flag: [unit-test, e2e]
|
| 360 |
+
steps:
|
| 361 |
+
- name: Checkout
|
| 362 |
+
uses: actions/checkout@v6
|
| 363 |
+
|
| 364 |
+
- name: Download coverage reports of current branch
|
| 365 |
+
uses: actions/download-artifact@v7
|
| 366 |
+
with:
|
| 367 |
+
pattern: coverage-${{ matrix.flag }}-*
|
| 368 |
+
|
| 369 |
+
- name: Get total coverage of current branch
|
| 370 |
+
shell: bash -x -e -u -o pipefail {0}
|
| 371 |
+
if: always()
|
| 372 |
+
run: |
|
| 373 |
+
pip install coverage
|
| 374 |
+
|
| 375 |
+
ls -al .
|
| 376 |
+
ls -al coverage-*/
|
| 377 |
+
coverage combine --keep $(ls coverage-*/.coverage)
|
| 378 |
+
coverage report -i
|
| 379 |
+
rm -rf coverage-*
|
| 380 |
+
ls -al
|
| 381 |
+
|
| 382 |
+
- name: Upload coverage reports to Codecov
|
| 383 |
+
uses: codecov/codecov-action@v5
|
| 384 |
+
with:
|
| 385 |
+
token: ${{ secrets.CODECOV_TOKEN }}
|
| 386 |
+
verbose: true
|
| 387 |
+
flags: ${{ matrix.flag }}
|
| 388 |
+
|
| 389 |
+
- name: Upload artifacts
|
| 390 |
+
uses: actions/upload-artifact@v6
|
| 391 |
+
with:
|
| 392 |
+
name: coverage-${{ matrix.flag }}-aggregated
|
| 393 |
+
path: |
|
| 394 |
+
.coverage
|
| 395 |
+
include-hidden-files: true
|
.github/workflows/cicd-relabel-bot.yml
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# If the PR get's updated by a new commit, it prevents auto-merges
|
| 2 |
+
# since there's no CI event attached to the commit anymore.
|
| 3 |
+
# This workflow re-attaches the label after a push, if the PR
|
| 4 |
+
# was already labeled prior to the push.
|
| 5 |
+
name: CICD Relabel bot
|
| 6 |
+
|
| 7 |
+
on:
|
| 8 |
+
pull_request_target:
|
| 9 |
+
|
| 10 |
+
jobs:
|
| 11 |
+
relabel:
|
| 12 |
+
runs-on: ubuntu-latest
|
| 13 |
+
env:
|
| 14 |
+
PR_NUMBER: ${{ github.event.number }}
|
| 15 |
+
GH_TOKEN: ${{ secrets.NEMO_RELABEL_TOKEN }}
|
| 16 |
+
HOSTNAME: ${{ github.server_url }}
|
| 17 |
+
permissions: write-all
|
| 18 |
+
steps:
|
| 19 |
+
- name: Checkout repo
|
| 20 |
+
uses: actions/checkout@v6
|
| 21 |
+
|
| 22 |
+
- name: Check if PR was already labeled with `Run CICD`
|
| 23 |
+
id: pre-flight
|
| 24 |
+
run: |
|
| 25 |
+
LABELS=$(gh pr view "$PR_NUMBER" --json labels)
|
| 26 |
+
HAS_LABEL=$(echo $LABELS \
|
| 27 |
+
| jq '[.labels[].name] | any(. == "Run CICD")'
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
echo "has-label=$HAS_LABEL" | tee -a "$GITHUB_OUTPUT"
|
| 31 |
+
|
| 32 |
+
- name: Relabel PR
|
| 33 |
+
if: ${{ steps.pre-flight.outputs.has-label == 'true' }}
|
| 34 |
+
run: |
|
| 35 |
+
gh pr edit "$PR_NUMBER" --remove-label "Run CICD"
|
| 36 |
+
gh pr edit "$PR_NUMBER" --add-label "Run CICD"
|
.github/workflows/claude-answer.yml
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Claude Answer Issue
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
issue_comment:
|
| 5 |
+
types: [created]
|
| 6 |
+
|
| 7 |
+
permissions:
|
| 8 |
+
contents: read
|
| 9 |
+
issues: write
|
| 10 |
+
id-token: write
|
| 11 |
+
|
| 12 |
+
jobs:
|
| 13 |
+
authorize:
|
| 14 |
+
if: >-
|
| 15 |
+
!github.event.issue.pull_request &&
|
| 16 |
+
contains(github.event.comment.body, '/claude answer')
|
| 17 |
+
runs-on: ubuntu-latest
|
| 18 |
+
steps:
|
| 19 |
+
- name: Check team membership
|
| 20 |
+
uses: actions/github-script@v8
|
| 21 |
+
with:
|
| 22 |
+
github-token: ${{ secrets.ORG_TEAM_READ_TOKEN }}
|
| 23 |
+
script: |
|
| 24 |
+
const username = context.payload.comment.user.login;
|
| 25 |
+
try {
|
| 26 |
+
const res = await github.rest.teams.getMembershipForUserInOrg({
|
| 27 |
+
org: 'NVIDIA-NeMo',
|
| 28 |
+
team_slug: 'speech_team',
|
| 29 |
+
username,
|
| 30 |
+
});
|
| 31 |
+
if (res.data.state !== 'active') {
|
| 32 |
+
core.setFailed(`${username} is not an active member of NVIDIA Speech Team`);
|
| 33 |
+
}
|
| 34 |
+
} catch (e) {
|
| 35 |
+
core.setFailed(`${username} is not a member of NVIDIA Speech Team`);
|
| 36 |
+
}
|
| 37 |
+
|
| 38 |
+
acknowledge:
|
| 39 |
+
needs: authorize
|
| 40 |
+
runs-on: ubuntu-latest
|
| 41 |
+
steps:
|
| 42 |
+
- name: Add eyes reaction to comment
|
| 43 |
+
uses: actions/github-script@v8
|
| 44 |
+
with:
|
| 45 |
+
script: |
|
| 46 |
+
await github.rest.reactions.createForIssueComment({
|
| 47 |
+
owner: context.repo.owner,
|
| 48 |
+
repo: context.repo.repo,
|
| 49 |
+
comment_id: context.payload.comment.id,
|
| 50 |
+
content: 'eyes'
|
| 51 |
+
});
|
| 52 |
+
|
| 53 |
+
claude-answer:
|
| 54 |
+
needs: acknowledge
|
| 55 |
+
runs-on: ubuntu-latest
|
| 56 |
+
steps:
|
| 57 |
+
- uses: actions/checkout@v6
|
| 58 |
+
- uses: anthropics/claude-code-action@v1
|
| 59 |
+
with:
|
| 60 |
+
prompt: |
|
| 61 |
+
You are a helpful assistant for the NeMo repository.
|
| 62 |
+
Answer the user's question based on the issue description, comments, and the codebase.
|
| 63 |
+
Be concise and provide code references where relevant.
|
| 64 |
+
Do NOT make any code changes or create PRs — only answer the question.
|
| 65 |
+
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
|
.github/workflows/claude-fix.yml
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Claude Fix Issue
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
issue_comment:
|
| 5 |
+
types: [created]
|
| 6 |
+
|
| 7 |
+
permissions:
|
| 8 |
+
contents: write
|
| 9 |
+
pull-requests: write
|
| 10 |
+
issues: write
|
| 11 |
+
id-token: write
|
| 12 |
+
|
| 13 |
+
jobs:
|
| 14 |
+
authorize:
|
| 15 |
+
if: >-
|
| 16 |
+
!github.event.issue.pull_request &&
|
| 17 |
+
contains(github.event.comment.body, '/claude fix')
|
| 18 |
+
runs-on: ubuntu-latest
|
| 19 |
+
steps:
|
| 20 |
+
- name: Check team membership
|
| 21 |
+
uses: actions/github-script@v8
|
| 22 |
+
with:
|
| 23 |
+
github-token: ${{ secrets.ORG_TEAM_READ_TOKEN }}
|
| 24 |
+
script: |
|
| 25 |
+
const username = context.payload.comment.user.login;
|
| 26 |
+
try {
|
| 27 |
+
const res = await github.rest.teams.getMembershipForUserInOrg({
|
| 28 |
+
org: 'NVIDIA-NeMo',
|
| 29 |
+
team_slug: 'speech_team',
|
| 30 |
+
username,
|
| 31 |
+
});
|
| 32 |
+
if (res.data.state !== 'active') {
|
| 33 |
+
core.setFailed(`${username} is not an active member of NVIDIA Speech Team`);
|
| 34 |
+
}
|
| 35 |
+
} catch (e) {
|
| 36 |
+
core.setFailed(`${username} is not a member of NVIDIA Speech Team`);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
acknowledge:
|
| 40 |
+
needs: authorize
|
| 41 |
+
runs-on: ubuntu-latest
|
| 42 |
+
steps:
|
| 43 |
+
- name: Add eyes reaction to comment
|
| 44 |
+
uses: actions/github-script@v8
|
| 45 |
+
with:
|
| 46 |
+
script: |
|
| 47 |
+
await github.rest.reactions.createForIssueComment({
|
| 48 |
+
owner: context.repo.owner,
|
| 49 |
+
repo: context.repo.repo,
|
| 50 |
+
comment_id: context.payload.comment.id,
|
| 51 |
+
content: 'eyes'
|
| 52 |
+
});
|
| 53 |
+
|
| 54 |
+
claude-fix:
|
| 55 |
+
needs: acknowledge
|
| 56 |
+
runs-on: ubuntu-latest
|
| 57 |
+
steps:
|
| 58 |
+
- uses: actions/checkout@v6
|
| 59 |
+
- uses: anthropics/claude-code-action@v1
|
| 60 |
+
id: claude
|
| 61 |
+
with:
|
| 62 |
+
prompt: |
|
| 63 |
+
You are a developer working on the NeMo repository.
|
| 64 |
+
Implement a focused fix for the issue based on the issue description and comments.
|
| 65 |
+
|
| 66 |
+
Requirements:
|
| 67 |
+
- Always use `git commit -s` to sign off all commits (DCO requirement).
|
| 68 |
+
- Prioritize correctness and minimal scope; avoid unrelated refactors.
|
| 69 |
+
- Reproduce or reason about the failure first, then implement the smallest robust fix.
|
| 70 |
+
- If required, add or update tests for the changed behavior. If tests are not feasible, explain why.
|
| 71 |
+
- If required, update related docs or comments when behavior or usage changes.
|
| 72 |
+
|
| 73 |
+
PR expectations:
|
| 74 |
+
- Create a new branch and open a pull request with your changes.
|
| 75 |
+
- Include a concise summary of root cause and fix based on the following PR template:
|
| 76 |
+
```
|
| 77 |
+
# What does this PR do ?
|
| 78 |
+
Add a one line overview of what this PR aims to accomplish.
|
| 79 |
+
|
| 80 |
+
# Changelog
|
| 81 |
+
Add specific line by line info of high level changes in this PR.
|
| 82 |
+
|
| 83 |
+
# Usage
|
| 84 |
+
Add a usage example of the changed functionality.
|
| 85 |
+
|
| 86 |
+
Related to #${{ github.event.issue.number }}
|
| 87 |
+
|
| 88 |
+
This PR is created by Claude.
|
| 89 |
+
```
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
|
| 93 |
+
|
| 94 |
+
- name: Label PR with agent-contribution
|
| 95 |
+
if: steps.claude.outputs.branch_name
|
| 96 |
+
uses: actions/github-script@v8
|
| 97 |
+
with:
|
| 98 |
+
script: |
|
| 99 |
+
const prs = await github.rest.pulls.list({
|
| 100 |
+
owner: context.repo.owner,
|
| 101 |
+
repo: context.repo.repo,
|
| 102 |
+
head: `${context.repo.owner}:${process.env.BRANCH_NAME}`,
|
| 103 |
+
state: 'open'
|
| 104 |
+
});
|
| 105 |
+
if (prs.data.length > 0) {
|
| 106 |
+
await github.rest.issues.addLabels({
|
| 107 |
+
owner: context.repo.owner,
|
| 108 |
+
repo: context.repo.repo,
|
| 109 |
+
issue_number: prs.data[0].number,
|
| 110 |
+
labels: ['agent-contribution']
|
| 111 |
+
});
|
| 112 |
+
}
|
| 113 |
+
env:
|
| 114 |
+
BRANCH_NAME: ${{ steps.claude.outputs.branch_name }}
|
.github/workflows/claude-review.yml
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Claude Code Review
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
issue_comment:
|
| 5 |
+
types: [created]
|
| 6 |
+
|
| 7 |
+
permissions:
|
| 8 |
+
contents: read
|
| 9 |
+
pull-requests: write
|
| 10 |
+
issues: write
|
| 11 |
+
id-token: write
|
| 12 |
+
|
| 13 |
+
jobs:
|
| 14 |
+
acknowledge:
|
| 15 |
+
if: >-
|
| 16 |
+
github.event.issue.pull_request &&
|
| 17 |
+
contains(github.event.comment.body, '/claude review')
|
| 18 |
+
runs-on: ubuntu-latest
|
| 19 |
+
steps:
|
| 20 |
+
- name: Add eyes reaction to comment
|
| 21 |
+
uses: actions/github-script@v8
|
| 22 |
+
with:
|
| 23 |
+
script: |
|
| 24 |
+
await github.rest.reactions.createForIssueComment({
|
| 25 |
+
owner: context.repo.owner,
|
| 26 |
+
repo: context.repo.repo,
|
| 27 |
+
comment_id: context.payload.comment.id,
|
| 28 |
+
content: 'eyes'
|
| 29 |
+
});
|
| 30 |
+
|
| 31 |
+
claude-review:
|
| 32 |
+
needs: acknowledge
|
| 33 |
+
uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_claude_review.yml@v0.79.0
|
| 34 |
+
with:
|
| 35 |
+
prompt: |
|
| 36 |
+
You are doing a light code review. Keep it concise and actionable.
|
| 37 |
+
|
| 38 |
+
Focus ONLY on:
|
| 39 |
+
- Critical bugs or logic errors
|
| 40 |
+
- Typos in code, comments, or strings
|
| 41 |
+
- Missing or insufficient test coverage for changed code
|
| 42 |
+
- Outdated or inaccurate documentation affected by the changes
|
| 43 |
+
|
| 44 |
+
Do NOT comment on:
|
| 45 |
+
- Style preferences or formatting
|
| 46 |
+
- Minor naming suggestions
|
| 47 |
+
- Architectural opinions or refactoring ideas
|
| 48 |
+
- Performance unless there is a clear, measurable issue
|
| 49 |
+
|
| 50 |
+
Provide feedback using inline comments for specific code suggestions.
|
| 51 |
+
Use top-level comments for general observations.
|
| 52 |
+
|
| 53 |
+
IMPORTANT: Do NOT approve the pull request. Only leave comments.
|
| 54 |
+
|
| 55 |
+
It's perfectly acceptable to not have anything to comment on.
|
| 56 |
+
If you do not have anything to comment on, post "LGTM".
|
| 57 |
+
secrets:
|
| 58 |
+
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
|
.github/workflows/close-inactive-issue-pr.yml
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Stale-Close-Inactive-Issues-PRs
|
| 2 |
+
on:
|
| 3 |
+
schedule:
|
| 4 |
+
- cron: "30 1 * * *"
|
| 5 |
+
|
| 6 |
+
jobs:
|
| 7 |
+
close-issues:
|
| 8 |
+
runs-on: ubuntu-latest
|
| 9 |
+
permissions:
|
| 10 |
+
issues: write
|
| 11 |
+
pull-requests: write
|
| 12 |
+
steps:
|
| 13 |
+
- uses: actions/stale@v6
|
| 14 |
+
with:
|
| 15 |
+
operations-per-run: 100
|
| 16 |
+
days-before-issue-stale: 30
|
| 17 |
+
days-before-issue-close: 7
|
| 18 |
+
stale-issue-label: "stale"
|
| 19 |
+
stale-issue-message: "This issue is stale because it has been open for 30 days with no activity. Remove stale label or comment or this will be closed in 7 days."
|
| 20 |
+
close-issue-message: "This issue was closed because it has been inactive for 7 days since being marked as stale."
|
| 21 |
+
days-before-pr-stale: 14
|
| 22 |
+
days-before-pr-close: 7
|
| 23 |
+
stale-pr-message: "This PR is stale because it has been open for 14 days with no activity. Remove stale label or comment or update or this will be closed in 7 days."
|
| 24 |
+
close-pr-message: "This PR was closed because it has been inactive for 7 days since being marked as stale."
|
| 25 |
+
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
.github/workflows/code-formatting.yml
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Isort and Black Formatting
|
| 2 |
+
# Incrementally reformat only changed files with black, all files with isort
|
| 3 |
+
#
|
| 4 |
+
# Replaces pre-commit.ci, since it reformats all the files.
|
| 5 |
+
# See issue https://github.com/pre-commit-ci/issues/issues/90
|
| 6 |
+
#
|
| 7 |
+
# The action requires a custom token to trigger workflow after pushing reformatted files back to the branch.
|
| 8 |
+
# `secrets.GITHUB_TOKEN` can be used instead, but this will result
|
| 9 |
+
# in not running necessary checks after reformatting, which is undesirable.
|
| 10 |
+
# For details see https://github.com/orgs/community/discussions/25702
|
| 11 |
+
|
| 12 |
+
on:
|
| 13 |
+
pull_request_target:
|
| 14 |
+
paths:
|
| 15 |
+
- "**.py"
|
| 16 |
+
types: [opened, synchronize, reopened, labeled, unlabeled]
|
| 17 |
+
|
| 18 |
+
defaults:
|
| 19 |
+
run:
|
| 20 |
+
shell: bash -x -e -u -o pipefail {0}
|
| 21 |
+
|
| 22 |
+
jobs:
|
| 23 |
+
reformat_with_isort_and_black:
|
| 24 |
+
runs-on: ubuntu-latest
|
| 25 |
+
permissions:
|
| 26 |
+
# write permissions required to commit changes
|
| 27 |
+
contents: write
|
| 28 |
+
steps:
|
| 29 |
+
- name: Checkout branch
|
| 30 |
+
uses: actions/checkout@v6
|
| 31 |
+
with:
|
| 32 |
+
# setup repository and ref for PRs, see
|
| 33 |
+
# https://github.com/EndBug/add-and-commit?tab=readme-ov-file#working-with-prs
|
| 34 |
+
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
| 35 |
+
ref: ${{ github.event.pull_request.head.ref }}
|
| 36 |
+
# custom token is required to trigger actions after reformatting + pushing
|
| 37 |
+
token: ${{ secrets.NEMO_REFORMAT_TOKEN }}
|
| 38 |
+
fetch-depth: 0
|
| 39 |
+
|
| 40 |
+
- name: Get changed files
|
| 41 |
+
id: changed-files
|
| 42 |
+
uses: step-security/changed-files@v45.0.1
|
| 43 |
+
with:
|
| 44 |
+
files: |
|
| 45 |
+
**.py
|
| 46 |
+
|
| 47 |
+
- name: Setup Python env
|
| 48 |
+
uses: actions/setup-python@v6
|
| 49 |
+
with:
|
| 50 |
+
python-version: "3.10"
|
| 51 |
+
|
| 52 |
+
- name: black
|
| 53 |
+
uses: psf/black@stable
|
| 54 |
+
if: ${{ steps.changed-files.outputs.any_changed == 'true' }}
|
| 55 |
+
with:
|
| 56 |
+
options: "--verbose"
|
| 57 |
+
# apply only to changed files (pass explicitly the files)
|
| 58 |
+
src: "${{ steps.changed-files.outputs.all_changed_files }}"
|
| 59 |
+
version: "~= 24.3"
|
| 60 |
+
|
| 61 |
+
- name: isort
|
| 62 |
+
uses: isort/isort-action@v1
|
| 63 |
+
if: ${{ steps.changed-files.outputs.any_changed == 'true' }}
|
| 64 |
+
with:
|
| 65 |
+
isort-version: "5.13.2"
|
| 66 |
+
# reformat all files with isort – safe since the whole repo is already reformatted
|
| 67 |
+
configuration: ""
|
| 68 |
+
|
| 69 |
+
- uses: EndBug/add-and-commit@v9
|
| 70 |
+
# Commit changes. Nothing is committed if no changes.
|
| 71 |
+
with:
|
| 72 |
+
message: Apply isort and black reformatting
|
| 73 |
+
commit: --signoff
|
.github/workflows/code-init-file-checker.yml
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Check __init__ files
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
pull_request:
|
| 5 |
+
types: [opened, synchronize, reopened]
|
| 6 |
+
|
| 7 |
+
jobs:
|
| 8 |
+
check-init-files:
|
| 9 |
+
runs-on: ubuntu-latest
|
| 10 |
+
steps:
|
| 11 |
+
- name: Checkout
|
| 12 |
+
uses: actions/checkout@v6
|
| 13 |
+
|
| 14 |
+
- name: Set up Python
|
| 15 |
+
uses: actions/setup-python@v6
|
| 16 |
+
with:
|
| 17 |
+
python-version: "3.11"
|
| 18 |
+
|
| 19 |
+
- name: Install init-file-checker
|
| 20 |
+
run: pip install init-file-checker
|
| 21 |
+
|
| 22 |
+
- name: Run init-file-checker
|
| 23 |
+
run: init-file-checker nemo/
|
.github/workflows/code-linting.yml
ADDED
|
@@ -0,0 +1,160 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: PyLint and flake8 linting
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
pull_request:
|
| 5 |
+
types: [opened, synchronize, reopened, labeled, unlabeled]
|
| 6 |
+
workflow_call:
|
| 7 |
+
|
| 8 |
+
jobs:
|
| 9 |
+
linting:
|
| 10 |
+
name: "Domain: ${{ matrix.domain }}"
|
| 11 |
+
runs-on: ubuntu-latest
|
| 12 |
+
strategy:
|
| 13 |
+
fail-fast: false
|
| 14 |
+
matrix:
|
| 15 |
+
domain: [speech, other]
|
| 16 |
+
env:
|
| 17 |
+
DOMAIN: ${{ matrix.domain }}
|
| 18 |
+
steps:
|
| 19 |
+
- name: Checkout
|
| 20 |
+
uses: actions/checkout@v6
|
| 21 |
+
|
| 22 |
+
- name: Select filter
|
| 23 |
+
id: filter
|
| 24 |
+
run: |
|
| 25 |
+
if [[ "$DOMAIN" == "speech" ]]; then
|
| 26 |
+
FILTER=$(jq -crn '[
|
| 27 |
+
"nemo/collections/common/data/lhotse/*.py",
|
| 28 |
+
"nemo/collections/asr/**/*.py",
|
| 29 |
+
"nemo/collections/tts/**/*.py",
|
| 30 |
+
"nemo/collections/audio/**/*.py",
|
| 31 |
+
"nemo/collections/multimodal/speech_llm/**/*.py",
|
| 32 |
+
"nemo/collections/speechlm/**/*.py",
|
| 33 |
+
"nemo/collections/speechlm2/**/*.py"
|
| 34 |
+
] | join(",")')
|
| 35 |
+
|
| 36 |
+
else
|
| 37 |
+
FILTER=$(jq -crn '[
|
| 38 |
+
"nemo/**/*.py",
|
| 39 |
+
"!nemo/collections/common/data/lhotse/*.py",
|
| 40 |
+
"!nemo/collections/asr/**/*.py",
|
| 41 |
+
"!nemo/collections/tts/**/*.py",
|
| 42 |
+
"!nemo/collections/audio/**/*.py",
|
| 43 |
+
"!nemo/collections/multimodal/speech_llm/**/*.py",
|
| 44 |
+
"!nemo/collections/speechlm/**/*.py",
|
| 45 |
+
"!nemo/collections/speechlm2/**/*.py",
|
| 46 |
+
"!nemo/export/**/*.py"
|
| 47 |
+
] | join(",")')
|
| 48 |
+
fi
|
| 49 |
+
|
| 50 |
+
echo "main=$FILTER" | tee -a "$GITHUB_OUTPUT"
|
| 51 |
+
|
| 52 |
+
- name: Get changed files
|
| 53 |
+
id: changed-files
|
| 54 |
+
uses: step-security/changed-files@v45.0.1
|
| 55 |
+
with:
|
| 56 |
+
files: ${{ steps.filter.outputs.main }}
|
| 57 |
+
files_separator: ","
|
| 58 |
+
separator: " "
|
| 59 |
+
|
| 60 |
+
- name: Run PyLint
|
| 61 |
+
id: pylint
|
| 62 |
+
env:
|
| 63 |
+
CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }}
|
| 64 |
+
SKIP_DOCS: ${{ contains(github.event.pull_request.labels.*.name, 'skip-docs') }}
|
| 65 |
+
SKIP_LINTING: ${{ contains(github.event.pull_request.labels.*.name, 'skip-linting') }}
|
| 66 |
+
run: |
|
| 67 |
+
if [[ -z "$CHANGED_FILES" ]]; then
|
| 68 |
+
echo Nothing to lint.
|
| 69 |
+
echo "exit-code=0" | tee -a "$GITHUB_OUTPUT"
|
| 70 |
+
exit 0
|
| 71 |
+
fi
|
| 72 |
+
|
| 73 |
+
if [[ $SKIP_DOCS == true ]]; then
|
| 74 |
+
ADDITIONAL_PYLINT_ARGS="--disable=C0115,C0116"
|
| 75 |
+
else
|
| 76 |
+
ADDITIONAL_PYLINT_ARGS=""
|
| 77 |
+
fi
|
| 78 |
+
|
| 79 |
+
if [[ $SKIP_LINTING == true ]]; then
|
| 80 |
+
ADDITIONAL_PYLINT_ARGS="--exit-zero"
|
| 81 |
+
fi
|
| 82 |
+
|
| 83 |
+
pip install pylint
|
| 84 |
+
set +e
|
| 85 |
+
pylint $ADDITIONAL_PYLINT_ARGS --output "pylintrc.$DOMAIN.txt" --rcfile ".pylintrc.$DOMAIN" ${CHANGED_FILES[@]}
|
| 86 |
+
echo "exit-code=$?" | tee -a "$GITHUB_OUTPUT"
|
| 87 |
+
|
| 88 |
+
- name: Run flake8
|
| 89 |
+
id: flake8
|
| 90 |
+
env:
|
| 91 |
+
CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }}
|
| 92 |
+
SKIP_LINTING: ${{ contains(github.event.pull_request.labels.*.name, 'skip-linting') }}
|
| 93 |
+
run: |
|
| 94 |
+
if [[ -z "$CHANGED_FILES" ]]; then
|
| 95 |
+
echo Nothing to lint.
|
| 96 |
+
echo "exit-code=0" | tee -a "$GITHUB_OUTPUT"
|
| 97 |
+
exit 0
|
| 98 |
+
fi
|
| 99 |
+
|
| 100 |
+
if [[ $SKIP_LINTING == true ]]; then
|
| 101 |
+
ADDITIONAL_FLAKE8_ARGS="--exit-zero"
|
| 102 |
+
else
|
| 103 |
+
ADDITIONAL_FLAKE8_ARGS=""
|
| 104 |
+
fi
|
| 105 |
+
|
| 106 |
+
pip install flake8
|
| 107 |
+
set +e
|
| 108 |
+
flake8 $ADDITIONAL_FLAKE8_ARGS --output "flake8.$DOMAIN.txt" --config ".flake8.$DOMAIN" ${CHANGED_FILES[@]}
|
| 109 |
+
echo "exit-code=$?" | tee -a "$GITHUB_OUTPUT"
|
| 110 |
+
|
| 111 |
+
- name: Summary
|
| 112 |
+
env:
|
| 113 |
+
PYLINT: ${{ steps.pylint.outputs.exit-code == 0 }}
|
| 114 |
+
FLAKE8: ${{ steps.flake8.outputs.exit-code == 0 }}
|
| 115 |
+
run: |
|
| 116 |
+
|
| 117 |
+
if [[ "$PYLINT" != "true" ]]; then
|
| 118 |
+
echo "Pylint output:" | tee -a $GITHUB_STEP_SUMMARY
|
| 119 |
+
|
| 120 |
+
echo '```' | tee -a $GITHUB_STEP_SUMMARY
|
| 121 |
+
cat pylintrc.$DOMAIN.txt | tee -a $GITHUB_STEP_SUMMARY
|
| 122 |
+
echo '```' | tee -a $GITHUB_STEP_SUMMARY
|
| 123 |
+
fi
|
| 124 |
+
|
| 125 |
+
if [[ "$FLAKE8" != "true" ]]; then
|
| 126 |
+
echo "Flake8 output:" | tee -a $GITHUB_STEP_SUMMARY
|
| 127 |
+
|
| 128 |
+
echo '```' | tee -a $GITHUB_STEP_SUMMARY
|
| 129 |
+
cat flake8.$DOMAIN.txt | tee -a $GITHUB_STEP_SUMMARY
|
| 130 |
+
echo '```' | tee -a $GITHUB_STEP_SUMMARY
|
| 131 |
+
fi
|
| 132 |
+
|
| 133 |
+
if [[ "$PYLINT" != "true" || "$FLAKE8" != "true" ]]; then
|
| 134 |
+
echo "The following directories got scanned:" | tee -a $GITHUB_STEP_SUMMARY
|
| 135 |
+
|
| 136 |
+
echo '```' | tee -a $GITHUB_STEP_SUMMARY
|
| 137 |
+
echo ${{ steps.filter.outputs.main }} | tee -a $GITHUB_STEP_SUMMARY
|
| 138 |
+
echo '```' | tee -a $GITHUB_STEP_SUMMARY
|
| 139 |
+
|
| 140 |
+
exit 1
|
| 141 |
+
fi
|
| 142 |
+
|
| 143 |
+
Nemo_Linting_Test:
|
| 144 |
+
needs: linting
|
| 145 |
+
runs-on: ubuntu-latest
|
| 146 |
+
if: always()
|
| 147 |
+
steps:
|
| 148 |
+
- name: Main
|
| 149 |
+
env:
|
| 150 |
+
RESULTS: ${{ toJson(needs.linting) }}
|
| 151 |
+
run: |
|
| 152 |
+
RESULT=$(echo "$RESULTS" | jq -r '.result')
|
| 153 |
+
|
| 154 |
+
if [[ "$RESULT" == "success" ]]; then
|
| 155 |
+
echo "All passed."
|
| 156 |
+
exit 0
|
| 157 |
+
else
|
| 158 |
+
echo "Some linting domains failed."
|
| 159 |
+
exit 1
|
| 160 |
+
fi
|
.github/workflows/codeql.yml
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# For most projects, this workflow file will not need changing; you simply need
|
| 2 |
+
# to commit it to your repository.
|
| 3 |
+
#
|
| 4 |
+
# You may wish to alter this file to override the set of languages analyzed,
|
| 5 |
+
# or to provide custom queries or build logic.
|
| 6 |
+
#
|
| 7 |
+
# ******** NOTE ********
|
| 8 |
+
# We have attempted to detect the languages in your repository. Please check
|
| 9 |
+
# the `language` matrix defined below to confirm you have the correct set of
|
| 10 |
+
# supported CodeQL languages.
|
| 11 |
+
#
|
| 12 |
+
name: "CodeQL"
|
| 13 |
+
|
| 14 |
+
on:
|
| 15 |
+
push:
|
| 16 |
+
branches: [ "main", "[rv][0-9]*", "gh-pages-src" ]
|
| 17 |
+
pull_request:
|
| 18 |
+
# The branches below must be a subset of the branches above
|
| 19 |
+
branches: [ "main" ]
|
| 20 |
+
schedule:
|
| 21 |
+
- cron: '19 1 * * 4'
|
| 22 |
+
|
| 23 |
+
jobs:
|
| 24 |
+
analyze:
|
| 25 |
+
name: Analyze
|
| 26 |
+
runs-on: ubuntu-latest
|
| 27 |
+
permissions:
|
| 28 |
+
actions: read
|
| 29 |
+
contents: read
|
| 30 |
+
security-events: write
|
| 31 |
+
|
| 32 |
+
strategy:
|
| 33 |
+
fail-fast: false
|
| 34 |
+
matrix:
|
| 35 |
+
language: [ 'python' ]
|
| 36 |
+
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
| 37 |
+
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
| 38 |
+
|
| 39 |
+
steps:
|
| 40 |
+
- name: Checkout repository
|
| 41 |
+
uses: actions/checkout@v6
|
| 42 |
+
|
| 43 |
+
# Initializes the CodeQL tools for scanning.
|
| 44 |
+
- name: Initialize CodeQL
|
| 45 |
+
uses: github/codeql-action/init@v2
|
| 46 |
+
with:
|
| 47 |
+
languages: ${{ matrix.language }}
|
| 48 |
+
# If you wish to specify custom queries, you can do so here or in a config file.
|
| 49 |
+
# By default, queries listed here will override any specified in a config file.
|
| 50 |
+
# Prefix the list here with "+" to use these queries and those in the config file.
|
| 51 |
+
|
| 52 |
+
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
| 53 |
+
queries: security-and-quality # security-extended,
|
| 54 |
+
config-file: ./.github/workflows/config/codeql.yml
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
|
| 58 |
+
# If this step fails, then you should remove it and run the build manually (see below)
|
| 59 |
+
- name: Autobuild
|
| 60 |
+
uses: github/codeql-action/autobuild@v2
|
| 61 |
+
|
| 62 |
+
# ℹ️ Command-line programs to run using the OS shell.
|
| 63 |
+
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
| 64 |
+
|
| 65 |
+
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
| 66 |
+
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
| 67 |
+
|
| 68 |
+
# - run: |
|
| 69 |
+
# echo "Run, Build Application using script"
|
| 70 |
+
# ./location_of_script_within_repo/buildscript.sh
|
| 71 |
+
|
| 72 |
+
- name: Perform CodeQL Analysis
|
| 73 |
+
uses: github/codeql-action/analyze@v2
|
| 74 |
+
with:
|
| 75 |
+
category: "/language:${{matrix.language}}"
|
.github/workflows/community-bot.yml
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Community Bot
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
issues:
|
| 5 |
+
types: [opened, edited, reopened, closed, deleted]
|
| 6 |
+
issue_comment:
|
| 7 |
+
types: [created, edited, deleted]
|
| 8 |
+
|
| 9 |
+
jobs:
|
| 10 |
+
community-bot:
|
| 11 |
+
uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_community_bot.yml@v0.62.0
|
| 12 |
+
with:
|
| 13 |
+
community_project_id: ${{ vars.COMMUNITY_PROJECT_ID }}
|
| 14 |
+
secrets:
|
| 15 |
+
GH_TOKEN: ${{ secrets.PAT }}
|
.github/workflows/config/changelog-config.json
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"categories": [
|
| 3 |
+
{
|
| 4 |
+
"title": "## ASR\n\n<details><summary>Changelog</summary>",
|
| 5 |
+
"labels": ["asr"],
|
| 6 |
+
"exclude_labels": ["cherry-pick"]
|
| 7 |
+
},
|
| 8 |
+
{
|
| 9 |
+
"title": "</details>\n\n## TTS\n\n<details><summary>Changelog</summary>",
|
| 10 |
+
"labels": ["tts"],
|
| 11 |
+
"exclude_labels": ["cherry-pick"]
|
| 12 |
+
},
|
| 13 |
+
{
|
| 14 |
+
"title": "</details>\n\n## NLP / NMT\n\n<details><summary>Changelog</summary>",
|
| 15 |
+
"labels": ["nlp", "nmt", "megatron"],
|
| 16 |
+
"exclude_labels": ["cherry-pick"]
|
| 17 |
+
},
|
| 18 |
+
{
|
| 19 |
+
"title": "</details>\n\n## Text Normalization / Inverse Text Normalization\n\n<details><summary>Changelog</summary>",
|
| 20 |
+
"labels": ["tn", "itn"],
|
| 21 |
+
"exclude_labels": ["cherry-pick"]
|
| 22 |
+
},
|
| 23 |
+
{
|
| 24 |
+
"title": "</details>\n\n## NeMo Tools\n\n<details><summary>Changelog</summary>",
|
| 25 |
+
"labels": ["tools"],
|
| 26 |
+
"exclude_labels": ["cherry-pick"]
|
| 27 |
+
},
|
| 28 |
+
{
|
| 29 |
+
"title": "</details>\n\n## Export\n\n<details><summary>Changelog</summary>",
|
| 30 |
+
"labels": ["export"],
|
| 31 |
+
"exclude_labels": ["cherry-pick"]
|
| 32 |
+
},
|
| 33 |
+
{
|
| 34 |
+
"title": "</details>\n\n## Documentation\n\n<details><summary>Changelog</summary>",
|
| 35 |
+
"labels": ["docs"],
|
| 36 |
+
"exclude_labels": ["cherry-pick"]
|
| 37 |
+
},
|
| 38 |
+
{
|
| 39 |
+
"title": "</details>\n\n## Bugfixes\n\n<details><summary>Changelog</summary>",
|
| 40 |
+
"labels": ["bug"],
|
| 41 |
+
"exclude_labels": ["cherry-pick"]
|
| 42 |
+
},
|
| 43 |
+
{
|
| 44 |
+
"title": "</details>\n\n## Cherrypick\n\n<details><summary>Changelog</summary>",
|
| 45 |
+
"labels": ["cherry-pick"],
|
| 46 |
+
"exclude_labels": ["cherry-pick"]
|
| 47 |
+
}
|
| 48 |
+
],
|
| 49 |
+
"ignore_labels": [
|
| 50 |
+
"ignore"
|
| 51 |
+
],
|
| 52 |
+
"sort": "ASC",
|
| 53 |
+
"template": "\n${{CHANGELOG}}</details>\n\n## Uncategorized:\n\n<details><summary>Changelog</summary>\n\n${{UNCATEGORIZED}}\n</details>\n",
|
| 54 |
+
"pr_template": "- ${{TITLE}} by @${{AUTHOR}} :: PR: #${{NUMBER}}",
|
| 55 |
+
"empty_template": "${{OWNER}}\n${{REPO}}\n${{FROM_TAG}}\n${{TO_TAG}}",
|
| 56 |
+
"label_extractor": [
|
| 57 |
+
{
|
| 58 |
+
"pattern": "(.*tts.*)|(.*g2p.*)",
|
| 59 |
+
"target": "tts",
|
| 60 |
+
"flags": "gimu",
|
| 61 |
+
"on_property": ["title", "body"]
|
| 62 |
+
},
|
| 63 |
+
{
|
| 64 |
+
"pattern": "(.*asr.*)|(.*ctc.*)|(.*rnnt.*)|(.*transducer.*)|(.*dali.*)|(.*k2.*)",
|
| 65 |
+
"target": "asr",
|
| 66 |
+
"flags": "gimu",
|
| 67 |
+
"on_property": ["title", "body"]
|
| 68 |
+
},
|
| 69 |
+
{
|
| 70 |
+
"pattern": "(.*nlp.*)|(.*punctuation.*)|(.*capitalization.*)|(.*entity.*)|(.*glue.*)|(.*entity.*)|(.*retrieval.*)|(.*entity.*)|(.*intent.*)|(.*slot.*)|(.*entity.*)|(.*language.*)|(.*qa.*)|(.*token class.*)|(.*text class.*)",
|
| 71 |
+
"target": "nlp",
|
| 72 |
+
"flags": "gimu",
|
| 73 |
+
"on_property": ["title", "body"]
|
| 74 |
+
},
|
| 75 |
+
{
|
| 76 |
+
"pattern": "(.*nmt.*)|(.*bignlp.*)|(.*megatron.*)|(.*machine.*)|(.*translation.*)|(.*gpt.*)",
|
| 77 |
+
"target": "nmt",
|
| 78 |
+
"flags": "gimu",
|
| 79 |
+
"on_property": ["title", "body"]
|
| 80 |
+
},
|
| 81 |
+
{
|
| 82 |
+
"pattern": "(.*tn.*)|(.*itn.*)|(.*text norm.*)",
|
| 83 |
+
"target": "tn",
|
| 84 |
+
"flags": "gimu",
|
| 85 |
+
"on_property": ["title", "body"]
|
| 86 |
+
},
|
| 87 |
+
{
|
| 88 |
+
"pattern": "(.*sde.*)|(.*ctc segment.*)",
|
| 89 |
+
"target": "tools",
|
| 90 |
+
"flags": "gimu",
|
| 91 |
+
"on_property": ["title", "body"]
|
| 92 |
+
},
|
| 93 |
+
{
|
| 94 |
+
"pattern": "(.*trt.*)|(.*onnx.*)|(.*export.*)",
|
| 95 |
+
"target": "export",
|
| 96 |
+
"flags": "gimu",
|
| 97 |
+
"on_property": ["title", "body"]
|
| 98 |
+
},
|
| 99 |
+
{
|
| 100 |
+
"pattern": "(.*\\[x\\] Documentation.*)",
|
| 101 |
+
"target": "docs",
|
| 102 |
+
"flags": "gmu",
|
| 103 |
+
"on_property": ["title", "body"]
|
| 104 |
+
},
|
| 105 |
+
{
|
| 106 |
+
"pattern": "(.*\\[x\\] Bugfix.*)|(.*patch.*)",
|
| 107 |
+
"target": "bug",
|
| 108 |
+
"flags": "gmu",
|
| 109 |
+
"on_property": ["title", "body"]
|
| 110 |
+
},
|
| 111 |
+
{
|
| 112 |
+
"pattern": "(.*cherry-pick.*)|(.*cherrypick.*)",
|
| 113 |
+
"target": "cherrypick",
|
| 114 |
+
"flags": "gimu",
|
| 115 |
+
"on_property": ["title", "body"]
|
| 116 |
+
}
|
| 117 |
+
],
|
| 118 |
+
"duplicate_filter": {
|
| 119 |
+
"pattern": ".+",
|
| 120 |
+
"on_property": "title",
|
| 121 |
+
"method": "match"
|
| 122 |
+
},
|
| 123 |
+
"transformers": [
|
| 124 |
+
],
|
| 125 |
+
"max_tags_to_fetch": 100,
|
| 126 |
+
"max_pull_requests": 500,
|
| 127 |
+
"max_back_track_time_days": 365,
|
| 128 |
+
"exclude_merge_branches": [
|
| 129 |
+
],
|
| 130 |
+
"tag_resolver": {
|
| 131 |
+
"method": "semver"
|
| 132 |
+
}
|
| 133 |
+
}
|
| 134 |
+
|
.github/workflows/config/codeql.yml
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: "CodeQL config"
|
| 2 |
+
|
| 3 |
+
paths:
|
| 4 |
+
- nemo/
|
| 5 |
+
- tests/
|
| 6 |
+
- tools/
|
| 7 |
+
- scripts/
|
| 8 |
+
- examples/
|
| 9 |
+
- .github/
|
.github/workflows/copyright-check.yml
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) 2020-2021, NVIDIA CORPORATION.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
name: Copyright check
|
| 16 |
+
|
| 17 |
+
on:
|
| 18 |
+
pull_request:
|
| 19 |
+
|
| 20 |
+
jobs:
|
| 21 |
+
copyright-check:
|
| 22 |
+
uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_copyright_check.yml@v0.2.0
|
.github/workflows/install-test.yml
ADDED
|
@@ -0,0 +1,283 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: CI-Install-Check
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
pull_request:
|
| 5 |
+
paths:
|
| 6 |
+
- "**"
|
| 7 |
+
|
| 8 |
+
concurrency:
|
| 9 |
+
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
| 10 |
+
cancel-in-progress: true
|
| 11 |
+
|
| 12 |
+
jobs:
|
| 13 |
+
test-installs-macos:
|
| 14 |
+
name: ${{ matrix.os }}-py${{ matrix.python }}-${{ matrix.installer }}
|
| 15 |
+
runs-on: ${{ matrix.os }}
|
| 16 |
+
strategy:
|
| 17 |
+
fail-fast: false
|
| 18 |
+
matrix:
|
| 19 |
+
os: [macos-latest]
|
| 20 |
+
python: ["3.10", "3.11", "3.12"]
|
| 21 |
+
installer: ["pip-install", "nemo-install"]
|
| 22 |
+
steps:
|
| 23 |
+
- name: Checkout repo
|
| 24 |
+
uses: actions/checkout@v6
|
| 25 |
+
|
| 26 |
+
- name: Check disk space before cleanup
|
| 27 |
+
run: df -h
|
| 28 |
+
|
| 29 |
+
- name: Free up disk space
|
| 30 |
+
run: |
|
| 31 |
+
# Remove unnecessary files on macOS
|
| 32 |
+
sudo rm -rf /usr/local/lib/android || true
|
| 33 |
+
sudo rm -rf /usr/local/.ghcup || true
|
| 34 |
+
sudo rm -rf /usr/local/lib/node_modules || true
|
| 35 |
+
brew cleanup || true
|
| 36 |
+
# Clear pip cache
|
| 37 |
+
pip cache purge || true
|
| 38 |
+
|
| 39 |
+
- name: Check disk space after cleanup
|
| 40 |
+
run: df -h
|
| 41 |
+
|
| 42 |
+
- uses: actions/setup-python@v6
|
| 43 |
+
with:
|
| 44 |
+
python-version: "${{ matrix.python }}"
|
| 45 |
+
|
| 46 |
+
- name: Install NeMo
|
| 47 |
+
env:
|
| 48 |
+
INSTALLER: ${{ matrix.installer }}
|
| 49 |
+
NEMO_TAG: ${{ github.sha }}
|
| 50 |
+
NEMO_REPO: ${{ github.server_url }}/${{ github.repository }}
|
| 51 |
+
run: |
|
| 52 |
+
if [[ "$INSTALLER" == "pip-install" ]]; then
|
| 53 |
+
pip install --no-cache-dir -U pip
|
| 54 |
+
pip install --no-cache-dir ".[all]"
|
| 55 |
+
else
|
| 56 |
+
export NEMO_TAG
|
| 57 |
+
export NEMO_REPO
|
| 58 |
+
export INSTALL_DIR=$(pwd)
|
| 59 |
+
|
| 60 |
+
pip install --no-cache-dir ".[all]"
|
| 61 |
+
fi
|
| 62 |
+
|
| 63 |
+
- name: Check disk space after installation
|
| 64 |
+
run: df -h
|
| 65 |
+
|
| 66 |
+
- name: Run import checks
|
| 67 |
+
run: |
|
| 68 |
+
# Run import checks
|
| 69 |
+
for collection in "asr" "tts" "lightning" "core"; do
|
| 70 |
+
python tests/core_ptl/check_imports.py --domain "$collection"
|
| 71 |
+
done
|
| 72 |
+
|
| 73 |
+
test-installs-linux-amd:
|
| 74 |
+
name: ubuntu-22.04-amd-py${{ matrix.python }}-${{ matrix.installer }}
|
| 75 |
+
runs-on: ubuntu-22.04
|
| 76 |
+
strategy:
|
| 77 |
+
fail-fast: false
|
| 78 |
+
matrix:
|
| 79 |
+
python: ["3.10", "3.11", "3.12"]
|
| 80 |
+
installer: ["pip-install", "nemo-install"]
|
| 81 |
+
steps:
|
| 82 |
+
- name: Checkout repo
|
| 83 |
+
uses: actions/checkout@v6
|
| 84 |
+
|
| 85 |
+
- name: Check disk space before cleanup
|
| 86 |
+
run: df -h
|
| 87 |
+
|
| 88 |
+
- name: Free up disk space
|
| 89 |
+
run: |
|
| 90 |
+
# Remove unnecessary packages and files on Ubuntu
|
| 91 |
+
sudo apt-get clean
|
| 92 |
+
sudo rm -rf /usr/local/lib/android || true
|
| 93 |
+
sudo rm -rf /opt/ghc || true
|
| 94 |
+
sudo rm -rf /usr/local/.ghcup || true
|
| 95 |
+
sudo rm -rf /usr/share/dotnet || true
|
| 96 |
+
sudo rm -rf /opt/az || true
|
| 97 |
+
# Clear pip and npm caches
|
| 98 |
+
pip cache purge || true
|
| 99 |
+
sudo npm cache clean --force || true
|
| 100 |
+
|
| 101 |
+
- name: Check disk space after cleanup
|
| 102 |
+
run: df -h
|
| 103 |
+
|
| 104 |
+
- name: Install Python
|
| 105 |
+
uses: actions/setup-python@v6
|
| 106 |
+
with:
|
| 107 |
+
python-version: ${{ matrix.python }}
|
| 108 |
+
|
| 109 |
+
- name: Install NeMo
|
| 110 |
+
env:
|
| 111 |
+
INSTALLER: ${{ matrix.installer }}
|
| 112 |
+
run: |
|
| 113 |
+
if [ "$INSTALLER" = "pip-install" ]; then
|
| 114 |
+
pip install --no-cache-dir --upgrade pip
|
| 115 |
+
pip install --no-cache-dir ".[all]"
|
| 116 |
+
else
|
| 117 |
+
export INSTALL_DIR=$(pwd)
|
| 118 |
+
pip install --no-cache-dir ".[all]"
|
| 119 |
+
fi
|
| 120 |
+
|
| 121 |
+
- name: Check disk space after installation
|
| 122 |
+
run: df -h
|
| 123 |
+
|
| 124 |
+
- name: Run import checks
|
| 125 |
+
run: |
|
| 126 |
+
# Run import checks
|
| 127 |
+
for collection in "asr" "tts" "lightning" "core"; do
|
| 128 |
+
python tests/core_ptl/check_imports.py --domain "$collection"
|
| 129 |
+
done
|
| 130 |
+
|
| 131 |
+
test-asr-install-linux-amd:
|
| 132 |
+
name: ubuntu-22.04-amd-py${{ matrix.python }}-asr
|
| 133 |
+
runs-on: ubuntu-22.04
|
| 134 |
+
strategy:
|
| 135 |
+
fail-fast: false
|
| 136 |
+
matrix:
|
| 137 |
+
python: ["3.10", "3.11", "3.12"]
|
| 138 |
+
steps:
|
| 139 |
+
- name: Checkout repo
|
| 140 |
+
uses: actions/checkout@v6
|
| 141 |
+
|
| 142 |
+
- name: Check disk space before cleanup
|
| 143 |
+
run: df -h
|
| 144 |
+
|
| 145 |
+
- name: Free up disk space
|
| 146 |
+
run: |
|
| 147 |
+
# Remove unnecessary packages and files on Ubuntu
|
| 148 |
+
sudo apt-get clean
|
| 149 |
+
sudo rm -rf /usr/local/lib/android || true
|
| 150 |
+
sudo rm -rf /opt/ghc || true
|
| 151 |
+
sudo rm -rf /usr/local/.ghcup || true
|
| 152 |
+
sudo rm -rf /usr/share/dotnet || true
|
| 153 |
+
sudo rm -rf /opt/az || true
|
| 154 |
+
# Clear pip and npm caches
|
| 155 |
+
pip cache purge || true
|
| 156 |
+
sudo npm cache clean --force || true
|
| 157 |
+
|
| 158 |
+
- name: Check disk space after cleanup
|
| 159 |
+
run: df -h
|
| 160 |
+
|
| 161 |
+
- name: Install Python
|
| 162 |
+
uses: actions/setup-python@v6
|
| 163 |
+
with:
|
| 164 |
+
python-version: ${{ matrix.python }}
|
| 165 |
+
|
| 166 |
+
- name: Install NeMo
|
| 167 |
+
run: |
|
| 168 |
+
pip install --no-cache-dir --upgrade pip
|
| 169 |
+
pip install --no-cache-dir ".[asr]"
|
| 170 |
+
|
| 171 |
+
- name: Check disk space after installation
|
| 172 |
+
run: df -h
|
| 173 |
+
|
| 174 |
+
- name: Run import checks
|
| 175 |
+
run: |
|
| 176 |
+
# Run import checks
|
| 177 |
+
python tests/core_ptl/check_imports.py --domain asr
|
| 178 |
+
|
| 179 |
+
test-installs-linux-arm:
|
| 180 |
+
name: ubuntu-22.04-arm-py${{ matrix.python }}-${{ matrix.installer }}
|
| 181 |
+
runs-on: ubuntu-22.04-arm
|
| 182 |
+
strategy:
|
| 183 |
+
fail-fast: false
|
| 184 |
+
matrix:
|
| 185 |
+
python: ["3.10", "3.11", "3.12"]
|
| 186 |
+
installer: ["pip-install", "nemo-install"]
|
| 187 |
+
steps:
|
| 188 |
+
- name: Checkout repo
|
| 189 |
+
uses: actions/checkout@v6
|
| 190 |
+
|
| 191 |
+
- name: Check disk space before cleanup
|
| 192 |
+
run: df -h
|
| 193 |
+
|
| 194 |
+
- name: Free up disk space
|
| 195 |
+
run: |
|
| 196 |
+
# Remove unnecessary packages and files on Ubuntu ARM
|
| 197 |
+
sudo apt-get clean
|
| 198 |
+
sudo rm -rf /usr/local/lib/android || true
|
| 199 |
+
sudo rm -rf /opt/ghc || true
|
| 200 |
+
sudo rm -rf /usr/local/.ghcup || true
|
| 201 |
+
sudo rm -rf /usr/share/dotnet || true
|
| 202 |
+
sudo rm -rf /opt/az || true
|
| 203 |
+
# Clear pip and npm caches
|
| 204 |
+
pip cache purge || true
|
| 205 |
+
sudo npm cache clean --force || true
|
| 206 |
+
|
| 207 |
+
- name: Check disk space after cleanup
|
| 208 |
+
run: df -h
|
| 209 |
+
|
| 210 |
+
- name: Install Python
|
| 211 |
+
uses: actions/setup-python@v6
|
| 212 |
+
with:
|
| 213 |
+
python-version: ${{ matrix.python }}
|
| 214 |
+
|
| 215 |
+
- name: Install NeMo
|
| 216 |
+
env:
|
| 217 |
+
INSTALLER: ${{ matrix.installer }}
|
| 218 |
+
run: |
|
| 219 |
+
if [ "$INSTALLER" = "pip-install" ]; then
|
| 220 |
+
pip install --no-cache-dir --upgrade pip
|
| 221 |
+
pip install --no-cache-dir ".[all]"
|
| 222 |
+
else
|
| 223 |
+
export INSTALL_DIR=$(pwd)
|
| 224 |
+
pip install --no-cache-dir ".[all]"
|
| 225 |
+
fi
|
| 226 |
+
|
| 227 |
+
- name: Check disk space after installation
|
| 228 |
+
run: df -h
|
| 229 |
+
|
| 230 |
+
- name: Run import checks
|
| 231 |
+
run: |
|
| 232 |
+
# Run import checks
|
| 233 |
+
for collection in "asr" "tts" "lightning" "core"; do
|
| 234 |
+
python tests/core_ptl/check_imports.py --domain "$collection"
|
| 235 |
+
done
|
| 236 |
+
|
| 237 |
+
test-asr-installs-linux-arm:
|
| 238 |
+
name: ubuntu-22.04-arm-py${{ matrix.python }}-asr
|
| 239 |
+
runs-on: ubuntu-22.04-arm
|
| 240 |
+
strategy:
|
| 241 |
+
fail-fast: false
|
| 242 |
+
matrix:
|
| 243 |
+
python: ["3.10", "3.11", "3.12"]
|
| 244 |
+
steps:
|
| 245 |
+
- name: Checkout repo
|
| 246 |
+
uses: actions/checkout@v6
|
| 247 |
+
|
| 248 |
+
- name: Check disk space before cleanup
|
| 249 |
+
run: df -h
|
| 250 |
+
|
| 251 |
+
- name: Free up disk space
|
| 252 |
+
run: |
|
| 253 |
+
# Remove unnecessary packages and files on Ubuntu ARM
|
| 254 |
+
sudo apt-get clean
|
| 255 |
+
sudo rm -rf /usr/local/lib/android || true
|
| 256 |
+
sudo rm -rf /opt/ghc || true
|
| 257 |
+
sudo rm -rf /usr/local/.ghcup || true
|
| 258 |
+
sudo rm -rf /usr/share/dotnet || true
|
| 259 |
+
sudo rm -rf /opt/az || true
|
| 260 |
+
# Clear pip and npm caches
|
| 261 |
+
pip cache purge || true
|
| 262 |
+
sudo npm cache clean --force || true
|
| 263 |
+
|
| 264 |
+
- name: Check disk space after cleanup
|
| 265 |
+
run: df -h
|
| 266 |
+
|
| 267 |
+
- name: Install Python
|
| 268 |
+
uses: actions/setup-python@v6
|
| 269 |
+
with:
|
| 270 |
+
python-version: ${{ matrix.python }}
|
| 271 |
+
|
| 272 |
+
- name: Install NeMo
|
| 273 |
+
run: |
|
| 274 |
+
pip install --no-cache-dir --upgrade pip
|
| 275 |
+
pip install --no-cache-dir ".[asr]"
|
| 276 |
+
|
| 277 |
+
- name: Check disk space after installation
|
| 278 |
+
run: df -h
|
| 279 |
+
|
| 280 |
+
- name: Run import checks
|
| 281 |
+
run: |
|
| 282 |
+
# Run import checks
|
| 283 |
+
python tests/core_ptl/check_imports.py --domain asr
|
.github/workflows/labeler.yml
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: "Pull Request Labeler"
|
| 2 |
+
on:
|
| 3 |
+
- pull_request_target
|
| 4 |
+
|
| 5 |
+
jobs:
|
| 6 |
+
triage:
|
| 7 |
+
permissions:
|
| 8 |
+
contents: read
|
| 9 |
+
pull-requests: write
|
| 10 |
+
runs-on: ubuntu-latest
|
| 11 |
+
steps:
|
| 12 |
+
- uses: actions/labeler@v4
|
| 13 |
+
with:
|
| 14 |
+
repo-token: "${{ secrets.GITHUB_TOKEN }}"
|
.github/workflows/mcore-tag-bump-bot.yml
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Regularly updates the CI container
|
| 2 |
+
name: Megatron Tag Bump Bot
|
| 3 |
+
on:
|
| 4 |
+
workflow_dispatch:
|
| 5 |
+
schedule:
|
| 6 |
+
- cron: 0 0 * * *
|
| 7 |
+
|
| 8 |
+
jobs:
|
| 9 |
+
get-release-branch-names:
|
| 10 |
+
runs-on: ubuntu-latest
|
| 11 |
+
outputs:
|
| 12 |
+
mcore: ${{ steps.get-branch.outputs.mcore_release_branch }}
|
| 13 |
+
nemo: ${{ steps.get-branch.outputs.nemo_release_branch }}
|
| 14 |
+
steps:
|
| 15 |
+
- name: Get release branch names
|
| 16 |
+
id: get-branch
|
| 17 |
+
run: |
|
| 18 |
+
latest_branch=$(git ls-remote --heads https://github.com/NVIDIA/Megatron-LM.git 'refs/heads/core_r*' |
|
| 19 |
+
grep -o 'core_r[0-9]\+\.[0-9]\+\.[0-9]\+' |
|
| 20 |
+
sort -V |
|
| 21 |
+
tail -n1)
|
| 22 |
+
echo "mcore_release_branch=$latest_branch" >> $GITHUB_OUTPUT
|
| 23 |
+
|
| 24 |
+
latest_branch=$(git ls-remote --heads https://github.com/NVIDIA/NeMo.git 'refs/heads/r*' |
|
| 25 |
+
grep -o 'r[0-9]\+\.[0-9]\+\.[0-9]\+' |
|
| 26 |
+
sort -V |
|
| 27 |
+
tail -n1)
|
| 28 |
+
echo "nemo_release_branch=$latest_branch" >> $GITHUB_OUTPUT
|
| 29 |
+
|
| 30 |
+
bump-tags:
|
| 31 |
+
needs: [get-release-branch-names]
|
| 32 |
+
strategy:
|
| 33 |
+
fail-fast: false
|
| 34 |
+
matrix:
|
| 35 |
+
include:
|
| 36 |
+
- nemo-target-branch: ${{ needs.get-release-branch-names.outputs.nemo }}
|
| 37 |
+
mcore-target-branch: ${{ needs.get-release-branch-names.outputs.mcore }}
|
| 38 |
+
- nemo-target-branch: main
|
| 39 |
+
mcore-target-branch: main
|
| 40 |
+
uses: ./.github/workflows/_bump_mcore_tag.yml
|
| 41 |
+
with:
|
| 42 |
+
nemo-target-branch: ${{ matrix.nemo-target-branch }}
|
| 43 |
+
mcore-target-branch: ${{ matrix.mcore-target-branch }}
|
| 44 |
+
secrets:
|
| 45 |
+
PAT: ${{ secrets.PAT }}
|
| 46 |
+
|
| 47 |
+
notify:
|
| 48 |
+
if: failure()
|
| 49 |
+
runs-on: ubuntu-latest
|
| 50 |
+
needs: [bump-tags]
|
| 51 |
+
steps:
|
| 52 |
+
- name: Notify
|
| 53 |
+
env:
|
| 54 |
+
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
| 55 |
+
SLACK_WEBHOOK_ADMIN: <!subteam^${{ secrets.SLACK_WEBHOOK_ADMIN }}>
|
| 56 |
+
GITHUB_RUN_ID: ${{ github.run_id }}
|
| 57 |
+
GITHUB_REPOSITORY: ${{ github.repository }}
|
| 58 |
+
run: |
|
| 59 |
+
curl -X POST \
|
| 60 |
+
-H 'Content-type: application/json' \
|
| 61 |
+
--data "{\"text\":\":robot_joy: <https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}|Mcore-bump-bot workflow> failed. Please fix manually.\n\ncc ${SLACK_WEBHOOK_ADMIN}\"}" \
|
| 62 |
+
$SLACK_WEBHOOK
|
.github/workflows/monitor-single-vm.yml
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: ~shut down a single VM
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
workflow_call:
|
| 5 |
+
inputs:
|
| 6 |
+
vm:
|
| 7 |
+
type: string
|
| 8 |
+
description: Name of VM
|
| 9 |
+
required: true
|
| 10 |
+
n_gpus:
|
| 11 |
+
type: string
|
| 12 |
+
description: Number of GPUs this VM has
|
| 13 |
+
required: true
|
| 14 |
+
|
| 15 |
+
jobs:
|
| 16 |
+
check-status-and-maybe-shutdown:
|
| 17 |
+
environment: main
|
| 18 |
+
runs-on: ${{ inputs.vm }}
|
| 19 |
+
outputs:
|
| 20 |
+
status: ${{ steps.status.outputs.main }}
|
| 21 |
+
steps:
|
| 22 |
+
- name: Check status
|
| 23 |
+
id: status
|
| 24 |
+
run: |
|
| 25 |
+
docker run --rm --runtime=nvidia --gpus ${{ inputs.n_gpus }} ubuntu nvidia-smi
|
| 26 |
+
|
| 27 |
+
NUM_GPUS=$(nvidia-smi --query-gpu=name --format=csv,noheader | wc -l)
|
| 28 |
+
|
| 29 |
+
if [[ $NUM_GPUS -ne ${{ inputs.n_gpus }} ]]; then
|
| 30 |
+
echo "Issues with GPU detected, will take this runner offline."
|
| 31 |
+
echo "main=degraded" >> "$GITHUB_OUTPUT"
|
| 32 |
+
else
|
| 33 |
+
echo "main=healthy" >> "$GITHUB_OUTPUT"
|
| 34 |
+
fi
|
| 35 |
+
|
| 36 |
+
- name: Send Slack message & Disconnect runner from GitHub
|
| 37 |
+
if: ${{ steps.status.outputs.main == 'degraded' || failure() }}
|
| 38 |
+
run: |
|
| 39 |
+
MESSAGE='{
|
| 40 |
+
"blocks": [
|
| 41 |
+
{
|
| 42 |
+
"type": "section",
|
| 43 |
+
"text": {
|
| 44 |
+
"type": "mrkdwn",
|
| 45 |
+
"text": ":alert: VM bot 🤖: Hey <!subteam^${{ secrets.SLACK_WEBHOOK_ADMIN }}>: VM `${{ inputs.vm }}` is having not the best day of their life, maybe bring them an apple or so."
|
| 46 |
+
}
|
| 47 |
+
}
|
| 48 |
+
]
|
| 49 |
+
}'
|
| 50 |
+
|
| 51 |
+
curl -X POST -H "Content-type: application/json" --data "$MESSAGE" ${{ secrets.SLACK_WEBHOOK }}
|
| 52 |
+
|
| 53 |
+
cd /home/azureuser/actions-runner
|
| 54 |
+
echo ${{ secrets.VM_KEY }} | sudo -S ./svc.sh stop
|
.github/workflows/monitor-vms.yml
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Regularly updates the CI container
|
| 2 |
+
name: Reboots VMs in a controlled way
|
| 3 |
+
on:
|
| 4 |
+
schedule:
|
| 5 |
+
- cron: 0/15 * * * *
|
| 6 |
+
workflow_dispatch:
|
| 7 |
+
|
| 8 |
+
jobs:
|
| 9 |
+
pre-flight:
|
| 10 |
+
runs-on: ubuntu-latest
|
| 11 |
+
if: github.repository_owner == 'NVIDIA'
|
| 12 |
+
outputs:
|
| 13 |
+
list-of-vms: ${{ steps.main.outputs.main }}
|
| 14 |
+
environment: main
|
| 15 |
+
steps:
|
| 16 |
+
- name: Get list of VMs
|
| 17 |
+
id: main
|
| 18 |
+
env:
|
| 19 |
+
GITHUB_TOKEN: ${{ secrets.PAT }}
|
| 20 |
+
run: |
|
| 21 |
+
RUNNERS=$(curl -L \
|
| 22 |
+
-H "Accept: application/vnd.github+json" \
|
| 23 |
+
-H "Authorization: Bearer $GITHUB_TOKEN" \
|
| 24 |
+
-H "X-GitHub-Api-Version: 2022-11-28" \
|
| 25 |
+
https://api.github.com/repos/NVIDIA/NeMo/actions/runners)
|
| 26 |
+
|
| 27 |
+
MATRIX=$(echo $RUNNERS \
|
| 28 |
+
| jq -c '[
|
| 29 |
+
.runners[]
|
| 30 |
+
| select(.status == "online")
|
| 31 |
+
| select(.name | contains("cpu") | not)
|
| 32 |
+
| {
|
| 33 |
+
"vm": .name,
|
| 34 |
+
"n_gpus": [
|
| 35 |
+
.labels[]
|
| 36 |
+
| select(.name | endswith("gpu")) | .name
|
| 37 |
+
][0][:1]
|
| 38 |
+
}
|
| 39 |
+
]
|
| 40 |
+
'
|
| 41 |
+
)
|
| 42 |
+
echo main=$MATRIX | tee -a "$GITHUB_OUTPUT"
|
| 43 |
+
|
| 44 |
+
maintenance:
|
| 45 |
+
needs: pre-flight
|
| 46 |
+
strategy:
|
| 47 |
+
fail-fast: false
|
| 48 |
+
matrix:
|
| 49 |
+
include: ${{ fromJSON(needs.pre-flight.outputs.list-of-vms )}}
|
| 50 |
+
uses: ./.github/workflows/monitor-single-vm.yml
|
| 51 |
+
with:
|
| 52 |
+
vm: ${{ matrix.vm }}
|
| 53 |
+
n_gpus: ${{ matrix.n_gpus }}
|
| 54 |
+
secrets: inherit # pragma: allowlist secret
|
.github/workflows/release-docs.yml
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) 2026, NVIDIA CORPORATION.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
name: Release docs
|
| 15 |
+
on:
|
| 16 |
+
workflow_dispatch:
|
| 17 |
+
inputs:
|
| 18 |
+
dry-run:
|
| 19 |
+
description: Whether to run the workflow in dry-run mode
|
| 20 |
+
required: true
|
| 21 |
+
type: boolean
|
| 22 |
+
default: true
|
| 23 |
+
publish-as-latest:
|
| 24 |
+
description: Publish as Latest stable version.
|
| 25 |
+
required: false
|
| 26 |
+
type: boolean
|
| 27 |
+
default: true
|
| 28 |
+
docs-version-override:
|
| 29 |
+
description: Docs version if commit is not tagged
|
| 30 |
+
required: false
|
| 31 |
+
type: string
|
| 32 |
+
default: ""
|
| 33 |
+
update-version-picker:
|
| 34 |
+
description: Update version picker.
|
| 35 |
+
required: false
|
| 36 |
+
type: boolean
|
| 37 |
+
default: true
|
| 38 |
+
notify-emails:
|
| 39 |
+
description: Email addresses to send the notification to. Format as "me@me.com,you@you.com".
|
| 40 |
+
required: false
|
| 41 |
+
type: string
|
| 42 |
+
github-ref:
|
| 43 |
+
description: Github ref to checkout
|
| 44 |
+
required: false
|
| 45 |
+
type: string
|
| 46 |
+
default: ""
|
| 47 |
+
workflow_call:
|
| 48 |
+
inputs:
|
| 49 |
+
dry-run:
|
| 50 |
+
description: Whether to run the workflow in dry-run mode
|
| 51 |
+
required: false
|
| 52 |
+
type: boolean
|
| 53 |
+
default: true
|
| 54 |
+
publish-as-latest:
|
| 55 |
+
description: Publish as Latest stable version.
|
| 56 |
+
required: false
|
| 57 |
+
type: boolean
|
| 58 |
+
default: true
|
| 59 |
+
docs-version-override:
|
| 60 |
+
description: Docs version if commit is not tagged
|
| 61 |
+
required: false
|
| 62 |
+
type: string
|
| 63 |
+
default: ""
|
| 64 |
+
update-version-picker:
|
| 65 |
+
description: Update version picker.
|
| 66 |
+
required: false
|
| 67 |
+
type: boolean
|
| 68 |
+
default: true
|
| 69 |
+
notify-emails:
|
| 70 |
+
description: Email addresses to send the notification to. Format as "me@me.com,you@you.com".
|
| 71 |
+
required: false
|
| 72 |
+
type: string
|
| 73 |
+
github-ref:
|
| 74 |
+
description: Github ref to checkout
|
| 75 |
+
required: false
|
| 76 |
+
type: string
|
| 77 |
+
default: ""
|
| 78 |
+
|
| 79 |
+
jobs:
|
| 80 |
+
build-docs:
|
| 81 |
+
uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_build_docs.yml@v0.83.0
|
| 82 |
+
with:
|
| 83 |
+
ref: ${{ inputs.github-ref }}
|
| 84 |
+
docs-directory: docs/source
|
| 85 |
+
sync-all: true
|
| 86 |
+
no-extras: "--no-extra cu12"
|
| 87 |
+
|
| 88 |
+
publish-docs:
|
| 89 |
+
runs-on: ubuntu-latest
|
| 90 |
+
needs: [build-docs]
|
| 91 |
+
steps:
|
| 92 |
+
- uses: actions/checkout@v6
|
| 93 |
+
with:
|
| 94 |
+
repository: NVIDIA-NeMo/FW-CI-templates
|
| 95 |
+
ref: v0.74.0
|
| 96 |
+
path: FW-CI-templates
|
| 97 |
+
|
| 98 |
+
- uses: ./FW-CI-templates/.github/actions/publish-docs
|
| 99 |
+
# This workflow runs either on main, or on a version tag. Any other git ref will lead
|
| 100 |
+
# to an error.
|
| 101 |
+
# If its on main, it will publish to "latest" directory in Akamai.
|
| 102 |
+
# If its on a versioned tag, it will extract the version number from the tag (strip `v` prefix)
|
| 103 |
+
# and publish to the versioned directory in Akamai.
|
| 104 |
+
with:
|
| 105 |
+
dry-run: ${{ inputs.dry-run }}
|
| 106 |
+
artifacts-name: docs-html
|
| 107 |
+
artifacts-path: _build/html
|
| 108 |
+
emails-csv: ${{ inputs.notify-emails && format('{0},{1}', vars.docs_release_emails, inputs.notify-emails) || vars.docs_release_emails }}
|
| 109 |
+
overwrite-latest-on-tag: ${{ inputs.publish-as-latest }}
|
| 110 |
+
docs-version-override: ${{ inputs.docs-version-override }}
|
| 111 |
+
update-version-picker: ${{ inputs.update-version-picker }}
|
| 112 |
+
run-on-version-tag-only: ${{ github.ref_name != 'main' }}
|
| 113 |
+
request-name: nemo-speech-publish-docs-${{ github.run_id }}
|
| 114 |
+
aws-region: ${{ vars.DOCS_AWS_REGION }}
|
| 115 |
+
aws-role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }}
|
| 116 |
+
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
| 117 |
+
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
| 118 |
+
akamai-host: ${{ secrets.AKAMAI_HOST }}
|
| 119 |
+
akamai-client-token: ${{ secrets.AKAMAI_CLIENT_TOKEN }}
|
| 120 |
+
akamai-client-secret: ${{ secrets.AKAMAI_CLIENT_SECRET }}
|
| 121 |
+
akamai-access-token: ${{ secrets.AKAMAI_ACCESS_TOKEN }}
|
| 122 |
+
s3-target-root: ${{ secrets.S3_BUCKET_NAME }}
|
| 123 |
+
s3-target-path: nemo/speech
|
.github/workflows/release-freeze.yml
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: "Code freeze"
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
workflow_dispatch:
|
| 5 |
+
inputs:
|
| 6 |
+
type_of_release:
|
| 7 |
+
type: choice
|
| 8 |
+
description: Type of release
|
| 9 |
+
options:
|
| 10 |
+
- major
|
| 11 |
+
- minor
|
| 12 |
+
freeze-commit:
|
| 13 |
+
type: string
|
| 14 |
+
description: Commit SHA to use for cut-off
|
| 15 |
+
required: false
|
| 16 |
+
default: main
|
| 17 |
+
mcore_version:
|
| 18 |
+
description: "Version of MCore to use (must be a valid git ref)"
|
| 19 |
+
required: true
|
| 20 |
+
type: string
|
| 21 |
+
dry-run:
|
| 22 |
+
type: boolean
|
| 23 |
+
description: Dry-run of code-freeze
|
| 24 |
+
required: false
|
| 25 |
+
default: true
|
| 26 |
+
|
| 27 |
+
jobs:
|
| 28 |
+
code-freeze:
|
| 29 |
+
uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_code_freeze.yml@v0.86.0
|
| 30 |
+
with:
|
| 31 |
+
library-name: NeMo-Toolkit
|
| 32 |
+
python-package: nemo
|
| 33 |
+
release-type: ${{ inputs.type_of_release }}
|
| 34 |
+
freeze-commit: ${{ inputs.freeze-commit }}
|
| 35 |
+
dry-run: ${{ inputs.dry-run }}
|
| 36 |
+
use-pat: true
|
| 37 |
+
secrets:
|
| 38 |
+
SLACK_WEBHOOK: ${{ secrets.SLACK_RELEASE_ENDPOINT }}
|
| 39 |
+
SLACK_WEBHOOK_ADMIN: ${{ secrets.SLACK_WEBHOOK_ADMIN }}
|
| 40 |
+
PAT: ${{ secrets.PAT }}
|
| 41 |
+
|
| 42 |
+
freeze-tags:
|
| 43 |
+
runs-on: ubuntu-latest
|
| 44 |
+
needs: [code-freeze]
|
| 45 |
+
environment: main
|
| 46 |
+
steps:
|
| 47 |
+
- name: Checkout repository
|
| 48 |
+
uses: actions/checkout@v6
|
| 49 |
+
with:
|
| 50 |
+
path: ${{ github.run_id }}
|
| 51 |
+
token: ${{ secrets.PAT }}
|
| 52 |
+
fetch-depth: 0
|
| 53 |
+
fetch-tags: true
|
| 54 |
+
ref: ${{ inputs.dry-run == true && inputs.freeze-commit || needs.code-freeze.outputs.release-branch }}
|
| 55 |
+
|
| 56 |
+
- name: Pin branch name in Notebooks
|
| 57 |
+
run: |
|
| 58 |
+
cd ${{ github.run_id }}
|
| 59 |
+
find tutorials -type f -name "*.ipynb" -exec sed -i "s/BRANCH = 'main'/BRANCH = '${{ needs.code-freeze.outputs.release-branch }}'/g" {} +
|
| 60 |
+
|
| 61 |
+
- name: Pin MCore in Dockerfile
|
| 62 |
+
run: |
|
| 63 |
+
cd ${{ github.run_id }}
|
| 64 |
+
sed -i 's/^ARG MCORE_TAG=.*$/ARG MCORE_TAG=${{ inputs.mcore_version }}/' docker/Dockerfile.ci
|
| 65 |
+
|
| 66 |
+
- name: Show status
|
| 67 |
+
run: |
|
| 68 |
+
cd ${{ github.run_id }}
|
| 69 |
+
git status
|
| 70 |
+
|
| 71 |
+
- name: Create PR
|
| 72 |
+
uses: peter-evans/create-pull-request@v6
|
| 73 |
+
id: create-pull-request
|
| 74 |
+
if: ${{ inputs.dry-run != true }}
|
| 75 |
+
with:
|
| 76 |
+
path: ${{ github.run_id }}
|
| 77 |
+
base: ${{ needs.code-freeze.outputs.release-branch }}
|
| 78 |
+
branch: ci/freeze-tags-${{ needs.code-freeze.outputs.release-branch }}
|
| 79 |
+
title: "Freeze tags in in `${{ needs.code-freeze.outputs.release-branch }}`"
|
| 80 |
+
body: |
|
| 81 |
+
🚀 PR to freeze tags in `${{ needs.code-freeze.outputs.release-branch }}`.
|
| 82 |
+
|
| 83 |
+
commit-message: "[🤠]: Howdy folks, let's release NeMo `${{ needs.code-freeze.outputs.release-branch }}` !"
|
| 84 |
+
signoff: true
|
| 85 |
+
assignees: okoenig
|
.github/workflows/release-nightly-docs.yml
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (c) 2026, NVIDIA CORPORATION.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
name: Release Nightly Docs
|
| 16 |
+
|
| 17 |
+
on:
|
| 18 |
+
schedule:
|
| 19 |
+
- cron: "0 10 * * *"
|
| 20 |
+
|
| 21 |
+
jobs:
|
| 22 |
+
call-release-docs:
|
| 23 |
+
uses: ./.github/workflows/release-docs.yml
|
| 24 |
+
with:
|
| 25 |
+
dry-run: false
|
| 26 |
+
publish-as-latest: false
|
| 27 |
+
docs-version-override: "nightly"
|
| 28 |
+
update-version-picker: false
|
| 29 |
+
secrets: inherit
|
.github/workflows/release.yml
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Copyright (c) 2020-2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Manually-triggered release pipeline for the "nemo" package. All real work
# is delegated to the shared FW-CI-templates _release_library workflow; this
# file only wires repository-specific inputs and secrets through.
name: "Release Neural Modules"

on:
  workflow_dispatch:
    inputs:
      release-ref:
        description: Ref (SHA or branch name) to release
        required: true
        type: string
      version-bump-branch:
        description: Branch for version bump
        required: true
        type: string
      dry-run:
        # Defaults to true so an accidental trigger publishes nothing.
        description: Do not publish a wheel and GitHub release.
        required: true
        default: true
        type: boolean
      create-gh-release:
        description: Create a GitHub release
        required: true
        default: true
        type: boolean
      generate-changelog:
        description: Generate changelog
        required: false
        default: true
        type: boolean
      publish-docs:
        description: Publish docs
        required: false
        default: false
        type: boolean
      gh-release-from-tag:
        # Empty string lets the template pick its own baseline tag.
        description: Tag of previous release for changelog builder
        required: false
        type: string
        default: ""

jobs:
  release:
    # Pinned template version; bump deliberately when upgrading CI templates.
    uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_release_library.yml@v0.80.3
    with:
      release-ref: ${{ inputs.release-ref }}
      python-package: nemo
      python-version: "3.10"
      library-name: Neural Modules
      dry-run: ${{ inputs.dry-run }}
      version-bump-branch: ${{ inputs.version-bump-branch }}
      create-gh-release: ${{ inputs.create-gh-release }}
      app-id: ${{ vars.BOT_ID }}
      gh-release-use-changelog-builder: ${{ inputs.generate-changelog }}
      publish-docs: ${{ inputs.publish-docs }}
      gh-release-from-tag: ${{ inputs.gh-release-from-tag }}
    secrets:
      # PyPI publishing credentials.
      TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
      TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
      SLACK_WEBHOOK_ADMIN: ${{ secrets.SLACK_WEBHOOK_ADMIN }}
      # NOTE: SLACK_WEBHOOK is intentionally mapped from SLACK_RELEASE_ENDPOINT.
      SLACK_WEBHOOK: ${{ secrets.SLACK_RELEASE_ENDPOINT }}
      PAT: ${{ secrets.PAT }}
      SSH_KEY: ${{ secrets.SSH_KEY }}
      SSH_PWD: ${{ secrets.SSH_PWD }}
      BOT_KEY: ${{ secrets.BOT_KEY }}
      # AWS + Akamai credentials used by the docs-publishing path.
      AWS_ASSUME_ROLE_ARN: ${{ secrets.AWS_ASSUME_ROLE_ARN }}
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AKAMAI_HOST: ${{ secrets.AKAMAI_HOST }}
      AKAMAI_CLIENT_TOKEN: ${{ secrets.AKAMAI_CLIENT_TOKEN }}
      AKAMAI_CLIENT_SECRET: ${{ secrets.AKAMAI_CLIENT_SECRET }}
      AKAMAI_ACCESS_TOKEN: ${{ secrets.AKAMAI_ACCESS_TOKEN }}
      S3_BUCKET_NAME: ${{ secrets.S3_BUCKET_NAME }}
|
.github/workflows/secrets-detector.yml
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Copyright (c) 2020-2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Scans the files touched by a PR for committed secrets using detect-secrets,
# then (always) commits an updated .secrets.baseline if the scan changed it.
# NOTE(review): pull_request_target runs with repo secrets; confirm no step
# ever checks out or executes untrusted PR code in this workflow.
name: Secrets detector

on:
  pull_request_target:
    branches:
      - 'main'

jobs:
  main:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        with:
          # Full history is required for the merge-base diff below.
          fetch-depth: 0
          token: ${{ secrets.NEMO_REFORMAT_TOKEN }}

      - name: Install secrets detector
        run: pip install detect-secrets

      - name: Run on change-set
        run: |
          # -z / xargs -0 keeps filenames with spaces intact; --diff-filter=d
          # skips deleted files so the hook only scans files that still exist.
          git diff --name-only --diff-filter=d --merge-base origin/main -z | xargs -0 detect-secrets-hook --disable-plugin HexHighEntropyString --baseline .secrets.baseline

      - uses: EndBug/add-and-commit@v9
        # Commit changes. Nothing is committed if no changes.
        if: always()
        with:
          message: Update baseline
          commit: --signoff
|
.github/workflows/update-buildcache.yml
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Copyright (c) 2020-2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Refreshes the Docker build cache for the CI containers: nightly, on every
# push to main, or on demand. Reads build args from requirements/manifest.json
# and seeds the cache from the 100 most recently merged PRs' cache images.
name: Update build cache
on:
  schedule:
    # Daily at midnight UTC.
    - cron: 0 0 * * *
  push:
    branches:
      - main
  workflow_dispatch:
    inputs:
      runner:
        required: false
        default: self-hosted-azure-builder
        type: string
        description: VM to use for build

jobs:
  pre-flight:
    runs-on: ubuntu-latest
    outputs:
      build_args: ${{ steps.manifest.outputs.BUILD_ARGS }}
      cache-from: ${{ steps.cache_from.outputs.LAST_PRS }}
    steps:
      - name: Checkout branch
        uses: actions/checkout@v6

      - name: Parse manifest.json
        id: manifest
        # Assemble a KEY=VALUE block of container build args from the pinned
        # dependency manifest, and expose it as a multiline step output.
        run: |
          BUILD_ARGS=$(cat << EOF
          BASE_IMAGE=$(cat requirements/manifest.json | jq -r '."ngc-pytorch"')
          TRTLLM_REPO=$(cat requirements/manifest.json | jq -r '."vcs-dependencies"."trt-llm".repo')
          TRTLLM_TAG=$(cat requirements/manifest.json | jq -r '."vcs-dependencies"."trt-llm".ref')
          MLM_REPO=$(cat requirements/manifest.json | jq -r '."vcs-dependencies"."megatron-lm".repo')
          MLM_TAG=$(cat requirements/manifest.json | jq -r '."vcs-dependencies"."megatron-lm".ref')
          TE_REPO=$(cat requirements/manifest.json | jq -r '."vcs-dependencies".transformer_engine.repo')
          TE_TAG=$(cat requirements/manifest.json | jq -r '."vcs-dependencies".transformer_engine.ref')
          APEX_REPO=$(cat requirements/manifest.json | jq -r '."vcs-dependencies".apex.repo')
          APEX_TAG=$(cat requirements/manifest.json | jq -r '."vcs-dependencies".apex.ref')
          EOF
          )

          # Multiline output via the GITHUB_OUTPUT heredoc syntax.
          echo "BUILD_ARGS<<EOF" >> $GITHUB_OUTPUT
          echo "$BUILD_ARGS" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT

      - name: Get last merged PR
        id: cache_from
        env:
          GH_TOKEN: ${{ github.token }}
        # Build a newline-separated list of cache image refs, one per recently
        # merged PR, for use as additional --cache-from sources.
        # NOTE(review): the tag list hardcodes "nemo_container-buildcache" even
        # though four image names are built below — confirm this is intended.
        run: |
          LAST_PRS=$(gh api graphql -f query='
          query {
            repository(owner: "NVIDIA", name: "NeMo") {
              pullRequests(states: MERGED, first: 100, orderBy: {field: UPDATED_AT, direction: DESC}) {
                nodes {
                  number
                }
              }
            }
          }' | jq -r '.data.repository.pullRequests.nodes[].number' | while read -r number; do
            echo "nemoci.azurecr.io/nemo_container-buildcache:$number"
          done)

          echo "LAST_PRS<<EOF" >> $GITHUB_OUTPUT
          echo "$LAST_PRS" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT

  cicd-test-container-build:
    needs: [pre-flight]
    uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_build_container.yml@v0.27.0
    strategy:
      fail-fast: false
      matrix:
        include:
          - dockerfile: docker/Dockerfile.ci
            image-name: nemo_container_automodel
          - dockerfile: docker/Dockerfile.ci
            image-name: nemo_container_nemo2
          - dockerfile: docker/Dockerfile.ci
            image-name: nemo_container_speech
          - dockerfile: docker/Dockerfile.ci
            image-name: nemo_container
    with:
      image-name: ${{ matrix.image-name }}
      dockerfile: ${{ matrix.dockerfile }}
      image-label: nemo-core
      build-args: |
        IMAGE_LABEL=nemo-core
        NEMO_TAG=${{ github.sha }}
        NEMO_REPO=https://github.com/NVIDIA/NeMo
        ${{ needs.pre-flight.outputs.build_args }}
      # Fall back to the default builder when triggered by schedule/push,
      # where inputs.runner is empty.
      runner: ${{ inputs.runner || 'self-hosted-azure-builder' }}
      use-inline-cache: false
      prune-filter-timerange: 24h
      cache-from: |
        nemoci.azurecr.io/${{ matrix.image-name }}-buildcache:main
        ${{ needs.pre-flight.outputs.cache-from }}
|