Merge pull request nf-core#366 from nf-core/nf-test-conversion
Add nf-test
jfy133 authored Jul 12, 2024
2 parents 78093ef + 5c1c275 commit e535372
Showing 45 changed files with 2,986 additions and 147 deletions.
135 changes: 63 additions & 72 deletions .github/workflows/ci.yml
@@ -1,70 +1,75 @@
name: nf-core CI
# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
name: nf-core CI
on:
push:
branches:
- dev
- "dev"
pull_request:
branches:
- "dev"
- "master"
release:
types: [published]
types:
- "published"

env:
NXF_ANSI_LOG: false
NFTEST_VER: "0.8.4"

concurrency:
group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}"
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true

jobs:
test:
name: Run pipeline with test data (AMP and ARG)
# Only run on push if this is the nf-core dev branch (merged PRs)
if: "${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/funcscan') }}"
define_nxf_versions:
name: Choose nextflow versions to test against depending on target branch
runs-on: ubuntu-latest
strategy:
matrix:
NXF_VER:
- "23.04.0"
- "latest-everything"
parameters:
- "-profile docker,test_preannotated --annotation_tool prodigal"
- "-profile docker,test --annotation_tool prokka"
- "-profile docker,test --annotation_tool bakta --annotation_bakta_db_downloadtype light --arg_skip_deeparg --arg_skip_amrfinderplus" # Skip deeparg and amrfinderplus due to otherwise running out of space on GitHub Actions

outputs:
matrix: ${{ steps.nxf_versions.outputs.matrix }}
steps:
- name: Check out pipeline code
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4

- name: Install Nextflow
uses: nf-core/setup-nextflow@v2
with:
version: "${{ matrix.NXF_VER }}"

- name: Disk space cleanup
uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1

- name: Run pipeline with test data (AMP/ARG workflows)
- id: nxf_versions
run: |
nextflow run ${GITHUB_WORKSPACE} ${{ matrix.parameters }} --outdir ./results
if [[ "${{ github.event_name }}" == "pull_request" && "${{ github.base_ref }}" == "dev" && "${{ matrix.NXF_VER }}" != "latest-everything" ]]; then
echo matrix='["latest-everything"]' | tee -a $GITHUB_OUTPUT
else
echo matrix='["latest-everything", "23.10.0"]' | tee -a $GITHUB_OUTPUT
fi
test_bgc:
name: Run pipeline with test data (BGC)
# Only run on push if this is the nf-core dev branch (merged PRs)
if: "${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/funcscan') }}"
test:
name: nf-test
needs: define_nxf_versions
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
NXF_VER:
- "23.04.0"
- "latest-everything"
parameters:
- "-profile docker,test_preannotated_bgc --annotation_tool prodigal"
- "-profile docker,test_bgc --annotation_tool prokka"
- "-profile docker,test_bgc --annotation_tool bakta --annotation_bakta_db_downloadtype light"
NXF_VER: ${{ fromJson(needs.define_nxf_versions.outputs.matrix) }}
tags:
- "test"
- "test_nothing"
- "test_bakta"
- "test_prokka"
- "test_bgc_pyrodigal"
- "test_bgc_bakta"
- "test_bgc_prokka"
- "test_taxonomy_pyrodigal"
- "test_taxonomy_bakta"
- "test_taxonomy_prokka"
- "test_preannotated"
- "test_preannotated_bgc"
profile:
- "docker"

steps:
- name: Check out pipeline code
uses: actions/checkout@v2
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4

- name: Check out test data
uses: actions/checkout@v3
with:
repository: nf-core/test-datasets
ref: funcscan
path: test-datasets/
fetch-depth: 1

- name: Install Nextflow
uses: nf-core/setup-nextflow@v1
@@ -74,37 +79,23 @@ jobs:
- name: Disk space cleanup
uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1

- name: Run pipeline with test data (BGC workflow)
- name: Install nf-test
run: |
nextflow run ${GITHUB_WORKSPACE} ${{ matrix.parameters }} --outdir ./results --bgc_skip_deepbgc
wget -qO- https://code.askimed.com/install/nf-test | bash -s $NFTEST_VER
sudo mv nf-test /usr/local/bin/
test_taxonomy:
name: Run pipeline with test data (AMP, ARG and BGC with taxonomy)
# Only run on push if this is the nf-core dev branch (merged PRs)
if: "${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/funcscan') }}"
runs-on: ubuntu-latest
strategy:
matrix:
NXF_VER:
- "23.04.0"
- "latest-everything"
parameters:
- "-profile docker,test_taxonomy --annotation_tool prodigal"
- "-profile docker,test_taxonomy --annotation_tool prokka"
- "-profile docker,test_taxonomy --annotation_tool bakta --annotation_bakta_db_downloadtype light"
- name: Run nf-test
run: |
nf-test test --tag ${{ matrix.tags }} --profile ${{ matrix.tags }},${{ matrix.profile }} --junitxml=test.xml
steps:
- name: Check out pipeline code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
- name: Output log on failure
if: failure()
run: |
sudo apt install bat > /dev/null
batcat --decorations=always --color=always ${{ github.workspace }}/.nf-test/*/tests/output/pipeline_info/software_versions.yml
- name: Install Nextflow
uses: nf-core/setup-nextflow@v1
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
if: always() # always run even if the previous step fails
with:
version: "${{ matrix.NXF_VER }}"

- name: Disk space cleanup
uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1

- name: Run pipeline with test data (AMP, ARG and BGC taxonomy workflows)
run: |
nextflow run ${GITHUB_WORKSPACE} ${{ matrix.parameters }} --outdir ./results
report_paths: "*.xml"
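The rewritten `test` job installs nf-test, then runs `nf-test test --tag <tag> --profile <tag>,docker` once per entry in the tag matrix. The pipeline-level nf-test files themselves are among the remaining changed files not rendered in this excerpt; as a rough orientation, a minimal pipeline test for the `test` tag could look like the sketch below. The file path, test name, and assertions are illustrative assumptions, not the exact content added by this commit.

```groovy
// tests/main.nf.test — illustrative sketch, not the file added by this commit
nextflow_pipeline {

    name "funcscan: AMP/ARG screening with Pyrodigal"
    script "main.nf"
    tag "test"

    test("-profile test,docker") {

        when {
            params {
                // the 'test' profile (conf/test.config) supplies the input samplesheet
                // and tool settings; only the output directory is set here
                outdir = "$outputDir"
            }
        }

        then {
            // the run must finish successfully and emit a software versions report
            assert workflow.success
            assert snapshot(path("$outputDir/pipeline_info/software_versions.yml")).match()
        }
    }
}
```

Each tag in the CI matrix maps onto one of the `conf/test_*.config` profiles introduced or renamed below, so the same test pattern repeats per annotation tool and screening workflow.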
2 changes: 2 additions & 0 deletions .nf-core.yml
@@ -1,2 +1,4 @@
repository_type: pipeline
lint:
actions_ci: False ## TODO: re-activate once nf-test ci.yml structure updated
nf_core_version: "2.14.1"
10 changes: 8 additions & 2 deletions CHANGELOG.md
@@ -16,7 +16,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- [#375](https://github.com/nf-core/funcscan/pull/375) Merged pipeline template of nf-core/tools version 2.14.1. (by @jfy133)
- [#381](https://github.com/nf-core/funcscan/pull/381) Added support for supplying pre-annotated sequences to the pipeline. (by @jfy133, @jasmezz)
- [#382](https://github.com/nf-core/funcscan/pull/382) Optimised BGC screening run time and prevent crashes due to too-short contigs by adding contig length filtering for BGC workflow only. (by @jfy133, @darcy220606)
- [#384](https://github.com/nf-core/funcscan/pull/384) Deprecated AMPcombi and exchanged it with full suite of AMPcombi2 submodules. (by @darcy220606)
- [#366](https://github.com/nf-core/funcscan/pull/366) Added nf-test on pipeline level. (by @jfy133, @Darcy220606, @jasmezz)

### `Fixed`

@@ -38,22 +38,28 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

| Tool | Previous version | New version |
| ------------- | ---------------- | ----------- |
| AMPcombi | 0.1.7 | 0.2.2 |
| AMPlify | 1.1.0 | 2.0.0 |
| AMRFinderPlus | 3.11.18 | 3.12.8 |
| antiSMASH | 6.1.1 | 7.1.0 |
| bioawk | 1.0 | NA |
| DeepARG | 1.0.2 | 1.0.4 |
| DeepBGC | 0.1.30 | 0.1.31 |
| GECCO | 0.9.8 | 0.9.10 |
| hAMRonization | 1.1.1 | 1.1.4 |
| HMMER | 3.3.2 | 3.4 |
| MMSeqs | NA | 2:15.6f452 |
| MultiQC | 1.15 | 1.22.3 |
| Pyrodigal | 2.1.0 | 3.3.0 |
| RGI | 5.2.1 | 6.0.3 |
| seqkit | NA | 2.8.1 |
| tabix/htslib | 1.11 | 1.19.1 |
| ampcombi | 0.1.7 | 0.2.2 |

### `Deprecated`

- [#384](https://github.com/nf-core/funcscan/pull/384) Deprecated AMPcombi and exchanged it with full suite of AMPcombi2 submodules. (by @darcy220606)
- [#382](https://github.com/nf-core/funcscan/pull/382) Optimised BGC screening run time and prevent crashes due to too-short contigs by adding contig length filtering for BGC workflow only. Bioawk is replaced with seqkit (by @jfy133, @darcy220606)

## v1.1.6 - [2024-07-08]

### `Added`
5 changes: 3 additions & 2 deletions conf/test.config
@@ -11,7 +11,7 @@
*/

params {
config_profile_name = 'Test profile'
config_profile_name = 'AMP/ARG Pyrodigal test profile'
config_profile_description = 'Minimal test dataset to check pipeline function'

// Limit resources so that this can run on GitHub Actions
@@ -21,12 +21,13 @@ params {

// Input data
input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_reduced.csv'
amp_hmmsearch_models = params.pipelines_testdata_base_path + 'funcscan/hmms/mybacteriocin.hmm'

annotation_tool = 'pyrodigal'

run_arg_screening = true
arg_fargene_hmmmodel = 'class_a,class_b_1_2'

run_amp_screening = true
amp_run_hmmsearch = true
amp_hmmsearch_models = params.pipelines_testdata_base_path + 'funcscan/hmms/mybacteriocin.hmm'
}
34 changes: 34 additions & 0 deletions conf/test_bakta.config
@@ -0,0 +1,34 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nextflow config file for running minimal tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Defines input files and everything required to run a fast and simple pipeline test.
Use as follows:
nextflow run nf-core/funcscan -profile test_bakta,<docker/singularity> --outdir <OUTDIR>
----------------------------------------------------------------------------------------
*/

params {
config_profile_name = 'AMP/ARG Bakta test profile'
config_profile_description = 'Minimal test dataset to check pipeline function'

// Limit resources so that this can run on GitHub Actions
max_cpus = 2
max_memory = '8.GB'
max_time = '6.h'

// Input data
input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_reduced.csv'

annotation_tool = 'bakta'
annotation_bakta_db_downloadtype = 'light'

run_amp_screening = true
amp_run_hmmsearch = true
amp_hmmsearch_models = params.pipelines_testdata_base_path + 'funcscan/hmms/mybacteriocin.hmm'

run_arg_screening = true
arg_fargene_hmmmodel = 'class_a,class_b_1_2'
}
34 changes: 34 additions & 0 deletions conf/test_bgc_bakta.config
@@ -0,0 +1,34 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nextflow config file for running minimal tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Defines input files and everything required to run a fast and simple pipeline test.
Use as follows:
nextflow run nf-core/funcscan -profile test_bgc_bakta,<docker/singularity> --outdir <OUTDIR>
----------------------------------------------------------------------------------------
*/

params {
config_profile_name = 'BGC Bakta test profile'
config_profile_description = 'Minimal test dataset to check BGC workflow function'

// Limit resources so that this can run on GitHub Actions
max_cpus = 2
max_memory = '8.GB'
max_time = '6.h'

// Input data
input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_reduced.csv'

annotation_tool = 'bakta'
annotation_bakta_db_downloadtype = "light"

run_arg_screening = false
run_amp_screening = false
run_bgc_screening = true

bgc_run_hmmsearch = true
bgc_hmmsearch_models = 'https://raw.githubusercontent.com/antismash/antismash/fd61de057e082fbf071732ac64b8b2e8883de32f/antismash/detection/hmm_detection/data/ToyB.hmm'
}
33 changes: 33 additions & 0 deletions conf/test_bgc_prokka.config
@@ -0,0 +1,33 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nextflow config file for running minimal tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Defines input files and everything required to run a fast and simple pipeline test.
Use as follows:
nextflow run nf-core/funcscan -profile test_bgc_prokka,<docker/singularity> --outdir <OUTDIR>
----------------------------------------------------------------------------------------
*/

params {
config_profile_name = 'BGC Prokka test profile'
config_profile_description = 'Minimal test dataset to check BGC workflow function'

// Limit resources so that this can run on GitHub Actions
max_cpus = 2
max_memory = '8.GB'
max_time = '6.h'

// Input data
input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_reduced.csv'

annotation_tool = 'prokka'

run_arg_screening = false
run_amp_screening = false
run_bgc_screening = true

bgc_run_hmmsearch = true
bgc_hmmsearch_models = 'https://raw.githubusercontent.com/antismash/antismash/fd61de057e082fbf071732ac64b8b2e8883de32f/antismash/detection/hmm_detection/data/ToyB.hmm'
}
16 changes: 9 additions & 7 deletions conf/test_bgc.config → conf/test_bgc_pyrodigal.config
@@ -5,13 +5,13 @@
Defines input files and everything required to run a fast and simple pipeline test.
Use as follows:
nextflow run nf-core/funcscan -profile test_bgc,<docker/singularity> --outdir <OUTDIR>
nextflow run nf-core/funcscan -profile test_bgc_pyrodigal,<docker/singularity> --outdir <OUTDIR>
----------------------------------------------------------------------------------------
*/

params {
config_profile_name = 'BGC test profile'
config_profile_name = 'BGC Pyrodigal test profile'
config_profile_description = 'Minimal test dataset to check BGC workflow function'

// Limit resources so that this can run on GitHub Actions
@@ -20,12 +20,14 @@ params {
max_time = '6.h'

// Input data
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/funcscan/samplesheet_reduced.csv'
bgc_hmmsearch_models = 'https://raw.githubusercontent.com/antismash/antismash/fd61de057e082fbf071732ac64b8b2e8883de32f/antismash/detection/hmm_detection/data/ToyB.hmm'
input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_reduced.csv'

annotation_tool = 'pyrodigal'

run_arg_screening = false
run_amp_screening = false
run_bgc_screening = true
run_arg_screening = false
run_amp_screening = false
run_bgc_screening = true

bgc_run_hmmsearch = true
bgc_hmmsearch_models = 'https://raw.githubusercontent.com/antismash/antismash/fd61de057e082fbf071732ac64b8b2e8883de32f/antismash/detection/hmm_detection/data/ToyB.hmm'
}