
Merge pull request #423 from ypriverol/dev
update to openms 3.2.0
ypriverol authored Oct 7, 2024
2 parents 48622b8 + 9d47118 commit b7c9b6a
Showing 53 changed files with 1,631 additions and 622 deletions.
40 changes: 23 additions & 17 deletions .github/workflows/ci.yml
@@ -34,7 +34,8 @@ jobs:
NXF_VER:
- "23.04.0"
- "latest-everything"
test_profile: ["test_lfq", "test_lfq_sage", "test_dia", "test_localize", "test_tmt", "test_dda_id"]
test_profile:
["test_lfq", "test_lfq_sage", "test_dia", "test_localize", "test_tmt", "test_dda_id", "test_tmt_corr"]
exec_profile: ["docker"]
exclude:
- test_profile: test_dia
@@ -72,16 +73,19 @@ jobs:
echo "$(pwd)/micromamba/bin" >> $GITHUB_PATH
./bin/micromamba shell init -s bash
echo $'channels:\n - conda-forge\n - bioconda\n - defaults\nuse_lockfiles: false' >> ~/.mambarc
- name: Install Singularity with defaults
if: matrix.exec_profile == 'singularity'
uses: singularityhub/install-singularity@main
- name: Run pipeline with test data in docker profile
if: matrix.exec_profile == 'docker'

- name: Run pipeline with test data in docker/singularity profile
if: matrix.exec_profile == 'docker' || matrix.exec_profile == 'singularity'
# TODO nf-core: You can customise CI pipeline run tests as required
# For example: adding multiple test runs with different parameters
# Remember that you can parallelise this by using strategy.matrix
run: |
nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results
- name: Run pipeline with test data in conda profile (and single-threaded)
env:
MAMBA_ROOT_PREFIX: ${{ github.workspace }}/.micromamba
@@ -91,35 +95,37 @@ jobs:
# Remember that you can parallelise this by using strategy.matrix
run: |
nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,micromamba --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results
- name: Run pipeline with test data in singularity profile
if: matrix.exec_profile == 'singularity'
# TODO nf-core: You can customise CI pipeline run tests as required
# For example: adding multiple test runs with different parameters
# Remember that you can parallelise this by using strategy.matrix
run: |
nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results
- name: Gather failed logs
if: failure() || cancelled()
run: |
mkdir failed_logs
failed=$(grep "FAILED" ${TEST_PROFILE}_${EXEC_PROFILE}_results/pipeline_info/execution_trace.txt | cut -f 2)
while read -r line ; do cp $(ls work/${line}*/*.log) failed_logs/ | true ; done <<< "$failed"
- uses: actions/upload-artifact@v1
- uses: actions/upload-artifact@v4
if: failure() || cancelled()
name: Upload failed logs
with:
name: failed_logs
name: failed_logs_${{ matrix.test_profile }}_${{ matrix.exec_profile }}_${{ matrix.NXF_VER }}
include-hidden-files: true
path: failed_logs
- uses: actions/upload-artifact@v1
overwrite: false

- uses: actions/upload-artifact@v4
if: always()
name: Upload results
with:
name: ${{ env.TEST_PROFILE }}_${{ env.EXEC_PROFILE }}_results
path: ${{ env.TEST_PROFILE }}_${{ env.EXEC_PROFILE }}_results
- uses: actions/upload-artifact@v1
name: ${{ matrix.test_profile }}_${{ matrix.exec_profile }}_${{ matrix.NXF_VER }}_results
include-hidden-files: true
path: ${{ matrix.test_profile }}_${{ matrix.exec_profile }}_results
overwrite: false

- uses: actions/upload-artifact@v4
if: always()
name: Upload log
with:
name: nextflow.log
name: nextflow_${{ matrix.test_profile }}_${{ matrix.exec_profile }}_${{ matrix.NXF_VER }}.log
include-hidden-files: true
path: .nextflow.log
overwrite: false
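
For local debugging, the docker/singularity CI step above reduces to a single Nextflow invocation. A minimal sketch, assuming Nextflow (>= 23.04.0, as in the version matrix) and Docker are installed and the command is run from a quantms checkout; the profile names come from the test_profile matrix:

# Reproduce one CI cell locally, e.g. the new test_tmt_corr profile under docker
TEST_PROFILE=test_tmt_corr
EXEC_PROFILE=docker
nextflow run . -profile ${TEST_PROFILE},${EXEC_PROFILE} --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results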
18 changes: 17 additions & 1 deletion CHANGELOG.md
@@ -3,11 +3,27 @@
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [1.4.0] nfcore/quantms - [--/--/----] - Cambridge

### `Added`

- [#423](https://github.com/bigbio/quantms/pull/423) Support for plex Correction matrices for TMT and iTRAQ analytical methods.

### `Changed`

- [#423](https://github.com/bigbio/quantms/pull/423) Updated OpenMS==3.2.0

### `Fixed`

### `Dependencies`

- openms==3.2.0

## [1.3.0] nfcore/quantms - [08/04/2024] - Santiago de Cuba

### `Added`

- [#335](https://github.com/bigbio/quantms/pull/335) (Performance improvement) Improvements in DIA pipeline to use random/subset files for library search
- [#335](https://github.com/bigbio/quantms/pull/335) (Performance improvement) Improvements in a DIA pipeline to use random/subset files for library search
- [#351](https://github.com/bigbio/quantms/pull/351) Identification workflow for DDA data
- [#362](https://github.com/bigbio/quantms/pull/362) Introduction to ms2rescore to identification workflow
- [#374](https://github.com/bigbio/quantms/pull/374) Add msgf+ database indexing step
7 changes: 7 additions & 0 deletions assets/itraq4plex_default_correction.matrix
@@ -0,0 +1,7 @@
# comment: itraq4plex from MSnbase
# comment: https://github.com/lgatto/MSnbase/blob/master/inst/extdata/iTRAQ4plexPurityCorrections.csv
channel/<-2Da>/<-1Da>/<+1Da>/<+2Da>
114/0.0/1.0/5.9/0.2
115/0.0/2.0/5.6/0.1
116/0.0/3.0/4.5/0.1
117/0.1/4.0/3.5/0.1
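
All of the correction matrices added here follow the same plain-text layout: '#' comment lines, a header naming the columns, then one '/'-separated row per channel giving isotope impurity percentages at -2, -1, +1 and +2 Da. As a quick sanity check, the impurity columns can be pulled out with standard shell tools (a sketch; the path assumes a quantms checkout):

# Skip comments and the header line, then print channel, -1 Da and +1 Da impurity percentages
grep -v '^#' assets/itraq4plex_default_correction.matrix | tail -n +2 \
| awk -F'/' '{printf "channel %s: -1Da=%s%% +1Da=%s%%\n", $1, $3, $4}'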
11 changes: 11 additions & 0 deletions assets/itraq8plex_default_correction.matrix
@@ -0,0 +1,11 @@
# comment: itraq8plex from MSnbase
# comment: https://github.com/lgatto/MSnbase/blob/master/inst/extdata/iTRAQ8plexPurityCorrections.csv
channel/<-2Da>/<-1Da>/<+1Da>/<+2Da>
113/0.0/2.5/3.0/0.1
114/0.0/1.0/5.9/0.2
115/0.0/2.0/5.6/0.1
116/0.0/3.0/4.5/0.1
117/0.1/4.0/3.5/0.1
118/0.1/2.0/3.0/0.1
119/0.1/2.0/4.0/0.1
121/0.1/2.0/3.0/0.1
14 changes: 14 additions & 0 deletions assets/tmt11plex_default_correction.matrix
@@ -0,0 +1,14 @@
# comment: tmt11plex default from OpenMS
# comment: Product Number: A37725 Lot Number: ZF395505
channel/<-2Da>/<-1Da>/<+1Da>/<+2Da>
126/0.0/0.0/8.6/0.3
127N/0.0/0.1/7.8/0.1
127C/0.0/0.8/6.9/0.1
128N/0.0/7.4/7.4/0.0
128C/0.0/1.5/6.2/0.2
129N/0.0/1.5/5.7/0.1
129C/0.0/2.6/4.8/0.0
130N/0.0/2.2/4.6/0.0
130C/0.0/2.8/4.5/0.1
131N/0.1/2.9/3.8/0.0
131C/0.0/3.9/2.8/0.0
9 changes: 9 additions & 0 deletions assets/tmt6plex_90064_XL352952_correction.matrix
@@ -0,0 +1,9 @@
# comment: https://www.thermofisher.com/order/catalog/product/90064B?SID=srch-srp-90064B
# comment: Product Number: 90064 Lot Number: XL352952
channel/<-2Da>/<-1Da>/<+1Da>/<+2Da>
126/0.0/0.0/8.3/0.3
127/0.0/0.1/7.8/0.1
128/0.0/1.3/5.7/0.1
129/0.0/1.5/5.7/0.1
130/0.0/3.1/3.6/0.0
131/0.1/2.9/3.8/0.0
10 changes: 10 additions & 0 deletions assets/tmt6plex_default_correction.matrix
@@ -0,0 +1,10 @@
# comment: tmt6plex default in OpenMS
# comment: https://www.thermofisher.com/order/catalog/product/90061?SID=srch-srp-90061
# comment: Product Number: 90061 Lot Number: ZE386964
channel/<-2Da>/<-1Da>/<+1Da>/<+2Da>
126/0.0/0.0/8.6/0.3
127/0.0/0.1/7.8/0.1
128/0.0/1.5/6.2/0.2
129/0.0/1.5/5.7/0.1
130/0.0/3.1/3.6/0.0
131/0.1/2.9/3.8/0.0
9 changes: 9 additions & 0 deletions assets/tmt6plex_msnbase_correction.matrix
@@ -0,0 +1,9 @@
# comment: tmt6plex from MSnbase
# comment: https://github.com/lgatto/MSnbase/blob/master/inst/extdata/TMT6plexPurityCorrections.csv
channel/<-2Da>/<-1Da>/<+1Da>/<+2Da>
126/0.0/0.0/6.1/0.0
127/0.0/0.5/6.7/0.0
128/0.0/1.1/4.2/0.0
129/0.0/1.7/4.1/0.0
130/0.0/1.6/2.1/0.0
131/0.2/3.2/2.8/0.0
2 changes: 1 addition & 1 deletion conf/dev.config
@@ -18,7 +18,7 @@ params {

process {
withLabel: openms {
conda = "openms::openms-thirdparty=3.1.0"
conda = "openms::openms-thirdparty=3.2.0"
container = {"${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'ghcr.io/openms/openms-executables-sif:latest' : 'ghcr.io/openms/openms-executables:latest' }"}
}
}
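
Because the OpenMS processes are selected via withLabel: openms, the same pattern can be used to pin a different OpenMS build from a user-supplied config without editing the pipeline. A hedged sketch (the file name custom_openms.config is an assumption, and the conda override only takes effect with a conda-based software profile such as the standard nf-core conda profile):

cat > custom_openms.config <<'EOF'
process {
    withLabel: openms {
        conda = "openms::openms-thirdparty=3.2.0"
    }
}
EOF
nextflow run nf-core/quantms -profile test_lfq,conda -c custom_openms.config --outdir results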
8 changes: 4 additions & 4 deletions conf/modules.config
@@ -180,12 +180,12 @@ process {
}

// IDFILTER on PROTEIN LEVEL
level = params.protein_quant == 'strictly_unique_peptides' ? 'prot' : 'protgroup'
level = params.protein_quant == 'strictly_unique_peptides' ? 'prot' : 'proteingroup'
decoys_present = params.quantify_decoys ? ' ' : '-remove_decoys'
withName: '.*:TMT:PROTEININFERENCE:IDFILTER' {
ext.args = [
"-score:${level} \"$params.protein_level_fdr_cutoff\"",
"-score:pep \"$params.psm_level_fdr_cutoff\"",
"-score:psm \"$params.psm_level_fdr_cutoff\"",
"-delete_unreferenced_peptide_hits",
"${decoys_present}"
].join(' ').trim()
@@ -240,7 +240,7 @@ process {

// IDFILTER
withName: '.*:ID:PSMFDRCONTROL:IDFILTER' {
ext.args = "-score:pep \"$params.run_fdr_cutoff\""
ext.args = "-score:psm \"$params.run_fdr_cutoff\""
ext.suffix = '.idXML'
publishDir = [
path: { "${params.outdir}/idfilter" },
@@ -250,7 +250,7 @@
}

withName: '.*:DDA_ID:PSMFDRCONTROL:IDFILTER' {
ext.args = "-score:pep \"$params.run_fdr_cutoff\""
ext.args = "-score:psm \"$params.run_fdr_cutoff\""
ext.suffix = '.idXML'
publishDir = [
path: { "${params.outdir}/idfilter" },
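
For reference, the ext.args assembled above translate to OpenMS IDFilter calls roughly like the following (a sketch; file names are placeholders, 0.01 stands in for the respective FDR cutoff parameters, and -remove_decoys is only added when --quantify_decoys is not set):

# PSM-level filtering, as configured for .*:ID:PSMFDRCONTROL:IDFILTER
IDFilter -in psms.idXML -out psms_filtered.idXML -score:psm 0.01
# Protein-level filtering, as configured for .*:TMT:PROTEININFERENCE:IDFILTER
IDFilter -in inferred.idXML -out inferred_filtered.idXML \
-score:proteingroup 0.01 -score:psm 0.01 \
-delete_unreferenced_peptide_hits -remove_decoys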
35 changes: 35 additions & 0 deletions conf/test_tmt_corr.config
@@ -0,0 +1,35 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nextflow config file for running full-size tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Defines input files and everything required to run a full size pipeline test.
Use as follows:
nextflow run nf-core/quantms -profile test_tmt_corr,<docker/singularity> [--outdir <OUTDIR>]
----------------------------------------------------------------------------------------
*/

params {
config_profile_name = 'Full test profile DDA ISO'
config_profile_description = 'Full test dataset in isotopic labelling mode to check pipeline function and sanity of results'

outdir = "./results_iso"

max_cpus = 2
max_memory = 6.GB
max_time = 48.h

// Input data for full size test
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/quantms/testdata/tmt_ci/PXD000001.sdrf.tsv'

database = 'https://raw.githubusercontent.com/nf-core/test-datasets/quantms/testdata/tmt_ci/erwinia_carotovora.fasta'
posterior_probabilities = "percolator"
search_engines = "comet,msgf"
psm_level_fdr_cutoff = 1.0
decoy_string = "rev"
add_decoys = true
protocol = 'TMT'
isotope_correction = true
plex_corr_matrix_file = 'assets/tmt6plex_default_correction.matrix'
}
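
Beyond the test profile, the same isotope-impurity correction can be requested on real data by passing the two new parameters directly. A minimal sketch (the SDRF, FASTA and output paths are placeholders; the matrix path is taken from the test profile above):

nextflow run nf-core/quantms -profile docker \
--input mydesign.sdrf.tsv \
--database mydatabase.fasta \
--protocol TMT \
--isotope_correction true \
--plex_corr_matrix_file assets/tmt6plex_default_correction.matrix \
--outdir results_tmt_corr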
