Update diann_convert.py #14

Workflow file for this run

name: nf-core CI
# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
on:
  push:
    branches:
      - dev
  pull_request:
  release:
    types: [published]

env:
  NXF_ANSI_LOG: false

concurrency:
  group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}"
  cancel-in-progress: true

jobs:
  test:
    env:
      NXF_ANSI_LOG: false
      CAPSULE_LOG: none
      TEST_PROFILE: ${{ matrix.test_profile }}
      EXEC_PROFILE: ${{ matrix.exec_profile }}
    name: Run pipeline with test data
    # Only run on push if this is the nf-core dev branch (merged PRs)
    if: ${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/quantms') }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        # Nextflow versions
        NXF_VER:
          - "23.04.0"
          - "latest-everything"
        test_profile: ["test_lfq", "test_lfq_sage", "test_dia", "test_localize", "test_tmt"]
        exec_profile: ["docker", "conda"]
        exclude:
          - test_profile: test_dia
            exec_profile: conda
          - test_profile: test_localize
            exec_profile: conda
          - NXF_VER: "latest-everything"
            exec_profile: "conda"
    steps:
      - name: Check out pipeline code
        uses: actions/checkout@v3

      - name: Install Nextflow
        uses: nf-core/setup-nextflow@v1
        with:
          version: "${{ matrix.NXF_VER }}"

      - name: Install micromamba
        if: matrix.exec_profile == 'conda'
        run: |
          wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
          echo "$(pwd)/bin" >> $GITHUB_PATH
          echo "$(pwd)/micromamba/bin" >> $GITHUB_PATH
          ./bin/micromamba shell init -s bash -p ./micromamba
          echo $'channels:\n - conda-forge\n - bioconda\n - defaults\nuse_lockfiles: false' >> ~/.mambarc

      - name: Run pipeline with test data
        if: matrix.exec_profile != 'conda'
        # TODO nf-core: You can customise CI pipeline run tests as required
        # For example: adding multiple test runs with different parameters
        # Remember that you can parallelise this by using strategy.matrix
        run: |
          nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results

      - name: Run pipeline with test data in conda profile (and single-threaded)
        if: matrix.exec_profile == 'conda'
        # TODO nf-core: You can customise CI pipeline run tests as required
        # For example: adding multiple test runs with different parameters
        # Remember that you can parallelise this by using strategy.matrix
        run: |
          nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,micromamba --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results

      - name: Gather failed logs
        if: failure() || cancelled()
        run: |
          mkdir failed_logs
          failed=$(grep "FAILED" ${TEST_PROFILE}_${EXEC_PROFILE}_results/pipeline_info/execution_trace.txt | cut -f 2)
          while read -r line ; do cp $(ls work/${line}*/*.log) failed_logs/ || true ; done <<< "$failed"

      - name: Upload failed logs
        uses: actions/upload-artifact@v1
        if: failure() || cancelled()
        with:
          name: failed_logs
          path: failed_logs

      - name: Upload results
        uses: actions/upload-artifact@v1
        if: always()
        with:
          name: ${{ env.TEST_PROFILE }}_${{ env.EXEC_PROFILE }}_results
          path: ${{ env.TEST_PROFILE }}_${{ env.EXEC_PROFILE }}_results

      - name: Upload log
        uses: actions/upload-artifact@v1
        if: always()
        with:
          name: nextflow.log
          path: .nextflow.log
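
For reference, one cell of the test matrix above can be reproduced outside CI. This is a minimal sketch, assuming a local checkout of the pipeline with Nextflow 23.04.0 (or newer) and Docker installed; the test_lfq/docker pair is just one example of the profile combinations listed in the matrix.

    # Run from the root of the pipeline checkout (local setup assumed, not part of the workflow file)
    nextflow run . -profile test_lfq,docker --outdir test_lfq_docker_results
    # List failed task hashes the same way the "Gather failed logs" step does
    grep "FAILED" test_lfq_docker_results/pipeline_info/execution_trace.txt | cut -f 2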