diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index da5099a7ab085e70631cd07a655007f52f5e4048..5b65356d252ae30d9c86e9c6741cc7d8aceedd60 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -6,6 +6,8 @@ before_script:
   - ln -sfn /project/BICF/BICF_Core/shared/gudmap/test_data/* ./test_data/
   - mkdir -p ~/.deriva
   - mkdir -p ~/.bdbag
+  - singularity cache clean -a
+  - unset SINGULARITY_CACHEDIR
 
 variables:
   refMoVersion: "38.p6.vM25"
@@ -32,8 +34,8 @@ build_badges:
     - chmod +x ./workflow/scripts/get_updated_badge_info.sh
   script:
     - echo "Building badges"
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/gudmap-rbk_base:1.0.0' bash ./workflow/scripts/get_updated_badge_info.sh
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/gudmap-rbk_base:1.0.0' bash ./workflow/scripts/get_updated_rep_count.sh
+    - singularity run 'docker://gudmaprbk/gudmap-rbk_base:1.0.0' bash ./workflow/scripts/get_updated_badge_info.sh
+    - singularity run 'docker://gudmaprbk/gudmap-rbk_base:1.0.0' bash ./workflow/scripts/get_updated_rep_count.sh
   artifacts:
     paths:
       - badges/
@@ -63,8 +65,8 @@ getBag:
     - schedules
   script:
     - ln -sfn `readlink -e ./test_data/auth/credential.json` ~/.deriva/credential.json
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-download-cli --version > version_deriva.txt
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-download-cli staging.gudmap.org --catalog 2 ./workflow/conf/Replicate_For_Input_Bag.json . rid=Q-Y5F6
+    - singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-download-cli --version > version_deriva.txt
+    - singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-download-cli staging.gudmap.org --catalog 2 ./workflow/conf/Replicate_For_Input_Bag.json . rid=Q-Y5F6
     - pytest -m getBag
   artifacts:
     name: "$CI_JOB_NAME"
@@ -82,10 +84,10 @@ getData:
     - merge_requests
     - schedules
   script:
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' bdbag --version > version_bdbag.txt
+    - singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' bdbag --version > version_bdbag.txt
     - ln -sfn `readlink -e ./test_data/auth/cookies.txt` ~/.bdbag/deriva-cookies.txt
     - unzip ./test_data/bag/Q-Y5F6_inputBag_xxxxtest.zip
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' bash ./workflow/scripts/bdbag_fetch.sh Q-Y5F6_inputBag Q-Y5F6
+    - singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' bash ./workflow/scripts/bdbag_fetch.sh Q-Y5F6_inputBag Q-Y5F6
     - pytest -m getData
   artifacts:
     name: "$CI_JOB_NAME"
@@ -103,17 +105,17 @@ parseMetadata:
     - merge_requests
     - schedules
   script:
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/python3:1.0.0' python3 --version > version_python.txt
-    - rep=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p repRID)
-    - exp=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p expRID)
-    - study=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p studyRID)
-    - endsRaw=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p endsMeta)
+    - singularity run 'docker://gudmaprbk/python3:1.0.0' python3 --version > version_python.txt
+    - rep=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p repRID)
+    - exp=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p expRID)
+    - study=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p studyRID)
+    - endsRaw=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p endsMeta)
     - endsMeta="uk"
     - endsManual="se"
-    - stranded=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p stranded)
-    - spike=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p spike)
-    - species=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p species)
-    - readLength=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p readLength)
+    - stranded=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p stranded)
+    - spike=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p spike)
+    - species=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p species)
+    - readLength=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p readLength)
     - echo -e "${endsMeta},${endsRaw},${endsManual},${stranded},${spike},${species},${readLength},${exp},${study},${rep}" > design.csv
     - pytest -m parseMetadata
   artifacts:
@@ -132,8 +134,8 @@ fastqc:
     - merge_requests
     - schedules
   script:
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/fastqc0.11.9:1.0.0' fastqc --version > version_fastqc.txt
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/fastqc0.11.9:1.0.0' fastqc ./test_data/fastq/small/Q-Y5F6_1M.R1.fastq.gz -o .
+    - singularity run 'docker://gudmaprbk/fastqc0.11.9:1.0.0' fastqc --version > version_fastqc.txt
+    - singularity run 'docker://gudmaprbk/fastqc0.11.9:1.0.0' fastqc ./test_data/fastq/small/Q-Y5F6_1M.R1.fastq.gz -o .
     - pytest -m fastqc
   artifacts:
     name: "$CI_JOB_NAME"
@@ -153,8 +155,8 @@ seqwho:
   script:
     - wget -O SeqWho.ix https://cloud.biohpc.swmed.edu/index.php/s/eeNWqZz8jqN5zWY/download
    - mkdir -p SeqWho_call_plots/test_data/fastq/small/
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/seqwho0.0.1:1.0.0' seqwho.py -h | grep -o Version.* > version_seqwho.txt
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/seqwho0.0.1:1.0.0' seqwho.py -f test_data/fastq/small/Q-Y5F6_1M.R1.fastq.gz -x SeqWho.ix
+    - singularity run 'docker://gudmaprbk/seqwho0.0.1:1.0.0' seqwho.py -h | grep -o Version.* > version_seqwho.txt
+    - singularity run 'docker://gudmaprbk/seqwho0.0.1:1.0.0' seqwho.py -f test_data/fastq/small/Q-Y5F6_1M.R1.fastq.gz -x SeqWho.ix
     - pytest -m seqwho
   artifacts:
     name: "$CI_JOB_NAME"
@@ -172,9 +174,9 @@ trimData:
     - merge_requests
     - schedules
   script:
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/trimgalore0.6.5:1.0.0' trim_galore --version > version_trimgalore.txt
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/trimgalore0.6.5:1.0.0' trim_galore --gzip -q 25 --length 35 --basename Q-Y5F6_1M.se ./test_data/fastq/small/Q-Y5F6_1M.R1.fastq.gz
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/trimgalore0.6.5:1.0.0' trim_galore --gzip -q 25 --length 35 --paired --basename Q-Y5F6_1M.pe ./test_data/fastq/small/Q-Y5F6_1M.R1.fastq.gz ./test_data/fastq/small/Q-Y5F6_1M.R2.fastq.gz
+    - singularity run 'docker://gudmaprbk/trimgalore0.6.5:1.0.0' trim_galore --version > version_trimgalore.txt
+    - singularity run 'docker://gudmaprbk/trimgalore0.6.5:1.0.0' trim_galore --gzip -q 25 --length 35 --basename Q-Y5F6_1M.se ./test_data/fastq/small/Q-Y5F6_1M.R1.fastq.gz
+    - singularity run 'docker://gudmaprbk/trimgalore0.6.5:1.0.0' trim_galore --gzip -q 25 --length 35 --paired --basename Q-Y5F6_1M.pe ./test_data/fastq/small/Q-Y5F6_1M.R1.fastq.gz ./test_data/fastq/small/Q-Y5F6_1M.R2.fastq.gz
     - readLengthSE=$(zcat *_trimmed.fq.gz | awk '{if(NR%4==2) print length($1)}' | sort -n | awk '{a[NR]=$0}END{print(NR%2==1)?a[int(NR/2)+1]:(a[NR/2]+a[NR/2+1])/2}')
     - readLengthPE=$(zcat *_1.fq.gz | awk '{if(NR%4==2) print length($1)}' | sort -n | awk '{a[NR]=$0}END{print(NR%2==1)?a[int(NR/2)+1]:(a[NR/2]+a[NR/2+1])/2}')
     - pytest -m trimData
@@ -194,8 +196,8 @@ downsampleData:
     - merge_requests
     - schedules
   script:
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/seqtk1.3:1.0.0' seqtk 2>&1 | grep -o Version.* > version_seqtk.txt&
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/seqtk1.3:1.0.0' seqtk sample -s100 ./test_data/fastq/small/Q-Y5F6_1M.se_trimmed.fq.gz 1000 1> sampled.1.fq
+    - singularity run 'docker://gudmaprbk/seqtk1.3:1.0.0' seqtk 2>&1 | grep -o Version.* > version_seqtk.txt&
+    - singularity run 'docker://gudmaprbk/seqtk1.3:1.0.0' seqtk sample -s100 ./test_data/fastq/small/Q-Y5F6_1M.se_trimmed.fq.gz 1000 1> sampled.1.fq
     - pytest -m downsampleData
   artifacts:
     name: "$CI_JOB_NAME"
@@ -213,13 +215,13 @@ inferMetadata:
     - merge_requests
     - schedules
   script:
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/rseqc4.0.0:1.0.0' infer_experiment.py --version > version_rseqc.txt
+    - singularity run 'docker://gudmaprbk/rseqc4.0.0:1.0.0' infer_experiment.py --version > version_rseqc.txt
     - >
       align=$(echo $(grep "Overall alignment rate" ./test_data/meta/Q-Y5F6_1M.se.alignSummary.txt | cut -f2 -d ':' | cut -f2 -d ' ' | tr -d '%')) &&
       if [[ ${align} == "" ]]; then exit 1; fi
     - >
-      export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/rseqc4.0.0:1.0.0' infer_experiment.py -r "/project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/annotation/genome.bed" -i "./test_data/bam/small/Q-Y5F6_1M.se.sorted.deduped.bam" 1>> Q-Y5F6_1M.se.inferMetadata.log &&
-      ended=`export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/infer_meta.sh endness Q-Y5F6_1M.se.inferMetadata.log` &&
+      singularity run 'docker://gudmaprbk/rseqc4.0.0:1.0.0' infer_experiment.py -r "/project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/annotation/genome.bed" -i "./test_data/bam/small/Q-Y5F6_1M.se.sorted.deduped.bam" 1>> Q-Y5F6_1M.se.inferMetadata.log &&
+      ended=`singularity run 'gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/infer_meta.sh endness Q-Y5F6_1M.se.inferMetadata.log` &&
       if [[ ${ended} == "" ]]; then exit 1; fi
     - pytest -m inferMetadata
   artifacts:
@@ -238,16 +240,16 @@ alignData:
     - merge_requests
     - schedules
   script:
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' hisat2 --version > version_hisat2.txt
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools --version > version_samtools.txt
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' hisat2 -p 20 --add-chrname --un-gz Q-Y5F6_1M.se.unal.gz -S Q-Y5F6_1M.se.sam -x /project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/hisat2/genome --rna-strandness F -U ./test_data/fastq/small/Q-Y5F6_1M.se_trimmed.fq.gz --summary-file Q-Y5F6_1M.se.alignSummary.txt --new-summary
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools view -1 -@ 20 -F 4 -F 8 -F 256 -o Q-Y5F6_1M.se.bam Q-Y5F6_1M.se.sam
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools sort -@ 20 -O BAM -o Q-Y5F6_1M.se.sorted.bam Q-Y5F6_1M.se.bam
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools index -@ 20 -b Q-Y5F6_1M.se.sorted.bam Q-Y5F6_1M.se.sorted.bam.bai
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' hisat2 -p 20 --add-chrname --un-gz Q-Y5F6_1M.pe.unal.gz -S Q-Y5F6_1M.pe.sam -x /project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/hisat2/genome --rna-strandness FR --no-mixed --no-discordant -1 ./test_data/fastq/small/Q-Y5F6_1M.pe_val_1.fq.gz -2 ./test_data/fastq/small/Q-Y5F6_1M.pe_val_2.fq.gz --summary-file Q-Y5F6_1M.pe.alignSummary.txt --new-summary
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools view -1 -@ 20 -F 4 -F 8 -F 256 -o Q-Y5F6_1M.pe.bam Q-Y5F6_1M.pe.sam
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools sort -@ 20 -O BAM -o Q-Y5F6_1M.pe.sorted.bam Q-Y5F6_1M.pe.bam
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools index -@ 20 -b Q-Y5F6_1M.pe.sorted.bam Q-Y5F6_1M.pe.sorted.bam.bai
+    - singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' hisat2 --version > version_hisat2.txt
+    - singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools --version > version_samtools.txt
+    - singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' hisat2 -p 20 --add-chrname --un-gz Q-Y5F6_1M.se.unal.gz -S Q-Y5F6_1M.se.sam -x /project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/hisat2/genome --rna-strandness F -U ./test_data/fastq/small/Q-Y5F6_1M.se_trimmed.fq.gz --summary-file Q-Y5F6_1M.se.alignSummary.txt --new-summary
+    - singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools view -1 -@ 20 -F 4 -F 8 -F 256 -o Q-Y5F6_1M.se.bam Q-Y5F6_1M.se.sam
+    - singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools sort -@ 20 -O BAM -o Q-Y5F6_1M.se.sorted.bam Q-Y5F6_1M.se.bam
+    - singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools index -@ 20 -b Q-Y5F6_1M.se.sorted.bam Q-Y5F6_1M.se.sorted.bam.bai
+    - singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' hisat2 -p 20 --add-chrname --un-gz Q-Y5F6_1M.pe.unal.gz -S Q-Y5F6_1M.pe.sam -x /project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/hisat2/genome --rna-strandness FR --no-mixed --no-discordant -1 ./test_data/fastq/small/Q-Y5F6_1M.pe_val_1.fq.gz -2 ./test_data/fastq/small/Q-Y5F6_1M.pe_val_2.fq.gz --summary-file Q-Y5F6_1M.pe.alignSummary.txt --new-summary
+    - singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools view -1 -@ 20 -F 4 -F 8 -F 256 -o Q-Y5F6_1M.pe.bam Q-Y5F6_1M.pe.sam
+    - singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools sort -@ 20 -O BAM -o Q-Y5F6_1M.pe.sorted.bam Q-Y5F6_1M.pe.bam
+    - singularity run 'docker://gudmaprbk/hisat2.2.1:1.0.0' samtools index -@ 20 -b Q-Y5F6_1M.pe.sorted.bam Q-Y5F6_1M.pe.sorted.bam.bai
     - pytest -m alignData
   artifacts:
     name: "$CI_JOB_NAME"
@@ -266,15 +268,15 @@ dedupData:
     - merge_requests
     - schedules
   script:
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/picard2.23.9:1.0.0' samtools --version > version_samtools.txt
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/picard2.23.9:1.0.0' java -jar /picard/build/libs/picard.jar MarkDuplicates --version 2> version_markdups.txt&
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/picard2.23.9:1.0.0' java -jar /picard/build/libs/picard.jar MarkDuplicates I=./test_data/bam/small/Q-Y5F6_1M.se.sorted.bam O=Q-Y5F6_1M.se.deduped.bam M=Q-Y5F6_1M.se.deduped.Metrics.txt REMOVE_DUPLICATES=true
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/picard2.23.9:1.0.0' samtools sort -@ 20 -O BAM -o Q-Y5F6_1M.se.sorted.deduped.bam ./test_data/bam/small/Q-Y5F6_1M.se.deduped.bam
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/picard2.23.9:1.0.0' samtools index -@ 20 -b ./test_data/bam/small/Q-Y5F6_1M.se.sorted.deduped.bam Q-Y5F6_1M.se.sorted.deduped.bam.bai
+    - singularity run 'docker://gudmaprbk/picard2.23.9:1.0.0' samtools --version > version_samtools.txt
+    - singularity run 'docker://gudmaprbk/picard2.23.9:1.0.0' java -jar /picard/build/libs/picard.jar MarkDuplicates --version 2> version_markdups.txt&
+    - singularity run 'docker://gudmaprbk/picard2.23.9:1.0.0' java -jar /picard/build/libs/picard.jar MarkDuplicates I=./test_data/bam/small/Q-Y5F6_1M.se.sorted.bam O=Q-Y5F6_1M.se.deduped.bam M=Q-Y5F6_1M.se.deduped.Metrics.txt REMOVE_DUPLICATES=true
+    - singularity run 'docker://gudmaprbk/picard2.23.9:1.0.0' samtools sort -@ 20 -O BAM -o Q-Y5F6_1M.se.sorted.deduped.bam ./test_data/bam/small/Q-Y5F6_1M.se.deduped.bam
+    - singularity run 'docker://gudmaprbk/picard2.23.9:1.0.0' samtools index -@ 20 -b ./test_data/bam/small/Q-Y5F6_1M.se.sorted.deduped.bam Q-Y5F6_1M.se.sorted.deduped.bam.bai
     - >
       for i in {"chr8","chr4","chrY"}; do
       echo "samtools view -b Q-Y5F6_1M.se.sorted.deduped.bam ${i} > Q-Y5F6_1M.se.sorted.deduped.${i}.bam; samtools index -@ 20 -b Q-Y5F6_1M.se.sorted.deduped.${i}.bam Q-Y5F6_1M.se.sorted.deduped.${i}.bam.bai;";
-      done | export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/picard2.23.9:1.0.0' parallel -j 20 -k
+      done | singularity run 'docker://gudmaprbk/picard2.23.9:1.0.0' parallel -j 20 -k
     - pytest -m dedupData
   artifacts:
     name: "$CI_JOB_NAME"
@@ -295,12 +297,12 @@ countData:
   script:
     - ln -s /project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/metadata/geneID.tsv
     - ln -s /project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/metadata/Entrez.tsv
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/subread2.0.1:1.0.0' featureCounts -T 20 -a /project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/annotation/genome.gtf -G /project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/sequence/genome.fna -g 'gene_name' --extraAttributes 'gene_id' -o Q-Y5F6_1M.se_countData -s 1 -R SAM --primary --ignoreDup ./test_data/bam/small/Q-Y5F6_1M.se.sorted.deduped.bam
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/subread2.0.1:1.0.0' Rscript ./workflow/scripts/calculateTPM.R --count ./test_data/counts/small/Q-Y5F6_1M.se_countData
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/subread2.0.1:1.0.0' Rscript ./workflow/scripts/convertGeneSymbols.R --repRID Q-Y5F6_1M.se
+    - singularity run 'docker://gudmaprbk/subread2.0.1:1.0.0' featureCounts -T 20 -a /project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/annotation/genome.gtf -G /project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/sequence/genome.fna -g 'gene_name' --extraAttributes 'gene_id' -o Q-Y5F6_1M.se_countData -s 1 -R SAM --primary --ignoreDup ./test_data/bam/small/Q-Y5F6_1M.se.sorted.deduped.bam
+    - singularity run 'docker://gudmaprbk/subread2.0.1:1.0.0' Rscript ./workflow/scripts/calculateTPM.R --count ./test_data/counts/small/Q-Y5F6_1M.se_countData
+    - singularity run 'docker://gudmaprbk/subread2.0.1:1.0.0' Rscript ./workflow/scripts/convertGeneSymbols.R --repRID Q-Y5F6_1M.se
     - assignedReads=$(grep -m 1 'Assigned' *.summary | grep -oe '\([0-9.]*\)')
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/subread2.0.1:1.0.0' featureCounts -v &> version_featurecounts.txt
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/subread2.0.1:1.0.0' R --version > version_r.txt
+    - singularity run 'docker://gudmaprbk/subread2.0.1:1.0.0' featureCounts -v &> version_featurecounts.txt
+    - singularity run 'docker://gudmaprbk/subread2.0.1:1.0.0' R --version > version_r.txt
     - pytest -m makeFeatureCounts
   artifacts:
     name: "$CI_JOB_NAME"
@@ -319,8 +321,8 @@ makeBigWig:
     - merge_requests
     - schedules
   script:
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deeptools3.5.0:1.0.0' deeptools --version > version_deeptools.txt
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deeptools3.5.0:1.0.0' bamCoverage -p 20 -b ./test_data/bam/small/Q-Y5F6_1M.se.sorted.deduped.bam -o Q-Y5F6_1M.se.bw
+    - singularity run 'docker://gudmaprbk/deeptools3.5.0:1.0.0' deeptools --version > version_deeptools.txt
+    - singularity run 'docker://gudmaprbk/deeptools3.5.0:1.0.0' bamCoverage -p 20 -b ./test_data/bam/small/Q-Y5F6_1M.se.sorted.deduped.bam -o Q-Y5F6_1M.se.bw
     - pytest -m makeBigWig
   artifacts:
     name: "$CI_JOB_NAME"
@@ -341,7 +343,7 @@ dataQC:
     - >
      for i in {"chr8","chr4","chrY"}; do
      echo "tin.py -i ./test_data/bam/small/Q-Y5F6_1M.se.sorted.deduped.${i}.bam -r /project/BICF/BICF_Core/shared/gudmap/references/new/GRCh38.p13.v36/data/annotation/genome.bed; cat Q-Y5F6_1M.se.sorted.deduped.${i}.tin.xls | tr -s \"\\w\" \"\\t\" | grep -P \"\\t${i}\\t\";"
-      done | export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/rseqc4.0.0:1.0.0' parallel -j 20 -k >> Q-Y5F6_1M.se.sorted.deduped.tin.xls
+      done | singularity run 'docker://gudmaprbk/rseqc4.0.0:1.0.0' parallel -j 20 -k >> Q-Y5F6_1M.se.sorted.deduped.tin.xls
     - pytest -m dataQC
 
 uploadInputBag:
@@ -358,12 +360,12 @@ uploadInputBag:
     - >
       md5=$(md5sum ./test.txt | awk '{ print $1 }') &&
       size=$(wc -c < ./test.txt) &&
-      exist=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' curl -s https://staging.gudmap.org/ermrest/catalog/2/entity/RNASeq:Input_Bag/File_MD5=${md5}) &&
+      exist=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' curl -s https://staging.gudmap.org/ermrest/catalog/2/entity/RNASeq:Input_Bag/File_MD5=${md5}) &&
       if [ "${exist}" == "[]" ]; then
       cookie=$(cat credential.json | grep -A 1 '\"staging.gudmap.org\": {' | grep -o '\"cookie\": \".*\"') &&
       cookie=${cookie:11:-1} &&
-      loc=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host staging.gudmap.org put ./test.txt /hatrac/resources/rnaseq/pipeline/input_bag/TEST/test.txt --parents) &&
-      rid=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/upload_input_bag.py -f test.txt -l ${loc} -s ${md5} -b ${size} -n 'This is a test input bag' -o staging.gudmap.org -c ${cookie}) &&
+      loc=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host staging.gudmap.org put ./test.txt /hatrac/resources/rnaseq/pipeline/input_bag/TEST/test.txt --parents) &&
+      rid=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/upload_input_bag.py -f test.txt -l ${loc} -s ${md5} -b ${size} -n 'This is a test input bag' -o staging.gudmap.org -c ${cookie}) &&
       echo ${rid} test input bag created
       else
      rid=$(echo ${exist} | grep -o '\"RID\":\".*\",\"RCT') &&
@@ -382,16 +384,16 @@ uploadExecutionRun:
   script:
     - ln -sfn `readlink -e ./test_data/auth/credential.json` ./credential.json
     - >
-      exist=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' curl -s https://staging.gudmap.org/ermrest/catalog/2/entity/RNASeq:Execution_Run/Replicate=17-BTFJ) &&
+      exist=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' curl -s https://staging.gudmap.org/ermrest/catalog/2/entity/RNASeq:Execution_Run/Replicate=17-BTFJ) &&
       cookie=$(cat credential.json | grep -A 1 '\"staging.gudmap.org\": {' | grep -o '\"cookie\": \".*\"') &&
       cookie=${cookie:11:-1} &&
       if [ "${exist}" == "[]" ]; then
-      rid=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/upload_execution_run.py -r 17-BTFJ -w 17-BV2Y -g 17-BV90 -i 17-BTFT -s Success -d 'This is a test execution run' -o staging.gudmap.org -c ${cookie} -u F) &&
+      rid=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/upload_execution_run.py -r 17-BTFJ -w 17-BV2Y -g 17-BV90 -i 17-BTFT -s Success -d 'This is a test execution run' -o staging.gudmap.org -c ${cookie} -u F) &&
       echo ${rid} test execution run created
       else
       rid=$(echo ${exist} | grep -o '\"RID\":\".*\",\"RCT') &&
       rid=${rid:7:-6} &&
-      rid=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/upload_execution_run.py -r 17-BTFJ -w 17-BV2Y -g 17-BV90 -i 17-BTFT -s Success -d 'This is a test execution run' -o staging.gudmap.org -c ${cookie} -u ${rid}) &&
+      rid=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/upload_execution_run.py -r 17-BTFJ -w 17-BV2Y -g 17-BV90 -i 17-BTFT -s Success -d 'This is a test execution run' -o staging.gudmap.org -c ${cookie} -u ${rid}) &&
       echo ${rid} test execution run already exists
       fi
 
@@ -406,17 +408,17 @@ uploadQC:
   script:
     - ln -sfn `readlink -e ./test_data/auth/credential.json` ./credential.json
     - >
-      exist=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' curl -s https://staging.gudmap.org/ermrest/catalog/2/entity/RNASeq:mRNA_QC/Replicate=17-BTFJ) &&
+      exist=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' curl -s https://staging.gudmap.org/ermrest/catalog/2/entity/RNASeq:mRNA_QC/Replicate=17-BTFJ) &&
       cookie=$(cat credential.json | grep -A 1 '\"staging.gudmap.org\": {' | grep -o '\"cookie\": \".*\"') &&
       cookie=${cookie:11:-1} &&
      if [ "${exist}" != "[]" ]; then
      rids=$(echo ${exist} | grep -o '\"RID\":\".\{7\}' | sed 's/^.\{7\}//') &&
      for rid in ${rids}; do
-      export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/delete_entry.py -r ${rid} -t mRNA_QC -o staging.gudmap.org -c ${cookie}
+      singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/delete_entry.py -r ${rid} -t mRNA_QC -o staging.gudmap.org -c ${cookie}
      done
      echo all old mRNA QC RIDs deleted
      fi
-      rid=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/upload_qc.py -r 17-BTFJ -e 17-BVDJ -p "Single End" -s forward -l 35 -w 5 -f 1 -t 1 -n "This is a test mRNA QC" -o staging.gudmap.org -c ${cookie} -u F)
+      rid=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/upload_qc.py -r 17-BTFJ -e 17-BVDJ -p "Single End" -s forward -l 35 -w 5 -f 1 -t 1 -n "This is a test mRNA QC" -o staging.gudmap.org -c ${cookie} -u F)
      echo ${rid} test mRNA QC created
 
 uploadProcessedFile:
@@ -433,20 +435,20 @@ uploadProcessedFile:
     - mkdir -p ./deriva/Seq/pipeline/17-BTFE/17-BVDJ/
     - mv 17-BTFJ_test.csv ./deriva/Seq/pipeline/17-BTFE/17-BVDJ/17-BTFJ_test.csv
     - >
-      exist=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' curl -s https://staging.gudmap.org/ermrest/catalog/2/entity/RNASeq:Processed_File/Replicate=17-BTFJ) &&
+      exist=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' curl -s https://staging.gudmap.org/ermrest/catalog/2/entity/RNASeq:Processed_File/Replicate=17-BTFJ) &&
       cookie=$(cat credential.json | grep -A 1 '\"staging.gudmap.org\": {' | grep -o '\"cookie\": \".*\"') &&
       cookie=${cookie:11:-1} &&
      if [ "${exist}" != "[]" ]; then
      rids=$(echo ${exist} | grep -o '\"RID\":\".\{7\}' | sed 's/^.\{7\}//') &&
      for rid in ${rids}; do
-      export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/delete_entry.py -r ${rid} -t Processed_File -o staging.gudmap.org -c ${cookie}
+      singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/delete_entry.py -r ${rid} -t Processed_File -o staging.gudmap.org -c ${cookie}
      done
      echo all old processed file RIDs deleted
      fi
-      export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-upload-cli --catalog 2 --token ${cookie:9} staging.gudmap.org ./deriva
+      singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-upload-cli --catalog 2 --token ${cookie:9} staging.gudmap.org ./deriva
      echo test processed file uploaded
     - mkdir test
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' bdbag test --archiver zip
+    - singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' bdbag test --archiver zip
     - echo test output bag created
     - pytest -m outputBag
 
@@ -464,12 +466,12 @@ uploadOutputBag:
     - >
      md5=$(md5sum ./test.txt | awk '{ print $1 }') &&
      size=$(wc -c < ./test.txt) &&
-      exist=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' curl -s https://staging.gudmap.org/ermrest/catalog/2/entity/RNASeq:Output_Bag/File_MD5=${md5}) &&
+      exist=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' curl -s https://staging.gudmap.org/ermrest/catalog/2/entity/RNASeq:Output_Bag/File_MD5=${md5}) &&
      if [ "${exist}" == "[]" ]; then
      cookie=$(cat credential.json | grep -A 1 '\"staging.gudmap.org\": {' | grep -o '\"cookie\": \".*\"') &&
      cookie=${cookie:11:-1} &&
-      loc=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host staging.gudmap.org put ./test.txt /hatrac/resources/rnaseq/pipeline/output_bag/TEST/test.txt --parents) &&
-      rid=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/upload_output_bag.py -e 17-BVDJ -f test.txt -l ${loc} -s ${md5} -b ${size} -n 'This is a test output bag' -o staging.gudmap.org -c ${cookie}) &&
+      loc=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host staging.gudmap.org put ./test.txt /hatrac/resources/rnaseq/pipeline/output_bag/TEST/test.txt --parents) &&
+      rid=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/upload_output_bag.py -e 17-BVDJ -f test.txt -l ${loc} -s ${md5} -b ${size} -n 'This is a test output bag' -o staging.gudmap.org -c ${cookie}) &&
      echo ${rid} test output bag created
      else
      rid=$(echo ${exist} | grep -o '\"RID\":\".*\",\"RCT') &&
@@ -487,7 +489,7 @@ generateVersions:
     - merge_requests
     - schedules
   script:
-    - export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/multiqc1.9:1.0.0' multiqc --version > version_multiqc.txt
+    - singularity run 'docker://gudmaprbk/multiqc1.9:1.0.0' multiqc --version > version_multiqc.txt
     - python ./workflow/scripts/generate_versions.py -o software_versions
     - python ./workflow/scripts/generate_references.py -r ./docs/references.md -o software_references
   artifacts:
@@ -545,7 +547,7 @@ human_dev:
     - loc=$(dirname ${refURL})
     - if [ "${loc}" = "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
     - filename=$(echo $(basename ${refURL}) | grep -oP '.*(?=:)')
-    - test=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
+    - test=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
     - test=$(echo ${test} | grep -o ${filename})
     - if [ "${test}" == "" ]; then echo "reference file not present"; exit 1; fi
 
@@ -571,7 +573,7 @@ mouse_dev:
     - loc=$(dirname ${refURL})
     - if [ "${loc}" = "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
     - filename=$(echo $(basename ${refURL}) | grep -oP '.*(?=:)')
-    - test=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
+    - test=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
     - test=$(echo ${test} | grep -o ${filename})
     - if [ "${test}" == "" ]; then echo "reference file not present"; exit 1; fi
 
@@ -597,7 +599,7 @@ human_staging:
     - loc=$(dirname ${refURL})
     - if [ "${loc}" = "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
     - filename=$(echo $(basename ${refURL}) | grep -oP '.*(?=:)')
-    - test=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
+    - test=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
    - test=$(echo ${test} | grep -o ${filename})
     - if [ "${test}" == "" ]; then echo "reference file not present"; exit 1; fi
 
@@ -624,7 +626,7 @@ mouse_staging:
     - loc=$(dirname ${refURL})
     - if [ "${loc}" = "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
     - filename=$(echo $(basename ${refURL}) | grep -oP '.*(?=:)')
-    - test=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
+    - test=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
     - test=$(echo ${test} | grep -o ${filename})
     - if [ "${test}" == "" ]; then echo "reference file not present"; exit 1; fi
 
@@ -650,7 +652,7 @@ human_prod:
     - loc=$(dirname ${refURL})
     - if [ "${loc}" = "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
     - filename=$(echo $(basename ${refURL}) | grep -oP '.*(?=:)')
-    - test=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
+    - test=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
     - test=$(echo ${test} | grep -o ${filename})
     - if [ "${test}" == "" ]; then echo "reference file not present"; exit 1; fi
 
@@ -677,7 +679,7 @@ mouse_prod:
     - loc=$(dirname ${refURL})
     - if [ "${loc}" = "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
    - filename=$(echo $(basename ${refURL}) | grep -oP '.*(?=:)')
-    - test=$(export SINGULARITY_CACHEDIR=/project/BICF/BICF_Core/shared/gudmap/singularity_cache/ && singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
+    - test=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
     - test=$(echo ${test} | grep -o ${filename})
     - if [ "${test}" == "" ]; then echo "reference file not present"; exit 1; fi