diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6ca72df77a31b2de9608bb0a426a46bb6695fc0c..e52467c0c8e4a84daa70860820eaa098ddd6b0ba 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -50,7 +50,7 @@ getData:
     - singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' bdbag --version > version_bdbag.txt
     - ln -sfn `readlink -e ./test_data/auth/cookies.txt` ~/.bdbag/deriva-cookies.txt
     - unzip ./test_data/bag/Q-Y5F6_inputBag.zip
-    - singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' bash ./workflow/scripts/bdbagFetch.sh Q-Y5F6_inputBag Q-Y5F6 TEST
+    - singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' bash ./workflow/scripts/bdbag_fetch.sh Q-Y5F6_inputBag Q-Y5F6 TEST
     - pytest -m getData
   artifacts:
     name: "$CI_JOB_NAME"
@@ -68,15 +68,15 @@ parseMetadata:
     - merge_requests
   script:
    - singularity run 'docker://gudmaprbk/python3:1.0.0' python3 --version > version_python.txt
-    - rep=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parseMeta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p repRID)
-    - exp=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parseMeta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p expRID)
-    - study=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parseMeta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p studyRID)
-    - endsMeta=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parseMeta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p endsMeta)
-    - endsManual=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parseMeta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p endsManual)
-    - stranded=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parseMeta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p stranded)
-    - spike=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parseMeta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p spike)
-    - species=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parseMeta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p species)
-    - readLength=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parseMeta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p readLength)
+    - rep=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p repRID)
+    - exp=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p expRID)
+    - study=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p studyRID)
+    - endsMeta=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p endsMeta)
+    - endsManual=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p endsManual)
+    - stranded=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p stranded)
+    - spike=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p spike)
+    - species=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p species)
+    - readLength=$(singularity run 'docker://gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/parse_meta.py -r Replicate_RID -m "./test_data/meta/metaTest.csv" -p readLength)
     - echo -e "${endsMeta},${endsManual},${stranded},${spike},${species},${readLength},${exp},${study},${rep}" > design.csv
     - pytest -m parseMetadata
   artifacts:
@@ -100,7 +100,7 @@ inferMetadata:
       if [[ ${align} == "" ]]; then exit 1; fi
     - >
       singularity run 'docker://gudmaprbk/rseqc4.0.0:1.0.0' infer_experiment.py -r "/project/BICF/BICF_Core/shared/gudmap/references/GRCh38.p12.v31/bed/genome.bed" -i "./test_data/bam/small/Q-Y5F6_1M.se.sorted.deduped.bam" 1>> Q-Y5F6_1M.se.inferMetadata.log &&
-      ended=`singularity run 'gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/inferMeta.sh endness Q-Y5F6_1M.se.inferMetadata.log` &&
+      ended=`singularity run 'gudmaprbk/python3:1.0.0' python3 ./workflow/scripts/infer_meta.sh endness Q-Y5F6_1M.se.inferMetadata.log` &&
       if [[ ${ended} == "" ]]; then exit 1; fi
     - pytest -m inferMetadata
   artifacts:
@@ -290,7 +290,7 @@ uploadInputBag:
       cookie=$(cat credential.json | grep -A 1 '\"staging.gudmap.org\": {' | grep -o '\"cookie\": \".*\"') &&
       cookie=${cookie:11:-1} &&
       loc=$(singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' deriva-hatrac-cli --host staging.gudmap.org put ./test.txt /hatrac/resources/rnaseq/pipeline/input_bag/TEST/test.txt --parents) &&
-      rid=$(singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/uploadInputBag.py -f test.txt -l ${loc} -s ${md5} -b ${size} -n 'This is a test input bag' -o staging.gudmap.org -c ${cookie}) &&
+      rid=$(singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/upload_input_bag.py -f test.txt -l ${loc} -s ${md5} -b ${size} -n 'This is a test input bag' -o staging.gudmap.org -c ${cookie}) &&
       echo ${rid} test input bag created
       else
       rid=$(echo ${exist} | grep -o '\"RID\":\".*\",\"RCT') &&
@@ -312,12 +312,12 @@ uploadExecutionRun:
       cookie=$(cat credential.json | grep -A 1 '\"staging.gudmap.org\": {' | grep -o '\"cookie\": \".*\"') &&
       cookie=${cookie:11:-1} &&
       if [ "${exist}" == "[]" ]; then
-      rid=$(singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/uploadExecutionRun.py -r 17-BTFJ -w 17-BTFM -g 17-BT50 -i 17-BTFT -s Success -d 'This is a test execution run' -o staging.gudmap.org -c ${cookie} -u F) &&
+      rid=$(singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/upload_execution_run.py -r 17-BTFJ -w 17-BTFM -g 17-BT50 -i 17-BTFT -s Success -d 'This is a test execution run' -o staging.gudmap.org -c ${cookie} -u F) &&
       echo ${rid} test execution run created
       else
       rid=$(echo ${exist} | grep -o '\"RID\":\".*\",\"RCT') &&
       rid=${rid:7:-6} &&
-      rid=$(singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/uploadExecutionRun.py -r 17-BTFJ -w 17-BTFM -g 17-BT50 -i 17-BTFT -s Success -d 'This is a test execution run' -o staging.gudmap.org -c ${cookie} -u ${rid}) &&
+      rid=$(singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/upload_execution_run.py -r 17-BTFJ -w 17-BTFM -g 17-BT50 -i 17-BTFT -s Success -d 'This is a test execution run' -o staging.gudmap.org -c ${cookie} -u ${rid}) &&
       echo ${rid} test execution run already exists
       fi

@@ -337,11 +337,11 @@ uploadQC:
       if [ "${exist}" != "[]" ]; then
       rids=$(echo ${exist} | grep -o '\"RID\":\".\{7\}' | sed 's/^.\{7\}//') &&
       for rid in ${rids}; do
-      singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/deleteEntry.py -r ${rid} -t mRNA_QC -o staging.gudmap.org -c ${cookie}
+      singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/delete_entry.py -r ${rid} -t mRNA_QC -o staging.gudmap.org -c ${cookie}
       done
       echo all old mRNA QC RIDs deleted
       fi
-      rid=$(singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/uploadQC.py -r 17-BTFJ -e 17-BTG4 -p "Single Read" -s forward -l 35 -w 5 -f 1 -n "This is a test mRNA QC" -o staging.gudmap.org -c ${cookie} -u F)
+      rid=$(singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/upload_qc.py -r 17-BTFJ -e 17-BTG4 -p "Single Read" -s forward -l 35 -w 5 -f 1 -n "This is a test mRNA QC" -o staging.gudmap.org -c ${cookie} -u F)
       echo ${rid} test mRNA QC created

 uploadProcessedFile:
@@ -363,7 +363,7 @@ uploadProcessedFile:
       if [ "${exist}" != "[]" ]; then
       rids=$(echo ${exist} | grep -o '\"RID\":\".\{7\}' | sed 's/^.\{7\}//') &&
       for rid in ${rids}; do
-      singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/deleteEntry.py -r ${rid} -t Processed_File -o staging.gudmap.org -c ${cookie}
+      singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/delete_entry.py -r ${rid} -t Processed_File -o staging.gudmap.org -c ${cookie}
       done
       echo all old processed file RIDs deleted
       fi
@@ -392,7 +392,7 @@ uploadOutputBag:
       cookie=$(cat credential.json | grep -A 1 '\"staging.gudmap.org\": {' | grep -o '\"cookie\": \".*\"') &&
       cookie=${cookie:11:-1} &&
       loc=$(singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' deriva-hatrac-cli --host staging.gudmap.org put ./test.txt /hatrac/resources/rnaseq/pipeline/output_bag/TEST/test.txt --parents) &&
-      rid=$(singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/uploadOutputBag.py -e 17-BTG4 -f test.txt -l ${loc} -s ${md5} -b ${size} -n 'This is a test output bag' -o staging.gudmap.org -c ${cookie}) &&
+      rid=$(singularity run 'docker://gudmaprbk/deriva1.3:1.0.0' python3 ./workflow/scripts/upload_output_bag.py -e 17-BTG4 -f test.txt -l ${loc} -s ${md5} -b ${size} -n 'This is a test output bag' -o staging.gudmap.org -c ${cookie}) &&
       echo ${rid} test output bag created
       else
       rid=$(echo ${exist} | grep -o '\"RID\":\".*\",\"RCT') &&
diff --git a/workflow/rna-seq.nf b/workflow/rna-seq.nf
index 3d7222a7720b1c40c50ba65937ed8f25b5cc0fc3..0d3f57d4462138b94049bccfb7b8910b3921f9e9 100644
--- a/workflow/rna-seq.nf
+++ b/workflow/rna-seq.nf
@@ -82,20 +82,20 @@ softwareReferences = Channel.fromPath("${baseDir}/../docs/software_references_mq
 softwareVersions = Channel.fromPath("${baseDir}/../docs/software_versions_mqc.yaml")

 // Define script files
-script_bdbagFetch = Channel.fromPath("${baseDir}/scripts/bdbagFetch.sh")
-script_parseMeta = Channel.fromPath("${baseDir}/scripts/parseMeta.py")
-script_inferMeta = Channel.fromPath("${baseDir}/scripts/inferMeta.sh")
-script_refDataInfer = Channel.fromPath("${baseDir}/scripts/extractRefData.py")
-script_refData = Channel.fromPath("${baseDir}/scripts/extractRefData.py")
+script_bdbagFetch = Channel.fromPath("${baseDir}/scripts/bdbag_fetch.sh")
+script_parseMeta = Channel.fromPath("${baseDir}/scripts/parse_meta.py")
+script_inferMeta = Channel.fromPath("${baseDir}/scripts/infer_meta.sh")
+script_refDataInfer = Channel.fromPath("${baseDir}/scripts/extract_ref_data.py")
+script_refData = Channel.fromPath("${baseDir}/scripts/extract_ref_data.py")
 script_calculateTPM = Channel.fromPath("${baseDir}/scripts/calculateTPM.R")
 script_convertGeneSymbols = Channel.fromPath("${baseDir}/scripts/convertGeneSymbols.R")
-script_tinHist = Channel.fromPath("${baseDir}/scripts/tinHist.py")
-script_uploadInputBag = Channel.fromPath("${baseDir}/scripts/uploadInputBag.py")
-script_uploadExecutionRun = Channel.fromPath("${baseDir}/scripts/uploadExecutionRun.py")
-script_uploadQC = Channel.fromPath("${baseDir}/scripts/uploadQC.py")
-script_uploadOutputBag = Channel.fromPath("${baseDir}/scripts/uploadOutputBag.py")
-script_deleteEntry_uploadQC = Channel.fromPath("${baseDir}/scripts/deleteEntry.py")
-script_deleteEntry_uploadProcessedFile = Channel.fromPath("${baseDir}/scripts/deleteEntry.py")
+script_tinHist = Channel.fromPath("${baseDir}/scripts/tin_hist.py")
+script_uploadInputBag = Channel.fromPath("${baseDir}/scripts/upload_input_bag.py")
+script_uploadExecutionRun = Channel.fromPath("${baseDir}/scripts/upload_execution_run.py")
+script_uploadQC = Channel.fromPath("${baseDir}/scripts/upload_qc.py")
+script_uploadOutputBag = Channel.fromPath("${baseDir}/scripts/upload_output_bag.py")
+script_deleteEntry_uploadQC = Channel.fromPath("${baseDir}/scripts/delete_entry.py")
+script_deleteEntry_uploadProcessedFile = Channel.fromPath("${baseDir}/scripts/delete_entry.py")

 /*
  * trackStart: track start of pipeline
@@ -490,9 +490,9 @@ process getRefInfer {
       query=\$(echo 'https://${referenceBase}/ermrest/catalog/2/entity/RNASeq:Reference_Genome/Reference_Version=${refName}${refERCCVersion}/Annotation_Version=${refName}${refERCCVersion}')
     fi
     curl --request GET \${query} > refQuery.json
-    refURL=\$(python extractRefData.py --returnParam URL)
+    refURL=\$(python ${script_refDataInfer} --returnParam URL)
     loc=\$(dirname \${refURL})
-    fName=\$(python extractRefData.py --returnParam fName)
+    fName=\$(python ${script_refDataInfer} --returnParam fName)
     fName=\${fName%.*}
     if [ "\${loc}" = "/hatrac/*" ]; then echo "LOG: Reference not present in hatrac"; exit 1; fi
     filename=\$(echo \$(basename \${refURL}) | grep -oP '.*(?=:)')
@@ -682,18 +682,18 @@ process inferMetadata {
     infer_experiment.py -r "\${bed}" -i "\${bam}" 1>> ${repRID}.infer_experiment.txt
     echo -e "LOG: infered" >> ${repRID}.inferMetadata.log

-    ended=`bash inferMeta.sh endness ${repRID}.infer_experiment.txt`
-    fail=`bash inferMeta.sh fail ${repRID}.infer_experiment.txt`
+    ended=`bash ${script_inferMeta} endness ${repRID}.infer_experiment.txt`
+    fail=`bash ${script_inferMeta} fail ${repRID}.infer_experiment.txt`
     if [ \${ended} == "PairEnd" ]
     then
       ends="pe"
-      percentF=`bash inferMeta.sh pef ${repRID}.infer_experiment.txt`
-      percentR=`bash inferMeta.sh per ${repRID}.infer_experiment.txt`
+      percentF=`bash ${script_inferMeta} pef ${repRID}.infer_experiment.txt`
+      percentR=`bash ${script_inferMeta} per ${repRID}.infer_experiment.txt`
     elif [ \${ended} == "SingleEnd" ]
     then
       ends="se"
-      percentF=`bash inferMeta.sh sef ${repRID}.infer_experiment.txt`
-      percentR=`bash inferMeta.sh ser ${repRID}.infer_experiment.txt`
+      percentF=`bash ${script_inferMeta} sef ${repRID}.infer_experiment.txt`
+      percentR=`bash ${script_inferMeta} ser ${repRID}.infer_experiment.txt`
     fi
     echo -e "LOG: percentage reads in the same direction as gene: \${percentF}" >> ${repRID}.inferMetadata.log
     echo -e "LOG: percentage reads in the opposite direction as gene: \${percentR}" >> ${repRID}.inferMetadata.log
@@ -840,9 +840,9 @@ process getRef {
     GENCODE=\$(echo \${references} | grep -o \${refName}.* | cut -d '.' -f3)
     query=\$(echo 'https://${referenceBase}/ermrest/catalog/2/entity/RNASeq:Reference_Genome/Reference_Version='\${GRCv}'.'\${GRCp}'/Annotation_Version=GENCODE%20'\${GENCODE})
     curl --request GET \${query} > refQuery.json
-    refURL=\$(python extractRefData.py --returnParam URL)
+    refURL=\$(python ${script_refData} --returnParam URL)
     loc=\$(dirname \${refURL})
-    fName=\$(python extractRefData.py --returnParam fName)
+    fName=\$(python ${script_refData} --returnParam fName)
     fName=\${fName%.*}
     if [ "\${loc}" = "/hatrac/*" ]; then echo "LOG: Reference not present in hatrac"; exit 1; fi
     filename=\$(echo \$(basename \${refURL}) | grep -oP '.*(?=:)')
@@ -1061,11 +1061,11 @@ process countData {

     # calculate TPM from the resulting countData table
     echo -e "LOG: calculating TPM with R" >> ${repRID}.countData.log
-    Rscript calculateTPM.R --count "${repRID}_countData"
+    Rscript ${script_calculateTPM} --count "${repRID}_countData"

     # convert gene symbols to Entrez id's
     echo -e "LOG: convert gene symbols to Entrez id's" >> ${repRID}.countData.log
-    Rscript convertGeneSymbols.R --repRID "${repRID}"
+    Rscript ${script_convertGeneSymbols} --repRID "${repRID}"
     """
 }

@@ -1323,7 +1323,7 @@ process uploadInputBag {
       cookie=\${cookie:11:-1}

       loc=\$(deriva-hatrac-cli --host ${source} put ./\${file} /hatrac/resources/rnaseq/pipeline/input_bag/study/${studyRID}/replicate/${repRID}/\${file} --parents)
-      inputBag_rid=\$(python3 uploadInputBag.py -f \${file} -l \${loc} -s \${md5} -b \${size} -o ${source} -c \${cookie})
+      inputBag_rid=\$(python3 ${script_uploadInputBag} -f \${file} -l \${loc} -s \${md5} -b \${size} -o ${source} -c \${cookie})
       echo LOG: input bag RID uploaded - \${inputBag_rid} >> ${repRID}.uploadInputBag.log
       rid=\${inputBag_rid}
     else
@@ -1397,13 +1397,13 @@ process uploadExecutionRun {
     echo \${exist} >> ${repRID}.uploadExecutionRun.log
     if [ "\${exist}" == "[]" ]
     then
-      executionRun_rid=\$(python3 uploadExecutionRun.py -r ${repRID} -w \${workflow} -g \${genome} -i ${inputBagRID} -s In-progress -d 'Run in process' -o ${source} -c \${cookie} -u F)
+      executionRun_rid=\$(python3 ${script_uploadExecutionRun} -r ${repRID} -w \${workflow} -g \${genome} -i ${inputBagRID} -s In-progress -d 'Run in process' -o ${source} -c \${cookie} -u F)
       echo LOG: execution run RID uploaded - \${executionRun_rid} >> ${repRID}.uploadExecutionRun.log
     else
       rid=\$(echo \${exist} | grep -o '\\"RID\\":\\".*\\",\\"RCT')
       rid=\${rid:7:-6}
       echo \${rid} >> ${repRID}.uploadExecutionRun.log
-      executionRun_rid=\$(python3 uploadExecutionRun.py -r ${repRID} -w \${workflow} -g \${genome} -i ${inputBagRID} -s In-progress -d 'Run in process' -o ${source} -c \${cookie} -u \${rid})
+      executionRun_rid=\$(python3 ${script_uploadExecutionRun} -r ${repRID} -w \${workflow} -g \${genome} -i ${inputBagRID} -s In-progress -d 'Run in process' -o ${source} -c \${cookie} -u \${rid})
       echo LOG: execution run RID updated - \${executionRun_rid} >> ${repRID}.uploadExecutionRun.log
     fi

@@ -1470,13 +1470,13 @@ process uploadQC {
       rids=\$(echo \${exist} | grep -o '\\"RID\\":\\".\\{7\\}' | sed 's/^.\\{7\\}//')
       for rid in \${rids}
       do
-        python3 deleteEntry.py -r \${rid} -t mRNA_QC -o ${source} -c \${cookie}
+        python3 ${script_deleteEntry_uploadQC} -r \${rid} -t mRNA_QC -o ${source} -c \${cookie}
        echo LOG: old mRNA QC RID deleted - \${rid} >> ${repRID}.uploadQC.log
       done
       echo LOG: all old mRNA QC RIDs deleted >> ${repRID}.uploadQC.log
     fi

-    qc_rid=\$(python3 uploadQC.py -r ${repRID} -e ${executionRunRID} -p "\${end}" -s ${stranded} -l ${length} -w ${rawCount} -f ${finalCount} -o ${source} -c \${cookie} -u F)
+    qc_rid=\$(python3 ${script_uploadQC} -r ${repRID} -e ${executionRunRID} -p "\${end}" -s ${stranded} -l ${length} -w ${rawCount} -f ${finalCount} -o ${source} -c \${cookie} -u F)
     echo LOG: mRNA QC RID uploaded - \${qc_rid} >> ${repRID}.uploadQC.log

     echo \${qc_rid} > qcRID.csv
@@ -1536,7 +1536,7 @@ process uploadProcessedFile {
       rids=\$(echo \${exist} | grep -o '\\"RID\\":\\".\\{7\\}' | sed 's/^.\\{7\\}//')
       for rid in \${rids}
       do
-        python3 deleteEntry.py -r \${rid} -t Processed_File -o ${source} -c \${cookie}
+        python3 ${script_deleteEntry_uploadProcessedFile} -r \${rid} -t Processed_File -o ${source} -c \${cookie}
       done
       echo LOG: all old processed file RIDs deleted >> ${repRID}.uploadQC.log
     fi
@@ -1622,7 +1622,7 @@ process uploadOutputBag {
       cookie=\${cookie:11:-1}

       loc=\$(deriva-hatrac-cli --host ${source} put ./\${file} /hatrac/resources/rnaseq/pipeline/output_bag/study/${studyRID}/replicate/${repRID}/\${file} --parents)
-      outputBag_rid=\$(python3 uploadOutputBag.py -e ${executionRunRID} -f \${file} -l \${loc} -s \${md5} -b \${size} -o ${source} -c \${cookie})
+      outputBag_rid=\$(python3 ${script_uploadOutputBag} -e ${executionRunRID} -f \${file} -l \${loc} -s \${md5} -b \${size} -o ${source} -c \${cookie})
       echo LOG: output bag RID uploaded - \${outputBag_rid} >> ${repRID}.uploadOutputBag.log
       rid=\${outputBag_rid}
     else
diff --git a/workflow/scripts/bdbagFetch.sh b/workflow/scripts/bdbag_fetch.sh
similarity index 100%
rename from workflow/scripts/bdbagFetch.sh
rename to workflow/scripts/bdbag_fetch.sh
diff --git a/workflow/scripts/deleteEntry.py b/workflow/scripts/delete_entry.py
similarity index 100%
rename from workflow/scripts/deleteEntry.py
rename to workflow/scripts/delete_entry.py
diff --git a/workflow/scripts/extractRefData.py b/workflow/scripts/extract_ref_data.py
similarity index 100%
rename from workflow/scripts/extractRefData.py
rename to workflow/scripts/extract_ref_data.py
diff --git a/workflow/scripts/inferMeta.sh b/workflow/scripts/infer_meta.sh
similarity index 100%
rename from workflow/scripts/inferMeta.sh
rename to workflow/scripts/infer_meta.sh
diff --git a/workflow/scripts/parseMeta.py b/workflow/scripts/parse_meta.py
similarity index 100%
rename from workflow/scripts/parseMeta.py
rename to workflow/scripts/parse_meta.py
diff --git a/workflow/scripts/splitStudy.py b/workflow/scripts/split_study.py
similarity index 100%
rename from workflow/scripts/splitStudy.py
rename to workflow/scripts/split_study.py
diff --git a/workflow/scripts/splitStudy.sh b/workflow/scripts/split_study.sh
similarity index 100%
rename from workflow/scripts/splitStudy.sh
rename to workflow/scripts/split_study.sh
diff --git a/workflow/scripts/tinHist.py b/workflow/scripts/tin_hist.py
similarity index 100%
rename from workflow/scripts/tinHist.py
rename to workflow/scripts/tin_hist.py
diff --git a/workflow/scripts/uploadExecutionRun.py b/workflow/scripts/upload_execution_run.py
similarity index 100%
rename from workflow/scripts/uploadExecutionRun.py
rename to workflow/scripts/upload_execution_run.py
diff --git a/workflow/scripts/uploadInputBag.py b/workflow/scripts/upload_input_bag.py
similarity index 100%
rename from workflow/scripts/uploadInputBag.py
rename to workflow/scripts/upload_input_bag.py
diff --git a/workflow/scripts/uploadOutputBag.py b/workflow/scripts/upload_output_bag.py
similarity index 100%
rename from workflow/scripts/uploadOutputBag.py
rename to workflow/scripts/upload_output_bag.py
diff --git a/workflow/scripts/uploadQC.py b/workflow/scripts/upload_qc.py
similarity index 100%
rename from workflow/scripts/uploadQC.py
rename to workflow/scripts/upload_qc.py