Commit ac1c64c4 authored by Gervaise Henry

Add median TIN to mRNA_QC table

parent 64fc509b
Merge requests: !58 Develop, !57 Prep of 1.0.0 release
Pipeline #8928 canceled in 4 minutes and 28 seconds
@@ -389,7 +389,7 @@ uploadQC:
       done
       echo all old mRNA QC RIDs deleted
     fi
-    rid=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/upload_qc.py -r 17-BTFJ -e 17-BVDJ -p "Single Read" -s forward -l 35 -w 5 -f 1 -n "This is a test mRNA QC" -o staging.gudmap.org -c ${cookie} -u F)
+    rid=$(singularity run 'docker://gudmaprbk/deriva1.4:1.0.0' python3 ./workflow/scripts/upload_qc.py -r 17-BTFJ -e 17-BVDJ -p "Single Read" -s forward -l 35 -w 5 -f 1 -t 1 -n "This is a test mRNA QC" -o staging.gudmap.org -c ${cookie} -u F)
     echo ${rid} test mRNA QC created
 uploadProcessedFile:
...
# v1.0.0 (in development)
**User Facing**
* Add link to reference builder script
+* Output median TIN to mRNA_QC table

**Background**
* Change consistency test to check if +/- 1% of standard
...
@@ -1734,6 +1734,12 @@ tinMedInfer_fl.splitCsv(sep: ",", header: false).separate(
   tinMedInfer
 )
 
+// Replicate inferred median TIN for multiple process inputs
+tinMedInfer.into {
+  tinMedInfer_aggrQC
+  tinMedInfer_uploadQC
+}
+
 /*
  *aggrQC: aggregate QC from processes as well as metadata and run MultiQC
 */
@@ -1769,7 +1775,7 @@ process aggrQC {
     val readLengthI from readLengthInfer_aggrQC
     val rawReadsI from rawReadsInfer_aggrQC
     val assignedReadsI from assignedReadsInfer_aggrQC
-    val tinMedI from tinMedInfer
+    val tinMedI from tinMedInfer_aggrQC
     val studyRID from studyRID_aggrQC
     val expRID from expRID_aggrQC
     val fastqCountError_aggrQC
@@ -1869,6 +1875,7 @@ process uploadQC {
     val length from readLengthInfer_uploadQC
     val rawCount from rawReadsInfer_uploadQC
     val finalCount from assignedReadsInfer_uploadQC
+    val tinMed from tinMedInfer_uploadQC
     val fastqCountError_uploadQC
     val fastqReadError_uploadQC
     val speciesError_uploadQC
@@ -1912,7 +1919,7 @@ process uploadQC {
     echo LOG: all old mRNA QC RIDs deleted >> ${repRID}.uploadQC.log
   fi
-  qc_rid=\$(python3 ${script_uploadQC} -r ${repRID} -e ${executionRunRID} -p "\${end}" -s ${stranded} -l ${length} -w ${rawCount} -f ${finalCount} -o ${source} -c \${cookie} -u F)
+  qc_rid=\$(python3 ${script_uploadQC} -r ${repRID} -e ${executionRunRID} -p "\${end}" -s ${stranded} -l ${length} -w ${rawCount} -f ${finalCount} -t ${tinMed} -o ${source} -c \${cookie} -u F)
   echo LOG: mRNA QC RID uploaded - \${qc_rid} >> ${repRID}.uploadQC.log
   echo "\${qc_rid}" > qcRID.csv
...
@@ -12,6 +12,7 @@ def get_args():
     parser.add_argument('-l', '--length', help="median read length", required=True)
     parser.add_argument('-w', '--rawCount', help="raw count", required=True)
     parser.add_argument('-f', '--assignedCount', help="final assigned count", required=True)
+    parser.add_argument('-t', '--tin', help="median TIN", required=True)
     parser.add_argument('-n', '--notes', help="notes", default="", required=False)
     parser.add_argument('-o', '--host', help="datahub host", required=True)
     parser.add_argument('-c', '--cookie', help="cookie token", required=True)
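For reference, a minimal, self-contained sketch of the extended parser, limited to the argparse options visible in the hunk above (the full upload_qc.py also defines flags such as -r, -e, -p, -s, and -u that appear in the CI test but are not shown here):

#!/usr/bin/env python3
# Sketch only: reproduces the visible upload_qc.py options, including the new -t/--tin flag.
import argparse

def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('-l', '--length', help="median read length", required=True)
    parser.add_argument('-w', '--rawCount', help="raw count", required=True)
    parser.add_argument('-f', '--assignedCount', help="final assigned count", required=True)
    parser.add_argument('-t', '--tin', help="median TIN", required=True)
    parser.add_argument('-n', '--notes', help="notes", default="", required=False)
    parser.add_argument('-o', '--host', help="datahub host", required=True)
    parser.add_argument('-c', '--cookie', help="cookie token", required=True)
    return parser.parse_args()

if __name__ == '__main__':
    args = get_args()
    # argparse stores every value as a string (no type= is given), so the
    # median TIN arrives as text, e.g. "1" from the CI test above.
    print(args.tin)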
@@ -33,6 +34,7 @@ def main(hostname, catalog_number, credential):
         "Median_Read_Length": args.length,
         "Raw_Count": args.rawCount,
         "Final_Count": args.assignedCount,
+        "Median_TIN": args.tin,
         "Notes": args.notes
     }
     entities = run_table.insert([run_data])
@@ -47,6 +49,7 @@ def main(hostname, catalog_number, credential):
         "Median_Read_Length": args.length,
         "Raw_Count": args.rawCount,
         "Final_Count": args.assignedCount,
+        "Median_TIN": args.tin,
         "Notes": args.notes
     }
     entities = run_table.update([run_data])
...
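Putting the pieces together, here is a hedged sketch of how the new Median_TIN value could travel from the command line into the mRNA_QC table using deriva-py's datapath API. The catalog connection and the schema/table lookup ('RNASeq' / 'mRNA_QC') are assumptions for illustration; only the column names and the insert call come from the diff above.

# Illustrative sketch, not the actual upload_qc.py: connection handling and the
# schema path below are assumed, and the real script also supports updating an
# existing mRNA QC record (keyed on its RID) when -u is given.
from deriva.core import ErmrestCatalog, get_credential

def upload_qc_record(host, catalog_number, args):
    credential = get_credential(host)  # the real script builds its credential from the -c cookie token
    catalog = ErmrestCatalog('https', host, catalog_number, credential)
    pb = catalog.getPathBuilder()
    run_table = pb.schemas['RNASeq'].tables['mRNA_QC']  # assumed schema/table names

    run_data = {
        "Median_Read_Length": args.length,
        "Raw_Count": args.rawCount,
        "Final_Count": args.assignedCount,
        "Median_TIN": args.tin,  # new column populated by this commit
        "Notes": args.notes
    }
    # insert() creates a new mRNA_QC row and returns the inserted entities
    entities = run_table.insert([run_data])
    return entities

The CI job above exercises this path end to end with dummy values (-t 1) against staging.gudmap.org, while uploadQC in the pipeline passes the inferred median TIN via -t ${tinMed}.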