#!/usr/bin/env nextflow

//  ########  ####  ######  ########
//  ##     ##  ##  ##    ## ##
//  ##     ##  ##  ##       ##
//  ########   ##  ##       ######
//  ##     ##  ##  ##       ##
//  ##     ##  ##  ##    ## ##
//  ########  ####  ######  ##
// Define input variables
params.deriva = "${baseDir}/../test_data/auth/credential.json"
params.bdbag = "${baseDir}/../test_data/auth/cookies.txt"
//params.repRID = "16-1ZX4"
params.repRID = "Q-Y5F6"
params.source = "dev"
params.refMoVersion = "38.p6.vM22"
params.refHuVersion = "38.p12.v31"
params.refERCCVersion = "92"
params.outDir = "${baseDir}/../output"
params.upload = true
params.email = ""

// Define override input variables
params.refSource = "biohpc"
params.inputBagForce = ""
params.fastqsForce = ""
params.speciesForce = ""
// Parse input variables
deriva = Channel
  .fromPath(params.deriva)
  .ifEmpty { exit 1, "deriva credential file not found: ${params.deriva}" }
deriva.into {
  deriva_getBag
  deriva_getRefInfer
  deriva_getRef
  deriva_uploadInputBag
  deriva_uploadExecutionRun
  deriva_uploadQC
  deriva_uploadProcessedFile
  deriva_uploadOutputBag
}
bdbag = Channel
  .fromPath(params.bdbag)
  .ifEmpty { exit 1, "deriva cookie file for bdbag not found: ${params.bdbag}" }
repRID = params.repRID
refMoVersion = params.refMoVersion
refHuVersion = params.refHuVersion
refERCCVersion = params.refERCCVersion
outDir = params.outDir
logsDir = "${outDir}/Logs"
upload = params.upload
inputBagForce = params.inputBagForce
fastqsForce = params.fastqsForce
speciesForce = params.speciesForce
email = params.email

// Define fixed files
replicateExportConfig = Channel.fromPath("${baseDir}/conf/Replicate_For_Input_Bag.json")
executionRunExportConfig = Channel.fromPath("${baseDir}/conf/Execution_Run_For_Output_Bag.json")
if (params.source == "dev") {
  source = "dev.gudmap.org"
} else if (params.source == "staging") {
  source = "staging.gudmap.org"
} else if (params.source == "production") {
  source = "www.gudmap.org"
}
if (params.refSource == "biohpc") {
  referenceBase = "/project/BICF/BICF_Core/shared/gudmap/references"
//} else if (params.refSource == "aws") {
//  referenceBase = "s3://bicf-references"
} else if (params.refSource == "datahub") {
  referenceBase = "dev.gudmap.org"
}
referenceInfer = Channel.fromList(["ERCC","GRCh","GRCm"])
multiqcConfig = Channel.fromPath("${baseDir}/conf/multiqc_config.yaml")
bicfLogo = Channel.fromPath("${baseDir}/../docs/bicf_logo.png")
softwareReferences = Channel.fromPath("${baseDir}/../docs/software_references_mqc.yaml")
softwareVersions = Channel.fromPath("${baseDir}/../docs/software_versions_mqc.yaml")
// Define script files
script_bdbagFetch = Channel.fromPath("${baseDir}/scripts/bdbag_fetch.sh")
script_parseMeta = Channel.fromPath("${baseDir}/scripts/parse_meta.py")
script_inferMeta = Channel.fromPath("${baseDir}/scripts/infer_meta.sh")
script_refDataInfer = Channel.fromPath("${baseDir}/scripts/extract_ref_data.py")
script_refData = Channel.fromPath("${baseDir}/scripts/extract_ref_data.py")
script_calculateTPM = Channel.fromPath("${baseDir}/scripts/calculateTPM.R")
script_convertGeneSymbols = Channel.fromPath("${baseDir}/scripts/convertGeneSymbols.R")
script_tinHist = Channel.fromPath("${baseDir}/scripts/tin_hist.py")
script_uploadInputBag = Channel.fromPath("${baseDir}/scripts/upload_input_bag.py")
script_uploadExecutionRun = Channel.fromPath("${baseDir}/scripts/upload_execution_run.py")
script_uploadQC = Channel.fromPath("${baseDir}/scripts/upload_qc.py")
script_uploadOutputBag = Channel.fromPath("${baseDir}/scripts/upload_output_bag.py")
script_deleteEntry_uploadQC = Channel.fromPath("${baseDir}/scripts/delete_entry.py")
script_deleteEntry_uploadProcessedFile = Channel.fromPath("${baseDir}/scripts/delete_entry.py")
/*
 * trackStart: track start of pipeline
 */
params.ci = false
params.dev = false
process trackStart {
  container 'docker://bicf/bicfbase:2.1.0'
  script:
  """
  hostname
  ulimit -a

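  # post run metadata (session ID, start time, versions, status) to the BICF pipeline-tracking endpoint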
  curl -H 'Content-Type: application/json' -X PUT -d \
    '{ \
      "sessionId": "${workflow.sessionId}", \
      "pipeline": "gudmap.rbk_rnaseq", \
      "start": "${workflow.start}", \
      "repRID": "${repRID}", \
      "astrocyte": false, \
      "status": "started", \
      "nextflowVersion": "${workflow.nextflow.version}", \
      "pipelineVersion": "${workflow.manifest.version}", \
      "ci": ${params.ci}, \
      "dev": ${params.dev} \
    }' \
    "https://xku43pcwnf.execute-api.us-east-1.amazonaws.com/ProdDeploy/pipeline-tracking"
  """
}

log.info """\
====================================
BICF RNA-seq Pipeline for GUDMAP/RBK
====================================
Replicate RID          : ${params.repRID}
Source Server          : ${params.source}
Mouse Reference Version: ${params.refMoVersion}
Human Reference Version: ${params.refHuVersion}
ERCC Reference Version : ${params.refERCCVersion}
Reference source       : ${params.refSource}
Output Directory       : ${params.outDir}
------------------------------------
Nextflow Version       : ${workflow.nextflow.version}
Pipeline Version       : ${workflow.manifest.version}
Session ID             : ${workflow.sessionId}
------------------------------------
CI                     : ${params.ci}
Development            : ${params.dev}
------------------------------------
"""

/*
 * getBag: download the input bag (bdbag) for the replicate
 */
process getBag {
  tag "${repRID}"
  publishDir "${outDir}/inputBag", mode: 'copy', pattern: "Replicate_*.zip"

  input:
    path credential, stageAs: "credential.json" from deriva_getBag
    path replicateExportConfig

  output:
    path ("*.zip") into bag

  when:
    inputBagForce == ""

  script:
    """
    hostname > ${repRID}.getBag.log
    ulimit -a >> ${repRID}.getBag.log

    # link credential file for authentication
    echo -e "LOG: linking deriva credentials" >> ${repRID}.getBag.log
    mkdir -p ~/.deriva
    ln -sf `readlink -e credential.json` ~/.deriva/credential.json
    echo -e "LOG: linked" >> ${repRID}.getBag.log

    # deriva-download replicate RID
    echo -e "LOG: fetching bag for ${repRID} in GUDMAP" >> ${repRID}.getBag.log
    deriva-download-cli ${source} --catalog 2 ${replicateExportConfig} . rid=${repRID}
    echo -e "LOG: fetched" >> ${repRID}.getBag.log

    name=\$(ls *.zip)
    name=\$(basename \${name} | cut -d "." -f1)
    yr=\$(date +'%Y')
    mn=\$(date +'%m')
    dy=\$(date +'%d')
    mv \${name}.zip \${name}_\${yr}\${mn}\${dy}.zip
    """
}

// Set inputBag to downloaded or forced input
if (inputBagForce != "") {
  inputBag = Channel
    .fromPath(inputBagForce)
    .ifEmpty { exit 1, "override inputBag file not found: ${inputBagForce}" }
} else {
  inputBag = bag
}
inputBag.into {
  inputBag_getData
  inputBag_uploadInputBag
}
/*
 * getData: fetch study files from consortium with downloaded bdbag.zip
 */
process getData {
  tag "${repRID}"

  input:
    path script_bdbagFetch
    path cookies, stageAs: "deriva-cookies.txt" from bdbag
    path inputBag from inputBag_getData

  output:
    path ("*.R{1,2}.fastq.gz") into fastqs
    path ("**/File.csv") into fileMeta
    path ("**/Experiment Settings.csv") into experimentSettingsMeta
    path ("**/Experiment.csv") into experimentMeta

  script:
    """
    hostname > ${repRID}.getData.log
    ulimit -a >> ${repRID}.getData.log
    # link deriva cookie for authentication
    echo -e "LOG: linking deriva cookie" >> ${repRID}.getData.log
    mkdir -p ~/.bdbag
    ln -sf `readlink -e deriva-cookies.txt` ~/.bdbag/deriva-cookies.txt
    echo -e "LOG: linked" >> ${repRID}.getData.log

    # get bag basename
    replicate=\$(basename "${inputBag}")
    echo -e "LOG: bag replicate name \${replicate}" >> ${repRID}.getData.log

    # unzip bag
    echo -e "LOG: unzipping replicate bag" >> ${repRID}.getData.log
    unzip ${inputBag}
    echo -e "LOG: unzipped" >> ${repRID}.getData.log

    # fetch only the fastqs from the bag and rename them by repRID
    echo -e "LOG: fetching replicate bdbag" >> ${repRID}.getData.log
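    # drop the trailing "_YYYYMMDD.zip" (13 characters) from the bag filename to get the extracted bag directory name passed to the fetch script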
    sh ${script_bdbagFetch} \${replicate::-13} ${repRID}
    echo -e "LOG: fetched" >> ${repRID}.getData.log
    """
}

// Set raw fastq to downloaded or forced input and replicate them for multiple process inputs
if (fastqsForce != "") {
  Channel
    .fromPath(fastqsForce)
    .ifEmpty { exit 1, "override fastqs file not found: ${fastqsForce}" }
    .collect().into {
      fastqs_trimData
    }
} else {
  fastqs.into {
    fastqs_trimData
  }
}
/*
 * parseMetadata: parses metadata to extract experiment parameters
*/
process parseMetadata {
  tag "${repRID}"

  input:
    path script_parseMeta
    path file from fileMeta
    path experimentSettings, stageAs: "ExperimentSettings.csv" from experimentSettingsMeta
    path experiment from experimentMeta

  output:
    path "design.csv" into metadata_fl

  script:
    """
    hostname > ${repRID}.parseMetadata.log
    ulimit -a >> ${repRID}.parseMetadata.log

    # check replicate RID metadata
    rep=\$(python3 ${script_parseMeta} -r ${repRID} -m "${file}" -p repRID)
    echo -e "LOG: replicate RID metadata parsed: \${rep}" >> ${repRID}.parseMetadata.log
    # get experiment RID metadata
    exp=\$(python3 ${script_parseMeta} -r ${repRID} -m "${file}" -p expRID)
    echo -e "LOG: experiment RID metadata parsed: \${exp}" >> ${repRID}.parseMetadata.log

    # get study RID metadata
    study=\$(python3 ${script_parseMeta} -r ${repRID} -m "${file}" -p studyRID)
    echo -e "LOG: study RID metadata parsed: \${study}" >> ${repRID}.parseMetadata.log
    # get endedness metadata
    endsMeta=\$(python3 ${script_parseMeta} -r ${repRID} -m "${experimentSettings}" -p endsMeta)
    echo -e "LOG: endedness metadata parsed: \${endsMeta}" >> ${repRID}.parseMetadata.log

    # manually get endedness
    endsManual=\$(python3 ${script_parseMeta} -r ${repRID} -m "${file}" -p endsManual)
    echo -e "LOG: endedness manually detected: \${endsManual}" >> ${repRID}.parseMetadata.log
    # get strandedness metadata
    stranded=\$(python3 ${script_parseMeta} -r ${repRID} -m "${experimentSettings}" -p stranded)
    echo -e "LOG: strandedness metadata parsed: \${stranded}" >> ${repRID}.parseMetadata.log

    # get spike-in metadata
    spike=\$(python3 ${script_parseMeta} -r ${repRID} -m "${experimentSettings}" -p spike)
    echo -e "LOG: spike-in metadata parsed: \${spike}" >> ${repRID}.parseMetadata.log

    # get species metadata
    species=\$(python3 ${script_parseMeta} -r ${repRID} -m "${experiment}" -p species)
    echo -e "LOG: species metadata parsed: \${species}" >> ${repRID}.parseMetadata.log
    # get read length metadata
    readLength=\$(python3 ${script_parseMeta} -r ${repRID} -m "${experimentSettings}" -p readLength)
    if [ "\${readLength}" = "nan" ]
    then
      readLength="NA"
    fi
    echo -e "LOG: read length metadata parsed: \${readLength}" >> ${repRID}.parseMetadata.log

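    # write parsed metadata to design.csv in the order: ends (meta), ends (manual), stranded, spike, species, read length, experiment RID, study RID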
    echo -e "\${endsMeta},\${endsManual},\${stranded},\${spike},\${species},\${readLength},\${exp},\${study}" > design.csv
    """
}

// Split metadata into separate channels
endsMeta = Channel.create()
endsManual = Channel.create()
strandedMeta = Channel.create()
spikeMeta = Channel.create()
speciesMeta = Channel.create()
readLengthMeta = Channel.create()
expRID = Channel.create()
studyRID = Channel.create()
metadata_fl.splitCsv(sep: ",", header: false).separate(
  endsMeta,
  endsManual,
  strandedMeta,
  spikeMeta,
  speciesMeta,
  readLengthMeta,
  expRID,
  studyRID
)

// Replicate metadata for multiple process inputs
endsManual.into {
  endsManual_trimData
  endsManual_downsampleData
  endsManual_alignSampleData
  endsManual_aggrQC
}
studyRID.into {
  studyRID_aggrQC
  studyRID_uploadInputBag
  studyRID_uploadProcessedFile
  studyRID_uploadOutputBag
}
expRID.into {
  expRID_aggrQC
  expRID_uploadProcessedFile
}


/*
 * trimData: trims any adapter or non-host sequences from the data
*/
process trimData {
  tag "${repRID}"

  input:
    path (fastq) from fastqs_trimData
    val ends from endsManual_trimData

  output:
    path ("*.fq.gz") into fastqsTrim
    path ("*.fastq.gz", includeInputs:true) into fastqs_fastqc
    path ("*_trimming_report.txt") into trimQC
    path ("readLength.csv") into readLengthInfer_fl

  script:
    """
    hostname > ${repRID}.trimData.log
    ulimit -a >> ${repRID}.trimData.log
    # trim fastq's using trim_galore and extract median read length
    echo -e "LOG: trimming ${ends}" >> ${repRID}.trimData.log
    if [ "${ends}" == "se" ]
    then
      trim_galore --gzip -q 25 --length 35 --basename ${repRID} ${fastq[0]}
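      # median read length: take the sequence lines (every 4th line starting at line 2), sort their lengths, and report the middle value (mean of the two middle values when the count is even)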
      readLength=\$(zcat *_trimmed.fq.gz | awk '{if(NR%4==2) print length(\$1)}' | sort -n | awk '{a[NR]=\$0}END{print(NR%2==1)?a[int(NR/2)+1]:(a[NR/2]+a[NR/2+1])/2}')
    elif [ "${ends}" == "pe" ]
    then
      trim_galore --gzip -q 25 --length 35 --paired --basename ${repRID} ${fastq[0]} ${fastq[1]}
      readLength=\$(zcat *_1.fq.gz | awk '{if(NR%4==2) print length(\$1)}' | sort -n | awk '{a[NR]=\$0}END{print(NR%2==1)?a[int(NR/2)+1]:(a[NR/2]+a[NR/2+1])/2}')
    fi
    echo -e "LOG: trimmed" >> ${repRID}.trimData.log
    echo -e "LOG: average trimmed read length: \${readLength}" >> ${repRID}.trimData.log

    # save read length file
    echo -e "\${readLength}" > readLength.csv
    """
}

// Extract calculated read length metadata into channel
readLengthInfer = Channel.create()
readLengthInfer_fl.splitCsv(sep: ",", header: false).separate(
  readLengthInfer
)

// Replicate inferred read length for multiple process inputs
readLengthInfer.into {
  readLengthInfer_aggrQC
  readLengthInfer_uploadQC
}
// Replicate trimmed fastq's for multiple process inputs
fastqsTrim.into {
  fastqsTrim_alignData
  fastqsTrim_downsampleData
}

// Combine inputs of getRefInfer
getRefInferInput = referenceInfer.combine(deriva_getRefInfer.combine(script_refDataInfer))

/*
  * getRefInfer: downloads appropriate reference for metadata inference
*/
process getRefInfer {

  input:
    tuple val (refName), path (credential, stageAs: "credential.json"), path (script_refDataInfer) from getRefInferInput

  output:
    tuple val (refName), path ("hisat2", type: 'dir'), path ("*.fna"), path ("*.gtf")  into refInfer
    path ("${refName}", type: 'dir') into bedInfer

  script:
    """
    hostname > ${repRID}.${refName}.getRefInfer.log
    ulimit -a >> ${repRID}.${refName}.getRefInfer.log

    # link credential file for authentication
    echo -e "LOG: linking deriva credentials" >> ${repRID}.getRefInfer.log
    mkdir -p ~/.deriva
    ln -sf `readlink -e credential.json` ~/.deriva/credential.json
    echo -e "LOG: linked" >> ${repRID}.getRefInfer.log

    # set the reference name
    if [ "${refName}" == "ERCC" ]
    then
      references=\$(echo ${referenceBase}/ERCC${refERCCVersion})
    elif [ "${refName}" == "GRCm" ]
    then
      references=\$(echo ${referenceBase}/GRCm${refMoVersion})
    elif [ '${refName}' == "GRCh" ]
    then
      references=\$(echo ${referenceBase}/GRCh${refHuVersion})
    else
      echo -e "LOG: ERROR - References could not be set!\nReference found: ${referenceBase}" >> ${repRID}.${refName}.getRefInfer.log
      exit 1
    fi

    # retrieve the appropriate reference from the appropriate location
    echo -e "LOG: fetching ${refName} reference files from ${referenceBase}" >> ${repRID}.${refName}.getRefInfer.log
    if [ ${referenceBase} == "/project/BICF/BICF_Core/shared/gudmap/references" ]
    then
      ln -s "\${references}"/hisat2
      ln -s "\${references}"/bed ${refName}/bed
      ln -s "\${references}"/genome.fna
      ln -s "\${references}"/genome.gtf
    #elif [ ${referenceBase} == "s3://bicf-references" ]
    #then
    #  aws s3 cp "\${references}"/hisat2 ./hisat2 --recursive
    #  aws s3 cp "\${references}"/bed ./${refName}/bed --recursive
    #  aws s3 cp "\${references}"/genome.fna ./
    #  aws s3 cp "\${references}"/genome.gtf ./
    elif [ ${referenceBase} == "dev.gudmap.org" ]
    then
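      # split the reference string (e.g. GRCm38.p6.vM22) into genome version, patch level, and GENCODE annotation version to build the datahub query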
      GRCv=\$(echo \${references} | grep -o ${refName}.* | cut -d '.' -f1)
      GRCp=\$(echo \${references} | grep -o ${refName}.* | cut -d '.' -f2)
      GENCODE=\$(echo \${references} | grep -o ${refName}.* | cut -d '.' -f3)
      if [ "${refName}" != "ERCC" ]
      then
        query=\$(echo 'https://${referenceBase}/ermrest/catalog/2/entity/RNASeq:Reference_Genome/Reference_Version='\${GRCv}'.'\${GRCp}'/Annotation_Version=GENCODE%20'\${GENCODE})
      else
        query=\$(echo 'https://${referenceBase}/ermrest/catalog/2/entity/RNASeq:Reference_Genome/Reference_Version=${refName}${refERCCVersion}/Annotation_Version=${refName}${refERCCVersion}')
      fi
      curl --request GET \${query} > refQuery.json
      refURL=\$(python ${script_refDataInfer} --returnParam URL)
      loc=\$(dirname \${refURL})
      fName=\$(python ${script_refDataInfer} --returnParam fName)
      fName=\${fName%.*}
      if [ "\${loc}" = "/hatrac/*" ]; then echo "LOG: Reference not present in hatrac"; exit 1; fi
      filename=\$(echo \$(basename \${refURL}) | grep -oP '.*(?=:)')
      deriva-hatrac-cli --host ${referenceBase} get \${refURL}
      unzip \$(basename \${refURL})
      mv \${fName}/data/* .
    fi
    echo -e "LOG: fetched" >> ${repRID}.${refName}.getRefInfer.log

    # make blank bed folder for ERCC
    echo -e "LOG: making dummy bed folder for ERCC" >> ${repRID}.${refName}.getRefInfer.log
    if [ "${refName}" == "ERCC" ]
    then
      rm -rf ${refName}/bed
      mkdir ${refName}/bed
      touch ${refName}/bed/temp
    elif [ ${referenceBase} == "dev.gudmap.org" ]
    then
      mv bed ${refName}/
    fi
    """
}

/*
 * downsampleData: downsample fastq's for metadata inference
 */
process downsampleData {
  tag "${repRID}"

  input:
    path fastq from fastqsTrim_downsampleData
    val ends from endsManual_downsampleData

  output:
    path ("sampled.1.fq") into fastqs1Sample
    path ("sampled.2.fq") into fastqs2Sample

  script:
    """
    hostname > ${repRID}.downsampleData.log
    ulimit -a >> ${repRID}.downsampleData.log

    if [ "${ends}" == "se" ]
    then
      echo -e "LOG: downsampling SE trimmed fastq" >> ${repRID}.downsampleData.log
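      # downsample with seqtk using a fixed seed (-s100) so sampling is reproducible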
      seqtk sample -s100 *trimmed.fq.gz 100000 1> sampled.1.fq
      touch sampled.2.fq
    elif [ "${ends}" == "pe" ]
    then
      echo -e "LOG: downsampling R1 of PE trimmed fastq" >> ${repRID}.downsampleData.log
      seqtk sample -s100 *1.fq.gz 1000000 1> sampled.1.fq
      echo -e "LOG: downsampling R2 of PE trimmed fastq" >> ${repRID}.downsampleData.log
      seqtk sample -s100 *2.fq.gz 1000000 1> sampled.2.fq
    fi
    echo -e "LOG: downsampled" >> ${repRID}.downsampleData.log
    """
}

// Replicate the downsampled fastqs and attach them to the references
inferInput = endsManual_alignSampleData.combine(refInfer.combine(fastqs1Sample.collect().combine(fastqs2Sample.collect())))

/*
 * alignSampleData: aligns the downsampled reads to a reference database
*/
process alignSampleData {
  tag "${ref}"

  input:
    tuple val (ends), val (ref), path (hisat2), path (fna), path (gtf), path (fastq1), path (fastq2) from inferInput

  output:
    path ("${ref}.sampled.sorted.bam") into sampleBam
    path ("${ref}.sampled.sorted.bam.bai") into sampleBai
    path ("${ref}.alignSampleSummary.txt") into alignSampleQC

  script:
    """
    hostname > ${repRID}.${ref}.alignSampleData.log
    ulimit -a >> ${repRID}.${ref}.alignSampleData.log
    # align the reads with Hisat2
    echo -e "LOG: aligning ${ends}" >> ${repRID}.${ref}.alignSampleData.log
    if [ "${ends}" == "se" ]
    then
      hisat2 -p `nproc` --add-chrname -S ${ref}.sampled.sam -x hisat2/genome -U ${fastq1} --summary-file ${ref}.alignSampleSummary.txt --new-summary
    elif [ "${ends}" == "pe" ]
    then
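      # --no-mixed/--no-discordant: report only concordantly aligned pairs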
      hisat2 -p `nproc` --add-chrname -S ${ref}.sampled.sam -x hisat2/genome --no-mixed --no-discordant -1 ${fastq1} -2 ${fastq2} --summary-file ${ref}.alignSampleSummary.txt --new-summary
    fi
    echo -e "LOG: aligned" >> ${repRID}.${ref}.alignSampleData.log

    # convert the output sam file to a sorted bam file using Samtools
    echo -e "LOG: converting from sam to bam" >> ${repRID}.${ref}.alignSampleData.log
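    # -1: fast BAM compression; -F 4 -F 8 -F 256: drop unmapped reads, reads with an unmapped mate, and secondary alignments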
    samtools view -1 -@ `nproc` -F 4 -F 8 -F 256 -o ${ref}.sampled.bam ${ref}.sampled.sam
    # sort the bam file using Samtools
    echo -e "LOG: sorting the bam file" >> ${repRID}.${ref}.alignSampleData.log
    samtools sort -@ `nproc` -O BAM -o ${ref}.sampled.sorted.bam ${ref}.sampled.bam
    # index the sorted bam using Samtools
    echo -e "LOG: indexing sorted bam file" >> ${repRID}.${ref}.alignSampleData.log
    samtools index -@ `nproc` -b ${ref}.sampled.sorted.bam ${ref}.sampled.sorted.bam.bai
    """
}

alignSampleQC.into {
  alignSampleQC_inferMetadata
  alignSampleQC_aggrQC
}

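/*
 * inferMetadata: infer experimental metadata (spike-in, species, endedness, strandedness) from the downsampled alignments
*/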
process inferMetadata {
  tag "${repRID}"

  input:
    path script_inferMeta
    path beds from bedInfer.collect()
    path bam from sampleBam.collect()
    path bai from sampleBai.collect()
    path alignSummary from alignSampleQC_inferMetadata.collect()

  output:
    path "infer.csv" into inferMetadata_fl
    path "${repRID}.infer_experiment.txt" into inferExperiment

  script:
    """
    hostname > ${repRID}.inferMetadata.log
    ulimit -a >> ${repRID}.inferMetadata.log
    # collect alignment rates (round down to integers)
    align_ercc=\$(echo \$(grep "Overall alignment rate" ERCC.alignSampleSummary.txt | cut -f2 -d ':' | cut -f2 -d ' ' | tr -d '%'))
    align_ercc=\$(echo \${align_ercc%.*})
    echo -e "LOG: alignment rate to ERCC: \${align_ercc}" >> ${repRID}.inferMetadata.log
    align_hu=\$(echo \$(grep "Overall alignment rate" GRCh.alignSampleSummary.txt | cut -f2 -d ':' | cut -f2 -d ' ' | tr -d '%'))
    align_hu=\$(echo \${align_hu%.*})
    echo -e "LOG: alignment rate to GRCh: \${align_hu}" >> ${repRID}.inferMetadata.log
    align_mo=\$(echo \$(grep "Overall alignment rate" GRCm.alignSampleSummary.txt | cut -f2 -d ':' | cut -f2 -d ' ' | tr -d '%'))
    align_mo=\$(echo \${align_mo%.*})
    echo -e "LOG: alignment rate to GRCm: \${align_mo}" >> ${repRID}.inferMetadata.log
    # determine spike-in
    if [ 1 -eq \$(echo \$(expr \${align_ercc} ">=" 10)) ]
    then
      spike="yes"
    else
      spike="no"
    fi
    echo -e "LOG: inference of spike-in results in: \${spike}" >> ${repRID}.inferMetadata.log
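    # determine species from alignment rates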
    if [ 1 -eq \$(echo \$(expr \${align_hu} ">=" 40)) ] && [ 1 -eq \$(echo \$(expr \${align_mo} "<" 40)) ]
    then
      species="Homo sapiens"
      bam="GRCh.sampled.sorted.bam"
      bed="./GRCh/bed/genome.bed"
    elif [ 1 -eq \$(echo \$(expr \${align_mo} ">=" 40)) ] && [ 1 -eq \$(echo \$(expr \${align_hu} "<" 40)) ]
    then
      species="Mus musculus"
      bam="GRCm.sampled.sorted.bam"
      bed="./GRCm/bed/genome.bed"
    else
      echo -e "LOG: ERROR - inference of species returns an ambiguous result: hu=\${align_hu} mo=\${align_mo}" >> ${repRID}.inferMetadata.log
      if [ "${speciesForce}" == "" ]
      then
        exit 1
      fi
    fi
    if [ "${speciesForce}" != "" ]
    then
      echo -e "LOG: species overridden to: ${speciesForce}"
      species="${speciesForce}"
      if [ "${speciesForce}" == "Homo sapiens" ]
      then
        bam="GRCh.sampled.sorted.bam"
        bed="./GRCh/bed/genome.bed"
      elif [ "${speciesForce}" == "Mus musculus" ]
      then
        bam="GRCm.sampled.sorted.bam"
        bed="./GRCm/bed/genome.bed"
      fi
    fi
    echo -e "LOG: inference of species results in: \${species}" >> ${repRID}.inferMetadata.log

    # infer experimental setting from the downsampled bam
    echo -e "LOG: inferring experimental setting from the downsampled bam" >> ${repRID}.inferMetadata.log
    infer_experiment.py -r "\${bed}" -i "\${bam}" 1>> ${repRID}.infer_experiment.txt
    echo -e "LOG: inferred" >> ${repRID}.inferMetadata.log
    ended=`bash ${script_inferMeta} endness ${repRID}.infer_experiment.txt`
    fail=`bash ${script_inferMeta} fail ${repRID}.infer_experiment.txt`
    if [ \${ended} == "PairEnd" ]
    then
      ends="pe"
      percentF=`bash ${script_inferMeta} pef ${repRID}.infer_experiment.txt`
      percentR=`bash ${script_inferMeta} per ${repRID}.infer_experiment.txt`
    elif [ \${ended} == "SingleEnd" ]
    then
      ends="se"
      percentF=`bash ${script_inferMeta} sef ${repRID}.infer_experiment.txt`
      percentR=`bash ${script_inferMeta} ser ${repRID}.infer_experiment.txt`
    fi
    echo -e "LOG: percentage reads in the same direction as gene: \${percentF}" >> ${repRID}.inferMetadata.log
    echo -e "LOG: percentage reads in the opposite direction as gene: \${percentR}" >> ${repRID}.inferMetadata.log
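    # \${percentF#*.} and \${percentR#*.} keep only the digits after the decimal point, so comparing against 2500 tests whether the fraction exceeds 0.25 (assuming four decimal places from infer_experiment.py)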
    if [ 1 -eq \$(echo \$(expr \${percentF#*.} ">" 2500)) ] && [ 1 -eq \$(echo \$(expr \${percentR#*.} "<" 2500)) ]
    then
      stranded="forward"
    elif [ 1 -eq \$(echo \$(expr \${percentR#*.} ">" 2500)) ] && [ 1 -eq \$(echo \$(expr \${percentF#*.} "<" 2500)) ]
    then
      stranded="reverse"
    else
      stranded="unstranded"
    fi
    echo -e "LOG: strandedness set to: \${stranded}" >> ${repRID}.inferMetadata.log

    # write inferred metadata to file
    echo "\${ends},\${stranded},\${spike},\${species},\${align_ercc},\${align_hu},\${align_mo},\${percentF},\${percentR},\${fail}" 1>> infer.csv
    """
}

// Split metadata into separate channels
endsInfer = Channel.create()
strandedInfer = Channel.create()
spikeInfer = Channel.create()
speciesInfer = Channel.create()
align_erccInfer = Channel.create()
align_huInfer = Channel.create()
align_moInfer = Channel.create()
percentFInfer = Channel.create()
percentRInfer = Channel.create()
failInfer = Channel.create()
inferMetadata_fl.splitCsv(sep: ",", header: false).separate(
  endsInfer,
  strandedInfer,
  spikeInfer,
  speciesInfer,
  align_erccInfer,
  align_huInfer,
  align_moInfer,
  percentFInfer,
  percentRInfer,
  failInfer
)
// Replicate metadata for multiple process inputs
endsInfer.into {
  endsInfer_alignData
  endsInfer_countData
  endsInfer_dataQC
  endsInfer_aggrQC
  endsInfer_uploadQC
}
strandedInfer.into {
  strandedInfer_alignData
  strandedInfer_countData
  strandedInfer_aggrQC
  strandedInfer_uploadQC
}
spikeInfer.into{
  spikeInfer_getRef
  spikeInfer_aggrQC
  spikeInfer_uploadExecutionRun
}
speciesInfer.into {
  speciesInfer_getRef
  speciesInfer_aggrQC
  speciesInfer_uploadExecutionRun
  speciesInfer_uploadProcessedFile
}

/*
  * getRef: downloads appropriate reference
*/
process getRef {

  input:
    path script_refData
    path credential, stageAs: "credential.json" from deriva_getRef
    val spike from spikeInfer_getRef
    val species from speciesInfer_getRef

  output:
    tuple path ("hisat2", type: 'dir'), path ("bed", type: 'dir'), path ("*.fna"), path ("*.gtf"), path ("geneID.tsv"), path ("Entrez.tsv")  into reference

  script:
    """
    hostname > ${repRID}.getRef.log
    ulimit -a >> ${repRID}.getRef.log
    # link credential file for authentication
    echo -e "LOG: linking deriva credentials" >> ${repRID}.getRef.log
    mkdir -p ~/.deriva
    ln -sf `readlink -e credential.json` ~/.deriva/credential.json
    echo -e "LOG: linked" >> ${repRID}.getRef.log

    # set the reference name
    if [ "${species}" == "Mus musculus" ]
    then
      references=\$(echo ${referenceBase}/GRCm${refMoVersion})
      refName=GRCm
    elif [ '${species}' == "Homo sapiens" ]
    then
      references=\$(echo ${referenceBase}/GRCh${refHuVersion})
      refName=GRCh
    else
      echo -e "LOG: ERROR - References could not be set!\nSpecies reference found: ${species}" >> ${repRID}.getRef.log
      exit 1
    fi
    if [ "${spike}" == "yes" ]
    then
      references=\$(echo \${references}-S/)
    elif [ "${spike}" == "no" ]
    then
      references=\$(echo \${references}/)
    fi
    echo -e "LOG: references set to \${references}" >> ${repRID}.getRef.log
    # retrieve the appropriate reference from the appropriate location
    echo -e "LOG: fetching ${species} reference files from ${referenceBase}" >> ${repRID}.getRef.log
    if [ ${referenceBase} == "/project/BICF/BICF_Core/shared/gudmap/references" ]
    then
      echo -e "LOG: grabbing reference files from local (BioHPC)" >> ${repRID}.getRef.log
      ln -s "\${references}"/hisat2
      ln -s "\${references}"/bed
      ln -s "\${references}"/genome.fna
      ln -s "\${references}"/genome.gtf
      ln -s "\${references}"/geneID.tsv
      ln -s "\${references}"/Entrez.tsv
    #elif [ ${referenceBase} == "s3://bicf-references" ]
    #then
    #  echo -e "LOG: grabbing reference files from S3" >> ${repRID}.getRef.log
    #  aws s3 cp "\${references}"/hisat2 ./hisat2 --recursive
    #  aws s3 cp "\${references}"/bed ./bed --recursive
    #  aws s3 cp "\${references}"/genome.fna ./
    #  aws s3 cp "\${references}"/genome.gtf ./
    #  aws s3 cp "\${references}"/geneID.tsv ./
    #  aws s3 cp "\${references}"/Entrez.tsv ./
    elif [ ${referenceBase} == "dev.gudmap.org" ]
    then
      echo -e "LOG: grabbing reference files from datahub" >> ${repRID}.getRef.log
      GRCv=\$(echo \${references} | grep -o \${refName}.* | cut -d '.' -f1)
      GRCp=\$(echo \${references} | grep -o \${refName}.* | cut -d '.' -f2)
      GENCODE=\$(echo \${references} | grep -o \${refName}.* | cut -d '.' -f3)
      query=\$(echo 'https://${referenceBase}/ermrest/catalog/2/entity/RNASeq:Reference_Genome/Reference_Version='\${GRCv}'.'\${GRCp}'/Annotation_Version=GENCODE%20'\${GENCODE})
      curl --request GET \${query} > refQuery.json
      refURL=\$(python ${script_refData} --returnParam URL)
      loc=\$(dirname \${refURL})
      fName=\$(python ${script_refData} --returnParam fName)
      fName=\${fName%.*}
      if [ "\${loc}" = "/hatrac/*" ]; then echo "LOG: Reference not present in hatrac"; exit 1; fi
      filename=\$(echo \$(basename \${refURL}) | grep -oP '.*(?=:)')
      deriva-hatrac-cli --host ${referenceBase} get \${refURL}
      unzip \$(basename \${refURL})
      mv \${fName}/data/* .
    fi
    echo -e "LOG: fetched" >> ${repRID}.getRef.log

    """
}

// Replicate reference for multiple process inputs
reference.into {
  reference_alignData
  reference_countData
  reference_dataQC
}

/*
 * alignData: aligns the reads to a reference database
*/
process alignData {

  input:
    path fastq from fastqsTrim_alignData
    path reference_alignData
    val ends from endsInfer_alignData
    val stranded from strandedInfer_alignData

  output:
    tuple path ("${repRID}.sorted.bam"), path ("${repRID}.sorted.bam.bai") into rawBam
    path ("*.alignSummary.txt") into alignQC

  script:
    """
    hostname > ${repRID}.align.log
    ulimit -a >> ${repRID}.align.log
    # set stranded param for hisat2
    if [ "${stranded}" == "unstranded" ]
    then
      strandedParam=""
    elif [ "${stranded}" == "forward" ] && [ "${ends}" == "se" ]
    then
        strandedParam="--rna-strandness F"
    elif [ "${stranded}" == "forward" ] && [ "${ends}" == "pe" ]
    then
      strandedParam="--rna-strandness FR"
    elif [ "${stranded}" == "reverse" ] && [ "${ends}" == "se" ]
    then
        strandedParam="--rna-strandness R"
    elif [ "${stranded}" == "reverse" ] && [ "${ends}" == "pe" ]
    then
      strandedParam="--rna-strandness RF"
    fi

    # align the reads with Hisat2
    echo -e "LOG: aligning ${ends}" >> ${repRID}.align.log
    if [ "${ends}" == "se" ]
    then
      hisat2 -p `nproc` --add-chrname --un-gz ${repRID}.unal.gz -S ${repRID}.sam -x hisat2/genome \${strandedParam} -U ${fastq[0]} --summary-file ${repRID}.alignSummary.txt --new-summary
    elif [ "${ends}" == "pe" ]
    then
      hisat2 -p `nproc` --add-chrname --un-gz ${repRID}.unal.gz -S ${repRID}.sam -x hisat2/genome \${strandedParam} --no-mixed --no-discordant -1 ${fastq[0]} -2 ${fastq[1]} --summary-file ${repRID}.alignSummary.txt --new-summary
    fi
    echo -e "LOG: aligned" >> ${repRID}.align.log

    # convert the output sam file to a sorted bam file using Samtools
    echo -e "LOG: converting from sam to bam" >> ${repRID}.align.log
    samtools view -1 -@ `nproc` -F 4 -F 8 -F 256 -o ${repRID}.bam ${repRID}.sam
    # sort the bam file using Samtools
    echo -e "LOG: sorting the bam file" >> ${repRID}.align.log
    samtools sort -@ `nproc` -O BAM -o ${repRID}.sorted.bam ${repRID}.bam
    # index the sorted bam using Samtools
    echo -e "LOG: indexing sorted bam file" >> ${repRID}.align.log
    samtools index -@ `nproc` -b ${repRID}.sorted.bam ${repRID}.sorted.bam.bai
    """
}

// Replicate rawBam for multiple process inputs
rawBam.into {
  rawBam_dedupData
}

/*
 * dedupData: mark the duplicate reads, specifically focused on PCR or optical duplicates
*/
process dedupData {
  tag "${repRID}"
  publishDir "${outDir}/bam", mode: 'copy', pattern: "*.deduped.bam"

  input:
    tuple path (bam), path (bai) from rawBam_dedupData

  output:
    tuple path ("${repRID}_sorted.deduped.bam"), path ("${repRID}_sorted.deduped.bam.bai") into dedupBam
    tuple path ("${repRID}_sorted.deduped.*.bam"), path ("${repRID}_sorted.deduped.*.bam.bai") into dedupChrBam
    path ("*.deduped.Metrics.txt") into dedupQC

  script:
    """
    hostname > ${repRID}.dedup.log
    ulimit -a >> ${repRID}.dedup.log

    # remove duplicated reads using Picard's MarkDuplicates
    echo -e "LOG: deduplicating reads" >> ${repRID}.dedup.log
    java -jar /picard/build/libs/picard.jar MarkDuplicates I=${bam} O=${repRID}.deduped.bam M=${repRID}.deduped.Metrics.txt REMOVE_DUPLICATES=true
    echo -e "LOG: deduplicated" >> ${repRID}.dedup.log
    # sort the bam file using Samtools
    echo -e "LOG: sorting the bam file" >> ${repRID}.dedup.log
    samtools sort -@ `nproc` -O BAM -o ${repRID}_sorted.deduped.bam ${repRID}.deduped.bam

    # index the sorted bam using Samtools
    echo -e "LOG: indexing sorted bam file" >> ${repRID}.dedup.log
    samtools index -@ `nproc` -b ${repRID}_sorted.deduped.bam ${repRID}_sorted.deduped.bam.bai

    # split the deduped BAM file for multi-threaded tin calculation
    for i in `samtools view ${repRID}_sorted.deduped.bam | cut -f3 | sort | uniq`;
    do
      echo "echo \"LOG: splitting each chromosome into its own BAM and BAI files with Samtools\"; samtools view -b ${repRID}_sorted.deduped.bam \${i} 1>> ${repRID}_sorted.deduped.\${i}.bam; samtools index -@ `nproc` -b ${repRID}_sorted.deduped.\${i}.bam ${repRID}_sorted.deduped.\${i}.bam.bai"
    done | parallel -j `nproc` -k
    """
}

// Replicate dedup bam/bai for multiple process inputs
dedupBam.into {
  dedupBam_countData
  dedupBam_makeBigWig
  dedupBam_dataQC
  dedupBam_uploadProcessedFile
}
/*
 *makeBigWig: make BigWig files for output
*/
process makeBigWig {
  tag "${repRID}"
  publishDir "${outDir}/bigwig", mode: 'copy', pattern: "${repRID}.bw"

  input:
    tuple path (bam), path (bai) from dedupBam_makeBigWig

  output:
    path ("${repRID}_sorted.deduped.bw") into bigwig

  script:
    """
    hostname > ${repRID}.makeBigWig.log
    ulimit -a >> ${repRID}.makeBigWig.log
    # create bigwig