diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 1ba7c6df4d16a875b093c7322b4da1078a174bce..87348e65a26ec0fdfa75a0d60febe6e755cb26f0 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -18,6 +18,7 @@ variables:
   dir: "/project/BICF/BICF_Core/shared/gudmap/singularity_cache/"
 
 stages:
+  - environment
   - singularity
   - versions
   - aggregation
@@ -138,6 +139,7 @@ build_badges:
     - master
     - develop
     - tags
+    - schedules
   before_script:
     - module load singularity/3.5.3
     - chmod +x ./workflow/scripts/get_updated_badge_info.sh
@@ -158,6 +160,7 @@ pages:
     - master
     - develop
     - tags
+    - schedules
   dependencies:
     - build_badges
   script:
@@ -601,7 +604,7 @@ human_dev:
     - curl --request GET ${query} > refQuery.json
     - refURL=$(python ./workflow/scripts/extract_ref_data.py --returnParam URL)
     - loc=$(dirname ${refURL})
-    - if [ "${loc}" = "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
+    - if [ "${loc}" == "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
     - filename=$(echo $(basename ${refURL}) | grep -oP '.*(?=:)')
     - test=$(singularity run ${dir}${derivaImg}_${derivaVar}.sif deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
     - test=$(echo ${test} | grep -o ${filename})
@@ -630,7 +633,7 @@ mouse_dev:
     - curl --request GET ${query} > refQuery.json
     - refURL=$(python ./workflow/scripts/extract_ref_data.py --returnParam URL)
     - loc=$(dirname ${refURL})
-    - if [ "${loc}" = "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
+    - if [ "${loc}" == "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
     - filename=$(echo $(basename ${refURL}) | grep -oP '.*(?=:)')
     - test=$(singularity run ${dir}${derivaImg}_${derivaVar}.sif deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
     - test=$(echo ${test} | grep -o ${filename})
@@ -659,7 +662,7 @@ human_staging:
     - curl --request GET ${query} > refQuery.json
     - refURL=$(python ./workflow/scripts/extract_ref_data.py --returnParam URL)
     - loc=$(dirname ${refURL})
-    - if [ "${loc}" = "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
+    - if [ "${loc}" == "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
     - filename=$(echo $(basename ${refURL}) | grep -oP '.*(?=:)')
     - test=$(singularity run ${dir}${derivaImg}_${derivaVar}.sif deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
     - test=$(echo ${test} | grep -o ${filename})
@@ -689,7 +692,7 @@ mouse_staging:
     - curl --request GET ${query} > refQuery.json
     - refURL=$(python ./workflow/scripts/extract_ref_data.py --returnParam URL)
     - loc=$(dirname ${refURL})
-    - if [ "${loc}" = "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
+    - if [ "${loc}" == "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
     - filename=$(echo $(basename ${refURL}) | grep -oP '.*(?=:)')
     - test=$(singularity run ${dir}${derivaImg}_${derivaVar}.sif deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
     - test=$(echo ${test} | grep -o ${filename})
@@ -718,7 +721,7 @@ human_prod:
     - curl --request GET ${query} > refQuery.json
     - refURL=$(python ./workflow/scripts/extract_ref_data.py --returnParam URL)
     - loc=$(dirname ${refURL})
-    - if [ "${loc}" = "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
+    - if [ "${loc}" == "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
     - filename=$(echo $(basename ${refURL}) | grep -oP '.*(?=:)')
     - test=$(singularity run ${dir}${derivaImg}_${derivaVar}.sif deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
     - test=$(echo ${test} | grep -o ${filename})
@@ -748,7 +751,7 @@ mouse_prod:
     - curl --request GET ${query} > refQuery.json
     - refURL=$(python ./workflow/scripts/extract_ref_data.py --returnParam URL)
     - loc=$(dirname ${refURL})
-    - if [ "${loc}" = "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
+    - if [ "${loc}" == "/hatrac/*" ]; then echo "reference not present in hatrac"; exit 1; fi
     - filename=$(echo $(basename ${refURL}) | grep -oP '.*(?=:)')
     - test=$(singularity run ${dir}${derivaImg}_${derivaVar}.sif deriva-hatrac-cli --host ${referenceBase} ls ${loc}/)
     - test=$(echo ${test} | grep -o ${filename})
@@ -916,7 +919,7 @@ override_fastq:
   script:
     - hostname
     - ulimit -a
-    - nextflow -q run ./rna-seq.nf --deriva ./test_data/auth/credential.json --bdbag ./test_data/auth/cookies.txt --repRID Q-Y5F6    --source staging --fastqsForce './test_data/fastq/small/Q-Y5F6_1M.R{1,2}.fastq.gz' --upload false --dev false --ci true --track false -with-report ./fastqOverride_report.html
+    - nextflow -q run ./rna-seq.nf --deriva ./test_data/auth/credential.json --bdbag ./test_data/auth/cookies.txt --repRID Q-Y5F6  --source staging --fastqsForce './test_data/fastq/small/Q-Y5F6_1M.R{1,2}.fastq.gz' --upload false --dev false --ci true --track false -with-report ./fastqOverride_report.html
     - find . -type f -name "multiqc_data.json" -exec cp {} ./fastqOverride_multiqc_data.json \;
     - find ./**/report/ -type f -name "*multiqc.html" -exec cp {} ./fastqOverride_multiqc.html \;
     - pytest -m completionMultiqc --filename fastqOverride_multiqc_data.json
@@ -1023,4 +1026,103 @@ consistency:
     paths:
       - SE_multiqc_data.json
       - PE_multiqc_data.json
-    expire_in: 7 days
\ No newline at end of file
+    expire_in: 7 days
+
+
+dnanexus:
+  stage: environment
+  only:
+    variables:
+      - $dnanexusEnv == "true"
+  except:
+    - push
+    - tags
+    - merge_requests
+  script:
+    - hostname
+    - ulimit -a
+    - mkdir -p ./badges/env
+    - curl --request GET https://img.shields.io/badge/Environment%3A%20DNAnexus-inactive-critical?style=flat > ./badges/env/dnanexus.svg
+    - module load dxtoolkit/python27/0.294.0
+    - export NXF_XPACK_LICENSE=${nxf_license}
+    - dx upload ./test_data/auth/c* --path /ci-env/auth/ --parents --auth-token ${dnanexus_authToken} --project-context-id ${dnanexus_workspace}
+    - dx upload ./test_data/fastq/xsmall/Q-Y5F6_10K.R{1,2}.fastq.gz --path /ci-env/input/ --parents --auth-token ${dnanexus_authToken} --project-context-id ${dnanexus_workspace}
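+    # launch the pipeline via the nf-dxapp, logging the launch output so the
+    # job ID can be parsed out, watched, and checked for a failed end state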
+    - >
+      dx run nf-dxapp-bicf --auth-token ${dnanexus_authToken} --project-context-id ${dnanexus_workspace} \
+        --delay-workspace-destruction \
+        --instance-type mem1_ssd1_v2_x16 \
+        --input-json "$(envsubst < ./docs/nxf_dnanexus-ci-test.json)" \
+        > dx.log
+    - >
+      jobID=$(grep -oP "Job ID: \K.*" dx.log)
+    - dx watch ${jobID} --auth-token ${dnanexus_authToken} --project-context-id ${dnanexus_workspace}
+    - status=$(dx find executions --id ${jobID} --state failed --brief --auth-token ${dnanexus_authToken} --project-context-id ${dnanexus_workspace})
+    - >
+      if [ "${status}" == "" ]; then
+        curl --request GET https://img.shields.io/badge/Environment%3A%20DNAnexus-run%20successful-success?style=flat > ./badges/env/dnanexus.svg
+      else
+        curl --request GET https://img.shields.io/badge/Environment%3A%20DNAnexus-run%20failed-critical?style=flat > ./badges/env/dnanexus.svg
+      fi
+  after_script:
+    - module load dxtoolkit/python27/0.294.0
+    - dx rm /ci-env/auth/* --all --auth-token ${dnanexus_authToken} --project-context-id ${dnanexus_workspace}
+    - dx rm /ci-env/input/* --all --auth-token ${dnanexus_authToken} --project-context-id ${dnanexus_workspace}
+  artifacts:
+    when: always
+    paths:
+      - badges/
+  allow_failure: true
+
+aws:
+  stage: environment
+  only:
+    variables:
+      - $awsEnv == "true"
+  except:
+    - push
+    - tags
+    - merge_requests
+  script:
+    - hostname
+    - ulimit -a
+    - mkdir -p ./badges/env
+    - curl --request GET https://img.shields.io/badge/Environment%3A%20AWS-inactive-critical?style=flat > ./badges/env/aws.svg
+    - module load awscli/1.11.139
+    - export AWS_ACCESS_KEY_ID=${aws_accesskeyid}
+    - export AWS_SECRET_ACCESS_KEY=${aws_secretaccesskey}
+    - aws configure set region ${aws_region}
+    - aws s3 cp ./test_data/auth/ s3://bicf-output/ci-env/auth/ --exclude "*" --include "c*" --recursive
+    - aws s3 cp ./test_data/fastq/xsmall/ s3://bicf-output/ci-env/input/ --exclude "*" --include "Q-Y5F6_10K.R*.fastq.gz" --recursive
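+    # submit the pipeline as an AWS Batch job, parse the job ID out of the
+    # response, then poll the job status every 5 minutes until it reaches
+    # SUCCEEDED or FAILED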
+    - >
+      id=$(aws batch submit-job\
+        --job-name nf-GUDMAP_RBK_ci-env\
+        --job-queue default-bicf\
+        --job-definition nextflow-bicf-nextflow\
+        --container-overrides command=$(envsubst < ./docs/nxf_aws-ci-test.json))
      id=$(echo ${id} | grep -oP "jobId\K.*" | tr -d '"' | tr -d ":" | tr -d " " | tr -d "}")
+    - >
+      status=$(aws batch describe-jobs --jobs ${id} | grep -oP "status\": \K.*" | tr -d '"' | tr -d ',' | tr -d " " ) &&
+      until [[ "${status}" == "SUCCEEDED" || "${status}" == "FAILED" ]]; do
+        status=$(aws batch describe-jobs --jobs ${id} | grep -oP "status\": \K.*" | tr -d '"' | tr -d ',' | tr -d " " ) &&
+        echo ${status} &&
+        sleep 5m
+      done
+    - >
+      if [ "${status}" == "SUCCEEDED" ]; then
+        curl --request GET https://img.shields.io/badge/Environment%3A%20AWS-run%20successful-success?style=flat > ./badges/env/aws.svg
+      else
+        curl --request GET https://img.shields.io/badge/Environment%3A%20AWS-run%20failed-critical?style=flat > ./badges/env/aws.svg
+      fi
+  after_script:
+    - module load awscli/1.11.139
+    - export AWS_ACCESS_KEY_ID=${aws_accesskeyid}
+    - export AWS_SECRET_ACCESS_KEY=${aws_secretaccesskey}
+    - aws configure set region ${aws_region}
+    - aws s3 rm s3://bicf-output/ci-env/auth/ --recursive
+    - aws s3 rm s3://bicf-output/ci-env/input/ --recursive
+  artifacts:
+    when: always
+    paths:
+      - badges/
+  allow_failure: true
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d69f5809cbee8d9ecc3723df9b497571310c2403..36020f0d0050b198f7c888891489e5c656bba893 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -35,6 +35,8 @@
 * Don't download fastq's if fastq override present
 * Override fastq count to override counts
 * Change ambiguous species ci to wrong species
+* Add test for DNAnexus env
+* Add test for AWS env
 
 *Known Bugs*
 * Override params (inputBag, fastq, species) aren't checked for integrity
diff --git a/README.md b/README.md
index 419dc687c27a0cdc7fa77dbc0718c550beaa4ee3..c4ec0fdeeeecfa381ad6780193fa0fe37f4f4ba4 100644
--- a/README.md
+++ b/README.md
@@ -74,29 +74,40 @@ FULL EXAMPLE:
 ```
 nextflow run workflow/rna-seq.nf --repRID Q-Y5JA --source production --deriva ./data/credential.json --bdbag ./data/cookies.txt --dev false --upload true -profile biohpc
 ```
-
+<hr>
 Cloud Compatibility:
 --------------------
 This pipeline is also capable of being run on AWS and DNAnexus. To do so:
-### [AWS](https://aws.amazon.com/)
-* Build a AWS batch queue and environment either manually or with [aws-cloudformantion](https://console.aws.amazon.com/cloudformation/home?#/stacks/new?stackName=Nextflow&templateURL=https://s3.amazonaws.com/aws-genomics-workflows/templates/nextflow/nextflow-aio.template.yaml)
-* Edit one of the aws configs in workflow/config/
-  * Replace workDir with the S3 bucket generated
-  * Change region if different
-  * Change queue to the aws batch queue generated
-* The user must have awscli configured with an appropriate authentication (with `aws configure` and access keys) in the environment which nextflow will be run
-* Add `-profile` with the name aws config which was customized
-### DNAnexus (utilizes the [DNAnexus extension package for Nextflow (XPACK-DNANEXUS)](https://github.com/seqeralabs/xpack-dnanexus))
-* Follow the istructions from [XPACK-DNANEXUS](https://github.com/seqeralabs/xpack-dnanexus) about installing and authenticating (a valid license must be available for the extension package from Seqera Labs, as well as a subsription with DNAnexus)
-* The nf-dxapp needs to be built with a custom scm config to allow nextflow to pull the pipeline from the UTSW self-hosted GitLab server (git.biohpc.swmed.edu)
-```
-providers {
+* The Nextflow installation needs a custom SCM config so that Nextflow can pull the pipeline from the UTSW self-hosted GitLab server (git.biohpc.swmed.edu)
+  ```
+  providers {
     bicf {
         server = 'https://git.biohpc.swmed.edu'
         platform = 'gitlab'
     }
-}
-```
+  }
+  ```
+  This is required when `nextflow run` or `nextflow pull` is pointed directly at the git repo, and also for use in the AWS and DNAnexus environments, as both use `nextflow run` directly against that repo. To get around this requirement, there is a clone of the repo hosted on [GitHub](https://github.com/utsw-bicf/gudmap_rbk.rna-seq) which can be used... but the currency of that clone cannot be guaranteed!
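+
+  For example, this provider block can be appended to Nextflow's SCM configuration file (by default `~/.nextflow/scm`); a minimal sketch, assuming a standard Nextflow installation:
+  ```
+  mkdir -p ~/.nextflow
+  echo "providers {
+      bicf {
+          server = 'https://git.biohpc.swmed.edu'
+          platform = 'gitlab'
+      }
+  }" >> ~/.nextflow/scm
+  ```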
+### [AWS](https://aws.amazon.com/)
+* Build an AWS Batch queue and environment, either manually or with a template such as [Genomics Workflows on AWS](https://docs.opendata.aws/genomics-workflows/)
+* The user must have the awscli configured with appropriate authentication (via `aws configure` and access keys) in the environment in which Nextflow will be run
+* Follow the instructions from [AWS](https://docs.aws.amazon.com/cli/latest/reference/batch/submit-job.html) about launching runs using the AWS CLI. A template *json* file has been included ([awsExample.json](docs/awsExample.json))
+  * `[version]` should be replaced with the pipeline version required (eg: `v2.0.0`)
+  * `[credential.json]` should be replaced with the location of the credential file output by authentication with Deriva
+  * `[cookies.txt]` should be replaced with the location of the cookies file output by authentication with Deriva for BDBag
+  * `[repRID]` should be replaced with the replicate RID to be analyzed (eg: `Q-Y5F6`)
+  * `[outDir]` should be replaced with the location to save local outputs of the pipeline
+
+  Example `aws batch submit-job` command (replacing the parameters in `[]` with the appropriate values):
+  ```
+  aws batch submit-job\
+    --job-name [Job Name]\
+    --job-queue [Queue]\
+    --job-definition [Job Definition]\
+    --container-overrides command=$(envsubst < ./docs/nxf_aws-ci-test.json)
+  ```
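+
+  For illustration, a filled-in submission could look like the following (the queue and job definition names are placeholders for whatever your Batch setup created, and `my_awsExample.json` is assumed to be a copy of [awsExample.json](docs/awsExample.json) with its `[]` placeholders filled in):
+  ```
+  aws batch submit-job\
+    --job-name nf-GUDMAP_RBK_Q-Y5F6\
+    --job-queue default-bicf\
+    --job-definition nextflow-bicf-nextflow\
+    --container-overrides command=$(cat ./my_awsExample.json)
+  ```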
+### [DNAnexus](https://dnanexus.com/) (utilizes the [DNAnexus extension package for Nextflow (XPACK-DNANEXUS)](https://github.com/seqeralabs/xpack-dnanexus))
+* Follow the instructions from [XPACK-DNANEXUS](https://github.com/seqeralabs/xpack-dnanexus) about installing and authenticating (a valid license must be available for the extension package from Seqera Labs, as well as a subscription with DNAnexus)
 * Follow the instructions from [XPACK-DNANEXUS](https://github.com/seqeralabs/xpack-dnanexus) about launching runs. A template *json* file has been included ([dnanexusExample.json](docs/dnanexusExample.json))
   * `[version]` should be replaced with the pipeline version required (eg: `v2.0.0`)
   * `[credential.json]` should be replaced with the location of the credential file outpted by authentification with Deriva
@@ -104,12 +115,21 @@ providers {
   * `[repRID]` should be replaced with the replicate RID to be analized (eg: `Q-Y5F6`)
   * `[outDir]` should be replaced with the location to save local outputs of the pipeline
 
+  Example `dx run` command:
+  ```
+  dx run nf-dxapp-bicf \
+    --delay-workspace-destruction \
+    --instance-type mem1_ssd1_v2_x16 \
+    --input-json "$(envsubst < ./docs/nxf_dnanexus-ci-test.json)"
+  ```
+### NOTE:
+* File locations used in cloud deployments (auth files and the output folder) need to be accessible in that environment (eg: an S3 location or a DNAnexus location); local paths cannot be read from within the cloud environment.
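+* For example, the Deriva auth files can be staged ahead of a run with the respective CLIs; a sketch, with bucket and project paths as placeholders:
+  ```
+  # AWS: copy the auth files into an S3 location the pipeline can read
+  aws s3 cp ./credential.json s3://[bucket]/auth/credential.json
+  aws s3 cp ./cookies.txt s3://[bucket]/auth/cookies.txt
+  # DNAnexus: upload the auth files into the project
+  dx upload ./credential.json ./cookies.txt --path /auth/ --parents
+  ```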
+<hr>
 To generate you own references or new references:
 ------------------------------------------
 Download the [reference creation script](https://git.biohpc.swmed.edu/gudmap_rbk/rna-seq/-/snippets/31).
 This script will auto create human and mouse references from GENCODE. It can also create ERCC92 spike-in references as well as concatenate them to GENCODE references automatically. In addition, it can create references from manually downloaded FASTA and GTF files.
-
-
+<hr>
 Errors:
 -------
 Error reported back to the data-hub are (they aren't thrown on the command line by the pipeline, but rather are submitted (if `--upload true`) to the data-hub for that replicate in the execution run submission):
diff --git a/docs/awsExample.json b/docs/awsExample.json
new file mode 100644
index 0000000000000000000000000000000000000000..8e8f39f7785559eb3c941aa46dbb577bea1a6bf4
--- /dev/null
+++ b/docs/awsExample.json
@@ -0,0 +1 @@
+["https://git.biohpc.swmed.edu/gudmap_rbk/rna-seq","-r","[Version]","-profile","aws","--deriva","[credential.json]","--bdbag","[cookies.txt]","--repRID","[repRID]","--outDir","[outDir]"]
diff --git a/docs/nxf_aws-ci-test.json b/docs/nxf_aws-ci-test.json
new file mode 100644
index 0000000000000000000000000000000000000000..de8752b202a05d6ed8f6aea2e1eed845917fcb7d
--- /dev/null
+++ b/docs/nxf_aws-ci-test.json
@@ -0,0 +1 @@
+["utsw-bicf/gudmap_rbk.rna-seq","-r","env.ci","-profile","aws","--deriva","s3://bicf-output/ci-env/auth/credential.json","--bdbag","s3://bicf-output/ci-env/auth/cookies.txt","--repRID","Q-Y5F6","--source","staging","--upload","false","--dev","false","--ci","true","--track","false","-with-report","s3://bicf-output/ci-env/output/Q-Y5F6_fastqoverride_report.html","--refSource","datahub","--outDir","s3://bicf-output/ci-env/output/Q-Y5F6_fastqoverride","--fastqsForce","s3://bicf-output/ci-env/input/*.fastq.gz"]
diff --git a/docs/nxf_dnanexus-ci-test.json b/docs/nxf_dnanexus-ci-test.json
new file mode 100644
index 0000000000000000000000000000000000000000..cb523c7244600f516543012c962b4fd3b60d777f
--- /dev/null
+++ b/docs/nxf_dnanexus-ci-test.json
@@ -0,0 +1,5 @@
+{
+	"pipeline_url": "https://git.biohpc.swmed.edu/gudmap_rbk/rna-seq.git -r env.ci",
+	"args": "-profile dnanexus --deriva dx://NextFlow_Prototype:/ci-env/auth/credential.json --bdbag dx://NextFlow_Prototype:/ci-env/auth/cookies.txt --repRID Q-Y5F6 --source staging --upload false --dev false --ci true --track false -with-report dx://NextFlow_Prototype:/ci-env/output/Q-Y5F6_fastqoverride_report.html --refSource datahub --outDir dx://NextFlow_Prototype:/ci-env/output/Q-Y5F6_fastqoverride --fastqsForce dx://NextFlow_Prototype:/ci-env/input/*.fastq.gz",
+	"license": "$NXF_XPACK_LICENSE"
+}
diff --git a/main.nf b/main.nf
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/nextflow.config b/nextflow.config
index 84c9c000e31aab1bd7fb11e7e8381380493db027..c8983df669cb59384e0914bf06e852a5b381a2b5 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -8,17 +8,12 @@ profiles {
   biohpc_max {
     includeConfig 'nextflowConf/biohpc_max.config'
   }
-  aws_ondemand {
-    includeConfig 'nextflowConf/aws.config'
-    includeConfig 'nextflowConf/ondemand.config'
-  }
-  aws_spot {
-    includeConfig 'nextflowConf/aws.config'
-    includeConfig 'nextflowConf/spot.config'
-  }
   dnanexus {
     includeConfig 'nextflowConf/dnanexus.config'
   }
+  aws {
+    includeConfig 'nextflowConf/aws.config'
+  }
 }
 
 process {
diff --git a/nextflowConf/aws.config b/nextflowConf/aws.config
index bf5b59c7cf9db00606a5db9f97c706d53f21137f..659e7f21c471f5039c11ea4c23e69c7a7c6829d3 100644
--- a/nextflowConf/aws.config
+++ b/nextflowConf/aws.config
@@ -1,127 +1,142 @@
 params {
-  refSource = "aws"
-}
-
-workDir = 's3://gudmap-rbk.output/work'
-aws.client.storageEncryption = 'AES256'
-aws {
-  region = 'us-east-2'
-  batch {
-    cliPath = '/home/ec2-user/miniconda/bin/aws'
-  }
+  refSource = "datahub"
 }
 
 process {
-  executor = 'awsbatch'
-  cpus = 1
-  memory = '1 GB'
-
   withName:trackStart {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
   withName:getBag {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
   withName:getData {
-    cpus = 1
-    memory = '1 GB'
+    cpus = 16
+    memory = '32 GB'
   }
   withName:parseMetadata {
-    cpus = 15
+    executor = 'local'
+    cpus = 1
     memory = '1 GB'
   }
-  withName:trimData {
-    cpus = 20
-    memory = '2 GB'
+  withName:getRefERCC {
+    cpus = 16
+    memory = '32 GB'
+  }
+  withName:getRef {
+    cpus = 16
+    memory = '32 GB'
   }
-  withName:getRefInfer {
+  withName:fastqc {
+    cpus = 16
+    memory = '32 GB'
+  }
+  withName:seqwho {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
+  withName:trimData {
+    cpus = 16
+    memory = '32 GB'
+  }
   withName:downsampleData {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
+  withName:alignSampleDataERCC {
+    cpus = 16
+    memory = '32 GB'
+  }
   withName:alignSampleData {
-    cpus = 50
-    memory = '5 GB'
+    cpus = 16
+    memory = '32 GB'
   }
   withName:inferMetadata {
-    cpus = 5
-    memory = '1 GB'
+    cpus = 16
+    memory = '32 GB'
   }
   withName:checkMetadata {
-    cpus = 1
-    memory = '1 GB'
-  }
-  withName:getRef {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
   withName:alignData {
-    cpus = 50
-    memory = '10 GB'
+    cpus = 16
+    memory = '32 GB'
   }
   withName:dedupData {
-    cpus = 5
-    memory = '20 GB'
+    cpus = 16
+    memory = '32 GB'
   }
   withName:countData {
-    cpus = 2
-    memory = '5 GB'
+    cpus = 16
+    memory = '32 GB'
   }
   withName:makeBigWig {
-    cpus = 15
-    memory = '5 GB'
-  }
-  withName:fastqc {
-    cpus = 1
-    memory = '1 GB'
+    cpus = 16
+    memory = '32 GB'
   }
   withName:dataQC {
-    cpus = 15
-    memory = '2 GB'
+    cpus = 16
+    memory = '32 GB'
   }
   withName:aggrQC {
-    cpus = 2
+    executor = 'local'
+    cpus = 1
     memory = '1 GB'
   }
   withName:uploadInputBag {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
   withName:uploadExecutionRun {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
   withName:uploadQC {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
   withName:uploadProcessedFile {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
   withName:uploadOutputBag {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
   withName:finalizeExecutionRun {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
   withName:failPreExecutionRun {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
   withName:failExecutionRun {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
   withName:uploadQC_fail {
+    executor = 'local'
     cpus = 1
     memory = '1 GB'
   }
 }
+
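+// execute each process within its specified Docker container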
+docker {
+  enabled = true
+}
diff --git a/nextflowConf/dnanexus.config b/nextflowConf/dnanexus.config
index 58531a418a6cc0c31c80e1f155cfe76007b98e8a..d7c10297d2548536ba7deb7c9501b5f1f8de0836 100755
--- a/nextflowConf/dnanexus.config
+++ b/nextflowConf/dnanexus.config
@@ -54,7 +54,9 @@ process {
     memory = '1 GB'
   }
   withName:alignSampleDataERCC {
-    queue = '128GB,256GB,256GBv1,384GB'
+    machineType = 'mem3_ssd1_v2_x16'
+    cpus = 16
+    memory = '32 GB'
   }
   withName:alignSampleData {
     machineType = 'mem3_ssd1_v2_x16'
@@ -73,8 +75,8 @@ process {
   }
   withName:alignData {
     machineType = 'mem3_ssd1_v2_x32'
-    cpus = 32
-    memory = '256 GB'
+    cpus = 16
+    memory = '32 GB'
   }
   withName:dedupData {
     machineType = 'mem1_ssd1_v2_x16'
diff --git a/nextflowConf/ondemand.config b/nextflowConf/ondemand.config
deleted file mode 100755
index 131fdbb19e1fedf1bc9e206a03d801f13791b810..0000000000000000000000000000000000000000
--- a/nextflowConf/ondemand.config
+++ /dev/null
@@ -1,3 +0,0 @@
-process {
-  queue = 'highpriority-0ef8afb0-c7ad-11ea-b907-06c94a3c6390'
-}
diff --git a/nextflowConf/spot.config b/nextflowConf/spot.config
deleted file mode 100755
index d9c7a4c8fa34aadd597da0170f8e3e223923011a..0000000000000000000000000000000000000000
--- a/nextflowConf/spot.config
+++ /dev/null
@@ -1,3 +0,0 @@
-process {
-  queue = 'default-0ef8afb0-c7ad-11ea-b907-06c94a3c6390'
-}
diff --git a/rna-seq.nf b/rna-seq.nf
index c047b3163848be6415c1fc47646345994dc625ca..6f77afd99d636e773e26e1abe9eb9c1012c461d4 100644
--- a/rna-seq.nf
+++ b/rna-seq.nf
@@ -246,7 +246,7 @@ process getData {
   output:
     path ("*.R{1,2}.fastq.gz") into fastqs
     path ("**/File.csv") into fileMeta
-    path ("**/Experiment Settings.csv") into experimentSettingsMeta
+    path ("ExperimentSettings.csv") into experimentSettingsMeta
     path ("**/Experiment.csv") into experimentMeta
     path "fastqCount.csv" into fastqCount_fl
 
@@ -278,8 +278,11 @@ process getData {
     if [ "\${fastqCount}" == "0" ]
     then
       touch dummy.R1.fastq.gz
+      touch dummy.R2.fastq.gz
     fi
     echo "\${fastqCount}" > fastqCount.csv
+
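+    # stage 'Experiment Settings.csv' under a space-free filename so it can be declared as an output path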
+    cp "${repRID}_inputBag/data/Experiment Settings.csv" ExperimentSettings.csv
     """
 }
 
diff --git a/test_data/createTestData.sh b/test_data/createTestData.sh
index 5d876ed032790d0e3442aed94a0fd79e5e430e60..0f0454112ad278e5e032582bf30042f5a41495d2 100644
--- a/test_data/createTestData.sh
+++ b/test_data/createTestData.sh
@@ -31,6 +31,14 @@ pigz Q-Y5F6_1M.R2.fastq
 cp Q-Y5F6_1M.R1.fastq.gz ./NEW_test_data/fastq/small/Q-Y5F6_1M.R1.fastq.gz
 cp Q-Y5F6_1M.R2.fastq.gz ./NEW_test_data/fastq/small/Q-Y5F6_1M.R2.fastq.gz
 
+mkdir -p ./NEW_test_data/fastq/xsmall
+singularity exec 'docker://gudmaprbk/seqtk1.3:1.0.0' seqtk sample -s100 ./NEW_test_data/fastq/Q-Y5F6.R1.fastq.gz 10000 1> Q-Y5F6_10K.R1.fastq
+singularity exec 'docker://gudmaprbk/seqtk1.3:1.0.0' seqtk sample -s100 ./NEW_test_data/fastq/Q-Y5F6.R2.fastq.gz 10000 1> Q-Y5F6_10K.R2.fastq
+pigz Q-Y5F6_10K.R1.fastq
+pigz Q-Y5F6_10K.R2.fastq
+cp Q-Y5F6_10K.R1.fastq.gz ./NEW_test_data/fastq/xsmall/Q-Y5F6_10K.R1.fastq.gz
+cp Q-Y5F6_10K.R2.fastq.gz ./NEW_test_data/fastq/xsmall/Q-Y5F6_10K.R2.fastq.gz
+
 mkdir -p ./NEW_test_data/meta
 singularity run 'docker://gudmaprbk/trimgalore0.6.5:1.0.0' trim_galore --gzip -q 25 --illumina --length 35 --basename Q-Y5F6_1M.se -j 20 ./NEW_test_data/fastq/small/Q-Y5F6_1M.R1.fastq.gz
 singularity run 'docker://gudmaprbk/trimgalore0.6.5:1.0.0' trim_galore --gzip -q 25 --illumina --length 35 --paired --basename Q-Y5F6_1M.pe -j 20 ./NEW_test_data/fastq/small/Q-Y5F6_1M.R1.fastq.gz ./NEW_test_data/fastq/small/Q-Y5F6_1M.R2.fastq.gz
diff --git a/workflow/scripts/get_updated_badge_info.sh b/workflow/scripts/get_updated_badge_info.sh
index 343f1aab251436a53c44dd3bcc352df6b6a542e5..3a5df46c52a6e1fe0cbd41946cdea09c67d1e08e 100644
--- a/workflow/scripts/get_updated_badge_info.sh
+++ b/workflow/scripts/get_updated_badge_info.sh
@@ -51,3 +51,22 @@ curl --request GET https://img.shields.io/badge/FastQC%20Version-${fastqc_versio
 curl --request GET https://img.shields.io/badge/SeqWho%20Version-${seqwho_version}-blueviolet?style=flat > ./badges/tools/seqwho.svg
 curl --request GET https://img.shields.io/badge/RSeQC%20Version-${rseqc_version}-blueviolet?style=flat > ./badges/tools/rseqc.svg
 curl --request GET https://img.shields.io/badge/MultiQC%20Version-${multiqc_version}-blueviolet?style=flat > ./badges/tools/multiqc.svg
+
+echo "creating blank env badges if not tested"
+mkdir -p ./badges/env
+if [ ! -f ./badges/env/dnanexus.svg ]
+then
+  curl --request GET https://img.shields.io/badge/Environment%3A%20DNAnexus-not_tested-important?style=flat > ./badges/env/dnanexus.svg
+fi
+if [ ! -f ./badges/env/aws.svg ]
+then
+  curl --request GET https://img.shields.io/badge/Environment%3A%20AWS-not_tested-important?style=flat > ./badges/env/aws.svg
+fi
+if [ ! -f ./badges/env/azure.svg ]
+then
+  curl --request GET https://img.shields.io/badge/Environment%3A%20Azure-not_tested-important?style=flat > ./badges/env/azure.svg
+fi
+if [ ! -f ./badges/env/gcp.svg ]
+then
+  curl --request GET https://img.shields.io/badge/Environment%3A%20GCP-not_tested-important?style=flat > ./badges/env/gcp.svg
+fi
\ No newline at end of file