Commit 6eee680d authored by Guillaume Jimenez's avatar Guillaume Jimenez
Browse files

Fixed sampledetails export

parent 492fa044
#!/bin/bash
#### To backup the MySQL database daily, call this script from a cron job
# Propagate mysqldump's exit status through the pipe so a failed dump
# is not masked by gzip succeeding.
set -o pipefail

BACKUP_DIR="/opt/nuclia/backups"
# Compute the date stamp once so the log line and the backup filename
# cannot disagree if the script straddles midnight.
STAMP="$(date '+%m-%d-%Y')"

echo "Started backup on ${STAMP}"
# Dump every database into one compressed, date-stamped file.
/bin/mysqldump --all-databases | gzip > "${BACKUP_DIR}/nuclia_${STAMP}.sql.gz"
# Prune compressed backups older than 15 days.
find "${BACKUP_DIR}" -type f -name '*.gz' -mtime +15 -exec rm {} \;
echo "End of backup"
\ No newline at end of file
......@@ -3,6 +3,7 @@ package utsw.bicf.nucliavault.controller;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
......@@ -27,11 +28,13 @@ import org.springframework.web.bind.annotation.ResponseBody;
import com.fasterxml.jackson.core.JsonProcessingException;
import utsw.bicf.nucliavault.controller.serialization.AjaxResponse;
import utsw.bicf.nucliavault.controller.serialization.OrderDetailsPDF;
import utsw.bicf.nucliavault.controller.serialization.vuetify.OrderDetailsSummary;
import utsw.bicf.nucliavault.dao.ModelDAO;
import utsw.bicf.nucliavault.dao.SampleDetailsDAO;
import utsw.bicf.nucliavault.dao.SubjectDAO;
import utsw.bicf.nucliavault.model.Demultiplex;
import utsw.bicf.nucliavault.model.Sample;
import utsw.bicf.nucliavault.model.Subject;
import utsw.bicf.nucliavault.model.hybrid.AccessionSample;
......@@ -77,11 +80,21 @@ public class OrderDetailsController {
if (subject != null) {
List<Sample> samples = subject.getSamples();
if (samples != null) {
List<SampleDetails> sampleDetails = samples.stream()
.filter(sample -> sample != null && sample.getNucType() != null)
.map(sample -> sampleDetailsDAO.getSampleDetails(sample.getSampleId(),
sampleDetailsDAO.getDemultiplexBySampleId(sample.getSampleId())))
.collect(Collectors.toList());
// List<SampleDetails> sampleDetails = samples.stream()
// .filter(sample -> sample != null && sample.getNucType() != null)
// .map(sample -> sampleDetailsDAO.getSampleDetails(sample.getSampleId(),
// sampleDetailsDAO.getDemultiplexBySampleId(sample.getSampleId())))
// .collect(Collectors.toList());
List<SampleDetails> sampleDetails = new ArrayList<SampleDetails>();
for (Sample sample : samples) {
if (sample != null && sample.getNucType() != null) {
List<Demultiplex> dmuxes = sampleDetailsDAO.getDemultiplexBySampleId(sample.getSampleId());
List<SampleDetails> details = sampleDetailsDAO.getSampleDetails(sample.getSampleId(), dmuxes);
sampleDetails.addAll(details);
}
}
List<AccessionSample> accessionSamples = samples.stream()
.filter(sample -> sample != null && sample.getNucType() == null)
.map(sample -> new AccessionSample(sample))
......@@ -90,7 +103,11 @@ public class OrderDetailsController {
return orderDetailsSummary.createVuetifyObjectJSON();
}
}
return null;
AjaxResponse response = new AjaxResponse();
response.setSuccess(false);
response.setIsAllowed(false);
response.setMessage("No order with id :" + subjectId);
return response.createObjectJSON();
} catch (JsonProcessingException e) {
e.printStackTrace();
return null;
......@@ -107,11 +124,19 @@ public class OrderDetailsController {
if (subject != null) {
List<Sample> samples = subject.getSamples();
if (samples != null) {
List<SampleDetails> sampleDetails = samples.stream()
.filter(sample -> sample != null && sample.getNucType() != null && isVisible(sample, sampleIdsVisible))
.map(sample -> sampleDetailsDAO.getSampleDetails(sample.getSampleId(),
sampleDetailsDAO.getDemultiplexBySampleId(sample.getSampleId())))
.collect(Collectors.toList());
// List<SampleDetails> sampleDetails = samples.stream()
// .filter(sample -> sample != null && sample.getNucType() != null && isVisible(sample, sampleIdsVisible))
// .map(sample -> sampleDetailsDAO.getSampleDetails(sample.getSampleId(),
// sampleDetailsDAO.getDemultiplexBySampleId(sample.getSampleId())))
// .collect(Collectors.toList());
List<SampleDetails> sampleDetails = new ArrayList<SampleDetails>();
for (Sample sample : samples) {
if (sample != null && sample.getNucType() != null) {
List<Demultiplex> dmuxes = sampleDetailsDAO.getDemultiplexBySampleId(sample.getSampleId());
List<SampleDetails> details = sampleDetailsDAO.getSampleDetails(sample.getSampleId(), dmuxes);
sampleDetails.addAll(details);
}
}
List<AccessionSample> accessionSamples = samples.stream()
.filter(sample -> sample != null && sample.getNucType() == null)
.map(sample -> new AccessionSample(sample))
......
......@@ -123,7 +123,7 @@ public class APIController {
@RequestMapping("/addPipelineResultsWithProp")
@ResponseBody
public String addPipelineResultsWithProps(Model model, @RequestParam String token,
public String addPipelineResultsWithProp(Model model, @RequestParam String token,
@RequestHeader(value = "User-Agent") String userAgent, @RequestParam String propFilePath) {
try {
// check that token is valid
......@@ -133,7 +133,7 @@ public class APIController {
}
// fetchAndSaveNewData
APIResponse response = new APIResponse();
response.addInfoMessage("Running addPipelineResultsWithProps with params: " + propFilePath);
response.addInfoMessage("Running addPipelineResultsWithProp with params: " + propFilePath);
File propFile = new File(propFilePath);
if (propFile.exists() && propFile.canRead()) {
List<String> lines = Files.readAllLines(propFile.toPath());
......@@ -161,6 +161,40 @@ public class APIController {
return "An error occured when importing from this file: " + propFilePath + ". Look at tomcat logs to know more.";
}
/**
 * Imports somatic pipeline results from a stats file on disk.
 * Validates the caller's pipeline token, then delegates the parsing/persisting
 * to pipelineDAO2.fetchSomaticData.
 *
 * @param model     Spring MVC model (unused here but required by the mapping signature)
 * @param token     pipeline API token; rejected if unknown
 * @param userAgent caller's User-Agent header, used to format the response
 * @param filePath  absolute path to the somatic stats file to import
 * @return a formatted API response, or a plain error string on failure
 */
@RequestMapping("/addPipelineResultsSomatic")
@ResponseBody
public String addPipelineResultsSomatic(Model model, @RequestParam String token,
        @RequestHeader(value = "User-Agent") String userAgent, @RequestParam String filePath) {
    try {
        // check that token is valid
        Token theToken = pipelineDAO.getPipelineToken(token);
        if (theToken == null) {
            return "You are not allowed to run this servlet.";
        }
        // fetchAndSaveNewData
        APIResponse response = new APIResponse();
        response.addInfoMessage("Running addPipelineResultsSomatic with params: " + filePath);
        File somaticFile = new File(filePath);
        if (!somaticFile.exists()) {
            // Missing file: report once and stop; canRead() would also be false
            // here, which previously produced a duplicate error message.
            response.setSuccess(false);
            response.addErrorMessage("Somatic file not found here: " + filePath);
        }
        else if (!somaticFile.canRead()) {
            response.setSuccess(false);
            response.addErrorMessage("Somatic file cannot be read: " + filePath);
        }
        else {
            response.setMessage("Adding Somatic Data: ");
            pipelineDAO2.fetchSomaticData(response, somaticFile);
        }
        return response.createResponse(userAgent, true, true);
    } catch (Exception e) {
        e.printStackTrace();
    }
    return "An error occurred when importing from this file: " + filePath + ". Look at tomcat logs to know more.";
}
@RequestMapping("/syncWithClaritySQL")
@ResponseBody
public String syncWithClaritySQL(Model model, @RequestParam String token, @RequestHeader(value = "User-Agent") String userAgent) throws ParseException, IOException, ClassNotFoundException {
......
......@@ -124,13 +124,16 @@ public abstract class Summary<T> {
List<String> row = new ArrayList<String>();
for (Header header : this.getHeaders()) {
if (header.buttons == null || !header.buttons) {
String value = item.get(header.getValue()).asText(); // get the item value matching the header
// could be a PassableValue
if ((value == null || value.equals("")) && item.get(header.getValue()) != null
&& item.get(header.getValue()).get("value") != null) {
value = item.get(header.getValue()).get("value").asText();
JsonNode node = item.get(header.getValue());
if (node != null) {
String value = item.get(header.getValue()).asText(); // get the item value matching the header
// could be a PassableValue
if ((value == null || value.equals("")) && item.get(header.getValue()) != null
&& item.get(header.getValue()).get("value") != null) {
value = item.get(header.getValue()).get("value").asText();
}
row.add((value == null || value.equals("null")) ? "" : value.replaceAll(",", ""));
}
row.add((value == null || value.equals("null")) ? "" : value.replaceAll(",", ""));
}
}
rows.add(row.stream().collect(Collectors.joining(",")));
......
......@@ -625,6 +625,9 @@ public class ModelDAO {
@Transactional
public List<SomaticStats> getSomaticStatsForSampleIds(List<Integer> dnaSampleIds) {
if (dnaSampleIds == null || dnaSampleIds.isEmpty()) {
return null;
}
Session session = sessionFactory.getCurrentSession();
StringBuilder sql = new StringBuilder(" select * from somatic_stats where sample_1_id in :dnaSampleIds ");
sql.append(" or sample_2_id in :dnaSampleIds");
......
package utsw.bicf.nucliavault.dao;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.persistence.Transient;
import javax.transaction.Transactional;
import org.hibernate.Session;
......@@ -14,6 +17,7 @@ import org.springframework.stereotype.Repository;
import utsw.bicf.nucliavault.model.Demultiplex;
import utsw.bicf.nucliavault.model.Sample;
import utsw.bicf.nucliavault.model.Threshold;
import utsw.bicf.nucliavault.model.hybrid.AvgDemultiplex;
import utsw.bicf.nucliavault.model.hybrid.SampleDetails;
@Repository
......@@ -27,11 +31,27 @@ public class SampleDetailsDAO {
@Transactional
public SampleDetails getSampleDetails(int sampleId, Demultiplex dmux) {
public List<SampleDetails> getSampleDetails(int sampleId, List<Demultiplex> dmuxes) {
Session session = sessionFactory.getCurrentSession();
Sample sample = session.get(Sample.class, sampleId);
List<Threshold> thresholds = modelDAO.getAllThresholds();
return new SampleDetails(sample, dmux, modelDAO, thresholds);
List<SampleDetails> sampleDetails = new ArrayList<SampleDetails>();
if (dmuxes == null || dmuxes.isEmpty()) {
sampleDetails.add(new SampleDetails(sample, null, modelDAO, thresholds));
return sampleDetails;
}
Map<String, List<Demultiplex>> dmuxByRun = dmuxes.stream().collect(Collectors.groupingBy(d -> d.getSeqRun().getSeqRunId()));
for (String runId : dmuxByRun.keySet()) {
List<Demultiplex> dmuxesForRun = dmuxByRun.get(runId);
Long clusters = dmuxesForRun.stream().collect(Collectors.summingLong(d -> d.getClusters()));
Double q30Clusters = dmuxesForRun.stream().collect(Collectors.summingDouble(d -> (double) (d.getClusters() * d.getPercQ30() / 100)));
Double q30Avg = q30Clusters / clusters * 100;
// Create an avg dmux object so that one run will have 1 dmux instead of 4 (1 per lane)
AvgDemultiplex avgDmux = new AvgDemultiplex(dmuxesForRun.get(0));
avgDmux.setPercQ30(q30Avg.floatValue());
sampleDetails.add(new SampleDetails(sample, avgDmux, modelDAO, thresholds));
}
return sampleDetails;
}
@Transactional
......@@ -61,14 +81,14 @@ public class SampleDetailsDAO {
}
@Transactional
public Demultiplex getDemultiplexBySampleId(Integer sampleId) {
public List<Demultiplex> getDemultiplexBySampleId(Integer sampleId) {
Session session = sessionFactory.getCurrentSession();
StringBuilder hql = new StringBuilder("from Demultiplex d")
.append(" where d.sample.sampleId = :sampleId ");
List<Demultiplex> results = session.createQuery(hql.toString(), Demultiplex.class)
.setParameter("sampleId", sampleId).list();
if (results != null && !results.isEmpty()) {
return results.get(0);
if (results != null) {
return results;
}
return null;
}
......
......@@ -104,7 +104,7 @@ public class PipelineDAO2 {
response.addErrorMessage("Run " + props.getRunName() + " could not be found. Check for typos or if the sync with Clarity has not happened yet.");
return;
}
fetchSomaticData(response, props);
// fetchSomaticData(response, props);
Sample sample = sampleDAO.getSampleByName(props.getSampleName());
if (sample == null) {
......@@ -139,19 +139,41 @@ public class PipelineDAO2 {
}
}
// @Transactional
// private void fetchSomaticData(APIResponse response, SampleImportProperties props)
// throws IOException, ParseException {
// File somaticStatsFile = props.getSomaticSeqStats();
// if (somaticStatsFile == null) {
// response.addInfoMessage("No somatic stats file provided.");
// return;
// }
// else if (!somaticStatsFile.exists()) {
// response.addErrorMessage("Somatic stats file not found here: " + somaticStatsFile.getAbsolutePath());
// return;
// }
// SomaticStats stats = SomaticStatsParser.parseFile(somaticStatsFile, sampleDAO);
// // check if somatic data already exists
// List<SomaticStats> duplicates = modelDAO.findSameSomaticStats(stats);
// if (duplicates != null && !duplicates.isEmpty()) {
// response.addInfoMessage("Duplicate Somatic Stats found. No new somatic data added.");
// return;
// }
// sessionFactory.getCurrentSession().saveOrUpdate(stats);
// return;
// }
@Transactional
private void fetchSomaticData(APIResponse response, SampleImportProperties props)
public void fetchSomaticData(APIResponse response, File somaticFile)
throws IOException, ParseException {
File sequenceStatsFile = props.getSomaticSeqStats();
if (sequenceStatsFile == null) {
if (somaticFile == null) {
response.addInfoMessage("No somatic stats file provided.");
return;
}
else if (!sequenceStatsFile.exists()) {
response.addErrorMessage("Somatic stats file not found here: " + sequenceStatsFile.getAbsolutePath());
else if (!somaticFile.exists()) {
response.addErrorMessage("Somatic stats file not found here: " + somaticFile.getAbsolutePath());
return;
}
SomaticStats stats = SomaticStatsParser.parseFile(sequenceStatsFile, sampleDAO);
SomaticStats stats = SomaticStatsParser.parseFile(somaticFile, sampleDAO);
// check if somatic data already exists
List<SomaticStats> duplicates = modelDAO.findSameSomaticStats(stats);
if (duplicates != null && !duplicates.isEmpty()) {
......
package utsw.bicf.nucliavault.model.hybrid;
import utsw.bicf.nucliavault.model.Demultiplex;
import utsw.bicf.nucliavault.model.Sample;
import utsw.bicf.nucliavault.model.SeqRun;
/**
 * Fake Demultiplex object that can be used
 * to store avg values from multiple dmuxes,
 * e.g. one averaged entry per sequencing run instead of one per lane.
 * Not an entity: it is built in memory from real Demultiplex rows and is
 * never persisted. All fields follow standard JavaBean accessor conventions.
 *
 * @author Guillaume
 */
public class AvgDemultiplex {

    // Fields are private (bean encapsulation); callers use the accessors below.
    private SeqRun seqRun;
    private Integer lane;
    private Integer clusters;
    private Integer pfClusters;
    private Float percPF;
    private Long readYield;
    private Float percQ30;
    private Float avgQualScore;
    private Float percLane;
    private Boolean pass;
    private Sample sample;

    /** No-arg constructor for frameworks / manual population via setters. */
    public AvgDemultiplex() {
    }

    /**
     * Seeds this average object from a single real Demultiplex row.
     * Aggregate fields (e.g. percQ30) are expected to be overwritten by the
     * caller after averaging across lanes.
     *
     * @param dmux the lane-level demultiplex row to copy values from
     */
    public AvgDemultiplex(Demultiplex dmux) {
        this.seqRun = dmux.getSeqRun();
        this.lane = dmux.getLane();
        this.clusters = dmux.getClusters();
        this.pfClusters = dmux.getPfClusters();
        this.percPF = dmux.getPercPF();
        this.readYield = dmux.getReadYield();
        this.percQ30 = dmux.getPercQ30();
        this.avgQualScore = dmux.getAvgQualScore();
        this.percLane = dmux.getPercLane();
        this.pass = dmux.getPass();
        this.sample = dmux.getSample();
    }

    public SeqRun getSeqRun() {
        return seqRun;
    }

    public void setSeqRun(SeqRun seqRun) {
        this.seqRun = seqRun;
    }

    public Integer getLane() {
        return lane;
    }

    public void setLane(Integer lane) {
        this.lane = lane;
    }

    public Integer getClusters() {
        return clusters;
    }

    public void setClusters(Integer clusters) {
        this.clusters = clusters;
    }

    public Integer getPfClusters() {
        return pfClusters;
    }

    public void setPfClusters(Integer pfClusters) {
        this.pfClusters = pfClusters;
    }

    public Float getPercPF() {
        return percPF;
    }

    public void setPercPF(Float percPF) {
        this.percPF = percPF;
    }

    public Long getReadYield() {
        return readYield;
    }

    public void setReadYield(Long readYield) {
        this.readYield = readYield;
    }

    public Float getPercQ30() {
        return percQ30;
    }

    public void setPercQ30(Float percQ30) {
        this.percQ30 = percQ30;
    }

    public Float getAvgQualScore() {
        return avgQualScore;
    }

    public void setAvgQualScore(Float avgQualScore) {
        this.avgQualScore = avgQualScore;
    }

    public Float getPercLane() {
        return percLane;
    }

    public void setPercLane(Float percLane) {
        this.percLane = percLane;
    }

    public Boolean getPass() {
        return pass;
    }

    public void setPass(Boolean pass) {
        this.pass = pass;
    }

    public Sample getSample() {
        return sample;
    }

    public void setSample(Sample sample) {
        this.sample = sample;
    }
}
......@@ -206,7 +206,7 @@ public class SampleDetails {
//Threshold Summary
public Boolean allPassed;
public SampleDetails(Sample sample, Demultiplex dmux, ModelDAO modelDAO, List<Threshold> thresholds) {
public SampleDetails(Sample sample, AvgDemultiplex avgDmux, ModelDAO modelDAO, List<Threshold> thresholds) {
this.sample = sample;
......@@ -340,10 +340,10 @@ public class SampleDetails {
}
SeqRun run = null;
if (dmux != null) {
run = dmux.getSeqRun();
seqSampleReadCount = formatNumber(dmux.getReadYield());
seqSampleQ30 = dmux.getPercQ30(); // already formatted
if (avgDmux != null) {
run = avgDmux.getSeqRun();
seqSampleReadCount = formatNumber(avgDmux.getReadYield());
seqSampleQ30 = avgDmux.getPercQ30(); // already formatted
if (seqSampleQ30 != null) {
seqSampleQ30Value = new PassableValue(Threshold.getThreshold(thresholds, Threshold.SEQ_RUN_Q30), "seqSampleQ30Value", seqSampleQ30);
}
......@@ -459,9 +459,9 @@ public class SampleDetails {
}
totalReadsFormatted = totalReads == null ? null : NumberFormat.getInstance().format(totalReads);
List<Integer> sampleId = new ArrayList<Integer>();
sampleId.add(sample.getSampleId());
List<SomaticStats> somaticStats = modelDAO.getSomaticStatsForSampleIds(sampleId);
List<Integer> sampleIds = new ArrayList<Integer>();
sampleIds.add(sample.getSampleId());
List<SomaticStats> somaticStats = modelDAO.getSomaticStatsForSampleIds(sampleIds);
if (somaticStats != null && !somaticStats.isEmpty()) {
SomaticStats ss = somaticStats.get(0);
correlatedSampleName = ss.getSample1().getSampleId() != sample.getSampleId() ? ss.getSample1().getSampleLabName() : ss.getSample2().getSampleLabName();
......
......@@ -24,7 +24,7 @@ public class SampleImportProperties {
private static final String HEADER_COVERAGE_RAW = "sample.coverage.raw";
private static final String HEADER_COVERAGE_UNIQ = "sample.coverage.uniq";
private static final String HEADER_CONVERSION_STATS = "dmux.conversion.stats";
private static final String HEADER_SOMATIC_STATS = "somatic.seq.stats";
// private static final String HEADER_SOMATIC_STATS = "somatic.seq.stats";
private static final String HEADER_TRANSLOCATION = "somatic.translocation";
private static final String HEADER_GIAB_SNSP = "giab.snsp";
private static final String HEADER_TDF_RAW = "tdf.raw";
......@@ -38,7 +38,7 @@ public class SampleImportProperties {
File sampleCoverageRaw;
File sampleCoverageUniq;
File dmuxConverstionStats;
File somaticSeqStats;
// File somaticSeqStats;
File somaticTranslocation;
File giabSNSP;
File tdfRaw;
......@@ -58,7 +58,7 @@ public class SampleImportProperties {
sampleCoverageRaw = getFileFromProp(props, HEADER_COVERAGE_RAW);
sampleCoverageUniq = getFileFromProp(props, HEADER_COVERAGE_UNIQ);
dmuxConverstionStats = getFileFromProp(props, HEADER_CONVERSION_STATS);
somaticSeqStats = getFileFromProp(props, HEADER_SOMATIC_STATS);
// somaticSeqStats = getFileFromProp(props, HEADER_SOMATIC_STATS);
somaticTranslocation = getFileFromProp(props, HEADER_TRANSLOCATION);
giabSNSP = getFileFromProp(props, HEADER_GIAB_SNSP);
tdfRaw = getFileFromProp(props, HEADER_TDF_RAW);
......@@ -98,9 +98,9 @@ public class SampleImportProperties {
return dmuxConverstionStats;
}
public File getSomaticSeqStats() {
return somaticSeqStats;
}
// public File getSomaticSeqStats() {
// return somaticSeqStats;
// }
public File getSomaticTranslocation() {
return somaticTranslocation;
......
......@@ -63,13 +63,18 @@ public class LDAPAuthentication {
return false; //ldap authentication failed. Wrong password or other server issue
} finally {
try {
ctx.close();
if (ctx != null) {
ctx.close();
}
else {
return false;
}
} catch (NamingException e) {
// e.printStackTrace();
return false; //could not close the context for some reason. Don't let user log in just in case.
}
}
return false; //should only reach this spot is username is wrong.
return false; //should only reach this spot if username is wrong.
}
public String getUsername() {
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment