Commit 874cdb39 authored by Devin OKelly's avatar Devin OKelly
Browse files

Substantial refactoring- Input parsing into MATLAB from command line added....

Substantial refactoring- Input parsing into MATLAB from command line added. More aggressive usage and standardization of ReconSystem in initialization. Added parallelization options to MATLAB code. Included skeletons for default settings and defaults registry.
parent 77f8217b
......@@ -165,6 +165,9 @@ workflow_parameters:
- id: spectralEndmembers
type: multiselect
required: false
description: |
Selection of spectral unmixing endmembers.
default: 'water'
choices:
- ['hbo2', 'HbO2 - Oxyhemoglobin']
- ['hb', 'Hb - Deoxyhemoglobin']
......@@ -173,23 +176,17 @@ workflow_parameters:
- ['icg', 'ICG - Indocyanine Green']
- ['carbazole_violet', 'Carbazole Violet']
- ['phthalo_green', 'Phthalo Green']
default: ['hbo2','hb']
description: |
Selection of spectral unmixing endmembers.
#id: parallelizationSettings
#type: select
#choices:
# - ['useWorkerParallel', 'Use multi-worker parallelization']
# - ['dontuseWorkerParallel', 'Do not use multi-worker parallelization']
# # - ['useThreadParallel', 'HbO2 - Oxyhemoglobin']
# # - ['gpu_reconstruct', 'HbO2 - Oxyhemoglobin']
# # - ['gpu_operator', 'HbO2 - Oxyhemoglobin']
#required: true
#description: |
# Whether or not to use parallelization in the processing.
#default: useWorkerParallel
- id: parallelizationSettings
type: select
choices:
- ['useWorkerParallel', 'Use multi-worker parallelization']
- ['dontuseWorkerParallel', 'Do not use multi-worker parallelization']
required: true
description: |
Whether or not to use parallelization in the processing.
default: useWorkerParallel
# -----------------------------------------------------------------------------
# SHINY APP CONFIGURATION
......
......@@ -38,9 +38,42 @@ process runPipeline {
output: // Whatever files are designated here and show up in the work directory at process completion are output.
file "output.mat" into output
// Need to clarify the difference between the pwd and basedir, as well as how those
// end up behaving differently in terms of locating output vs locating scripts and data.
shell:
'''
echo ScriptsLinked
bash !{baseDir}/matlabShellWrapper ${PWD} !{baseDir} "!{params.msotFile}" !{params.reconModel} !{params.reconSolver} !{params.fieldOfView} !{params.pixelResolution}
#!/bin/bash
module load matlab/2017b
echo "Formatting Inputs"
MATLAB_PASS_STRING="\$(bash !{baseDir}/matlabArgumentParsing.sh \"!{params.msotFile}\" ${PWD} !{baseDir} -m !{params.reconModel} -s !{params.reconSolver} -f !{params.fieldOfView} -n !{params.pixelResolution} --filter.order 4 -recon.N_Projections 500)"
echo $MATLAB_PASS_STRING
bash !{baseDir}/matlabShellWrapper !{baseDir} "$MATLAB_PASS_STRING"
'''
// bash !{baseDir}/matlabShellWrapper ${PWD} !{baseDir} "!{params.msotFile}" !{params.reconModel} !{params.reconSolver} !{params.fieldOfView} !{params.pixelResolution}
}
// '''
// echo ScriptsLinked
// MATLAB_PASS_STRING="!{baseDir}/matlabArgumentParsing.sh !{params.msotFile} ${PWD} !{baseDir} -m !{params.reconModel} -s !{params.reconSolver} -f !{params.fieldOfView} -n !{params.pixelResolution}"
// echo ''
// echo ''
// echo $MATLAB_PASS_STRING
// echo ''
// echo ''
// bash !{baseDir}/matlabShellWrapper $MATLAB_PASS_STRING
// '''
//matlab -nosplash -nodisplay -nodesktop -r \
// "disp('Entered matlabshellWrapper'); \
// disp(fullfile('!{baseDir}','scripts')); \
// addpath(fullfile('!{baseDir}','scripts')); \
// disp('Added script paths'); drivePipeline(${MATLAB_PASS_STRING}); \
// exit;"
#!/bin/bash
## Eventually this should replace matlabShellWrapper so that the user can
## invoke drivePipeline from a bash script using the same generic input
## scheme as the inputParser in MATLAB.
##
## Usage: matlabArgumentParsing.sh MSOT_FILE OUTPUT_FOLDER [flags...]
## The first argument must be the source file (i.e., the .msot file);
## the second argument is the output directory. Remaining arguments are
## -d/--debug, -p/--parallel, the recognized name-value flags below, or
## arbitrary "-name value" pairs which are passed through verbatim.
## On success, prints a single comma-separated, single-quoted argument
## string on stdout, suitable for splicing into a MATLAB drivePipeline()
## call as varargin.

## Check that we have at least the two required positional arguments.
if [[ $# -lt 2 ]]; then
    # Report to stderr so stdout stays reserved for the argument string.
    echo "Error: need at least 2 arguments: MSOT_FILE OUTPUT_FOLDER" >&2
    exit 1
else #TODO: Add checking for extension and that the output folder exists.
    MSOT_FILE=$1
    OUTPUT_FOLDER=$2
fi

# Defaults for all declared arguments. Unrecognized name-value flags are
# collected in parallel arrays and appended to the output untouched.
DEBUG=0
PARALLEL=0
RECON_MODEL="backproject2D"
RECON_SOLVER="lsqr"
FIELD_OF_VIEW=25
NUMBER_PIXELS=128
NAME_ARRAY=()
VALUE_ARRAY=()

# While there are still arguments beyond the required two...
# (counting via $# instead of testing "$3" != "" so that an empty-string
# value cannot terminate parsing early)
while [[ $# -ge 3 ]]; do
    # See if the switch matches any of the single-letter or phrase flags.
    case $3 in
        # Name-only flags (consume a single argument each).
        -d | --debug)
            DEBUG=1
            ;;
        -p | --parallel)
            PARALLEL=1
            ;;
        # Name-value flags: shift once here, once more at loop end.
        -m | --reconModel) shift
            RECON_MODEL=$3
            ;;
        -s | --reconSolver) shift
            RECON_SOLVER=$3
            ;;
        -f | --fieldOfView) shift
            FIELD_OF_VIEW=$3
            ;;
        -n | --npix) shift
            NUMBER_PIXELS=$3
            ;;
        # Any other flag: record the name and its following value verbatim.
        # The leading dash(es) are deliberately kept on the name; quoting
        # the expansions preserves values that contain whitespace.
        -*)
            NAME_ARRAY+=("$3")
            shift
            VALUE_ARRAY+=("$3")
            ;;
    esac
    shift # Shift to next flag (or flag/value pair).
done

# Declared arguments first, each single-quoted for MATLAB.
MATLAB_ARG_STRING="'$MSOT_FILE','$OUTPUT_FOLDER',\
'$RECON_MODEL','$RECON_SOLVER','$FIELD_OF_VIEW','$NUMBER_PIXELS'"

# Concatenate together all of the pass-through name-value arguments so
# MATLAB takes them as varargin that we can pass to the input parser.
for index in "${!NAME_ARRAY[@]}"; do
    MATLAB_ARG_STRING+=",'${NAME_ARRAY[$index]}','${VALUE_ARRAY[$index]}'"
done

# Quoted so any embedded whitespace survives word splitting intact.
echo "$MATLAB_ARG_STRING"
......@@ -12,6 +12,13 @@
module load matlab/2017b
outPath=pwd
echo "entering MATLAB"
echo $2
# matlab -nosplash -nodisplay -nodesktop -r "try disp('bad'); drivePipeline('${1}', '${2}', '${3}', ${4}, ${5}, ${6}, ${7}); catch; disp('${1}'); disp('${2}');disp('${3}');disp('${4}');disp('${5}');disp('${6}');disp('${7}');end;"
matlab -nosplash -nodisplay -nodesktop -r \
"disp('Entered matlabshellWrapper'); disp(fullfile('${2}','scripts')); addpath(fullfile('${2}','scripts')); disp('Added script paths'); drivePipeline('${1}', '${2}', '${3}', '${4}', '${5}', '${6}', '${7}'); exit;"
\ No newline at end of file
"disp('Entered matlabshellWrapper'); \
disp(fullfile('${1}','scripts')); \
addpath(fullfile('${1}','scripts')); \
disp('Added script paths'); \
drivePipeline(${2}); \
exit;"
\ No newline at end of file
function [Qd, modelTimeList,projList]=CDMMI(dataStruct)
ROI = range(dataStruct.image.xRange);
pixRes = dataStruct.image.Nx;
sensR = mean(dataStruct.signal.R);
sensTheta = linspace(dataStruct.signal.startAngle,...
dataStruct.signal.endAngle, ...
dataStruct.signal.NTransducers);
function [Qd, modelTimeList,projList] = CDMMI(dataStruct,reconObj)
% TODO: Replace this with FieldOfView
fieldOfView = range(dataStruct.image.xRange);
n_x = dataStruct.image.Nx;
transducerCoordinatesR = mean(dataStruct.signal.R);
transducerCoordinatesTheta = linspace(dataStruct.signal.startAngle,...
dataStruct.signal.endAngle, ...
dataStruct.signal.NTransducers);
% Get the number of projections. When this is unequal to N_Transducers, there is an interpolation step that needs to be performed.
proj = dataStruct.signal.NProjections;
speedOfSound = dataStruct.speedOfSound;
speedOfSound = dataStruct.speedOfSound; %TODO: Post a warning if this isn't scalar (Math doesn't work for varying SoS)
timeres = dataStruct.timeres;
px=fieldOfView/(n_x);
py=fieldOfView/(n_x);
px=ROI/(pixRes);
py=ROI/(pixRes);
RFOV=ROI*sqrt(2)/2;
RFOV=fieldOfView*sqrt(2)/2;
Rclose=sensR-RFOV;
Rfar=sensR+RFOV;
modelTimeList=linspace(Rclose/speedOfSound,Rfar/speedOfSound,pixRes*timeres);
modelTimeList=linspace(Rclose/speedOfSound,Rfar/speedOfSound,n_x*timeres);
dt=mean(diff(modelTimeList));
%% Define the grid giving us the image area.
% Corners and centers for the marching tests
marchX=[-ROI/2,0,ROI/2];
marchY=[-ROI/2,0,ROI/2];
XYCoo=(pixRes-1)/2*px;
marchX=[-fieldOfView/2,0,fieldOfView/2];
marchY=[-fieldOfView/2,0,fieldOfView/2];
XYCoo=(n_x-1)/2*px;
[marchXgrid,marchYgrid]=meshgrid(marchX,marchY);
% Actual grid (TODO)
z=zeros(pixRes);
z=zeros(n_x);
% imHandle=imagesc([marchX(1),marchY(1)],[marchX(end),marchY(end)],z); hold on;
% imHandle.Parent.YDir='normal';
% gridHandle=displayGrid(pixRes,pixRes,ROI/pixRes,ROI/pixRes,[],[]); hold on;
%% Calculate distances to each marching test point for each sensor.
[sensorx,sensory]=pol2cart(sensTheta,sensR);
[sensorx,sensory]=pol2cart(transducerCoordinatesTheta,sensR);
sensory=(sensory);
sensorx=(sensorx);
......@@ -130,7 +132,7 @@ function [Qd, modelTimeList,projList]=CDMMI(dataStruct)
coInd=[];
valInd=[];
%% The vector pointing to the closest point from each sensor denotes the point which is 'farthest into the image'.
Nt=(timeres*pixRes);
Nt=(timeres*n_x);
firstFlag=0;
......@@ -203,8 +205,8 @@ function [Qd, modelTimeList,projList]=CDMMI(dataStruct)
end
% Calculate the intersection points with each gridline.
th1x=acos(((linspace(-ROI/2,ROI/2,pixRes+1)-xdcrLocX))/R);
th1y=asin(((linspace(-ROI/2,ROI/2,pixRes+1)-xdcrLocY))/R);
th1x=acos(((linspace(-fieldOfView/2,fieldOfView/2,n_x+1)-xdcrLocX))/R);
th1y=asin(((linspace(-fieldOfView/2,fieldOfView/2,n_x+1)-xdcrLocY))/R);
switch xdcrType
case 1
......@@ -256,14 +258,14 @@ function [Qd, modelTimeList,projList]=CDMMI(dataStruct)
% delth=
goodMask=(xCrossInd<pixRes)&(yCrossInd<pixRes)&(xCrossInd>0)&(yCrossInd>0);
goodMask=(xCrossInd<n_x)&(yCrossInd<n_x)&(xCrossInd>0)&(yCrossInd>0);
xCrossInd=(xCrossInd(goodMask));
yCrossInd=(yCrossInd(goodMask));
delth=abs(delth(goodMask));
[crossInd,vals] = reverseInterpolation(xCrossInd,yCrossInd,delth,pixRes,pixRes);
[crossInd,vals] = reverseInterpolation(xCrossInd,yCrossInd,delth,n_x,n_x);
......@@ -349,9 +351,9 @@ function [Qd, modelTimeList,projList]=CDMMI(dataStruct)
end
%% Calculate the time derivatives based on the particular transducer's matrix.
Qmat=sparse(roInd,coInd,valInd,Nt,pixRes*pixRes);
Qmat=sparse(roInd,coInd,valInd,Nt,n_x*n_x);
Qdtemp = calculateDerivative(Qmat,dt);
Qdtemp = calculateDerivative(Qmat,dt*5E7);
Qdholder{xdcr}=Qdtemp;
......
function [Qd, modelTimeList,projList] = CDMMI_obj(reconObj)
% TODO: Replace this with FieldOfView
% fieldOfView = range(dataStruct.image.xRange);
fieldOfView = reconObj.FieldOfView_X;
% n_x = dataStruct.image.Nx;
n_x = reconObj.N_x;
% transducerCoordinatesR = mean(dataStruct.signal.R);
transducerCoordinatesR = reconObj.TransducerCoordinatesCylindrical_R;
% transducerCoordinatesTheta = linspace(dataStruct.signal.startAngle,...
% dataStruct.signal.endAngle, ...
% dataStruct.signal.NTransducers);
transducerCoordinatesTheta = reconObj.TransducerCoordinatesCylindrical_Theta;
% Get the number of projections. When this is unequal to N_Transducers, there is an interpolation step that needs to be performed.
proj = reconObj.N_Projections;
%TODO: Post a warning if this isn't scalar (Math doesn't work for varying SoS)
speedOfSound = reconObj.SpeedOfSound;
timeres = reconObj.PixelOversampling;
px=fieldOfView/(n_x);
py=fieldOfView/(n_x);
RFOV=fieldOfView*sqrt(2)/2;
Rclose=min(transducerCoordinatesR-RFOV);
Rfar=max(transducerCoordinatesR+RFOV);
modelTimeList=linspace(Rclose/speedOfSound,Rfar/speedOfSound,n_x*timeres);
dt=mean(diff(modelTimeList));
%% Define the grid giving us the image area.
% Corners and centers for the marching tests
marchX=[-fieldOfView/2,0,fieldOfView/2];
marchY=[-fieldOfView/2,0,fieldOfView/2];
XYCoo=(n_x-1)/2*px;
[marchXgrid,marchYgrid]=meshgrid(marchX,marchY);
% Actual grid (TODO)
z=zeros(n_x);
% imHandle=imagesc([marchX(1),marchY(1)],[marchX(end),marchY(end)],z); hold on;
% imHandle.Parent.YDir='normal';
% gridHandle=displayGrid(pixRes,pixRes,ROI/pixRes,ROI/pixRes,[],[]); hold on;
%% Calculate distances to each marching test point for each sensor.
[sensorx,sensory]=pol2cart(transducerCoordinatesTheta,transducerCoordinatesR);
sensory=(sensory);
sensorx=(sensorx);
% xdcrXStart = dataStruct.signal.xCoordinates; %
xdcrXStart = reconObj.TransducerCoordinatesCartesian_X;
% xdcrYStart = dataStruct.signal.yCoordinates; %
xdcrYStart = reconObj.TransducerCoordinatesCartesian_Y;
[theta,rho] = cart2pol(xdcrXStart,xdcrYStart);
[thetaNew] = interp1(1:numel(theta),unwrap(theta),linspace(1,numel(theta),proj));
[rhoNew] = interp1(1:numel(rho),rho,linspace(1,numel(theta),proj));
[sensorx,sensory] = pol2cart(thetaNew,rhoNew);
projList=linspace(1,256,proj);
% index 1 is the bottom left corner, index 3 is the upper left, index 9 is the upper right.
% We now know how many points will have been passed at each time point, as
% well as the first and last time points for each sensor.
% Classify each sensor as either a corner or an edge. The corners must
% intersect with the corner points first. The edges share a coordinate with
% their nearest sensor.
onLeft=sensorx<=marchX(1);
onRight=sensorx>=marchX(end);
onBottom=sensory<=marchY(1);
onTop=sensory>=marchY(end);
edgeCornerParity=onLeft+onRight+onTop+onBottom;
isEdge=(edgeCornerParity==1);
isCorn=(edgeCornerParity==2);
%% Make arrays of the distance of each corner sensor to its nearest edge.
% Each row is the distance to the nearest edge. 1/3 are the close/far
% vertical edges, 2/4 are the close/far horizontal edges.
distToEdges=zeros(4,proj);
% Corner Sensors
%%
distToEdges(1,:)=abs(sensorx-marchX(1));
distToEdges(2,:)=abs(sensory-marchY(1));
distToEdges(3,:)=abs(sensorx-marchX(end));
distToEdges(4,:)=abs(sensory-marchY(end));
%% Angle corrections based on which sensor is being used. Also classify sensors by region. Clockwise from upper right, 1:8
sensorRegion=zeros(1,proj);
sensorRegion(onTop&onLeft)=1;
sensorRegion(onTop&isEdge)=2;
sensorRegion(onTop&onRight)=3;
sensorRegion(onRight&isEdge)=4;
sensorRegion(onBottom&onRight)=5;
sensorRegion(onBottom&isEdge)=6;
sensorRegion(onBottom&onLeft)=7;
sensorRegion(onLeft&isEdge)=8;
corrAng=pi*[1.5 1.0 1.5 1.0 0.5 1.0 0.5 0.0
2.0 1.5 1.0 1.5 1.0 0.5 0.0 1.5
1.5 2.0 1.5 1.0 0.5 0.0 0.5 0.0
2.0 1.5 1.0 0.5 1.0 0.5 0.0 0.5];
corrAngPlus=logical([1 1 0 1 1 0 0 1
0 1 1 0 0 1 1 1
1 0 0 1 1 1 0 1
0 1 1 1 0 1 1 0]);
corrAngMinus=logical([0 0 1 1 0 1 1 1
1 1 0 1 1 1 0 0
0 1 1 1 0 0 1 1
1 1 0 0 1 1 0 1]);
thorta = 0:pi/150:2*pi;
roInd=[];
coInd=[];
valInd=[];
%% The vector pointing to the closest point from each sensor denotes the point which is 'farthest into the image'.
Nt=(timeres*n_x);
firstFlag=0;
for xdcr=1:proj
for t=1:Nt
% Fetch the corrections for that transducer region.
xdcrDist=distToEdges(:,xdcr);
xdcrType=sensorRegion(xdcr);
angleCorrection=corrAng(:,xdcrType);
anglePlus=corrAngPlus(:,xdcrType);
angleMinus=corrAngMinus(:,xdcrType);
xdcrLocX=sensorx(xdcr);
xdcrLocY=sensory(xdcr);
R=speedOfSound*modelTimeList(t);
%calculate angles based on edge/corner
if isCorn(xdcr)
theseAngles=asin(xdcrDist./R);
elseif isEdge(xdcr)
theseAngles=acos(xdcrDist./R);
else
error('Something went terribly wrong.');
end
%Provide corrections.
thetaTemp=[angleCorrection(anglePlus)+theseAngles(anglePlus); angleCorrection(angleMinus)-theseAngles(angleMinus)];
%We only want real angles.
theta=sort(thetaTemp(thetaTemp==real(thetaTemp)));
%Remove the angles that Go Too Far.
[xadj,yadj]=pol2cart(theta,R);
pointPosX=xadj+xdcrLocX;
pointPosY=yadj+xdcrLocY;
badAngles=(pointPosX<(marchX(1)-1E-16)|pointPosX>(marchX(end)+1E-16))|(pointPosY<(marchY(1)-1E-16)|pointPosY>(marchY(end)+1E-16));
thetaChoice=theta(~badAngles);
if isempty(thetaChoice)
continue;
end
thetaChoice=sort(thetaChoice);
%Correct for wraparound.
if numel(thetaChoice)==2
if abs(diff(thetaChoice))>(pi/2)
thetaChoice(2)=thetaChoice(2)-2*pi;
end
elseif numel(thetaChoice)==4
else
error('Something terrible has happened');
end
%% We now have intersections of each vector with the edge of the ROI.
%There will only be two or four intersections; split up by case
%to calculate the arc length inside the ROI.
if numel(thetaChoice)==2
arcLength=(thetaChoice(2)-thetaChoice(1));
elseif numel(thetaChoice)==4
arcLength=((thetaChoice(2)-thetaChoice(1))+(thetaChoice(4)-thetaChoice(3)));
else
error('Something terrible has happened');
end
% Calculate the intersection points with each gridline.
th1x=acos(((linspace(-fieldOfView/2,fieldOfView/2,n_x+1)-xdcrLocX))/R);
th1y=asin(((linspace(-fieldOfView/2,fieldOfView/2,n_x+1)-xdcrLocY))/R);
switch xdcrType
case 1
th1x=2*pi-th1x;
th1y=th1y+2*pi;
case 2
th1x=(pi/2)-th1x+3*pi/2;
th1y=2*pi+th1y;
th1y=[3*pi/2-(th1y-3*pi/2) th1y];
case 3
th1x=2*(pi)-th1x;
th1y=pi-th1y;
case 4
th1x=[2*pi-th1x th1x];
th1y=pi-th1y;
case 5
th1y=pi-th1y;
case 6
th1y=[th1y pi-th1y];
case 7
case 8
thetaChoice(thetaChoice>(pi/2))=thetaChoice(thetaChoice>(pi/2))-2*pi;
thetaChoice=sort(thetaChoice);
th1x=[-th1x th1x];
end
%bundle and retain only those which end up between the max and
%min vectors.
th=[th1x(th1x==real(th1x)) th1y(th1y==real(th1y))];
th=th(th>min(thetaChoice)&th<max(thetaChoice));
if numel(thetaChoice)==4
th=[th(th<thetaChoice(2)|th>thetaChoice(3))];
end
th=sort([thetaChoice' th]);
mdth