diff --git a/meteor_packages/mats-common/imports/startup/server/data_match_util.js b/meteor_packages/mats-common/imports/startup/server/data_match_util.js
index ae3fb854c..8b94e051c 100644
--- a/meteor_packages/mats-common/imports/startup/server/data_match_util.js
+++ b/meteor_packages/mats-common/imports/startup/server/data_match_util.js
@@ -35,6 +35,9 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
let subModelSumY = [];
let subObsSumY = [];
let subAbsSumY = [];
+ let subRelCount = [];
+ let subRelRawCount = [];
+ let subRelHit = [];
let newSubSecs = [];
let newSubLevs = [];
let newSubHit = [];
@@ -62,6 +65,9 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
let newSubObsSumY = [];
let newSubAbsSumY = [];
let newSubValuesY = [];
+ let newSubRelCount = [];
+ let newSubRelRawCount = [];
+ let newSubRelHit = [];
let newCurveData = {};
const independentVarGroups = [];
const independentVarHasPoint = [];
@@ -88,6 +94,7 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
curveInfoParams.statType === "scalar" &&
plotType !== matsTypes.PlotTypes.histogram;
const isSimpleScatter = plotType === matsTypes.PlotTypes.simpleScatter;
+ const isReliability = plotType === matsTypes.PlotTypes.reliability;
let curveXStats;
let curveXVars;
let curveYStats;
@@ -149,7 +156,7 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
}
// find the matching independentVars shared across all curves
- for (curveIndex = 0; curveIndex < curvesLength; curveIndex++) {
+ for (curveIndex = 0; curveIndex < curvesLength; curveIndex += 1) {
independentVarGroups[curveIndex] = []; // array for the independentVars for each curve that are not null
independentVarHasPoint[curveIndex] = []; // array for the *all* of the independentVars for each curve
subSecs[curveIndex] = {}; // map of the individual record times (subSecs) going into each independentVar for each curve
@@ -158,7 +165,7 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
}
data = dataset[curveIndex];
// loop over every independentVar value in this curve
- for (di = 0; di < data[independentVarName].length; di++) {
+ for (di = 0; di < data[independentVarName].length; di += 1) {
currIndependentVar = data[independentVarName][di];
if (data[statVarName][di] !== null) {
// store raw secs for this independentVar value, since it's not a null point
@@ -180,11 +187,11 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
if (removeNonMatchingIndVars) {
if (hasLevels) {
// loop over each common non-null independentVar value
- for (fi = 0; fi < matchingIndependentVars.length; fi++) {
+ for (fi = 0; fi < matchingIndependentVars.length; fi += 1) {
currIndependentVar = matchingIndependentVars[fi];
subIntersections[currIndependentVar] = [];
let currSubIntersections = [];
- for (si = 0; si < subSecs[0][currIndependentVar].length; si++) {
+ for (si = 0; si < subSecs[0][currIndependentVar].length; si += 1) {
// fill current intersection array with sec-lev pairs from the first curve
currSubIntersections.push([
subSecs[0][currIndependentVar][si],
@@ -192,9 +199,9 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
]);
}
// loop over every curve after the first
- for (curveIndex = 1; curveIndex < curvesLength; curveIndex++) {
+ for (curveIndex = 1; curveIndex < curvesLength; curveIndex += 1) {
tempSubIntersections = [];
- for (si = 0; si < subSecs[curveIndex][currIndependentVar].length; si++) {
+ for (si = 0; si < subSecs[curveIndex][currIndependentVar].length; si += 1) {
// create an individual sec-lev pair for each index in the subSecs and subLevs arrays
tempPair = [
subSecs[curveIndex][currIndependentVar][si],
@@ -214,12 +221,12 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
}
} else {
// loop over each common non-null independentVar value
- for (fi = 0; fi < matchingIndependentVars.length; fi++) {
+ for (fi = 0; fi < matchingIndependentVars.length; fi += 1) {
currIndependentVar = matchingIndependentVars[fi];
// fill current subSecs intersection array with subSecs from the first curve
let currSubSecIntersection = subSecs[0][currIndependentVar];
// loop over every curve after the first
- for (curveIndex = 1; curveIndex < curvesLength; curveIndex++) {
+ for (curveIndex = 1; curveIndex < curvesLength; curveIndex += 1) {
// keep taking the intersection of the current subSecs intersection array with each curve's subSecs array for this independentVar value
currSubSecIntersection = _.intersection(
currSubSecIntersection,
@@ -232,7 +239,7 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
}
} else {
// pull all subSecs and subLevs out of their bins, and back into one main array
- for (curveIndex = 0; curveIndex < curvesLength; curveIndex++) {
+ for (curveIndex = 0; curveIndex < curvesLength; curveIndex += 1) {
data = dataset[curveIndex];
subSecsRaw[curveIndex] = [];
subSecs[curveIndex] = [];
@@ -240,7 +247,7 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
subLevsRaw[curveIndex] = [];
subLevs[curveIndex] = [];
}
- for (di = 0; di < data.x.length; di++) {
+ for (di = 0; di < data.x.length; di += 1) {
subSecsRaw[curveIndex].push(data.subSecs[di]);
if (hasLevels) {
subLevsRaw[curveIndex].push(data.subLevs[di]);
@@ -254,14 +261,14 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
if (hasLevels) {
// determine which seconds and levels are present in all curves
- for (si = 0; si < subSecs[0].length; si++) {
+ for (si = 0; si < subSecs[0].length; si += 1) {
// fill current intersection array with sec-lev pairs from the first curve
subIntersections.push([subSecs[0][si], subLevs[0][si]]);
}
// loop over every curve after the first
- for (curveIndex = 1; curveIndex < curvesLength; curveIndex++) {
+ for (curveIndex = 1; curveIndex < curvesLength; curveIndex += 1) {
tempSubIntersections = [];
- for (si = 0; si < subSecs[curveIndex].length; si++) {
+ for (si = 0; si < subSecs[curveIndex].length; si += 1) {
// create an individual sec-lev pair for each index in the subSecs and subLevs arrays
tempPair = [subSecs[curveIndex][si], subLevs[curveIndex][si]];
// see if the individual sec-lev pair matches a pair from the current intersection array
@@ -278,7 +285,7 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
// fill current subSecs intersection array with subSecs from the first curve
subSecIntersection = subSecs[0];
// loop over every curve after the first
- for (curveIndex = 1; curveIndex < curvesLength; curveIndex++) {
+ for (curveIndex = 1; curveIndex < curvesLength; curveIndex += 1) {
// keep taking the intersection of the current subSecs intersection array with each curve's subSecs array
subSecIntersection = _.intersection(subSecIntersection, subSecs[curveIndex]);
}
@@ -286,13 +293,13 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
}
// remove non-matching independentVars and subSecs
- for (curveIndex = 0; curveIndex < curvesLength; curveIndex++) {
+ for (curveIndex = 0; curveIndex < curvesLength; curveIndex += 1) {
// loop over every curve
data = dataset[curveIndex];
// need to loop backwards through the data array so that we can splice non-matching indices
// while still having the remaining indices in the correct order
let dataLength = data[independentVarName].length;
- for (di = dataLength - 1; di >= 0; di--) {
+ for (di = dataLength - 1; di >= 0; di -= 1) {
if (removeNonMatchingIndVars) {
if (matchingIndependentVars.indexOf(data[independentVarName][di]) === -1) {
// if this is not a common non-null independentVar value, we'll have to remove some data
@@ -345,6 +352,10 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
subObsSum = data.subObsSum[di];
subAbsSum = data.subAbsSum[di];
}
+ } else if (isReliability) {
+ subRelHit = data.subRelHit[di];
+ subRelRawCount = data.subRelRawCount[di];
+ subRelCount = data.subRelCount[di];
}
if (isSimpleScatter) {
subValuesX = data.subValsX[di];
@@ -386,12 +397,15 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
newSubObsSumY = [];
newSubAbsSumY = [];
newSubValuesY = [];
+ newSubRelCount = [];
+ newSubRelRawCount = [];
+ newSubRelHit = [];
newSubSecs = [];
if (hasLevels) {
newSubLevs = [];
}
// loop over all subSecs for this independentVar
- for (si = 0; si < subSecs.length; si++) {
+ for (si = 0; si < subSecs.length; si += 1) {
if (hasLevels) {
// create sec-lev pair for each sub value
tempPair = [subSecs[si], subLevs[si]];
@@ -438,6 +452,10 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
var newObsSum = subObsSum[si];
var newAbsSum = subAbsSum[si];
}
+ } else if (isReliability) {
+ var newRelCount = subRelCount[si];
+ var newRelRawCount = subRelRawCount[si];
+ var newRelHit = subRelHit[si];
}
if (isSimpleScatter) {
var newValX = subValuesX[si];
@@ -494,6 +512,17 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
if (hasLevels) {
newSubLevs.push(newLev);
}
+ } else if (isReliability) {
+ if (newRelHit !== undefined) {
+ newSubRelCount.push(newRelCount);
+ newSubRelRawCount.push(newRelRawCount);
+ newSubRelHit.push(newRelHit);
+ newSubValues.push(newVal);
+ newSubSecs.push(newSec);
+ if (hasLevels) {
+ newSubLevs.push(newLev);
+ }
+ }
} else if (newVal !== undefined) {
newSubValues.push(newVal);
newSubSecs.push(newSec);
@@ -535,6 +564,10 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
data.subObsSum[di] = newSubObsSum;
data.subAbsSum[di] = newSubAbsSum;
}
+ } else if (isReliability) {
+ data.subRelCount[di] = newSubRelCount;
+ data.subRelRawCount[di] = newSubRelRawCount;
+ data.subRelHit[di] = newSubRelHit;
}
if (isSimpleScatter) {
data.subValsX[di] = newSubValuesX;
@@ -559,7 +592,7 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
) {
// need to recalculate the primary statistic with the newly matched hits, false alarms, etc.
dataLength = data[independentVarName].length;
- for (di = 0; di < dataLength; di++) {
+ for (di = 0; di < dataLength; di += 1) {
if (data.subHit[di] instanceof Array) {
const hit = matsDataUtils.sum(data.subHit[di]);
const fa = matsDataUtils.sum(data.subFa[di]);
@@ -673,7 +706,7 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
) {
// need to recalculate the primary statistic with the newly matched partial sums.
dataLength = data[independentVarName].length;
- for (di = 0; di < dataLength; di++) {
+ for (di = 0; di < dataLength; di += 1) {
if (plotType === matsTypes.PlotTypes.simpleScatter) {
if (data.subSquareDiffSumX[di] instanceof Array) {
const squareDiffSumX = matsDataUtils.sum(data.subSquareDiffSumX[di]);
@@ -725,6 +758,15 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
);
}
}
+ } else if (isReliability) {
+ for (di = 0; di < dataLength; di += 1) {
+ data.y[di] =
+ matsDataUtils.sum(data.subRelHit[di]) /
+ matsDataUtils.sum(data.subRelCount[di]);
+ data.hitCount[di] = matsDataUtils.sum(data.subRelHit[di]);
+ data.fcstCount[di] = matsDataUtils.sum(data.subRelCount[di]);
+ data.fcstRawCount[di] = matsDataUtils.sum(data.subRelRawCount[di]);
+ }
} else if (plotType === matsTypes.PlotTypes.histogram) {
const d = {
// relevant fields to recalculate
@@ -768,12 +810,17 @@ const getMatchedDataSet = function (dataset, curveInfoParams, appParams, binStat
newCurveData = d;
}
const newCurveDataKeys = Object.keys(newCurveData);
- for (let didx = 0; didx < newCurveDataKeys.length; didx++) {
+ for (let didx = 0; didx < newCurveDataKeys.length; didx += 1) {
dataset[curveIndex][newCurveDataKeys[didx]] =
newCurveData[newCurveDataKeys[didx]];
}
}
+ if (isReliability) {
+ data.sample_climo =
+ matsDataUtils.sum(data.hitCount) / matsDataUtils.sum(data.fcstRawCount);
+ }
+
// save matched data and recalculate the max and min for this curve
const filteredx = data.x.filter((x) => x);
const filteredy = data.y.filter((y) => y);
diff --git a/meteor_packages/mats-common/imports/startup/server/data_process_util.js b/meteor_packages/mats-common/imports/startup/server/data_process_util.js
index 7e4c2fa0a..93201be76 100644
--- a/meteor_packages/mats-common/imports/startup/server/data_process_util.js
+++ b/meteor_packages/mats-common/imports/startup/server/data_process_util.js
@@ -42,7 +42,7 @@ const processDataXYCurve = function (
const axisLimitReprocessed = {};
// calculate data statistics (including error bars) for each curve
- for (var curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (var curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
axisLimitReprocessed[curveInfoParams.curves[curveIndex].axisKey] =
axisLimitReprocessed[curveInfoParams.curves[curveIndex].axisKey] !== undefined;
const { diffFrom } = curveInfoParams.curves[curveIndex];
@@ -318,7 +318,7 @@ const processDataXYCurve = function (
).toPrecision(4)}`;
}
- di++;
+ di += 1;
}
// enable error bars if matching and they aren't null.
@@ -345,7 +345,7 @@ const processDataXYCurve = function (
// recalculate axis options after QC and matching
const filteredIndVars = [];
- for (let vidx = 0; vidx < values.length; vidx++) {
+ for (let vidx = 0; vidx < values.length; vidx += 1) {
if (values[vidx] !== null) filteredIndVars.push(indVars[vidx]);
}
const minx = Math.min(...filteredIndVars);
@@ -400,7 +400,7 @@ const processDataXYCurve = function (
}
}
- for (curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
// remove sub values and times to save space
data = dataset[curveIndex];
data.subHit = [];
@@ -486,7 +486,7 @@ const processDataXYCurve = function (
// add ideal value lines, if any
let idealValueLine;
let idealLabel;
- for (let ivIdx = 0; ivIdx < curveInfoParams.idealValues.length; ivIdx++) {
+ for (let ivIdx = 0; ivIdx < curveInfoParams.idealValues.length; ivIdx += 1) {
idealLabel = `ideal${ivIdx.toString()}`;
idealValueLine = matsDataCurveOpsUtils.getHorizontalValueLine(
resultOptions.xaxis.range[1],
@@ -551,7 +551,7 @@ const processDataProfile = function (
const axisLimitReprocessed = {};
// calculate data statistics (including error bars) for each curve
- for (var curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (var curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
axisLimitReprocessed[curveInfoParams.curves[curveIndex].axisKey] =
axisLimitReprocessed[curveInfoParams.curves[curveIndex].axisKey] !== undefined;
const { diffFrom } = curveInfoParams.curves[curveIndex];
@@ -780,7 +780,7 @@ const processDataProfile = function (
).toPrecision(4)}`;
}
- di++;
+ di += 1;
}
// enable error bars if matching and they aren't null.
@@ -812,7 +812,7 @@ const processDataProfile = function (
// recalculate axis options after QC and matching
const filteredLevels = [];
- for (let vidx = 0; vidx < values.length; vidx++) {
+ for (let vidx = 0; vidx < values.length; vidx += 1) {
if (values[vidx] !== null) filteredLevels.push(levels[vidx]);
}
const miny = Math.min(...filteredLevels);
@@ -857,7 +857,7 @@ const processDataProfile = function (
}
}
- for (curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
// remove sub values and times to save space
data = dataset[curveIndex];
data.subHit = [];
@@ -904,7 +904,7 @@ const processDataProfile = function (
// add ideal value lines, if any
let idealValueLine;
let idealLabel;
- for (let ivIdx = 0; ivIdx < curveInfoParams.idealValues.length; ivIdx++) {
+ for (let ivIdx = 0; ivIdx < curveInfoParams.idealValues.length; ivIdx += 1) {
idealLabel = `ideal${ivIdx.toString()}`;
idealValueLine = matsDataCurveOpsUtils.getVerticalValueLine(
resultOptions.yaxis.range[1],
@@ -952,6 +952,16 @@ const processDataReliability = function (
const isMetexpress =
matsCollections.Settings.findOne({}).appType === matsTypes.AppTypes.metexpress;
+ // if matching, pare down dataset to only matching data. METexpress takes care of matching in its python query code
+ if (curveInfoParams.curvesLength > 1 && appParams.matching && !isMetexpress) {
+ dataset = matsDataMatchUtils.getMatchedDataSet(
+ dataset,
+ curveInfoParams,
+ appParams,
+ {}
+ );
+ }
+
// sort data statistics for each curve
for (let curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
const data = dataset[curveIndex];
@@ -1055,6 +1065,17 @@ const processDataReliability = function (
dataset.push(noSkillLine);
}
+ for (let curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
+ // remove sub values and times to save space
+ const data = dataset[curveIndex];
+ data.subRelHit = [];
+ data.subRelRawCount = [];
+ data.subRelCount = [];
+ data.subVals = [];
+ data.subSecs = [];
+ data.subLevs = [];
+ }
+
const totalProcessingFinish = moment();
bookkeepingParams.dataRequests["total retrieval and processing time for curve set"] =
{
@@ -1100,7 +1121,7 @@ const processDataROC = function (
}
// sort data statistics for each curve
- for (let curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (let curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
var data = dataset[curveIndex];
var statType;
if (curveInfoParams.statType === undefined) {
@@ -1140,7 +1161,7 @@ const processDataROC = function (
] = `${data.text[di]}
probability of false detection: ${data.x[di]}`;
data.text[di] = `${data.text[di]}
n: ${data.n[di]}`;
- di++;
+ di += 1;
}
dataset[curveIndex].glob_stats = {
auc,
@@ -1232,7 +1253,7 @@ const processDataPerformanceDiagram = function (
}
// sort data statistics for each curve
- for (var curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (var curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
var data = dataset[curveIndex];
var statType;
if (curveInfoParams.statType === undefined) {
@@ -1269,7 +1290,7 @@ const processDataPerformanceDiagram = function (
data.text[di] = `${data.text[di]}
success ratio: ${data.x[di]}`;
data.text[di] = `${data.text[di]}
n: ${data.n[di]}`;
- di++;
+ di += 1;
}
dataset[curveIndex].glob_stats = {};
}
@@ -1368,14 +1389,14 @@ const processDataPerformanceDiagram = function (
let textVals;
let cval;
let csiLine;
- for (let csiidx = 1; csiidx < 10; csiidx++) {
+ for (let csiidx = 1; csiidx < 10; csiidx += 1) {
cval = csiidx / 10;
xvals = _.range(cval, 1.01, 0.01);
yvals = [];
textVals = [];
var xval;
var yval;
- for (let xidx = 0; xidx < xvals.length; xidx++) {
+ for (let xidx = 0; xidx < xvals.length; xidx += 1) {
xval = xvals[xidx];
yval = (xval * cval) / (xval + xval * cval - cval);
yvals.push(yval);
@@ -1393,7 +1414,7 @@ const processDataPerformanceDiagram = function (
dataset.push(csiLine);
}
- for (curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
// remove sub values and times to save space
data = dataset[curveIndex];
data.subHit = [];
@@ -1563,7 +1584,7 @@ const processDataEnsembleHistogram = function (
const axisLimitReprocessed = {};
// calculate data statistics (including error bars) for each curve
- for (let curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (let curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
axisLimitReprocessed[curveInfoParams.curves[curveIndex].axisKey] =
axisLimitReprocessed[curveInfoParams.curves[curveIndex].axisKey] !== undefined;
const { diffFrom } = curveInfoParams.curves[curveIndex];
@@ -1634,7 +1655,7 @@ const processDataEnsembleHistogram = function (
data.y[di] === null ? null : Math.round(data.y[di])
}`;
- di++;
+ di += 1;
}
const valueTotal = values.reduce((a, b) => Math.abs(a) + Math.abs(b), 0);
@@ -1642,12 +1663,16 @@ const processDataEnsembleHistogram = function (
// calculate the relative frequency for all the bins.
// for diff curves, there's no good way to produce a diff of only matching data, so just diff the two parent curves.
let diffIndexVal = 0;
- for (let d_idx = 0; d_idx < data.y.length; d_idx++) {
+ for (let d_idx = 0; d_idx < data.y.length; d_idx += 1) {
if (data.y[d_idx] !== null) {
if (diffFrom === null || diffFrom === undefined) {
data.bin_stats[d_idx].bin_rf = data.bin_stats[d_idx].bin_rf / valueTotal;
} else {
- for (let diffIndex = diffIndexVal; diffIndex < data.x.length; diffIndex++) {
+ for (
+ let diffIndex = diffIndexVal;
+ diffIndex < data.x.length;
+ diffIndex += 1
+ ) {
if (dataset[diffFrom[0]].x[d_idx] === dataset[diffFrom[1]].x[diffIndex]) {
data.bin_stats[d_idx].bin_rf =
dataset[diffFrom[0]].bin_stats[d_idx].bin_rf -
@@ -1799,7 +1824,7 @@ const processDataHistogram = function (
const plotBins = {};
plotBins.binMeans = [];
plotBins.binLabels = [];
- for (let b_idx = 0; b_idx < binStats.binMeans.length; b_idx++) {
+ for (let b_idx = 0; b_idx < binStats.binMeans.length; b_idx += 1) {
plotBins.binMeans.push(binStats.binMeans[b_idx]);
plotBins.binLabels.push(binStats.binLabels[b_idx]);
}
@@ -1809,7 +1834,7 @@ const processDataHistogram = function (
let curve;
let diffFrom;
let label;
- for (var curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (var curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
curve = curveInfoParams.curves[curveIndex];
diffFrom = curve.diffFrom;
label = curve.label;
@@ -1921,7 +1946,7 @@ const processDataHistogram = function (
appParams
); // generate plot with data, curve annotation, axis labels, etc.
dataset.push(cOptions);
- curvesLengthSoFar++;
+ curvesLengthSoFar += 1;
} // end for curves
// if matching, pare down dataset to only matching data. Only do this if we didn't already do it while calculating diffs.
@@ -1939,7 +1964,7 @@ const processDataHistogram = function (
}
// calculate data statistics (including error bars) for each curve
- for (curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
const statisticSelect = curveInfoParams.curves[curveIndex].statistic;
diffFrom = curveInfoParams.curves[curveIndex].diffFrom;
var data = dataset[curveIndex];
@@ -1982,7 +2007,7 @@ const processDataHistogram = function (
: data.bin_stats[di].bin_sd.toPrecision(4)
}`;
- di++;
+ di += 1;
}
} // end curves
@@ -1992,7 +2017,7 @@ const processDataHistogram = function (
curveInfoParams.axisMap[curveInfoParams.curves[0].axisKey].ymin = ymin;
curveInfoParams.axisMap[curveInfoParams.curves[0].axisKey].ymax = ymax;
- for (curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
// remove sub values and times to save space
data = dataset[curveIndex];
data.subHit = [];
@@ -2085,9 +2110,9 @@ const processDataContour = function (
let j;
let currText;
let currYTextArray;
- for (j = 0; j < data.y.length; j++) {
+ for (j = 0; j < data.y.length; j += 1) {
currYTextArray = [];
- for (i = 0; i < data.x.length; i++) {
+ for (i = 0; i < data.x.length; i += 1) {
currText = `${label}
${data.xAxisKey}: ${data.x[i]}
${data.yAxisKey}: ${
data.y[j]
}
${statisticSelect}: ${
@@ -2169,7 +2194,7 @@ const processDataSimpleScatter = function (
}
// sort data statistics for each curve
- for (var curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (var curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
var data = dataset[curveIndex];
var statType;
if (curveInfoParams.statType === undefined) {
@@ -2217,12 +2242,12 @@ const processDataSimpleScatter = function (
] = `${data.text[di]}
${variableYSelect} ${statisticYSelect}: ${data.y[di]}`;
data.text[di] = `${data.text[di]}
n: ${data.n[di]}`;
- di++;
+ di += 1;
}
dataset[curveIndex].glob_stats = {};
}
- for (curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex++) {
+ for (curveIndex = 0; curveIndex < curveInfoParams.curvesLength; curveIndex += 1) {
// remove sub values and times to save space
data = dataset[curveIndex];
data.subSquareDiffSumX = [];
diff --git a/meteor_packages/mats-common/imports/startup/server/data_query_util.js b/meteor_packages/mats-common/imports/startup/server/data_query_util.js
index 802fccc72..b27e4fd6b 100644
--- a/meteor_packages/mats-common/imports/startup/server/data_query_util.js
+++ b/meteor_packages/mats-common/imports/startup/server/data_query_util.js
@@ -614,7 +614,7 @@ const queryDBSpecialtyCurve = function (pool, statement, appParams, statisticStr
};
// this method queries the database for performance diagrams
-const queryDBReliability = function (pool, statement, kernel) {
+const queryDBReliability = function (pool, statement, appParams, kernel) {
if (Meteor.isServer) {
let d = {
// d will contain the curve data
@@ -623,6 +623,7 @@ const queryDBReliability = function (pool, statement, kernel) {
binVals: [],
hitCount: [],
fcstCount: [],
+ fcstRawCount: [],
sample_climo: 0,
n: [],
subHit: [],
@@ -631,6 +632,9 @@ const queryDBReliability = function (pool, statement, kernel) {
subCn: [],
subData: [],
subHeaders: [],
+ subRelHit: [],
+ subRelRawCount: [],
+ subRelCount: [],
subVals: [],
subSecs: [],
subLevs: [],
@@ -664,7 +668,7 @@ const queryDBReliability = function (pool, statement, kernel) {
} else if (rows.includes("queryCB ERROR: ")) {
error = rows;
} else {
- parsedData = parseQueryDataReliability(rows, d, kernel);
+ parsedData = parseQueryDataReliability(rows, d, appParams, kernel);
d = parsedData.d;
}
dFuture.return();
@@ -678,7 +682,7 @@ const queryDBReliability = function (pool, statement, kernel) {
} else if (rows === undefined || rows === null || rows.length === 0) {
error = matsTypes.Messages.NO_DATA_FOUND;
} else {
- parsedData = parseQueryDataReliability(rows, d, kernel);
+ parsedData = parseQueryDataReliability(rows, d, appParams, kernel);
d = parsedData.d;
}
// done waiting - have results
@@ -1990,7 +1994,7 @@ const parseQueryDataXYCurve = function (
};
// this method parses the returned query data for performance diagrams
-const parseQueryDataReliability = function (rows, d, kernel) {
+const parseQueryDataReliability = function (rows, d, appParams, kernel) {
/*
let d = {
// d will contain the curve data
@@ -1999,6 +2003,7 @@ const parseQueryDataReliability = function (rows, d, kernel) {
binVals: [],
hitCount: [],
fcstCount: [],
+ fcstRawCount: [],
sample_climo: 0,
n: [],
subHit: [],
@@ -2007,6 +2012,9 @@ const parseQueryDataReliability = function (rows, d, kernel) {
subCn: [],
subData: [],
subHeaders: [],
+ subRelHit: [],
+ subRelRawCount: [],
+ subRelCount: [],
subVals: [],
subSecs: [],
subLevs: [],
@@ -2023,12 +2031,21 @@ const parseQueryDataReliability = function (rows, d, kernel) {
};
*/
+ const { hasLevels } = appParams;
+
// initialize local variables
const binVals = [];
const hitCounts = [];
const fcstCounts = [];
+ const fcstRawCounts = [];
const observedFreqs = [];
let totalForecastCount = 0;
+ const subRelCount = [];
+ const subRelRawCount = [];
+ const subRelHit = [];
+ const subVals = [];
+ const subSecs = [];
+ const subLevs = [];
for (let rowIndex = 0; rowIndex < rows.length; rowIndex += 1) {
if (
Number(rows[rowIndex].kernel) === 0 &&
@@ -2036,6 +2053,33 @@ const parseQueryDataReliability = function (rows, d, kernel) {
rows[rowIndex].rawfcstcount !== "NULL"
) {
totalForecastCount += Number(rows[rowIndex].rawfcstcount);
+ const fcstRawCount =
+ rows[rowIndex].rawfcstcount === "NULL"
+ ? null
+ : Number(rows[rowIndex].rawfcstcount);
+        let sub_raw_counts = []; // raw (pre-matching) counts parsed from the shared sub_data string; the field layout is reused from the matched-count parsing below
+ // parse the sub-data
+ if (rows[rowIndex].sub_data !== undefined && rows[rowIndex].sub_data !== null) {
+ try {
+ const sub_data = rows[rowIndex].sub_data.toString().split(",");
+ let curr_sub_data;
+ for (let sd_idx = 0; sd_idx < sub_data.length; sd_idx++) {
+ curr_sub_data = sub_data[sd_idx].split(";");
+ if (hasLevels) {
+ sub_raw_counts.push(Number(curr_sub_data[3]));
+ } else {
+ sub_raw_counts.push(Number(curr_sub_data[2]));
+ }
+ }
+ } catch (e) {
+ // this is an error produced by a bug in the query function, not an error returned by the mysql database
+ e.message = `Error in parseQueryDataReliability. The expected fields don't seem to be present in the results cache: ${e.message}`;
+ throw new Error(e.message);
+ }
+ } else {
+ sub_raw_counts = NaN;
+ }
+ subRelRawCount.push(sub_raw_counts);
}
if (Number(rows[rowIndex].kernel) === Number(kernel)) {
const binVal = Number(rows[rowIndex].binValue);
@@ -2059,6 +2103,61 @@ const parseQueryDataReliability = function (rows, d, kernel) {
hitCounts.push(hitCount);
fcstCounts.push(fcstCount);
observedFreqs.push(observedFreq);
+
+ let sub_rel_hit = [];
+ let sub_rel_counts = [];
+ const sub_values = [];
+ let sub_secs = [];
+ let sub_levs = [];
+ if (
+ hitCount !== null &&
+ rows[rowIndex].sub_data !== undefined &&
+ rows[rowIndex].sub_data !== null
+ ) {
+ // parse the sub-data
+ try {
+ const sub_data = rows[rowIndex].sub_data.toString().split(",");
+ let curr_sub_data;
+ for (let sd_idx = 0; sd_idx < sub_data.length; sd_idx++) {
+ curr_sub_data = sub_data[sd_idx].split(";");
+ sub_secs.push(Number(curr_sub_data[0]));
+ if (hasLevels) {
+ if (!isNaN(Number(curr_sub_data[1]))) {
+ sub_levs.push(Number(curr_sub_data[1]));
+ } else {
+ sub_levs.push(curr_sub_data[1]);
+ }
+ sub_rel_counts.push(Number(curr_sub_data[2]));
+ sub_rel_hit.push(Number(curr_sub_data[4]));
+              // dummy entry: shared xy-curve utilities expect a populated sub_values array; the value itself is never used
+ sub_values.push(0);
+ } else {
+ sub_rel_counts.push(Number(curr_sub_data[1]));
+ sub_rel_hit.push(Number(curr_sub_data[3]));
+              // dummy entry: shared xy-curve utilities expect a populated sub_values array; the value itself is never used
+ sub_values.push(0);
+ }
+ }
+ } catch (e) {
+ // this is an error produced by a bug in the query function, not an error returned by the mysql database
+ e.message = `Error in parseQueryDataReliability. The expected fields don't seem to be present in the results cache: ${e.message}`;
+ throw new Error(e.message);
+ }
+ } else {
+ sub_rel_counts = NaN;
+ sub_rel_hit = NaN;
+ sub_secs = NaN;
+ if (hasLevels) {
+ sub_levs = NaN;
+ }
+ }
+ subRelCount.push(sub_rel_counts);
+ subRelHit.push(sub_rel_hit);
+ subVals.push(sub_values);
+ subSecs.push(sub_secs);
+ if (hasLevels) {
+ subLevs.push(sub_levs);
+ }
}
}
@@ -2072,7 +2171,14 @@ const parseQueryDataReliability = function (rows, d, kernel) {
d.binVals = binVals;
d.hitCount = hitCounts;
d.fcstCount = fcstCounts;
+ d.fcstRawCount = fcstRawCounts;
d.sample_climo = sampleClimo;
+ d.subRelHit = subRelHit;
+ d.subRelCount = subRelCount;
+ d.subRelRawCount = subRelRawCount;
+ d.subVals = subVals;
+ d.subSecs = subSecs;
+ d.subLevs = subLevs;
let xMin = Number.MAX_VALUE;
let xMax = -1 * Number.MAX_VALUE;
diff --git a/meteor_packages/mats-common/imports/startup/server/data_util.js b/meteor_packages/mats-common/imports/startup/server/data_util.js
index b35409db2..b1bc6b8e5 100644
--- a/meteor_packages/mats-common/imports/startup/server/data_util.js
+++ b/meteor_packages/mats-common/imports/startup/server/data_util.js
@@ -1480,6 +1480,10 @@ const removePoint = function (
data.subObsSum.splice(di, 1);
data.subAbsSum.splice(di, 1);
}
+ } else if (data.subRelHit !== undefined) {
+ data.subRelHit.splice(di, 1);
+ data.subRelCount.splice(di, 1);
+ data.subRelRawCount.splice(di, 1);
}
if (data.subValsX !== undefined) {
data.subValsX.splice(di, 1);
@@ -1544,6 +1548,10 @@ const addNullPoint = function (
data.subObsSum.splice(di, 0, []);
data.subAbsSum.splice(di, 0, []);
}
+ } else if (data.subRelHit !== undefined) {
+ data.subRelHit.splice(di, 0, []);
+ data.subRelCount.splice(di, 0, []);
+ data.subRelRawCount.splice(di, 0, []);
}
if (data.subValsX !== undefined) {
data.subValsX.splice(di, 0, []);
@@ -1589,6 +1597,10 @@ const nullPoint = function (data, di, statVarName, isCTC, isScalar, hasLevels) {
data.subObsSum[di] = [];
data.subAbsSum[di] = [];
}
+ } else if (data.subRelHit !== undefined) {
+ data.subRelHit[di] = [];
+ data.subRelCount[di] = [];
+ data.subRelRawCount[di] = [];
}
if (data.subValsX !== undefined) {
data.subValsX[di] = [];