Commit e3453df7 authored by Fabian Wachsmann

Edited the function that subtracts a value from a histogram and enabled a single-precision store

parent 5c6c5c0a
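
For orientation: addSubVarLevelValues() previously took a trailing bool (lisAdd); it now takes an explicit operation (OP_ADD/OP_SUB) plus a ptype flag that selects the precision of the field data (FIELD_VEC for the double array, FIELD_FLT for the new single-precision store), and getVarLevelPercentiles()/Reset() gain the same ptype argument. Below is a minimal sketch of that dispatch, using simplified stand-in types (FieldStub and assumed flag values), not CDO's real Field/HistogramSet classes.

```cpp
// Minimal sketch of the new (operation, ptype) dispatch; all types here are
// simplified stand-ins for illustration, not CDO's actual declarations.
#include <cstdio>
#include <vector>

enum value_operations { OP_ADD = 1, OP_SUB = 2 };   // as added in this commit
enum field_types { FIELD_VEC = 1, FIELD_FLT = 2 };  // assumed flag values for illustration

struct FieldStub
{
  std::vector<double> vec;  // double-precision data (FIELD_VEC)
  std::vector<float> vecf;  // single-precision data (FIELD_FLT)
};

// Mirrors the control flow of the new HistogramSet::addSubVarLevelValues():
// branch on the precision flag first, then on the operation.
static void
addSubValues(const FieldStub &field, int operation, int ptype)
{
  if (ptype & FIELD_FLT)
    {
      for (float v : field.vecf)
        std::printf("%s float sample %g\n", (operation == OP_ADD) ? "add" : "subtract", v);
    }
  else
    {
      for (double v : field.vec)
        std::printf("%s double sample %g\n", (operation == OP_ADD) ? "add" : "subtract", v);
    }
}

int
main()
{
  FieldStub field;
  field.vec = { 1.0, 2.0 };
  field.vecf = { 1.0f, 2.0f };
  addSubValues(field, OP_ADD, FIELD_VEC);  // old call sites passed 'true' here
  addSubValues(field, OP_SUB, FIELD_FLT);  // new single-precision subtract path
  return 0;
}
```
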
@@ -170,7 +170,7 @@ Seaspctl(void *process)
cdoReadRecord(streamID1, vars1[varID][levelID].vec.data(), &nmiss);
vars1[varID][levelID].nmiss = nmiss;
hset.addSubVarLevelValues(varID, levelID, vars1[varID][levelID], true);
hset.addSubVarLevelValues(varID, levelID, vars1[varID][levelID], 1, FIELD_VEC);
}
nsets++;
@@ -184,7 +184,7 @@ Seaspctl(void *process)
if (vlistInqVarTimetype(vlistID1, varID) == TIME_CONSTANT) continue;
const auto nlevels = zaxisInqSize(vlistInqVarZaxis(vlistID1, varID));
for (levelID = 0; levelID < nlevels; levelID++) hset.getVarLevelPercentiles(vars1[varID][levelID], varID, levelID, pn);
for (levelID = 0; levelID < nlevels; levelID++) hset.getVarLevelPercentiles(vars1[varID][levelID], varID, levelID, pn, FIELD_VEC);
}
dtlist.statTaxisDefTimestep(taxisID4, nsets);
@@ -158,7 +158,7 @@ timpctl(int operatorID)
cdoReadRecord(streamID1, vars1[varID][levelID].vec.data(), &nmiss);
vars1[varID][levelID].nmiss = nmiss;
hset.addSubVarLevelValues(varID, levelID, vars1[varID][levelID], true);
hset.addSubVarLevelValues(varID, levelID, vars1[varID][levelID], 1, FIELD_VEC);
}
nsets++;
@@ -172,7 +172,7 @@ timpctl(int operatorID)
if (vlistInqVarTimetype(vlistID1, varID) == TIME_CONSTANT) continue;
const auto nlevels = zaxisInqSize(vlistInqVarZaxis(vlistID1, varID));
for (levelID = 0; levelID < nlevels; levelID++) hset.getVarLevelPercentiles(vars1[varID][levelID], varID, levelID, pn);
for (levelID = 0; levelID < nlevels; levelID++) hset.getVarLevelPercentiles(vars1[varID][levelID], varID, levelID, pn, FIELD_VEC);
}
dtlist.statTaxisDefTimestep(taxisID4, nsets);
@@ -186,7 +186,7 @@ Timselpctl(void *process)
cdoReadRecord(streamID1, vars1[varID][levelID].vec.data(), &nmiss);
vars1[varID][levelID].nmiss = nmiss;
hset.addSubVarLevelValues(varID, levelID, vars1[varID][levelID], true);
hset.addSubVarLevelValues(varID, levelID, vars1[varID][levelID], 1, FIELD_VEC);
}
tsID++;
@@ -199,7 +199,7 @@ Timselpctl(void *process)
if (vlistInqVarTimetype(vlistID1, varID) == TIME_CONSTANT) continue;
const auto nlevels = zaxisInqSize(vlistInqVarZaxis(vlistID1, varID));
for (levelID = 0; levelID < nlevels; levelID++) hset.getVarLevelPercentiles(vars1[varID][levelID], varID, levelID, pn);
for (levelID = 0; levelID < nlevels; levelID++) hset.getVarLevelPercentiles(vars1[varID][levelID], varID, levelID, pn, FIELD_VEC);
}
dtlist.statTaxisDefTimestep(taxisID4, nsets);
@@ -171,7 +171,7 @@ Ydaypctl(void *process)
cdoReadRecord(streamID1, vars1[dayoy][varID][levelID].vec.data(), &nmiss);
vars1[dayoy][varID][levelID].nmiss = nmiss;
hsets[dayoy].addSubVarLevelValues(varID, levelID, vars1[dayoy][varID][levelID], true);
hsets[dayoy].addSubVarLevelValues(varID, levelID, vars1[dayoy][varID][levelID], 1, FIELD_VEC);
}
nsets[dayoy]++;
@@ -192,7 +192,7 @@ Ydaypctl(void *process)
const auto nlevels = zaxisInqSize(vlistInqVarZaxis(vlistID1, varID));
for (levelID = 0; levelID < nlevels; levelID++)
hsets[dayoy].getVarLevelPercentiles(vars1[dayoy][varID][levelID], varID, levelID, pn);
hsets[dayoy].getVarLevelPercentiles(vars1[dayoy][varID][levelID], varID, levelID, pn, FIELD_VEC);
}
taxisDefVdate(taxisID4, vdates1[dayoy]);
@@ -171,7 +171,7 @@ Ymonpctl(void *process)
cdoReadRecord(streamID1, vars1[month][varID][levelID].vec.data(), &nmiss);
vars1[month][varID][levelID].nmiss = nmiss;
hsets[month].addSubVarLevelValues(varID, levelID, vars1[month][varID][levelID], true);
hsets[month].addSubVarLevelValues(varID, levelID, vars1[month][varID][levelID], 1, FIELD_VEC);
}
nsets[month]++;
@@ -192,7 +192,7 @@ Ymonpctl(void *process)
const auto nlevels = zaxisInqSize(vlistInqVarZaxis(vlistID1, varID));
for (levelID = 0; levelID < nlevels; levelID++)
hsets[month].getVarLevelPercentiles(vars1[month][varID][levelID], varID, levelID, pn);
hsets[month].getVarLevelPercentiles(vars1[month][varID][levelID], varID, levelID, pn, FIELD_VEC);
}
taxisDefVdate(taxisID4, vdates1[month]);
@@ -177,7 +177,7 @@ Yseaspctl(void *process)
cdoReadRecord(streamID1, vars1[seas][varID][levelID].vec.data(), &nmiss);
vars1[seas][varID][levelID].nmiss = nmiss;
hsets[seas].addSubVarLevelValues(varID, levelID, vars1[seas][varID][levelID], true);
hsets[seas].addSubVarLevelValues(varID, levelID, vars1[seas][varID][levelID], 1, FIELD_VEC);
}
nsets[seas]++;
@@ -198,7 +198,7 @@ Yseaspctl(void *process)
const auto nlevels = zaxisInqSize(vlistInqVarZaxis(vlistID1, varID));
for (levelID = 0; levelID < nlevels; levelID++)
hsets[seas].getVarLevelPercentiles(vars1[seas][varID][levelID], varID, levelID, pn);
hsets[seas].getVarLevelPercentiles(vars1[seas][varID][levelID], varID, levelID, pn, FIELD_VEC);
}
taxisDefVdate(taxisID4, datetime1[seas].date);
@@ -30,8 +30,15 @@
#define DBL_CAPACITY(n) ((int) (((n) * sizeof(int)) / sizeof(double)))
#define DBL_PTR(p) ((double *) (p))
#define FLT_PTR(p) ((float *) (p))
#define INT_PTR(p) ((int *) (p))
enum value_operations
{
OP_ADD = 1,
OP_SUB = 2
};
static int
histGetEnvNBins()
{
@@ -83,6 +90,18 @@ histBin(Histogram &hist)
for (int i = 0; i < hist.nsamp; i++) histBinValue(hist, values[i]);
}
static void
histBinF(Histogram &hist)
{
assert(hist.nsamp == DBL_CAPACITY(hist.nbins));
std::vector<float> values(hist.nsamp);
for (int i = 0; i < hist.nsamp; i++) values[i] = FLT_PTR(hist.ptr)[i];
for (int i = 0; i < hist.nbins; i++) INT_PTR(hist.ptr)[i] = 0;
for (int i = 0; i < hist.nsamp; i++) histBinValue(hist, (double) values[i]);
}
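
The single-precision store introduced here keeps the existing buffer scheme: a histogram's nbins integer bins double as a raw-sample store until DBL_CAPACITY(nbins) samples have been collected, at which point histBinF() folds the raw float samples into integer bin counts. Below is a small standalone illustration of the capacity arithmetic and the pointer reinterpretation; the sizes assumed (4-byte int, 8-byte double, 4-byte float) are the usual ones but are platform dependent.

```cpp
// Standalone illustration of the DBL_CAPACITY buffer reuse used above.
// The histogram's bin array (nbins ints) doubles as a raw-sample store
// until it is full, at which point samples are folded into bin counts.
#include <cstdio>
#include <vector>

#define DBL_CAPACITY(n) ((int) (((n) * sizeof(int)) / sizeof(double)))

int
main()
{
  const int nbins = 101;
  // With 4-byte int and 8-byte double, the bin array holds nbins/2 raw doubles;
  // the float path in this commit keeps the same DBL_CAPACITY limit, so it also
  // stores at most nbins/2 raw floats before switching to binning.
  std::printf("nbins = %d, raw-sample capacity = %d\n", nbins, DBL_CAPACITY(nbins));

  // The same bytes are reinterpreted: first as raw samples, later as int bins
  // (this mirrors the FLT_PTR()/INT_PTR() casts in the patch).
  std::vector<int> bins(nbins, 0);
  float *samples = reinterpret_cast<float *>(bins.data());
  samples[0] = 0.25f;  // store one raw sample
  std::printf("first raw sample = %g\n", samples[0]);
  return 0;
}
```
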
static int
histReset(Histogram &hist)
{
@@ -101,6 +120,24 @@ histReset(Histogram &hist)
return 0;
}
static int
histResetF(Histogram &hist)
{
assert(hist.nbins > 0);
if (hist.nsamp < DBL_CAPACITY(hist.nbins))
{
for (int i = 0; i < hist.nsamp; i++) FLT_PTR(hist.ptr)[i] = 0.;
}
else
{
for (int i = 0; i < hist.nbins; i++) INT_PTR(hist.ptr)[i] = 0;
}
hist.nsamp = 0;
return 0;
}
static int
histAddValue(Histogram &hist, double value)
{
@@ -133,6 +170,38 @@ histAddValue(Histogram &hist, double value)
return 0;
}
static int
histAddValueF(Histogram &hist, float value)
{
assert(hist.nbins > 0);
// 2011-08-01 Uwe Schulzweida: added check for rounding errors
if (value < hist.min && (hist.min - value) < 1e5) value = hist.min;
if (value > hist.max && (value - hist.max) < 1e5) value = hist.max;
if (IS_EQUAL(hist.min, hist.max)) return 0;
if (value < hist.min || value > hist.max) return 1;
if (hist.nsamp < DBL_CAPACITY(hist.nbins))
{
FLT_PTR(hist.ptr)[hist.nsamp] = value;
hist.nsamp++;
}
else if (hist.nsamp > DBL_CAPACITY(hist.nbins))
{
histBinValue(hist, (double) value);
hist.nsamp++;
}
else
{
histBinF(hist);
histBinValue(hist, (double) value);
hist.nsamp++;
}
return 0;
}
static void
histRemoveValue(Histogram &hist, double value)
{
@@ -152,6 +221,25 @@ histRemoveValue(Histogram &hist, double value)
hist.nsamp--;
}
static void
histRemoveValueF(Histogram &hist, float value)
{
int i = 0;
for ( i = 0; i < hist.nsamp; i++ )
{
if ( IS_EQUAL(FLT_PTR(hist.ptr)[i], value) )
{
if ( i != hist.nsamp-1 )
FLT_PTR(hist.ptr)[i] = FLT_PTR(hist.ptr)[hist.nsamp-1];
break;
}
}
if ( i == hist.nsamp )
cdoWarning("'%f' not found in histogram!", value);
else
hist.nsamp--;
}
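
histRemoveValueF() mirrors the double version: it looks up the matching sample, overwrites it with the last stored sample, and decrements the count; the order of raw samples does not matter for the later percentile computation. A standalone sketch of that swap-with-last removal, using a std::vector as a stand-in for the raw store:

```cpp
// Standalone sketch of the swap-with-last removal used by histRemoveValueF().
#include <cstdio>
#include <vector>

// Remove one occurrence of 'value' from an unordered sample store.
// Returns false (and warns) if the value is not present.
static bool
removeSample(std::vector<float> &samples, float value)
{
  for (size_t i = 0; i < samples.size(); i++)
    {
      if (samples[i] == value)          // the patch uses IS_EQUAL() here
        {
          samples[i] = samples.back();  // overwrite with the last sample
          samples.pop_back();           // and drop the (now duplicate) tail
          return true;
        }
    }
  std::fprintf(stderr, "'%f' not found in histogram!\n", value);
  return false;
}

int
main()
{
  std::vector<float> samples = { 1.5f, 2.5f, 3.5f };
  removeSample(samples, 1.5f);  // store becomes { 3.5, 2.5 }
  for (float v : samples) std::printf("%g ", v);
  std::printf("\n");
  return 0;
}
```
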
static int
histSubValue(Histogram &hist, double value)
{
@@ -174,13 +262,40 @@ histSubValue(Histogram &hist, double value)
hist.nsamp--;
}
else
return 1
return 1;
return 0;
}
static int
histSubValueF(Histogram &hist, float value)
{
assert(hist.nbins > 0);
// 2011-08-01 Uwe Schulzweida: added check for rounding errors
if (value < hist.min && (hist.min - value) < 1e5) value = hist.min;
if (value > hist.max && (value - hist.max) < 1e5) value = hist.max;
if (IS_EQUAL(hist.min, hist.max)) return 0;
if (value < hist.min || value > hist.max) return 1;
if (hist.nsamp < DBL_CAPACITY(hist.nbins))
{
histRemoveValueF(hist, value);
}
else if (hist.nsamp > DBL_CAPACITY(hist.nbins))
{
histBinSubValue(hist, (double)value);
hist.nsamp--;
}
else
return 1;
return 0;
}
static double
histGetPercentile(const Histogram &hist, double p)
histGetPercentile(const Histogram &hist, double p, int ptype)
{
int i = 0, count = 0;
@@ -207,7 +322,14 @@ histGetPercentile(const Histogram &hist, double p)
}
else
{
return percentile(DBL_PTR(hist.ptr), hist.nsamp, p);
if ( (ptype & FIELD_FLT))
{
double values[hist.nsamp];
for (int i = 0; i < hist.nsamp; i++) values[i] = (double)FLT_PTR(hist.ptr)[i];
return percentile(values, hist.nsamp, p);
}
else
return percentile(DBL_PTR(hist.ptr), hist.nsamp, p);
}
}
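
In the raw-sample case, the FIELD_FLT branch added to histGetPercentile() widens the stored float samples to double and passes them to CDO's percentile() routine. The standalone sketch below shows the same widen-then-compute pattern; nearestRankPercentile() is only an illustrative stand-in, since CDO's percentile() supports several configurable methods.

```cpp
// Standalone sketch of the FIELD_FLT branch: widen float samples to double,
// then compute a percentile. nearestRankPercentile() is an illustrative
// stand-in for CDO's percentile() function.
#include <algorithm>
#include <cstdio>
#include <vector>

static double
nearestRankPercentile(std::vector<double> values, double p)
{
  // p is given in percent (0..100), as in the operators' 'pn' argument.
  std::sort(values.begin(), values.end());
  size_t rank = (size_t) ((p / 100.0) * values.size());
  if (rank >= values.size()) rank = values.size() - 1;
  return values[rank];
}

int
main()
{
  std::vector<float> rawSamples = { 3.0f, 1.0f, 4.0f, 1.5f, 9.0f };

  // Widen to double before the percentile computation, as in the patch.
  std::vector<double> values(rawSamples.begin(), rawSamples.end());
  std::printf("90th percentile = %g\n", nearestRankPercentile(values, 90.0));
  return 0;
}
```
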
@@ -275,10 +397,40 @@ HistogramSet::defVarLevelBounds(int varID, int levelID, const Field &field1, con
}
void
HistogramSet::addSubVarLevelValues(int varID, int levelID, const Field &field, bool lisAdd)
HistogramSet::defVarLevelBoundsF(int varID, int levelID, const Field &field1, const Field &field2)
{
const auto &array1 = field1.vecf;
const auto &array2 = field2.vecf;
assert(!array1.empty());
assert(!array2.empty());
if (varID < 0 || varID >= nvars) cdoAbort("Illegal argument: varID %d is undefined (%s)", varID, __func__);
const auto nlevels = this->var_nlevels[varID];
if (levelID < 0 || levelID >= nlevels) cdoAbort("Illegal argument: levelID %d is undefined (%s)", levelID, __func__);
const auto nhists = this->var_nhists[varID];
if (nhists != gridInqSize(field1.grid) || nhists != gridInqSize(field2.grid)) cdoAbort("Grids are different (%s)", __func__);
auto &hists = this->histograms[varID][levelID];
for (size_t i = 0; i < nhists; i++)
{
float a = array1[i];
float b = array2[i];
if (DBL_IS_EQUAL(a, field1.missval) || DBL_IS_EQUAL(b, field2.missval) || DBL_IS_EQUAL(a, b))
histDefBounds(hists[i], 0.0, 0.0);
else
histDefBounds(hists[i], (double) a, (double) b);
}
}
void
HistogramSet::addSubVarLevelValues(int varID, int levelID, const Field &field, int operation, int ptype)
{
const auto &array = field.vec;
assert(!array.empty());
const auto &arrayf = field.vecf;
if (varID < 0 || varID >= nvars) cdoAbort("Illegal argument: varID %d is undefined (%s)", varID, __func__);
@@ -293,35 +445,67 @@ HistogramSet::addSubVarLevelValues(int varID, int levelID, const Field &field, b
int nign = 0;
int missval = field.missval;
if ( lisAdd )
if ( (ptype & FIELD_FLT))
{
if (field.nmiss)
assert(!arrayf.empty());
if ( operation == OP_ADD )
{
for (size_t i = 0; i < nhists; i++)
if (!DBL_IS_EQUAL(array[i], missval)) nign += histAddValue(hists[i], array[i]);
if (field.nmiss)
{
for (size_t i = 0; i < nhists; i++)
if (!DBL_IS_EQUAL(array[i], missval)) nign += histAddValueF(hists[i], arrayf[i]);
}
else
{
for (size_t i = 0; i < nhists; i++) nign += histAddValueF(hists[i], arrayf[i]);
}
}
else
{
for (size_t i = 0; i < nhists; i++) nign += histAddValue(hists[i], array[i]);
if (field.nmiss)
{
for (size_t i = 0; i < nhists; i++)
if (!DBL_IS_EQUAL(array[i], missval)) nign += histSubValueF(hists[i], arrayf[i]);
}
else
{
for (size_t i = 0; i < nhists; i++) nign += histSubValueF(hists[i], arrayf[i]);
}
}
}
else
{
if (field.nmiss)
{
assert(!array.empty());
if ( operation == OP_ADD )
{
for (size_t i = 0; i < nhists; i++)
if (!DBL_IS_EQUAL(array[i], missval)) nign += histSubValue(hists[i], array[i]);
if (field.nmiss)
{
for (size_t i = 0; i < nhists; i++)
if (!DBL_IS_EQUAL(array[i], missval)) nign += histAddValue(hists[i], array[i]);
}
else
{
for (size_t i = 0; i < nhists; i++) nign += histAddValue(hists[i], array[i]);
}
}
else
{
for (size_t i = 0; i < nhists; i++) nign += histSubValue(hists[i], array[i]);
if (field.nmiss)
{
for (size_t i = 0; i < nhists; i++)
if (!DBL_IS_EQUAL(array[i], missval)) nign += histSubValue(hists[i], array[i]);
}
else
{
for (size_t i = 0; i < nhists; i++) nign += histSubValue(hists[i], array[i]);
}
}
}
if (nign) cdoWarning("%d out of %d grid values are out of bounds and have been ignored (%s)", nign, nhists, __func__);
}
void
HistogramSet::Reset(int varID, int levelID)
HistogramSet::Reset(int varID, int levelID, int ptype)
{
int nvars = this->nvars;
@@ -339,16 +523,19 @@ HistogramSet::Reset(int varID, int levelID)
auto &hists = this->histograms[varID][levelID];
assert(nhists > 0);
for (int i = 0; i < nhists; i++)
histReset(hists[i]);
if ( (ptype & FIELD_FLT))
for (int i = 0; i < nhists; i++)
histResetF(hists[i]);
else
for (int i = 0; i < nhists; i++)
histReset(hists[i]);
}
void
HistogramSet::getVarLevelPercentiles(Field &field, int varID, int levelID, double p)
HistogramSet::getVarLevelPercentiles(Field &field, int varID, int levelID, double p, int ptype)
{
auto &array = field.vec;
assert(!array.empty());
assert(!array.empty());
if (varID < 0 || varID >= nvars) cdoAbort("Illegal argument: varID %d is undefined (%s)", varID, __func__);
@@ -361,12 +548,11 @@ HistogramSet::getVarLevelPercentiles(Field &field, int varID, int levelID, doubl
const auto &hists = this->histograms[varID][levelID];
field.nmiss = 0;
for (size_t i = 0; i < nhists; i++)
{
if (hists[i].nsamp)
{
array[i] = histGetPercentile(hists[i], p);
}
array[i] = histGetPercentile(hists[i], p, ptype);
else
{
array[i] = field.missval;
@@ -79,9 +79,10 @@ public:
void createVarLevels(int varID, int nlevels, size_t nhists);
void defVarLevelBounds(int varID, int levelID, const Field &field1, const Field &field2);
void addSubVarLevelValues(int varID, int levelID, const Field &field, bool lisAdd);
void getVarLevelPercentiles(Field &field, int varID, int levelID, double p);
void Reset(int varID, int levelID);
void defVarLevelBoundsF(int varID, int levelID, const Field &field1, const Field &field2);
void addSubVarLevelValues(int varID, int levelID, const Field &field, int operation, int ptype);
void getVarLevelPercentiles(Field &field, int varID, int levelID, double p, int ptype);
void Reset(int varID, int levelID, int ptype);
};
#endif /* PERCENTILES_HIST_H_ */
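
Summarizing the header changes: callers now state both the operation (OP_ADD/OP_SUB) and which array of the Field carries the data (FIELD_VEC for field.vec, FIELD_FLT for field.vecf), and defVarLevelBoundsF() defines bounds from single-precision input. The compile-only skeleton below shows a typical call sequence against stub signatures; FieldStub, HistogramSetStub and the field_types values are simplified assumptions, and only the method names and parameter lists follow the header diff.

```cpp
// Stand-in skeleton of the updated public interface, with stub bodies,
// to show a typical call sequence. Types and flag values are assumptions.
#include <vector>

enum value_operations { OP_ADD = 1, OP_SUB = 2 };
enum field_types { FIELD_VEC = 1, FIELD_FLT = 2 };  // assumed values

struct FieldStub
{
  std::vector<double> vec;  // double-precision data (FIELD_VEC)
  std::vector<float> vecf;  // single-precision data (FIELD_FLT)
};

class HistogramSetStub
{
public:
  void defVarLevelBounds(int, int, const FieldStub &, const FieldStub &) {}
  void defVarLevelBoundsF(int, int, const FieldStub &, const FieldStub &) {}  // new: float bounds
  void addSubVarLevelValues(int, int, const FieldStub &, int /*operation*/, int /*ptype*/) {}
  void getVarLevelPercentiles(FieldStub &, int, int, double /*p*/, int /*ptype*/) {}
  void Reset(int, int, int /*ptype*/) {}
};

int
main()
{
  HistogramSetStub hset;
  FieldStub bounds1, bounds2, data, result;

  hset.defVarLevelBoundsF(0, 0, bounds1, bounds2);           // single-precision bounds
  hset.addSubVarLevelValues(0, 0, data, OP_ADD, FIELD_FLT);  // accumulate float samples
  hset.addSubVarLevelValues(0, 0, data, OP_SUB, FIELD_FLT);  // subtract them again
  hset.getVarLevelPercentiles(result, 0, 0, 90.0, FIELD_FLT);
  hset.Reset(0, 0, FIELD_FLT);                               // clear the float store
  return 0;
}
```
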