Commit 95a8490b authored by graham

ENH: Import (probability) Distribution class and test app from cvm.

parent 6ffbe055
/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | Copyright (C) 2008-2011 OpenCFD Ltd.
     \\/     M anipulation  |
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

    OpenFOAM is free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    for more details.

    You should have received a copy of the GNU General Public License
    along with OpenFOAM.  If not, see <http://www.gnu.org/licenses/>.
Application
    DistributionTest

Description
    Test the Distribution class

    Plot normal distribution test in gnuplot using:

    @verbatim
    normalDistribution(mean, sigma, x) = \
        sqrt(1.0/(2.0*pi*sigma**2))*exp(-(x - mean)**2.0/(2.0*sigma**2))

    plot normalDistribution(8.5, 2.5, x), "Distribution_scalar_test_x" w p
    @endverbatim
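
    For example, the first scalar test below samples 2.5*N(0,1) + 8.5 and
    writes "Distribution_scalar_test_1", so its output can be compared
    against the analytical density with:

    @verbatim
    plot normalDistribution(8.5, 2.5, x), "Distribution_scalar_test_1" w p
    @endverbatim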
\*---------------------------------------------------------------------------*/
#include "vector.H"
#include "labelVector.H"
#include "tensor.H"
#include "Distribution.H"
#include "Random.H"
#include "dimensionedTypes.H"
#include "argList.H"
#include "PstreamReduceOps.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
using namespace Foam;
int main(int argc, char *argv[])
{
    #include "setRootCase.H"

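    // Fixed seed, so successive runs draw an identical sample sequence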
    Random R(918273);

    {
        // scalar

        label randomDistributionTestSize = 50000000;

        Distribution<scalar> dS(scalar(5e-2));

        Info<< nl << "Distribution<scalar>" << nl
            << "Sampling "
            << randomDistributionTestSize
            << " times from GaussNormal distribution."
            << endl;

        for (label i = 0; i < randomDistributionTestSize; i++)
        {
            dS.add(2.5*R.GaussNormal() + 8.5);
        }

        Info<< "Mean " << dS.mean() << nl
            << "Median " << dS.median()
            << endl;

        dS.write("Distribution_scalar_test_1");

        Distribution<scalar> dS2(scalar(1e-2));

        Info<< nl << "Distribution<scalar>" << nl
            << "Sampling "
            << randomDistributionTestSize
            << " times from GaussNormal distribution."
            << endl;

        for (label i = 0; i < randomDistributionTestSize; i++)
        {
            dS2.add(1.5*R.GaussNormal() - 6.0);
        }

        Info<< "Mean " << dS2.mean() << nl
            << "Median " << dS2.median()
            << endl;

        dS2.write("Distribution_scalar_test_2");

        Info<< nl << "Adding previous two Distribution<scalar>" << endl;

        dS = dS + dS2;

        dS.write("Distribution_scalar_test_1+2");
    }

    if (Pstream::parRun())
    {
        // scalar in parallel

        label randomDistributionTestSize = 100000000;

        Distribution<scalar> dS(scalar(1e-1));

        Pout<< "Distribution<scalar>" << nl
            << "Sampling "
            << randomDistributionTestSize
            << " times from uniform distribution."
            << endl;

        for (label i = 0; i < randomDistributionTestSize; i++)
        {
            dS.add(R.scalar01() + 10*Pstream::myProcNo());
        }

        Pout<< "Mean " << dS.mean() << nl
            << "Median " << dS.median()
            << endl;
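
        // Combine the per-processor distributions into one; sumOp uses
        // the Distribution operator+, as in the "1+2" scalar test above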
        reduce(dS, sumOp< Distribution<scalar> >());

        if (Pstream::master())
        {
            Info<< "Reducing parallel Distribution<scalar>" << nl
                << "Mean " << dS.mean() << nl
                << "Median " << dS.median()
                << endl;

            dS.write("Distribution_scalar_test_parallel_reduced");
        }
    }

    {
        // vector

        Distribution<vector> dV(vector(0.1, 0.05, 0.15));

        label randomDistributionTestSize = 1000000;

        Info<< nl << "Distribution<vector>" << nl
            << "Sampling "
            << randomDistributionTestSize
            << " times from uniform and GaussNormal distribution."
            << endl;

        for (label i = 0; i < randomDistributionTestSize; i++)
        {
            dV.add(R.vector01());

            // Adding separate GaussNormal components with component
            // weights
            dV.add
            (
                vector
                (
                    R.GaussNormal()*3.0 + 1.5,
                    R.GaussNormal()*0.25 + 4.0,
                    R.GaussNormal()*3.0 - 1.5
                ),
                vector(1.0, 2.0, 5.0)
            );
        }

        Info<< "Mean " << dV.mean() << nl
            << "Median " << dV.median()
            << endl;

        dV.write("Distribution_vector_test");
    }

    // {
    //     // labelVector

    //     Distribution<labelVector> dLV(labelVector::one*10);

    //     label randomDistributionTestSize = 2000000;

    //     Info<< nl << "Distribution<labelVector>" << nl
    //         << "Sampling "
    //         << randomDistributionTestSize
    //         << " times from uniform distribution."
    //         << endl;

    //     for (label i = 0; i < randomDistributionTestSize; i++)
    //     {
    //         dLV.add
    //         (
    //             labelVector
    //             (
    //                 R.integer(-1000, 1000),
    //                 R.integer(-5000, 5000),
    //                 R.integer(-2000, 7000)
    //             )
    //         );
    //     }

    //     Info<< "Mean " << dLV.mean() << nl
    //         << "Median " << dLV.median()
    //         << endl;

    //     dLV.write("Distribution_labelVector_test");
    // }

    {
        // tensor

        Distribution<tensor> dT(tensor::one*1e-2);

        label randomDistributionTestSize = 2000000;

        Info<< nl << "Distribution<tensor>" << nl
            << "Sampling "
            << randomDistributionTestSize
            << " times from uniform distribution."
            << endl;

        for (label i = 0; i < randomDistributionTestSize; i++)
        {
            dT.add(R.tensor01());
        }

        Info<< "Mean " << dT.mean() << nl
            << "Median " << dT.median()
            << endl;

        dT.write("Distribution_tensor_test");
    }

    {
        // symmTensor

        Distribution<symmTensor> dSyT(symmTensor::one*1e-2);

        label randomDistributionTestSize = 2000000;

        Info<< nl << "Distribution<symmTensor>" << nl
            << "Sampling "
            << randomDistributionTestSize
            << " times from uniform distribution."
            << endl;

        for (label i = 0; i < randomDistributionTestSize; i++)
        {
            dSyT.add(R.symmTensor01());
        }

        Info<< "Mean " << dSyT.mean() << nl
            << "Median " << dSyT.median()
            << endl;

        dSyT.write("Distribution_symmTensor_test");
    }

    {
        // sphericalTensor

        Distribution<sphericalTensor> dSpT(sphericalTensor::one*1e-2);

        label randomDistributionTestSize = 50000000;

        Info<< nl << "Distribution<sphericalTensor>" << nl
            << "Sampling "
            << randomDistributionTestSize
            << " times from uniform distribution."
            << endl;

        for (label i = 0; i < randomDistributionTestSize; i++)
        {
            dSpT.add(R.sphericalTensor01());
        }

        Info<< "Mean " << dSpT.mean() << nl
            << "Median " << dSpT.median()
            << endl;

        dSpT.write("Distribution_sphericalTensor_test");
    }

    Info<< nl << "End" << nl << endl;

    return 0;
}
// ************************************************************************* //
DistributionTest.C
EXE = $(FOAM_USER_APPBIN)/DistributionTest
/* EXE_INC = */
/* EXE_LIBS = */
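
The two stanzas above are the wmake Make/files and Make/options for the test
application. A minimal build-and-run sketch (assuming a sourced OpenFOAM
environment; the mpirun line is an assumption based on the Pstream::parRun()
branch in the test, and any parallel case setup is not shown):

    wmake
    DistributionTest
    mpirun -np 4 DistributionTest -parallel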
/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | Copyright (C) 2009-2011 OpenCFD Ltd.
     \\/     M anipulation  |
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

    OpenFOAM is free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    for more details.

    You should have received a copy of the GNU General Public License
    along with OpenFOAM.  If not, see <http://www.gnu.org/licenses/>.
\*---------------------------------------------------------------------------*/
#include "Distribution.H"
#include "OFstream.H"
#include "ListOps.H"
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
template<class Type>
Foam::Distribution<Type>::Distribution()
:
    List< List<scalar> >(pTraits<Type>::nComponents),
    binWidth_(pTraits<Type>::one),
    listStarts_(pTraits<Type>::nComponents, 0)
{}


template<class Type>
Foam::Distribution<Type>::Distribution(const Type& binWidth)
:
    List< List<scalar> >(pTraits<Type>::nComponents),
    binWidth_(binWidth),
    listStarts_(pTraits<Type>::nComponents, 0)
{}


template<class Type>
Foam::Distribution<Type>::Distribution(const Distribution<Type>& d)
:
    List< List<scalar> >(static_cast< const List< List<scalar> >& >(d)),
    binWidth_(d.binWidth()),
    listStarts_(d.listStarts())
{}

// * * * * * * * * * * * * * * * * Destructor * * * * * * * * * * * * * * * //
template<class Type>
Foam::Distribution<Type>::~Distribution()
{}
// * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * * //
template<class Type>
Foam::scalar Foam::Distribution<Type>::totalWeight(direction cmpt) const
{
    const List<scalar>& cmptDistribution = (*this)[cmpt];

    scalar sumOfWeights = 0.0;

    forAll(cmptDistribution, i)
    {
        sumOfWeights += cmptDistribution[i];
    }

    return sumOfWeights;
}
template<class Type>
Foam::List<Foam::label> Foam::Distribution<Type>::keys(direction cmpt) const
{
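    // Bin occupancies are stored relative to listStarts_[cmpt]; add the
    // start offset back to recover the absolute bin numbers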
    List<label> keys = identity((*this)[cmpt].size());

    forAll(keys, k)
    {
        keys[k] += listStarts_[cmpt];
    }

    return keys;
}
template<class Type>
Foam::label Foam::Distribution<Type>::index
(
    direction cmpt,
    label n
)
{
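    // Map bin number n to a position in this component's storage list,
    // doubling the storage (and, on underflow, shifting the start offset)
    // whenever n falls outside the currently allocated range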
    List<scalar>& cmptDistribution = (*this)[cmpt];

    if (cmptDistribution.empty())
    {
        // Initialise this list with this value

        cmptDistribution.setSize(2, 0.0);

        listStarts_[cmpt] = n;

        return 0;
    }

    label listIndex = -1;

    label& listStart = listStarts_[cmpt];

    label testIndex = n - listStart;

    if (testIndex < 0)
    {
        // Underflow of this List, storage increase and remapping
        // required

        List<scalar> newCmptDistribution(2*cmptDistribution.size(), 0.0);

        label sOld = cmptDistribution.size();

        forAll(cmptDistribution, i)
        {
            newCmptDistribution[i + sOld] = cmptDistribution[i];
        }

        cmptDistribution = newCmptDistribution;

        listStart -= sOld;

        // Recursively call this function in case another remap is required.
        listIndex = index(cmpt, n);
    }
    else if (testIndex > cmptDistribution.size() - 1)
    {
        // Overflow of this List, storage increase required

        cmptDistribution.setSize(2*cmptDistribution.size(), 0.0);

        // Recursively call this function in case another storage
        // alteration is required.
        listIndex = index(cmpt, n);
    }
    else
    {
        listIndex = n - listStart;
    }

    return listIndex;
}
template<class Type>
Foam::Pair<Foam::label> Foam::Distribution<Type>::validLimits
(
    direction cmpt
) const
{
    const List<scalar>& cmptDistribution = (*this)[cmpt];

    // limits.first(): lower bound, i.e. the first non-zero entry
    // limits.second(): upper bound, i.e. the last non-zero entry
    Pair<label> limits(-1, -1);

    forAll(cmptDistribution, i)
    {
        if (cmptDistribution[i] > 0.0)
        {
            if (limits.first() == -1)
            {
                limits.first() = i;
                limits.second() = i;
            }
            else
            {
                limits.second() = i;
            }
        }
    }

    return limits;
}
template<class Type>
Type Foam::Distribution<Type>::mean() const
{
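    // Each bin with key k spans [k, k + 1)*binWidth, so its midpoint is
    // (k + 0.5)*binWidth; the mean is the weight-average of the midpoints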
    Type meanValue(pTraits<Type>::zero);

    for (direction cmpt = 0; cmpt < pTraits<Type>::nComponents; cmpt++)
    {
        const List<scalar>& cmptDistribution = (*this)[cmpt];

        scalar totalCmptWeight = totalWeight(cmpt);

        List<label> theKeys = keys(cmpt);

        forAll(theKeys, k)
        {
            label key = theKeys[k];

            setComponent(meanValue, cmpt) +=
                (0.5 + scalar(key))
               *component(binWidth_, cmpt)
               *cmptDistribution[k]
               /totalCmptWeight;
        }
    }

    return meanValue;
}
template<class Type>
Type Foam::Distribution<Type>::median() const
{
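    // Accumulate the normalised distribution until the cumulative weight
    // crosses 0.5, then interpolate linearly between the bin midpoints
    // either side of the crossing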
    Type medianValue(pTraits<Type>::zero);

    List< List < Pair<scalar> > > normDistribution = normalised();

    for (direction cmpt = 0; cmpt < pTraits<Type>::nComponents; cmpt++)
    {
        List< Pair<scalar> >& normDist = normDistribution[cmpt];

        if (normDist.size())
        {
            if (normDist.size() == 1)
            {
                setComponent(medianValue, cmpt) = normDist[0].first();
            }
            else if
            (
                normDist.size() > 1
             && normDist[0].second()*component(binWidth_, cmpt) > 0.5
            )
            {
                scalar xk =
                    normDist[0].first()
                  + 0.5*component(binWidth_, cmpt);

                scalar xkm1 =
                    normDist[0].first()
                  - 0.5*component(binWidth_, cmpt);

                scalar Sk = (normDist[0].second())*component(binWidth_, cmpt);

                setComponent(medianValue, cmpt) = 0.5*(xk - xkm1)/(Sk) + xkm1;
            }
            else
            {
                label previousNonZeroIndex = 0;

                scalar cumulative = 0.0;

                forAll(normDist, nD)
                {
                    if
                    (
                        cumulative
                      + (normDist[nD].second()*component(binWidth_, cmpt))
                      > 0.5
                    )
                    {
                        scalar xk =
                            normDist[nD].first()
                          + 0.5*component(binWidth_, cmpt);

                        scalar xkm1 =
                            normDist[previousNonZeroIndex].first()
                          + 0.5*component(binWidth_, cmpt);

                        scalar Sk =
                            cumulative
                          + (normDist[nD].second()*component(binWidth_, cmpt));

                        scalar Skm1 = cumulative;

                        setComponent(medianValue, cmpt) =
                            (0.5 - Skm1)*(xk - xkm1)/(Sk - Skm1) + xkm1;

                        break;
                    }
                    else if (mag(normDist[nD].second()) > VSMALL)
                    {
                        cumulative +=
                            normDist[nD].second()*component(binWidth_, cmpt);

                        previousNonZeroIndex = nD;
                    }
                }
            }
        }
    }