Skip to content

Commit

Permalink
Improved PointConditionalDistribution
Browse files Browse the repository at this point in the history
The following special cases have been implemented: EmpiricalBernsteinCopula, Dirac (full conditioning), Mixture, KernelMixture
  • Loading branch information
regislebrun committed Nov 8, 2024
1 parent ae136d3 commit ba849cc
Showing 1 changed file with 153 additions and 7 deletions.
160 changes: 153 additions & 7 deletions lib/src/Uncertainty/Distribution/PointConditionalDistribution.cxx
Original file line number Diff line number Diff line change
Expand Up @@ -28,14 +28,21 @@
#include "openturns/SimplicialCubature.hxx"
#include "openturns/CubaIntegration.hxx"
#include "openturns/Tuples.hxx"
#include "openturns/Beta.hxx"
#include "openturns/Dirac.hxx"
#include "openturns/Dirichlet.hxx"
#include "openturns/EmpiricalBernsteinCopula.hxx"
#include "openturns/JointDistribution.hxx"
#include "openturns/KernelMixture.hxx"
#include "openturns/Mixture.hxx"
#include "openturns/Normal.hxx"
#include "openturns/Student.hxx"
#include "openturns/OptimizationAlgorithm.hxx"
#include "openturns/RandomGenerator.hxx"
#include "openturns/DomainEvent.hxx"
#include "openturns/PlatformInfo.hxx"
#include "openturns/JointDistribution.hxx"
#include "openturns/GaussKronrod.hxx"
#include "openturns/SobolSequence.hxx"
#include "openturns/DistFunc.hxx"

BEGIN_NAMESPACE_OPENTURNS
Expand Down Expand Up @@ -260,7 +267,6 @@ void PointConditionalDistribution::update()
const Interval bounds(getRange());
const Point lb(bounds.getLowerBound());
const Point ub(bounds.getUpperBound());
const Point middle(0.5 * (lb + ub));

// First, the upper bound on U
const Function objectiveU(new PointConditionalDistributionUBoundEvaluation(*this, r_));
Expand All @@ -269,7 +275,18 @@ void PointConditionalDistribution::update()
problemU.setBounds(bounds);
OptimizationAlgorithm algo(OptimizationAlgorithm::GetByName(ResourceMap::GetAsString("PointConditionalDistribution-OptimizationAlgorithm")));
algo.setProblem(problemU);
algo.setStartingPoint(middle);
const SobolSequence generator(dimension);
Bool found = false;
Point candidate;
while (!found)
{
candidate = lb;
const Point u(generator.generate());
for (UnsignedInteger i = 0; i < dimension; ++i)
candidate[i] += (ub[i] - lb[i]) * u[i];
found = computePDF(candidate) > 0.0;
} // found
algo.setStartingPoint(candidate);
algo.run();
supU_ = std::exp(algo.getResult().getOptimalValue()[0]);

Expand All @@ -287,18 +304,36 @@ void PointConditionalDistribution::update()
{
problemVI.setBounds(Interval(zero, ub));
algo.setProblem(problemVI);
algo.setStartingPoint(ub * 0.5);
Bool found = false;
while (!found)
{
candidate = zero;
const Point u(generator.generate());
for (UnsignedInteger i = 0; i < dimension; ++i)
candidate[i] = ub[i] * u[i];
found = computePDF(candidate) > 0.0;
} // found
algo.setStartingPoint(candidate);
algo.run();
supV_[i] = std::exp(algo.getResult().getOptimalValue()[0]);
}
} // ub[i] > 0.0
if (lb[i] < 0.0)
{
problemVI.setBounds(Interval(lb, zero));
algo.setProblem(problemVI);
algo.setStartingPoint(lb * 0.5);
Bool found = false;
while (!found)
{
candidate = zero;
const Point u(generator.generate());
for (UnsignedInteger i = 0; i < dimension; ++i)
candidate[i] = lb[i] * u[i];
found = computePDF(candidate) > 0.0;
} // found
algo.setStartingPoint(candidate);
algo.run();
infV_[i] = -std::exp(algo.getResult().getOptimalValue()[0]);
}
} // lb[i] < 0.0
}
}

Expand Down Expand Up @@ -346,6 +381,13 @@ Bool PointConditionalDistribution::hasSimplifiedVersion(Distribution & simplifie
return true;
}

// full conditioning
if (getDimension() == 0)
{
simplified = Dirac(conditioningValues_);
return true;
}

// conditioning components have no influence on the other components
if (distribution_.hasIndependentCopula())
{
Expand Down Expand Up @@ -378,6 +420,110 @@ Bool PointConditionalDistribution::hasSimplifiedVersion(Distribution & simplifie
return true;
}

// Mixture
Mixture *p_mixture = dynamic_cast<Mixture *>(distribution_.getImplementation().get());
if (p_mixture)
{
Collection<Distribution> atoms(p_mixture->getDistributionCollection());
const UnsignedInteger atomsNumber = atoms.getSize();
Point newWeights(p_mixture->getWeights());
Collection<Distribution> newAtoms(atomsNumber);
for (UnsignedInteger i = 0; i < atomsNumber; ++i)
{
newWeights[i] *= atoms[i].getMarginal(conditioningIndices_).computePDF(conditioningValues_);
newAtoms[i] = PointConditionalDistribution(atoms[i], conditioningIndices_, conditioningValues_);
}
simplified = Mixture(newAtoms, newWeights);
return true;
}

// Kernel mixture
KernelMixture *p_kernel_mixture = dynamic_cast<KernelMixture *>(distribution_.getImplementation().get());
if (p_kernel_mixture)
{
const Distribution kernel(p_kernel_mixture->getKernel());
const Point bandwidth(p_kernel_mixture->getBandwidth());
const Sample sample(p_kernel_mixture->getInternalSample());
const UnsignedInteger sampleSize = sample.getSize();
Collection<Distribution> atoms(sampleSize);
Point weights(sampleSize, 1.0);
const UnsignedInteger dimension = getDimension();
const UnsignedInteger conditioningDimension = conditioningIndices_.getSize();
for (UnsignedInteger i = 0; i < sampleSize; ++i)
{
Collection<Distribution> atomComponents(dimension);
for (UnsignedInteger j = 0; j < dimension; ++j)
{
const UnsignedInteger newJ = nonConditioningIndices_[j];
const Scalar hJ = bandwidth[newJ];
atomComponents[j] = kernel * hJ + sample(i, newJ);
} // j
atoms[i] = JointDistribution(atomComponents);
for (UnsignedInteger j = 0; j < conditioningDimension; ++j)
{
const UnsignedInteger newJ = conditioningIndices_[j];
const Scalar hJ = bandwidth[newJ];
const Scalar xJ = conditioningValues_[j];
weights[i] *= kernel.computePDF((xJ - sample(i, newJ)) / hJ) / hJ;
} // j
} // i
simplified = Mixture(atoms, weights);
return true;
}

// EmpiricalBernsteinCopula
EmpiricalBernsteinCopula *p_empirical_bernstein_copula = dynamic_cast<EmpiricalBernsteinCopula *>(distribution_.getImplementation().get());
if (p_empirical_bernstein_copula)
{
const Sample copulaSample(p_empirical_bernstein_copula->getCopulaSample());
const UnsignedInteger sampleSize = copulaSample.getSize();
const UnsignedInteger binNumber = p_empirical_bernstein_copula->getBinNumber();
Collection<Distribution> atoms(sampleSize);
Point weights(sampleSize, 1.0);
const UnsignedInteger dimension = getDimension();
const UnsignedInteger conditioningDimension = conditioningIndices_.getSize();
for (UnsignedInteger i = 0; i < sampleSize; ++i)
{
Collection<Distribution> atomComponents(dimension);
for (UnsignedInteger j = 0; j < dimension; ++j)
{
const UnsignedInteger newJ = nonConditioningIndices_[j];
const Scalar r = std::ceil(binNumber * copulaSample(i, newJ));
atomComponents[j] = Beta(r, binNumber - r + 1.0, 0.0, 1.0);
} // j
atoms[i] = JointDistribution(atomComponents);
for (UnsignedInteger j = 0; j < conditioningDimension; ++j)
{
const UnsignedInteger newJ = conditioningIndices_[j];
const Scalar r = std::ceil(binNumber * copulaSample(i, newJ));
weights[i] *= Beta(r, binNumber - r + 1.0, 0.0, 1.0).computePDF(conditioningValues_[j]);
} // j
} // i
simplified = Mixture(atoms, weights);
return true;
}

// Dirichlet
Dirichlet *p_dirichlet = dynamic_cast<Dirichlet *>(distribution_.getImplementation().get());
if (p_dirichlet)
{
const Point theta(p_dirichlet->getTheta());
Scalar weight = 1.0;
for (UnsignedInteger i = 0; i < conditioningValues_.getSize(); ++i)
weight -= conditioningValues_[i];
const UnsignedInteger dimension = getDimension();
Point newTheta(dimension + 1, theta[p_dirichlet->getDimension()]);
for (UnsignedInteger i = 0; i < nonConditioningIndices_.getSize(); ++i)
{
const UnsignedInteger newI = nonConditioningIndices_[i];
newTheta[i] =theta[i];
}
// Unfortunately we don't know how to multiply a multivariate distribution by a scalar
// simplified = Distribution(Dirichlet(newTheta)) * weight;
// return true;
return false;
}

// Joint
JointDistribution *p_joint = dynamic_cast<JointDistribution *>(distribution_.getImplementation().get());
if (p_joint && p_joint->getCore().isCopula())
Expand Down

0 comments on commit ba849cc

Please sign in to comment.