Fixed CBN & CBNFactory
The nodes were not explored in the correct order. Also fixed copyright & date.
regislebrun authored and jschueller committed Apr 7, 2020
1 parent 0c5c89b commit 92969e0
Showing 17 changed files with 166 additions and 118 deletions.
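
The substance of the fix: both ContinuousBayesianNetwork and its factory must address nodes through the topological order of the DAG, so the loop counter i is only a position in that order, while order[i] is the global node index used to look up parents, local distributions, and output components. A minimal, self-contained sketch of that traversal pattern (hypothetical toy types, not the otagrum API):

// Sketch of the indexing convention fixed by this commit: iterate positions i
// in the topological order, but read/write everything through order[i].
#include <cstddef>
#include <iostream>
#include <vector>

int main()
{
  // Toy DAG: 2 -> 0 -> 1, so a valid topological order is {2, 0, 1}.
  const std::vector<std::size_t> order = {2, 0, 1};
  const std::vector<std::vector<std::size_t>> parents = {{2}, {0}, {}};
  std::vector<double> value(3, 0.0);

  for (std::size_t i = 0; i < order.size(); ++i)
  {
    const std::size_t node = order[i];   // global node index (using i here was the bug)
    double sum = 1.0;                    // stand-in for a conditional sample
    for (std::size_t p : parents[node])
      sum += value[p];                   // parents are already set, thanks to the order
    value[node] = sum;
  }
  for (double v : value) std::cout << v << " ";  // prints "2 3 1"
  std::cout << "\n";
  return 0;
}
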
2 changes: 1 addition & 1 deletion COPYING
@@ -635,7 +635,7 @@ the "copyright" line and a pointer to where the full notice is found.
Copyright (C) <year> <name of author>

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

2 changes: 1 addition & 1 deletion cmake/Useotagrum.cmake
@@ -7,7 +7,7 @@
# Copyright 2010-2020 Airbus-LIP6-Phimeca
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
128 changes: 73 additions & 55 deletions lib/src/ContinuousBayesianNetwork.cxx
@@ -6,7 +6,7 @@
* Copyright 2010-2020 Airbus-LIP6-Phimeca
*
* This library is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
@@ -31,33 +31,39 @@
#include <openturns/SpecFunc.hxx>
#include <openturns/Uniform.hxx>

using namespace OT;

namespace OTAGRUM
{

CLASSNAMEINIT(ContinuousBayesianNetwork)

static const OT::Factory<ContinuousBayesianNetwork>
static const Factory<ContinuousBayesianNetwork>
Factory_ContinuousBayesianNetwork;

/* Default constructor */
ContinuousBayesianNetwork::ContinuousBayesianNetwork()
: OT::ContinuousDistribution(), dag_(), jointDistributions_(0)
: ContinuousDistribution()
, dag_()
, jointDistributions_(0)
{
setName("ContinuousBayesianNetwork");
setDAGAndDistributionCollection(dag_, jointDistributions_);
}

/* Parameters constructor */
ContinuousBayesianNetwork::ContinuousBayesianNetwork(
const NamedDAG &dag, const DistributionCollection &jointDistributions)
: OT::ContinuousDistribution(), dag_(dag), jointDistributions_(0)
ContinuousBayesianNetwork::ContinuousBayesianNetwork(const NamedDAG &dag,
const DistributionCollection &jointDistributions)
: ContinuousDistribution()
, dag_(dag)
, jointDistributions_(0)
{
setName("ContinuousBayesianNetwork");
setDAGAndDistributionCollection(dag, jointDistributions);
}

/* Comparison operator */
OT::Bool ContinuousBayesianNetwork::
Bool ContinuousBayesianNetwork::
operator==(const ContinuousBayesianNetwork &other) const
{
if (this == &other)
@@ -66,7 +72,7 @@ operator==(const ContinuousBayesianNetwork &other) const
(jointDistributions_ == other.jointDistributions_);
}

OT::Bool ContinuousBayesianNetwork::equals(
Bool ContinuousBayesianNetwork::equals(
const DistributionImplementation &other) const
{
const ContinuousBayesianNetwork *p_other =
@@ -75,18 +81,18 @@ OT::Bool ContinuousBayesianNetwork::equals(
}

/* String converter */
OT::String ContinuousBayesianNetwork::__repr__() const
String ContinuousBayesianNetwork::__repr__() const
{
OT::OSS oss(true);
OSS oss(true);
oss << "class=" << ContinuousBayesianNetwork::GetClassName()
<< " name=" << getName() << " dimension=" << getDimension()
<< " dag=" << dag_ << " jointDistributions=" << jointDistributions_;
return oss;
}

OT::String ContinuousBayesianNetwork::__str__(const OT::String &offset) const
String ContinuousBayesianNetwork::__str__(const String &offset) const
{
OT::OSS oss(false);
OSS oss(false);
oss << offset << getClassName() << "(dag=" << dag_
<< ", joint distributions=" << jointDistributions_ << ")";
return oss;
@@ -102,69 +108,78 @@ ContinuousBayesianNetwork *ContinuousBayesianNetwork::clone() const
*/
void ContinuousBayesianNetwork::computeRange()
{
const OT::UnsignedInteger dimension = dag_.getSize();
const UnsignedInteger dimension = dag_.getSize();
setDimension(dimension);
OT::Point lower(dimension);
OT::Point upper(dimension);
for (OT::UnsignedInteger i = 0; i < dimension; ++i)
Point lower(dimension);
Point upper(dimension);
for (UnsignedInteger i = 0; i < dimension; ++i)
{
const OT::Interval rangeI(jointDistributions_[i].getRange());
const OT::UnsignedInteger dimensionI = rangeI.getDimension();
const Interval rangeI(jointDistributions_[i].getRange());
const UnsignedInteger dimensionI = rangeI.getDimension();
// Check if the current node is a root node
lower[i] = rangeI.getLowerBound()[dimensionI - 1];
upper[i] = rangeI.getUpperBound()[dimensionI - 1];
} // i
setRange(OT::Interval(lower, upper));
setRange(Interval(lower, upper));
}

/* Get one realization of the distribution */
OT::Point ContinuousBayesianNetwork::getRealization() const
{
const OT::UnsignedInteger dimension = getDimension();
OT::Point result(dimension);
const OT::Indices order(dag_.getTopologicalOrder());

for (OT::UnsignedInteger i = 0; i < order.getSize(); ++i)
Point ContinuousBayesianNetwork::getRealization() const
{
const UnsignedInteger dimension = getDimension();
Point result(dimension);
const Indices order(dag_.getTopologicalOrder());

// The generation works this way:
// + go through the nodes according to a topological order wrt the dag
// + the ith node in this order has a global index order[i]
// + its parents have global indices parents[i]
// + for the ith node, sample the conditional distribution corresponding
// to the multivariate distribution linked to this node.
// The convention is that the first (d-1) components of this distribution
// correspond to the parents of the node IN THE CORRECT ORDER,
// while the d-th component is the current node.
for (UnsignedInteger i = 0; i < order.getSize(); ++i)
{
const OT::UnsignedInteger globalI = order[i];
const OT::Distribution localDistribution(jointDistributions_[globalI]);
const OT::Indices parents(dag_.getParents(globalI));
const OT::UnsignedInteger conditioningDimension(parents.getSize());
const UnsignedInteger globalI = order[i];
const Distribution localDistribution(jointDistributions_[globalI]);
const Indices parents(dag_.getParents(globalI));
const UnsignedInteger conditioningDimension(parents.getSize());
if (conditioningDimension == 0)
{
result[globalI] = localDistribution.getRealization()[0];
}
else
{
OT::Point y(conditioningDimension);
for (OT::UnsignedInteger j = 0; j < conditioningDimension; ++j)
Point y(conditioningDimension);
for (UnsignedInteger j = 0; j < conditioningDimension; ++j)
y[j] = result[parents[j]];
result[globalI] = localDistribution.computeConditionalQuantile(
OT::RandomGenerator::Generate(), y);
RandomGenerator::Generate(), y);
}
} // i
return result;
}

/* Get the PDF of the distribution */
OT::Scalar ContinuousBayesianNetwork::computePDF(const OT::Point &point) const
Scalar ContinuousBayesianNetwork::computePDF(const Point &point) const
{
const OT::Indices order(dag_.getTopologicalOrder());
OT::Scalar pdf = 1.0;
for (OT::UnsignedInteger i = 0; i < order.getSize(); ++i)
const Indices order(dag_.getTopologicalOrder());
Scalar pdf = 1.0;
for (UnsignedInteger i = 0; i < order.getSize(); ++i)
{
const OT::UnsignedInteger globalI = order[i];
const OT::Indices parents(dag_.getParents(globalI));
const OT::UnsignedInteger conditioningDimension(parents.getSize());
const OT::Scalar x = point[globalI];
const UnsignedInteger globalI = order[i];
const Indices parents(dag_.getParents(globalI));
const UnsignedInteger conditioningDimension(parents.getSize());
const Scalar x = point[globalI];
if (conditioningDimension == 0)
pdf *= jointDistributions_[globalI].computePDF(x);
else
{
OT::Point y(conditioningDimension);
for (OT::UnsignedInteger j = 0; j < conditioningDimension; ++j)
Point y(conditioningDimension);
for (UnsignedInteger j = 0; j < conditioningDimension; ++j)
y[j] = point[parents[j]];
const OT::Scalar conditionalPDF =
const Scalar conditionalPDF =
jointDistributions_[globalI].computeConditionalPDF(x, y);
pdf *= conditionalPDF;
}
@@ -176,14 +191,17 @@ OT::Scalar ContinuousBayesianNetwork::computePDF(const OT::Point &point) const
void ContinuousBayesianNetwork::setDAGAndDistributionCollection(
const NamedDAG &dag, const DistributionCollection &jointDistributions)
{
const OT::Indices order(dag.getTopologicalOrder());
for (OT::UnsignedInteger i = 0; i < order.getSize(); ++i)
if (jointDistributions[i].getDimension() != dag.getParents(i).getSize() + 1)
throw OT::InvalidArgumentException(HERE)
const Indices order(dag.getTopologicalOrder());
for (UnsignedInteger i = 0; i < order.getSize(); ++i)
{
const UnsignedInteger globalIndex(order[i]);
if (jointDistributions[globalIndex].getDimension() != dag.getParents(globalIndex).getSize() + 1)
throw InvalidArgumentException(HERE)
<< "Error: expected a joint distribution of dimension="
<< dag.getParents(i).getSize() + 1 << " for node=" << order[i]
<< " and its parents=" << dag.getParents(i)
<< ", got dimension=" << jointDistributions[i].getDimension();
<< dag.getParents(globalIndex).getSize() + 1 << " for node=" << globalIndex
<< " and its parents=" << dag.getParents(globalIndex)
<< ", got dimension=" << jointDistributions[globalIndex].getDimension();
}
dag_ = dag;
jointDistributions_ = jointDistributions;
computeRange();
Expand All @@ -202,17 +220,17 @@ ContinuousBayesianNetwork::getDistributionCollection() const
}

/* Method save() stores the object through the StorageManager */
void ContinuousBayesianNetwork::save(OT::Advocate &adv) const
void ContinuousBayesianNetwork::save(Advocate &adv) const
{
OT::ContinuousDistribution::save(adv);
ContinuousDistribution::save(adv);
adv.saveAttribute("dag_", dag_);
adv.saveAttribute("jointDistributions_", jointDistributions_);
}

/* Method load() reloads the object from the StorageManager */
void ContinuousBayesianNetwork::load(OT::Advocate &adv)
void ContinuousBayesianNetwork::load(Advocate &adv)
{
OT::ContinuousDistribution::load(adv);
ContinuousDistribution::load(adv);
adv.loadAttribute("dag_", dag_);
adv.loadAttribute("jointDistributions_", jointDistributions_);
computeRange();
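
In computePDF above, the joint density factorizes over the DAG by the chain rule, with each node's local distribution carrying its parents first and the node itself last (dimension = number of parents + 1, as checked in setDAGAndDistributionCollection). In symbols, writing pa(k) for the parents of node k, this is the standard Bayesian-network factorization:

\[
  p(x_1, \dots, x_d) \;=\; \prod_{k=1}^{d} p\bigl(x_k \mid x_{\mathrm{pa}(k)}\bigr)
\]
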
34 changes: 19 additions & 15 deletions lib/src/ContinuousBayesianNetworkFactory.cxx
@@ -2,10 +2,10 @@
/**
* @brief ContinuousBayesianNetworkFactory
*
* Copyright 2010-2019 Airbus-LIP6-Phimeca
* Copyright 2010-2020 Airbus-LIP6-Phimeca
*
* This library is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
@@ -22,7 +22,6 @@
#include <openturns/OTprivate.hxx>
#include <openturns/PersistentObjectFactory.hxx>

#include "otagrum/OTAgrumResourceMap.hxx"
#include "otagrum/ContinuousPC.hxx"
#include "otagrum/ContinuousBayesianNetworkFactory.hxx"

@@ -43,6 +42,7 @@ ContinuousBayesianNetworkFactory::ContinuousBayesianNetworkFactory()
, namedDAG_()
, alpha_(ResourceMap::GetAsScalar("ContinuousBayesianNetworkFactory-DefaultAlpha"))
, maximumConditioningSetSize_(ResourceMap::GetAsUnsignedInteger("ContinuousBayesianNetworkFactory-DefaultMaximumConditioningSetSize"))
, workInCopulaSpace_(ResourceMap::GetAsBool("ContinuousBayesianNetworkFactory-WorkInCopulaSpace"))
{
setName("ContinuousBayesianNetworkFactory");
}
@@ -51,12 +51,14 @@ ContinuousBayesianNetworkFactory::ContinuousBayesianNetworkFactory()
ContinuousBayesianNetworkFactory::ContinuousBayesianNetworkFactory(const Collection< DistributionFactory > & factories,
const NamedDAG & namedDAG,
const Scalar alpha,
const UnsignedInteger maximumConditioningSetSize)
const UnsignedInteger maximumConditioningSetSize,
const Bool workInCopulaSpace)
: DistributionFactoryImplementation()
, factories_(factories)
, namedDAG_(namedDAG)
, alpha_(alpha)
, maximumConditioningSetSize_(maximumConditioningSetSize)
, workInCopulaSpace_(workInCopulaSpace)
{
setName("ContinuousBayesianNetworkFactory");
}
@@ -91,7 +93,6 @@ ContinuousBayesianNetworkFactory::buildAsContinuousBayesianNetwork(
<< "Error: cannot build a ContinuousBayesianNetwork distribution "
"from an empty "
"sample";
const Bool workInCopulaSpace = ResourceMap::GetAsBool("ContinuousBayesianNetworkFactory-WorkInCopulaSpace");
// Check if the named DAG has to be learnt
NamedDAG localDAG;
if (namedDAG_.getSize() == 0)
@@ -101,8 +102,8 @@ ContinuousBayesianNetworkFactory::buildAsContinuousBayesianNetwork(
}
else localDAG = namedDAG_;
// Now, learn the local distributions
Indices order = localDAG.getTopologicalOrder();
Collection< Distribution > localDistributions;
const Indices order(localDAG.getTopologicalOrder());
Collection< Distribution > localDistributions(order.getSize());
const Scalar learningRatio = ResourceMap::GetAsScalar("ContinuousBayesianNetworkFactory-LearningRatio");
if (!((learningRatio >= 0.0) && (learningRatio <= 1.0)))
throw InvalidArgumentException(HERE) << "Error: expected a learning ratio in (0, 1), here learning ratio=" << learningRatio << ". Check \"ContinuousBayesianNetworkFactory-LearningRatio\" in ResourceMap.";
@@ -111,40 +112,43 @@
throw InvalidArgumentException(HERE) << "Error: expected a learning size between 1 and size-1, here learning size=" << learningSize << ". Check \"ContinuousBayesianNetworkFactory-LearningRatio\" in ResourceMap.";
for (UnsignedInteger i = 0; i < order.getSize(); ++i)
{
Indices indices(localDAG.getParents(i));
LOGINFO(OSS() << "Learn node=" << i << ", with parents=" << indices);
const UnsignedInteger globalIndex = order[i];
Indices indices(localDAG.getParents(globalIndex));
LOGINFO(OSS() << "Learn node=" << globalIndex << ", with parents=" << indices);
const UnsignedInteger dimension = 1 + indices.getSize();
if (dimension == 1 && workInCopulaSpace) localDistributions.add(Uniform(0.0, 1.0));
if (dimension == 1 && workInCopulaSpace_) localDistributions.add(Uniform(0.0, 1.0));
else
{
indices.add(i);
indices.add(globalIndex);
Sample localSample(sample.getMarginal(indices));
// Now, check if we have to perform a model selection
if (factories_.getSize() == 1)
localDistributions.add(factories_[0].build(localSample));
localDistributions[globalIndex] = factories_[0].build(localSample);
else
{
// Select the best model using a cross-validation based on
// log-likelihood
Sample validationSample(localSample.split(learningSize));
const Sample validationSample(localSample.split(learningSize));
const UnsignedInteger factoriesNumber = factories_.getSize();
Scalar bestScore = -SpecFunc::MaxScalar;
Distribution bestCandidate;
for (UnsignedInteger j = 0; j < factoriesNumber; ++j)
{
Distribution candidate(factories_[j].build(localSample));
// Enforce the candidate to be a copula
if (workInCopulaSpace && !candidate.isCopula())
if (workInCopulaSpace_ && !candidate.isCopula())
candidate = candidate.getCopula();
const Scalar score = candidate.computeLogPDF(validationSample).computeMean()[0];
LOGINFO(OSS() << "Candidate " << j << "=" << candidate << ", score=" << score);
if (score > bestScore)
{
bestScore = score;
bestCandidate = candidate;
LOGINFO(OSS() << "Best candidate so far=" << bestCandidate.getDimension() << ", best score so far=" << bestScore);
}
} // j (factories)
localDistributions.add(bestCandidate);
LOGINFO(OSS() << "Best candidate=" << bestCandidate.getDimension() << ", best score=" << bestScore);
localDistributions[globalIndex] = bestCandidate;
} // factories_.getSize() > 1
} // d > 1 or !workInCopulaSpace
} // i (nodes)
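
When several factories are supplied, the loop above performs model selection by cross-validated log-likelihood: the local sample is split, each factory is fitted on the learning part, every candidate is scored by its mean log-PDF on the validation part, and the best-scoring candidate is kept. A compact sketch of that pattern with hypothetical Model/Fitter types (the real code uses OT::DistributionFactory and OT::Sample):

// Hedged sketch of cross-validated model selection by mean log-likelihood.
#include <functional>
#include <limits>
#include <vector>

struct Model { std::function<double(double)> logPdf; };

// Each fitter builds a Model from a learning sample.
using Fitter = std::function<Model(const std::vector<double> &)>;

Model selectBest(const std::vector<Fitter> &fitters,
                 const std::vector<double> &learning,
                 const std::vector<double> &validation)
{
  double bestScore = -std::numeric_limits<double>::max();
  Model best;
  for (const Fitter &fit : fitters)
  {
    const Model candidate = fit(learning);
    double score = 0.0;                      // mean log-PDF on the validation part
    for (double x : validation) score += candidate.logPdf(x);
    score /= validation.size();
    if (score > bestScore) { bestScore = score; best = candidate; }
  }
  return best;
}
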
[diffs for the remaining 13 changed files not shown]
