Skip to content

Commit

Permalink
refactor: moved cmake_module folder, updated README, shortened test s…
Browse files Browse the repository at this point in the history
…yntax
  • Loading branch information
paolotron committed Feb 10, 2022
1 parent 7e21a57 commit 59d514d
Show file tree
Hide file tree
Showing 5 changed files with 40 additions and 36 deletions.
2 changes: 1 addition & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ project(baylib)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/baylib/cmake_module)
set(CMAKE_MODULE_PATH cmake_module)

# change this option to compile the examples
option(BUILD_EXAMPLES "Build the examples showing how to use baylib" OFF)
Expand Down
17 changes: 9 additions & 8 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@ Baylib is a parallel inference library for discrete Bayesian networks supporting
## Main features
Here's a list of the main requested features:
* Copy-On-Write semantics for the graph data structure, including the conditional probability table (CPT) of each node
* parallel implementation of the algorithms either using C++17 threads or GPU computing with [boost compute](https://www.boost.org/doc/libs/1_66_0/libs/compute/doc/html/index.html)
* parallel implementation of the algorithms either using C++17 threads or GPGPU optimization
* GPGPU optimization implemented with OpenCL, using [boost compute](https://www.boost.org/doc/libs/1_66_0/libs/compute/doc/html/index.html), and with CUDA.
* template-based classes for probability format
* input compatibility with the [XDSL format](https://support.bayesfusion.com/docs/) provided by the SMILE library
* cmake-based deployment
Expand All @@ -23,13 +24,13 @@ Here's a list of the main requested features:
* Rejection Sampling - C++11 threads
* Adaptive importance sampling - C++11 threads, GPGPU with boost compute

| algorithm | evidence | deterministic nodes | multi-threading | GPGPU |
|:------------------------------:|--------------|-----------------------|-------------------|-----------|
| gibbs sampling | ✓ | * | ✓ | |
| likelihood weighting | ✓ | ✓ | ✓ | |
| logic sampling | ✓ | ✓ | | ✓ |
| rejection sampling | ✓ | ✓ | ✓ | |
| adaptive importance sampling | ✓ | ✓ | ✓ | ✓ |
| algorithm | evidence | deterministic nodes | multi-threading | GPGPU-opencl | GPGPU - CUDA |
|:------------------------------:|--------------|-----------------------|-------------------|---------------|--------------|
| gibbs sampling | ✓ | * | ✓ | | |
| likelihood weighting | ✓ | ✓ | ✓ | | |
| logic sampling | ✓ | ✓ | | ✓ | ✓ |
| rejection sampling | ✓ | ✓ | ✓ | | |
| adaptive importance sampling | ✓ | ✓ | ✓ | ✓ | |

*It's a very well-known limitation of the Gibbs sampling approach
## Dependencies
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
#

find_path( THRUST_INCLUDE_DIR
HINTS ./
HINTS .
../thrust
../../thrust
../../../thrust
Expand Down
26 changes: 14 additions & 12 deletions test/evidence_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -22,32 +22,34 @@

using namespace baylib::inference;
using probability_type = double;
template<class Variable>
using bnet = baylib::bayesian_net<Variable>;




template<typename Probability, class Variable>
std::vector<baylib::marginal_distribution<Probability>> get_results(const baylib::bayesian_net<Variable> &bn){
std::vector<baylib::marginal_distribution<Probability>> get_results(const bnet<Variable> &bn){
std::vector<baylib::marginal_distribution<Probability>> results{
logic_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, MEMORY).make_inference(),
gibbs_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, THREADS).make_inference(),
likelihood_weighting<baylib::bayesian_net<Variable>>(bn, SAMPLES, THREADS).make_inference(),
rejection_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, THREADS).make_inference(),
adaptive_importance_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, MEMORY).make_inference(),
logic_sampling<bnet<Variable>>(bn, SAMPLES, MEMORY).make_inference(),
gibbs_sampling<bnet<Variable>>(bn, SAMPLES, THREADS).make_inference(),
likelihood_weighting<bnet<Variable>>(bn, SAMPLES, THREADS).make_inference(),
rejection_sampling<bnet<Variable>>(bn, SAMPLES, THREADS).make_inference(),
adaptive_importance_sampling<bnet<Variable>>(bn, SAMPLES, MEMORY).make_inference(),
#ifdef CUDA_CMP_FOUND
logic_sampling_cuda<baylib::bayesian_net<Variable>>(bn, SAMPLES).make_inference()
logic_sampling_cuda<bnet<Variable>>(bn, SAMPLES).make_inference()
#endif
};
return results;
}

template<typename Probability, class Variable>
std::vector<baylib::marginal_distribution<Probability>> get_results_heavy(const baylib::bayesian_net<Variable> &bn){
std::vector<baylib::marginal_distribution<Probability>> get_results_heavy(const bnet<Variable> &bn){
std::vector<baylib::marginal_distribution<Probability>> results{
likelihood_weighting<baylib::bayesian_net<Variable>>(bn, SAMPLES, THREADS).make_inference(),
gibbs_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, THREADS).make_inference(),
rejection_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, THREADS).make_inference(),
adaptive_importance_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, MEMORY).make_inference()
likelihood_weighting<bnet<Variable>>(bn, SAMPLES, THREADS).make_inference(),
gibbs_sampling<bnet<Variable>>(bn, SAMPLES, THREADS).make_inference(),
rejection_sampling<bnet<Variable>>(bn, SAMPLES, THREADS).make_inference(),
adaptive_importance_sampling<bnet<Variable>>(bn, SAMPLES, MEMORY).make_inference()
};
return results;
}
Expand Down
29 changes: 15 additions & 14 deletions test/inference_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -22,32 +22,33 @@

using namespace baylib::inference;
using probability_type = double;

template<class Variable>
using bnet = baylib::bayesian_net<Variable>;;

template<typename Probability, class Variable>
std::vector<baylib::marginal_distribution<Probability>> get_results(const baylib::bayesian_net<Variable> &bn){
std::vector<baylib::marginal_distribution<Probability>> get_results(const bnet<Variable> &bn){
std::vector<baylib::marginal_distribution<Probability>> results{
logic_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, MEMORY).make_inference(),
gibbs_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, THREADS).make_inference(),
likelihood_weighting<baylib::bayesian_net<Variable>>(bn, SAMPLES, THREADS).make_inference(),
rejection_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, THREADS).make_inference(),
adaptive_importance_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, MEMORY).make_inference(),
logic_sampling<bnet<Variable>>(bn, SAMPLES, MEMORY).make_inference(),
gibbs_sampling<bnet<Variable>>(bn, SAMPLES, THREADS).make_inference(),
likelihood_weighting<bnet<Variable>>(bn, SAMPLES, THREADS).make_inference(),
rejection_sampling<bnet<Variable>>(bn, SAMPLES, THREADS).make_inference(),
adaptive_importance_sampling<bnet<Variable>>(bn, SAMPLES, MEMORY).make_inference(),
#ifdef CUDA_CMP_FOUND
logic_sampling_cuda<baylib::bayesian_net<Variable>>(bn, SAMPLES).make_inference()
logic_sampling_cuda<bnet<Variable>>(bn, SAMPLES).make_inference()
#endif
};
return results;
}

template<typename Probability, class Variable>
std::vector<baylib::marginal_distribution<Probability>> get_results_deterministic(const baylib::bayesian_net<Variable> &bn){
std::vector<baylib::marginal_distribution<Probability>> get_results_deterministic(const bnet<Variable> &bn){
std::vector<baylib::marginal_distribution<Probability>> results{
logic_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, MEMORY).make_inference(),
likelihood_weighting<baylib::bayesian_net<Variable>>(bn, SAMPLES, THREADS).make_inference(),
rejection_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, THREADS).make_inference(),
adaptive_importance_sampling<baylib::bayesian_net<Variable>>(bn, SAMPLES, MEMORY).make_inference(),
logic_sampling<bnet<Variable>>(bn, SAMPLES, MEMORY).make_inference(),
likelihood_weighting<bnet<Variable>>(bn, SAMPLES, THREADS).make_inference(),
rejection_sampling<bnet<Variable>>(bn, SAMPLES, THREADS).make_inference(),
adaptive_importance_sampling<bnet<Variable>>(bn, SAMPLES, MEMORY).make_inference(),
#ifdef CUDA_CMP_FOUND
logic_sampling_cuda<baylib::bayesian_net<Variable>>(bn, SAMPLES).make_inference()
logic_sampling_cuda<bnet<Variable>>(bn, SAMPLES).make_inference()
#endif
};
return results;
Expand Down

0 comments on commit 59d514d

Please sign in to comment.