diff --git a/source/auxiliary/confusion-matrix.cpp b/source/auxiliary/confusion-matrix.cpp index 3d2b66a..20e8be2 100644 --- a/source/auxiliary/confusion-matrix.cpp +++ b/source/auxiliary/confusion-matrix.cpp @@ -25,7 +25,6 @@ double ksi::confusion_matrix::TestF1score (const ksi::result & r) return (2.0 * recall * precision) / (recall + precision); } - std::string ksi::confusion_matrix::ca(int n) { std::stringstream ss; diff --git a/source/auxiliary/roc.cpp b/source/auxiliary/roc.cpp index 540c2ef..ed72ba6 100644 --- a/source/auxiliary/roc.cpp +++ b/source/auxiliary/roc.cpp @@ -17,6 +17,7 @@ std::string ksi::to_string (const ksi::roc_threshold & th) case ksi::roc_threshold::mean : return "mean"; case ksi::roc_threshold::minimal_distance : return "minimal-distance"; case ksi::roc_threshold::youden : return "youden"; + case ksi::roc_threshold::manual : return "manual"; case ksi::roc_threshold::none : return "none"; default : return "error"; } @@ -24,7 +25,7 @@ std::string ksi::to_string (const ksi::roc_threshold & th) double ksi::roc::trapezoidArea(double x1, double x2, double y1, double y2) { - return abs(x1 - x2) * (y1 + y2) / 2; + return fabs(x1 - x2) * (y1 + y2) / 2; } ksi::results ksi::roc::calculate_ROC_points (std::vector & Out, @@ -69,7 +70,7 @@ ksi::results ksi::roc::calculate_ROC_points (std::vector & Out, iter != Out.end(); iter++) { - if (abs(*iter - negativeClassvalue) < epsilon) // negative + if (fabs(*iter - negativeClassvalue) < epsilon) // negative nNeg++; else nPos++; @@ -96,7 +97,7 @@ ksi::results ksi::roc::calculate_ROC_points (std::vector & Out, points.push_back(punkt); - if (abs(iter->first - negativeClassvalue) < epsilon) // negative + if (fabs(iter->first - negativeClassvalue) < epsilon) // negative { nFP--; //cerr << "n "; diff --git a/source/auxiliary/roc.h b/source/auxiliary/roc.h index 9af9cca..8ee373a 100644 --- a/source/auxiliary/roc.h +++ b/source/auxiliary/roc.h @@ -16,6 +16,7 @@ namespace ksi mean, ///< mean value of positive and negative class labels youden, ///< Youden criterion minimal_distance, ///< minimal distance of ROC to point (0, 1) + manual, ///< the threshold value must be set manually none ///< none }; diff --git a/source/neuro-fuzzy/abstract-annbfis.cpp b/source/neuro-fuzzy/abstract-annbfis.cpp index 27cad55..9c0d4a7 100644 --- a/source/neuro-fuzzy/abstract-annbfis.cpp +++ b/source/neuro-fuzzy/abstract-annbfis.cpp @@ -436,6 +436,45 @@ ksi::abstract_annbfis::abstract_annbfis(int nRules, _minimal_typicality = dbMinimalTypicality; } +ksi::abstract_annbfis::abstract_annbfis(int nRules, double dbFrobeniusEpsilon, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm& tnorm, const implication& imp, const partitioner& Partitioner, double positive_class, double negative_class, double threshold_value, const double dbMinimalTypicality) : neuro_fuzzy_system() +{ + _nRules = nRules; + _dbFrobeniusEpsilon = dbFrobeniusEpsilon; + _nTuningIterations = nTuningIterations; + _dbLearningCoefficient = dbLearningCoefficient; + _bNormalisation = bNormalisation; + if (not _pTnorm) + _pTnorm = tnorm.clone(); + if (not _pPartitioner) + _pPartitioner = Partitioner.clone(); + if (not _pImplication) + _pImplication = imp.clone(); + _positive_class = positive_class; + _negative_class = negative_class; + _threshold_type = ksi::roc_threshold::manual; + _threshold_value = threshold_value; + _minimal_typicality = dbMinimalTypicality; +} + +ksi::abstract_annbfis::abstract_annbfis(int nRules, int nClusteringIterations, int nTuningIterations, 
double dbLearningCoefficient, bool bNormalisation, const t_norm& tnorm, const implication& imp, const partitioner& Partitioner, double positive_class, double negative_class, double threshold_value, const double dbMinimalTypicality): neuro_fuzzy_system() +{ + _nRules = nRules; + _nClusteringIterations = nClusteringIterations; + _nTuningIterations = nTuningIterations; + _dbLearningCoefficient = dbLearningCoefficient; + _bNormalisation = bNormalisation; + if (not _pTnorm) + _pTnorm = tnorm.clone(); + if (not _pPartitioner) + _pPartitioner = Partitioner.clone(); + if (not _pImplication) + _pImplication = imp.clone(); + _positive_class = positive_class; + _negative_class = negative_class; + _threshold_type = ksi::roc_threshold::manual; + _threshold_value = threshold_value; + _minimal_typicality = dbMinimalTypicality; +} ksi::partition ksi::abstract_annbfis::doPartition(const ksi::dataset& X) { diff --git a/source/neuro-fuzzy/abstract-annbfis.h b/source/neuro-fuzzy/abstract-annbfis.h index e943894..968bc23 100644 --- a/source/neuro-fuzzy/abstract-annbfis.h +++ b/source/neuro-fuzzy/abstract-annbfis.h @@ -77,6 +77,23 @@ namespace ksi abstract_annbfis (int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, const partitioner & Partitioner, double positive_class, double negative_class, ksi::roc_threshold threshold_type, const double dbMinimalTypicality = -1); + /** constructor + * @param nRules number of rules + * @param nClusteringIterations number of clustering iterations + * @param nTuningIterations number of tuning iterations + * @param dbLearningCoefficient learning coefficient for gradient method + * @param tnorm a t-norm + * @param imp implication + * @param Partitioner partition method object + * @param dbPositiveClass label of a positive class + * @param dbNegativeClass label of a negative class + * @param threshold_value classification threshold value + * @param dbMinimalTypicality minimal typicality for outliers + * @date 2024-05-09 + */ + abstract_annbfis (int nRules, int nClusteringIterations, int nTuningIterations, + double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, const partitioner & Partitioner, double positive_class, double negative_class, double threshold_value, const double dbMinimalTypicality = -1); + /** constructor * @param nRules number of rules * @param dbFrobeniusEpsilon epsilon for Frobenius norm in clustering @@ -94,6 +111,24 @@ namespace ksi abstract_annbfis (int nRules, double dbFrobeniusEpsilon, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, const partitioner & Partitioner, double positive_class, double negative_class, ksi::roc_threshold threshold_type, const double dbMinimalTypicality = -1); + /** constructor + * @param nRules number of rules + * @param dbFrobeniusEpsilon epsilon for Frobenius norm in clustering + * @param nTuningIterations number of tuning iterations + * @param dbLearningCoefficient learning coefficient for gradient method + * @param tnorm a t-norm + * @param imp implication + * @param Partitioner partition method object + * @param dbPositiveClass label of a positive class + * @param dbNegativeClass label of a negative class + * @param threshold_value classification threshold value + * @param dbMinimalTypicality minimal typicality for outliers + * @date 2024-05-09 + */ + abstract_annbfis (int nRules, double dbFrobeniusEpsilon, 
int nTuningIterations, + double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, const partitioner & Partitioner, double positive_class, double negative_class, double threshold_value, const double dbMinimalTypicality = -1); + + abstract_annbfis(const abstract_annbfis & a); abstract_annbfis(abstract_annbfis && a); @@ -109,9 +144,6 @@ namespace ksi ~abstract_annbfis(); - - - public: /** The method creates a fuzzy rulebase from the dataset. * @param nClusteringIterations number of clustering iterations @@ -126,7 +158,7 @@ namespace ksi virtual void createFuzzyRulebase ( int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, - const dataset & train, const dataset & validation); + const dataset & train, const dataset & validation) override; public: diff --git a/source/neuro-fuzzy/abstract-ma.cpp b/source/neuro-fuzzy/abstract-ma.cpp index 204e539..a9bf027 100644 --- a/source/neuro-fuzzy/abstract-ma.cpp +++ b/source/neuro-fuzzy/abstract-ma.cpp @@ -296,6 +296,63 @@ ksi::abstract_ma::abstract_ma(int nRules, _minimal_typicality = dbMinimalTypicality; } + +ksi::abstract_ma::abstract_ma(int nRules, + double dbFrobeniusEpsilon, + int nTuningIterations, + double dbLearningCoefficient, + bool bNormalisation, + const ksi::t_norm & tnorm, + const ksi::partitioner & Partitioner, + double positive_class, + double negative_class, + const double threshold_value, + const double dbMinimalTypicality) +{ + _nRules = nRules; + _dbFrobeniusEpsilon = dbFrobeniusEpsilon; + _nTuningIterations = nTuningIterations; + _dbLearningCoefficient = dbLearningCoefficient; + _bNormalisation = bNormalisation; + if (not _pTnorm) + _pTnorm = tnorm.clone(); + if (not _pPartitioner) + _pPartitioner = Partitioner.clone(); + _positive_class = positive_class; + _negative_class = negative_class; + _threshold_type = ksi::roc_threshold::manual; + _threshold_value = threshold_value; + _minimal_typicality = dbMinimalTypicality; +} + + +ksi::abstract_ma::abstract_ma(int nRules, + int nClusteringIterations, + int nTuningIterations, + double dbLearningCoefficient, + bool bNormalisation, + const ksi::t_norm & tnorm, + const partitioner & Partitioner, + double positive_class, + double negative_class, + ksi::roc_threshold threshold_type, + const double dbMinimalTypicality) +{ + _nRules = nRules; + _nClusteringIterations = nClusteringIterations; + _nTuningIterations = nTuningIterations; + _dbLearningCoefficient = dbLearningCoefficient; + _bNormalisation = bNormalisation; + if (not _pTnorm) + _pTnorm = tnorm.clone(); + if (not _pPartitioner) + _pPartitioner = Partitioner.clone(); + _positive_class = positive_class; + _negative_class = negative_class; + _threshold_type = threshold_type; + _minimal_typicality = dbMinimalTypicality; +} + ksi::abstract_ma::abstract_ma(int nRules, int nClusteringIterations, int nTuningIterations, @@ -305,17 +362,8 @@ ksi::abstract_ma::abstract_ma(int nRules, const partitioner & Partitioner, double positive_class, double negative_class, - ksi::roc_threshold threshold_type, - const double dbMinimalTypicality) -/* : abstract_ma (nRules, - nClusteringIterations, - nTuningIterations, - dbLearningCoefficient, - bNormalisation, - tnorm, - Partitioner, - dbMinimalTypicality) -*/ + const double threshold_value, + const double dbMinimalTypicality) { _nRules = nRules; _nClusteringIterations = nClusteringIterations; @@ -328,7 +376,8 @@ ksi::abstract_ma::abstract_ma(int nRules, _pPartitioner = Partitioner.clone(); _positive_class = positive_class; _negative_class = 
negative_class;
-  _threshold_type = threshold_type;
+  _threshold_type = ksi::roc_threshold::manual;
+  _threshold_value = threshold_value;
   _minimal_typicality = dbMinimalTypicality;
}
diff --git a/source/neuro-fuzzy/abstract-ma.h b/source/neuro-fuzzy/abstract-ma.h
index 0f46866..21a494f 100644
--- a/source/neuro-fuzzy/abstract-ma.h
+++ b/source/neuro-fuzzy/abstract-ma.h
@@ -54,6 +54,21 @@ namespace ksi
       */
      abstract_ma (int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const partitioner & Partitioner, double positive_class, double negative_class, ksi::roc_threshold threshold_type, const double dbMinimalTypicality = -1);
+
+     /** constructor
+      * @param nRules number of rules
+      * @param nClusteringIterations number of clustering iterations
+      * @param nTuningIterations number of tuning iterations
+      * @param dbLearningCoefficient learning coefficient for gradient method
+      * @param tnorm a t-norm
+      * @param positive_class label for positive_class
+      * @param negative_class label for negative_class
+      * @param threshold_value threshold value for classification
+      * @param dbMinimalTypicality minimal typicality for outliers
+      */
+     abstract_ma (int nRules, int nClusteringIterations, int nTuningIterations,
+                  double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const partitioner & Partitioner, double positive_class, double negative_class, const double threshold_value, const double dbMinimalTypicality = -1);
+
      /** constructor
@@ -74,6 +89,24 @@ namespace ksi
                   double positive_class, double negative_class, ksi::roc_threshold threshold_type, const double dbMinimalTypicality = -1 );
+     /** constructor
+      * @param nRules number of rules
+      * @param dbFrobeniusEpsilon epsilon for Frobenius norm for the clustering algorithm
+      * @param nTuningIterations number of tuning iterations
+      * @param dbLearningCoefficient learning coefficient for gradient method
+      * @param tnorm a t-norm
+      * @param Partitioner clustering object
+      * @param positive_class label for positive_class
+      * @param negative_class label for negative_class
+      * @param threshold_value threshold value for classification
+      * @param dbMinimalTypicality minimal typicality for outliers
+      * @date 2024-05-09
+      */
+     abstract_ma (int nRules, double dbFrobeniusEpsilon, int nTuningIterations,
+                  double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const partitioner & Partitioner,
+                  double positive_class, double negative_class, const double threshold_value, const double dbMinimalTypicality = -1
+                 );
+
      /** constructor
      * @param nRules number of rules
diff --git a/source/neuro-fuzzy/abstract-tsk.cpp b/source/neuro-fuzzy/abstract-tsk.cpp
index 988fedc..e446392 100644
--- a/source/neuro-fuzzy/abstract-tsk.cpp
+++ b/source/neuro-fuzzy/abstract-tsk.cpp
@@ -405,6 +405,63 @@ ksi::abstract_tsk::abstract_tsk(int nRules,
}

+ksi::abstract_tsk::abstract_tsk(int nRules,
+                                double dbFrobeniusEpsilon,
+                                int nTuningIterations,
+                                double dbLearningCoefficient,
+                                bool bNormalisation,
+                                const ksi::t_norm & tnorm,
+                                const ksi::partitioner & Partitioner,
+                                double positive_class,
+                                double negative_class,
+                                double threshold_value,
+                                const double dbMinimalTypicality)
+{
+    _nRules = nRules;
+    _dbFrobeniusEpsilon = dbFrobeniusEpsilon;
+    _nTuningIterations = nTuningIterations;
+    _dbLearningCoefficient = dbLearningCoefficient;
+    _bNormalisation = bNormalisation;
+    if (not _pTnorm)
+        _pTnorm = tnorm.clone();
+    if (not _pPartitioner)
+        _pPartitioner = Partitioner.clone();
+    _positive_class = positive_class;
+    _negative_class = negative_class;
+    _threshold_type = ksi::roc_threshold::manual;
+    _threshold_value = threshold_value;
+    _minimal_typicality = dbMinimalTypicality;
+}
+
+ksi::abstract_tsk::abstract_tsk(int nRules,
+                                int nClusteringIterations,
+                                int nTuningIterations,
+                                double dbLearningCoefficient,
+                                bool bNormalisation,
+                                const ksi::t_norm & tnorm,
+                                const ksi::partitioner & Partitioner,
+                                double positive_class,
+                                double negative_class,
+                                double threshold_value,
+                                const double dbMinimalTypicality)
+{
+    _nRules = nRules;
+    _nClusteringIterations = nClusteringIterations;
+    _nTuningIterations = nTuningIterations;
+    _dbLearningCoefficient = dbLearningCoefficient;
+    _bNormalisation = bNormalisation;
+    if (not _pTnorm)
+        _pTnorm = tnorm.clone();
+    if (not _pPartitioner)
+        _pPartitioner = Partitioner.clone();
+    _positive_class = positive_class;
+    _negative_class = negative_class;
+    _threshold_type = ksi::roc_threshold::manual;
+    _threshold_value = threshold_value;
+    _minimal_typicality = dbMinimalTypicality;
+}
+
+
ksi::abstract_tsk::~abstract_tsk()
{
diff --git a/source/neuro-fuzzy/abstract-tsk.h b/source/neuro-fuzzy/abstract-tsk.h
index b8fee90..cab461a 100644
--- a/source/neuro-fuzzy/abstract-tsk.h
+++ b/source/neuro-fuzzy/abstract-tsk.h
@@ -68,6 +68,42 @@ namespace ksi
                   const partitioner & Partitioner,
                   const double dbMinimalTypicality = -1
                  );
+     /** constructor
+      * @param nRules number of rules
+      * @param nClusteringIterations number of clustering iterations
+      * @param nTuningIterations number of tuning iterations
+      * @param dbLearningCoefficient learning coefficient for gradient method
+      * @param tnorm a t-norm
+      * @param positive_class label for positive_class
+      * @param negative_class label for negative_class
+      * @param threshold_value classification threshold value
+      * @param dbMinimalTypicality minimal typicality for outliers
+      * @date 2024-05-09
+      */
+     abstract_tsk (int nRules, int nClusteringIterations, int nTuningIterations,
+                   double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm,
+                   const partitioner & Partitioner,
+                   double positive_class, double negative_class, double threshold_value, const double dbMinimalTypicality = -1
+                  );
+
+     /** constructor
+      * @param nRules number of rules
+      * @param dbFrobeniusEpsilon epsilon for Frobenius norm for the clustering algorithm
+      * @param nTuningIterations number of tuning iterations
+      * @param dbLearningCoefficient learning coefficient for gradient method
+      * @param tnorm a t-norm
+      * @param positive_class label for positive_class
+      * @param negative_class label for negative_class
+      * @param threshold_value classification threshold value
+      * @param dbMinimalTypicality minimal typicality for outliers
+      * @date 2024-05-09
+      */
+     abstract_tsk (int nRules, double dbFrobeniusEpsilon, int nTuningIterations,
+                   double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm,
+                   const partitioner & Partitioner,
+                   double positive_class, double negative_class, double threshold_value, const double dbMinimalTypicality = -1
+                  );
+
     /** constructor
      * @param nRules number of rules
      * @param nClusteringIterations number of clustering iterations
@@ -136,7 +172,8 @@ namespace ksi
     abstract_tsk & operator = (abstract_tsk && a);
     ~abstract_tsk();

-     /** A constructor with partitioner * @date 2024-02-27 */
+     /** A constructor with partitioner
+      * @date 2024-02-27 */
     abstract_tsk (const partitioner & Partitioner);

  protected:
@@ -189,7 +226,4 @@ namespace ksi
   };
}

-
-
-
#endif
diff --git
a/source/neuro-fuzzy/abstract_fcom.cpp b/source/neuro-fuzzy/abstract_fcom.cpp index ac38335..98f872c 100644 --- a/source/neuro-fuzzy/abstract_fcom.cpp +++ b/source/neuro-fuzzy/abstract_fcom.cpp @@ -1,5 +1,4 @@ - #include #include @@ -9,7 +8,7 @@ #include "../dissimilarities/dissimilarity.h" -std::string ksi::abstract_fcom::extra_report() +std::string ksi::abstract_fcom::extra_report() const { std::stringstream ss; ss << "epsilon for Frobenius norm in clustering: " << _dbFrobeniusEpsilon << std::endl; diff --git a/source/neuro-fuzzy/abstract_fcom.h b/source/neuro-fuzzy/abstract_fcom.h index 70301ee..60d1d4a 100644 --- a/source/neuro-fuzzy/abstract_fcom.h +++ b/source/neuro-fuzzy/abstract_fcom.h @@ -15,8 +15,6 @@ namespace ksi ksi::dissimilarity * _pDissimilarity = nullptr; ///< dissimilarity function ksi::owa * _pOwa = nullptr; ///< ordered weighting average function - - public: abstract_fcom (); abstract_fcom (const abstract_fcom & wzor); @@ -25,7 +23,7 @@ namespace ksi abstract_fcom & operator= (abstract_fcom && wzor); virtual ~abstract_fcom (); - virtual std::string extra_report(); + virtual std::string extra_report() const; }; } diff --git a/source/neuro-fuzzy/annbfis.cpp b/source/neuro-fuzzy/annbfis.cpp index de0eb3f..28f50c6 100644 --- a/source/neuro-fuzzy/annbfis.cpp +++ b/source/neuro-fuzzy/annbfis.cpp @@ -1,36 +1,11 @@ #include -#include -#include -#include #include -#include #include #include "annbfis.h" -#include "logicalrule.h" -#include "premise.h" -#include "consequence-CL.h" -#include "../tnorms/t-norm-product.h" -#include "../partitions/cluster.h" #include "../partitions/fcm.h" -#include "../neuro-fuzzy/premise.h" -#include "../tnorms/t-norm-product.h" -#include "../implications/imp-reichenbach.h" -#include "../descriptors/descriptor-gaussian.h" -#include "../neuro-fuzzy/consequence-CL.h" -#include "../auxiliary/least-error-squares-regression.h" -#include "../auxiliary/tempus.h" -#include "../auxiliary/clock.h" -#include "../auxiliary/confusion-matrix.h" #include "../auxiliary/roc.h" -#include "../service/debug.h" -#include "../auxiliary/error-RMSE.h" -#include "../auxiliary/error-MAE.h" -#include "../common/number.h" -#include "../readers/reader-complete.h" -#include "../common/data-modifier-normaliser.h" -#include "../partitions/partition.h" #include "../gan/discriminative_model.h" #include "../gan/generative_model.h" @@ -82,10 +57,8 @@ ksi::annbfis::annbfis() : neuro_fuzzy_system(), abstract_annbfis() ksi::annbfis::~annbfis() { - } - ksi::annbfis::annbfis(const ksi::annbfis & right) : neuro_fuzzy_system(right), abstract_annbfis(right) { set_name(); @@ -136,9 +109,6 @@ ksi::generative_model * ksi::annbfis::clone_generator() const return new ksi::annbfis (*this); } - - - ksi::annbfis::annbfis(int nRules, int nClusteringIterations, int nTuningIterations, @@ -153,7 +123,6 @@ ksi::annbfis::annbfis(int nRules, set_name(); } - ksi::annbfis::annbfis(int nRules, int nClusteringIterations, int nTuningIterations, @@ -207,6 +176,11 @@ ksi::annbfis::annbfis(int nRules, set_name(); } +ksi::annbfis::annbfis(int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm& tnorm, const implication& imp, double positive_class, double negative_class, const double threshold_value): neuro_fuzzy_system(), abstract_annbfis (nRules, nClusteringIterations, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm, imp, ksi::fcm(nRules, nClusteringIterations), positive_class, negative_class, threshold_value) +{ + set_name(); +} + 
ksi::annbfis::annbfis(int nRules, int nClusteringIterations, int nTuningIterations, @@ -218,8 +192,14 @@ ksi::annbfis::annbfis(int nRules, double negative_class, const ksi::roc_threshold threshold_type, const ksi::data_modifier & modifier - ) : neuro_fuzzy_system(modifier), abstract_annbfis (nRules, nClusteringIterations, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm, imp, ksi::fcm(nRules, nClusteringIterations), positive_class, negative_class, threshold_type ) + ) : neuro_fuzzy_system(modifier), abstract_annbfis (nRules, nClusteringIterations, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm, imp, ksi::fcm(nRules, nClusteringIterations), positive_class, negative_class, threshold_type) { set_name(); } +ksi::annbfis::annbfis(int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm& tnorm, const implication& imp, double positive_class, double negative_class, const ksi::data_modifier& modifier, const double threshold_value): neuro_fuzzy_system(modifier), abstract_annbfis (nRules, nClusteringIterations, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm, imp, ksi::fcm(nRules, nClusteringIterations), positive_class, negative_class, threshold_value) +{ + set_name(); +} + + diff --git a/source/neuro-fuzzy/annbfis.h b/source/neuro-fuzzy/annbfis.h index 037aab7..c845d49 100644 --- a/source/neuro-fuzzy/annbfis.h +++ b/source/neuro-fuzzy/annbfis.h @@ -4,6 +4,7 @@ #define ANNBFIS_H +#include #include #include @@ -25,12 +26,7 @@ namespace ksi */ class annbfis : virtual public abstract_annbfis { - protected: - - - public: - annbfis(); /** constructor @@ -84,6 +80,22 @@ namespace ksi */ annbfis (int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, double positive_class, double negative_class, const ksi::roc_threshold threshold_type); + public: + /** constructor + * @param nRules number of rules + * @param nClusteringIterations number of clustering iterations + * @param nTuningIterations number of tuning iterations + * @param dbLearningCoefficient learning coefficient for gradient method + * @param tnorm a t-norm + * @param imp implication + * @param dbPositiveClass label of a positive class + * @param dbNegativeClass label of a negative class + * @param threshold_value classification threshold value + * @date 2024-05-09 + */ + annbfis (int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, double positive_class, double negative_class, const double threshold_value = 0.5); + + /** constructor * @param nRules number of rules * @param nClusteringIterations number of clustering iterations @@ -98,6 +110,22 @@ namespace ksi * @date 2021-01-09 */ annbfis (int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, double positive_class, double negative_class, const ksi::roc_threshold threshold_type, const ksi::data_modifier & modifier); + + /** constructor + * @param nRules number of rules + * @param nClusteringIterations number of clustering iterations + * @param nTuningIterations number of tuning iterations + * @param dbLearningCoefficient learning coefficient for gradient method + * @param tnorm a t-norm + * @param imp implication + * @param dbPositiveClass label of a positive class + * 
@param dbNegativeClass label of a negative class
+      * @param threshold_value classification threshold value
+      * @param modifier data modifier (one or a chain of modifiers)
+      * @date 2024-05-09
+      */
+      annbfis (int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, double positive_class, double negative_class, const ksi::data_modifier & modifier, const double threshold_value = 0.5);
+
      annbfis(const annbfis & a);
      annbfis(annbfis && a);
@@ -124,8 +152,4 @@ namespace ksi
   };
}

-
-
-
-
#endif
diff --git a/source/neuro-fuzzy/fcom-annbfis.cpp b/source/neuro-fuzzy/fcom-annbfis.cpp
index 47d1ed0..ff2e84a 100644
--- a/source/neuro-fuzzy/fcom-annbfis.cpp
+++ b/source/neuro-fuzzy/fcom-annbfis.cpp
@@ -126,6 +126,34 @@ ksi::fcom_annbfis::fcom_annbfis(int nRules,
}

+ksi::fcom_annbfis::fcom_annbfis(int nRules,
+                                double dbFrobeniusEpsilon,
+                                int nTuningIterations,
+                                double dbLearningCoefficient,
+                                bool bNormalisation,
+                                const ksi::t_norm& tnorm,
+                                const ksi::implication & imp,
+                                const ksi::dissimilarity & Dissimilarity,
+                                const ksi::owa & Owa,
+                                double positive_class,
+                                double negative_class,
+                                const double threshold_value,
+                                const double dbMinimalTypicality)
+: neuro_fuzzy_system (nRules, _dbFrobeniusEpsilon,
+                      nTuningIterations, dbMinimalTypicality),
+  abstract_annbfis(nRules, dbFrobeniusEpsilon,
+                   nTuningIterations, dbLearningCoefficient,
+                   bNormalisation, tnorm, imp, ksi::fcom(nRules, dbFrobeniusEpsilon, Dissimilarity, Owa),
+                   positive_class, negative_class, threshold_value,
+                   dbMinimalTypicality
+                  )
+ {
+    set_name();
+    _pDissimilarity = Dissimilarity.clone();
+    _pOwa = Owa.clone();
+ }
+
+
ksi::fcom_annbfis::~fcom_annbfis()
{
@@ -185,7 +213,7 @@ ksi::generative_model * ksi::fcom_annbfis::clone_generator() const
}

-std::string ksi::fcom_annbfis::extra_report()
+std::string ksi::fcom_annbfis::extra_report() const
{
    std::stringstream ss;
    ss << ksi::abstract_fcom::extra_report();
diff --git a/source/neuro-fuzzy/fcom-annbfis.h b/source/neuro-fuzzy/fcom-annbfis.h
index 9179db6..6c4b965 100644
--- a/source/neuro-fuzzy/fcom-annbfis.h
+++ b/source/neuro-fuzzy/fcom-annbfis.h
@@ -70,6 +70,25 @@ namespace ksi
                   double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, const dissimilarity & Dissimilarity, const ksi::owa & Owa, double positive_class, double negative_class, const ksi::roc_threshold threshold_type, const double dbMinimalTypicality);

+     /** constructor
+      * @param nRules number of rules
+      * @param dbFrobeniusEpsilon epsilon for Frobenius norm for the clustering algorithm
+      * @param nTuningIterations number of tuning iterations
+      * @param dbLearningCoefficient learning coefficient for gradient method
+      * @param tnorm a t-norm
+      * @param imp implication
+      * @param Dissimilarity object for dissimilarity
+      * @param Owa owa object
+      * @param dbPositiveClass label of a positive class
+      * @param dbNegativeClass label of a negative class
+      * @param threshold_value classification threshold value
+      * @param dbMinimalTypicality minimal typicality for outliers
+      * @date 2024-05-10
+      */
+     fcom_annbfis (int nRules, double dbFrobeniusEpsilon, int nTuningIterations,
+                   double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm,
+                   const implication & imp, const dissimilarity & Dissimilarity, const ksi::owa & Owa, double positive_class, double negative_class, const double threshold_value, const double dbMinimalTypicality);
+
      fcom_annbfis(const fcom_annbfis & a);
      fcom_annbfis(fcom_annbfis && a);
      fcom_annbfis & operator = (const fcom_annbfis & a);
@@ -91,7 +110,7 @@ namespace ksi
      /** Prints extra report from abstract fcom plus add information on minimal typicality */
-     virtual std::string extra_report();
+     virtual std::string extra_report() const;
   };
}
diff --git a/source/neuro-fuzzy/fcom-ma.cpp b/source/neuro-fuzzy/fcom-ma.cpp
index 82616f5..ca3d2ab 100644
--- a/source/neuro-fuzzy/fcom-ma.cpp
+++ b/source/neuro-fuzzy/fcom-ma.cpp
@@ -116,11 +116,33 @@ ksi::fcom_ma::fcom_ma(int nRules,
}

+ksi::fcom_ma::fcom_ma(int nRules,
+                      double dbFrobeniusEpsilon,
+                      int nTuningIterations,
+                      double dbLearningCoefficient,
+                      bool bNormalisation,
+                      const ksi::t_norm& tnorm,
+                      const ksi::dissimilarity & Dissimilarity,
+                      const ksi::owa & Owa,
+                      double positive_class,
+                      double negative_class,
+                      const double threshold_value,
+                      const double dbMinimalTypicality)
+: abstract_ma(nRules, dbFrobeniusEpsilon,
+              nTuningIterations, dbLearningCoefficient,
+              bNormalisation, tnorm, ksi::fcom(nRules, dbFrobeniusEpsilon, Dissimilarity, Owa),
+              positive_class, negative_class, threshold_value, dbMinimalTypicality
+)
+{
+    set_name();
+    _pDissimilarity = Dissimilarity.clone();
+    _pOwa = Owa.clone();
+}
+
ksi::fcom_ma::~fcom_ma()
{
}

-
ksi::fcom_ma::fcom_ma(const ksi::fcom_ma & right) : neuro_fuzzy_system(right), abstract_ma(right), abstract_fcom(right)
{
}
@@ -170,7 +192,7 @@ ksi::generative_model * ksi::fcom_ma::clone_generator() const
    return new ksi::fcom_ma (*this);
}

-std::string ksi::fcom_ma::extra_report()
+std::string ksi::fcom_ma::extra_report() const
{
    std::stringstream ss;
    ss << ksi::abstract_fcom::extra_report();
diff --git a/source/neuro-fuzzy/fcom-ma.h b/source/neuro-fuzzy/fcom-ma.h
index a33c3e0..d95d4fe 100644
--- a/source/neuro-fuzzy/fcom-ma.h
+++ b/source/neuro-fuzzy/fcom-ma.h
@@ -62,8 +62,28 @@ namespace ksi
      * @date 2019-12-22
      */
     fcom_ma (int nRules, double dbFrobeniusEpsilon, int nTuningIterations,
-              double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm,
-              const ksi::dissimilarity & Dissimilarity, const ksi::owa & Owa, double positive_class, double negative_class, const ksi::roc_threshold threshold_type, const double dbMinimalTypicality);
+              double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm,
+              const ksi::dissimilarity & Dissimilarity, const ksi::owa & Owa, double positive_class, double negative_class, const ksi::roc_threshold threshold_type, const double dbMinimalTypicality);
+
+
+
+     /** constructor
+      * @param nRules number of rules
+      * @param dbFrobeniusEpsilon epsilon for Frobenius norm for the clustering algorithm
+      * @param nTuningIterations number of tuning iterations
+      * @param dbLearningCoefficient learning coefficient for gradient method
+      * @param tnorm a t-norm
+      * @param Dissimilarity object for dissimilarity
+      * @param Owa owa object
+      * @param dbPositiveClass label of a positive class
+      * @param dbNegativeClass label of a negative class
+      * @param threshold_value classification threshold value
+      * @param dbMinimalTypicality minimal typicality for outliers
+      * @date 2024-05-10
+      */
+     fcom_ma (int nRules, double dbFrobeniusEpsilon, int nTuningIterations,
+              double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm,
+              const ksi::dissimilarity & Dissimilarity, const ksi::owa & Owa, double positive_class, double negative_class, const double threshold_value, const double dbMinimalTypicality);

     fcom_ma(const fcom_ma & a);
     fcom_ma(fcom_ma && a);
@@ -82,7 +102,7 @@ namespace ksi
     virtual partition doPartition (const
dataset & X); /** Prints extra report from abstract fcom plus add information on minimal typicality */ - virtual std::string extra_report(); + virtual std::string extra_report() const; }; } diff --git a/source/neuro-fuzzy/fcom-tsk.cpp b/source/neuro-fuzzy/fcom-tsk.cpp index ae70b5d..ae144af 100644 --- a/source/neuro-fuzzy/fcom-tsk.cpp +++ b/source/neuro-fuzzy/fcom-tsk.cpp @@ -119,6 +119,30 @@ ksi::fcom_tsk::fcom_tsk(int nRules, _pOwa = Owa.clone(); } +ksi::fcom_tsk::fcom_tsk(int nRules, + double dbFrobeniusEpsilon, + int nTuningIterations, + double dbLearningCoefficient, + bool bNormalisation, + const ksi::t_norm& tnorm, + const ksi::dissimilarity & Dissimilarity, + const ksi::owa & Owa, + double positive_class, + double negative_class, + const double threshold_value, + const double dbMinimalTypicality) +: abstract_tsk(nRules, dbFrobeniusEpsilon, + nTuningIterations, dbLearningCoefficient, + bNormalisation, tnorm, ksi::fcom(nRules, dbFrobeniusEpsilon, Dissimilarity, Owa), + positive_class, negative_class, threshold_value, + dbMinimalTypicality +) +{ + set_name(); + _pDissimilarity = Dissimilarity.clone(); + _pOwa = Owa.clone(); +} + ksi::fcom_tsk::~fcom_tsk() { @@ -178,7 +202,7 @@ ksi::generative_model * ksi::fcom_tsk::clone_generator() const } -std::string ksi::fcom_tsk::extra_report() +std::string ksi::fcom_tsk::extra_report() const { std::stringstream ss; ss << ksi::abstract_fcom::extra_report(); diff --git a/source/neuro-fuzzy/fcom-tsk.h b/source/neuro-fuzzy/fcom-tsk.h index 5a9dd7f..1bf3380 100644 --- a/source/neuro-fuzzy/fcom-tsk.h +++ b/source/neuro-fuzzy/fcom-tsk.h @@ -48,9 +48,7 @@ namespace ksi fcom_tsk (int nRules, double dbFrobeniusEpsilon, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const ksi::dissimilarity & Dissimilarity, const ksi::owa & Owa, const double dbMinimalTypicality); - - - + /** constructor * @param nRules number of rules * @param dbFrobeniusEpsilon epsilon for Frobeniu norm for the clustering algorithm @@ -70,6 +68,25 @@ namespace ksi const ksi::dissimilarity & Dissimilarity, const ksi::owa & Owa, double positive_class, double negative_class, const ksi::roc_threshold threshold_type, const double dbMinimalTypicality); + /** constructor + * @param nRules number of rules + * @param dbFrobeniusEpsilon epsilon for Frobeniu norm for the clustering algorithm + * @param nTuningIterations number of tuning iterations + * @param dbLearningCoefficient learning coefficient for gradient method + * @param tnorm a t-norm + * @param Dissimilarity object for dissimilarity + * @param Owa owa object + * @param dbPositiveClass label of a positive class + * @param dbNegativeClass label of a negative class + * @param threshold_value classification threshold value + * @param dbMinimalTypicality minimal typicality for outliers + * @date 2024-05-10 + */ + fcom_tsk (int nRules, double dbFrobeniusEpsilon, int nTuningIterations, + double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, + const ksi::dissimilarity & Dissimilarity, const ksi::owa & Owa, double positive_class, double negative_class, const double threshold_value, const double dbMinimalTypicality); + + fcom_tsk(const fcom_tsk & a); fcom_tsk(fcom_tsk && a); fcom_tsk & operator = (const fcom_tsk & a); @@ -91,11 +108,7 @@ namespace ksi /** Prints extra report from abstract fcom plus add information on minimal typicality */ - virtual std::string extra_report(); - - public: - - + virtual std::string extra_report() const; }; } diff --git 
a/source/neuro-fuzzy/fubi-annbfis.cpp b/source/neuro-fuzzy/fubi-annbfis.cpp index b678c0c..357df11 100644 --- a/source/neuro-fuzzy/fubi-annbfis.cpp +++ b/source/neuro-fuzzy/fubi-annbfis.cpp @@ -90,6 +90,26 @@ ksi::fubi_annbfis::fubi_annbfis(int nRules, init_name(); } + +ksi::fubi_annbfis::fubi_annbfis(int nRules, + int nClusteringIterations, + int nTuningIterations, + double dbLearningCoefficient, + bool bNormalisation, + const t_norm & tnorm, + const implication & imp, + double positive_class, + double negative_class, + double threshold_value +) : +neuro_fuzzy_system(), +abstract_annbfis (nRules, nClusteringIterations, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm, imp, ksi::fubi (nRules, nClusteringIterations), positive_class, negative_class, threshold_value), +annbfis (nRules, nClusteringIterations, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm, imp, positive_class, negative_class, threshold_value), +subspace_annbfis (nRules, nClusteringIterations, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm, imp, positive_class, negative_class, threshold_value) +{ + init_name(); +} + ksi::fubi_annbfis::fubi_annbfis (const ksi::implication & imp) : neuro_fuzzy_system(), ksi::abstract_annbfis(), ksi::annbfis(), ksi::subspace_annbfis() { init_name(); diff --git a/source/neuro-fuzzy/fubi-annbfis.h b/source/neuro-fuzzy/fubi-annbfis.h index ec764cf..5dc0dd1 100644 --- a/source/neuro-fuzzy/fubi-annbfis.h +++ b/source/neuro-fuzzy/fubi-annbfis.h @@ -80,6 +80,22 @@ namespace ksi double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, double positive_class, double negative_class, ksi::roc_threshold threshold_type); + /** constructor + * @param nRules number of rules + * @param nClusteringIterations number of clustering iterations + * @param nTuningIterations number of tuning iterations + * @param dbLearningCoefficient learning coefficient for gradient method + * @param tnorm a t-norm + * @param imp implication + * @param dbPositiveClass label of a positive class + * @param dbNegativeClass label of a negative class + * @param threshold_value classification threshold value + * @date 2024-05-10 + */ + fubi_annbfis (int nRules, int nClusteringIterations, int nTuningIterations, + double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, double positive_class, double negative_class, double threshold_value = 0.5); + + /** A constructor with implication. 
@param imp implication @date 2020-08-14 diff --git a/source/neuro-fuzzy/ma.cpp b/source/neuro-fuzzy/ma.cpp index 21e63b4..12be7d3 100644 --- a/source/neuro-fuzzy/ma.cpp +++ b/source/neuro-fuzzy/ma.cpp @@ -1,43 +1,17 @@ /** @file */ -#include -#include -#include -#include -#include -#include #include -#include -#include +#include -#include "../neuro-fuzzy/ma.h" +#include "../auxiliary/roc.h" +#include "../gan/discriminative_model.h" +#include "../gan/generative_model.h" #include "../neuro-fuzzy/abstract-ma.h" +#include "../neuro-fuzzy/ma.h" #include "../neuro-fuzzy/neuro-fuzzy-system.h" -#include "../neuro-fuzzy/rule.h" -#include "../neuro-fuzzy/premise.h" -#include "../neuro-fuzzy/consequence-MA.h" -#include "../tnorms/t-norm-product.h" -#include "../partitions/cluster.h" #include "../partitions/fcm.h" #include "../partitions/partition.h" -#include "../tnorms/t-norm-product.h" -#include "../implications/imp-reichenbach.h" -#include "../descriptors/descriptor-gaussian.h" -#include "../auxiliary/least-error-squares-regression.h" -#include "../auxiliary/tempus.h" -#include "../auxiliary/clock.h" -#include "../auxiliary/confusion-matrix.h" -#include "../auxiliary/roc.h" -#include "../service/debug.h" -#include "../auxiliary/error-RMSE.h" -#include "../auxiliary/error-MAE.h" -#include "../common/number.h" -#include "../readers/reader-complete.h" -#include "../common/data-modifier.h" -#include "../common/data-modifier-normaliser.h" #include "../partitions/partition.h" -#include "../gan/discriminative_model.h" -#include "../gan/generative_model.h" ksi::partition ksi::ma::doPartition(const ksi::dataset& X) { @@ -138,6 +112,29 @@ ksi::ma::ma(int nRules, set_name(); } +ksi::ma::ma(int nRules, + int nClusteringIterations, + int nTuningIterations, + double dbLearningCoefficient, + bool bNormalisation, + const ksi::t_norm & tnorm, + double positive_class, + double negative_class, + const double threshold_value) +: neuro_fuzzy_system ( ksi::fcm (nRules, nClusteringIterations)), +abstract_ma (nRules, nClusteringIterations, + nTuningIterations, dbLearningCoefficient, + bNormalisation, tnorm, ksi::fcm (nRules, nClusteringIterations), + positive_class, negative_class, threshold_value) +{ + _positive_class = positive_class; + _negative_class = negative_class; + _threshold_type = ksi::roc_threshold::manual; + _threshold_value = threshold_value; + + set_name(); +} + ksi::neuro_fuzzy_system * ksi::ma::clone() const { try diff --git a/source/neuro-fuzzy/ma.h b/source/neuro-fuzzy/ma.h index 2661f51..57381ab 100644 --- a/source/neuro-fuzzy/ma.h +++ b/source/neuro-fuzzy/ma.h @@ -3,6 +3,7 @@ #ifndef MA_H #define MA_H +#include #include #include @@ -42,6 +43,8 @@ namespace ksi double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm); ma (int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, double positive_class, double negative_class, ksi::roc_threshold threshold_type); + ma (int nRules, int nClusteringIterations, int nTuningIterations, + double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, double positive_class, double negative_class, const double threshold_value = 0.5); ma(const ma & a); ma(ma && a); diff --git a/source/neuro-fuzzy/neuro-fuzzy-system.cpp b/source/neuro-fuzzy/neuro-fuzzy-system.cpp index 72e45a5..d80772e 100644 --- a/source/neuro-fuzzy/neuro-fuzzy-system.cpp +++ b/source/neuro-fuzzy/neuro-fuzzy-system.cpp @@ -1,12 +1,13 @@ /** @file */ #include +#include #include #include #include 
#include
#include
-#include
+// #include
#include
#include "../auxiliary/clock.h"
@@ -24,8 +25,7 @@
#include "../neuro-fuzzy/neuro-fuzzy-system.h"
#include "../neuro-fuzzy/rulebase.h"
#include "../readers/reader-complete.h"
-#include "../service/debug.h"
-#include "../service/debug.h"
+// #include "../service/debug.h"
@@ -120,7 +120,6 @@ void ksi::neuro_fuzzy_system::elaborate_answers_for_regression (
    model << "data file: " << trainDataFile << std::endl;
    model << "normalisation: " << std::boolalpha << bNormalisation << std::endl;
-
    model << "RMSE: " << blad_rmse_train << std::endl;
    model << "MAE: " << blad_mae_train << std::endl;
@@ -249,10 +248,6 @@ ksi::neuro_fuzzy_system::neuro_fuzzy_system(const ksi::dataset& trainData,
    _pModyfikator = nullptr;
}

-
-
-
-
ksi::neuro_fuzzy_system::neuro_fuzzy_system(const std::string & trainDataFile,
                                            const std::string & testDataFile,
                                            const std::string & resultsFile,
@@ -586,6 +581,16 @@ double ksi::neuro_fuzzy_system::elaborate_threshold_value(
{
    try
    {
+        if (type == ksi::roc_threshold::manual)
+        {
+            if (std::isnan(_threshold_value))
+            {
+                throw std::string ("Threshold type is manual, but the value is not set.");
+            }
+            else
+                return _threshold_value;
+        }
+
        roc ROC;
        auto progi = ROC.calculate_ROC_points(Expected, Elaborated, positiveClassvalue, negativeClassvalue);
@@ -714,6 +719,7 @@ ksi::result ksi::neuro_fuzzy_system::experiment_classification_core(
                      nNumberofTuningIterations, dbLearningCoefficient, _TrainDataset, _ValidationDataset);
    zegar.stop();
+    run_extra_activities_for_the_model();
    if (_pRulebase)
    {
@@ -735,13 +741,10 @@ ksi::result ksi::neuro_fuzzy_system::experiment_classification_core(
        throw ss.str();
    }
-    // thdebug(__LINE__);
    std::vector wYtestExpected, wYtestElaboratedClass, wYtestElaboratedNumeric, wYtrainExpected, wYtrainElaboratedClass, wYtrainElaboratedNumeric;
-    // thdebug(__LINE__);
    get_answers_for_train_classification();
-    // thdebug(__LINE__);
    for (const auto & answer : _answers_for_train)
    {
        double expected, el_numeric;
@@ -753,17 +756,13 @@ ksi::result ksi::neuro_fuzzy_system::experiment_classification_core(
    model << classification_intro() << std::endl;
    if (threshold_type != ksi::roc_threshold::none)
        model << "classification threshold type: " << ksi::to_string(threshold_type) << std::endl;
-    // thdebug(__LINE__);
    _threshold_value = elaborate_threshold_value (wYtrainExpected, wYtrainElaboratedNumeric, dbPositiveClass, dbNegativeClass, threshold_type);
-    // thdebug(__LINE__);
    wYtrainElaboratedClass.clear();
    wYtrainElaboratedNumeric.clear();
    wYtrainExpected.clear();
-    // thdebug(__LINE__);
    get_answers_for_train_classification();
-    // thdebug(__LINE__);
    for (const auto & answer : _answers_for_train)
    {
        double expected, el_numeric, el_class;
@@ -777,9 +776,7 @@ ksi::result ksi::neuro_fuzzy_system::experiment_classification_core(
    wYtestElaboratedNumeric.clear();
    wYtestExpected.clear();
-    // thdebug(__LINE__);
    get_answers_for_test_classification();
-    // thdebug(__LINE__);
    for (const auto & answer : _answers_for_test)
    {
        double expected, el_numeric, el_class;
@@ -789,9 +786,7 @@ ksi::result ksi::neuro_fuzzy_system::experiment_classification_core(
        wYtestElaboratedClass.push_back(el_class);
    }
-    // thdebug(__LINE__);
    model << get_classification_threshold_value();
-    // thdebug(__LINE__);
    model << "fuzzy rule base creation time: ";
    if (zegar.elapsed_seconds() > 10)
@@ -810,30 +805,24 @@ ksi::result ksi::neuro_fuzzy_system::experiment_classification_core(
    model << std::endl;
    model << "confusion matrix for test data" << std::endl;
-    // thdebug(__LINE__);
    con_test.calculate_statistics(wYtestExpected, wYtestElaboratedClass,
-                                  dbPositiveClass, dbNegativeClass,
-                                  TP, TN, FP, FN);
-    // thdebug(__LINE__);
+                                  dbPositiveClass, dbNegativeClass,
+                                  TP, TN, FP, FN);
    wynik.TestPositive2Positive = TP;
    wynik.TestPositive2Negative = FN;
    wynik.TestNegative2Negative = TN;
    wynik.TestNegative2Positive = FP;
-    // thdebug(__LINE__);
    model << con_test.print(TP, TN, FP, FN);
-    // thdebug(__LINE__);
    model << std::endl;
    //----------------
    model << std::endl;
    model << "confusion matrix for train data" << std::endl;
-    // thdebug(__LINE__);
    con_test.calculate_statistics(wYtrainExpected, wYtrainElaboratedClass,
                                  dbPositiveClass, dbNegativeClass,
                                  TP, TN, FP, FN);
-    // thdebug(__LINE__);
    wynik.TrainPositive2Positive = TP;
    wynik.TrainPositive2Negative = FN;
    wynik.TrainNegative2Negative = TN;
@@ -856,7 +845,6 @@ ksi::result ksi::neuro_fuzzy_system::experiment_classification_core(
    model << "answers for the train set" << std::endl;
    model << "expected\telaborated_numeric\telaborated_class" << std::endl;
-    // thdebug(__LINE__);
    for (const auto & answer : _answers_for_train)
    {
        double expected, el_numeric, el_class;
@@ -864,7 +852,6 @@ ksi::result ksi::neuro_fuzzy_system::experiment_classification_core(
        model << expected << " " << el_numeric << " " << el_class << std::endl;
    }
-    // thdebug(__LINE__);
    model << std::endl << std::endl;
    model << "answers for the test set" << std::endl;
    model << "expected\telaborated_numeric\telaborated_class" << std::endl;
@@ -877,7 +864,6 @@ ksi::result ksi::neuro_fuzzy_system::experiment_classification_core(
    }
    model.close();
-    // thdebug(__LINE__);
    return wynik;
}
@@ -1574,7 +1560,13 @@ void ksi::neuro_fuzzy_system::set_positive_class(const double p)
void ksi::neuro_fuzzy_system::set_threshold_type (const ksi::roc_threshold & th)
{
-    this->_threshold_type = th;
+    _threshold_type = th;
+}
+
+void ksi::neuro_fuzzy_system::set_threshold_value (const double value)
+{
+    _threshold_value = value;
+    _threshold_type = ksi::roc_threshold::manual;
}

double ksi::neuro_fuzzy_system::modify_learning_coefficient(const double learning_coefficient, const std::deque& errors)
@@ -1620,6 +1612,11 @@ std::string ksi::neuro_fuzzy_system::report_average_number_of_rules_for_train()
    return {};
}

-
+void ksi::neuro_fuzzy_system::run_extra_activities_for_the_model()
+{
+    // The body is empty. If some extra activity is needed after
+    // the model has been created, a derived class implements
+    // a non-empty body of this method.
+}
diff --git a/source/neuro-fuzzy/neuro-fuzzy-system.h b/source/neuro-fuzzy/neuro-fuzzy-system.h
index 8afb8b6..f577454 100644
--- a/source/neuro-fuzzy/neuro-fuzzy-system.h
+++ b/source/neuro-fuzzy/neuro-fuzzy-system.h
@@ -71,24 +71,33 @@ namespace ksi
      /** short description of a neuro-fuzzy system showing its main features */
      std::string _description_of_neuro_fuzzy_system;

-      std::string _train_data_file;       ///< name of train data file
-      std::string _test_data_file;        ///< name of test data file
-      std::string _validation_data_file ; ///< name of validation data file
-      std::string _output_file;           ///< name of output file
-
-      double _positive_class;             ///< label for positive class in classification
-      double _negative_class;             ///< label for negative class in classification
-      ksi::roc_threshold _threshold_type; ///< threshold type for classification
-      double _threshold_value = 0.5;      ///< value of the threshold for classification
+      /** name of train data file */
+      std::string _train_data_file;
+      /** name of test data file */
+      std::string _test_data_file;
+      /** name of validation data file */
+      std::string _validation_data_file ;
+      /** name of output file */
+      std::string _output_file;
+
+      /** label for positive class in classification */
+      double _positive_class;
+      /** label for negative class in classification */
+      double _negative_class;
+      /** threshold type for classification */
+      ksi::roc_threshold _threshold_type;
+      /** value of the threshold for classification */
+      double _threshold_value = std::numeric_limits::quiet_NaN(); // <-- illegal value; will be set later.

      std::size_t _original_size_of_training_dataset = 0;
      std::size_t _reduced_size_of_training_dataset = 0;

      std::shared_ptr _pModyfikator { nullptr };

-      std::vector> _answers_for_train; ///< answers for the train set: expected elaborated_numeric elaborated_class
-      std::vector> _answers_for_test;  ///< answers for the test set: expected elaborated_numeric elaborated_class
-
+      /** answers for the train set: expected elaborated_numeric elaborated_class */
+      std::vector> _answers_for_train;
+      /** answers for the test set: expected elaborated_numeric elaborated_class */
+      std::vector> _answers_for_test;

   public:
      /** @return number of item in the train dataset
@@ -136,7 +145,12 @@ namespace ksi
         @param th threshold type to set
         @date 2021-12-27 */
      void set_threshold_type (const ksi::roc_threshold & th);
-
+
+      /** The method sets the threshold value for a classifier.
+       *  The method also sets the threshold type to ksi::roc_threshold::manual.
+         @param value threshold value to set
+         @date 2024-05-10 */
+      void set_threshold_value (const double value);

      /** @return expected class, elaborated_numeric answer, elaborated_class for the train dataset
         @date 2021-09-16
@@ -653,7 +667,6 @@ virtual result experiment_regression_core(
                            double positiveClassvalue,
                            double negativeClassvalue,
                            const ksi::roc_threshold & type);
-
   public:
      /** @return rulebase's answer of a data item
@@ -676,7 +689,6 @@ virtual result experiment_regression_core(
      /** The method trains the generative model. */
      virtual void train_generative_model (const dataset & ds);
-
      virtual std::string to_string ();

   public:
@@ -710,6 +722,12 @@ virtual result experiment_regression_core(
         @date 2024-03-24 */
      virtual std::string report_average_number_of_rules_for_test () const;

+   protected:
+      /** The body is empty. If some extra activity is needed after
+       *  the model has been created, a derived class implements
+       *  a non-empty body of this method.
+       *  @date 2024-05-02 */
+      virtual void run_extra_activities_for_the_model();
   };
}
diff --git a/source/neuro-fuzzy/subspace-annbfis.cpp b/source/neuro-fuzzy/subspace-annbfis.cpp
index 225af6d..2189bed 100644
--- a/source/neuro-fuzzy/subspace-annbfis.cpp
+++ b/source/neuro-fuzzy/subspace-annbfis.cpp
@@ -1,9 +1,5 @@
/** @file */

-#include
-#include
-#include
-#include
#include
#include

@@ -15,21 +11,11 @@
#include "../partitions/cluster.h"
#include "../partitions/sfcm.h"
#include "../tnorms/t-norm-product.h"
-#include "../implications/imp-reichenbach.h"
-#include "../descriptors/descriptor-gaussian-subspace.h"
#include "../auxiliary/least-error-squares-regression.h"
-#include "../auxiliary/tempus.h"
-#include "../auxiliary/confusion-matrix.h"
#include "../auxiliary/roc.h"
-#include "../service/debug.h"
#include "../auxiliary/error-RMSE.h"
-#include "../auxiliary/error-MAE.h"
-#include "../readers/reader-complete.h"
-#include "../common/data-modifier-normaliser.h"
#include "../partitions/partition.h"
-#include "../gan/discriminative_model.h"
-#include "../gan/generative_model.h"
-
+// #include "../service/debug.h"

void ksi::subspace_annbfis::set_name()
{
@@ -96,6 +82,24 @@ ksi::subspace_annbfis::subspace_annbfis(int nRules,
   init_name();
}

+
+ksi::subspace_annbfis::subspace_annbfis(int nRules,
+                                        int nClusteringIterations,
+                                        int nTuningIterations,
+                                        double dbLearningCoefficient,
+                                        bool bNormalisation,
+                                        const t_norm & tnorm,
+                                        const implication & imp,
+                                        double positive_class,
+                                        double negative_class,
+                                        double threshold_value
+) : neuro_fuzzy_system(),
+abstract_annbfis (nRules, nClusteringIterations, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm, imp, ksi::sfcm (nRules, nClusteringIterations), positive_class, negative_class, threshold_value),
+annbfis(nRules, nClusteringIterations, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm, imp, positive_class, negative_class, threshold_value)
+{
+   init_name();
+}
+
ksi::subspace_annbfis::subspace_annbfis (const ksi::implication & imp) : neuro_fuzzy_system(), ksi::abstract_annbfis(), ksi::annbfis()
{
   init_name();
diff --git a/source/neuro-fuzzy/subspace-annbfis.h b/source/neuro-fuzzy/subspace-annbfis.h
index 2d98e3c..ab8dc20 100644
--- a/source/neuro-fuzzy/subspace-annbfis.h
+++ b/source/neuro-fuzzy/subspace-annbfis.h
@@ -4,12 +4,13 @@
#define SUBSPACE_ANNBFIS_H

+#include
#include
#include

#include "../common/dataset.h"
-#include "rulebase.h"
-#include "annbfis.h"
+#include "../neuro-fuzzy/rulebase.h"
+#include "../neuro-fuzzy/annbfis.h"
#include "../implications/implication.h"
#include "../tnorms/t-norm.h"
#include "../auxiliary/roc.h"
@@ -86,6 +87,21 @@ namespace ksi
     subspace_annbfis (int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, double positive_class, double negative_class, ksi::roc_threshold threshold_type);

+     /** constructor
+      * @param nRules number of rules
+      * @param nClusteringIterations number of clustering iterations
+      * @param nTuningIterations number of tuning iterations
+      * @param dbLearningCoefficient learning coefficient for gradient method
+      * @param tnorm a t-norm
+      * @param imp implication
+      * @param dbPositiveClass label of a positive class
+      * @param dbNegativeClass label of a negative class
+      * @param threshold_value classification threshold value
+      * @date 2024-05-09
+      */
+     subspace_annbfis (int nRules, int nClusteringIterations, int nTuningIterations,
+                       double dbLearningCoefficient, bool
bNormalisation, const t_norm & tnorm, const implication & imp, double positive_class, double negative_class, double threshold_value = 0.5); + /** A constructor with implication. @param imp implication diff --git a/source/neuro-fuzzy/three_way_decision_nfs.cpp b/source/neuro-fuzzy/three_way_decision_nfs.cpp index 38f7ad1..6d9c8c7 100644 --- a/source/neuro-fuzzy/three_way_decision_nfs.cpp +++ b/source/neuro-fuzzy/three_way_decision_nfs.cpp @@ -4,6 +4,7 @@ #include #include #include +#include #include #include @@ -13,6 +14,7 @@ #include "../gan/generative_model.h" #include "../auxiliary/tempus.h" #include "../service/debug.h" +#include "../auxiliary/confusion-matrix.h" ksi::three_way_decision_nfs::three_way_decision_nfs () { @@ -64,7 +66,6 @@ ksi::three_way_decision_nfs::three_way_decision_nfs( CATCH; } - ksi::three_way_decision_nfs::three_way_decision_nfs( const std::vector > & cascade, const std::string & train, @@ -83,6 +84,8 @@ ksi::three_way_decision_nfs::three_way_decision_nfs( ksi::three_way_decision_nfs::three_way_decision_nfs(const std::vector>& cascade, const double maximal_deviation) : _cascade(cascade) { + set_positive_class(cascade[0]->get_positive_class()); + set_negative_class(cascade[0]->get_negative_class()); _threshold_type = ksi::roc_threshold::none; _noncommitment_widths = std::vector (cascade.size(), maximal_deviation); } @@ -98,11 +101,12 @@ ksi::three_way_decision_nfs::three_way_decision_nfs(const std::vectorget_positive_class()); + set_negative_class(cascade[0]->get_negative_class()); } CATCH; } - ksi::three_way_decision_nfs::three_way_decision_nfs( const std::vector >& cascade, const ksi::dataset& train, @@ -117,7 +121,6 @@ ksi::three_way_decision_nfs::three_way_decision_nfs( _noncommitment_widths = std::vector (cascade.size(), maximal_deviation); } - ksi::three_way_decision_nfs::three_way_decision_nfs( const std::vector >& cascade, const ksi::dataset& train, @@ -157,8 +160,7 @@ ksi::three_way_decision_nfs::three_way_decision_nfs( _noncommitment_widths = std::vector (cascade.size(), maximal_deviation); } - -void ksi::three_way_decision_nfs::copy_fields(const ksi::three_way_decision_nfs& _3wnfs) +void ksi::three_way_decision_nfs::three_copy_fields(const ksi::three_way_decision_nfs& _3wnfs) { _positive_class = _3wnfs._positive_class; _negative_class = _3wnfs._negative_class; @@ -182,7 +184,7 @@ ksi::three_way_decision_nfs::three_way_decision_nfs (const ksi::three_way_decisi for (const auto & s : wzor._cascade) _cascade.push_back(std::shared_ptr (s->clone())); - copy_fields(wzor); + three_copy_fields(wzor); } ksi::three_way_decision_nfs & ksi::three_way_decision_nfs::operator= (const ksi::three_way_decision_nfs & wzor) @@ -198,7 +200,7 @@ ksi::three_way_decision_nfs & ksi::three_way_decision_nfs::operator= (const ksi: for (const auto & s : wzor._cascade) _cascade.push_back(std::shared_ptr (s->clone())); - copy_fields(wzor); + three_copy_fields(wzor); return *this; } @@ -207,7 +209,7 @@ ksi::three_way_decision_nfs::three_way_decision_nfs (ksi::three_way_decision_nfs { // swap what is to swap std::swap(_cascade, wzor._cascade); - copy_fields(wzor); + three_copy_fields(wzor); } ksi::three_way_decision_nfs & ksi::three_way_decision_nfs::operator= (ksi::three_way_decision_nfs && wzor) @@ -219,7 +221,7 @@ ksi::three_way_decision_nfs & ksi::three_way_decision_nfs::operator= (ksi::three // swap what is to swap std::swap(_cascade, wzor._cascade); - copy_fields(wzor); + three_copy_fields(wzor); return *this; } @@ -248,7 +250,7 @@ double 
ksi::three_way_decision_nfs::discriminate(const ksi::datum& d) void ksi::three_way_decision_nfs::train_discriminative_model(const ksi::dataset& ds) { - throw (std::string ("not implemented")); + throw (std::string ("not implemented")); // interface pollution } void ksi::three_way_decision_nfs::createFuzzyRulebase(int, int, double, const ksi::dataset& train, const ksi::dataset& validate) @@ -256,8 +258,6 @@ void ksi::three_way_decision_nfs::createFuzzyRulebase(int, int, double, const ks createFuzzyRulebase(train, this->_TestDataset, validate); } - -/// @todo Po co test tutaj? void ksi::three_way_decision_nfs::createFuzzyRulebase(const ksi::dataset& train, const ksi::dataset & test, const ksi::dataset & validation) { try @@ -302,7 +302,6 @@ void ksi::three_way_decision_nfs::createFuzzyRulebase(const ksi::dataset& train, CATCH; } -/** @todo Tutaj jest problem. 3WNFS powinien zwracać już gotową klasę, a zwraca wartość numeryczną. Ale przecież każdy z systemów w kaskadzie ma swój własny próg. To trzeba jakoś fikuśnie przerobić */ double ksi::three_way_decision_nfs::answer(const ksi::datum& item) const { try @@ -349,6 +348,104 @@ std::vector> ksi::three_way_decision_nfs::get return result; } +double ksi::three_way_decision_nfs::get_train_F1score_for_part_of_the_cascade(const std::size_t depth) +{ + std::vector vExpected, vClass; + // expected, elaborated_numeric, elaborated_class + auto expected_elaborated_class = get_answers_for_train_classification(depth); + for (auto & [expected, _, elaborated_class] : expected_elaborated_class) + { + vExpected.push_back(expected); + vClass.push_back(elaborated_class); + } + + int p2p, n2n, n2p, p2n; + ksi::confusion_matrix cm; + cm.calculate_statistics(vExpected, vClass, this->_positive_class, this->_negative_class, p2p, n2n, n2p, p2n); + ksi::result r; + r.TrainPositive2Positive = p2p; + r.TrainPositive2Negative = p2n; + r.TrainNegative2Negative = n2n; + r.TrainNegative2Positive = n2p; + + return cm.TrainF1score(r); +} + +double ksi::three_way_decision_nfs::get_test_F1score_for_part_of_the_cascade(const std::size_t depth) +{ + std::vector vExpected, vClass; + // expected, elaborated_numeric, elaborated_class + auto expected_elaborated_class = get_answers_for_test_classification(depth); + for (auto & [expected, _, elaborated_class] : expected_elaborated_class) + { + vExpected.push_back(expected); + vClass.push_back(elaborated_class); + } + + int p2p, n2n, n2p, p2n; + ksi::confusion_matrix cm; + cm.calculate_statistics(vExpected, vClass, this->_positive_class, this->_negative_class, p2p, n2n, n2p, p2n); + ksi::result r; + r.TestPositive2Positive = p2p; + r.TestPositive2Negative = p2n; + r.TestNegative2Negative = n2n; + r.TestNegative2Positive = n2p; + + return cm.TestF1score(r); +} + +std::vector > ksi::three_way_decision_nfs::get_answers_for_train_classification(const std::size_t cascade_depth) +{ + _number_of_rules_used = 0; + _number_of_data_items = 0; + + _answers_for_train.clear(); + + auto XYtrain = _TrainDataset.splitDataSetVertically(_TrainDataset.getNumberOfAttributes() - 1); + std::size_t nXtrain = _TrainDataset.getNumberOfData(); + + _answers_for_train.resize(nXtrain); + #pragma omp parallel for + for (std::size_t i = 0; i < nXtrain; i++) + { + auto [ elaborated_numeric, elaborated_class ] = answer_classification(*(XYtrain.first.getDatum(i)), cascade_depth); + auto expected = XYtrain.second.get(i, 0); + + // _answers_for_train.push_back({expected, elaborated_numeric, elaborated_class}); + _answers_for_train[i] = {expected, elaborated_numeric, 
elaborated_class}; + } + + auto data_size = _TrainDataset.size(); + _dbTrainAverageNumerOfRulesUsed = 1.0 * _number_of_rules_used / _number_of_data_items; + return _answers_for_train; +} + +std::vector > ksi::three_way_decision_nfs::get_answers_for_test_classification(const std::size_t cascade_depth) +{ + _number_of_rules_used = 0; + _number_of_data_items = 0; + + _answers_for_test.clear(); + + auto XYtest = _TestDataset.splitDataSetVertically(_TestDataset.getNumberOfAttributes() - 1); + std::size_t nXtest = _TestDataset.getNumberOfData(); + + _answers_for_test.resize(nXtest); + #pragma omp parallel for + for (std::size_t i = 0; i < nXtest; i++) + { + auto [ elaborated_numeric, elaborated_class ] = answer_classification(*(XYtest.first.getDatum(i)), cascade_depth); + auto expected = XYtest.second.get(i, 0); + + // _answers_for_test.push_back({expected, elaborated_numeric, elaborated_class}); + _answers_for_test[i] = {expected, elaborated_numeric, elaborated_class}; + } + auto data_size = _TestDataset.size(); + _dbTestAverageNumerOfRulesUsed = 1.0 * _number_of_rules_used / _number_of_data_items; + return _answers_for_test; +} + +/* std::pair ksi::three_way_decision_nfs::answer_classification(const ksi::datum& item) const { try @@ -377,10 +474,47 @@ std::pair ksi::three_way_decision_nfs::answer_classification(con } CATCH; } +*/ + +std::pair ksi::three_way_decision_nfs::answer_classification(const ksi::datum& item) const +{ + constexpr std::size_t MAX {std::numeric_limits::max()}; + return ksi::three_way_decision_nfs::answer_classification(item, MAX); +} + +std::pair ksi::three_way_decision_nfs::answer_classification(const ksi::datum& item, const std::size_t cascade_depth) const +{ + try + { + auto nan = std::numeric_limits::signaling_NaN(); + auto result = std::make_pair (nan, nan); + std::size_t number_of_rules = 0; + + auto depth = std::min(cascade_depth, _cascade.size() - 1); + for (std::size_t i = 0; i < depth + 1; i++) + { + auto & pSystem = _cascade[i]; + result = pSystem->answer_classification(item); + number_of_rules += pSystem->get_number_of_rules(); + auto threshold_value = pSystem->get_threshold_value(); + auto numeric = result.first; + if ((i == depth) or (std::fabs(numeric - threshold_value) > _noncommitment_widths[i])) + { + ++_number_of_data_items; + _number_of_rules_used += number_of_rules; + return result; + } + } + ++_number_of_data_items; + _number_of_rules_used += number_of_rules; + return result; + } + CATCH; +} ksi::dataset ksi::three_way_decision_nfs::extract_poor_results( const ksi::dataset & data, - const std::vector> & results_train, + const std::vector> & answers, const double threshold_value, const double maximal_deviation) { @@ -388,10 +522,10 @@ ksi::dataset ksi::three_way_decision_nfs::extract_poor_results( { std::vector indices; - for (std::size_t i = 0; i < results_train.size(); i++) + for (std::size_t i = 0; i < answers.size(); i++) { double elaborated; - std::tie(std::ignore, elaborated, std::ignore) = results_train[i]; + std::tie(std::ignore, elaborated, std::ignore) = answers[i]; if (fabs(threshold_value - elaborated) < maximal_deviation) indices.push_back(i); } @@ -462,13 +596,53 @@ std::string ksi::three_way_decision_nfs::get_brief_cascade_names() const return ss.str(); } +std::string ksi::three_way_decision_nfs::print_f1scores_cascade() const +{ + try + { + const int PRECISION { 8}; + const int WIDTH_FLT {12}; + const int WIDTH_INT { 2}; + + std::stringstream ss; + // --------------- + // Print the F1scores for all cascade depths: + ss << 
"-------------------------" << std::endl; + ss << "F1scores for the cascade:" << std::endl; + ss << "depth\ttrain\ttest"<< std::endl; + auto size = _cascade.size(); + auto train = _cascade_F1score_train.size(); + auto test = _cascade_F1score_test.size(); + + for (std::size_t i = 0; i < size; ++i) + { + ss << std::setw(WIDTH_INT) << std::right << i + 1; + + if (i < train) + ss << std::fixed << std::setw(WIDTH_FLT) << std::setprecision(PRECISION) << _cascade_F1score_train[i]; + else + ss << std::fixed << std::setw(WIDTH_FLT) << std::setprecision(PRECISION) << "---"; + + if (i < test) + ss << std::fixed << std::setw(WIDTH_FLT) << std::setprecision(PRECISION) << _cascade_F1score_test[i]; + else + ss << std::fixed << std::setw(WIDTH_FLT) << std::setprecision(PRECISION) << "---"; + ss << std::endl; + } + ss << "-------------------------" << std::endl; + return ss.str(); + } + CATCH; +} + std::string ksi::three_way_decision_nfs::extra_report() const { try { std::stringstream ss; ss << *this << std::endl; - + ss << print_f1scores_cascade() << std::endl; + return ss.str(); } CATCH; @@ -499,9 +673,9 @@ std::string ksi::three_way_decision_nfs::classification_intro() const // model << "train data set modifier(s): " << _pModyfikator->print() << std::endl; auto report = extra_report (); - if (not report.empty()) - model << report << std::endl; - + if (not report.empty()) + model << report << std::endl; + return model.str(); } CATCH; @@ -529,8 +703,11 @@ void ksi::three_way_decision_nfs::printRulebase(std::ostream& ss) ss << p->get_nfs_name() << std::endl; ss << "number of rules: " << p->get_number_of_rules() << std::endl; ss << "threshold type: " << ksi::to_string(p->get_threshold_type()) << std::endl; - ss << "threshold value: " << p->get_threshold_value() << i << std::endl; - ss << "noncommitment width: " << _noncommitment_widths[i] << std::endl; + double threshold = p->get_threshold_value(); + ss << "threshold value: " << threshold << std::endl; + double delta = _noncommitment_widths[i]; + ss << "noncommitment width: " << delta << std::endl; + ss << "noncommintment interval: [" << threshold - delta << ", " << threshold + delta << "]" << std::endl; ss << "size of train dataset: " << p->get_train_dataset_size() << std::endl; ss << "cardinality of train dataset: " << p->get_train_dataset_cardinality() << std::endl; ss << "--------------------------------------" << std::endl; @@ -593,8 +770,6 @@ std::string ksi::three_way_decision_nfs::report_average_number_of_rules_for_trai return std::string {"average number of rules used for one train data item: "} + stringified; } - - namespace ksi { std::ostream & operator<< (std::ostream & ss, const ksi::three_way_decision_nfs & system) @@ -605,10 +780,8 @@ namespace ksi ss << std::endl; ss << "number of classifiers: " << system._cascade.size() << std::endl; - - - for (std::size_t i = 0; i < system._cascade.size(); i++) + for (std::size_t i = 0; i < system._cascade.size(); ++i) { auto & p = system._cascade[i]; ss << "classifier: " << i << std::endl; @@ -616,11 +789,34 @@ namespace ksi ss << p->get_nfs_name() << std::endl; ss << "number of rules: " << p->get_number_of_rules() << std::endl; ss << "threshold type: " << to_string(p->get_threshold_type()) << std::endl; - ss << "threshold value: " << p->get_threshold_value() << std::endl; - ss << "noncommitment width: " << system._noncommitment_widths[i] << std::endl; + double threshold = p->get_threshold_value(); + ss << "threshold value: " << threshold << std::endl; + double delta = system._noncommitment_widths[i]; + 
ss << "noncommitment width: " << delta << std::endl; + ss << "noncommintment interval: [" << threshold - delta << ", " << threshold + delta << "]" << std::endl; + ss << "size of train dataset: " << p->get_train_dataset_size() << std::endl; + ss << "cardinality of train dataset: " << p->get_train_dataset_cardinality() << std::endl; ss << std::endl; } ss << "-------------------------" << std::endl; return ss; } } + +void ksi::three_way_decision_nfs::elaborate_cascade_f1scores() +{ + _cascade_F1score_train.clear(); + _cascade_F1score_test.clear(); + + for (std::size_t i = 0; i < _cascade.size(); ++i) + { + _cascade_F1score_train.push_back(get_train_F1score_for_part_of_the_cascade(i)); + _cascade_F1score_test.push_back(get_test_F1score_for_part_of_the_cascade(i)); + } +} + +void ksi::three_way_decision_nfs::run_extra_activities_for_the_model() +{ + ksi::three_way_decision_nfs::elaborate_cascade_f1scores(); +} + diff --git a/source/neuro-fuzzy/three_way_decision_nfs.h b/source/neuro-fuzzy/three_way_decision_nfs.h index 43652ae..411145c 100644 --- a/source/neuro-fuzzy/three_way_decision_nfs.h +++ b/source/neuro-fuzzy/three_way_decision_nfs.h @@ -25,7 +25,18 @@ namespace ksi mutable std::size_t _number_of_data_items {0}; mutable double _dbTrainAverageNumerOfRulesUsed {0.0}; mutable double _dbTestAverageNumerOfRulesUsed {0.0}; - + + protected: + /** The vector stores F1score for the train dataset. + * The i-th cell holds the value for [0 .. i-1] NFS in the cascade. + * @date 2024-04-30 */ + std::vector _cascade_F1score_train; + + /** The vector stores F1score for the test dataset. + * The i-th cell holds the value for [0 .. i-1] NFS in the cascade. + * @date 2024-04-30 */ + std::vector _cascade_F1score_test; + public: std::vector> get_answers_for_train_classification() override; @@ -193,9 +204,17 @@ namespace ksi /** The method elaborates answer for classification. @return a pair: elaborated numeric, class - @date 2021-09-27 - */ + @date 2021-09-27 */ virtual std::pair answer_classification (const datum & item) const override; + + + /** The method elaborates answer for classification. + * @param item data item to elaborated answer for + * @param depth number of systems (in the cascade) used to elaborate the answer + * Depth is indexed from 0 up to _cascade.size() - 1. + * @return a pair: elaborated numeric, class + * @date 2024-04-29 */ + virtual std::pair answer_classification (const datum & item, const std::size_t depth) const; /** The method prints rule base. * @param ss ostream to print to */ @@ -216,6 +235,35 @@ namespace ksi * @date 2024-04-21 */ virtual std::string get_brief_cascade_names () const; + /** @param cascade_depth number of neuro-fuzzy systems that will be used to elaborate answers + * Depth is indexed from 0 up to _cascade.size() - 1. + * @return expected class, elaborated_numeric answer, elaborated_class for the test dataset + * @date 2024-04-29 */ + virtual std::vector> get_answers_for_train_classification(const std::size_t cascade_depth); + + /** @param cascade_depth number of neuro-fuzzy systems that will be used to elaborate answers + * Depth is indexed from 0 up to _cascade.size() - 1. + * @return expected class, elaborated_numeric answer, elaborated_class for the test dataset + * @date 2024-04-29 */ + virtual std::vector> get_answers_for_test_classification(const std::size_t cascade_depth); + + /** @return The F1score for the train dataset for a part of the cascade. + * Only ``depth'' systems in the cascade are used. + * Depth is indexed from 0 up to _cascade.size() - 1. 
+ * @date 2024-04-29 */ + double get_train_F1score_for_part_of_the_cascade(const std::size_t depth); + + /** @return The F1score for the test dataset for a part of the cascade. + * Only ``depth'' systems in the cascade are used. + * Depth is indexed from 0 up to _cascade.size() - 1. + * @date 2024-04-29 */ + double get_test_F1score_for_part_of_the_cascade(const std::size_t depth); + + /** The method elaborates F1scores for all depths of the cascade + * (both for the train and test datasets). + @date 2024-04-30 */ + void elaborate_cascade_f1scores(); + protected: /** @return The method returns always an empty string. @date 2021-09-28 @@ -250,28 +298,40 @@ namespace ksi protected: std::string extra_report() const override; virtual std::string classification_intro() const override; - virtual void copy_fields(const three_way_decision_nfs & _3wnfs); + virtual void three_copy_fields(const three_way_decision_nfs & _3wnfs); public: /** A report entry on the average number of rules for the train dataset. * It is used in three way decision NFS and fuzzy three way decision NFS. - * @return empty string + * @return string with the report * @date 2024-03-24 */ virtual std::string report_average_number_of_rules_for_train () const override; public: /** @return average number of rules fired for the test dateset - * @date 2024-03-28 */ + * @date 2024-03-28 */ virtual double get_number_of_rules () const override; public: /** A report entry on the average number of rules for the test dataset. * It is used in three way decision NFS and fuzzy three way decision NFS. - @ return empty string * - @date 2024-03-24 */ + * @return string with the report + * @date 2024-03-24 */ virtual std::string report_average_number_of_rules_for_test () const override; + protected: + /** This method elaborates the F1score for all depths + * of the cascade for the train and test datasets. + * @date 2024-05-02 */ + virtual void run_extra_activities_for_the_model() override; + + protected: + /** The method prints the F1scores for all depths + * of the cascade for the train and test datasets + * into a string and returns it.
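[Illustrative sketch, not part of the patch: one possible use of the new public per-depth F1-score accessors on an already trained three_way_decision_nfs. The helper function name and the cascade_size parameter are assumptions; the patch itself does not show a public accessor for the cascade size.]

#include <cstddef>
#include <iostream>
#include "neuro-fuzzy/three_way_decision_nfs.h"   // include path assumed

// Prints train and test F1 scores for every cascade depth.
// depth is an index: for depth i, systems 0 .. i answer the data items;
// an item is passed down the cascade only while its numeric answer falls
// inside the noncommitment interval [threshold - delta, threshold + delta].
void report_cascade_f1 (ksi::three_way_decision_nfs & cascade_nfs,
                        const std::size_t cascade_size)
{
   for (std::size_t depth = 0; depth < cascade_size; ++depth)
   {
      std::cout << "depth " << depth
                << "  train F1: " << cascade_nfs.get_train_F1score_for_part_of_the_cascade(depth)
                << "  test F1: "  << cascade_nfs.get_test_F1score_for_part_of_the_cascade(depth)
                << std::endl;
   }
}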
+ * @date 2024-05-02 */ + virtual std::string print_f1scores_cascade() const; public: friend diff --git a/source/neuro-fuzzy/tsk.cpp b/source/neuro-fuzzy/tsk.cpp index 4111a0c..3c199af 100644 --- a/source/neuro-fuzzy/tsk.cpp +++ b/source/neuro-fuzzy/tsk.cpp @@ -1,36 +1,11 @@ /** @file */ -#include -#include -#include -#include #include -#include #include "tsk.h" -#include "rule.h" -#include "premise.h" -#include "consequence-TSK.h" -#include "../tnorms/t-norm-product.h" -#include "../partitions/cluster.h" #include "../partitions/fcm.h" -#include "../partitions/partition.h" -#include "../tnorms/t-norm-product.h" -#include "../implications/imp-reichenbach.h" -#include "../descriptors/descriptor-gaussian.h" -#include "../auxiliary/least-error-squares-regression.h" -#include "../auxiliary/tempus.h" -#include "../auxiliary/clock.h" -#include "../auxiliary/confusion-matrix.h" #include "../auxiliary/roc.h" -#include "../service/debug.h" -#include "../auxiliary/error-RMSE.h" -#include "../auxiliary/error-MAE.h" -#include "../common/number.h" -#include "../readers/reader-complete.h" #include "../common/data-modifier.h" -#include "../common/data-modifier-normaliser.h" -#include "../partitions/partition.h" #include "../gan/discriminative_model.h" #include "../gan/generative_model.h" @@ -88,15 +63,9 @@ ksi::tsk::tsk(int nRules, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm, ksi::fcm (nRules, nClusteringIterations)) { - set_name(); -// debug(_pPartitioner); -// debug(__FUNCTION__); + set_name(); } - - - - ksi::tsk::tsk(const std::string& trainDataFile, const std::string& testDataFile, const std::string& resultsFile, @@ -108,8 +77,7 @@ ksi::tsk::tsk(const std::string& trainDataFile, const bool bNormalisation) : neuro_fuzzy_system (trainDataFile, testDataFile, resultsFile, - ksi::fcm(nRules, - nClusteringIterations)), + ksi::fcm(nRules, nClusteringIterations)), abstract_tsk (nRules, nClusteringIterations, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm) { @@ -137,15 +105,15 @@ ksi::tsk::tsk(const std::string& trainDataFile, set_name(); } -ksi::tsk::tsk(int nRules, - int nClusteringIterations, - int nTuningIterations, - double dbLearningCoefficient, - bool bNormalisation, +ksi::tsk::tsk(const int nRules, + const int nClusteringIterations, + const int nTuningIterations, + const double dbLearningCoefficient, + const bool bNormalisation, const ksi::t_norm & tnorm, - double positive_class, - double negative_class, - ksi::roc_threshold threshold_type) + const double positive_class, + const double negative_class, + const ksi::roc_threshold threshold_type) : neuro_fuzzy_system (ksi::fcm(nRules, nClusteringIterations)) , abstract_tsk (nRules, nClusteringIterations, @@ -154,20 +122,39 @@ ksi::tsk::tsk(int nRules, positive_class, negative_class, threshold_type) { set_name(); -// debug(_pPartitioner); } +ksi::tsk::tsk(const int nRules, + const int nClusteringIterations, + const int nTuningIterations, + const double dbLearningCoefficient, + const bool bNormalisation, + const ksi::t_norm & tnorm, + const double positive_class, + const double negative_class, + const double threshold_value) + : neuro_fuzzy_system (ksi::fcm(nRules, nClusteringIterations)) + , + abstract_tsk (nRules, nClusteringIterations, + nTuningIterations, dbLearningCoefficient, + bNormalisation, tnorm, ksi::fcm (nRules, nClusteringIterations), + positive_class, negative_class, threshold_value) +{ + set_name(); +} -ksi::tsk::tsk(int nRules, - int nClusteringIterations, - int nTuningIterations, - double 
dbLearningCoefficient, - bool bNormalisation, + +ksi::tsk::tsk(const int nRules, + const int nClusteringIterations, + const int nTuningIterations, + const double dbLearningCoefficient, + const bool bNormalisation, const ksi::t_norm& tnorm, - double positive_class, - double negative_class, - ksi::roc_threshold threshold_type, - const ksi::data_modifier& modifier) + const double positive_class, + const double negative_class, + const ksi::roc_threshold threshold_type, + const ksi::data_modifier& modifier + ) : neuro_fuzzy_system (ksi::fcm(nRules, nClusteringIterations), modifier) , abstract_tsk (nRules, nClusteringIterations, @@ -180,10 +167,30 @@ ksi::tsk::tsk(int nRules, } +ksi::tsk::tsk(const int nRules, + const int nClusteringIterations, + const int nTuningIterations, + const double dbLearningCoefficient, + const bool bNormalisation, + const ksi::t_norm& tnorm, + const double positive_class, + const double negative_class, + const ksi::data_modifier& modifier, + const double threshold_value + ) + : neuro_fuzzy_system (ksi::fcm(nRules, nClusteringIterations), modifier) + , + abstract_tsk (nRules, nClusteringIterations, + nTuningIterations, dbLearningCoefficient, + bNormalisation, tnorm, ksi::fcm (nRules, nClusteringIterations), + positive_class, negative_class, threshold_value) +{ + set_name(); +// debug(_pPartitioner); +} ksi::tsk::~tsk() { - } diff --git a/source/neuro-fuzzy/tsk.h b/source/neuro-fuzzy/tsk.h index ef3923f..d259c4d 100644 --- a/source/neuro-fuzzy/tsk.h +++ b/source/neuro-fuzzy/tsk.h @@ -3,7 +3,7 @@ #ifndef TSK_H #define TSK_H - +#include #include #include @@ -26,9 +26,7 @@ namespace ksi */ class tsk : public abstract_tsk { - public: - tsk(); /** constructor @@ -57,8 +55,6 @@ namespace ksi double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const ksi::data_modifier & modifier); - - /** constructor * @param nRules number of rules * @param nClusteringIterations number of clustering iterations @@ -70,8 +66,21 @@ namespace ksi * @param threshold_type classification threshold type * @date 2019-12-22 */ - tsk (int nRules, int nClusteringIterations, int nTuningIterations, - double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, double positive_class, double negative_class, ksi::roc_threshold threshold_type); + tsk (const int nRules, const int nClusteringIterations, const int nTuningIterations, const double dbLearningCoefficient, const bool bNormalisation, const t_norm & tnorm, const double positive_class, const double negative_class, const ksi::roc_threshold threshold_type); + + + /** constructor + * @param nRules number of rules + * @param nClusteringIterations number of clustering iterations + * @param nTuningIterations number of tuning iterations + * @param dbLearningCoefficient learning coefficient for gradient method + * @param tnorm a t-norm + * @param dbPositiveClass label of a positive class + * @param dbNegativeClass label of a negative class + * @param threshold_value classification threshold value + * @date 2024-05-09 + */ + tsk (const int nRules, const int nClusteringIterations, const int nTuningIterations, const double dbLearningCoefficient, const bool bNormalisation, const t_norm & tnorm, const double positive_class, const double negative_class, const double threshold_value = 0.5); /** constructor @@ -86,11 +95,21 @@ namespace ksi * @param modifier data modifier one or a chain of modifiers * @date 2021-01-09 */ - tsk (int nRules, int nClusteringIterations, int nTuningIterations, - double dbLearningCoefficient, bool 
bNormalisation, const t_norm & tnorm, double positive_class, double negative_class, ksi::roc_threshold threshold_type, - const ksi::data_modifier & modifier); - + tsk (const int nRules, const int nClusteringIterations, const int nTuningIterations, const double dbLearningCoefficient, const bool bNormalisation, const t_norm & tnorm, const double positive_class, const double negative_class, const ksi::roc_threshold threshold_type, const ksi::data_modifier & modifier); + /** constructor + * @param nRules number of rules + * @param nClusteringIterations number of clustering iterations + * @param nTuningIterations number of tuning iterations + * @param dbLearningCoefficient learning coefficient for gradient method + * @param tnorm a t-norm + * @param dbPositiveClass label of a positive class + * @param dbNegativeClass label of a negative class + * @param modifier data modifier one or a chain of modifiers + * @param threshold_value classification threshold value + * @date 2024-05-09 + */ + tsk (const int nRules, const int nClusteringIterations, const int nTuningIterations, const double dbLearningCoefficient, const bool bNormalisation, const t_norm & tnorm, const double positive_class, const double negative_class, const ksi::data_modifier & modifier, const double threshold_value = 0.5); /** * @param trainDataFile diff --git a/source/neuro-fuzzy/weighted_annbfis.cpp b/source/neuro-fuzzy/weighted_annbfis.cpp index 1532b63..2e10c15 100644 --- a/source/neuro-fuzzy/weighted_annbfis.cpp +++ b/source/neuro-fuzzy/weighted_annbfis.cpp @@ -31,13 +31,19 @@ ksi::weighted_annbfis::weighted_annbfis(int nRules, double positive_class, double negative_class, const ksi::roc_threshold threshold_type -) :// neuro_fuzzy_system(ksi::fcm(nRules, nClusteringIterations)) -//, +) : abstract_annbfis (nRules, nClusteringIterations, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm, imp, ksi::fcm(nRules, nClusteringIterations), positive_class, negative_class, threshold_type ) { set_name(); } +ksi::weighted_annbfis::weighted_annbfis(int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm& tnorm, const implication& imp, double positive_class, double negative_class, const double threshold_value): +abstract_annbfis (nRules, nClusteringIterations, nTuningIterations, dbLearningCoefficient, bNormalisation, tnorm, imp, ksi::fcm(nRules, nClusteringIterations), positive_class, negative_class, threshold_value) +{ + set_name(); +} + + ksi::weighted_annbfis::weighted_annbfis (const ksi::weighted_annbfis & wzor) : ksi::neuro_fuzzy_system(wzor), ksi::abstract_annbfis(wzor) { // copy what is to copy diff --git a/source/neuro-fuzzy/weighted_annbfis.h b/source/neuro-fuzzy/weighted_annbfis.h index 79ea357..7c0ea1e 100644 --- a/source/neuro-fuzzy/weighted_annbfis.h +++ b/source/neuro-fuzzy/weighted_annbfis.h @@ -61,7 +61,23 @@ namespace ksi */ weighted_annbfis (int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, double positive_class, double negative_class, const ksi::roc_threshold threshold_type); - + + public: + /** constructor + * @param nRules number of rules + * @param nClusteringIterations number of clustering iterations + * @param nTuningIterations number of tuning iterations + * @param dbLearningCoefficient learning coefficient for gradient method + * @param tnorm a t-norm + * @param imp implication + * @param dbPositiveClass label of a positive class 
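[Illustrative sketch, not part of the patch: one possible call of the new manual-threshold constructor of ksi::tsk added here. The include paths, the t_norm_product class name, and all numeric values are assumptions; whether the trailing double switches the system to roc_threshold::manual is inferred from the analogous abstract_annbfis constructors in this patch.]

#include "neuro-fuzzy/tsk.h"          // include path assumed
#include "tnorms/t-norm-product.h"    // include path assumed

int main()
{
   ksi::t_norm_product tnorm;          // product t-norm; class name assumed from the header name
   const double positive_class  = 1.0; // example class labels (assumed)
   const double negative_class  = 0.0;
   const double threshold_value = 0.4; // fixed classification threshold, no ROC-based search

   // new constructor from this patch: the trailing double is the manual threshold value
   ksi::tsk classifier (5, 100, 100, 0.1, true, tnorm,
                        positive_class, negative_class, threshold_value);
   return 0;
}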
+ * @param dbNegativeClass label of a negative class + * @param threshold_value classification threshold value + * @date 2024-05-09 + */ + weighted_annbfis (int nRules, int nClusteringIterations, int nTuningIterations, double dbLearningCoefficient, bool bNormalisation, const t_norm & tnorm, const implication & imp, double positive_class, double negative_class, const double threshold_value); + + /** The method creates a fuzzy rulebase from the dataset. * @param nClusteringIterations number of clustering iterations * @param nTuningIterations number of tuning iterations