From f26909a08df54b8c85e6bc225b9267319ae3fe6b Mon Sep 17 00:00:00 2001
From: mdabros
Date: Wed, 24 Jan 2024 21:19:11 +0100
Subject: [PATCH 01/15] Add and enable editor config

---
 .editorconfig             | 362 ++++++++++++++++++++++++++++++++++++++
 SharpLearning.sln         |   1 +
 src/Directory.Build.props |   5 +-
 3 files changed, 365 insertions(+), 3 deletions(-)
 create mode 100644 .editorconfig

diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 00000000..866a4a94
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,362 @@
+# Learn more about editorconfig here: https://docs.microsoft.com/en-us/visualstudio/ide/editorconfig-code-style-settings-reference
+# Also see: https://docs.microsoft.com/en-us/dotnet/fundamentals/code-analysis/code-style-rule-options
+root = true
+
+# All files
+[*]
+indent_style = space
+spelling_languages = en-us
+spelling_checkable_types = strings,identifiers,comments
+spelling_error_severity = warning
+spelling_exclusion_path = .\exclusion.dic
+
+# XML project files
+[*.{csproj,vbproj,vcxproj,vcxproj.filters,proj,projitems,shproj}]
+indent_size = 2
+
+# XML config files
+[*.{props,targets,ruleset,config,nuspec,resx,vsixmanifest,vsct}]
+indent_size = 2
+
+# Code files
+[*.{cs,csx,vb,vbx}]
+indent_size = 4
+insert_final_newline = true
+charset = utf-8-bom
+
+# C# files
+
+[*.cs]
+#Formatting - indentation options
+
+#indent switch case contents.
+csharp_indent_case_contents = true
+#indent switch labels
+csharp_indent_switch_labels = true
+
+#Formatting - new line options
+
+#place catch statements on a new line
+csharp_new_line_before_catch = true
+#place else statements on a new line
+csharp_new_line_before_else = true
+#require braces to be on a new line for lambdas, types, methods, properties, and control_blocks (also known as "Allman" style)
+# https://docs.microsoft.com/en-us/dotnet/fundamentals/code-analysis/style-rules/formatting-rules#new-line-options
+# Option values:
+# all
+# none
+# accessors, anonymous_methods, anonymous_types, control_blocks, events, indexers, lambdas, local_functions, methods, object_collection_array_initializers, properties, types
+csharp_new_line_before_open_brace = accessors, lambdas, types, methods, properties, control_blocks, object_collection_array_initializers, anonymous_methods, anonymous_types
+
+#Formatting - organize using options
+
+#sort System.* using directives alphabetically, and place them before other usings
+dotnet_sort_system_directives_first = true
+
+csharp_style_namespace_declarations = file_scoped:warning
+
+#Formatting - spacing options
+
+#require NO space between a cast and the value
+csharp_space_after_cast = false
+#require a space after the colon for bases or interfaces in a type declaration
+csharp_space_after_colon_in_inheritance_clause = true
+#require a space after a keyword in a control flow statement such as a for loop
+csharp_space_after_keywords_in_control_flow_statements = true
+#require a space before the colon for bases or interfaces in a type declaration
+csharp_space_before_colon_in_inheritance_clause = true
+#remove space within empty argument list parentheses
+csharp_space_between_method_call_empty_parameter_list_parentheses = false
+#remove space between method call name and opening parenthesis
+csharp_space_between_method_call_name_and_opening_parenthesis = false
+#do not place space characters after the opening parenthesis and before the closing parenthesis of a method call
+csharp_space_between_method_call_parameter_list_parentheses = false
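+
+# Illustrative only (not part of the original config; Run, Model, and IModel
+# are hypothetical names): code shaped by the spacing and new-line options
+# above would look like, e.g.
+#   var x = (int)value;       // no space after a cast
+#   if (x > 0) { Run(); }     // space after the control-flow keyword
+#   class Model : IModel      // spaces around the inheritance colon
+#   {                         // "Allman" style open brace on its own line
+#   }
+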
+#remove space within empty parameter list parentheses for a method declaration
+csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
+#do not place space characters after the opening parenthesis and before the closing parenthesis of a method declaration parameter list
+csharp_space_between_method_declaration_parameter_list_parentheses = false
+
+#Formatting - wrapping options
+
+#leave code block on single line
+csharp_preserve_single_line_blocks = true
+#leave statements and member declarations on the same line
+csharp_preserve_single_line_statements = true
+
+#Style - expression bodied member options
+
+#prefer block bodies for accessors
+csharp_style_expression_bodied_accessors = false:none
+#prefer block bodies for constructors
+csharp_style_expression_bodied_constructors = false:silent
+#prefer block bodies for methods
+csharp_style_expression_bodied_methods = false:silent
+#prefer expression-bodied members for properties
+csharp_style_expression_bodied_properties = true:suggestion
+
+#Style - expression level options
+
+#prefer out variables to be declared before the method call
+csharp_style_inlined_variable_declaration = false:suggestion
+#prefer the language keyword for member access expressions, instead of the type name, for types that have a keyword to represent them
+dotnet_style_predefined_type_for_member_access = true:suggestion
+
+#Style - implicit and explicit types
+
+#prefer var when declaring variables with built-in system types such as int
+csharp_style_var_for_built_in_types = true:suggestion
+#prefer var when the type is already mentioned on the right-hand side of a declaration expression
+csharp_style_var_when_type_is_apparent = true:suggestion
+
+#Style - language keyword and framework type options
+
+#prefer the language keyword for local variables, method parameters, and class members, instead of the type name, for types that have a keyword to represent them
+dotnet_style_predefined_type_for_locals_parameters_members = true:suggestion
+
+#Style - qualification options
+
+#prefer fields not to be prefaced with this. or Me. in Visual Basic
+dotnet_style_qualification_for_field = false:suggestion
+#prefer methods not to be prefaced with this. or Me. in Visual Basic
+dotnet_style_qualification_for_method = false:suggestion
+#prefer properties not to be prefaced with this. or Me. in Visual Basic
+dotnet_style_qualification_for_property = false:suggestion
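+
+# Illustrative only (not part of the original config; Sum, Clear, and m_sum
+# are hypothetical names): under the style preferences above, code would
+# tend toward, e.g.
+#   public double Sum => m_sum;    // expression-bodied property preferred
+#   public void Clear()            // block body preferred for methods
+#   {
+#       m_sum = 0.0;
+#   }
+#   var count = 0;                 // var for built-in types
+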
+# Avoid 'private' on members, methods etc.
+dotnet_style_require_accessibility_modifiers = omit_if_default:error
+
+# Ensure readonly fields are marked as such
+dotnet_style_readonly_field = true:error
+
+# Prefix private readonly fields with m_
+dotnet_naming_rule.private_members_with_underscore.symbols = private_members
+dotnet_naming_rule.private_members_with_underscore.style = prefix_underscore
+dotnet_naming_rule.private_members_with_underscore.severity = suggestion
+
+dotnet_naming_symbols.private_members.applicable_kinds = field
+dotnet_naming_symbols.private_members.applicable_accessibilities = private
+dotnet_naming_symbols.private_members.required_modifiers = readonly
+
+dotnet_naming_style.prefix_underscore.capitalization = camel_case
+dotnet_naming_style.prefix_underscore.required_prefix = m_
+dotnet_naming_style.prefix_underscore.required_suffix =
+dotnet_naming_style.prefix_underscore.word_separator =
+
+# Non-public instance fields are camelCase and start with m_
+dotnet_naming_rule.instance_fields_should_be_camel_case.severity = suggestion
+dotnet_naming_rule.instance_fields_should_be_camel_case.symbols = instance_fields
+dotnet_naming_rule.instance_fields_should_be_camel_case.style = instance_field_style
+
+dotnet_naming_symbols.instance_fields.applicable_kinds = field
+dotnet_naming_symbols.instance_fields.applicable_accessibilities = internal, protected, protected_internal, private_protected, private
+
+dotnet_naming_style.instance_field_style.capitalization = camel_case
+dotnet_naming_style.instance_field_style.required_prefix = m_
+
+# static readonly PascalCase
+dotnet_naming_rule.static_readonly_pascal_case.symbols = static_readonly
+dotnet_naming_rule.static_readonly_pascal_case.style = pascal_case
+dotnet_naming_rule.static_readonly_pascal_case.severity = suggestion
+
+dotnet_naming_symbols.static_readonly.applicable_kinds = field
+dotnet_naming_symbols.static_readonly.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
+dotnet_naming_symbols.static_readonly.required_modifiers = static, readonly
+
+dotnet_naming_style.pascal_case.capitalization = pascal_case
+
+# const PascalCase
+dotnet_naming_rule.const_pascal_case.symbols = all_const
+dotnet_naming_rule.const_pascal_case.style = pascal_case
+dotnet_naming_rule.const_pascal_case.severity = suggestion
+
+dotnet_naming_symbols.all_const.applicable_kinds = field
+dotnet_naming_symbols.all_const.applicable_accessibilities = public, internal, protected, protected_internal, private_protected, private
+dotnet_naming_symbols.all_const.required_modifiers = const
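+
+# Illustrative only (not part of the original config): field names on a
+# hypothetical Example class that satisfy the naming rules above, e.g.
+#   class Example
+#   {
+#       const int MaxIterations = 10;              // const -> PascalCase
+#       static readonly double DefaultRate = 0.1;  // static readonly -> PascalCase
+#       readonly double m_learningRate;            // private readonly -> m_ + camelCase
+#   }
+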
+# Null-checking preferences
+csharp_style_throw_expression = true:error
+
+# CA1034: Nested types should not be visible
+dotnet_diagnostic.CA1034.severity = none
+
+# IDE0021: Use block body for constructors
+dotnet_diagnostic.IDE0021.severity = silent
+
+# IDE0022: Use block body for methods
+dotnet_diagnostic.IDE0022.severity = silent
+
+# IDE0055: Fix formatting
+dotnet_diagnostic.IDE0055.severity = error
+
+# CA1050: Declare types in namespaces
+dotnet_diagnostic.CA1050.severity = none
+
+# IDE0052: Remove unread private members
+dotnet_diagnostic.IDE0052.severity = warning
+
+# IDE0051: Remove unused private members
+dotnet_diagnostic.IDE0051.severity = warning
+
+# IDE0066: Convert switch statement to expression
+dotnet_diagnostic.IDE0066.severity = silent
+
+# CA2211: Non-constant fields should not be visible
+dotnet_diagnostic.CA2211.severity = warning
+
+# CA1822: Member does not access instance data and can be marked as static
+dotnet_diagnostic.CA1822.severity = warning
+
+# CA1012: Abstract types should not have public constructors
+dotnet_diagnostic.CA1012.severity = warning
+
+# CA1044: Properties should not be write only
+dotnet_diagnostic.CA1044.severity = warning
+
+# CA1052: Static holder types should be Static or NotInheritable
+dotnet_diagnostic.CA1052.severity = warning
+
+# CA1063: Implement IDisposable correctly
+dotnet_diagnostic.CA1063.severity = warning
+
+# CA1065: Do not raise exceptions in unexpected locations
+dotnet_diagnostic.CA1065.severity = warning
+
+# CA1508: Avoid dead conditional code
+dotnet_diagnostic.CA1508.severity = suggestion
+
+# CA1802: Use Literals Where Appropriate
+dotnet_diagnostic.CA1802.severity = warning
+
+# CA1813: Avoid unsealed attributes
+dotnet_diagnostic.CA1813.severity = warning
+
+# CA1816: Call GC.SuppressFinalize correctly
+dotnet_diagnostic.CA1816.severity = warning
+
+# CA1820: Test for empty strings using string length
+dotnet_diagnostic.CA1820.severity = warning
+
+# CA1823: Avoid unused private fields
+dotnet_diagnostic.CA1823.severity = warning
+
+# CA1834: Use StringBuilder.Append(char) for single character strings
+dotnet_diagnostic.CA1834.severity = warning
+
+# CA1851: Possible multiple enumerations of IEnumerable collection
+dotnet_diagnostic.CA1851.severity = warning
+
+# CA1852: Type 'XYZ' can be sealed because it has no subtypes in its containing assembly and is not externally visible
+dotnet_diagnostic.CA1852.severity = warning
+
+# CA2007: Do not directly await a Task
+dotnet_diagnostic.CA2007.severity = warning
+
+# CA2008: Do not create tasks without passing a TaskScheduler
+dotnet_diagnostic.CA2008.severity = warning
+
+# CA2208: Instantiate argument exceptions correctly
+dotnet_diagnostic.CA2208.severity = warning
+
+# CA2213: Disposable fields should be disposed
+dotnet_diagnostic.CA2213.severity = warning
+
+# CA2214: Do not call overridable methods in constructors
+dotnet_diagnostic.CA2214.severity = warning
+
+# CA2227: Collection properties should be read only
+dotnet_diagnostic.CA2227.severity = warning
+
+# IDE0090: Use 'new(...)'
+dotnet_diagnostic.IDE0090.severity = warning
+
+# IDE0039: Use local function
+dotnet_diagnostic.IDE0039.severity = none
+
+# IDE0044: Add readonly modifier
+dotnet_diagnostic.IDE0044.severity = warning
+
+# IDE0005: Remove unnecessary using directives
+dotnet_diagnostic.IDE0005.severity = warning
+
+# CA1002: Do not expose generic lists (don't follow the suggestion text; prefer IReadOnlyList)
+dotnet_diagnostic.CA1002.severity = suggestion
+# CA1003: Use generic event handler instances (never follow this - it's dumb coupling)
+dotnet_diagnostic.CA1003.severity = none
+# CA1005: Avoid excessive parameters on generic types (rarely relevant)
+dotnet_diagnostic.CA1005.severity = none
+# CA1008: Enums should have zero value (not always!)
+dotnet_diagnostic.CA1008.severity = none
+# CA1014: Mark assemblies with CLSCompliantAttribute
+dotnet_diagnostic.CA1014.severity = none
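+
+# Illustrative only (not part of the original config; m_data and data are
+# hypothetical names): with csharp_style_throw_expression = true:error
+# above, prefer e.g.
+#   m_data = data ?? throw new ArgumentNullException(nameof(data));
+# over a separate if/throw statement before the assignment.
+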
+# CA1028: Enum storage should be Int32 (no!)
+dotnet_diagnostic.CA1028.severity = none
+# CA1030: Use events where appropriate (flags incorrect places)
+dotnet_diagnostic.CA1030.severity = none
+# CA1031: Do not catch general exception types (in general no, but for externally facing code sometimes fine)
+dotnet_diagnostic.CA1031.severity = none
+# CA1032: Implement standard exception constructors (not having an empty ctor is fine)
+dotnet_diagnostic.CA1032.severity = none
+# CA1040: Avoid empty interfaces (completely ignore this, empty marker interfaces are fine)
+dotnet_diagnostic.CA1040.severity = none
+# CA1060: Move P/Invokes to NativeMethods class (a bit pedantic, so not enforced)
+dotnet_diagnostic.CA1060.severity = none
+# CA1062: Validate arguments of public methods (perhaps relevant in a widely used class library, but less so at app level)
+dotnet_diagnostic.CA1062.severity = none
+# CA1305: Specify IFormatProvider (definitely relevant in some cases, but overly pedantic as a blanket rule)
+dotnet_diagnostic.CA1305.severity = none
+# CA1307: Specify StringComparison for clarity
+dotnet_diagnostic.CA1307.severity = none
+# CA1308: Normalize strings to uppercase (flags odd places and is considered irrelevant here)
+dotnet_diagnostic.CA1308.severity = none
+# CA1309: Use ordinal string comparison
+dotnet_diagnostic.CA1309.severity = none
+# CA1310: Specify StringComparison for correctness
+dotnet_diagnostic.CA1310.severity = none
+# CA1501: Avoid excessive inheritance (too pedantic, and view models will often violate it)
+dotnet_diagnostic.CA1501.severity = none
+# CA1506: Avoid excessive class coupling (composition/Autofac code intentionally violates this)
+dotnet_diagnostic.CA1506.severity = none
+# CA1707: Identifiers should not contain underscores (identifiers are often suffixed with `_unit` to denote units)
+dotnet_diagnostic.CA1707.severity = none
+# CA1711: Identifiers should not have incorrect suffix (completely wrong)
+dotnet_diagnostic.CA1711.severity = none
+# CA1716: Identifiers should not match keywords (flags `Stop` and other irrelevant things)
+dotnet_diagnostic.CA1716.severity = none
+# CA1724: Type names should not match namespaces (flags nested class names and conflicts with any namespace)
+dotnet_diagnostic.CA1724.severity = none
+# CA1725: Parameter names should match base declaration (often pragmatically violated for succinctness)
+dotnet_diagnostic.CA1725.severity = none
+# CA1805: Do not initialize unnecessarily (we prefer explicitness)
+dotnet_diagnostic.CA1805.severity = none
+# CA1810: Initialize reference type static fields inline (has false positives)
+dotnet_diagnostic.CA1810.severity = none
+# CA1814: Prefer jagged arrays over multidimensional (NEVER EVER FOLLOW THIS!)
+dotnet_diagnostic.CA1814.severity = none
+# CA1815: Override equals and operator equals on value types (relevant, but in most cases a record struct is recommended instead)
+dotnet_diagnostic.CA1815.severity = none
+# CA1819: Properties should not return arrays (lots of settings, tests, and similar code do this; otherwise definitely relevant)
+dotnet_diagnostic.CA1819.severity = none
+# CA2000: Dispose objects before losing scope (while relevant, it has too many false positives for our code)
+dotnet_diagnostic.CA2000.severity = none
+# CA2201: Do not raise reserved exception types (a good suggestion, but failing the build over it is too pedantic)
+dotnet_diagnostic.CA2201.severity = none
+# CA2217: Do not mark enums with FlagsAttribute (will fail on correctly defined flags enums)
+dotnet_diagnostic.CA2217.severity = none
+# CA2225: Operator overloads have named alternates (not always necessary)
+dotnet_diagnostic.CA2225.severity = none
+# CA5394: Do not use insecure randomness (we don't use randomness for security)
+dotnet_diagnostic.CA5394.severity = none
+
+# CS1573: Parameter 'parameter' has no matching param tag in the XML comment for 'parameter' (but other parameters do)
+dotnet_diagnostic.CS1573.severity = none
+# CS1591: Missing XML comment for publicly visible type or member 'Type_or_Member' (we don't document everything)
+dotnet_diagnostic.CS1591.severity = none
+
+# SkiaSharp only ships a distribution for net7-win-x64, which triggers a warning - trying to disable it
+# NETSDK1206: Found version-specific or distribution-specific runtime identifier(s)
+dotnet_diagnostic.NETSDK1206.severity = none
diff --git a/SharpLearning.sln b/SharpLearning.sln
index 49eec32d..5d60d821 100644
--- a/SharpLearning.sln
+++ b/SharpLearning.sln
@@ -64,6 +64,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ci", "ci", "{45CBAF1D-3116-
 EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "root", "root", "{D83436F7-2EE6-4D59-ACC3-355C973BCDAF}"
 	ProjectSection(SolutionItems) = preProject
+		.editorconfig = .editorconfig
 		.gitattributes = .gitattributes
 		.gitignore = .gitignore
 		CONTRIBUTING.md = CONTRIBUTING.md
diff --git a/src/Directory.Build.props b/src/Directory.Build.props
index 4fdc86f6..acd51e1a 100644
--- a/src/Directory.Build.props
+++ b/src/Directory.Build.props
@@ -29,12 +29,11 @@
     true
-
-
+    true
     CS1591

From 966e8e334908f6084535d2a0b82658239d8bba3d Mon Sep 17 00:00:00 2001
From: mdabros
Date: Wed, 24 Jan 2024 21:21:41 +0100
Subject: [PATCH 02/15] Fix IDE0055

---
 .../ClassificationAdaBoostLearnerTest.cs | 4 +-
 .../Learners/RegressionAdaBoostLearnerTest.cs | 2 +-
 .../Models/ClassificationAdaBoostModelTest.cs | 8 +-
 .../Models/RegressionAdaBoostModelTest.cs | 4 +-
 .../WeightedRandomSamplerTest.cs | 2 +-
 .../Learners/ClassificationAdaBoostLearner.cs | 30 +++----
 .../Learners/RegressionAdaBoostLearner.cs | 28 +++----
 .../Arithmetic/MatrixAddTest.cs | 2 +-
 .../ArrayExtensionsTest.cs | 28 +++----
 .../FloatingPointConversionTest.cs | 4 +-
 .../Matrices/F64MatrixExtensionsTest.cs | 18 ++---
 .../Matrices/F64MatrixTest.cs | 8 +-
 .../Matrices/StringMatrixExtensionsTest.cs | 6 +-
 .../Matrices/StringMatrixTest.cs | 4 +-
 .../Views/F64MatrixViewTest.cs | 6 +-
 .../Arithmetic/MatrixAdd.cs | 8 +-
 .../Arithmetic/MatrixMultiplication.cs | 20 ++---
 .../Arithmetic/MatrixSubtraction.cs | 10 +--
 src/SharpLearning.Containers/Checks.cs | 16 ++--
 .../FloatingPointConversion.cs | 2 +-
 .../Matrices/F64Matrix.cs | 2 +-
 .../Matrices/F64MatrixExtensions.cs | 4 +-
 .../ObservationTargetSet.cs | 6 +-
 .../ProbabilityPrediction.cs | 10 +--
.../Views/F64MatrixPinnedPtr.cs | 6 +- .../Views/F64VectorPinnedPtr.cs | 4 +- .../Views/F64VectorView.cs | 2 +- .../Views/Interval1D.cs | 2 +- .../Views/Interval2D.cs | 12 +-- .../CrossValidationUtilitiesTest.cs | 2 +- .../LearningCurvePointExtensionsTest.cs | 6 +- .../LearningCurves/LearningCurvePointTest.cs | 2 +- .../LearningCurvesCalculatorTest.cs | 30 +++---- .../NoShuffleLearningCurvesCalculatorTest.cs | 10 +-- .../RandomLearningCurvesCalculatorTest.cs | 4 +- .../StratifiedLearningCurvesCalculatorTest.cs | 4 +- .../Samplers/StratifiedIndexSamplerTest.cs | 2 +- .../TimeSeriesCrossValidationTest.cs | 2 +- ...TrainingTestIndexSplitterExtensionsTest.cs | 6 +- .../TrainingTestIndexSplitterTest.cs | 2 +- .../TrainingTestSetSplitTest.cs | 2 +- .../ContinuousMungeAugmentator.cs | 8 +- .../Augmentators/NominalMungeAugmentator.cs | 10 +-- .../CrossValidationUtilities.cs | 2 +- .../CrossValidators/ICrossValidation.cs | 6 +- .../LearningCurvePointExtensions.cs | 4 +- .../LearningCurvesCalculator.cs | 18 ++--- .../NoShuffleLearningCurvesCalculator.cs | 2 +- .../RandomShuffleLearningCurvesCalculator.cs | 4 +- .../StratifiedLearningCurvesCalculator.cs | 2 +- .../Samplers/IIndexSampler.cs | 2 +- .../Samplers/NoShuffleIndexSampler.cs | 10 +-- .../Samplers/RandomIndexSampler.cs | 12 +-- .../TimeSeries/TimeSeriesCrossValidation.cs | 22 +++--- .../TrainingTestIndexSplitter.cs | 4 +- .../TrainingTestIndexSplitterExtensions.cs | 2 +- .../TrainingTestSetSplit.cs | 4 +- .../ChildImpuritiesTest.cs | 2 +- .../RegressionImpurityCalculatorTest.cs | 2 +- .../RegressionDecisionTreeLearnerTest.cs | 2 +- .../ClassificationDecisionTreeModelTest.cs | 10 +-- .../Models/RegressionDecisionTreeModelTest.cs | 2 +- .../SplitSearchers/LinearSplitSearcherTest.cs | 10 +-- .../OnlyUniqueThresholdsSplitSearcherTest.cs | 10 +-- .../ClassificationImpurityCalculator.cs | 8 +- .../IImpurityCalculator.cs | 8 +- .../RegressionImpurityCalculator.cs | 10 +-- .../ImpurityCalculators/TargetCounts.cs | 8 +- .../ClassificationDecisionTreeLearner.cs | 24 +++--- .../Learners/RegressionDecisionTreeLearner.cs | 24 +++--- .../Models/ClassificationDecisionTreeModel.cs | 2 +- .../Nodes/BinaryTree.cs | 6 +- .../Nodes/DecisionNodeCreationItem.cs | 2 +- src/SharpLearning.DecisionTrees/Nodes/Node.cs | 4 +- .../SplitSearchers/LinearSplitSearcher.cs | 6 +- .../OnlyUniqueThresholdsSplitSearcher.cs | 6 +- .../SplitSearchers/RandomSplitSearcher.cs | 18 ++--- .../TreeBuilders/BestFirstTreeBuilder.cs | 12 +-- .../TreeBuilders/DepthFirstTreeBuilder.cs | 12 +-- ...tionClassificationEnsembleSelectionTest.cs | 4 +- ...minationRegressionEnsembleSelectionTest.cs | 4 +- ...archClassificationEnsembleSelectionTest.cs | 4 +- ...rdSearchRegressionEnsembleSelectionTest.cs | 4 +- ...ndomClassificationEnsembleSelectionTest.cs | 4 +- .../RandomRegressionEnsembleSelectionTest.cs | 4 +- .../ClassificationEnsembleLearnerTest.cs | 6 +- ...cationModelSelectingEnsembleLearnerTest.cs | 20 ++--- ...RandomModelSelectingEnsembleLearnerTest.cs | 4 +- ...assificationStackingEnsembleLearnerTest.cs | 8 +- ...SearchModelSelectingEnsembleLearnerTest.cs | 4 +- ...essionModelSelectingEnsembleLearnerTest.cs | 10 +-- ...RandomModelSelectingEnsembleLearnerTest.cs | 6 +- .../RegressionStackingEnsembleLearnerTest.cs | 8 +- .../Models/ClassificationEnsembleModelTest.cs | 16 ++-- ...ClassificationStackingEnsembleModelTest.cs | 32 ++++---- .../Models/RegressionEnsembleModelTest.cs | 6 +- .../RegressionStackingEnsembleModelTest.cs | 10 +-- ...minationClassificationEnsembleSelection.cs | 8 +- 
...dEliminationRegressionEnsembleSelection.cs | 18 ++--- ...rdSearchClassificationEnsembleSelection.cs | 20 ++--- ...orwardSearchRegressionEnsembleSelection.cs | 12 +-- .../RandomClassificationEnsembleSelection.cs | 14 ++-- .../RandomRegressionEnsembleSelection.cs | 14 ++-- ...liminationModelSelectingEnsembleLearner.cs | 14 ++-- .../Learners/ClassificationEnsembleLearner.cs | 20 ++--- ...wardSearchModelSelectingEnsembleLearner.cs | 20 ++--- ...sificationModelSelectingEnsembleLearner.cs | 18 ++--- ...tionRandomModelSelectingEnsembleLearner.cs | 24 +++--- .../ClassificationStackingEnsembleLearner.cs | 34 ++++---- ...liminationModelSelectingEnsembleLearner.cs | 16 ++-- .../Learners/RegressionEnsembleLearner.cs | 22 +++--- ...wardSearchModelSelectingEnsembleLearner.cs | 20 ++--- ...RegressionModelSelectingEnsembleLearner.cs | 22 +++--- ...sionRandomModelSelectingEnsembleLearner.cs | 26 +++---- .../RegressionStackingEnsembleLearner.cs | 28 +++---- .../Models/ClassificationEnsembleModel.cs | 4 +- .../ClassificationStackingEnsembleModel.cs | 6 +- .../Models/RegressionEnsembleModel.cs | 6 +- .../Models/RegressionStackingEnsembleModel.cs | 2 +- ...obabilityClassificationEnsembleStrategy.cs | 4 +- .../DateTimeFeatureTransformerTest.cs | 2 +- .../DateTimeFeatureTransformer.cs | 2 +- .../MapCategoricalFeaturesTransformer.cs | 4 +- .../CsvRowTransforms/OneHotTransformer.cs | 2 +- .../ReplaceMissingValuesTransformer.cs | 2 +- .../FeatureTransformationExtensions.cs | 6 +- .../MatrixTransforms/MinMaxTransformer.cs | 8 +- .../GBMDecisionTreeLearnerTest.cs | 10 +-- ...ionAbsoluteLossGradientBoostLearnerTest.cs | 2 +- ...ionQuantileLossGradientBoostLearnerTest.cs | 2 +- ...ssionSquareLossGradientBoostLearnerTest.cs | 2 +- .../Loss/GradientBoostBinomialLossTest.cs | 6 +- .../ClassificationGradientBoostModelTest.cs | 18 ++--- .../RegressionGradientBoostModelTest.cs | 14 ++-- .../GBMDecisionTree/GBMDecisionTreeLearner.cs | 78 +++++++++---------- .../GBMDecisionTree/GBMNode.cs | 8 +- .../GBMDecisionTree/GBMSplitInfo.cs | 2 +- .../GBMDecisionTree/GBMSplitResult.cs | 4 +- .../GBMDecisionTree/GBMTree.cs | 6 +- ...ssificationBinomialGradientBoostLearner.cs | 14 ++-- .../ClassificationGradientBoostLearner.cs | 74 +++++++++--------- ...ressionAbsoluteLossGradientBoostLearner.cs | 12 +-- .../RegressionGradientBoostLearner.cs | 44 +++++------ ...RegressionHuberLossGradientBoostLearner.cs | 14 ++-- ...ressionQuantileLossGradientBoostLearner.cs | 14 ++-- ...egressionSquareLossGradientBoostLearner.cs | 14 ++-- .../Loss/GradientBoostAbsoluteLoss.cs | 8 +- .../Loss/GradientBoostBinomialLoss.cs | 6 +- .../Loss/GradientBoostHuberLoss.cs | 14 ++-- .../Loss/GradientBoostQuantileLoss.cs | 6 +- .../Loss/GradientBoostSquaredLoss.cs | 10 +-- .../Loss/IGradientBoostLoss.cs | 2 +- .../ClassificationGradientBoostModel.cs | 20 ++--- .../Models/RegressionGradientBoostModel.cs | 8 +- .../Csv/CsvParserTest.cs | 2 +- .../Csv/CsvRowExtensionsTest.cs | 16 ++-- .../Csv/CsvRowTest.cs | 16 ++-- .../Csv/CsvWriterTest.cs | 2 +- .../Csv/DictionaryExtensionsTest.cs | 4 +- .../GenericXmlDataContractSerializerTest.cs | 2 +- .../Csv/CsvParser.cs | 28 +++---- src/SharpLearning.InputOutput/Csv/CsvRow.cs | 2 +- .../Csv/CsvRowExtensions.cs | 16 ++-- .../Csv/CsvWriter.cs | 4 +- .../Serialization/GenericBinarySerializer.cs | 4 +- .../GenericXmlDataContractSerializer.cs | 50 ++++++------ ...ClassificationMatrixStringConverterTest.cs | 4 +- .../ClassificationMatrixTest.cs | 2 +- .../Classification/F1ScoreMetricTest.cs | 2 +- 
...LossClassificationProbabilityMetricTest.cs | 10 +-- .../Classification/PrecisionMetricTest.cs | 2 +- .../Classification/RecallMetricTest.cs | 2 +- ...cAucClassificationProbabilityMetricTest.cs | 6 +- .../TotalErrorClassificationMetricTest.cs | 4 +- .../MeanSquaredErrorRegressionMetricTest.cs | 2 +- ...eanSquarePercentageRegressionMetricTest.cs | 2 +- .../RootMeanSquareRegressionMetricTest.cs | 2 +- .../Classification/ClassificationMatrix.cs | 2 +- .../ClassificationMatrixStringConverter.cs | 16 ++-- .../Classification/IClassificationMetric.cs | 2 +- .../IClassificationProbabilityMetric.cs | 4 +- .../LogLossClassificationProbabilityMetric.cs | 8 +- .../Classification/PrecisionMetric.cs | 6 +- .../Classification/RecallMetric.cs | 4 +- .../RocAucClassificationProbabilityMetric.cs | 20 ++--- .../TotalErrorClassificationMetric.cs | 2 +- .../Classification/Utilities.cs | 2 +- .../Impurity/GiniImpurityMetric.cs | 2 +- .../ModelComparison/McNemarModelComparison.cs | 10 +-- .../Ranking/AveragePrecisionRankingMetric.cs | 2 +- .../CoefficientOfDeterminationMetric.cs | 4 +- .../MeanSquaredErrorRegressionMetric.cs | 2 +- ...rmalizedGiniCoefficientRegressionMetric.cs | 66 ++++++++-------- .../ConvUtilsTest.cs | 30 +++---- .../GradientCheckerTool.cs | 4 +- .../Layers/BatchNormalizationLayerTest.cs | 10 +-- .../Layers/Conv2DLayerTest.cs | 6 +- .../Layers/DenseLayerTest.cs | 2 +- .../Layers/MaxPool2DLayerTest.cs | 22 +++--- .../RegressionNeuralNetLearnerTest.cs | 6 +- .../Loss/SquareLossTest.cs | 4 +- .../MathNetExtensionsTest.cs | 4 +- .../Optimizers/NeuralNetOptimizerTest.cs | 2 +- .../TargetEncoders/OneOfNTargetEncoderTest.cs | 2 +- .../Activations/SigmoidActivation.cs | 4 +- src/SharpLearning.Neural/ConvUtils.cs | 24 +++--- .../Initializations/WeightInitialization.cs | 2 +- .../Layers/ActivationLayer.cs | 4 +- .../Layers/BatchNormalizationLayer.cs | 2 +- .../Layers/Conv2DLayer.cs | 28 +++---- src/SharpLearning.Neural/Layers/DenseLayer.cs | 10 +-- .../Layers/DropoutLayer.cs | 2 +- src/SharpLearning.Neural/Layers/ILayer.cs | 4 +- src/SharpLearning.Neural/Layers/InputLayer.cs | 4 +- .../Layers/MaxPool2DLayer.cs | 16 ++-- .../Layers/SquaredErrorRegressionLayer.cs | 2 +- src/SharpLearning.Neural/Layers/SvmLayer.cs | 10 +-- .../ClassificationNeuralNetLearner.cs | 28 +++---- .../Learners/NeuralNetLearner.cs | 36 ++++----- .../Learners/RegressionNeuralNetLearner.cs | 24 +++--- src/SharpLearning.Neural/Loss/AccuracyLoss.cs | 4 +- src/SharpLearning.Neural/MathNetExtensions.cs | 16 ++-- .../Models/ClassificationNeuralNetModel.cs | 4 +- src/SharpLearning.Neural/Models/NeuralNet.cs | 22 +++--- .../Models/RegressionNeuralNetModel.cs | 2 +- .../Optimizers/NeuralNetOptimizer.cs | 34 ++++---- .../ArrayAssert.cs | 2 +- ...lobalizedBoundedNelderMeadOptimizerTest.cs | 6 +- .../GridSearchOptimizationTest.cs | 20 ++--- .../HyperbandOptimizerTest.cs | 6 +- .../ParameterBoundsTest.cs | 8 +- .../ParticleSwarmOptimizerTest.cs | 6 +- .../RandomSearchOptimizationTest.cs | 12 +-- .../SmacOptimizerTest.cs | 2 +- .../ExponentialAverageTransformTest.cs | 4 +- .../Transforms/LinearTransformTest.cs | 2 +- .../Transforms/Log10TransformTest.cs | 2 +- .../BayesianOptimizer.cs | 4 +- .../GlobalizedBoundedNelderMeadOptimizer.cs | 22 +++--- .../GridSearchOptimizer.cs | 4 +- .../HyperbandOptimizer.cs | 8 +- src/SharpLearning.Optimization/IOptimizer.cs | 2 +- .../ParameterSpecs/MinMaxParameterSpec.cs | 6 +- .../ParticleSwarmOptimizer.cs | 12 +-- .../RandomSearchOptimizer.cs | 12 +-- .../SmacOptimizer.cs | 18 ++--- 
.../Transforms/ExponentialAverageTransform.cs | 2 +- .../Transforms/Log10Transform.cs | 2 +- ...tionExtremelyRandomizedTreesLearnerTest.cs | 12 +-- .../ClassificationRandomForestLearnerTest.cs | 6 +- ...sionExtremelyRandomizedTreesLearnerTest.cs | 8 +- .../RegressionRandomForestLearnerTest.cs | 6 +- .../Models/ClassificationForestModelTest.cs | 2 +- .../Models/RegressionForestModelTest.cs | 2 +- ...ficationExtremelyRandomizedTreesLearner.cs | 24 +++--- .../ClassificationRandomForestLearner.cs | 26 +++---- ...gressionExtremelyRandomizedTreesLearner.cs | 22 +++--- .../Learners/RegressionRandomForestLearner.cs | 16 ++-- .../Models/ClassificationForestModel.cs | 8 +- .../Models/RegressionForestModel.cs | 2 +- .../Models/FeatureImportanceParserTest.cs | 2 +- .../Models/RegressionXGBoostModelTest.cs | 2 +- src/SharpLearning.XGBoost/Conversions.cs | 4 +- .../Learners/ArgumentChecks.cs | 2 +- .../Learners/ClassificationXGBoostLearner.cs | 26 +++---- .../Learners/RegressionXGBoostLearner.cs | 20 ++--- .../Models/ClassificationXGBoostModel.cs | 8 +- .../Models/FeatureImportanceParser.cs | 6 +- .../Models/RegressionXGBoostModel.cs | 2 +- 269 files changed, 1338 insertions(+), 1334 deletions(-) diff --git a/src/SharpLearning.AdaBoost.Test/Learners/ClassificationAdaBoostLearnerTest.cs b/src/SharpLearning.AdaBoost.Test/Learners/ClassificationAdaBoostLearnerTest.cs index 665c4804..27a8321f 100644 --- a/src/SharpLearning.AdaBoost.Test/Learners/ClassificationAdaBoostLearnerTest.cs +++ b/src/SharpLearning.AdaBoost.Test/Learners/ClassificationAdaBoostLearnerTest.cs @@ -16,7 +16,7 @@ public void ClassificationAdaBoostLearner_Learn_AptitudeData() var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); var sut = new ClassificationAdaBoostLearner(10); - + var model = sut.Learn(observations, targets); var predictions = model.Predict(observations); @@ -49,7 +49,7 @@ public void ClassificationAdaBoostLearner_Learn_AptitudeData_SequenceContainNoIt var indices = new int[] { 22, 6, 23, 12 }; var sut = new ClassificationAdaBoostLearner(10); - + var model = sut.Learn(observations, targets, indices); var predictions = model.Predict(observations); var indexedPredictions = predictions.GetIndices(indices); diff --git a/src/SharpLearning.AdaBoost.Test/Learners/RegressionAdaBoostLearnerTest.cs b/src/SharpLearning.AdaBoost.Test/Learners/RegressionAdaBoostLearnerTest.cs index 575c1f1f..e2128146 100644 --- a/src/SharpLearning.AdaBoost.Test/Learners/RegressionAdaBoostLearnerTest.cs +++ b/src/SharpLearning.AdaBoost.Test/Learners/RegressionAdaBoostLearnerTest.cs @@ -16,7 +16,7 @@ public void RegressionAdaBoostLearner_Learn_AptitudeData_LinearLoss() var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); var sut = new RegressionAdaBoostLearner(10); - + var model = sut.Learn(observations, targets); var predictions = model.Predict(observations); diff --git a/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs b/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs index 2ad393d9..316513c8 100644 --- a/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs +++ b/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs @@ -40,7 +40,7 @@ public void ClassificationAdaBoostModel_Predict_Single() public void ClassificationAdaBoostModel_Precit_Multiple() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - + var learner = new ClassificationAdaBoostLearner(10); var sut = learner.Learn(observations, targets); @@ -72,7 
+72,7 @@ public void ClassificationAdaBoostModel_PredictProbability_Single() Assert.AreEqual(0.038461538461538464, error, 0.0000001); - var expected = new ProbabilityPrediction[] {new ProbabilityPrediction(0, new Dictionary {{0, 0.553917222019051}, {1, 0.446082777980949}, }),new ProbabilityPrediction(1, new Dictionary {{0, 0.455270122123639}, {1, 0.544729877876361}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.590671208378385}, {1, 0.409328791621616}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.564961572849738}, {1, 0.435038427150263}, }),new ProbabilityPrediction(1, new Dictionary {{0, 0.455270122123639}, {1, 0.544729877876361}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.549970403132686}, {1, 0.450029596867314}, }),new ProbabilityPrediction(1, new Dictionary {{0, 0.417527839140627}, {1, 0.582472160859373}, }),new ProbabilityPrediction(1, new Dictionary {{0, 0.409988559960094}, {1, 0.590011440039906}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.630894242807786}, {1, 0.369105757192214}, }),new ProbabilityPrediction(1, new Dictionary {{0, 0.436954866525023}, {1, 0.563045133474978}, }),new ProbabilityPrediction(1, new Dictionary {{0, 0.461264944069783}, {1, 0.538735055930217}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.590671208378385}, {1, 0.409328791621616}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.549503146925505}, {1, 0.450496853074495}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.537653803214063}, {1, 0.462346196785938}, }),new ProbabilityPrediction(1, new Dictionary {{0, 0.37650723540928}, {1, 0.62349276459072}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.573579890413618}, {1, 0.426420109586382}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.549970403132686}, {1, 0.450029596867314}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.524371409810479}, {1, 0.475628590189522}, }),new ProbabilityPrediction(1, new Dictionary {{0, 0.436954866525023}, {1, 0.563045133474978}, }),new ProbabilityPrediction(1, new Dictionary {{0, 0.471117379964633}, {1, 0.528882620035367}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.630894242807786}, {1, 0.369105757192214}, }),new ProbabilityPrediction(1, new Dictionary {{0, 0.436954866525023}, {1, 0.563045133474978}, }),new ProbabilityPrediction(1, new Dictionary {{0, 0.404976804073458}, {1, 0.595023195926542}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.573579890413618}, {1, 0.426420109586382}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.549970403132686}, {1, 0.450029596867314}, }),new ProbabilityPrediction(0, new Dictionary {{0, 0.630894242807786}, {1, 0.369105757192214}, }),}; + var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 0.553917222019051 }, { 1, 0.446082777980949 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.564961572849738 }, { 1, 0.435038427150263 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.417527839140627 }, { 1, 0.582472160859373 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.409988559960094 }, { 1, 0.590011440039906 }, }), new 
ProbabilityPrediction(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.461264944069783 }, { 1, 0.538735055930217 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.549503146925505 }, { 1, 0.450496853074495 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.537653803214063 }, { 1, 0.462346196785938 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.37650723540928 }, { 1, 0.62349276459072 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.524371409810479 }, { 1, 0.475628590189522 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.471117379964633 }, { 1, 0.528882620035367 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.404976804073458 }, { 1, 0.595023195926542 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), }; CollectionAssert.AreEqual(expected, actual); } @@ -99,14 +99,14 @@ public void ClassificationAdaBoostModel_GetVariableImportance() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, + var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, { "PreviousExperience_month", 1 } }; var learner = new ClassificationAdaBoostLearner(10, 1, 3); var sut = learner.Learn(observations, targets); var actual = sut.GetVariableImportance(featureNameToIndex); - var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, + var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, { "AptitudeTestScore", 24.0268096428771 } }; Assert.AreEqual(expected.Count, actual.Count); diff --git a/src/SharpLearning.AdaBoost.Test/Models/RegressionAdaBoostModelTest.cs b/src/SharpLearning.AdaBoost.Test/Models/RegressionAdaBoostModelTest.cs index 26a2f8af..17c33887 100644 --- a/src/SharpLearning.AdaBoost.Test/Models/RegressionAdaBoostModelTest.cs +++ b/src/SharpLearning.AdaBoost.Test/Models/RegressionAdaBoostModelTest.cs @@ -55,14 +55,14 @@ public void RegressionAdaBoostModel_GetVariableImportance() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, + var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, { "PreviousExperience_month", 1 } }; var learner = new RegressionAdaBoostLearner(10); var sut = learner.Learn(observations, targets); var actual = sut.GetVariableImportance(featureNameToIndex); - var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, + var expected = new 
Dictionary { { "PreviousExperience_month", 100.0 }, { "AptitudeTestScore", 33.8004886838701 } }; Assert.AreEqual(expected.Count, actual.Count); diff --git a/src/SharpLearning.AdaBoost.Test/WeightedRandomSamplerTest.cs b/src/SharpLearning.AdaBoost.Test/WeightedRandomSamplerTest.cs index 7e1a6134..0f6096e6 100644 --- a/src/SharpLearning.AdaBoost.Test/WeightedRandomSamplerTest.cs +++ b/src/SharpLearning.AdaBoost.Test/WeightedRandomSamplerTest.cs @@ -15,7 +15,7 @@ public void WeightedRandomSampler_Sample_Weight_10() var actual = new int[indices.Length]; sut.Sample(indices, weights, actual); - + var expected = new int[] { 2, 5, 6, 7, 7, 8, 8, 8, 9, 9 }; CollectionAssert.AreEqual(expected, actual); } diff --git a/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs b/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs index bae0f1ce..e7ac6856 100644 --- a/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs +++ b/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs @@ -17,7 +17,7 @@ namespace SharpLearning.AdaBoost.Learners /// Classification AdaBoost learner using the SAMME algorithm for multi-class support: /// http://web.stanford.edu/~hastie/Papers/samme.pdf /// - public sealed class ClassificationAdaBoostLearner + public sealed class ClassificationAdaBoostLearner : IIndexedLearner , IIndexedLearner , ILearner @@ -33,14 +33,14 @@ public sealed class ClassificationAdaBoostLearner int m_uniqueTargetValues; ClassificationDecisionTreeLearner m_modelLearner; - readonly TotalErrorClassificationMetric m_errorMetric = + readonly TotalErrorClassificationMetric m_errorMetric = new TotalErrorClassificationMetric(); List m_modelErrors = new List(); List m_modelWeights = new List(); - List m_models = + List m_models = new List(); - + double[] m_workErrors = new double[0]; double[] m_sampleWeights = new double[0]; double[] m_indexedTargets = new double[0]; @@ -56,10 +56,10 @@ public sealed class ClassificationAdaBoostLearner /// 0 will set the depth equal to the number of classes in the problem /// minimum node split size in the trees 1 is default /// The minimum improvement in information gain before a split is made - public ClassificationAdaBoostLearner(int iterations = 50, - double learningRate = 1, - int maximumTreeDepth = 0, - int minimumSplitSize = 1, + public ClassificationAdaBoostLearner(int iterations = 50, + double learningRate = 1, + int maximumTreeDepth = 0, + int minimumSplitSize = 1, double minimumInformationGain = 0.000001) { if (iterations < 1) { throw new ArgumentException("Iterations must be at least 1"); } @@ -67,7 +67,7 @@ public ClassificationAdaBoostLearner(int iterations = 50, if (minimumSplitSize <= 0) { throw new ArgumentException("minimum split size must be larger than 0"); } if (maximumTreeDepth < 0) { throw new ArgumentException("maximum tree depth must be larger than 0"); } if (minimumInformationGain <= 0) { throw new ArgumentException("minimum information gain must be larger than 0"); } - + m_iterations = iterations; m_learningRate = learningRate; @@ -95,7 +95,7 @@ public ClassificationAdaBoostModel Learn(F64Matrix observations, double[] target /// /// /// - public ClassificationAdaBoostModel Learn(F64Matrix observations, double[] targets, + public ClassificationAdaBoostModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -119,7 +119,7 @@ public ClassificationAdaBoostModel Learn(F64Matrix observations, double[] target 
m_maximumTreeDepth = m_uniqueTargetValues; } - m_modelLearner = new ClassificationDecisionTreeLearner(m_maximumTreeDepth, m_minimumSplitSize, + m_modelLearner = new ClassificationDecisionTreeLearner(m_maximumTreeDepth, m_minimumSplitSize, observations.ColumnCount, m_minimumInformationGain, 42); m_modelErrors.Clear(); @@ -216,7 +216,7 @@ IPredictorModel ILearner.Learn( bool Boost(F64Matrix observations, double[] targets, int[] indices, int iteration) { - var model = m_modelLearner.Learn(observations, targets, + var model = m_modelLearner.Learn(observations, targets, indices, m_sampleWeights); var predictions = model.Predict(observations, indices); @@ -252,7 +252,7 @@ bool Boost(F64Matrix observations, double[] targets, int[] indices, int iteratio } var modelWeight = m_learningRate * ( - Math.Log((1.0 - modelError) / modelError) + + Math.Log((1.0 - modelError) / modelError) + Math.Log(m_uniqueTargetValues - 1.0)); // Only boost if not last iteration @@ -262,7 +262,7 @@ bool Boost(F64Matrix observations, double[] targets, int[] indices, int iteratio { var index = indices[i]; var sampleWeight = m_sampleWeights[index]; - if(sampleWeight > 0.0 || modelWeight < 0.0) + if (sampleWeight > 0.0 || modelWeight < 0.0) { m_sampleWeights[index] = sampleWeight * Math.Exp(modelWeight * m_workErrors[index]); } @@ -280,7 +280,7 @@ double ErrorEstimate(F64Matrix observations, int[] indices) { var rows = indices.Length; var predictions = new double[rows]; - + for (int i = 0; i < rows; i++) { var index = indices[i]; diff --git a/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs b/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs index 099d55ec..db4188b2 100644 --- a/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs +++ b/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs @@ -35,7 +35,7 @@ public sealed class RegressionAdaBoostLearner : IIndexedLearner, ILearne List m_modelErrors = new List(); List m_modelWeights = new List(); List m_models = new List(); - + double[] m_workErrors = new double[0]; double[] m_sampleWeights = new double[0]; double[] m_indexedTargets = new double[0]; @@ -56,12 +56,12 @@ public sealed class RegressionAdaBoostLearner : IIndexedLearner, ILearne /// minimum node split size in the trees 1 is default /// The minimum improvement in information gain before a split is made /// Seed for the random sampling - public RegressionAdaBoostLearner(int iterations = 50, - double learningRate = 1, - int maximumTreeDepth = 0, - AdaBoostRegressionLoss loss = AdaBoostRegressionLoss.Linear, - int minimumSplitSize = 1, - double minimumInformationGain = 0.000001, + public RegressionAdaBoostLearner(int iterations = 50, + double learningRate = 1, + int maximumTreeDepth = 0, + AdaBoostRegressionLoss loss = AdaBoostRegressionLoss.Linear, + int minimumSplitSize = 1, + double minimumInformationGain = 0.000001, int seed = 42) { if (iterations < 1) { throw new ArgumentException("Iterations must be at least 1"); } @@ -69,7 +69,7 @@ public RegressionAdaBoostLearner(int iterations = 50, if (minimumSplitSize <= 0) { throw new ArgumentException("minimum split size must be larger than 0"); } if (maximumTreeDepth < 0) { throw new ArgumentException("maximum tree depth must be larger than 0"); } if (minimumInformationGain <= 0) { throw new ArgumentException("minimum information gain must be larger than 0"); } - + m_iterations = iterations; m_learningRate = learningRate; @@ -100,7 +100,7 @@ public RegressionAdaBoostModel Learn(F64Matrix observations, double[] targets) 
/// /// /// - public RegressionAdaBoostModel Learn(F64Matrix observations, double[] targets, + public RegressionAdaBoostModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -111,7 +111,7 @@ public RegressionAdaBoostModel Learn(F64Matrix observations, double[] targets, m_maximumTreeDepth = 3; } - m_modelLearner = new RegressionDecisionTreeLearner(m_maximumTreeDepth, m_minimumSplitSize, + m_modelLearner = new RegressionDecisionTreeLearner(m_maximumTreeDepth, m_minimumSplitSize, observations.ColumnCount, m_minimumInformationGain, 42); m_modelErrors.Clear(); @@ -194,7 +194,7 @@ bool Boost(F64Matrix observations, double[] targets, int[] indices, int iteratio var model = m_modelLearner.Learn(observations, targets, m_sampleIndices); // weighted sampling is used instead of weights in training - + var predictions = model.Predict(observations, indices); @@ -211,7 +211,7 @@ bool Boost(F64Matrix observations, double[] targets, int[] indices, int iteratio var error = m_workErrors[i]; - if(maxError != 0.0) + if (maxError != 0.0) { error = error / maxError; } @@ -229,7 +229,7 @@ bool Boost(F64Matrix observations, double[] targets, int[] indices, int iteratio default: throw new ArgumentException("Unsupported loss type"); } - + m_workErrors[i] = error; } @@ -274,7 +274,7 @@ double ErrorEstimate(F64Matrix observations, int[] indices) { var rows = indices.Length; var predictions = new double[rows]; - + for (int i = 0; i < rows; i++) { var index = indices[i]; diff --git a/src/SharpLearning.Containers.Test/Arithmetic/MatrixAddTest.cs b/src/SharpLearning.Containers.Test/Arithmetic/MatrixAddTest.cs index 07e9a3b5..f3a81398 100644 --- a/src/SharpLearning.Containers.Test/Arithmetic/MatrixAddTest.cs +++ b/src/SharpLearning.Containers.Test/Arithmetic/MatrixAddTest.cs @@ -61,7 +61,7 @@ public void MatrixAdd_Add_Vectors() public void MatrixAdd_Add_Vectors_Different_Lengths() { var v1 = new double[] { 2, 3, 5, 10 }; - var v2 = new double[] { 1, 1 }; + var v2 = new double[] { 1, 1 }; v1.Add(v2); } diff --git a/src/SharpLearning.Containers.Test/ArrayExtensionsTest.cs b/src/SharpLearning.Containers.Test/ArrayExtensionsTest.cs index e216ea66..223c4c16 100644 --- a/src/SharpLearning.Containers.Test/ArrayExtensionsTest.cs +++ b/src/SharpLearning.Containers.Test/ArrayExtensionsTest.cs @@ -33,7 +33,7 @@ public void ArrayExtensions_Map_Param() CollectionAssert.AreEqual(expected, sut); } - + [TestMethod] public void ArrayExtensions_Clear() { @@ -68,7 +68,7 @@ public void ArrayExtensions_AsString() [TestMethod] public void ArrayExtensions_AsF64() { - var sut = new string[] { "1", "2", "3", "4", "5", "6", "7", "8", "9" }; + var sut = new string[] { "1", "2", "3", "4", "5", "6", "7", "8", "9" }; var actual = sut.AsF64(); var expected = new double[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 }; @@ -89,7 +89,7 @@ public void ArrayExtensions_AsInt32() public void ArrayExtensions_SortWith() { var values = new int[] { 0, 1, 2, 3, 4, 5 }; - var keys = new int[] { 5, 4, 3, 2, 1, 0 }; + var keys = new int[] { 5, 4, 3, 2, 1, 0 }; var interval = Interval1D.Create(0, keys.Length); keys.SortWith(interval, values); @@ -136,7 +136,7 @@ public void ArrayExtensions_CopyTo_Interval() var destination = new int[values.Length]; values.CopyTo(interval, destination); - var expected = new int[] {0, 1, 2, 3, 4, 0 }; + var expected = new int[] { 0, 1, 2, 3, 4, 0 }; CollectionAssert.AreEqual(expected, destination); } @@ -241,9 +241,10 @@ public void ArrayExtensions_WeightedMedian_1() var w = 
new double[n]; var x = new double[n]; - for (int j = 0; j < n; j++) { - w[j] = j + 1; - x[j] = j; + for (int j = 0; j < n; j++) + { + w[j] = j + 1; + x[j] = j; } var actual = x.WeightedMedian(w); @@ -258,9 +259,10 @@ public void ArrayExtensions_WeightedMedian_2() var w = new double[n]; var x = new double[n]; - for (int j = 0; j < n; j++) { - w[j] = j + ((j<6) ? 1 : 0); - x[j] = j + 1; + for (int j = 0; j < n; j++) + { + w[j] = j + ((j < 6) ? 1 : 0); + x[j] = j + 1; } var actual = x.WeightedMedian(w); @@ -339,7 +341,7 @@ public void ArrayExtensions_ScoreAtPercentile_010() Assert.AreEqual(2.0, actual); } - + [TestMethod] public void ArrayExtensions_Shuffle() { @@ -379,7 +381,7 @@ public void ArrayExtensions_StratifiedIndexSampling_Equal_Class_Size_50_Percent_ [TestMethod] public void ArrayExtensions_StratifiedIndexSampling_Unequal_Class_Size_50_Percent_Sample() { - var values = new int[] { 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3 }; + var values = new int[] { 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3 }; var sampleSize = values.Length / 2; var sampleIndices = values.StratifiedIndexSampling(sampleSize, new Random(42)); @@ -451,7 +453,7 @@ public void ArrayExtensions_StratifiedIndexSampling_Indexed_Indices_Larger_Than_ values.StratifiedIndexSampling(sampleSize, indices, new Random(42)); } - [TestMethod] + [TestMethod] public void ArrayExtensions_List_ToF64Matrix() { var sut = new List diff --git a/src/SharpLearning.Containers.Test/FloatingPointConversionTest.cs b/src/SharpLearning.Containers.Test/FloatingPointConversionTest.cs index ee6c6c69..38e7b9d2 100644 --- a/src/SharpLearning.Containers.Test/FloatingPointConversionTest.cs +++ b/src/SharpLearning.Containers.Test/FloatingPointConversionTest.cs @@ -60,8 +60,8 @@ public void FloatingPointConversion_ToF64_to_from_custom_infinity() PositiveInfinitySymbol = text[0], NegativeInfinitySymbol = text[1], }; - - var actual = text.Select(x => FloatingPointConversion.ToF64(x, + + var actual = text.Select(x => FloatingPointConversion.ToF64(x, converter: t => double.Parse(t, nfi))).ToArray(); var expected = new double[] { double.PositiveInfinity, double.NegativeInfinity, 0.0, 75357.987 }; diff --git a/src/SharpLearning.Containers.Test/Matrices/F64MatrixExtensionsTest.cs b/src/SharpLearning.Containers.Test/Matrices/F64MatrixExtensionsTest.cs index 64f5466f..15acc7ef 100644 --- a/src/SharpLearning.Containers.Test/Matrices/F64MatrixExtensionsTest.cs +++ b/src/SharpLearning.Containers.Test/Matrices/F64MatrixExtensionsTest.cs @@ -29,7 +29,7 @@ public void F64MatrixExtensions_Map() var expected = Enumerable.Range(0, matrix.Data().Length).Select(v => 10.0).ToArray(); CollectionAssert.AreEqual(expected, matrix.Data()); } - + [TestMethod] public void F64MatrixExtensions_Map2() { @@ -40,14 +40,14 @@ public void F64MatrixExtensions_Map2() var expected = Enumerable.Range(0, matrix.Data().Length).Select(v => 11.0).ToArray(); CollectionAssert.AreEqual(expected, matrix.Data()); } - + [TestMethod] public void F64MatrixExtensions_ToStringMatrix() { var matrix = new F64Matrix(m_inputData, 2, 3); var actual = matrix.ToStringMatrix(); - var expected = new StringMatrix(m_inputData.Select(v => + var expected = new StringMatrix(m_inputData.Select(v => FloatingPointConversion.ToString(v)).ToArray(), 2, 3); Assert.AreEqual(expected, actual); @@ -73,7 +73,7 @@ public void F64MatrixExtensions_CombineF64MatrixAndVector() var expected = new F64Matrix(new double[] {1, 2, 3, 3, 4, 5, 6, 6}, 2, 4); var actual = matrix.CombineCols(vector); - + Assert.AreEqual(expected, 
actual); } @@ -97,7 +97,7 @@ public void F64MatrixExtensions_VectorAndVector() var v2 = new double[] { 1, 2, 3, 4 }; var actual = v1.CombineCols(v2); - Assert.AreEqual(new F64Matrix(new double[] { 1, 1, 2, 2, 3, 3, 4, 4}, 4, 2), actual); + Assert.AreEqual(new F64Matrix(new double[] { 1, 1, 2, 2, 3, 3, 4, 4 }, 4, 2), actual); } [TestMethod] @@ -116,8 +116,8 @@ public void F64MatrixExtensions_CombineRows_F64MatrixAndVector() var matrix = new F64Matrix(m_inputData, 2, 3); var vector = new double[] { 3, 6, 7 }; - var expected = new F64Matrix(new double[] {1, 2, 3, - 4, 5, 6, + var expected = new F64Matrix(new double[] {1, 2, 3, + 4, 5, 6, 3, 6, 7}, 3, 3); var actual = matrix.CombineRows(vector); @@ -131,8 +131,8 @@ public void F64MatrixExtensions_CombineRows_VectorAndF64Matrix() var vector = new double[] { 3, 6, 7 }; var expected = new F64Matrix(new double[] {3, 6, 7, - 1, 2, 3, - 4, 5, 6 + 1, 2, 3, + 4, 5, 6 }, 3, 3); var actual = vector.CombineRows(matrix); diff --git a/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs b/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs index 6f38a8dc..ccbb3479 100644 --- a/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs +++ b/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs @@ -136,10 +136,10 @@ public void F64Matrix_Implicit_Conversion() var actual = converter(new double[][] { new double[] { 0, 1 }, new double[] { 2, 3 } }); - Assert.AreEqual(0, actual.At(0,0)); - Assert.AreEqual(1, actual.At(0,1)); - Assert.AreEqual(2, actual.At(1,0)); - Assert.AreEqual(3, actual.At(1,1)); + Assert.AreEqual(0, actual.At(0, 0)); + Assert.AreEqual(1, actual.At(0, 1)); + Assert.AreEqual(2, actual.At(1, 0)); + Assert.AreEqual(3, actual.At(1, 1)); } double[] GetExpectedColumn() diff --git a/src/SharpLearning.Containers.Test/Matrices/StringMatrixExtensionsTest.cs b/src/SharpLearning.Containers.Test/Matrices/StringMatrixExtensionsTest.cs index aff240dd..c1911bde 100644 --- a/src/SharpLearning.Containers.Test/Matrices/StringMatrixExtensionsTest.cs +++ b/src/SharpLearning.Containers.Test/Matrices/StringMatrixExtensionsTest.cs @@ -17,7 +17,7 @@ public void StringMatrixExtensions_ToF64Matrix() var stringMatrix = new StringMatrix(m_inputData, 2, 3); var actual = stringMatrix.ToF64Matrix(); - var expected = new F64Matrix(m_inputData.Select(v => + var expected = new F64Matrix(m_inputData.Select(v => FloatingPointConversion.ToF64(v)).ToArray(), 2, 3); Assert.AreEqual(expected, actual); @@ -47,8 +47,8 @@ public void StringMatrixExtensions_Map2() [TestMethod] public void StringMatrixExtensions_CombineStringMatrices_Cols() { - var matrix1 = new StringMatrix(m_inputData,2, 3); - var matrix2 = new StringMatrix(m_inputData,2, 3); + var matrix1 = new StringMatrix(m_inputData, 2, 3); + var matrix2 = new StringMatrix(m_inputData, 2, 3); var actual = matrix1.CombineCols(matrix2); diff --git a/src/SharpLearning.Containers.Test/Matrices/StringMatrixTest.cs b/src/SharpLearning.Containers.Test/Matrices/StringMatrixTest.cs index e94492ed..6f2a750c 100644 --- a/src/SharpLearning.Containers.Test/Matrices/StringMatrixTest.cs +++ b/src/SharpLearning.Containers.Test/Matrices/StringMatrixTest.cs @@ -43,7 +43,7 @@ public void StringMatrix_At_Set_Indexer() { var sut = CreateFeatures(); var item = "123.0"; - sut[1, 1]= item; + sut[1, 1] = item; var value = sut.At(1, 1); Assert.AreEqual(item, value); @@ -121,7 +121,7 @@ public void StringMatrix_Columns_Predefined() { var sut = CreateFeatures(); var actual = new StringMatrix(3, 2); - sut.Columns(new int [] { 0, 2 }, 
actual); + sut.Columns(new int[] { 0, 2 }, actual); var expected = GetExpectedColSubMatrix(); Assert.IsTrue(expected.Equals(actual)); diff --git a/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs b/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs index e1927196..3d49df45 100644 --- a/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs +++ b/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs @@ -26,9 +26,9 @@ public unsafe void F64MatrixView_ColumnView() { var view = pinnedMatrix.View(); for (int i = 0; i < matrix.ColumnCount; i++) - { + { AssertColumnView(matrix.Column(i), view.ColumnView(i)); - } + } } } @@ -39,7 +39,7 @@ public unsafe void F64MatrixView_SubView() using (var pinnedMatrix = matrix.GetPinnedPointer()) { var subView = pinnedMatrix.View().View( - Interval2D.Create(Interval1D.Create(0, 2), + Interval2D.Create(Interval1D.Create(0, 2), Interval1D.Create(0, 3))); var subMatrix = matrix.Rows(new int[] { 0, 1 }); diff --git a/src/SharpLearning.Containers/Arithmetic/MatrixAdd.cs b/src/SharpLearning.Containers/Arithmetic/MatrixAdd.cs index f2e3877c..dd24087b 100644 --- a/src/SharpLearning.Containers/Arithmetic/MatrixAdd.cs +++ b/src/SharpLearning.Containers/Arithmetic/MatrixAdd.cs @@ -15,13 +15,13 @@ public static class MatrixAdd /// /// public static void AddF64(F64Matrix m, double[] v, F64Matrix output) - { + { var rows = m.RowCount; var cols = m.ColumnCount; - if (v.Length != rows) + if (v.Length != rows) { - throw new ArgumentException("matrix rows: " + rows + + throw new ArgumentException("matrix rows: " + rows + " differs from vector length: " + v.Length); } @@ -43,7 +43,7 @@ public static double[] AddF64(double[] v1, double[] v2) { if (v1.Length != v2.Length) { - throw new ArgumentException("v1 length: " + v1.Length + + throw new ArgumentException("v1 length: " + v1.Length + " differs from v2 length: " + v2.Length); } diff --git a/src/SharpLearning.Containers/Arithmetic/MatrixMultiplication.cs b/src/SharpLearning.Containers/Arithmetic/MatrixMultiplication.cs index 0f53c8db..edcde5d2 100644 --- a/src/SharpLearning.Containers/Arithmetic/MatrixMultiplication.cs +++ b/src/SharpLearning.Containers/Arithmetic/MatrixMultiplication.cs @@ -19,13 +19,13 @@ public static double[] MultiplyVectorF64(F64Matrix a, double[] v) { var rows = a.RowCount; var cols = a.ColumnCount; - + var data = a.Data(); var output = new double[rows]; - if (cols != v.Length) + if (cols != v.Length) { - throw new ArgumentException("matrix cols: " + cols + + throw new ArgumentException("matrix cols: " + cols + " differs from vector length: " + v.Length); } @@ -83,7 +83,7 @@ public static double[] MultiplyScalarF64(double[] v, double s) vs[i] = v[i] * s; } return vs; - } + } /// /// Multiply vector v with scalar a @@ -95,8 +95,8 @@ public static double[] Multiply(this double[] v, double s) { return MultiplyScalarF64(v, s); } - - + + /// /// Multiply vector v with matrix a /// @@ -169,8 +169,10 @@ public static void MultiplyF64(F64Matrix a, F64Matrix b, F64Matrix output) if (output.RowCount != aRows) - { throw new ArgumentException("output matrix rows: " + output.RowCount - + " differs from matrix a rows: " + aRows); } + { + throw new ArgumentException("output matrix rows: " + output.RowCount + + " differs from matrix a rows: " + aRows); + } if (output.ColumnCount != bCols) { @@ -190,7 +192,7 @@ public static void MultiplyF64(F64Matrix a, F64Matrix b, F64Matrix output) }); } - + /// /// Multiply matrix a with matrix b /// diff --git 
a/src/SharpLearning.Containers/Arithmetic/MatrixSubtraction.cs b/src/SharpLearning.Containers/Arithmetic/MatrixSubtraction.cs index 2362913e..74f504aa 100644 --- a/src/SharpLearning.Containers/Arithmetic/MatrixSubtraction.cs +++ b/src/SharpLearning.Containers/Arithmetic/MatrixSubtraction.cs @@ -22,7 +22,7 @@ public static double[] SubtractF64(double[] v1, double[] v2) string.Format("Vectors have different lengths: v1: {0}, v2: {1}", v1.Length, v2.Length)); } - + var v3 = new double[v1.Length]; for (int i = 0; i < v1.Length; i++) @@ -53,25 +53,25 @@ public static void SubtractF64(F64Matrix m1, F64Matrix m2, F64Matrix output) if (m1Cols != m2Cols) { - throw new ArgumentException("matrix m1 cols: " + m1Cols + + throw new ArgumentException("matrix m1 cols: " + m1Cols + " differs from matrix m2 cols: " + m2Cols); } if (m1Rows != m2Rows) { - throw new ArgumentException("matrix m1 rows: " + m1Rows + + throw new ArgumentException("matrix m1 rows: " + m1Rows + " differs from matrix m2 rows: " + m2Rows); } if (m1Cols != outputCols) { - throw new ArgumentException("matrix m1 cols: " + m1Cols + + throw new ArgumentException("matrix m1 cols: " + m1Cols + " differs from matrix output cols: " + outputCols); } if (m1Rows != outputRows) { - throw new ArgumentException("matrix m1 rows: " + m1Rows + + throw new ArgumentException("matrix m1 rows: " + m1Rows + " differs from matrix output rows: " + outputRows); } diff --git a/src/SharpLearning.Containers/Checks.cs b/src/SharpLearning.Containers/Checks.cs index c52ec76c..0cb91c58 100644 --- a/src/SharpLearning.Containers/Checks.cs +++ b/src/SharpLearning.Containers/Checks.cs @@ -36,7 +36,7 @@ public static void VerifyObservationsAndTargets(F64Matrix observations, double[] /// /// /// - public static void VerifyObservationsAndTargets(int observationsRowCount, + public static void VerifyObservationsAndTargets(int observationsRowCount, int observationsColumnCount, int targetLength) { VerifyObservations(observationsRowCount, observationsColumnCount); @@ -51,7 +51,7 @@ public static void VerifyObservationsAndTargets(int observationsRowCount, /// public static void VerifyObservations(int rowCount, int columnCount) { - if(rowCount == 0) + if (rowCount == 0) { throw new ArgumentException("Observations does not contain any rows"); } @@ -81,11 +81,11 @@ public static void VerifyTargets(int targetLength) /// public static void VerifyObservationsAndTargetsDimensions(int observationRowCount, int targetLength) { - if(observationRowCount != targetLength) + if (observationRowCount != targetLength) { - throw new ArgumentException($"Observations and targets mismatch." + + throw new ArgumentException($"Observations and targets mismatch." + $"Observations row count: {observationRowCount}, targets row count: {targetLength}"); - } + } } /// @@ -119,16 +119,16 @@ public static void VerifyIndices(int[] indices, F64Matrix observations, double[] public static void VerifyIndices(int[] indices, int observationRowCount, int targetLength) { var min = indices.Min(); - if(min < 0) + if (min < 0) { - throw new ArgumentException($"Indices contains negative " + + throw new ArgumentException($"Indices contains negative " + $"values: {string.Join(",", indices.Where(v => v < 0))}"); } var max = indices.Max(); if (max >= observationRowCount || max >= targetLength) { - throw new ArgumentException($"Indices contains elements exceeding the row count of observations and targets. " + + throw new ArgumentException($"Indices contains elements exceeding the row count of observations and targets. 
" + $"Indices Max: {max}, observations row count: {observationRowCount}, target length: {targetLength}"); } } diff --git a/src/SharpLearning.Containers/FloatingPointConversion.cs b/src/SharpLearning.Containers/FloatingPointConversion.cs index 1b4f479a..cfe13679 100644 --- a/src/SharpLearning.Containers/FloatingPointConversion.cs +++ b/src/SharpLearning.Containers/FloatingPointConversion.cs @@ -60,7 +60,7 @@ static double ParseAnyNumberStyle(string value) } else { - throw new ArgumentException($"Unable to parse \"{ value }\" to double"); + throw new ArgumentException($"Unable to parse \"{value}\" to double"); } } } diff --git a/src/SharpLearning.Containers/Matrices/F64Matrix.cs b/src/SharpLearning.Containers/Matrices/F64Matrix.cs index c6e578b2..cff8f816 100644 --- a/src/SharpLearning.Containers/Matrices/F64Matrix.cs +++ b/src/SharpLearning.Containers/Matrices/F64Matrix.cs @@ -35,7 +35,7 @@ public F64Matrix(double[] values, int rows, int cols) if (values.Length != rows * cols) { throw new ArgumentException("feature array length does not match row * cols"); } if (rows < 1) { throw new ArgumentException("matrix must have at least 1 row"); } if (cols < 1) { throw new ArgumentException("matrix must have at least 1 col"); } - + m_featureArray = values; RowCount = rows; ColumnCount = cols; diff --git a/src/SharpLearning.Containers/Matrices/F64MatrixExtensions.cs b/src/SharpLearning.Containers/Matrices/F64MatrixExtensions.cs index 54fb97d5..6620d7d0 100644 --- a/src/SharpLearning.Containers/Matrices/F64MatrixExtensions.cs +++ b/src/SharpLearning.Containers/Matrices/F64MatrixExtensions.cs @@ -146,13 +146,13 @@ public static F64Matrix CombineCols(this double[] v, F64Matrix m) { Array.Copy(v, i, features, combineIndex, 1); combineIndex += 1; - + var matrixIndex = i * m.ColumnCount; Array.Copy(matrixArray, matrixIndex, features, combineIndex, m.ColumnCount); combineIndex += m.ColumnCount; } - + return new F64Matrix(features, rows, cols); } diff --git a/src/SharpLearning.Containers/ObservationTargetSet.cs b/src/SharpLearning.Containers/ObservationTargetSet.cs index 2b1b5eb9..955a4051 100644 --- a/src/SharpLearning.Containers/ObservationTargetSet.cs +++ b/src/SharpLearning.Containers/ObservationTargetSet.cs @@ -25,10 +25,10 @@ public sealed class ObservationTargetSet : IEquatable /// /// public ObservationTargetSet(F64Matrix observations, double[] targets) - { + { Observations = observations ?? throw new ArgumentNullException(nameof(observations)); Targets = targets ?? 
throw new ArgumentNullException(nameof(targets)); - } + } /// /// @@ -50,7 +50,7 @@ public bool Equals(ObservationTargetSet other) /// public override bool Equals(object obj) { - if (obj is ObservationTargetSet other && this.Equals(other)) + if (obj is ObservationTargetSet other && this.Equals(other)) { return true; } diff --git a/src/SharpLearning.Containers/ProbabilityPrediction.cs b/src/SharpLearning.Containers/ProbabilityPrediction.cs index 6d580a37..d51d891c 100644 --- a/src/SharpLearning.Containers/ProbabilityPrediction.cs +++ b/src/SharpLearning.Containers/ProbabilityPrediction.cs @@ -38,10 +38,10 @@ public ProbabilityPrediction(double prediction, Dictionary proba /// public bool Equals(ProbabilityPrediction other) { - if(!Equal(this.Prediction, other.Prediction)) { return false; } + if (!Equal(this.Prediction, other.Prediction)) { return false; } if (this.Probabilities.Count != other.Probabilities.Count) { return false; } - - var zip = this.Probabilities.Zip(other.Probabilities, (t, o) => new {This = t, Other = o}); + + var zip = this.Probabilities.Zip(other.Probabilities, (t, o) => new { This = t, Other = o }); foreach (var item in zip) { if (item.This.Key != item.Other.Key) @@ -60,7 +60,7 @@ public bool Equals(ProbabilityPrediction other) /// public override bool Equals(object obj) { - if(obj is ProbabilityPrediction) + if (obj is ProbabilityPrediction) return Equals((ProbabilityPrediction)obj); return false; } @@ -101,7 +101,7 @@ public override int GetHashCode() bool Equal(double a, double b) { var diff = Math.Abs(a * m_tolerence); - if(Math.Abs(a - b) <= diff) + if (Math.Abs(a - b) <= diff) { return true; } diff --git a/src/SharpLearning.Containers/Views/F64MatrixPinnedPtr.cs b/src/SharpLearning.Containers/Views/F64MatrixPinnedPtr.cs index 00948f61..2ba9cb0b 100644 --- a/src/SharpLearning.Containers/Views/F64MatrixPinnedPtr.cs +++ b/src/SharpLearning.Containers/Views/F64MatrixPinnedPtr.cs @@ -27,7 +27,7 @@ public unsafe struct F64MatrixPinnedPtr : IDisposable public F64MatrixPinnedPtr(F64Matrix matrix) { if (matrix == null) { throw new ArgumentNullException("matrix"); } - + var data = matrix.Data(); m_handle = GCHandle.Alloc(data, GCHandleType.Pinned); m_ptr = (double*)m_handle.AddrOfPinnedObject().ToPointer(); @@ -40,8 +40,8 @@ public F64MatrixPinnedPtr(F64Matrix matrix) /// /// public F64MatrixView View() - { - return new F64MatrixView(m_ptr, m_rows, m_cols); + { + return new F64MatrixView(m_ptr, m_rows, m_cols); } /// diff --git a/src/SharpLearning.Containers/Views/F64VectorPinnedPtr.cs b/src/SharpLearning.Containers/Views/F64VectorPinnedPtr.cs index 984a7781..813dac0b 100644 --- a/src/SharpLearning.Containers/Views/F64VectorPinnedPtr.cs +++ b/src/SharpLearning.Containers/Views/F64VectorPinnedPtr.cs @@ -34,8 +34,8 @@ public F64VectorPinnedPtr(double[] v) /// /// public F64VectorView View() - { - return new F64VectorView(m_ptr, m_length); + { + return new F64VectorView(m_ptr, m_length); } /// diff --git a/src/SharpLearning.Containers/Views/F64VectorView.cs b/src/SharpLearning.Containers/Views/F64VectorView.cs index 5f2d21ec..a6b631e8 100644 --- a/src/SharpLearning.Containers/Views/F64VectorView.cs +++ b/src/SharpLearning.Containers/Views/F64VectorView.cs @@ -53,7 +53,7 @@ public F64VectorView View(Interval1D interval) { return new F64VectorView(GetSubViewDataPointer(interval), interval.Length); } - + double* GetSubViewDataPointer(Interval1D interval) { return m_ptr + interval.FromInclusive; diff --git a/src/SharpLearning.Containers/Views/Interval1D.cs 
b/src/SharpLearning.Containers/Views/Interval1D.cs index 9dc7607d..ea52ecdf 100644 --- a/src/SharpLearning.Containers/Views/Interval1D.cs +++ b/src/SharpLearning.Containers/Views/Interval1D.cs @@ -77,7 +77,7 @@ public static Interval1D Create(int fromInclusive, int toExclusive) /// public bool Equals(Interval1D other) { - return (this.FromInclusive == other.FromInclusive) && + return (this.FromInclusive == other.FromInclusive) && (this.ToExclusive == other.ToExclusive) && (this.Length == other.Length); } diff --git a/src/SharpLearning.Containers/Views/Interval2D.cs b/src/SharpLearning.Containers/Views/Interval2D.cs index e29d9713..307624df 100644 --- a/src/SharpLearning.Containers/Views/Interval2D.cs +++ b/src/SharpLearning.Containers/Views/Interval2D.cs @@ -71,10 +71,10 @@ public override bool Equals(object other) /// /// public static bool operator !=(Interval2D x, Interval2D y) - { - return !(x == y); + { + return !(x == y); } - + /// /// /// @@ -82,9 +82,9 @@ public override bool Equals(object other) /// /// public static bool operator ==(Interval2D x, Interval2D y) - { - return (x.Cols == y.Cols) && - (x.Rows== y.Rows); + { + return (x.Cols == y.Cols) && + (x.Rows == y.Rows); } /// diff --git a/src/SharpLearning.CrossValidation.Test/CrossValidationUtilitiesTest.cs b/src/SharpLearning.CrossValidation.Test/CrossValidationUtilitiesTest.cs index dfdaf508..f71a14b6 100644 --- a/src/SharpLearning.CrossValidation.Test/CrossValidationUtilitiesTest.cs +++ b/src/SharpLearning.CrossValidation.Test/CrossValidationUtilitiesTest.cs @@ -69,7 +69,7 @@ public void CrossValidationUtilities_GetKFoldCrossValidationIndexSets_Handle_Rem { var targets = new double[] { 1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3 }; var sampler = new StratifiedIndexSampler(seed: 242); - var actuals = CrossValidationUtilities.GetKFoldCrossValidationIndexSets(sampler, + var actuals = CrossValidationUtilities.GetKFoldCrossValidationIndexSets(sampler, foldCount: 4, targets: targets); var expecteds = new List<(int[] trainingIndices, int[] validationIndices)> diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointExtensionsTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointExtensionsTest.cs index 62d10ccd..29a9ebed 100644 --- a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointExtensionsTest.cs +++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointExtensionsTest.cs @@ -13,12 +13,12 @@ public class LearningCurvePointExtensionsTest [TestMethod] public void BiasVarianceLearningCurvePointExtensions_ToF64Matrix() { - var sut = new List { new LearningCurvePoint(10, 0.0, 1.0), + var sut = new List { new LearningCurvePoint(10, 0.0, 1.0), new LearningCurvePoint(100, 3.0, 8.0), new LearningCurvePoint(1000, 4.0, 4.0) }; var actual = sut.ToF64Matrix(); var expected = new F64Matrix(new double[] { 10, 0.0, 1.0, - 100, 3.0, 8.0, + 100, 3.0, 8.0, 1000, 4.0, 4.0 }, 3, 3); @@ -28,7 +28,7 @@ public void BiasVarianceLearningCurvePointExtensions_ToF64Matrix() [TestMethod] public void BiasVarianceLearningCurvePointExtensions_Write() { - var sut = new List { new LearningCurvePoint(10, 0.0, 1.0), + var sut = new List { new LearningCurvePoint(10, 0.0, 1.0), new LearningCurvePoint(100, 3.0, 8.0), new LearningCurvePoint(1000, 4.0, 4.0) }; var writer = new StringWriter(); diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointTest.cs index 
e195976c..14f85141 100644 --- a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointTest.cs +++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointTest.cs @@ -23,7 +23,7 @@ public void BiasVarianceLearningCurvePoint_Equals() Assert.IsFalse(sut.Equals(notEqual1)); Assert.IsTrue(sut != notEqual1); Assert.IsFalse(sut == notEqual1); - + Assert.IsFalse(sut.Equals(notEqual2)); Assert.IsTrue(sut != notEqual2); Assert.IsFalse(sut == notEqual2); diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvesCalculatorTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvesCalculatorTest.cs index b5097028..dc24eca5 100644 --- a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvesCalculatorTest.cs +++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvesCalculatorTest.cs @@ -18,8 +18,8 @@ public void LearningCurvesCalculator_Calculate() { var sut = new LearningCurvesCalculator( new RandomTrainingTestIndexSplitter(0.8, 42), - new RandomIndexSampler(42), - new MeanSquaredErrorRegressionMetric(), + new RandomIndexSampler(42), + new MeanSquaredErrorRegressionMetric(), new double[] { 0.2, 0.8 }); var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); @@ -29,10 +29,10 @@ public void LearningCurvesCalculator_Calculate() var expected = new List() { - new LearningCurvePoint(32, 0, 0.141565953928265), + new LearningCurvePoint(32, 0, 0.141565953928265), new LearningCurvePoint(128, 0.0, 0.068970597423950036) }; - + CollectionAssert.AreEqual(expected, actual); } @@ -40,11 +40,11 @@ public void LearningCurvesCalculator_Calculate() public void LearningCurvesCalculator_Calculate_Indices_Provided() { var splitter = new RandomTrainingTestIndexSplitter(0.8, 42); - + var sut = new LearningCurvesCalculator( - splitter, + splitter, new RandomIndexSampler(42), - new MeanSquaredErrorRegressionMetric(), + new MeanSquaredErrorRegressionMetric(), new double[] { 0.2, 0.8 }); var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); @@ -55,7 +55,7 @@ public void LearningCurvesCalculator_Calculate_Indices_Provided() var expected = new List() { - new LearningCurvePoint(32, 0, 0.141565953928265), + new LearningCurvePoint(32, 0, 0.141565953928265), new LearningCurvePoint(128, 0.0, 0.068970597423950036) }; @@ -69,8 +69,8 @@ public void LearningCurvesCalculator_Calculate_Metric_Null() new LearningCurvesCalculator( new RandomTrainingTestIndexSplitter(0.8, 42), new RandomIndexSampler(42), - null, - new double[] { 0.2, 0.8 } ); + null, + new double[] { 0.2, 0.8 }); } [TestMethod] @@ -80,7 +80,7 @@ public void LearningCurvesCalculator_Calculate_Sample_Percentages_Null() new LearningCurvesCalculator( new RandomTrainingTestIndexSplitter(0.8, 42), new RandomIndexSampler(42), - new MeanSquaredErrorRegressionMetric(), + new MeanSquaredErrorRegressionMetric(), null); } @@ -91,7 +91,7 @@ public void LearningCurvesCalculator_Calculate_Sample_Percentages_Empty() new LearningCurvesCalculator( new RandomTrainingTestIndexSplitter(0.8, 42), new RandomIndexSampler(42), - new MeanSquaredErrorRegressionMetric(), + new MeanSquaredErrorRegressionMetric(), new double[] { }); } @@ -102,7 +102,7 @@ public void LearningCurvesCalculator_Calculate_Sample_Percentage_Too_Low() var sut = new LearningCurvesCalculator( new RandomTrainingTestIndexSplitter(0.8, 42), new RandomIndexSampler(42), - new MeanSquaredErrorRegressionMetric(), + new MeanSquaredErrorRegressionMetric(), new double[] { 0.0, 0.8 }); var observations 
= new F64Matrix(10, 10); @@ -119,7 +119,7 @@ public void LearningCurvesCalculator_Calculate_Sample_Percentage_Too_High() var sut = new LearningCurvesCalculator( new RandomTrainingTestIndexSplitter(0.8, 42), new RandomIndexSampler(42), - new MeanSquaredErrorRegressionMetric(), + new MeanSquaredErrorRegressionMetric(), new double[] { 1.1, 0.8 }); var observations = new F64Matrix(10, 10); @@ -127,6 +127,6 @@ public void LearningCurvesCalculator_Calculate_Sample_Percentage_Too_High() sut.Calculate(new RegressionDecisionTreeLearner(), observations, targets); - } + } } } diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/NoShuffleLearningCurvesCalculatorTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/NoShuffleLearningCurvesCalculatorTest.cs index d13c6746..419ca44d 100644 --- a/src/SharpLearning.CrossValidation.Test/LearningCurves/NoShuffleLearningCurvesCalculatorTest.cs +++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/NoShuffleLearningCurvesCalculatorTest.cs @@ -13,9 +13,9 @@ public class NoShuffleLearningCurvesCalculatorTest public void NoShuffleLearningCurvesCalculator_Calculate() { var sut = new NoShuffleLearningCurvesCalculator( - new MeanSquaredErrorRegressionMetric(), - new double[] { 0.2, 0.8 }, - 0.8 ); + new MeanSquaredErrorRegressionMetric(), + new double[] { 0.2, 0.8 }, + 0.8); var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); @@ -24,10 +24,10 @@ public void NoShuffleLearningCurvesCalculator_Calculate() var expected = new List() { - new LearningCurvePoint(32, 0, 0.12874833873980004), + new LearningCurvePoint(32, 0, 0.12874833873980004), new LearningCurvePoint(128, 0.0, 0.067720786718774989) }; - + CollectionAssert.AreEqual(expected, actual); } } diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/RandomLearningCurvesCalculatorTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/RandomLearningCurvesCalculatorTest.cs index 90b3ea98..0350564a 100644 --- a/src/SharpLearning.CrossValidation.Test/LearningCurves/RandomLearningCurvesCalculatorTest.cs +++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/RandomLearningCurvesCalculatorTest.cs @@ -13,7 +13,7 @@ public class RandomLearningCurvesCalculatorTest public void RandomLearningCurvesCalculator_Calculate() { var sut = new RandomShuffleLearningCurvesCalculator( - new MeanSquaredErrorRegressionMetric(), + new MeanSquaredErrorRegressionMetric(), new double[] { 0.2, 0.8 }, 0.8, 42, 5); @@ -24,7 +24,7 @@ public void RandomLearningCurvesCalculator_Calculate() var expected = new List() { - new LearningCurvePoint(32, 0, 0.141565953928265), + new LearningCurvePoint(32, 0, 0.141565953928265), new LearningCurvePoint(128, 0.0, 0.068970597423950036) }; diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/StratifiedLearningCurvesCalculatorTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/StratifiedLearningCurvesCalculatorTest.cs index f20225e3..0aecbff0 100644 --- a/src/SharpLearning.CrossValidation.Test/LearningCurves/StratifiedLearningCurvesCalculatorTest.cs +++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/StratifiedLearningCurvesCalculatorTest.cs @@ -14,7 +14,7 @@ public void StratifiedLearningCurvesCalculator_Calculate() { var sut = new StratifiedLearningCurvesCalculator( new TotalErrorClassificationMetric(), - new double[] { 0.2, 0.8 }, + new double[] { 0.2, 0.8 }, 0.8, 5, 42); var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); @@ -24,7 +24,7 @@ public void 
StratifiedLearningCurvesCalculator_Calculate() var expected = new List() { - new LearningCurvePoint(4, 0, 0.39999999999999997), + new LearningCurvePoint(4, 0, 0.39999999999999997), new LearningCurvePoint(16, 0.0625, 0.33333333333333331) }; diff --git a/src/SharpLearning.CrossValidation.Test/Samplers/StratifiedIndexSamplerTest.cs b/src/SharpLearning.CrossValidation.Test/Samplers/StratifiedIndexSamplerTest.cs index f85d8445..2785b8cd 100644 --- a/src/SharpLearning.CrossValidation.Test/Samplers/StratifiedIndexSamplerTest.cs +++ b/src/SharpLearning.CrossValidation.Test/Samplers/StratifiedIndexSamplerTest.cs @@ -17,7 +17,7 @@ public void StratifiedIndexSampler_Sample_Returned_SampleSize_Differs_From_Speci var actual = sut.Sample(values, trainingSize); var expected = new int[] { 4142, 2349, 3748, 491, 1141, 3922, 959, 3975, 3257, 3711, 1788, 1649, 19, 1626, 994, 4139, 1972, 72, 1940, 2231, 4164, 84, 3402, 4076, 3866, 1864, 2907, 1877, 2579, 2185, 2848, 1755, 3547, 2299, 3307, 708, 27, 2777, 1712, 2816, 621, 3528, 2329, 2291, 2178, 2074, 3381, 3508, 3512, 167, 1785, 4265, 521, 2817, 1083, 1308, 1035, 3304, 4235, 1132, 257, 2434, 3225, 3930, 2068, 3429, 2373, 1074, 3929, 2951, 3498, 1968, 824, 3090, 2176, 614, 1650, 2385, 4032, 3926, 2124, 1691, 2738, 206, 1256, 4063, 2498, 1324, 702, 2336, 2788, 2866, 8, 2785, 1714, 2512, 4085, 2319, 1355, 182, 45, 348, 3800, 2139, 2098, 1660, 2634, 2430, 3670, 3961, 1813, 91, 2025, 562, 873, 317, 4148, 1753, 2686, 3318, 1187, 379, 2099, 1045, 35, 4237, 2729, 3361, 1881, 1299, 4038, 800, 3608, 892, 3170, 4206, 3449, 529, 4161, 3885, 3598, 1703, 273, 3265, 1792, 1957, 904, 3591, 1987, 2397, 2642, 4147, 4290, 778, 1961, 4084, 142, 4249, 3967, 3439, 2888, 3422, 3467, 1850, 3338, 590, 4221, 1999, 309, 2063, 2952, 1690, 1212, 154, 1802, 2535, 2173, 4014, 1893, 1047, 3232, 954, 1988, 3344, 1210, 488, 4192, 2957, 1423, 1694, 1667, 3169, 1899, 3731, 3943, 822, 2157, 425, 513, 3505, 2739, 1075, 1147, 953, 865, 1002, 489, 3662, 480, 2877, 4041, 3321, 1887, 1816, 1795, 1513, 3631, 1057, 3647, 2055, 2270, 1658, 769, 2174, 3940, 1282, 1402, 1267, 3258, 549, 2862, 2559, 3682, 1158, 1674, 4162, 4167, 3248, 1333, 1164, 3237, 452, 2228, 2060, 2991, 352, 707, 3790, 3996, 2469, 3728, 2429, 3200, 2086, 525, 1807, 340, 2306, 1220, 2900, 1381, 3339, 4328, 3078, 2059, 737, 3649, 1322, 3155, 1676, 1470, 1884, 582, 2318, 490, 4306, 2245, 3040, 721, 3087, 2954, 1736, 2140, 4102, 4207, 1283, 2338, 66, 1953, 3896, 4337, 2204, 4318, 3298, 691, 1292, 3538, 4190, 3322, 3665, 3088, 3629, 3796, 1538, 1991, 3581, 2409, 34, 290, 826, 2996, 891, 3601, 1610, 3576, 4303, 907, 69, 622, 2198, 2743, 2081, 2687, 1070, 255, 920, 2497, 2927, 3700, 1101, 4214, 475, 1411, 1710, 237, 1044, 4009, 3149, 2189, 1895, 2852, 1915, 1853, 482, 2520, 287, 952, 3362, 1822, 860, 2452, 1334, 632, 2708, 3678, 3802, 2598, 185, 1717, 3272, 3317, 2353, 2104, 2017, 3945, 3899, 781, 3936, 95, 227, 2045, 3099, 1285, 2466, 2327, 849, 1376, 57, 3857, 2717, 4295, 2110, 514, 3260, 1870, 3955, 4277, 2158, 998, 3732, 187, 1739, 1631, 81, 2076, 1448, 2310, 2527, 2402, 1389, 277, 1539, 1084, 1838, 1599, 2556, 1699, 2644, 1369, 2123, 4300, 785, 4215, 1190, 3808, 2333, 1645, 1298, 2978, 48, 2965, 631, 2905, 3008, 2849, 3104, 3977, 964, 88, 2342, 2998, 3035, 620, 4170, 3396, 1994, 1770, 2226, 3282, 2828, 3972, 164, 3842, 1600, 3687, 3993, 1826, 4115, 1555, 78, 4196, 2416, 3294, 1495, 1161, 2184, 1151, 1008, 4008, 624, 280, 1442, 3814, 2300, 4135, 2833, 3365, 1825, 304, 1549, 3236, 2127, 874, 3376, 476, 3825, 4272, 2675, 2393, 4006, 97, 
1076, 2274, 4198, 3020, 2142, 690, 2056, 613, 4193, 2621, 3256, 4271, 1095, 558, 2539, 391, 2269, 1956, 4248, 1430, 969, 1275, 4114, 4176, 2201, 2919, 2472, 4331, 2845, 2961, 1683, 107, 62, 2939, 395, 1457, 1933, 1574, 991, 3716, 282, 2610, 3898, 2678, 315, 3510, 3540, 4163, 3431, 1077, 3671, 3988, 1794, 605, 3759, 3003, 3764, 1607, 3077, 4105, 1487, 265, 2183, 677, 619, 1604, 3657, 349, 1535, 1030, 220, 2912, 1590, 2511, 3474, 2553, 2771, 4282, 3427, 3182, 3089, 2005, 1184, 3558, 112, 1670, 1992, 193, 3963, 186, 817, 2193, 486, 1380, 996, 2747, 1111, 1260, 1273, 1758, 989, 2404, 3807, 951, 4281, 3763, 4112, 671, 1911, 3680, 2537, 4051, 2006, 2617, 2297, 1537, 2451, 460, 3791, 1350, 1265, 136, 3475, 2859, 2654, 1756, 2572, 893, 2896, 398, 841, 2917, 3784, 971, 2070, 2843, 537, 2458, 2526, 3883, 1399, 3542, 3618, 4083, 146, 2531, 1152, 1178, 557, 2215, 4074, 245, 625, 3775, 3829, 1352, 414, 2084, 909, 923, 3518, 4111, 3127, 4274, 3919, 3309, 3401, 3571, 200, 2989, 680, 4133, 3220, 980, 2474, 2376, 1119, 31, 848, 3727, 1115, 46, 2280, 3548, 4219, 788, 2591, 699, 3049, 2461, 1611, 673, 3861, 2895, 2775, 2427, 3803, 3771, 1004, 3215, 357, 1135, 2732, 121, 137, 1979, 3184, 1233, 2324, 3108, 1449, 575, 2030, 264, 3738, 1559, 3388, 386, 3120, 141, 2243, 2296, 4183, 552, 1447, 2716, 80, 3543, 637, 3357, 3147, 1521, 1668, 1941, 2567, 1345, 1806, 941, 216, 267, 2212, 2425, 4095, 1441, 3479, 3327, 2061, 949, 917, 2149, 1323, 2138, 1320, 2044, 4043, 2668, 2445, 2894, 4201, 4055, 3654, 2641, 3000, 1312, 3044, 3489, 913, 2934, 2799, 1981, 2633, 1722, 3964, 1520, 2383, 4308, 881, 389, 1473, 431, 3161, 417, 3473, 2855, 1848, 2540, 629, 1993, 2477, 1909, 3356, 2563, 3643, 254, 295, 2087, 2569, 4070, 2146, 2780, 171, 2476, 3074, 985, 3234, 1761, 3092, 3292, 1664, 970, 2873, 3059, 3164, 3394, 958, 1854, 184, 1426, 4266, 2013, 2518, 2936, 947, 4154, 3623, 790, 709, 3148, 342, 87, 286, 1396, 3756, 4000, 1277, 743, 1889, 858, 1228, 1237, 2661, 3912, 1340, 1338, 1133, 1209, 940, 1844, 4100, 2229, 642, 4125, 197, 2568, 1916, 288, 1040, 3290, 127, 3895, 1053, 2592, 337, 3768, 744, 843, 3572, 2449, 2740, 576, 3666, 2763, 3163, 4058, 864, 1789, 41, 3371, 4024, 2757, 593, 1723, 559, 711, 3392, 1335, 1901, 3293, 783, 22, 512, 3551, 1954, 332, 2673, 3140, 2372, 99, 50, 4317, 1476, 2340, 311, 3855, 3713, 70, 2665, 3255, 978, 472, 3602, 1268, 1939, 2772, 2159, 1088, 1180, 3668, 1518, 961, 2317, 1852, 1033, 229, 155, 3411, 74, 9, 3801, 846, 1558, 1230, 2517, 2627, 1815, 713, 524, 224, 3698, 1276, 2721, 712, 4175, 2153, 1266, 704, 211, 1894, 2248, 299, 4007, 1917, 4210, 4257, 2190, 1173, 173, 867, 1114, 594, 4245, 2710, 4289, 682, 3766, 3664, 3880, 2723, 3387, 2982, 840, 948, 2924, 808, 3188, 1205, 4275, 2108, 3884, 875, 844, 1287, 2949, 1424, 2390, 1702, 68, 2188, 623, 2696, 1890, 2255, 3788, 3726, 2399, 2295, 1445, 4217, 93, 2966, 2211, 2335, 4117, 1337, 2493, 1089, 2605, 1440, 3950, 1486, 3279, 2679, 1963, 1727, 921, 166, 2337, 2988, 1841, 2525, 3441, 2114, 3533, 1819, 3134, 138, 1386, 663, 2334, 1774, 2358, 3203, 2918, 2048, 3377, 816, 1185, 1686, 3204, 2489, 3621, 2597, 2529, 1851, 3015, 2118, 1409, 291, 1571, 1977, 2850, 3364, 3342, 2020, 3289, 2768, 943, 4232, 3055, 679, 3103, 2288, 818, 2552, 2906, 694, 3106, 1163, 274, 2347, 2547, 1504, 1368, 4001, 3034, 1242, 1980, 3938, 269, 1552, 2486, 1639, 2302, 2847, 3983, 1061, 553, 872, 2870, 3793, 1665, 1405, 3837, 2698, 230, 4046, 1693, 829, 3806, 2267, 551, 413, 4015, 3516, 4242, 3544, 2741, 3400, 2346, 589, 2967, 2018, 2116, 2643, 4059, 1700, 2762, 1244, 3445, 556, 
1969, 1643, 1860, 3329, 1379, 4069, 383, 2712, 2913, 4118, 736, 2284, 89, 2515, 1054, 3897, 308, 2614, 3744, 1657, 1454, 3016, 10, 1892, 2470, 2911, 2233, 1750, 2433, 3123, 2672, 3363, 1608, 4211, 4019, 1493, 3588, 4138, 3970, 2692, 2823, 153, 195, 2036, 3320, 2604, 3254, 2639, 2844, 462, 1817, 2727, 1908, 4025, 355, 1492, 1677, 2031, 240, 1082, 2481, 2713, 3778, 449, 2179, 1560, 3369, 3247, 1046, 1264, 3849, 2196, 678, 3965, 2199, 950, 1485, 2285, 660, 2412, 4302, 3378, 3299, 140, 2758, 1776, 4262, 1377, 2128, 1246, 972, 2883, 2033, 439, 2801, 1295, 2680, 3415, 851, 1174, 495, 2683, 2887, 2787, 4039, 1336, 2622, 115, 2938, 723, 2239, 3221, 1462, 3725, 364, 3270, 3277, 2945, 3300, 77, 71, 571, 791, 1016, 3001, 1067, 484, 3535, 2956, 1107, 4228, 468, 1704, 1136, 1587, 1627, 4316, 4153, 3151, 4229, 329, 129, 3050, 2523, 236, 2699, 1784, 1943, 1926, 2322, 1591, 3974, 2008, 3350, 1128, 1867, 3202, 2885, 689, 2403, 4054, 3207, 252, 2027, 3079, 259, 1022, 1155, 4255, 2115, 1092, 3677, 205, 1066, 2305, 3167, 1735, 3537, 3811, 3660, 415, 2443, 3550, 1509, 1347, 1208, 1524, 2629, 1071, 4090, 2533, 504, 684, 3931, 3721, 2148, 2439, 3969, 1630, 3080, 2028, 1461, 3968, 4044, 4010, 3570, 2779, 2869, 1400, 956, 1450, 1006, 1421, 100, 3509, 3444, 3081, 3159, 1063, 435, 3372, 3949, 1038, 942, 3410, 1105, 3160, 1496, 448, 3266, 1882, 1913, 3005, 207, 202, 2356, 2815, 2868, 1395, 3737, 2609, 3874, 4268, 827, 2073, 2928, 2759, 1573, 2964, 1229, 1090, 2976, 60, 3776, 871, 1990, 1013, 1935, 1453, 3069, 3351, 754, 3335, 3900, 2812, 2014, 527, 1781, 2197, 4223, 1603, 667, 1948, 1169, 3333, 2379, 1097, 2406, 3242, 2364, 4327, 960, 4052, 3070, 3171, 43, 3146, 15, 2119, 915, 1339, 2789, 3417, 3882, 2237, 3925, 2265, 3374, 1695, 990, 390, 1344, 175, 3868, 4239, 511, 3039, 2822, 1842, 3506, 323, 2902, 3853, 3717, 3259, 3472, 2880, 1581, 1217, 2864, 1127, 3288, 1873, 899, 898, 300, 1213, 3821, 4240, 260, 306, 3767, 343, 1569, 2656, 3302, 543, 4110, 3336, 1498, 2867, 4027, 1897, 3420, 3249, 857, 2890, 540, 3892, 1942, 292, 3944, 1417, 2422, 4332, 1532, 4023, 3928, 163, 1831, 3075, 2398, 2745, 1832, 4103, 760, 2415, 3086, 724, 2112, 3646, 1139, 3463, 1227, 1291, 2182, 1069, 1891, 3951, 1099, 1986, 2624, 131, 2923, 3109, 4047, 2690, 3098, 3638, 2152, 2684, 1888, 2369, 2078, 369, 3848, 40, 597, 4078, 3111, 3560, 3286, 4042, 3832, 2223, 3575, 2205, 4002, 1397, 2522, 502, 1018, 16, 1706, 3754, 3617, 2287, 281, 768, 1536, 2003, 2156, 3971, 3223, 579, 443, 2990, 4309, 2636, 2459, 1602, 2806, 3651, 1920, 307, 1563, 1708, 2155, 4149, 1224, 578, 3128, 272, 61, 2463, 1557, 1586, 2725, 3559, 3877, 305, 2899, 2271, 3587, 885, 832, 2583, 3979, 1740, 94, 3273, 2903, 2532, 159, 974, 3046, 301, 2100, 4195, 633, 1698, 3455, 3491, 1512, 158, 2619, 3152, 3458, 3706, 3641, 555, 1625, 3653, 3150, 3391, 1235, 3424, 696, 1517, 3244, 2647, 2835, 2160, 428, 4065, 3863, 2851, 3568, 3522, 2382, 3060, 239, 2825, 210, 2345, 4202, 2561, 4216, 859, 842, 1293, 1490, 1503, 4312, 531, 2776, 76, 2857, 3947, 675, 3699, 2021, 1556, 3007, 868, 3426, 1692, 538, 1820, 2734, 1742, 1588, 1108, 132, 419, 1112, 963, 58, 587, 3650, 1697, 2703, 2247, 2141, 812, 3154, 2431, 1106, 988, 3830, 2947, 4269, 3071, 2753, 4173, 1252, 1374, 1238, 3689, 518, 4087, 261, 3121, 1104, 726, 493, 3819, 1310, 3162, 3730, 3579, 2077, 3724, 2293, 1491, 3986, 749, 2861, 1634, 1177, 4151, 748, 1836, 600, 501, 1866, 454, 919, 1463, 346, 3124, 2882, 4286, 2039, 2090, 2050, 2660, 3685, 1189, 793, 3524, 3817, 196, 4258, 1165, 693, 2676, 1005, 29, 4022, 3253, 747, 3032, 2884, 2282, 1839, 
2246, 1673, 412, 233, 3345, 876, 2999, 3027, 1910, 2418, 973, 925, 2731, 2797, 1522, 2932, 4321, 1170, 1767, 1029, 3194, 1269, 574, 2186, 1055, 55, 2163, 2368, 2586, 2213, 3352, 1321, 3982, 1800, 2344, 118, 2782, 1620, 247, 3408, 588, 3824, 1437, 2811, 4155, 3189, 3789, 3101, 453, 3423, 3927, 2126, 2222, 1117, 3962, 416, 2821, 397, 105, 1605, 373, 3368, 347, 3139, 1902, 3199, 3390, 1744, 1745, 3612, 3751, 2819, 3067, 310, 4064, 116, 1730, 2203, 1366, 2824, 1682, 1803, 3191, 835, 4106, 1856, 473, 3419, 820, 3860, 368, 2092, 370, 2914, 3978, 3578, 3942, 3703, 1898, 3315, 2813, 2066, 2264, 2071, 1263, 930, 2790, 4314, 365, 918, 2394, 3058, 4253, 483, 779, 3520, 3135, 719, 1124, 2970, 3483, 761, 981, 2351, 1192, 608, 1460, 283, 1014, 782, 1049, 1946, 2473, 3712, 1720, 668, 646, 3457, 3063, 120, 955, 3428, 2132, 2856, 1970, 2468, 2004, 1859, 1259, 2263, 3736, 1429, 2935, 1, 3053, 1313, 1927, 1279, 3241, 2802, 2575, 539, 4037, 345, 922, 914, 3403, 1499, 492, 3306, 962, 758, 4319, 1064, 847, 987, 2863, 852, 3592, 2091, 2467, 698, 3580, 759, 1154, 111, 1247, 1357, 765, 362, 3747, 2107, 3795, 2578, 2874, 2820, 3263, 1965, 3466, 526, 1314, 1593, 2625, 3476, 1923, 2652, 1583, 1166, 1373, 3656, 1718, 3504, 1042, 507, 1086, 2219, 481, 1636, 547, 1797, 1474, 1515, 1214, 4073, 3168, 647, 4124, 1905, 328, 313, 455, 4261, 1614, 2414, 2460, 862, 1116, 3107, 3239, 3923, 1930, 814, 968, 53, 1978, 3511, 3296, 130, 2719, 3676, 3297, 451, 209, 523, 1594, 1544, 536, 2257, 3939, 2343, 946, 3611, 2480, 410, 1465, 1403, 2735, 3658, 2102, 1102, 3393, 740, 149, 774, 384, 1134, 4322, 3526, 3780, 44, 3347, 4168, 3663, 380, 738, 1949, 2986, 2594, 2596, 3555, 3905, 1148, 3573, 4121, 3733, 361, 2331, 2281, 1865, 3691, 2450, 2560, 1080, 3385, 2475, 2765, 2192, 1663, 3770, 151, 2437, 545, 1031, 2750, 789, 1540, 4005, 14, 3856, 1353, 4077, 1541, 2700, 3752, 4236, 2016, 2435, 567, 2145, 2380, 2253, 1584, 341, 144, 1828, 2218, 464, 3952, 2530, 4050, 2653, 1651, 4049, 1219, 3820, 688, 1153, 2507, 3994, 1011, 3840, 2447, 2499, 2655, 931, 1225, 797, 3541, 1160, 2728, 3443, 3230, 2941, 1642, 2122, 3117, 1255, 3718, 2632, 2058, 3469, 2240, 3635, 3757, 2606, 3619, 4034, 3893, 1309, 2428, 3143, 2386, 246, 1751, 276, 653, 572, 1619, 3783, 3211, 1638, 997, 2348, 1759, 1545, 831, 715, 2088, 639, 1936, 2805, 1547, 3308, 1516, 3370, 3799, 2541, 4293, 2549, 1249, 591, 344, 497, 3816, 4200, 1829, 3622, 2053, 1659, 3818, 3701, 1779, 1705, 1526, 1501, 1221, 2479, 1236, 4031, 1325, 266, 1343, 1875, 3987, 4254, 3758, 1827, 2309, 1715, 3667, 3231, 3054, 3606, 2166, 863, 1034, 3406, 977, 2876, 4107, 2769, 1003, 1329, 3822, 4157, 2315, 1998, 3642, 143, 2357, 1223, 3407, 3847, 1007, 1138, 1478, 3486, 3648, 652, 935, 2872, 2411, 929, 2814, 312, 3946, 2279, 3523, 1052, 3681, 1514, 2242, 1749, 4109, 174, 1422, 666, 1130, 2377, 2304, 2516, 4233, 3341, 3072, 3084, 2326, 1931, 895, 1443, 1947, 3141, 1623, 393, 456, 2047, 4298, 3218, 2035, 1528, 2022, 28, 1195, 902, 2766, 757, 3769, 4113, 1477, 1937, 3453, 2500, 3692, 2754, 2577, 1419, 1595, 752, 1196, 683, 3166, 3233, 1835, 2946, 1989, 2701, 2355, 2438, 983, 2664, 3144, 2464, 3655, 900, 324, 2808, 3129, 792, 3465, 1707, 1364, 4230, 2325, 500, 3021, 1732, 1534, 1728, 1734, 4263, 1302, 4241, 3267, 1762, 2909, 117, 1207, 1796, 1724, 1356, 2650, 1056, 2041, 1438, 199, 4199, 3582, 2958, 1763, 3531, 630, 651, 853, 1059, 4304, 3029, 336, 2940, 3278, 1472, 3122, 2677, 108, 2510, 3684, 4071, 2072, 408, 2220, 3261, 2682, 3061, 1469, 701, 3460, 1577, 385, 4159, 1315, 3890, 933, 3998, 2051, 1885, 2258, 1984, 3229, 
3462, 2216, 2446, 2440, 3615, 728, 3264, 2551, 2773, 2471, 3193, 1738, 3276, 2365, 1688, 607, 3513, 2544, 573, 3549, 1036, 3349, 2360, 3985, 258, 3834, 828, 63, 2972, 3430, 3114, 911, 4191, 1622, 2444, 2462, 516, 1330, 1900, 1824, 1823, 1764, 3082, 1678, 3073, 530, 461, 2854, 1019, 2841, 735, 2378, 3636, 2079, 3634, 1768, 2019, 4231, 1253, 4310, 192, 374, 3702, 1928, 2400, 4036, 2314, 2032, 755, 353, 2103, 4299, 106, 1439, 12, 469, 59, 3482, 2524, 1529, 1719, 2484, 2929, 3729, 561, 85, 2892, 2384, 1288, 560, 103, 225, 1258, 2052, 3941, 3610, 2278, 1270, 3901, 845, 2085, 2206, 2968, 1489, 423, 1319, 79, 3187, 2089, 700, 2691, 2170, 4119, 104, 2276, 3841, 2277, 3564, 4305, 1043, 1301, 2933, 3496, 98, 1863, 2693, 1226, 3980, 1281, 2506, 1186, 4156, 1872, 1425, 3023, 4251, 4227, 278, 330, 3026, 3953, 2640, 3984, 705, 672, 1985, 351, 42, 4144, 2681, 2979, 1041, 1289, 839, 3490, 3100, 1365, 1502, 3850, 4187, 2718, 4220, 52, 3991, 1427, 2948, 1721, 1243, 3494, 2908, 836, 1413, 2495, 2121, 4234, 1142, 889, 3281, 1615, 1211, 894, 1506, 2704, 3251, 1316, 3627, 602, 1129, 4252, 4094, 1248, 3201, 4288, 327, 3110, 4091, 1401, 3909, 3012, 2316, 3844, 1261, 1562, 3471, 1087, 2595, 1199, 3448, 1996, 3156, 3245, 110, 2075, 3226, 4326, 3976, 2977, 1466, 2097, 2663, 3093, 3252, 2543, 3585, 3567, 1951, 1847, 2794, 1546, 1406, 1168, 1601, 2810, 4045, 2268, 2037, 270, 741, 3459, 887, 542, 1020, 2915, 4020, 1065, 2167, 2514, 3056, 1543, 4116, 1997, 3018, 2029, 3679, 965, 430, 753, 2791, 2026, 3918, 1303, 1772, 3913, 3734, 183, 3862, 2298, 2054, 2080, 90, 3765, 1278, 3910, 3876, 231, 1383, 360, 746, 1039, 333, 3118, 3076, 4208, 4270, 4067, 4029, 3433, 599, 3246, 3002, 3870, 1629, 3644, 1840, 2930, 787, 2436, 1467, 3719, 3536, 2550, 2881, 3607, 1973, 3346, 3500, 3185, 243, 3854, 1641, 4278, 314, 906, 1484, 3033, 1203, 2603, 3873, 3451, 1131, 1311, 3960, 3145, 3596, 2082, 3989, 2974, 3495, 1861, 3586, 402, 4030, 1078, 1974, 1837, 3686, 3812, 356, 3115, 3311, 3584, 2860, 3502, 856, 3640, 3399, 2997, 2724, 3565, 1747, 1471, 208, 1464, 2043, 1482, 1765, 2244, 809, 2374, 1628, 2901, 1318, 2410, 648, 2538, 3358, 3740, 3785, 2838, 3212, 285, 4028, 3827, 834, 506, 3456, 1378, 2456, 1960, 1805, 2407, 3805, 2971, 1612, 3210, 250, 2558, 366, 2827, 3599, 1687, 2136, 3826, 3674, 2252, 2893, 2313, 3501, 617, 2260, 3186, 2846, 2350, 1179, 669, 2755, 3051, 1332, 3028, 1475, 1929, 3810, 232, 1500, 1648, 2392, 3291, 3589, 2283, 459, 2150, 3017, 101, 598, 1372, 4185, 1567, 903, 2194, 3530, 1613, 586, 2871, 2234, 585, 1348, 2582, 661, 928, 883, 718, 2002, 392, 64, 732, 325, 372, 3132, 897, 1726, 618, 1201, 2232, 1632, 1769, 4203, 775, 2208, 1869, 3038, 993, 1198, 3062, 4218, 1570, 2705, 3661, 3924, 2180, 2585, 2942, 2387, 485, 1771, 2113, 1167, 1390, 884, 3328, 470, 2616, 3271, 2129, 5, 334, 409, 503, 2626, 73, 2793, 1156, 4099, 253, 3190, 2925, 3091, 387, 1801, 957, 4225, 806, 180, 3183, 223, 1766, 3710, 3464, 2784, 2980, 1878, 3845, 1955, 609, 3659, 249, 3753, 1254, 2886, 3383, 3337, 681, 2442, 807, 3556, 3, 322, 548, 2144, 1748, 3064, 218, 3180, 6, 638, 2658, 49, 3981, 563, 995, 2273, 128, 1200, 4166, 888, 2657, 1182, 1542, 3519, 742, 1616, 810, 4285, 3871, 982, 1428, 610, 3914, 1110, 795, 2803, 382, 1798, 1150, 878, 2809, 4143, 284, 4140, 3603, 4146, 2513, 2095, 1621, 819, 697, 2133, 194, 1058, 924, 804, 238, 4134, 1081, 24, 2937, 3762, 3858, 3935, 2842, 2829, 1060, 275, 2685, 2648, 4104, 833, 442, 2501, 1971, 706, 3478, 1812, 3047, 3197, 1671, 2508, 762, 2891, 1296, 1375, 528, 1306, 1408, 568, 2576, 3735, 3583, 1618, 463, 3514, 
1896, 3669, 3206, 1912, 2879, 4101, 4246, 2420, 3250, 3493, 595, 3958, 2395, 2361, 1392, 4158, 3196, 821, 3398, 4013, 2046, 47, 2635, 3175, 3037, 3354, 1576, 1468, 2154, 1009, 2878, 1050, 298, 1433, 109, 3413, 3714, 4048, 2707, 376, 1746, 2837, 2993, 976, 4120, 2381, 4177, 21, 2057, 1790, 4226, 3859, 3707, 4184, 367, 2042, 2832, 2831, 2354, 2646, 1689, 477, 2307, 796, 18, 25, 3715, 1773, 2426, 3779, 3720, 221, 444, 4061, 3386, 3176, 2105, 3179, 850, 1202, 3280, 3198, 3852, 4056, 3831, 2950, 4197, 427, 204, 2555, 945, 3990, 634, 157, 3628, 4130, 2311, 4057, 465, 268, 581, 3066, 3380, 1028, 256, 447, 2574, 799, 1216, 580, 1589, 771, 201, 3036, 4086, 441, 3313, 1606, 3452, 297, 710, 1787, 2767, 1510, 1958, 3360, 3045, 2214, 1183, 3177, 658, 1068, 1193, 445, 3879, 1171, 32, 1733, 401, 1415, 1149, 2007, 4256, 2483, 2359, 1327, 124, 936, 2960, 2352, 2818, 4150, 1349, 1793, 429, 1025, 2225, 294, 2200, 3025, 2238, 2396, 4097, 803, 3787, 2580, 3574, 3030, 1391, 2130, 3613, 1218, 3235, 3142, 2251, 3173, 645, 2289, 3323, 3604, 1370, 3835, 2628, 1479, 411, 4335, 2528, 359, 3786, 4284, 916, 1341, 1635, 3113, 564, 1157, 3052, 3694, 1551, 4213, 3324, 2332, 335, 1880, 1197, 1144, 1921, 1206, 798, 26, 2457, 3878, 420, 1554, 2241, 3881, 650, 717, 4128, 1012, 3224, 3083, 3774, 2505, 4259, 777, 3569, 440, 4250, 3331, 4160, 3130, 4165, 188, 1436, 458, 4081, 135, 2227, 3696, 3839, 3497, 3287, 3274, 2715, 535, 2147, 866, 3367, 1181, 3484, 659, 3174, 1952, 1328, 1446, 2834, 664, 2049, 4313, 1780, 319, 3085, 1085, 2804, 3425, 1783, 4283, 2454, 4291, 1021, 3435, 1393, 160, 2995, 601, 3723, 213, 3217, 1661, 1037, 986, 770, 479, 4194, 3932, 1966, 3843, 4016, 686, 3172, 3525, 1903, 3563, 3605, 3683, 2482, 1644, 3022, 2744, 2217, 3227, 4292, 1188, 1684, 2111, 1024, 1944, 908, 901, 4021, 2807, 2038, 730, 670, 2599, 4026, 3620, 2760, 2618, 1531, 3921, 2602, 1103, 649, 2733, 626, 3773, 2120, 674, 1519, 4260, 3412, 2034, 3552, 3755, 326, 4243, 2865, 499, 2992, 242, 3632, 3688, 3894, 577, 825, 2566, 2889, 2931, 2064, 1945, 3359, 1527, 1382, 1709, 2235, 399, 1280, 2649, 734, 3590, 604, 3095, 695, 2904, 0, 263, 3373, 3488, 3828, 1434, 2175, 3094, 2171, 1032, 36, 3920, 731, 3303, 3228, 3798, 1294, 1317, 975, 3208, 3438, 2465, 2023, 2613, 3010, 2737, 1879, 1862, 4108, 4267, 1656, 1575, 2573, 2826, 676, 39, 2083, 2959, 2736, 3872, 520, 406, 65, 685, 1284, 3440, 4334, 2488, 3869, 2593, 1782, 1159, 2590, 3112, 2984, 522, 1918, 1051, 3389, 2987, 2730, 1458, 1145, 1432, 1257, 2542, 3348, 424, 992, 203, 1655, 4224, 1711, 1786, 2256, 181, 1307, 3743, 92, 3695, 3432, 4204, 83, 2836, 3416, 2015, 641, 466, 3116, 3213, 1027, 2697, 3999, 2210, 248, 938, 2774, 1907, 339, 1010, 2720, 1232, 4189, 3102, 3454, 1646, 2638, 3708, 1596, 262, 1414, 33, 4330, 4096, 3887, 772, 2922, 241, 4186, 2536, 2985, 496, 2748, 4080, 2858, 2109, 3722, 4093, 2709, 2534, 3561, 1731, 2752, 508, 222, 2962, 1451, 1566, 4169, 2101, 4315, 3875, 3672, 3527, 2432, 3609, 1713, 122, 3597, 3554, 20 }; - + CollectionAssert.AreEqual(expected, actual); } diff --git a/src/SharpLearning.CrossValidation.Test/TimeSeries/TimeSeriesCrossValidationTest.cs b/src/SharpLearning.CrossValidation.Test/TimeSeries/TimeSeriesCrossValidationTest.cs index f7e33749..8ba4c8c2 100644 --- a/src/SharpLearning.CrossValidation.Test/TimeSeries/TimeSeriesCrossValidationTest.cs +++ b/src/SharpLearning.CrossValidation.Test/TimeSeries/TimeSeriesCrossValidationTest.cs @@ -26,7 +26,7 @@ public void TimeSeriesCrossValidation_Validate() Assert.AreEqual(0.098690664447830825, error, 0.00001); } - + [TestMethod] 
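// For context, a minimal usage sketch of the class under test. Parameter names follow the
// constructor shown later in this patch; the learner and metric are stand-ins used elsewhere
// in these tests; generic arguments, which this extract drops, are restored by assumption,
// and GetValidationTargets is assumed as the helper that aligns targets with the returned
// predictions:
//
//     var cv = new TimeSeriesCrossValidation<double>(
//         initialTrainingSize: 5, maxTrainingSetSize: 0, retrainInterval: 1);
//     var predictions = cv.Validate(
//         new RegressionDecisionTreeLearner(), observations, targets);
//     var error = new MeanSquaredErrorRegressionMetric().Error(
//         cv.GetValidationTargets(targets), predictions);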
public void TimeSeriesCrossValidation_Validate_MaxTrainingSetSize() { diff --git a/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestIndexSplitterExtensionsTest.cs b/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestIndexSplitterExtensionsTest.cs index 5319e0dd..43fe7566 100644 --- a/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestIndexSplitterExtensionsTest.cs +++ b/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestIndexSplitterExtensionsTest.cs @@ -26,16 +26,16 @@ public void TrainingTestIndexSplitterExtensions_SplitSet() var targets = new double[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }; var splitter = new NoShuffleTrainingTestIndexSplitter(0.6); - + var actual = splitter.SplitSet(observations, targets); var trainingIndices = Enumerable.Range(0, 6).ToArray(); var testIndices = Enumerable.Range(6, 4).ToArray(); var expected = new TrainingTestSetSplit( - new ObservationTargetSet((F64Matrix)observations.Rows(trainingIndices), + new ObservationTargetSet((F64Matrix)observations.Rows(trainingIndices), targets.GetIndices(trainingIndices)), - new ObservationTargetSet((F64Matrix)observations.Rows(testIndices), + new ObservationTargetSet((F64Matrix)observations.Rows(testIndices), targets.GetIndices(testIndices))); Assert.AreEqual(expected, actual); diff --git a/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestIndexSplitterTest.cs b/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestIndexSplitterTest.cs index a6080ddb..728cdc33 100644 --- a/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestIndexSplitterTest.cs +++ b/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestIndexSplitterTest.cs @@ -13,7 +13,7 @@ public void TrainingTestIndexSplitter_Split() { var sut = new TrainingTestIndexSplitter( new NoShuffleIndexSampler(), 0.8); - + var targets = new double[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }; var actual = sut.Split(targets); diff --git a/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestSetSplitTest.cs b/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestSetSplitTest.cs index af7c6599..f2e8db02 100644 --- a/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestSetSplitTest.cs +++ b/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestSetSplitTest.cs @@ -43,7 +43,7 @@ public void TrainingTestSetSplit_Equals() [ExpectedException(typeof(ArgumentNullException))] public void TrainingTestSetSplit_TrainingSet_Null() { - new TrainingTestSetSplit(null, + new TrainingTestSetSplit(null, new ObservationTargetSet(new F64Matrix(new double[] { 1, 2, 3, 4 }, 2, 2), new double[] { 1, 2 })); } diff --git a/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs b/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs index a976510f..4de7b2e1 100644 --- a/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs +++ b/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs @@ -28,7 +28,7 @@ public sealed class ContinuousMungeAugmentator /// Must be between 0.0 and 1.0. (Default is 0.2) /// Variance when sampling a new value for an augmented sample. 
(Default is 1.0) /// Seed for random augmentation - public ContinuousMungeAugmentator(double probabilityParameter=0.2, double localVariance=1.0, int seed = 432) + public ContinuousMungeAugmentator(double probabilityParameter = 0.2, double localVariance = 1.0, int seed = 432) { if (probabilityParameter > 1.0 || probabilityParameter < 0.0) { throw new ArgumentException("probabilityParameter must be between 0.0 and 1.0. Was: " + probabilityParameter); } @@ -73,14 +73,14 @@ public F64Matrix Agument(F64Matrix dataset) dataset.Row(f, candidate); var distance = GetDistance(sample, candidate); - if(distance < closestDistance) + if (distance < closestDistance) { closestDistance = distance; closestIndex = f; } } - if(closestIndex != -1) + if (closestIndex != -1) { dataset.Row(closestIndex, candidate); indicesVisited.Add(closestIndex); @@ -119,7 +119,7 @@ double SampleRandom(double mean, double std) return randNormal; } - + double GetDistance(double[] p, double[] q) { double distance = 0; diff --git a/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs b/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs index db3de9f4..b0d290e3 100644 --- a/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs +++ b/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs @@ -26,7 +26,7 @@ public sealed class NominalMungeAugmentator /// The probability that a feature will be altered with its nearest neighbour. /// Must be between 0.0 and 1.0. (Default is 0.2) /// Seed for random augmentation - public NominalMungeAugmentator(double probabilityParameter=0.2, int seed = 432) + public NominalMungeAugmentator(double probabilityParameter = 0.2, int seed = 432) { if (probabilityParameter > 1.0 || probabilityParameter < 0.0) { throw new ArgumentException("probabilityParameter must be between 0.0 and 1.0. 
Was: " + probabilityParameter); } @@ -69,14 +69,14 @@ public F64Matrix Agument(F64Matrix dataset) dataset.Row(f, candidate); var distance = GetHammingDistance(sample, candidate); - if(distance < closestDistance) + if (distance < closestDistance) { closestDistance = distance; closestIndex = f; } } - if(closestIndex != -1) + if (closestIndex != -1) { dataset.Row(closestIndex, candidate); indicesVisited.Add(closestIndex); @@ -92,7 +92,7 @@ public F64Matrix Agument(F64Matrix dataset) augmentation.At(j, h, candiateValue); augmentation.At(closestIndex, h, sampleValue); } - else + else { // keep values augmentation.At(j, h, sampleValue); @@ -104,7 +104,7 @@ public F64Matrix Agument(F64Matrix dataset) return augmentation; } - + double GetHammingDistance(double[] a, double[] b) { diff --git a/src/SharpLearning.CrossValidation/CrossValidationUtilities.cs b/src/SharpLearning.CrossValidation/CrossValidationUtilities.cs index dbf6ed33..07d81957 100644 --- a/src/SharpLearning.CrossValidation/CrossValidationUtilities.cs +++ b/src/SharpLearning.CrossValidation/CrossValidationUtilities.cs @@ -40,7 +40,7 @@ public static class CrossValidationUtilities var currentIndices = indices.ToArray(); var crossValidationIndexSets = new List<(int[] training, int[] validation)>(); - + for (int i = 0; i < foldCount; i++) { var lastFold = (i == foldCount - 1); diff --git a/src/SharpLearning.CrossValidation/CrossValidators/ICrossValidation.cs b/src/SharpLearning.CrossValidation/CrossValidators/ICrossValidation.cs index a63da6d8..3f8778bf 100644 --- a/src/SharpLearning.CrossValidation/CrossValidators/ICrossValidation.cs +++ b/src/SharpLearning.CrossValidation/CrossValidators/ICrossValidation.cs @@ -30,9 +30,9 @@ TPrediction[] CrossValidate(IIndexedLearner learner, /// /// void CrossValidate(IIndexedLearner learner, - F64Matrix observations, - double[] targets, - int[] crossValidationIndices, + F64Matrix observations, + double[] targets, + int[] crossValidationIndices, TPrediction[] crossValidatedPredictions); } } diff --git a/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvePointExtensions.cs b/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvePointExtensions.cs index ac99da7d..f021db9d 100644 --- a/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvePointExtensions.cs +++ b/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvePointExtensions.cs @@ -40,7 +40,7 @@ public static F64Matrix ToF64Matrix(this List points) /// /// /// - public static void Write(this List points, Func writer, + public static void Write(this List points, Func writer, char separator = CsvParser.DefaultDelimiter) { var columnNameToIndex = new Dictionary @@ -49,7 +49,7 @@ public static void Write(this List points, Func { "TrainingError", 1 }, { "ValidationError", 2 } }; - + points.ToF64Matrix() .EnumerateCsvRows(columnNameToIndex) .Write(writer, separator); diff --git a/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvesCalculator.cs b/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvesCalculator.cs index dffa5dd2..c5dfaaec 100644 --- a/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvesCalculator.cs +++ b/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvesCalculator.cs @@ -49,7 +49,7 @@ public LearningCurvesCalculator(ITrainingTestIndexSplitter trainingValid if (samplePercentages == null) { throw new ArgumentNullException("samplePercentages"); } if (samplePercentages.Length < 1) { throw new ArgumentException("SamplePercentages length must be at least 1"); } if 
(numberOfShufflesPrSample < 1) { throw new ArgumentNullException("numberOfShufflesPrSample must be at least 1"); } - + m_samplePercentages = samplePercentages; m_numberOfShufflesPrSample = numberOfShufflesPrSample; m_random = new Random(42); @@ -67,7 +67,7 @@ public List Calculate(IIndexedLearner learnerFa F64Matrix observations, double[] targets) { var trainingValidationIndices = m_trainingValidationIndexSplitter.Split(targets); - + return Calculate(learnerFactory, observations, targets, trainingValidationIndices.TrainingIndices, trainingValidationIndices.TestIndices); @@ -94,15 +94,15 @@ public List Calculate(IIndexedLearner learner, foreach (var samplePercentage in m_samplePercentages) { if (samplePercentage <= 0.0 || samplePercentage > 1.0) - { - throw new ArgumentException("Sample percentage must be larger than 0.0 and smaller than or equal to 1.0"); + { + throw new ArgumentException("Sample percentage must be larger than 0.0 and smaller than or equal to 1.0"); } var sampleSize = (int)Math.Round(samplePercentage * (double)trainingIndices.Length); if (sampleSize <= 0) - { - throw new ArgumentException("Sample percentage " + samplePercentage + - " too small for training set size " +trainingIndices.Length); + { + throw new ArgumentException("Sample percentage " + samplePercentage + + " too small for training set size " + trainingIndices.Length); } var trainError = 0.0; @@ -134,9 +134,9 @@ public List Calculate(IIndexedLearner learner, trainError = trainError / m_numberOfShufflesPrSample; validationError = validationError / m_numberOfShufflesPrSample; - + learningCurves.Add(new LearningCurvePoint(sampleSize, - trainError , validationError)); + trainError, validationError)); } return learningCurves; diff --git a/src/SharpLearning.CrossValidation/LearningCurves/NoShuffleLearningCurvesCalculator.cs b/src/SharpLearning.CrossValidation/LearningCurves/NoShuffleLearningCurvesCalculator.cs index eaaa668c..583e2fd2 100644 --- a/src/SharpLearning.CrossValidation/LearningCurves/NoShuffleLearningCurvesCalculator.cs +++ b/src/SharpLearning.CrossValidation/LearningCurves/NoShuffleLearningCurvesCalculator.cs @@ -19,7 +19,7 @@ namespace SharpLearning.CrossValidation.LearningCurves /// - Use more training samples. /// - Increase Regularization. /// - public sealed class NoShuffleLearningCurvesCalculator : + public sealed class NoShuffleLearningCurvesCalculator : LearningCurvesCalculator { /// diff --git a/src/SharpLearning.CrossValidation/LearningCurves/RandomShuffleLearningCurvesCalculator.cs b/src/SharpLearning.CrossValidation/LearningCurves/RandomShuffleLearningCurvesCalculator.cs index a07aa5f1..a55faefc 100644 --- a/src/SharpLearning.CrossValidation/LearningCurves/RandomShuffleLearningCurvesCalculator.cs +++ b/src/SharpLearning.CrossValidation/LearningCurves/RandomShuffleLearningCurvesCalculator.cs @@ -19,7 +19,7 @@ namespace SharpLearning.CrossValidation.LearningCurves /// - Use more training samples. /// - Increase Regularization. 
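// The remedies listed above are read off the gap between the training and validation
// curves that these calculators produce. A minimal sketch, using the four-argument
// constructor shown just below and the data-loading helper used by the tests earlier
// in this patch (generic arguments, dropped by this extract, are restored by assumption):
//
//     var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet();
//     var calculator = new RandomShuffleLearningCurvesCalculator<double>(
//         new MeanSquaredErrorRegressionMetric(),
//         samplePercentages: new double[] { 0.2, 0.8 },
//         trainingPercentage: 0.8,
//         numberOfShufflesPrSample: 5);
//     List<LearningCurvePoint> curve = calculator.Calculate(
//         new RegressionDecisionTreeLearner(), observations, targets);
//     // Each LearningCurvePoint holds SampleSize, TrainingError, ValidationError.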
/// - public sealed class RandomShuffleLearningCurvesCalculator : + public sealed class RandomShuffleLearningCurvesCalculator : LearningCurvesCalculator { /// @@ -33,7 +33,7 @@ public sealed class RandomShuffleLearningCurvesCalculator : /// training data used in each point of the learning curve /// Number of shuffles done at each sampling point /// Total percentage of data used for training - public RandomShuffleLearningCurvesCalculator(IMetric metric, double[] samplePercentages, + public RandomShuffleLearningCurvesCalculator(IMetric metric, double[] samplePercentages, double trainingPercentage, int numberOfShufflesPrSample = 5) : base(new RandomTrainingTestIndexSplitter(trainingPercentage), new RandomIndexSampler(), metric, samplePercentages, numberOfShufflesPrSample) diff --git a/src/SharpLearning.CrossValidation/LearningCurves/StratifiedLearningCurvesCalculator.cs b/src/SharpLearning.CrossValidation/LearningCurves/StratifiedLearningCurvesCalculator.cs index f37f55ea..aa96a6bf 100644 --- a/src/SharpLearning.CrossValidation/LearningCurves/StratifiedLearningCurvesCalculator.cs +++ b/src/SharpLearning.CrossValidation/LearningCurves/StratifiedLearningCurvesCalculator.cs @@ -19,7 +19,7 @@ namespace SharpLearning.CrossValidation.LearningCurves /// - Use more training samples. /// - Increase Regularization. /// - public sealed class StratifiedLearningCurvesCalculator : + public sealed class StratifiedLearningCurvesCalculator : LearningCurvesCalculator { /// diff --git a/src/SharpLearning.CrossValidation/Samplers/IIndexSampler.cs b/src/SharpLearning.CrossValidation/Samplers/IIndexSampler.cs index ce259d77..9bf8c384 100644 --- a/src/SharpLearning.CrossValidation/Samplers/IIndexSampler.cs +++ b/src/SharpLearning.CrossValidation/Samplers/IIndexSampler.cs @@ -14,7 +14,7 @@ public interface IIndexSampler /// /// int[] Sample(T[] data, int sampleSize); - + /// /// /// diff --git a/src/SharpLearning.CrossValidation/Samplers/NoShuffleIndexSampler.cs b/src/SharpLearning.CrossValidation/Samplers/NoShuffleIndexSampler.cs index 5c9810bf..91a8f6ba 100644 --- a/src/SharpLearning.CrossValidation/Samplers/NoShuffleIndexSampler.cs +++ b/src/SharpLearning.CrossValidation/Samplers/NoShuffleIndexSampler.cs @@ -20,10 +20,10 @@ public int[] Sample(T[] data, int sampleSize) { if (data.Length < sampleSize) { - throw new ArgumentException("Sample size " + sampleSize + + throw new ArgumentException("Sample size " + sampleSize + " is larger than data size " + data.Length); } - + return Enumerable.Range(0, sampleSize).ToArray(); } @@ -38,16 +38,16 @@ public int[] Sample(T[] data, int sampleSize, int[] dataIndices) { if (data.Length < sampleSize) { - throw new ArgumentException("Sample size " + sampleSize + + throw new ArgumentException("Sample size " + sampleSize + " is larger than data size " + data.Length); } if (data.Length < dataIndices.Length) { - throw new ArgumentException("dataIndice size " + dataIndices.Length + + throw new ArgumentException("dataIndices size " + dataIndices.Length + " is larger than data size " + data.Length); } - + return dataIndices.Take(sampleSize).ToArray(); } } diff --git a/src/SharpLearning.CrossValidation/Samplers/RandomIndexSampler.cs b/src/SharpLearning.CrossValidation/Samplers/RandomIndexSampler.cs index 62d6c877..b2877fd0 100644 --- a/src/SharpLearning.CrossValidation/Samplers/RandomIndexSampler.cs +++ b/src/SharpLearning.CrossValidation/Samplers/RandomIndexSampler.cs @@ -39,13 +39,13 @@ public int[] Sample(T[] data, int sampleSize) { if (data.Length < sampleSize) { - throw new 
ArgumentException("Sample size " + sampleSize + + throw new ArgumentException("Sample size " + sampleSize + " is larger than data size " + data.Length); } var indices = Enumerable.Range(0, data.Length).ToArray(); indices.Shuffle(m_random); - + return indices.Take(sampleSize).ToArray(); } @@ -59,15 +59,15 @@ public int[] Sample(T[] data, int sampleSize) /// public int[] Sample(T[] data, int sampleSize, int[] dataIndices) { - if (data.Length < sampleSize) + if (data.Length < sampleSize) { - throw new ArgumentException("Sample size " + sampleSize + + throw new ArgumentException("Sample size " + sampleSize + " is larger than data size " + data.Length); } - if (data.Length < dataIndices.Length) + if (data.Length < dataIndices.Length) { - throw new ArgumentException("dataIndice size " + dataIndices.Length + + throw new ArgumentException("dataIndice size " + dataIndices.Length + " is larger than data size " + data.Length); } diff --git a/src/SharpLearning.CrossValidation/TimeSeries/TimeSeriesCrossValidation.cs b/src/SharpLearning.CrossValidation/TimeSeries/TimeSeriesCrossValidation.cs index 68b38ba2..9ba6a784 100644 --- a/src/SharpLearning.CrossValidation/TimeSeries/TimeSeriesCrossValidation.cs +++ b/src/SharpLearning.CrossValidation/TimeSeries/TimeSeriesCrossValidation.cs @@ -34,28 +34,28 @@ public TimeSeriesCrossValidation(int initialTrainingSize, int maxTrainingSetSize { if (initialTrainingSize <= 0) { - throw new ArgumentException($"{nameof(initialTrainingSize)} " + + throw new ArgumentException($"{nameof(initialTrainingSize)} " + $"much be larger than 0, was {initialTrainingSize}"); } if (maxTrainingSetSize < 0) { - throw new ArgumentException($"{nameof(maxTrainingSetSize)} " + + throw new ArgumentException($"{nameof(maxTrainingSetSize)} " + $"much be larger than 0, was {maxTrainingSetSize}"); } if ((maxTrainingSetSize != 0) && (initialTrainingSize > maxTrainingSetSize)) { - throw new ArgumentException($"{nameof(initialTrainingSize)} = {initialTrainingSize} " + + throw new ArgumentException($"{nameof(initialTrainingSize)} = {initialTrainingSize} " + $"is larger than {nameof(maxTrainingSetSize)} = {maxTrainingSetSize}"); } if (retrainInterval < 1) { - throw new ArgumentException($"{nameof(retrainInterval)} much be larger than 1, " + + throw new ArgumentException($"{nameof(retrainInterval)} much be larger than 1, " + $"was {retrainInterval}"); } - + m_initialTrainingSize = initialTrainingSize; m_maxTrainingSetSize = maxTrainingSetSize; m_retrainInterval = retrainInterval; @@ -77,13 +77,13 @@ public TPrediction[] Validate(IIndexedLearner learner, F64Matrix ob { if (observations.RowCount != targets.Length) { - throw new ArgumentException($"observation row count {observations.RowCount} " + + throw new ArgumentException($"observation row count {observations.RowCount} " + $"must match target length {targets.Length}"); } if (m_initialTrainingSize >= observations.RowCount) { - throw new ArgumentException($"observation row count {observations.RowCount} " + + throw new ArgumentException($"observation row count {observations.RowCount} " + $"is smaller than initial training size {m_initialTrainingSize}"); } @@ -99,7 +99,7 @@ public TPrediction[] Validate(IIndexedLearner learner, F64Matrix ob for (int i = 0; i < predictions.Length; i++) { // Only train a new model at each retrain interval. 
- if((m_retrainInterval == 1 || i % m_retrainInterval == 0) && i != 0) + if ((m_retrainInterval == 1 || i % m_retrainInterval == 0) && i != 0) { model = learner.Learn(observations, targets, trainingIndices); } @@ -109,12 +109,12 @@ public TPrediction[] Validate(IIndexedLearner learner, F64Matrix ob predictions[i] = model.Predict(observation); lastTrainingIndex++; - + // determine start index and length of the training period, if maxTrainingSetSize is specified. - var startIndex = m_maxTrainingSetSize != 0 ? + var startIndex = m_maxTrainingSetSize != 0 ? Math.Max(0, (lastTrainingIndex + 1) - m_maxTrainingSetSize) : 0; - var length = m_maxTrainingSetSize != 0 ? + var length = m_maxTrainingSetSize != 0 ? Math.Min(m_maxTrainingSetSize, lastTrainingIndex) : lastTrainingIndex; trainingIndices = Enumerable.Range(startIndex, length).ToArray(); diff --git a/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplitter.cs b/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplitter.cs index 110baa70..a8485bc1 100644 --- a/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplitter.cs +++ b/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplitter.cs @@ -24,7 +24,7 @@ public TrainingTestIndexSplitter(IIndexSampler shuffler, double trainingPerce m_indexSampler = shuffler ?? throw new ArgumentNullException(nameof(shuffler)); if (trainingPercentage <= 0.0 || trainingPercentage >= 1.0) { throw new ArgumentException("Training percentage must be larger than 0.0 and smaller than 1.0"); } - m_trainingPercentage = trainingPercentage; + m_trainingPercentage = trainingPercentage; } /// @@ -42,7 +42,7 @@ public TrainingTestIndexSplit Split(T[] targets) var testIndices = indices.Except(trainingIndices) .ToArray(); - return new TrainingTestIndexSplit(trainingIndices, + return new TrainingTestIndexSplit(trainingIndices, testIndices); } } diff --git a/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplitterExtensions.cs b/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplitterExtensions.cs index c573d45c..2f3e5bdf 100644 --- a/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplitterExtensions.cs +++ b/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplitterExtensions.cs @@ -17,7 +17,7 @@ public static class TrainingTestIndexSplitterExtensions /// The observations for the problem /// The targets for the problem /// - public static TrainingTestSetSplit SplitSet(this ITrainingTestIndexSplitter splitter, + public static TrainingTestSetSplit SplitSet(this ITrainingTestIndexSplitter splitter, F64Matrix observations, double[] targets) { if (observations.RowCount != targets.Length) diff --git a/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestSetSplit.cs b/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestSetSplit.cs index 7f6fb90c..2b06b195 100644 --- a/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestSetSplit.cs +++ b/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestSetSplit.cs @@ -37,9 +37,9 @@ public TrainingTestSetSplit(ObservationTargetSet trainingSet, ObservationTargetS /// /// /// - public TrainingTestSetSplit(F64Matrix trainingObservations, double[] trainingTargets, + public TrainingTestSetSplit(F64Matrix trainingObservations, double[] trainingTargets, F64Matrix testObservations, double[] testTargets) - : this(new 
ObservationTargetSet(trainingObservations, trainingTargets), + : this(new ObservationTargetSet(trainingObservations, trainingTargets), new ObservationTargetSet(testObservations, testTargets)) { } diff --git a/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/ChildImpuritiesTest.cs b/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/ChildImpuritiesTest.cs index f78f36d7..96a77d0b 100644 --- a/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/ChildImpuritiesTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/ChildImpuritiesTest.cs @@ -10,7 +10,7 @@ public class ChildImpuritiesTest public void ChildImpurities_Equal() { var sut = new ChildImpurities(0.23, 0.55); - + var equal = new ChildImpurities(0.23, 0.55); var notEqual1 = new ChildImpurities(0.19, 0.55); var notEqual2 = new ChildImpurities(0.23, 0.213); diff --git a/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/RegressionImpurityCalculatorTest.cs b/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/RegressionImpurityCalculatorTest.cs index d98d74f7..6a4d1e07 100644 --- a/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/RegressionImpurityCalculatorTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/RegressionImpurityCalculatorTest.cs @@ -12,7 +12,7 @@ public class RegressionImpurityCalculatorTest public void RegressionImpurityCalculator_ImpurityImprovement() { var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, }; - + var parentInterval = Interval1D.Create(0, values.Length); var sut = new RegressionImpurityCalculator(); diff --git a/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs b/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs index c3d443a5..3b6705f4 100644 --- a/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs @@ -24,7 +24,7 @@ public void RegressionDecisionTreeLearner_Learn_Reuse_No_Valid_Split() var onlyOneUniqueObservations = (F64Matrix)observations.Rows(0, 1, 2, 3, 4); var onlyOneUniquetargets = Enumerable.Range(0, onlyOneUniqueObservations.RowCount).Select(v => onlyUniqueTargetValue).ToArray(); var model = sut.Learn(onlyOneUniqueObservations, onlyOneUniquetargets); - + var predictions = model.Predict(onlyOneUniqueObservations); // no valid split, so should result in the model always returning the onlyUniqueTargetValue. 
for (int i = 0; i < predictions.Length; i++) diff --git a/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs b/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs index 3360cd62..361902d1 100644 --- a/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs @@ -109,7 +109,7 @@ public void ClassificationDecisionTreeModel_PredictProbability_Multiple() var error = evaluator.Error(targets, actual.Select(p => p.Prediction).ToArray()); Assert.AreEqual(0.23076923076923078, error, 0.0000001); - + var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), }; CollectionAssert.AreEqual(expected, actual); } @@ -144,7 +144,7 @@ public void ClassificationDecisionTreeModel_GetVariableImportance() var learner = new ClassificationDecisionTreeLearner(100, 1, 2, 0.001, 42); var sut = learner.Learn(observations, targets); - + var actual = sut.GetVariableImportance(featureNameToIndex); var expected = 
new Dictionary { { "PreviousExperience_month", 100.0 }, { "AptitudeTestScore", 19.5121951219512 } }; @@ -173,7 +173,7 @@ public void ClassificationDecisionTreeModel_GetRawVariableImportance() for (int i = 0; i < expected.Length; i++) { - Assert.AreEqual(expected[i], actual[i], 0.000001); + Assert.AreEqual(expected[i], actual[i], 0.000001); } } @@ -184,10 +184,10 @@ public void ClassificationDecisionTreeModel_Save() var learner = new ClassificationDecisionTreeLearner(2); var sut = learner.Learn(observations, targets); - + var writer = new StringWriter(); sut.Save(() => writer); - + var actual = writer.ToString(); Assert.AreEqual(m_classificationDecisionTreeModelString, actual); } diff --git a/src/SharpLearning.DecisionTrees.Test/Models/RegressionDecisionTreeModelTest.cs b/src/SharpLearning.DecisionTrees.Test/Models/RegressionDecisionTreeModelTest.cs index 1b30118d..72ad8d77 100644 --- a/src/SharpLearning.DecisionTrees.Test/Models/RegressionDecisionTreeModelTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/Models/RegressionDecisionTreeModelTest.cs @@ -25,7 +25,7 @@ public void RegressionDecisionTreeModel_Predict_Single() var predictions = new double[rows]; for (int i = 0; i < rows; i++) { - predictions[i] = sut.Predict(observations.Row(i)); + predictions[i] = sut.Predict(observations.Row(i)); } var evaluator = new MeanSquaredErrorRegressionMetric(); diff --git a/src/SharpLearning.DecisionTrees.Test/SplitSearchers/LinearSplitSearcherTest.cs b/src/SharpLearning.DecisionTrees.Test/SplitSearchers/LinearSplitSearcherTest.cs index 289c6f12..2d34ceb6 100644 --- a/src/SharpLearning.DecisionTrees.Test/SplitSearchers/LinearSplitSearcherTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/SplitSearchers/LinearSplitSearcherTest.cs @@ -39,7 +39,7 @@ public void LinearSplitSearcher_FindBestSplit() interval, impurity); var expected = new SplitResult(15, 3.5, 0.037941545633853213, - 0.39111111111111119, 0.49586776859504134); + 0.39111111111111119, 0.49586776859504134); Assert.AreEqual(expected, actual); } @@ -61,12 +61,12 @@ public void LinearSplitSearcher_FindBestSplit_Weight() var impurity = impurityCalculator.NodeImpurity(); var sut = new LinearSplitSearcher(1); - + var actual = sut.FindBestSplit(impurityCalculator, feature, targets, interval, impurity); var expected = new SplitResult(15, 3.5, 0.045729402872260017, - 0.375, 0.49382716049382713); + 0.375, 0.49382716049382713); Assert.AreEqual(expected, actual); } @@ -92,7 +92,7 @@ public void LinearSplitSearcher_FindBestSplit_DecisionTreeData() interval, impurity); var expected = new SplitResult(75, 0.397254, 309.6695422760252, - 0.30040476584587461, -9.0812926380897121); + 0.30040476584587461, -9.0812926380897121); Assert.AreEqual(expected, actual); } @@ -116,7 +116,7 @@ public void LinearSplitSearcher_FindBestSplit_Large() interval, impurity); var expected = new SplitResult(6391, 1.7005986908310751, 0.02182606648710006, - 0.70606468409101264, 0.597976432567197); + 0.70606468409101264, 0.597976432567197); Assert.AreEqual(expected, actual); } diff --git a/src/SharpLearning.DecisionTrees.Test/SplitSearchers/OnlyUniqueThresholdsSplitSearcherTest.cs b/src/SharpLearning.DecisionTrees.Test/SplitSearchers/OnlyUniqueThresholdsSplitSearcherTest.cs index ea449285..a7d51a04 100644 --- a/src/SharpLearning.DecisionTrees.Test/SplitSearchers/OnlyUniqueThresholdsSplitSearcherTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/SplitSearchers/OnlyUniqueThresholdsSplitSearcherTest.cs @@ -39,7 +39,7 @@ public void OnlyUniqueThresholdsSplitSearcher_FindBestSplit() interval, 
impurity); var expected = new SplitResult(15, 3.5, 0.037941545633853213, - 0.39111111111111119, 0.49586776859504134); + 0.39111111111111119, 0.49586776859504134); Assert.AreEqual(expected, actual); } @@ -61,12 +61,12 @@ public void OnlyUniqueThresholdsSplitSearcher_FindBestSplit_Weight() var impurity = impurityCalculator.NodeImpurity(); var sut = new OnlyUniqueThresholdsSplitSearcher(1); - + var actual = sut.FindBestSplit(impurityCalculator, feature, targets, interval, impurity); var expected = new SplitResult(15, 3.5, 0.045729402872260017, - 0.375, 0.49382716049382713); + 0.375, 0.49382716049382713); Assert.AreEqual(expected, actual); } @@ -92,7 +92,7 @@ public void OnlyUniqueThresholdsSplitSearcher_FindBestSplit_DecisionTreeData() interval, impurity); var expected = new SplitResult(75, 0.397254, 309.6695422760252, - 0.30040476584587461, -9.0812926380897121); + 0.30040476584587461, -9.0812926380897121); Assert.AreEqual(expected, actual); } @@ -116,7 +116,7 @@ public void OnlyUniqueThresholdsSplitSearcher_FindBestSplit_Large() interval, impurity); var expected = new SplitResult(4529, 1.242453324894, 0.023317576756311298, - 0.7202197013910161, 0.62315647087159443); + 0.7202197013910161, 0.62315647087159443); Assert.AreEqual(expected, actual); } diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs index 151437e5..e6a0b8c0 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs @@ -94,13 +94,13 @@ public void Init(double[] targetNames, double[] targets, double[] weights, Inter m_interval = interval; SetMinMaxTargetNames(); - if(m_targetIndexOffSet > 0) + if (m_targetIndexOffSet > 0) { m_targetIndexOffSet = 0; } else { - m_targetIndexOffSet = m_targetIndexOffSet * -1; + m_targetIndexOffSet = m_targetIndexOffSet * -1; } m_weightedTargetCount.Reset(m_maxTargetNameIndex, m_targetIndexOffSet); @@ -128,7 +128,7 @@ public void Init(double[] targetNames, double[] targets, double[] weights, Inter m_currentPosition = m_interval.FromInclusive; this.Reset(); } - + /// /// Resets impurity calculator /// @@ -159,7 +159,7 @@ public void UpdateInterval(Interval1D newInterval) /// public void UpdateIndex(int newPosition) { - if(m_currentPosition > newPosition) + if (m_currentPosition > newPosition) { throw new ArgumentException("New position: " + newPosition + " must be larger than current: " + m_currentPosition); diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/IImpurityCalculator.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/IImpurityCalculator.cs index 0b219b05..248ce450 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/IImpurityCalculator.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/IImpurityCalculator.cs @@ -15,19 +15,19 @@ public interface IImpurityCalculator /// /// void Init(double[] uniqueTargets, double[] targets, double[] weights, Interval1D interval); - + /// /// Update the work interval. 
/// /// void UpdateInterval(Interval1D newInterval); - + /// /// Update the split index /// /// void UpdateIndex(int newPosition); - + /// /// Reset the calculation within the current work interval /// @@ -61,7 +61,7 @@ public interface IImpurityCalculator /// Returns the weighted size of the current right split /// double WeightedRight { get; } - + /// /// Calculates the leaf value based on the current work interval /// diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/RegressionImpurityCalculator.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/RegressionImpurityCalculator.cs index a752a9ae..41dc06a3 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/RegressionImpurityCalculator.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/RegressionImpurityCalculator.cs @@ -19,18 +19,18 @@ public sealed class RegressionImpurityCalculator : IImpurityCalculator double m_meanLeft = 0.0; double m_meanRight = 0.0; double m_meanTotal = 0.0; - + double m_sqSumLeft = 0.0; double m_sqSumRight = 0.0; double m_sqSumTotal = 0.0; - + double m_varLeft = 0.0; double m_varRight = 0.0; - + double m_sumLeft = 0.0; double m_sumRight = 0.0; double m_sumTotal = 0.0; - + double[] m_targets; double[] m_weights; @@ -152,7 +152,7 @@ public void UpdateIndex(int newPosition) var weightsPresent = m_weights.Length != 0; var w = 1.0; var w_diff = 0.0; - + for (int i = m_currentPosition; i < newPosition; i++) { if (weightsPresent) diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/TargetCounts.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/TargetCounts.cs index f9bd2474..4f5b355a 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/TargetCounts.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/TargetCounts.cs @@ -14,14 +14,14 @@ internal class TargetCounts public TargetCounts() : this(0, 0) { - } + } - public TargetCounts (int size, int offset) - { + public TargetCounts(int size, int offset) + { OffSet = offset; Length = offset + size; m_counts = new double[Length]; - } + } public double[] Counts { get => m_counts; } public int OffSet { get; private set; } diff --git a/src/SharpLearning.DecisionTrees/Learners/ClassificationDecisionTreeLearner.cs b/src/SharpLearning.DecisionTrees/Learners/ClassificationDecisionTreeLearner.cs index 66104961..60e8d5cd 100644 --- a/src/SharpLearning.DecisionTrees/Learners/ClassificationDecisionTreeLearner.cs +++ b/src/SharpLearning.DecisionTrees/Learners/ClassificationDecisionTreeLearner.cs @@ -13,7 +13,7 @@ namespace SharpLearning.DecisionTrees.Learners /// Trains a Classification Decision tree /// http://en.wikipedia.org/wiki/Decision_tree_learning /// - public sealed class ClassificationDecisionTreeLearner + public sealed class ClassificationDecisionTreeLearner : DecisionTreeLearner , IIndexedLearner , IIndexedLearner @@ -30,16 +30,16 @@ public sealed class ClassificationDecisionTreeLearner /// The minimum improvement in information gain before a split is made /// Seed for feature selection if number of features pr split is not equal /// to the total amount of features in observations. 
The features will be selected at random for each split - public ClassificationDecisionTreeLearner(int maximumTreeDepth=2000, - int minimumSplitSize=1, - int featuresPrSplit=0, - double minimumInformationGain=0.000001, int seed=42) - : base(new DepthFirstTreeBuilder(maximumTreeDepth, featuresPrSplit, minimumInformationGain, seed, + public ClassificationDecisionTreeLearner(int maximumTreeDepth = 2000, + int minimumSplitSize = 1, + int featuresPrSplit = 0, + double minimumInformationGain = 0.000001, int seed = 42) + : base(new DepthFirstTreeBuilder(maximumTreeDepth, featuresPrSplit, minimumInformationGain, seed, new OnlyUniqueThresholdsSplitSearcher(minimumSplitSize), - new GiniClassificationImpurityCalculator())) + new GiniClassificationImpurityCalculator())) { } - + /// /// /// @@ -72,7 +72,7 @@ public ClassificationDecisionTreeLearner(int maximumTreeDepth=2000, /// /// /// - public new ClassificationDecisionTreeModel Learn(F64Matrix observations, double[] targets, + public new ClassificationDecisionTreeModel Learn(F64Matrix observations, double[] targets, int[] indices) { return new ClassificationDecisionTreeModel(base.Learn(observations, targets, indices)); @@ -87,7 +87,7 @@ public ClassificationDecisionTreeLearner(int maximumTreeDepth=2000, /// /// /// - public new ClassificationDecisionTreeModel Learn(F64Matrix observations, double[] targets, + public new ClassificationDecisionTreeModel Learn(F64Matrix observations, double[] targets, int[] indices, double[] weights) { return new ClassificationDecisionTreeModel(base.Learn(observations, targets, indices, weights)); @@ -101,7 +101,7 @@ public ClassificationDecisionTreeLearner(int maximumTreeDepth=2000, /// /// /// - public new ClassificationDecisionTreeModel Learn(F64MatrixView observations, double[] targets, + public new ClassificationDecisionTreeModel Learn(F64MatrixView observations, double[] targets, int[] indices) { return new ClassificationDecisionTreeModel(base.Learn(observations, targets, indices)); @@ -116,7 +116,7 @@ public ClassificationDecisionTreeLearner(int maximumTreeDepth=2000, /// /// /// - public new ClassificationDecisionTreeModel Learn(F64MatrixView observations, double[] targets, + public new ClassificationDecisionTreeModel Learn(F64MatrixView observations, double[] targets, int[] indices, double[] weights) { return new ClassificationDecisionTreeModel(base.Learn(observations, targets, indices, weights)); diff --git a/src/SharpLearning.DecisionTrees/Learners/RegressionDecisionTreeLearner.cs b/src/SharpLearning.DecisionTrees/Learners/RegressionDecisionTreeLearner.cs index 90149f00..a0858fad 100644 --- a/src/SharpLearning.DecisionTrees/Learners/RegressionDecisionTreeLearner.cs +++ b/src/SharpLearning.DecisionTrees/Learners/RegressionDecisionTreeLearner.cs @@ -24,13 +24,13 @@ public sealed class RegressionDecisionTreeLearner : DecisionTreeLearner, /// The minimum improvement in information gain before a split is made /// Seed for feature selection if number of features pr split is not equal /// to the total amount of features in observations. 
The features will be selected at random for each split - public RegressionDecisionTreeLearner(int maximumTreeDepth=2000, - int minimumSplitSize=1, - int featuresPrSplit=0, - double minimumInformationGain=0.000001, - int seed=42) - : base(new DepthFirstTreeBuilder(maximumTreeDepth, featuresPrSplit, minimumInformationGain, seed, - new OnlyUniqueThresholdsSplitSearcher(minimumSplitSize), + public RegressionDecisionTreeLearner(int maximumTreeDepth = 2000, + int minimumSplitSize = 1, + int featuresPrSplit = 0, + double minimumInformationGain = 0.000001, + int seed = 42) + : base(new DepthFirstTreeBuilder(maximumTreeDepth, featuresPrSplit, minimumInformationGain, seed, + new OnlyUniqueThresholdsSplitSearcher(minimumSplitSize), new RegressionImpurityCalculator())) { } @@ -53,7 +53,7 @@ public RegressionDecisionTreeLearner(int maximumTreeDepth=2000, /// /// /// - public new RegressionDecisionTreeModel Learn(F64Matrix observations, double[] targets, + public new RegressionDecisionTreeModel Learn(F64Matrix observations, double[] targets, double[] weights) { return new RegressionDecisionTreeModel(base.Learn(observations, targets, weights)); @@ -66,7 +66,7 @@ public RegressionDecisionTreeLearner(int maximumTreeDepth=2000, /// /// /// - public new RegressionDecisionTreeModel Learn(F64Matrix observations, double[] targets, + public new RegressionDecisionTreeModel Learn(F64Matrix observations, double[] targets, int[] indices) { return new RegressionDecisionTreeModel(base.Learn(observations, targets, indices)); @@ -81,7 +81,7 @@ public RegressionDecisionTreeLearner(int maximumTreeDepth=2000, /// /// /// - public new RegressionDecisionTreeModel Learn(F64Matrix observations, double[] targets, + public new RegressionDecisionTreeModel Learn(F64Matrix observations, double[] targets, int[] indices, double[] weights) { return new RegressionDecisionTreeModel(base.Learn(observations, targets, indices, weights)); @@ -95,7 +95,7 @@ public RegressionDecisionTreeLearner(int maximumTreeDepth=2000, /// /// /// - public new RegressionDecisionTreeModel Learn(F64MatrixView observations, double[] targets, + public new RegressionDecisionTreeModel Learn(F64MatrixView observations, double[] targets, int[] indices) { return new RegressionDecisionTreeModel(base.Learn(observations, targets, indices)); @@ -110,7 +110,7 @@ public RegressionDecisionTreeLearner(int maximumTreeDepth=2000, /// /// /// - public new RegressionDecisionTreeModel Learn(F64MatrixView observations, double[] targets, + public new RegressionDecisionTreeModel Learn(F64MatrixView observations, double[] targets, int[] indices, double[] weights) { return new RegressionDecisionTreeModel(base.Learn(observations, targets, indices, weights)); diff --git a/src/SharpLearning.DecisionTrees/Models/ClassificationDecisionTreeModel.cs b/src/SharpLearning.DecisionTrees/Models/ClassificationDecisionTreeModel.cs index 3ff0de7a..ff245821 100644 --- a/src/SharpLearning.DecisionTrees/Models/ClassificationDecisionTreeModel.cs +++ b/src/SharpLearning.DecisionTrees/Models/ClassificationDecisionTreeModel.cs @@ -132,7 +132,7 @@ public ProbabilityPrediction[] PredictProbability(F64Matrix observations, int[] public Dictionary GetVariableImportance(Dictionary featureNameToIndex) { var max = m_variableImportance.Max(); - + var scaledVariableImportance = m_variableImportance .Select(v => (v / max) * 100.0) .ToArray(); diff --git a/src/SharpLearning.DecisionTrees/Nodes/BinaryTree.cs b/src/SharpLearning.DecisionTrees/Nodes/BinaryTree.cs index 7d5217cb..74657f40 100644 --- 
a/src/SharpLearning.DecisionTrees/Nodes/BinaryTree.cs +++ b/src/SharpLearning.DecisionTrees/Nodes/BinaryTree.cs @@ -14,7 +14,7 @@ public sealed class BinaryTree /// Tree Nodes /// public readonly List Nodes; - + /// /// Leaf node probabilities /// @@ -30,7 +30,7 @@ public sealed class BinaryTree /// public readonly double[] VariableImportance; - + /// /// /// @@ -38,7 +38,7 @@ public sealed class BinaryTree /// /// /// - public BinaryTree(List nodes, List probabilities, double[] targetNames, + public BinaryTree(List nodes, List probabilities, double[] targetNames, double[] variableImportance) { Nodes = nodes ?? throw new ArgumentNullException(nameof(nodes)); diff --git a/src/SharpLearning.DecisionTrees/Nodes/DecisionNodeCreationItem.cs b/src/SharpLearning.DecisionTrees/Nodes/DecisionNodeCreationItem.cs index 00948ce4..05a76d67 100644 --- a/src/SharpLearning.DecisionTrees/Nodes/DecisionNodeCreationItem.cs +++ b/src/SharpLearning.DecisionTrees/Nodes/DecisionNodeCreationItem.cs @@ -40,7 +40,7 @@ public struct DecisionNodeCreationItem /// /// /// - public DecisionNodeCreationItem(int parentIndex, NodePositionType nodeType, Interval1D interval, + public DecisionNodeCreationItem(int parentIndex, NodePositionType nodeType, Interval1D interval, double impurity, int nodeDepth) { ParentIndex = parentIndex; diff --git a/src/SharpLearning.DecisionTrees/Nodes/Node.cs b/src/SharpLearning.DecisionTrees/Nodes/Node.cs index 9c1bf18d..fa6e3e7a 100644 --- a/src/SharpLearning.DecisionTrees/Nodes/Node.cs +++ b/src/SharpLearning.DecisionTrees/Nodes/Node.cs @@ -22,7 +22,7 @@ public struct Node /// Right child tree index /// public readonly int RightIndex; - + /// /// Left child tree index /// @@ -47,7 +47,7 @@ public struct Node /// /// /// - public Node(int featureIndex, double value, int leftIndex, + public Node(int featureIndex, double value, int leftIndex, int rightIndex, int nodeIndex, int leafProbabilityIndex) { FeatureIndex = featureIndex; diff --git a/src/SharpLearning.DecisionTrees/SplitSearchers/LinearSplitSearcher.cs b/src/SharpLearning.DecisionTrees/SplitSearchers/LinearSplitSearcher.cs index 0e81b4e4..48de4d9c 100644 --- a/src/SharpLearning.DecisionTrees/SplitSearchers/LinearSplitSearcher.cs +++ b/src/SharpLearning.DecisionTrees/SplitSearchers/LinearSplitSearcher.cs @@ -54,7 +54,7 @@ public LinearSplitSearcher(int minimumSplitSize, double minimumLeafWeight) /// /// /// - public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[] feature, double[] targets, + public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[] feature, double[] targets, Interval1D parentInterval, double parentImpurity) { @@ -63,7 +63,7 @@ public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[ var bestImpurityImprovement = 0.0; var bestImpurityLeft = 0.0; var bestImpurityRight = 0.0; - + int prevSplit = parentInterval.FromInclusive; var prevValue = feature[prevSplit]; var prevTarget = targets[prevSplit]; @@ -83,7 +83,7 @@ public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[ if (Math.Min(leftSize, rightSize) >= m_minimumSplitSize) { impurityCalculator.UpdateIndex(currentSplit); - + if (impurityCalculator.WeightedLeft < m_minimumLeafWeight || impurityCalculator.WeightedRight < m_minimumLeafWeight) { diff --git a/src/SharpLearning.DecisionTrees/SplitSearchers/OnlyUniqueThresholdsSplitSearcher.cs b/src/SharpLearning.DecisionTrees/SplitSearchers/OnlyUniqueThresholdsSplitSearcher.cs index 8a56a82c..dc7437a1 100644 --- 
a/src/SharpLearning.DecisionTrees/SplitSearchers/OnlyUniqueThresholdsSplitSearcher.cs +++ b/src/SharpLearning.DecisionTrees/SplitSearchers/OnlyUniqueThresholdsSplitSearcher.cs @@ -55,7 +55,7 @@ public OnlyUniqueThresholdsSplitSearcher(int minimumSplitSize, double minimumLea /// /// /// - public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[] feature, double[] targets, + public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[] feature, double[] targets, Interval1D parentInterval, double parentImpurity) { @@ -64,7 +64,7 @@ public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[ var bestImpurityImprovement = 0.0; var bestImpurityLeft = 0.0; var bestImpurityRight = 0.0; - + int prevSplit = parentInterval.FromInclusive; var prevValue = feature[prevSplit]; var prevTarget = targets[prevSplit]; @@ -84,7 +84,7 @@ public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[ if (Math.Min(leftSize, rightSize) >= m_minimumSplitSize) { impurityCalculator.UpdateIndex(currentSplit); - + if (impurityCalculator.WeightedLeft < m_minimumLeafWeight || impurityCalculator.WeightedRight < m_minimumLeafWeight) { diff --git a/src/SharpLearning.DecisionTrees/SplitSearchers/RandomSplitSearcher.cs b/src/SharpLearning.DecisionTrees/SplitSearchers/RandomSplitSearcher.cs index 1357744f..2710540d 100644 --- a/src/SharpLearning.DecisionTrees/SplitSearchers/RandomSplitSearcher.cs +++ b/src/SharpLearning.DecisionTrees/SplitSearchers/RandomSplitSearcher.cs @@ -41,27 +41,27 @@ public RandomSplitSearcher(int minimumSplitSize, int seed) /// /// /// - public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[] feature, double[] targets, + public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[] feature, double[] targets, Interval1D parentInterval, double parentImpurity) { var min = double.MaxValue; var max = double.MinValue; - + for (int i = parentInterval.FromInclusive; i < parentInterval.ToExclusive; i++) { var value = feature[i]; - - if(value < min) + + if (value < min) { min = value; } - else if(value > max) + else if (value > max) { max = value; } } - if(min == max) + if (min == max) { return SplitResult.Initial(); } @@ -77,7 +77,7 @@ public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[ var impurityImprovement = 0.0; var impurityLeft = 0.0; var impurityRight = 0.0; - + var currentFeature = double.MinValue; for (int i = parentInterval.FromInclusive; i < parentInterval.ToExclusive; i++) @@ -90,11 +90,11 @@ public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[ if (currentFeature > threshold && Math.Min(leftSize, rightSize) >= m_minimumSplitSize) { splitIndex = i; - + impurityCalculator.UpdateInterval(parentInterval); impurityCalculator.UpdateIndex(i); impurityImprovement = impurityCalculator.ImpurityImprovement(parentImpurity); - + var childImpurities = impurityCalculator.ChildImpurities(); impurityLeft = childImpurities.Left; impurityRight = childImpurities.Right; diff --git a/src/SharpLearning.DecisionTrees/TreeBuilders/BestFirstTreeBuilder.cs b/src/SharpLearning.DecisionTrees/TreeBuilders/BestFirstTreeBuilder.cs index ca9f2231..8a10ab4b 100644 --- a/src/SharpLearning.DecisionTrees/TreeBuilders/BestFirstTreeBuilder.cs +++ b/src/SharpLearning.DecisionTrees/TreeBuilders/BestFirstTreeBuilder.cs @@ -56,12 +56,12 @@ public sealed class BestFirstTreeBuilder : ITreeBuilder /// to the total amount of features in observations. 
The features will be selected at random for each split /// The type of searcher used for finding the best features splits when learning the tree /// Impurity calculator used to decide which split is optimal - public BestFirstTreeBuilder(int maximumTreeDepth, - int maximumLeafCount, - int featuresPrSplit, - double minimumInformationGain, + public BestFirstTreeBuilder(int maximumTreeDepth, + int maximumLeafCount, + int featuresPrSplit, + double minimumInformationGain, int seed, - ISplitSearcher splitSearcher, + ISplitSearcher splitSearcher, IImpurityCalculator impurityCalculator) { if (maximumTreeDepth <= 0) { throw new ArgumentException("maximum tree depth must be larger than 0"); } @@ -259,7 +259,7 @@ public BinaryTree Build(F64MatrixView observations, double[] targets, int[] indi nodes.Add(leaf); } - return new BinaryTree(nodes, probabilities, targetNames, + return new BinaryTree(nodes, probabilities, targetNames, m_variableImportance.ToArray()); } diff --git a/src/SharpLearning.DecisionTrees/TreeBuilders/DepthFirstTreeBuilder.cs b/src/SharpLearning.DecisionTrees/TreeBuilders/DepthFirstTreeBuilder.cs index cf72e958..4610db50 100644 --- a/src/SharpLearning.DecisionTrees/TreeBuilders/DepthFirstTreeBuilder.cs +++ b/src/SharpLearning.DecisionTrees/TreeBuilders/DepthFirstTreeBuilder.cs @@ -53,11 +53,11 @@ public sealed class DepthFirstTreeBuilder : ITreeBuilder /// to the total amount of features in observations. The features will be selected at random for each split /// The type of searcher used for finding the best features splits when learning the tree /// Impurity calculator used to decide which split is optimal - public DepthFirstTreeBuilder(int maximumTreeDepth, - int featuresPrSplit, - double minimumInformationGain, + public DepthFirstTreeBuilder(int maximumTreeDepth, + int featuresPrSplit, + double minimumInformationGain, int seed, - ISplitSearcher splitSearcher, + ISplitSearcher splitSearcher, IImpurityCalculator impurityCalculator) { if (maximumTreeDepth <= 0) { throw new ArgumentException("maximum tree depth must be larger than 0"); } @@ -95,7 +95,7 @@ public BinaryTree Build(F64MatrixView observations, double[] targets, int[] indi { m_featuresPrSplit = numberOfFeatures; } - + Array.Resize(ref m_bestSplitWorkIndices, indices.Length); m_bestSplitWorkIndices.Clear(); Array.Resize(ref m_variableImportance, numberOfFeatures); @@ -250,7 +250,7 @@ public BinaryTree Build(F64MatrixView observations, double[] targets, int[] indi nodes.Add(leaf); } - return new BinaryTree(nodes, probabilities, targetNames, + return new BinaryTree(nodes, probabilities, targetNames, m_variableImportance.ToArray()); } diff --git a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/BackwardEliminationClassificationEnsembleSelectionTest.cs b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/BackwardEliminationClassificationEnsembleSelectionTest.cs index d2ca1326..d68b0c39 100644 --- a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/BackwardEliminationClassificationEnsembleSelectionTest.cs +++ b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/BackwardEliminationClassificationEnsembleSelectionTest.cs @@ -16,7 +16,7 @@ public class BackwardEliminationClassificationEnsembleSelectionTest [ExpectedException(typeof(ArgumentNullException))] public void BackwardEliminationClassificationEnsembleSelection_Constructor_Metric_Null() { - var sut = new BackwardEliminationClassificationEnsembleSelection(null, + var sut = new BackwardEliminationClassificationEnsembleSelection(null, new 
MeanProbabilityClassificationEnsembleStrategy(), 5); } @@ -33,7 +33,7 @@ public void BackwardEliminationClassificationEnsembleSelection_Constructor_Ensem public void BackwardEliminationClassificationEnsembleSelection_Constructor_Number_Of_Models_Too_Low() { var sut = new BackwardEliminationClassificationEnsembleSelection( - new LogLossClassificationProbabilityMetric(), + new LogLossClassificationProbabilityMetric(), new MeanProbabilityClassificationEnsembleStrategy(), 0); } diff --git a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/BackwardEliminationRegressionEnsembleSelectionTest.cs b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/BackwardEliminationRegressionEnsembleSelectionTest.cs index 06ab4f0a..7059d335 100644 --- a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/BackwardEliminationRegressionEnsembleSelectionTest.cs +++ b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/BackwardEliminationRegressionEnsembleSelectionTest.cs @@ -15,7 +15,7 @@ public class BackwardEliminationRegressionEnsembleSelectionTest [ExpectedException(typeof(ArgumentNullException))] public void BackwardEliminationRegressionEnsembleSelection_Constructor_Metric_Null() { - var sut = new BackwardEliminationRegressionEnsembleSelection(null, + var sut = new BackwardEliminationRegressionEnsembleSelection(null, new MeanRegressionEnsembleStrategy(), 5); } @@ -32,7 +32,7 @@ public void BackwardEliminationRegressionEnsembleSelection_Constructor_EnsembleS public void BackwardEliminationRegressionEnsembleSelection_Constructor_Number_Of_Models_Too_Low() { var sut = new BackwardEliminationRegressionEnsembleSelection( - new MeanSquaredErrorRegressionMetric(), + new MeanSquaredErrorRegressionMetric(), new MeanRegressionEnsembleStrategy(), 0); } diff --git a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/ForwardSearchClassificationEnsembleSelectionTest.cs b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/ForwardSearchClassificationEnsembleSelectionTest.cs index 5acf44f7..53415bc2 100644 --- a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/ForwardSearchClassificationEnsembleSelectionTest.cs +++ b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/ForwardSearchClassificationEnsembleSelectionTest.cs @@ -16,7 +16,7 @@ public class ForwardSearchClassificationEnsembleSelectionTest [ExpectedException(typeof(ArgumentNullException))] public void ForwardSearchClassificationEnsembleSelection_Constructor_Metric_Null() { - var sut = new ForwardSearchClassificationEnsembleSelection(null, + var sut = new ForwardSearchClassificationEnsembleSelection(null, new MeanProbabilityClassificationEnsembleStrategy(), 5, 1, true); } @@ -33,7 +33,7 @@ public void ForwardSearchClassificationEnsembleSelection_Constructor_EnsembleStr public void ForwardSearchClassificationEnsembleSelection_Constructor_Number_Of_Models_Too_Low() { var sut = new ForwardSearchClassificationEnsembleSelection( - new LogLossClassificationProbabilityMetric(), + new LogLossClassificationProbabilityMetric(), new MeanProbabilityClassificationEnsembleStrategy(), 0, 1, true); } diff --git a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/ForwardSearchRegressionEnsembleSelectionTest.cs b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/ForwardSearchRegressionEnsembleSelectionTest.cs index 751ba08e..4939f163 100644 --- a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/ForwardSearchRegressionEnsembleSelectionTest.cs +++ b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/ForwardSearchRegressionEnsembleSelectionTest.cs @@ -15,7 +15,7 @@ public class 
ForwardSearchRegressionEnsembleSelectionTest [ExpectedException(typeof(ArgumentNullException))] public void ForwardSearchRegressionEnsembleSelection_Constructor_Metric_Null() { - var sut = new ForwardSearchRegressionEnsembleSelection(null, + var sut = new ForwardSearchRegressionEnsembleSelection(null, new MeanRegressionEnsembleStrategy(), 5, 1, true); } @@ -32,7 +32,7 @@ public void ForwardSearchRegressionEnsembleSelection_Constructor_EnsembleStratey public void ForwardSearchRegressionEnsembleSelection_Constructor_Number_Of_Models_Too_Low() { var sut = new ForwardSearchRegressionEnsembleSelection( - new MeanSquaredErrorRegressionMetric(), + new MeanSquaredErrorRegressionMetric(), new MeanRegressionEnsembleStrategy(), 0, 1, true); } diff --git a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/RandomClassificationEnsembleSelectionTest.cs b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/RandomClassificationEnsembleSelectionTest.cs index c92ab083..e09977e5 100644 --- a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/RandomClassificationEnsembleSelectionTest.cs +++ b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/RandomClassificationEnsembleSelectionTest.cs @@ -16,7 +16,7 @@ public class RandomClassificationEnsembleSelectionTest [ExpectedException(typeof(ArgumentNullException))] public void RandomClassificationEnsembleSelection_Constructor_Metric_Null() { - var sut = new RandomClassificationEnsembleSelection(null, + var sut = new RandomClassificationEnsembleSelection(null, new MeanProbabilityClassificationEnsembleStrategy(), 5, 1, true); } @@ -33,7 +33,7 @@ public void RandomClassificationEnsembleSelection_Constructor_EnsembleStratey_Nu public void RandomClassificationEnsembleSelection_Constructor_Number_Of_Models_Too_Low() { var sut = new RandomClassificationEnsembleSelection( - new LogLossClassificationProbabilityMetric(), + new LogLossClassificationProbabilityMetric(), new MeanProbabilityClassificationEnsembleStrategy(), 0, 1, true); } diff --git a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/RandomRegressionEnsembleSelectionTest.cs b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/RandomRegressionEnsembleSelectionTest.cs index f7f4dc13..517da0d4 100644 --- a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/RandomRegressionEnsembleSelectionTest.cs +++ b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/RandomRegressionEnsembleSelectionTest.cs @@ -15,7 +15,7 @@ public class RandomRegressionEnsembleSelectionTest [ExpectedException(typeof(ArgumentNullException))] public void RandomRegressionEnsembleSelection_Constructor_Metric_Null() { - var sut = new RandomRegressionEnsembleSelection(null, + var sut = new RandomRegressionEnsembleSelection(null, new MeanRegressionEnsembleStrategy(), 5, 1, true); } @@ -32,7 +32,7 @@ public void RandomRegressionEnsembleSelection_Constructor_EnsembleStratey_Null() public void RandomRegressionEnsembleSelection_Constructor_Number_Of_Models_Too_Low() { var sut = new RandomRegressionEnsembleSelection( - new MeanSquaredErrorRegressionMetric(), + new MeanSquaredErrorRegressionMetric(), new MeanRegressionEnsembleStrategy(), 0, 1, true); } diff --git a/src/SharpLearning.Ensemble.Test/Learners/ClassificationEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/ClassificationEnsembleLearnerTest.cs index d3f6d3cf..862a327a 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/ClassificationEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/ClassificationEnsembleLearnerTest.cs @@ -23,7 +23,7 @@ public void 
ClassificationEnsembleLearner_Learn() new ClassificationDecisionTreeLearner(9) }; - var sut = new ClassificationEnsembleLearner(learners, + var sut = new ClassificationEnsembleLearner(learners, new MeanProbabilityClassificationEnsembleStrategy()); var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); @@ -48,7 +48,7 @@ public void ClassificationEnsembleLearner_Learn_Bagging() new ClassificationDecisionTreeLearner(9) }; - var sut = new ClassificationEnsembleLearner(learners, + var sut = new ClassificationEnsembleLearner(learners, new MeanProbabilityClassificationEnsembleStrategy(), 0.7); var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); @@ -73,7 +73,7 @@ public void ClassificationEnsembleLearner_Learn_Indexed() new ClassificationDecisionTreeLearner(9) }; - var sut = new ClassificationEnsembleLearner(learners, + var sut = new ClassificationEnsembleLearner(learners, new MeanProbabilityClassificationEnsembleStrategy()); var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); diff --git a/src/SharpLearning.Ensemble.Test/Learners/ClassificationModelSelectingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/ClassificationModelSelectingEnsembleLearnerTest.cs index 6d0b2992..7ee4055a 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/ClassificationModelSelectingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/ClassificationModelSelectingEnsembleLearnerTest.cs @@ -39,7 +39,7 @@ public void ClassificationModelSelectingEnsembleLearner_Learn() var ensembleSelection = new ForwardSearchClassificationEnsembleSelection( metric, ensembleStrategy, 5, 1, true); - var sut = new ClassificationModelSelectingEnsembleLearner(learners, + var sut = new ClassificationModelSelectingEnsembleLearner(learners, new RandomCrossValidation(5, 23), ensembleStrategy, ensembleSelection); @@ -77,7 +77,7 @@ public void ClassificationModelSelectingEnsembleLearner_Learn_Without_Replacemen var ensembleSelection = new ForwardSearchClassificationEnsembleSelection( metric, ensembleStrategy, 5, 1, false); - var sut = new ClassificationModelSelectingEnsembleLearner(learners, + var sut = new ClassificationModelSelectingEnsembleLearner(learners, new RandomCrossValidation(5, 23), ensembleStrategy, ensembleSelection); @@ -112,10 +112,10 @@ public void ClassificationModelSelectingEnsembleLearner_Learn_Start_With_3_Model var metric = new LogLossClassificationProbabilityMetric(); var ensembleStrategy = new MeanProbabilityClassificationEnsembleStrategy(); - var ensembleSelection = new ForwardSearchClassificationEnsembleSelection(metric, + var ensembleSelection = new ForwardSearchClassificationEnsembleSelection(metric, ensembleStrategy, 5, 3, true); - var sut = new ClassificationModelSelectingEnsembleLearner(learners, + var sut = new ClassificationModelSelectingEnsembleLearner(learners, new RandomCrossValidation(5, 23), ensembleStrategy, ensembleSelection); @@ -150,10 +150,10 @@ public void ClassificationModelSelectingEnsembleLearner_Learn_Indexed() var metric = new LogLossClassificationProbabilityMetric(); var ensembleStrategy = new MeanProbabilityClassificationEnsembleStrategy(); - var ensembleSelection = new ForwardSearchClassificationEnsembleSelection(metric, + var ensembleSelection = new ForwardSearchClassificationEnsembleSelection(metric, ensembleStrategy, 5, 1, true); - var sut = new ClassificationModelSelectingEnsembleLearner(learners, + var sut = new ClassificationModelSelectingEnsembleLearner(learners, new RandomCrossValidation(5, 23), ensembleStrategy, 
ensembleSelection); @@ -175,11 +175,11 @@ public void ClassificationModelSelectingEnsembleLearner_Constructor_Learners_Nul { var metric = new LogLossClassificationProbabilityMetric(); var ensembleStrategy = new MeanProbabilityClassificationEnsembleStrategy(); - var ensembleSelection = new ForwardSearchClassificationEnsembleSelection(metric, + var ensembleSelection = new ForwardSearchClassificationEnsembleSelection(metric, ensembleStrategy, 5, 1, true); var crossValidation = new RandomCrossValidation(5); - var sut = new ClassificationModelSelectingEnsembleLearner(null, crossValidation, + var sut = new ClassificationModelSelectingEnsembleLearner(null, crossValidation, ensembleStrategy, ensembleSelection); } @@ -193,7 +193,7 @@ public void ClassificationModelSelectingEnsembleLearner_Constructor_CrossValidat var ensembleSelection = new ForwardSearchClassificationEnsembleSelection(metric, ensembleStrategy, 5, 1, true); - var sut = new ClassificationModelSelectingEnsembleLearner(learners, null, + var sut = new ClassificationModelSelectingEnsembleLearner(learners, null, ensembleStrategy, ensembleSelection); } @@ -206,7 +206,7 @@ public void ClassificationModelSelectingEnsembleLearner_Constructor_EnsembleSele var ensembleStrategy = new MeanProbabilityClassificationEnsembleStrategy(); var crossValidation = new RandomCrossValidation(5); - var sut = new ClassificationModelSelectingEnsembleLearner(learners, crossValidation, + var sut = new ClassificationModelSelectingEnsembleLearner(learners, crossValidation, ensembleStrategy, null); } } diff --git a/src/SharpLearning.Ensemble.Test/Learners/ClassificationRandomModelSelectingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/ClassificationRandomModelSelectingEnsembleLearnerTest.cs index 2e91f9c3..3946252c 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/ClassificationRandomModelSelectingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/ClassificationRandomModelSelectingEnsembleLearnerTest.cs @@ -68,7 +68,7 @@ public void ClassificationRandomModelSelectingEnsembleLearner_Learn_Without_Repl var ensembleStrategy = new MeanProbabilityClassificationEnsembleStrategy(); var sut = new ClassificationRandomModelSelectingEnsembleLearner(learners, 5, - new StratifiedCrossValidation(5, 23), ensembleStrategy, + new StratifiedCrossValidation(5, 23), ensembleStrategy, metric, 1, false); var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); @@ -104,7 +104,7 @@ public void ClassificationRandomModelSelectingEnsembleLearner_Learn_Indexed() var ensembleStrategy = new MeanProbabilityClassificationEnsembleStrategy(); var sut = new ClassificationRandomModelSelectingEnsembleLearner(learners, 5, - new RandomCrossValidation(5, 23), ensembleStrategy, + new RandomCrossValidation(5, 23), ensembleStrategy, metric, 3, true); var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); diff --git a/src/SharpLearning.Ensemble.Test/Learners/ClassificationStackingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/ClassificationStackingEnsembleLearnerTest.cs index e210a8e6..1a312e39 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/ClassificationStackingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/ClassificationStackingEnsembleLearnerTest.cs @@ -23,7 +23,7 @@ public void ClassificationStackingEnsembleLearner_Learn() new ClassificationDecisionTreeLearner(9) }; - var sut = new ClassificationStackingEnsembleLearner(learners, + var sut = new ClassificationStackingEnsembleLearner(learners, 
new ClassificationDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); @@ -49,7 +49,7 @@ public void ClassificationStackingEnsembleLearner_CreateMetaFeatures_Then_Learn( new ClassificationDecisionTreeLearner(9) }; - var sut = new ClassificationStackingEnsembleLearner(learners, + var sut = new ClassificationStackingEnsembleLearner(learners, new ClassificationDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); @@ -77,7 +77,7 @@ public void ClassificationStackingEnsembleLearner_Learn_Include_Original_Feature new ClassificationDecisionTreeLearner(9) }; - var sut = new ClassificationStackingEnsembleLearner(learners, + var sut = new ClassificationStackingEnsembleLearner(learners, new ClassificationDecisionTreeLearner(9), new RandomCrossValidation(5, 23), true); @@ -103,7 +103,7 @@ public void ClassificationStackingEnsembleLearner_Learn_Indexed() new ClassificationDecisionTreeLearner(9) }; - var sut = new ClassificationStackingEnsembleLearner(learners, + var sut = new ClassificationStackingEnsembleLearner(learners, new ClassificationDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); diff --git a/src/SharpLearning.Ensemble.Test/Learners/RegressionForwardSearchModelSelectingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/RegressionForwardSearchModelSelectingEnsembleLearnerTest.cs index edd45a12..04524bf7 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/RegressionForwardSearchModelSelectingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/RegressionForwardSearchModelSelectingEnsembleLearnerTest.cs @@ -68,7 +68,7 @@ public void RegressionForwardSearchModelSelectingEnsembleLearner_Learn_Without_R var metric = new MeanSquaredErrorRegressionMetric(); var sut = new RegressionForwardSearchModelSelectingEnsembleLearner(learners, 5, - new RandomCrossValidation(5, 42), + new RandomCrossValidation(5, 42), new MeanRegressionEnsembleStrategy(), metric, 1, false); var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); @@ -104,7 +104,7 @@ public void RegressionForwardSearchModelSelectingEnsembleLearner_Learn_Start_Wit var metric = new MeanSquaredErrorRegressionMetric(); var sut = new RegressionForwardSearchModelSelectingEnsembleLearner(learners, 5, - new RandomCrossValidation(5, 42), + new RandomCrossValidation(5, 42), new MeanRegressionEnsembleStrategy(), metric, 3, false); var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); diff --git a/src/SharpLearning.Ensemble.Test/Learners/RegressionModelSelectingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/RegressionModelSelectingEnsembleLearnerTest.cs index 1d955e1d..d0cb5468 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/RegressionModelSelectingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/RegressionModelSelectingEnsembleLearnerTest.cs @@ -18,11 +18,11 @@ public void RegressionModelSelectingEnsembleLearner_Constructor_Learners_Null() { var metric = new MeanSquaredErrorRegressionMetric(); var ensembleStrategy = new MeanRegressionEnsembleStrategy(); - var ensembleSelection = new ForwardSearchRegressionEnsembleSelection(metric, + var ensembleSelection = new ForwardSearchRegressionEnsembleSelection(metric, ensembleStrategy, 5, 1, true); var crossValidation = new RandomCrossValidation(5); - var sut = new RegressionModelSelectingEnsembleLearner(null, crossValidation, + var sut = new RegressionModelSelectingEnsembleLearner(null, crossValidation, ensembleStrategy, ensembleSelection); } @@ -30,13 +30,13 
@@ public void RegressionModelSelectingEnsembleLearner_Constructor_Learners_Null() [ExpectedException(typeof(ArgumentNullException))] public void RegressionModelSelectingEnsembleLearner_Constructor_CrossValidation_Null() { - var learners = new IIndexedLearner [4]; + var learners = new IIndexedLearner[4]; var metric = new MeanSquaredErrorRegressionMetric(); var ensembleStrategy = new MeanRegressionEnsembleStrategy(); - var ensembleSelection = new ForwardSearchRegressionEnsembleSelection(metric, + var ensembleSelection = new ForwardSearchRegressionEnsembleSelection(metric, ensembleStrategy, 5, 1, true); - var sut = new RegressionModelSelectingEnsembleLearner(learners, null, + var sut = new RegressionModelSelectingEnsembleLearner(learners, null, ensembleStrategy, ensembleSelection); } diff --git a/src/SharpLearning.Ensemble.Test/Learners/RegressionRandomModelSelectingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/RegressionRandomModelSelectingEnsembleLearnerTest.cs index 7ce33375..024a6425 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/RegressionRandomModelSelectingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/RegressionRandomModelSelectingEnsembleLearnerTest.cs @@ -68,8 +68,8 @@ public void RegressionRandomModelSelectingEnsembleLearner_Learn_Without_Replacem var metric = new MeanSquaredErrorRegressionMetric(); var sut = new RegressionRandomModelSelectingEnsembleLearner(learners, 5, - new RandomCrossValidation(5, 42), - new MeanRegressionEnsembleStrategy(), + new RandomCrossValidation(5, 42), + new MeanRegressionEnsembleStrategy(), metric, 1, false); var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); @@ -105,7 +105,7 @@ public void RegressionRandomModelSelectingEnsembleLearner_Learn_Start_With_3_Mod var metric = new MeanSquaredErrorRegressionMetric(); var sut = new RegressionRandomModelSelectingEnsembleLearner(learners, 5, - new RandomCrossValidation(5, 42), + new RandomCrossValidation(5, 42), new MeanRegressionEnsembleStrategy(), metric, 3, false); diff --git a/src/SharpLearning.Ensemble.Test/Learners/RegressionStackingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/RegressionStackingEnsembleLearnerTest.cs index 32ac45a8..1ea182ac 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/RegressionStackingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/RegressionStackingEnsembleLearnerTest.cs @@ -22,7 +22,7 @@ public void RegressionStackingEnsembleLearner_Learn() new RegressionDecisionTreeLearner(9) }; - var sut = new RegressionStackingEnsembleLearner(learners, + var sut = new RegressionStackingEnsembleLearner(learners, new RegressionDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); @@ -48,7 +48,7 @@ public void RegressionStackingEnsembleLearner_CreateMetaFeatures_Then_Learn() new RegressionDecisionTreeLearner(9) }; - var sut = new RegressionStackingEnsembleLearner(learners, + var sut = new RegressionStackingEnsembleLearner(learners, new RegressionDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); @@ -76,7 +76,7 @@ public void RegressionStackingEnsembleLearner_Learn_Keep_Original_Features() new RegressionDecisionTreeLearner(9) }; - var sut = new RegressionStackingEnsembleLearner(learners, + var sut = new RegressionStackingEnsembleLearner(learners, new RegressionDecisionTreeLearner(9), new RandomCrossValidation(5, 23), true); @@ -102,7 +102,7 @@ public void RegressionStackingEnsembleLearner_Learn_Indexed() new RegressionDecisionTreeLearner(9) }; - var sut 
= new RegressionStackingEnsembleLearner(learners, + var sut = new RegressionStackingEnsembleLearner(learners, new RegressionDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); diff --git a/src/SharpLearning.Ensemble.Test/Models/ClassificationEnsembleModelTest.cs b/src/SharpLearning.Ensemble.Test/Models/ClassificationEnsembleModelTest.cs index 9a042957..aab594c0 100644 --- a/src/SharpLearning.Ensemble.Test/Models/ClassificationEnsembleModelTest.cs +++ b/src/SharpLearning.Ensemble.Test/Models/ClassificationEnsembleModelTest.cs @@ -26,7 +26,7 @@ public void ClassificationEnsembleModel_Predict_single() new ClassificationDecisionTreeLearner(9) }; - var learner = new ClassificationEnsembleLearner(learners, + var learner = new ClassificationEnsembleLearner(learners, new MeanProbabilityClassificationEnsembleStrategy()); var sut = learner.Learn(observations, targets); @@ -57,7 +57,7 @@ public void ClassificationEnsembleModel_Predict_Multiple() new ClassificationDecisionTreeLearner(9) }; - var learner = new ClassificationEnsembleLearner(learners, + var learner = new ClassificationEnsembleLearner(learners, new MeanProbabilityClassificationEnsembleStrategy()); var sut = learner.Learn(observations, targets); @@ -83,7 +83,7 @@ public void ClassificationEnsembleModel_PredictProbability_single() new ClassificationDecisionTreeLearner(9) }; - var learner = new ClassificationEnsembleLearner(learners, + var learner = new ClassificationEnsembleLearner(learners, new MeanProbabilityClassificationEnsembleStrategy()); var sut = learner.Learn(observations, targets); @@ -114,7 +114,7 @@ public void ClassificationEnsembleModel_PredictProbability_Multiple() new ClassificationDecisionTreeLearner(9) }; - var learner = new ClassificationEnsembleLearner(learners, + var learner = new ClassificationEnsembleLearner(learners, new MeanProbabilityClassificationEnsembleStrategy()); var sut = learner.Learn(observations, targets); @@ -132,7 +132,7 @@ public void ClassificationEnsembleModel_GetVariableImportance() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, + var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, { "PreviousExperience_month", 1 } }; var learners = new IIndexedLearner[] @@ -143,13 +143,13 @@ public void ClassificationEnsembleModel_GetVariableImportance() new ClassificationDecisionTreeLearner(9) }; - var learner = new ClassificationEnsembleLearner(learners, + var learner = new ClassificationEnsembleLearner(learners, new MeanProbabilityClassificationEnsembleStrategy()); var sut = learner.Learn(observations, targets); var actual = sut.GetVariableImportance(featureNameToIndex); - var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, + var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, { "AptitudeTestScore", 15.6771501925546 } }; Assert.AreEqual(expected.Count, actual.Count); @@ -175,7 +175,7 @@ public void ClassificationEnsembleModel_GetRawVariableImportance() new ClassificationDecisionTreeLearner(9) }; - var learner = new ClassificationEnsembleLearner(learners, + var learner = new ClassificationEnsembleLearner(learners, new MeanProbabilityClassificationEnsembleStrategy()); var sut = learner.Learn(observations, targets); diff --git a/src/SharpLearning.Ensemble.Test/Models/ClassificationStackingEnsembleModelTest.cs b/src/SharpLearning.Ensemble.Test/Models/ClassificationStackingEnsembleModelTest.cs index e943e5ba..73167372 100644 --- 
a/src/SharpLearning.Ensemble.Test/Models/ClassificationStackingEnsembleModelTest.cs +++ b/src/SharpLearning.Ensemble.Test/Models/ClassificationStackingEnsembleModelTest.cs @@ -27,10 +27,10 @@ public void ClassificationStackingEnsembleModel_Predict_single() new ClassificationDecisionTreeLearner(9) }; - var learner = new ClassificationStackingEnsembleLearner(learners, + var learner = new ClassificationStackingEnsembleLearner(learners, new ClassificationDecisionTreeLearner(9), - new RandomCrossValidation(5, 23), false); - + new RandomCrossValidation(5, 23), false); + var sut = learner.Learn(observations, targets); var rows = targets.Length; @@ -59,10 +59,10 @@ public void ClassificationStackingEnsembleModel_Predict_Multiple() new ClassificationDecisionTreeLearner(9) }; - var learner = new ClassificationStackingEnsembleLearner(learners, + var learner = new ClassificationStackingEnsembleLearner(learners, new ClassificationDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); - + var sut = learner.Learn(observations, targets); var predictions = sut.Predict(observations); @@ -86,10 +86,10 @@ public void ClassificationStackingEnsembleModel_PredictProbability_single() new ClassificationDecisionTreeLearner(9) }; - var learner = new ClassificationStackingEnsembleLearner(learners, + var learner = new ClassificationStackingEnsembleLearner(learners, new ClassificationDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); - + var sut = learner.Learn(observations, targets); var rows = targets.Length; @@ -118,10 +118,10 @@ public void ClassificationStackingEnsembleModel_PredictProbability_Multiple() new ClassificationDecisionTreeLearner(9) }; - var learner = new ClassificationStackingEnsembleLearner(learners, + var learner = new ClassificationStackingEnsembleLearner(learners, new ClassificationDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); - + var sut = learner.Learn(observations, targets); var predictions = sut.PredictProbability(observations); @@ -137,7 +137,7 @@ public void ClassificationStackingEnsembleModel_GetVariableImportance() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, + var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, { "PreviousExperience_month", 1 } }; var learners = new IIndexedLearner[] @@ -148,16 +148,16 @@ public void ClassificationStackingEnsembleModel_GetVariableImportance() new ClassificationDecisionTreeLearner(9) }; - var learner = new ClassificationStackingEnsembleLearner(learners, + var learner = new ClassificationStackingEnsembleLearner(learners, new ClassificationDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); - + var sut = learner.Learn(observations, targets); var actual = sut.GetVariableImportance(featureNameToIndex); WriteImportances(actual); - var expected = new Dictionary { { "ClassificationDecisionTreeModel_1_Class_Probability_0", 100 }, { "ClassificationDecisionTreeModel_2_Class_Probability_0", 92.2443379072288 }, { "ClassificationDecisionTreeModel_0_Class_Probability_0", 76.9658783620323 }, { "ClassificationDecisionTreeModel_1_Class_Probability_1", 21.1944454897829 }, { "ClassificationDecisionTreeModel_0_Class_Probability_1", 0 }, { "ClassificationDecisionTreeModel_2_Class_Probability_1", 0 }, { "ClassificationDecisionTreeModel_3_Class_Probability_0", 0 }, { "ClassificationDecisionTreeModel_3_Class_Probability_1", 0 } }; + var expected = new Dictionary { { 
"ClassificationDecisionTreeModel_1_Class_Probability_0", 100 }, { "ClassificationDecisionTreeModel_2_Class_Probability_0", 92.2443379072288 }, { "ClassificationDecisionTreeModel_0_Class_Probability_0", 76.9658783620323 }, { "ClassificationDecisionTreeModel_1_Class_Probability_1", 21.1944454897829 }, { "ClassificationDecisionTreeModel_0_Class_Probability_1", 0 }, { "ClassificationDecisionTreeModel_2_Class_Probability_1", 0 }, { "ClassificationDecisionTreeModel_3_Class_Probability_0", 0 }, { "ClassificationDecisionTreeModel_3_Class_Probability_1", 0 } }; Assert.AreEqual(expected.Count, actual.Count); var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a }); @@ -182,14 +182,14 @@ public void ClassificationStackingEnsembleModel_GetRawVariableImportance() new ClassificationDecisionTreeLearner(9) }; - var learner = new ClassificationStackingEnsembleLearner(learners, + var learner = new ClassificationStackingEnsembleLearner(learners, new ClassificationDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); - + var sut = learner.Learn(observations, targets); var actual = sut.GetRawVariableImportance(); - var expected = new double[] { 0.12545787545787546, 0, 0.16300453932032882, 0.0345479082321188, 0.15036245805476572, 0, 0, 0 }; + var expected = new double[] { 0.12545787545787546, 0, 0.16300453932032882, 0.0345479082321188, 0.15036245805476572, 0, 0, 0 }; Assert.AreEqual(expected.Length, actual.Length); diff --git a/src/SharpLearning.Ensemble.Test/Models/RegressionEnsembleModelTest.cs b/src/SharpLearning.Ensemble.Test/Models/RegressionEnsembleModelTest.cs index d4ee994d..3833c4c9 100644 --- a/src/SharpLearning.Ensemble.Test/Models/RegressionEnsembleModelTest.cs +++ b/src/SharpLearning.Ensemble.Test/Models/RegressionEnsembleModelTest.cs @@ -25,7 +25,7 @@ public void RegressionEnsembleModel_Predict_single() new RegressionDecisionTreeLearner(9) }; - var learner = new RegressionEnsembleLearner(learners, new MeanRegressionEnsembleStrategy()); + var learner = new RegressionEnsembleLearner(learners, new MeanRegressionEnsembleStrategy()); var sut = learner.Learn(observations, targets); var rows = targets.Length; @@ -70,7 +70,7 @@ public void RegressionEnsembleModel_GetVariableImportance() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, + var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, { "PreviousExperience_month", 1 } }; var learners = new IIndexedLearner[] @@ -85,7 +85,7 @@ public void RegressionEnsembleModel_GetVariableImportance() var sut = learner.Learn(observations, targets); var actual = sut.GetVariableImportance(featureNameToIndex); - var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, + var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, { "AptitudeTestScore", 3.46067371526717 } }; Assert.AreEqual(expected.Count, actual.Count); diff --git a/src/SharpLearning.Ensemble.Test/Models/RegressionStackingEnsembleModelTest.cs b/src/SharpLearning.Ensemble.Test/Models/RegressionStackingEnsembleModelTest.cs index 3decfc1b..2636b571 100644 --- a/src/SharpLearning.Ensemble.Test/Models/RegressionStackingEnsembleModelTest.cs +++ b/src/SharpLearning.Ensemble.Test/Models/RegressionStackingEnsembleModelTest.cs @@ -29,7 +29,7 @@ public void RegressionStackingEnsembleModel_Predict_single() new RegressionDecisionTreeLearner(9) }; - var learner = new RegressionStackingEnsembleLearner(learners, + var learner = new 
RegressionStackingEnsembleLearner(learners, new RegressionDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); @@ -61,7 +61,7 @@ public void RegressionStackingEnsembleModel_Predict_Multiple() new RegressionDecisionTreeLearner(9) }; - var learner = new RegressionStackingEnsembleLearner(learners, + var learner = new RegressionStackingEnsembleLearner(learners, new RegressionDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); @@ -80,7 +80,7 @@ public void RegressionStackingEnsembleModel_GetVariableImportance() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, + var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, { "PreviousExperience_month", 1 } }; var learners = new IIndexedLearner[] @@ -91,7 +91,7 @@ public void RegressionStackingEnsembleModel_GetVariableImportance() new RegressionDecisionTreeLearner(9) }; - var learner = new RegressionStackingEnsembleLearner(learners, + var learner = new RegressionStackingEnsembleLearner(learners, new RegressionDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); @@ -123,7 +123,7 @@ public void RegressionStackingEnsembleModel_GetRawVariableImportance() new RegressionDecisionTreeLearner(9) }; - var learner = new RegressionStackingEnsembleLearner(learners, + var learner = new RegressionStackingEnsembleLearner(learners, new RegressionDecisionTreeLearner(9), new RandomCrossValidation(5, 23), false); diff --git a/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationClassificationEnsembleSelection.cs b/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationClassificationEnsembleSelection.cs index b9a33ec4..612662f9 100644 --- a/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationClassificationEnsembleSelection.cs +++ b/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationClassificationEnsembleSelection.cs @@ -26,7 +26,7 @@ public sealed class BackwardEliminationClassificationEnsembleSelection : IClassi /// Strategy for ensembling models /// Number of models to select public BackwardEliminationClassificationEnsembleSelection( - IMetric metric, + IMetric metric, IClassificationEnsembleStrategy ensembleStrategy, int numberOfModelsToSelect) { @@ -46,7 +46,7 @@ public BackwardEliminationClassificationEnsembleSelection( /// The indices of the selected model public int[] Select(ProbabilityPrediction[][] crossValidatedModelPredictions, double[] targets) { - if(crossValidatedModelPredictions.Length < m_numberOfModelsToSelect) + if (crossValidatedModelPredictions.Length < m_numberOfModelsToSelect) { throw new ArgumentException("Available models: " + crossValidatedModelPredictions.Length + " is smaller than number of models to select: " + m_numberOfModelsToSelect); @@ -74,8 +74,8 @@ public int[] Select(ProbabilityPrediction[][] crossValidatedModelPredictions, do return m_bestModelIndices.ToArray(); } - double SelectNextModelToRemove(ProbabilityPrediction[][] crossValidatedModelPredictions, - double[] targets, + double SelectNextModelToRemove(ProbabilityPrediction[][] crossValidatedModelPredictions, + double[] targets, double currentBestError) { var rows = crossValidatedModelPredictions.First().Length; diff --git a/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationRegressionEnsembleSelection.cs b/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationRegressionEnsembleSelection.cs index 387e741e..adce962d 100644 --- 
a/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationRegressionEnsembleSelection.cs +++ b/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationRegressionEnsembleSelection.cs @@ -26,7 +26,7 @@ public sealed class BackwardEliminationRegressionEnsembleSelection : IRegression /// Strategy for ensembling models /// Number of models to select public BackwardEliminationRegressionEnsembleSelection( - IMetric metric, + IMetric metric, IRegressionEnsembleStrategy ensembleStrategy, int numberOfModelsToSelect) { @@ -46,7 +46,7 @@ public BackwardEliminationRegressionEnsembleSelection( /// The indices of the selected model public int[] Select(F64Matrix crossValidatedModelPredictions, double[] targets) { - if(crossValidatedModelPredictions.ColumnCount < m_numberOfModelsToSelect) + if (crossValidatedModelPredictions.ColumnCount < m_numberOfModelsToSelect) { throw new ArgumentException("Available models: " + crossValidatedModelPredictions.ColumnCount + " is smaller than number of models to select: " + m_numberOfModelsToSelect); @@ -61,7 +61,7 @@ public int[] Select(F64Matrix crossValidatedModelPredictions, double[] targets) { var error = SelectNextModelToRemove(crossValidatedModelPredictions, targets, currentError); - if(error < currentError && m_remainingModelIndices.Count <= m_numberOfModelsToSelect) + if (error < currentError && m_remainingModelIndices.Count <= m_numberOfModelsToSelect) { currentError = error; m_bestModelIndices = m_remainingModelIndices.ToList(); @@ -69,16 +69,16 @@ public int[] Select(F64Matrix crossValidatedModelPredictions, double[] targets) } } - Trace.WriteLine("Selected model indices: " + string.Join(", ", m_bestModelIndices.ToArray())); + Trace.WriteLine("Selected model indices: " + string.Join(", ", m_bestModelIndices.ToArray())); return m_bestModelIndices.ToArray(); } - double SelectNextModelToRemove(F64Matrix crossValidatedModelPredictions, - double[] targets, + double SelectNextModelToRemove(F64Matrix crossValidatedModelPredictions, + double[] targets, double currentBestError) { - var candidateModelMatrix = new F64Matrix(crossValidatedModelPredictions.RowCount, + var candidateModelMatrix = new F64Matrix(crossValidatedModelPredictions.RowCount, m_remainingModelIndices.Count - 1); var candidatePredictions = new double[crossValidatedModelPredictions.RowCount]; @@ -93,14 +93,14 @@ double SelectNextModelToRemove(F64Matrix crossValidatedModelPredictions, for (int i = 0; i < m_remainingModelIndices.Count; i++) { var curIndex = m_remainingModelIndices[i]; - if(curIndex != index) + if (curIndex != index) { candidateModelIndices[candidateIndex++] = m_remainingModelIndices[i]; } } crossValidatedModelPredictions.Columns(candidateModelIndices, candidateModelMatrix); - + m_ensembleStrategy.Combine(candidateModelMatrix, candidatePredictions); var error = m_metric.Error(targets, candidatePredictions); diff --git a/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchClassificationEnsembleSelection.cs b/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchClassificationEnsembleSelection.cs index 55c12049..9e368e2a 100644 --- a/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchClassificationEnsembleSelection.cs +++ b/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchClassificationEnsembleSelection.cs @@ -32,10 +32,10 @@ public sealed class ForwardSearchClassificationEnsembleSelection : IClassificati /// If true the same model can be selected multiple times. /// This will correspond to weighting the models. 
If false each model can only be selected once public ForwardSearchClassificationEnsembleSelection( - IMetric metric, + IMetric metric, IClassificationEnsembleStrategy ensembleStrategy, - int numberOfModelsToSelect, - int numberOfModelsFromStart, + int numberOfModelsToSelect, + int numberOfModelsFromStart, bool selectWithReplacement) { m_metric = metric ?? throw new ArgumentNullException(nameof(metric)); @@ -58,7 +58,7 @@ public ForwardSearchClassificationEnsembleSelection( /// The indices of the selected model public int[] Select(ProbabilityPrediction[][] crossValidatedModelPredictions, double[] targets) { - if(crossValidatedModelPredictions.Length < m_numberOfModelsToSelect) + if (crossValidatedModelPredictions.Length < m_numberOfModelsToSelect) { throw new ArgumentException("Available models: " + crossValidatedModelPredictions.Length + " is smaller than number of models to select: " + m_numberOfModelsToSelect); @@ -83,7 +83,7 @@ public int[] Select(ProbabilityPrediction[][] crossValidatedModelPredictions, do { var error = SelectNextModelToAdd(crossValidatedModelPredictions, targets, currentError); - if(error < currentError) + if (error < currentError) { currentError = error; Trace.WriteLine("Models Selected: " + i + " Error: " + error); @@ -100,8 +100,8 @@ public int[] Select(ProbabilityPrediction[][] crossValidatedModelPredictions, do return m_selectedModelIndices.ToArray(); } - double SelectNextModelToAdd(ProbabilityPrediction[][] crossValidatedModelPredictions, - double[] targets, + double SelectNextModelToAdd(ProbabilityPrediction[][] crossValidatedModelPredictions, + double[] targets, double currentBestError) { var rows = crossValidatedModelPredictions.First().Length; @@ -132,11 +132,11 @@ double SelectNextModelToAdd(ProbabilityPrediction[][] crossValidatedModelPredict } } - if(bestIndex != -1) + if (bestIndex != -1) { m_selectedModelIndices.Add(bestIndex); - - if(!m_selectWithReplacement) + + if (!m_selectWithReplacement) { m_remainingModelIndices.Remove(bestIndex); } diff --git a/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchRegressionEnsembleSelection.cs b/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchRegressionEnsembleSelection.cs index 557a39fa..17e22487 100644 --- a/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchRegressionEnsembleSelection.cs +++ b/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchRegressionEnsembleSelection.cs @@ -54,7 +54,7 @@ public ForwardSearchRegressionEnsembleSelection(IMetric metric, /// The indices of the selected model public int[] Select(F64Matrix crossValidatedModelPredictions, double[] targets) { - if(crossValidatedModelPredictions.ColumnCount < m_numberOfModelsToSelect) + if (crossValidatedModelPredictions.ColumnCount < m_numberOfModelsToSelect) { throw new ArgumentException("Available models: " + crossValidatedModelPredictions.ColumnCount + " is smaller than number of models to select: " + m_numberOfModelsToSelect); @@ -79,7 +79,7 @@ public int[] Select(F64Matrix crossValidatedModelPredictions, double[] targets) { var error = SelectNextModelToAdd(crossValidatedModelPredictions, targets, currentError); - if(error < currentError) + if (error < currentError) { currentError = error; Trace.WriteLine("Models Selected: " + i + " Error: " + error); @@ -111,7 +111,7 @@ double SelectNextModelToAdd(F64Matrix crossValidatedModelPredictions, double[] t candidateModelIndices[candidateModelIndices.Length - 1] = index; crossValidatedModelPredictions.Columns(candidateModelIndices, candidateModelMatrix); - + 
m_ensembleStrategy.Combine(candidateModelMatrix, candidatePredictions); var error = m_metric.Error(targets, candidatePredictions); @@ -122,11 +122,11 @@ double SelectNextModelToAdd(F64Matrix crossValidatedModelPredictions, double[] t } } - if(bestIndex != -1) + if (bestIndex != -1) { m_selectedModelIndices.Add(bestIndex); - - if(!m_selectWithReplacement) + + if (!m_selectWithReplacement) { m_remainingModelIndices.Remove(bestIndex); } diff --git a/src/SharpLearning.Ensemble/EnsembleSelectors/RandomClassificationEnsembleSelection.cs b/src/SharpLearning.Ensemble/EnsembleSelectors/RandomClassificationEnsembleSelection.cs index ec901eb0..8da7ecf3 100644 --- a/src/SharpLearning.Ensemble/EnsembleSelectors/RandomClassificationEnsembleSelection.cs +++ b/src/SharpLearning.Ensemble/EnsembleSelectors/RandomClassificationEnsembleSelection.cs @@ -33,11 +33,11 @@ public sealed class RandomClassificationEnsembleSelection : IClassificationEnsem /// This will correspond to weighting the models. If false each model can only be selected once /// public RandomClassificationEnsembleSelection( - IMetric metric, + IMetric metric, IClassificationEnsembleStrategy ensembleStrategy, - int numberOfModelsToSelect, - int iterations, - bool selectWithReplacement, + int numberOfModelsToSelect, + int iterations, + bool selectWithReplacement, int seed = 42) { m_metric = metric ?? throw new ArgumentNullException(nameof(metric)); @@ -60,7 +60,7 @@ public RandomClassificationEnsembleSelection( /// The indices of the selected model public int[] Select(ProbabilityPrediction[][] crossValidatedModelPredictions, double[] targets) { - if(crossValidatedModelPredictions.Length < m_numberOfModelsToSelect) + if (crossValidatedModelPredictions.Length < m_numberOfModelsToSelect) { throw new ArgumentException("Available models: " + crossValidatedModelPredictions.Length + " is smaller than number of models to select: " + m_numberOfModelsToSelect); @@ -92,7 +92,7 @@ public int[] Select(ProbabilityPrediction[][] crossValidatedModelPredictions, do { bestError = error; candidateModelIndices.CopyTo(bestModelIndices, 0); - Trace.WriteLine("Models selected: " + bestModelIndices.Length+ ": " + error); + Trace.WriteLine("Models selected: " + bestModelIndices.Length + ": " + error); } } @@ -103,7 +103,7 @@ public int[] Select(ProbabilityPrediction[][] crossValidatedModelPredictions, do void SelectNextRandomIndices(int[] candidateModelIndices) { - if(m_selectWithReplacement) + if (m_selectWithReplacement) { for (int i = 0; i < candidateModelIndices.Length; i++) { diff --git a/src/SharpLearning.Ensemble/EnsembleSelectors/RandomRegressionEnsembleSelection.cs b/src/SharpLearning.Ensemble/EnsembleSelectors/RandomRegressionEnsembleSelection.cs index d5105969..1485c74d 100644 --- a/src/SharpLearning.Ensemble/EnsembleSelectors/RandomRegressionEnsembleSelection.cs +++ b/src/SharpLearning.Ensemble/EnsembleSelectors/RandomRegressionEnsembleSelection.cs @@ -33,12 +33,12 @@ public sealed class RandomRegressionEnsembleSelection : IRegressionEnsembleSelec /// This will correspond to weighting the models. If false each model can only be selected once /// public RandomRegressionEnsembleSelection( - IMetric metric, + IMetric metric, IRegressionEnsembleStrategy ensembleStrategy, - int numberOfModelsToSelect, - int iterations, - bool selectWithReplacement, - int seed=42) + int numberOfModelsToSelect, + int iterations, + bool selectWithReplacement, + int seed = 42) { m_metric = metric ?? 
throw new ArgumentNullException(nameof(metric)); m_ensembleStrategy = ensembleStrategy ?? throw new ArgumentNullException(nameof(ensembleStrategy)); @@ -60,7 +60,7 @@ public RandomRegressionEnsembleSelection( /// The indices of the selected model public int[] Select(F64Matrix crossValidatedModelPredictions, double[] targets) { - if(crossValidatedModelPredictions.ColumnCount < m_numberOfModelsToSelect) + if (crossValidatedModelPredictions.ColumnCount < m_numberOfModelsToSelect) { throw new ArgumentException("Available models: " + crossValidatedModelPredictions.ColumnCount + " is smaller than number of models to select: " + m_numberOfModelsToSelect); @@ -99,7 +99,7 @@ public int[] Select(F64Matrix crossValidatedModelPredictions, double[] targets) void SelectNextRandomIndices(int[] candidateModelIndices) { - if(m_selectWithReplacement) + if (m_selectWithReplacement) { for (int i = 0; i < candidateModelIndices.Length; i++) { diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationBackwardEliminationModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationBackwardEliminationModelSelectingEnsembleLearner.cs index 3926b057..c859eeba 100644 --- a/src/SharpLearning.Ensemble/Learners/ClassificationBackwardEliminationModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/ClassificationBackwardEliminationModelSelectingEnsembleLearner.cs @@ -25,10 +25,10 @@ public sealed class ClassificationBackwardEliminationModelSelectingEnsembleLearn /// Learners in the ensemble /// Number of models to select public ClassificationBackwardEliminationModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, int numberOfModelsToSelect) - : this(learners, numberOfModelsToSelect, - new StratifiedCrossValidation(5, 42), + : this(learners, numberOfModelsToSelect, + new StratifiedCrossValidation(5, 42), new MeanProbabilityClassificationEnsembleStrategy(), new LogLossClassificationProbabilityMetric()) { @@ -46,12 +46,12 @@ public ClassificationBackwardEliminationModelSelectingEnsembleLearner( /// Strategy for ensembling models /// Metric to minimize public ClassificationBackwardEliminationModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, int numberOfModelsToSelect, - ICrossValidation crossValidation, - IClassificationEnsembleStrategy ensembleStrategy, + ICrossValidation crossValidation, + IClassificationEnsembleStrategy ensembleStrategy, IMetric metric) - : base(learners, crossValidation, ensembleStrategy, + : base(learners, crossValidation, ensembleStrategy, new BackwardEliminationClassificationEnsembleSelection( metric, ensembleStrategy, numberOfModelsToSelect)) { diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationEnsembleLearner.cs index f5ab3a9f..06329abe 100644 --- a/src/SharpLearning.Ensemble/Learners/ClassificationEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/ClassificationEnsembleLearner.cs @@ -13,7 +13,7 @@ namespace SharpLearning.Ensemble.Learners /// Classification ensemble learner. 
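// Usage sketch for the ClassificationBackwardEliminationModelSelectingEnsembleLearner hunks above
// (not part of the patch): the two-argument constructor falls back to StratifiedCrossValidation(5, 42),
// mean-probability ensembling and log loss. The generic type arguments, the learners array, and the
// imported SharpLearning.* namespaces are assumptions here, since the patch text elides them.
var learners = new IIndexedLearner<ProbabilityPrediction>[]
{
    new ClassificationDecisionTreeLearner(2),
    new ClassificationDecisionTreeLearner(5),
    new ClassificationDecisionTreeLearner(9),
};
var learner = new ClassificationBackwardEliminationModelSelectingEnsembleLearner(learners, 2);
var (observations, targets) = DataSetUtilities.LoadGlassDataSet();
var model = learner.Learn(observations, targets); // backward elimination prunes the ensemble down to 2 models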
/// http://mlwave.com/kaggle-ensembling-guide/ /// - public sealed class ClassificationEnsembleLearner + public sealed class ClassificationEnsembleLearner : ILearner , IIndexedLearner , ILearner @@ -33,8 +33,8 @@ public sealed class ClassificationEnsembleLearner /// If different from 1.0 models are trained using bagging with the chosen sub sample ratio /// Seed for the bagging when used public ClassificationEnsembleLearner( - IIndexedLearner[] learners, - double subSampleRatio = 1.0, + IIndexedLearner[] learners, + double subSampleRatio = 1.0, int seed = 24) : this(learners.Select(l => new Func>((o, t, i) => l.Learn(o, t, i))).ToArray(), () => new MeanProbabilityClassificationEnsembleStrategy(), subSampleRatio, seed) @@ -50,11 +50,11 @@ public ClassificationEnsembleLearner( /// If different from 1.0 models are trained using bagging with the chosen sub sample ratio /// Seed for the bagging when used public ClassificationEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, IClassificationEnsembleStrategy ensembleStrategy, - double subSampleRatio = 1.0, + double subSampleRatio = 1.0, int seed = 24) - : this(learners.Select(l => new Func>((o, t, i) => l.Learn(o, t, i))).ToArray(), + : this(learners.Select(l => new Func>((o, t, i) => l.Learn(o, t, i))).ToArray(), () => ensembleStrategy, subSampleRatio, seed) { } @@ -68,9 +68,9 @@ public ClassificationEnsembleLearner( /// If different from 1.0 models are trained using bagging with the chosen sub sample ratio /// Seed for the bagging when used public ClassificationEnsembleLearner( - Func>[] learners, + Func>[] learners, Func ensembleStrategy, - double subSampleRatio = 1.0, + double subSampleRatio = 1.0, int seed = 24) { m_learners = learners ?? throw new ArgumentNullException(nameof(learners)); @@ -101,13 +101,13 @@ public ClassificationEnsembleModel Learn(F64Matrix observations, double[] target /// /// /// - public ClassificationEnsembleModel Learn(F64Matrix observations, double[] targets, + public ClassificationEnsembleModel Learn(F64Matrix observations, double[] targets, int[] indices) { var ensembleModels = new IPredictorModel[m_learners.Length]; var sampleSize = (int)Math.Round(m_subSampleRatio * indices.Length); - if(sampleSize < 1) { throw new ArgumentException("subSampleRatio two small"); } + if (sampleSize < 1) { throw new ArgumentException("subSampleRatio too small"); } var inSample = new int[sampleSize]; diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationForwardSearchModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationForwardSearchModelSelectingEnsembleLearner.cs index 34a4798f..427c0825 100644 --- a/src/SharpLearning.Ensemble/Learners/ClassificationForwardSearchModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/ClassificationForwardSearchModelSelectingEnsembleLearner.cs @@ -25,10 +25,10 @@ public sealed class ClassificationForwardSearchModelSelectingEnsembleLearner : C /// Learners in the ensemble /// Number of models to select public ClassificationForwardSearchModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, int numberOfModelsToSelect) - : this(learners, numberOfModelsToSelect, - new StratifiedCrossValidation(5, 42), + : this(learners, numberOfModelsToSelect, + new StratifiedCrossValidation(5, 42), new MeanProbabilityClassificationEnsembleStrategy(), new LogLossClassificationProbabilityMetric()) { @@ -50,16 +50,16 @@ public ClassificationForwardSearchModelSelectingEnsembleLearner( /// If true the same
model can be selected multiple times. /// This will correspond to weighting the models. If false each model can only be selected once. Default is true public ClassificationForwardSearchModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, int numberOfModelsToSelect, - ICrossValidation crossValidation, - IClassificationEnsembleStrategy ensembleStrategy, - IMetric metric, - int numberOfModelsFromStart = 1, + ICrossValidation crossValidation, + IClassificationEnsembleStrategy ensembleStrategy, + IMetric metric, + int numberOfModelsFromStart = 1, bool selectWithReplacement = true) - : base(learners, crossValidation, ensembleStrategy, + : base(learners, crossValidation, ensembleStrategy, new ForwardSearchClassificationEnsembleSelection( - metric, ensembleStrategy, numberOfModelsToSelect, + metric, ensembleStrategy, numberOfModelsToSelect, numberOfModelsFromStart, selectWithReplacement)) { } diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationModelSelectingEnsembleLearner.cs index 753a8e04..2df12907 100644 --- a/src/SharpLearning.Ensemble/Learners/ClassificationModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/ClassificationModelSelectingEnsembleLearner.cs @@ -16,7 +16,7 @@ namespace SharpLearning.Ensemble.Learners /// Classification model selecting EnsembleLearner. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// - public class ClassificationModelSelectingEnsembleLearner + public class ClassificationModelSelectingEnsembleLearner : ILearner , IIndexedLearner , ILearner @@ -38,9 +38,9 @@ public class ClassificationModelSelectingEnsembleLearner /// Strategy on how to combine the models /// Ensemble selection method used to find the best subset of models public ClassificationModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, ICrossValidation crossValidation, - IClassificationEnsembleStrategy ensembleStrategy, + IClassificationEnsembleStrategy ensembleStrategy, IClassificationEnsembleSelection ensembleSelection) : this(learners, crossValidation, () => ensembleStrategy, ensembleSelection) { @@ -57,9 +57,9 @@ public ClassificationModelSelectingEnsembleLearner( /// Strategy on how to combine the models /// Ensemble selection method used to find the best subset of models public ClassificationModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, ICrossValidation crossValidation, - Func ensembleStrategy, + Func ensembleStrategy, IClassificationEnsembleSelection ensembleSelection) { m_learners = learners ?? throw new ArgumentNullException(nameof(learners)); @@ -67,7 +67,7 @@ public ClassificationModelSelectingEnsembleLearner( m_ensembleStrategy = ensembleStrategy ?? throw new ArgumentNullException(nameof(ensembleStrategy)); m_ensembleSelection = ensembleSelection ?? throw new ArgumentNullException(nameof(ensembleSelection)); } - + /// /// Learns a ClassificationEnsembleModel based on model selection. /// Trains several models and selects the best subset of models for the ensemble.
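// Sketch of the full-parameter forward-search overload above, with its defaults spelled out
// (not part of the patch). The generic arguments are assumptions, and `learners` is an
// IIndexedLearner array built as in the tests earlier in this patch.
var sut = new ClassificationForwardSearchModelSelectingEnsembleLearner(
    learners,
    numberOfModelsToSelect: 5,
    crossValidation: new StratifiedCrossValidation(5, 42),
    ensembleStrategy: new MeanProbabilityClassificationEnsembleStrategy(),
    metric: new LogLossClassificationProbabilityMetric(),
    numberOfModelsFromStart: 1,   // the greedy search starts from the single best model
    selectWithReplacement: true); // re-selecting a model corresponds to weighting it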
@@ -93,7 +93,7 @@ public ClassificationEnsembleModel Learn(F64Matrix observations, double[] target /// /// /// - public ClassificationEnsembleModel Learn(F64Matrix observations, double[] targets, + public ClassificationEnsembleModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -130,7 +130,7 @@ public ProbabilityPrediction[][] LearnMetaFeatures(F64Matrix observations, doubl /// /// /// - public ProbabilityPrediction[][] LearnMetaFeatures(F64Matrix observations, double[] targets, + public ProbabilityPrediction[][] LearnMetaFeatures(F64Matrix observations, double[] targets, int[] indices) { var cvRows = indices.Length; @@ -160,7 +160,7 @@ public ProbabilityPrediction[][] LearnMetaFeatures(F64Matrix observations, doubl /// public ClassificationEnsembleModel SelectModels( F64Matrix observations, - ProbabilityPrediction[][] metaObservations, + ProbabilityPrediction[][] metaObservations, double[] targets) { var indices = Enumerable.Range(0, targets.Length).ToArray(); diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationRandomModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationRandomModelSelectingEnsembleLearner.cs index 3c6ddc6c..0298779f 100644 --- a/src/SharpLearning.Ensemble/Learners/ClassificationRandomModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/ClassificationRandomModelSelectingEnsembleLearner.cs @@ -26,11 +26,11 @@ public sealed class ClassificationRandomModelSelectingEnsembleLearner : Classifi /// Number of models to select /// Number of iterations to random select model combinations. public ClassificationRandomModelSelectingEnsembleLearner( - IIndexedLearner[] learners, - int numberOfModelsToSelect, + IIndexedLearner[] learners, + int numberOfModelsToSelect, int iterations = 50) - : this(learners, numberOfModelsToSelect, - new StratifiedCrossValidation(5, 42), + : this(learners, numberOfModelsToSelect, + new StratifiedCrossValidation(5, 42), new MeanProbabilityClassificationEnsembleStrategy(), new LogLossClassificationProbabilityMetric(), iterations) { @@ -51,17 +51,17 @@ public ClassificationRandomModelSelectingEnsembleLearner( /// If true the same model can be selected multiple times. /// public ClassificationRandomModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, int numberOfModelsToSelect, - ICrossValidation crossValidation, - IClassificationEnsembleStrategy ensembleStrategy, - IMetric metric, - int iterations = 50, - bool selectWithReplacement = true, + ICrossValidation crossValidation, + IClassificationEnsembleStrategy ensembleStrategy, + IMetric metric, + int iterations = 50, + bool selectWithReplacement = true, int seed = 42) - : base(learners, crossValidation, ensembleStrategy, + : base(learners, crossValidation, ensembleStrategy, new RandomClassificationEnsembleSelection( - metric, ensembleStrategy, numberOfModelsToSelect, + metric, ensembleStrategy, numberOfModelsToSelect, iterations, selectWithReplacement, seed)) { } diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationStackingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationStackingEnsembleLearner.cs index 81994135..385faede 100644 --- a/src/SharpLearning.Ensemble/Learners/ClassificationStackingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/ClassificationStackingEnsembleLearner.cs @@ -14,7 +14,7 @@ namespace SharpLearning.Ensemble.Learners /// Stacking Classification Ensemble Learner. 
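// The ClassificationModelSelectingEnsembleLearner can also be driven in two explicit steps,
// mirroring the LearnMetaFeatures/SelectModels members in the hunks above (sketch only; `sut`,
// `observations` and `targets` are assumed set up as in the tests earlier in this patch):
var metaObservations = sut.LearnMetaFeatures(observations, targets);   // out-of-fold ProbabilityPrediction[][]
var model = sut.SelectModels(observations, metaObservations, targets); // subset selection, then final fit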
/// http://mlwave.com/kaggle-ensembling-guide/ /// - public sealed class ClassificationStackingEnsembleLearner + public sealed class ClassificationStackingEnsembleLearner : ILearner , IIndexedLearner , ILearner @@ -36,11 +36,11 @@ public sealed class ClassificationStackingEnsembleLearner /// True; the meta learner also receives the original features. /// False; the meta learner only receives the output of the ensemble models as features. Default is true public ClassificationStackingEnsembleLearner( - IIndexedLearner[] learners, - ILearner metaLearner, + IIndexedLearner[] learners, + ILearner metaLearner, bool includeOriginalFeaturesForMetaLearner = true) - : this(learners, (obs, targets) => metaLearner.Learn(obs, targets), - new RandomCrossValidation(5, 42), + : this(learners, (obs, targets) => metaLearner.Learn(obs, targets), + new RandomCrossValidation(5, 42), includeOriginalFeaturesForMetaLearner) { } @@ -57,11 +57,11 @@ public ClassificationStackingEnsembleLearner( /// True; the meta learner also receives the original features. /// False; the meta learner only receives the output of the ensemble models as features. Default is true public ClassificationStackingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, ILearner metaLearner, - ICrossValidation crossValidation, + ICrossValidation crossValidation, bool includeOriginalFeaturesForMetaLearner = true) - : this(learners, (obs, targets) => metaLearner.Learn(obs, targets), + : this(learners, (obs, targets) => metaLearner.Learn(obs, targets), crossValidation, includeOriginalFeaturesForMetaLearner) { } @@ -77,9 +77,9 @@ public ClassificationStackingEnsembleLearner( /// True; the meta learner also receives the original features. /// False; the meta learner only receives the output of the ensemble models as features public ClassificationStackingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, Func> metaLearner, - ICrossValidation crossValidation, + ICrossValidation crossValidation, bool includeOriginalFeaturesForMetaLearner = true) { m_learners = learners ?? 
throw new ArgumentException(nameof(learners)); @@ -107,7 +107,7 @@ public ClassificationStackingEnsembleModel Learn(F64Matrix observations, double[ /// /// /// - public ClassificationStackingEnsembleModel Learn(F64Matrix observations, double[] targets, + public ClassificationStackingEnsembleModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -121,7 +121,7 @@ public ClassificationStackingEnsembleModel Learn(F64Matrix observations, double[ .ToArray(); var numberOfClasses = targets.Distinct().Count(); - return new ClassificationStackingEnsembleModel(ensembleModels, metaModel, + return new ClassificationStackingEnsembleModel(ensembleModels, metaModel, m_includeOriginalFeaturesForMetaLearner, numberOfClasses); } @@ -144,7 +144,7 @@ public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets) /// /// /// - public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets, + public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets, int[] indices) { var numberOfClasses = targets.Distinct().Count(); @@ -164,7 +164,7 @@ public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets, { Trace.WriteLine("Training model: " + (i + 1)); var learner = m_learners[i]; - m_crossValidation.CrossValidate(learner, observations, targets, + m_crossValidation.CrossValidate(learner, observations, targets, indices, modelPredictions); for (int j = 0; j < modelPredictions.Length; j++) @@ -200,8 +200,8 @@ public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets, /// /// public ClassificationStackingEnsembleModel LearnStackingModel( - F64Matrix observations, - F64Matrix metaObservations, + F64Matrix observations, + F64Matrix metaObservations, double[] targets) { var indices = Enumerable.Range(0, targets.Length).ToArray(); @@ -209,7 +209,7 @@ public ClassificationStackingEnsembleModel LearnStackingModel( var ensembleModels = m_learners.Select(learner => learner.Learn(observations, targets, indices)).ToArray(); var numberOfClasses = targets.Distinct().Count(); - return new ClassificationStackingEnsembleModel(ensembleModels, metaModel, + return new ClassificationStackingEnsembleModel(ensembleModels, metaModel, m_includeOriginalFeaturesForMetaLearner, numberOfClasses); } diff --git a/src/SharpLearning.Ensemble/Learners/RegressionBackwardEliminationModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionBackwardEliminationModelSelectingEnsembleLearner.cs index 879817be..4b888815 100644 --- a/src/SharpLearning.Ensemble/Learners/RegressionBackwardEliminationModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/RegressionBackwardEliminationModelSelectingEnsembleLearner.cs @@ -24,10 +24,10 @@ public sealed class RegressionBackwardEliminationModelSelectingEnsembleLearner : /// Learners in the ensemble /// Number of models to select public RegressionBackwardEliminationModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, int numberOfModelsToSelect) - : this(learners, numberOfModelsToSelect, - new RandomCrossValidation(5, 42), + : this(learners, numberOfModelsToSelect, + new RandomCrossValidation(5, 42), new MeanRegressionEnsembleStrategy(), new MeanSquaredErrorRegressionMetric()) { @@ -45,13 +45,13 @@ public RegressionBackwardEliminationModelSelectingEnsembleLearner( /// Strategy for ensembling models /// Metric to minimize public RegressionBackwardEliminationModelSelectingEnsembleLearner( - 
IIndexedLearner[] learners, + IIndexedLearner[] learners, int numberOfModelsToSelect, - ICrossValidation crossValidation, - IRegressionEnsembleStrategy ensembleStrategy, + ICrossValidation crossValidation, + IRegressionEnsembleStrategy ensembleStrategy, IMetric metric) - : base(learners, crossValidation, ensembleStrategy, - new BackwardEliminationRegressionEnsembleSelection(metric, + : base(learners, crossValidation, ensembleStrategy, + new BackwardEliminationRegressionEnsembleSelection(metric, ensembleStrategy, numberOfModelsToSelect)) { } diff --git a/src/SharpLearning.Ensemble/Learners/RegressionEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionEnsembleLearner.cs index 3139c0f0..7081730e 100644 --- a/src/SharpLearning.Ensemble/Learners/RegressionEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/RegressionEnsembleLearner.cs @@ -29,8 +29,8 @@ public sealed class RegressionEnsembleLearner : ILearner, IIndexedLearne /// If different from 1.0 models are trained using bagging with the chosen sub sample ratio /// Seed for the bagging when used public RegressionEnsembleLearner( - IIndexedLearner[] learners, - double subSampleRatio = 1.0, + IIndexedLearner[] learners, + double subSampleRatio = 1.0, int seed = 24) : this(learners.Select(l => new Func>((o, t, i) => l.Learn(o, t, i))).ToArray(), () => new MeanRegressionEnsembleStrategy(), subSampleRatio, seed) @@ -46,11 +46,11 @@ public RegressionEnsembleLearner( /// If different from 1.0 models are trained using bagging with the chosen sub sample ratio /// Seed for the bagging when used public RegressionEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, IRegressionEnsembleStrategy ensembleStrategy, - double subSampleRatio = 1.0, + double subSampleRatio = 1.0, int seed = 24) - : this(learners.Select(l => new Func>((o, t, i) => l.Learn(o, t, i))).ToArray(), + : this(learners.Select(l => new Func>((o, t, i) => l.Learn(o, t, i))).ToArray(), () => ensembleStrategy, subSampleRatio, seed) { } @@ -64,19 +64,19 @@ public RegressionEnsembleLearner( /// If different from 1.0 models are trained using bagging with the chosen sub sample ratio /// Seed for the bagging when used public RegressionEnsembleLearner( - Func>[] learners, + Func>[] learners, Func ensembleStrategy, - double subSampleRatio = 1.0, + double subSampleRatio = 1.0, int seed = 24) { - m_learners = learners ?? throw new ArgumentNullException("learners"); + m_learners = learners ?? throw new ArgumentNullException("learners"); if (learners.Length < 1) { throw new ArgumentException("there must be at least 1 learner"); } m_ensembleStrategy = ensembleStrategy ?? 
throw new ArgumentNullException("ensembleStrategy"); m_random = new Random(seed); m_subSampleRatio = subSampleRatio; } - + /// /// Learns a regression ensemble /// @@ -96,7 +96,7 @@ public RegressionEnsembleModel Learn(F64Matrix observations, double[] targets) /// /// /// - public RegressionEnsembleModel Learn(F64Matrix observations, double[] targets, + public RegressionEnsembleModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -105,7 +105,7 @@ public RegressionEnsembleModel Learn(F64Matrix observations, double[] targets, var ensembleModels = new IPredictorModel[m_learners.Length]; var sampleSize = (int)Math.Round(m_subSampleRatio * indices.Length); - if(sampleSize < 1) { throw new ArgumentException("subSampleRatio two small"); } + if (sampleSize < 1) { throw new ArgumentException("subSampleRatio two small"); } var inSample = new int[sampleSize]; diff --git a/src/SharpLearning.Ensemble/Learners/RegressionForwardSearchModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionForwardSearchModelSelectingEnsembleLearner.cs index 7ff9cb52..0a266abd 100644 --- a/src/SharpLearning.Ensemble/Learners/RegressionForwardSearchModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/RegressionForwardSearchModelSelectingEnsembleLearner.cs @@ -24,10 +24,10 @@ public sealed class RegressionForwardSearchModelSelectingEnsembleLearner : Regre /// Learners in the ensemble /// Number of models to select public RegressionForwardSearchModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, int numberOfModelsToSelect) - : this(learners, numberOfModelsToSelect, - new RandomCrossValidation(5, 42), + : this(learners, numberOfModelsToSelect, + new RandomCrossValidation(5, 42), new MeanRegressionEnsembleStrategy(), new MeanSquaredErrorRegressionMetric()) { @@ -49,15 +49,15 @@ public RegressionForwardSearchModelSelectingEnsembleLearner( /// If true the same model can be selected multiple times. /// This will correspond to weighting the models. If false each model can only be selected once. 
Default is true public RegressionForwardSearchModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, int numberOfModelsToSelect, - ICrossValidation crossValidation, - IRegressionEnsembleStrategy ensembleStrategy, - IMetric metric, - int numberOfModelsFromStart = 1, + ICrossValidation crossValidation, + IRegressionEnsembleStrategy ensembleStrategy, + IMetric metric, + int numberOfModelsFromStart = 1, bool selectWithReplacement = true) - : base(learners, crossValidation, ensembleStrategy, - new ForwardSearchRegressionEnsembleSelection(metric, ensembleStrategy, numberOfModelsToSelect, + : base(learners, crossValidation, ensembleStrategy, + new ForwardSearchRegressionEnsembleSelection(metric, ensembleStrategy, numberOfModelsToSelect, numberOfModelsFromStart, selectWithReplacement)) { } diff --git a/src/SharpLearning.Ensemble/Learners/RegressionModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionModelSelectingEnsembleLearner.cs index 2461d5f8..a2c0b5e3 100644 --- a/src/SharpLearning.Ensemble/Learners/RegressionModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/RegressionModelSelectingEnsembleLearner.cs @@ -34,9 +34,9 @@ public class RegressionModelSelectingEnsembleLearner : ILearner, IIndexe /// Strategy on how to combine the models /// Ensemble selection method used to find the best subset of models public RegressionModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, ICrossValidation crossValidation, - IRegressionEnsembleStrategy ensembleStrategy, + IRegressionEnsembleStrategy ensembleStrategy, IRegressionEnsembleSelection ensembleSelection) : this(learners, crossValidation, () => ensembleStrategy, ensembleSelection) { @@ -54,9 +54,9 @@ public RegressionModelSelectingEnsembleLearner( /// Strategy on how to combine the models /// Ensemble selection method used to find the best subset of models public RegressionModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, ICrossValidation crossValidation, - Func ensembleStrategy, + Func ensembleStrategy, IRegressionEnsembleSelection ensembleSelection) { m_learners = learners ??
throw new ArgumentNullException(nameof(learners)); @@ -87,7 +87,7 @@ public RegressionEnsembleModel Learn(F64Matrix observations, double[] targets) /// /// /// - public RegressionEnsembleModel Learn(F64Matrix observations, double[] targets, + public RegressionEnsembleModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -96,9 +96,9 @@ public RegressionEnsembleModel Learn(F64Matrix observations, double[] targets, var metaObservations = LearnMetaFeatures(observations, targets, indices); var metaModelTargets = targets.GetIndices(indices); - var ensembleModelIndices = m_ensembleSelection.Select(metaObservations, + var ensembleModelIndices = m_ensembleSelection.Select(metaObservations, metaModelTargets); - + var ensembleModels = m_learners.GetIndices(ensembleModelIndices) .Select(learner => learner.Learn(observations, targets, indices)).ToArray(); @@ -124,7 +124,7 @@ public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets) /// /// /// - public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets, + public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets, int[] indices) { var cvRows = indices.Length; @@ -136,7 +136,7 @@ public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets, { Trace.WriteLine("Training model: " + (i + 1)); var learner = m_learners[i]; - m_crossValidation.CrossValidate(learner, observations, targets, + m_crossValidation.CrossValidate(learner, observations, targets, indices, modelPredictions); for (int j = 0; j < modelPredictions.Length; j++) @@ -157,8 +157,8 @@ public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets, /// /// public RegressionEnsembleModel SelectModels( - F64Matrix observations, - F64Matrix metaObservations, + F64Matrix observations, + F64Matrix metaObservations, double[] targets) { var indices = Enumerable.Range(0, targets.Length).ToArray(); diff --git a/src/SharpLearning.Ensemble/Learners/RegressionRandomModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionRandomModelSelectingEnsembleLearner.cs index ec6da5cb..3d498e0a 100644 --- a/src/SharpLearning.Ensemble/Learners/RegressionRandomModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/RegressionRandomModelSelectingEnsembleLearner.cs @@ -25,11 +25,11 @@ public sealed class RegressionRandomModelSelectingEnsembleLearner : RegressionMo /// Number of models to select /// Number of iterations to random select model combinations. public RegressionRandomModelSelectingEnsembleLearner( - IIndexedLearner[] learners, - int numberOfModelsToSelect, - int iterations=50) - : this(learners, numberOfModelsToSelect, - new RandomCrossValidation(5, 42), + IIndexedLearner[] learners, + int numberOfModelsToSelect, + int iterations = 50) + : this(learners, numberOfModelsToSelect, + new RandomCrossValidation(5, 42), new MeanRegressionEnsembleStrategy(), new MeanSquaredErrorRegressionMetric()) { @@ -51,16 +51,16 @@ public RegressionRandomModelSelectingEnsembleLearner( /// If true the same model can be selected multiple times. 
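// Full-parameter usage sketch for the RegressionRandomModelSelectingEnsembleLearner (not part
// of the patch; its convenience overload above defaults to RandomCrossValidation(5, 42), mean
// ensembling and mean squared error). Each iteration draws a random candidate subset of
// numberOfModelsToSelect models and keeps the subset with the lowest metric error.
// `learners` and the generic arguments are assumptions.
var sut = new RegressionRandomModelSelectingEnsembleLearner(
    learners,
    numberOfModelsToSelect: 5,
    crossValidation: new RandomCrossValidation(5, 42),
    ensembleStrategy: new MeanRegressionEnsembleStrategy(),
    metric: new MeanSquaredErrorRegressionMetric(),
    iterations: 50,
    selectWithReplacement: true,
    seed: 42);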
/// public RegressionRandomModelSelectingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, int numberOfModelsToSelect, - ICrossValidation crossValidation, - IRegressionEnsembleStrategy ensembleStrategy, - IMetric metric, - int iterations = 50, - bool selectWithReplacement = true, + ICrossValidation crossValidation, + IRegressionEnsembleStrategy ensembleStrategy, + IMetric metric, + int iterations = 50, + bool selectWithReplacement = true, int seed = 42) - : base(learners, crossValidation, ensembleStrategy, - new RandomRegressionEnsembleSelection(metric, ensembleStrategy, numberOfModelsToSelect, + : base(learners, crossValidation, ensembleStrategy, + new RandomRegressionEnsembleSelection(metric, ensembleStrategy, numberOfModelsToSelect, iterations, selectWithReplacement, seed)) { } diff --git a/src/SharpLearning.Ensemble/Learners/RegressionStackingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionStackingEnsembleLearner.cs index bb3b8469..41209e51 100644 --- a/src/SharpLearning.Ensemble/Learners/RegressionStackingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/RegressionStackingEnsembleLearner.cs @@ -32,10 +32,10 @@ public sealed class RegressionStackingEnsembleLearner : ILearner, IIndex /// True; the meta learner also receives the original features. /// False; the meta learner only receives the output of the ensemble models as features. Default is true public RegressionStackingEnsembleLearner( - IIndexedLearner[] learners, - ILearner metaLearner, + IIndexedLearner[] learners, + ILearner metaLearner, bool includeOriginalFeaturesForMetaLearner = true) - : this(learners, (obs, targets) => metaLearner.Learn(obs, targets), + : this(learners, (obs, targets) => metaLearner.Learn(obs, targets), new RandomCrossValidation(5, 42), includeOriginalFeaturesForMetaLearner) { } @@ -51,11 +51,11 @@ public RegressionStackingEnsembleLearner( /// True; the meta learner also receives the original features. /// False; the meta learner only receives the output of the ensemble models as features. Default is true public RegressionStackingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, ILearner metaLearner, - ICrossValidation crossValidation, + ICrossValidation crossValidation, bool includeOriginalFeaturesForMetaLearner = true) - : this(learners, (obs, targets) => metaLearner.Learn(obs, targets), + : this(learners, (obs, targets) => metaLearner.Learn(obs, targets), crossValidation, includeOriginalFeaturesForMetaLearner) { } @@ -71,9 +71,9 @@ public RegressionStackingEnsembleLearner( /// True; the meta learner also receives the original features. /// False; the meta learner only receives the output of the ensemble models as features public RegressionStackingEnsembleLearner( - IIndexedLearner[] learners, + IIndexedLearner[] learners, Func> metaLearner, - ICrossValidation crossValidation, + ICrossValidation crossValidation, bool includeOriginalFeaturesForMetaLearner = true) { m_learners = learners ?? 
throw new ArgumentException(nameof(learners)); @@ -101,7 +101,7 @@ public RegressionStackingEnsembleModel Learn(F64Matrix observations, double[] ta /// /// /// - public RegressionStackingEnsembleModel Learn(F64Matrix observations, double[] targets, + public RegressionStackingEnsembleModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -114,7 +114,7 @@ public RegressionStackingEnsembleModel Learn(F64Matrix observations, double[] ta var ensembleModels = m_learners.Select(learner => learner.Learn(observations, targets, indices)) .ToArray(); - return new RegressionStackingEnsembleModel(ensembleModels, metaModel, + return new RegressionStackingEnsembleModel(ensembleModels, metaModel, m_includeOriginalFeaturesForMetaLearner); } @@ -137,7 +137,7 @@ public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets) /// /// /// - public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets, + public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets, int[] indices) { var cvRows = indices.Length; @@ -155,7 +155,7 @@ public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets, Trace.WriteLine("Training model: " + (i + 1)); var learner = m_learners[i]; - m_crossValidation.CrossValidate(learner, observations, targets, + m_crossValidation.CrossValidate(learner, observations, targets, indices, modelPredictions); for (int j = 0; j < modelPredictions.Length; j++) @@ -185,8 +185,8 @@ public F64Matrix LearnMetaFeatures(F64Matrix observations, double[] targets, /// /// public RegressionStackingEnsembleModel LearnStackingModel( - F64Matrix observations, - F64Matrix metaObservations, + F64Matrix observations, + F64Matrix metaObservations, double[] targets) { var indices = Enumerable.Range(0, targets.Length).ToArray(); diff --git a/src/SharpLearning.Ensemble/Models/ClassificationEnsembleModel.cs b/src/SharpLearning.Ensemble/Models/ClassificationEnsembleModel.cs index a241eb54..f466f85b 100644 --- a/src/SharpLearning.Ensemble/Models/ClassificationEnsembleModel.cs +++ b/src/SharpLearning.Ensemble/Models/ClassificationEnsembleModel.cs @@ -22,7 +22,7 @@ public class ClassificationEnsembleModel : IPredictorModel, IPredictorMo /// /// Models included in the ensemble /// Strategy on how to combine the models - public ClassificationEnsembleModel(IPredictorModel[] ensembleModels, + public ClassificationEnsembleModel(IPredictorModel[] ensembleModels, IClassificationEnsembleStrategy ensembleStrategy) { m_ensembleModels = ensembleModels ?? throw new ArgumentNullException(nameof(ensembleModels)); @@ -123,7 +123,7 @@ public Dictionary GetVariableImportance(Dictionary var modelImportances = model.GetVariableImportance(featureNameToIndex); foreach (var importance in modelImportances) { - variableImportance[importance.Key] += importance.Value; + variableImportance[importance.Key] += importance.Value; } } diff --git a/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs b/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs index 4190356b..5bfa9eab 100644 --- a/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs +++ b/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs @@ -26,9 +26,9 @@ public class ClassificationStackingEnsembleModel : IPredictorModel, IPre /// True; the meta learner also receives the original features. 
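The stacking constructors in the hunks above compose the same way; a sketch under the same assumptions (observations/targets prepared elsewhere, base and meta learners chosen arbitrarily for illustration):

// Sketch only: the meta learner is trained on the base learners'
// cross-validated predictions; includeOriginalFeaturesForMetaLearner = true
// appends the original features to those predictions.
var stackingLearner = new RegressionStackingEnsembleLearner(
    learners: new IIndexedLearner<double>[]
    {
        new RegressionDecisionTreeLearner(maximumTreeDepth: 5),
        new RegressionDecisionTreeLearner(maximumTreeDepth: 12),
    },
    metaLearner: new RegressionDecisionTreeLearner(maximumTreeDepth: 3),
    includeOriginalFeaturesForMetaLearner: true);

var stackingModel = stackingLearner.Learn(observations, targets);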
/// False; the meta learner only receives the output of the ensemble models as features /// Number of classes in the classification problem - public ClassificationStackingEnsembleModel(IPredictorModel[] ensembleModels, + public ClassificationStackingEnsembleModel(IPredictorModel[] ensembleModels, IPredictorModel metaModel, - bool includeOriginalFeaturesForMetaLearner, + bool includeOriginalFeaturesForMetaLearner, int numberOfClasses) { m_ensembleModels = ensembleModels ?? throw new ArgumentException(nameof(ensembleModels)); @@ -188,7 +188,7 @@ ProbabilityPrediction[] IPredictor.Predict(F64Matrix obse string GetNewFeatureName(string name, Dictionary ensembleFeatureNameToIndex) { - if(ensembleFeatureNameToIndex.ContainsKey(name)) + if (ensembleFeatureNameToIndex.ContainsKey(name)) { name += "_PreviousStack"; return GetNewFeatureName(name, ensembleFeatureNameToIndex); diff --git a/src/SharpLearning.Ensemble/Models/RegressionEnsembleModel.cs b/src/SharpLearning.Ensemble/Models/RegressionEnsembleModel.cs index bb2df4b9..d3fb0f9d 100644 --- a/src/SharpLearning.Ensemble/Models/RegressionEnsembleModel.cs +++ b/src/SharpLearning.Ensemble/Models/RegressionEnsembleModel.cs @@ -21,7 +21,7 @@ public class RegressionEnsembleModel : IPredictorModel /// /// Models included in the ensemble /// Strategy on how to combine the models - public RegressionEnsembleModel(IPredictorModel[] ensembleModels, + public RegressionEnsembleModel(IPredictorModel[] ensembleModels, IRegressionEnsembleStrategy ensembleStrategy) { m_ensembleModels = ensembleModels ?? throw new ArgumentNullException(nameof(ensembleModels)); @@ -63,7 +63,7 @@ public double[] Predict(F64Matrix observations) return predictions; } - + /// /// Gets the raw unsorted variable importance scores /// @@ -94,7 +94,7 @@ public Dictionary GetVariableImportance(Dictionary var modelImportances = model.GetVariableImportance(featureNameToIndex); foreach (var importance in modelImportances) { - variableImportance[importance.Key] += importance.Value; + variableImportance[importance.Key] += importance.Value; } } diff --git a/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs b/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs index 3e2ca3d6..ea28a2bc 100644 --- a/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs +++ b/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs @@ -102,7 +102,7 @@ public Dictionary GetVariableImportance(Dictionary { duplicateModelCount[name] += 1; } - + ensembleFeatureNameToIndex.Add(name + "_" + duplicateModelCount[name].ToString(), index++); } diff --git a/src/SharpLearning.Ensemble/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategy.cs b/src/SharpLearning.Ensemble/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategy.cs index 388542e1..05bc49af 100644 --- a/src/SharpLearning.Ensemble/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategy.cs +++ b/src/SharpLearning.Ensemble/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategy.cs @@ -24,7 +24,7 @@ public ProbabilityPrediction Combine(ProbabilityPrediction[] ensemblePredictions var sum = averageProbabilities.Values.Sum(); averageProbabilities = averageProbabilities.ToDictionary(p => p.Key, p => p.Value / sum); - + var prediction = averageProbabilities.OrderByDescending(d => d.Value).First().Key; return new ProbabilityPrediction(prediction, averageProbabilities); @@ -54,7 +54,7 @@ double GeometricMean(double[] values) var geoMean = 0.0; for (int i = 0; i < values.Length; 
i++) { - if(i == 0) + if (i == 0) { geoMean = values[i]; } diff --git a/src/SharpLearning.FeatureTransformations.Test/CsvRowTransforms/DateTimeFeatureTransformerTest.cs b/src/SharpLearning.FeatureTransformations.Test/CsvRowTransforms/DateTimeFeatureTransformerTest.cs index cbc25cc0..634b6c4c 100644 --- a/src/SharpLearning.FeatureTransformations.Test/CsvRowTransforms/DateTimeFeatureTransformerTest.cs +++ b/src/SharpLearning.FeatureTransformations.Test/CsvRowTransforms/DateTimeFeatureTransformerTest.cs @@ -38,7 +38,7 @@ public class DateTimeFeatureTransformerTest public void DateTimeFeatureTransformer_Transform() { var sut = new DateTimeFeatureTransformer("Date"); - + var writer = new StringWriter(); new CsvParser(() => new StringReader(m_input)) diff --git a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs index 911d447b..a2a503c5 100644 --- a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs +++ b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs @@ -51,7 +51,7 @@ public DateTimeFeatureTransformer(string dateTimeColumn, DateTime startDate) /// public string[] FeatureNames { - get => new [] { "Year", "Month", "WeekOfYear", "DayOfMonth", "DayOfWeek", "HourOfDay", "TotalDays", "TotalHours" }; + get => new[] { "Year", "Month", "WeekOfYear", "DayOfMonth", "DayOfWeek", "HourOfDay", "TotalDays", "TotalHours" }; } /// diff --git a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/MapCategoricalFeaturesTransformer.cs b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/MapCategoricalFeaturesTransformer.cs index 0bd2e249..8c5db10f 100644 --- a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/MapCategoricalFeaturesTransformer.cs +++ b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/MapCategoricalFeaturesTransformer.cs @@ -37,7 +37,7 @@ public MapCategoricalFeaturesTransformer(params string[] columnsToMap) /// public IEnumerable Transform(IEnumerable rows) { - if(m_namedFeatureMapping.Count == 0) + if (m_namedFeatureMapping.Count == 0) { foreach (var column in m_columnsToMap) { @@ -52,7 +52,7 @@ public IEnumerable Transform(IEnumerable rows) { var columnMap = m_namedFeatureMapping[column]; var value = row.GetValue(column); - if(columnMap.ContainsKey(value)) + if (columnMap.ContainsKey(value)) { row.SetValue(column, columnMap[value]); } diff --git a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/OneHotTransformer.cs b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/OneHotTransformer.cs index a6bda639..2c4b1147 100644 --- a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/OneHotTransformer.cs +++ b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/OneHotTransformer.cs @@ -45,7 +45,7 @@ public OneHotTransformer(params string[] columnsToMap) /// public IEnumerable Transform(IEnumerable rows) { - if(m_featureMap.Count == 0) + if (m_featureMap.Count == 0) { foreach (var column in m_columnsToMap) { diff --git a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/ReplaceMissingValuesTransformer.cs b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/ReplaceMissingValuesTransformer.cs index c8a545bb..538316b6 100644 --- a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/ReplaceMissingValuesTransformer.cs +++ b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/ReplaceMissingValuesTransformer.cs @@ -43,7 +43,7 
@@ public IEnumerable Transform(IEnumerable rows) for (int i = 0; i < values.Length; i++) { var value = values[i]; - if(m_missingValueIdentifiers.ContainsKey(value)) + if (m_missingValueIdentifiers.ContainsKey(value)) { values[i] = m_replacementValue; } diff --git a/src/SharpLearning.FeatureTransformations/FeatureTransformationExtensions.cs b/src/SharpLearning.FeatureTransformations/FeatureTransformationExtensions.cs index 9235564b..82ebb33a 100644 --- a/src/SharpLearning.FeatureTransformations/FeatureTransformationExtensions.cs +++ b/src/SharpLearning.FeatureTransformations/FeatureTransformationExtensions.cs @@ -16,7 +16,7 @@ public static class FeatureTransformationExtensions /// /// /// - public static IEnumerable Transform(this IEnumerable rows, + public static IEnumerable Transform(this IEnumerable rows, Func, IEnumerable> transformFunc) { return transformFunc(rows); @@ -28,7 +28,7 @@ public static IEnumerable Transform(this IEnumerable rows, /// /// /// - public static StringMatrix Transform(this StringMatrix matrix, + public static StringMatrix Transform(this StringMatrix matrix, Action transformFunc) { transformFunc(matrix, matrix); @@ -41,7 +41,7 @@ public static StringMatrix Transform(this StringMatrix matrix, /// /// /// - public static F64Matrix Transform(this F64Matrix matrix, + public static F64Matrix Transform(this F64Matrix matrix, Action transformFunc) { transformFunc(matrix, matrix); diff --git a/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs b/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs index 211d38ea..cbbeffca 100644 --- a/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs +++ b/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs @@ -14,7 +14,7 @@ public sealed class MinMaxTransformer : IF64MatrixTransform, IF64VectorTransform [Serializable] class FeatureMinMax { - public double Min {get; set;} + public double Min { get; set; } public double Max { get; set; } } @@ -83,7 +83,7 @@ public void Transform(F64Matrix matrix, F64Matrix output) var newValue = m_normalizer.Normalize(m_min, m_max, minMax.Min, minMax.Max, value); output[j, i] = newValue; } - } + } } void CreateFeatureMinMax(F64Matrix matrix) @@ -104,9 +104,9 @@ void CreateFeatureMinMax(F64Matrix matrix) else if (value > minMax.Max) { minMax.Max = value; - } + } } - } + } } /// diff --git a/src/SharpLearning.GradientBoost.Test/GBMDecisionTree/GBMDecisionTreeLearnerTest.cs b/src/SharpLearning.GradientBoost.Test/GBMDecisionTree/GBMDecisionTreeLearnerTest.cs index 4319afc2..6b5516b0 100644 --- a/src/SharpLearning.GradientBoost.Test/GBMDecisionTree/GBMDecisionTreeLearnerTest.cs +++ b/src/SharpLearning.GradientBoost.Test/GBMDecisionTree/GBMDecisionTreeLearnerTest.cs @@ -44,7 +44,7 @@ public void GBMDecisionTreeLearner_Constructor_Loss_Null() { new GBMDecisionTreeLearner(1, 1, 1.0, 1, null, false); } - + [TestMethod] public void GBMDecisionTreeLearner_Learn() { @@ -55,16 +55,16 @@ public void GBMDecisionTreeLearner_Learn() var rows = observations.RowCount; for (int i = 0; i < observations.ColumnCount; i++) - { - var feature = observations.Column(i); + { + var feature = observations.Column(i); var indices = Enumerable.Range(0, rows).ToArray(); feature.SortWith(indices); orderedElements[i] = indices; - } + } var sut = new GBMDecisionTreeLearner(10); var tree = sut.Learn(observations, targets, targets, targets, orderedElements, inSample); - + var predictions = tree.Predict(observations); var evaluator = 
new MeanSquaredErrorRegressionMetric(); var actual = evaluator.Error(targets, predictions); diff --git a/src/SharpLearning.GradientBoost.Test/Learners/RegressionAbsoluteLossGradientBoostLearnerTest.cs b/src/SharpLearning.GradientBoost.Test/Learners/RegressionAbsoluteLossGradientBoostLearnerTest.cs index 13fd8e3a..16f62645 100644 --- a/src/SharpLearning.GradientBoost.Test/Learners/RegressionAbsoluteLossGradientBoostLearnerTest.cs +++ b/src/SharpLearning.GradientBoost.Test/Learners/RegressionAbsoluteLossGradientBoostLearnerTest.cs @@ -19,7 +19,7 @@ public void RegressionAbsoluteLossGradientBoostLearner_Learn() var model = sut.Learn(observations, targets); var predictions = model.Predict(observations); - + var evaluator = new MeanSquaredErrorRegressionMetric(); var actual = evaluator.Error(targets, predictions); diff --git a/src/SharpLearning.GradientBoost.Test/Learners/RegressionQuantileLossGradientBoostLearnerTest.cs b/src/SharpLearning.GradientBoost.Test/Learners/RegressionQuantileLossGradientBoostLearnerTest.cs index f3b9be5b..21644380 100644 --- a/src/SharpLearning.GradientBoost.Test/Learners/RegressionQuantileLossGradientBoostLearnerTest.cs +++ b/src/SharpLearning.GradientBoost.Test/Learners/RegressionQuantileLossGradientBoostLearnerTest.cs @@ -19,7 +19,7 @@ public void RegressionQuantileLossGradientBoostLearner_Learn() var model = sut.Learn(observations, targets); var predictions = model.Predict(observations); - + var evaluator = new MeanSquaredErrorRegressionMetric(); var actual = evaluator.Error(targets, predictions); diff --git a/src/SharpLearning.GradientBoost.Test/Learners/RegressionSquareLossGradientBoostLearnerTest.cs b/src/SharpLearning.GradientBoost.Test/Learners/RegressionSquareLossGradientBoostLearnerTest.cs index 512b4e46..06614fca 100644 --- a/src/SharpLearning.GradientBoost.Test/Learners/RegressionSquareLossGradientBoostLearnerTest.cs +++ b/src/SharpLearning.GradientBoost.Test/Learners/RegressionSquareLossGradientBoostLearnerTest.cs @@ -19,7 +19,7 @@ public void RegressionSquareLossGradientBoostLearner_Learn() var model = sut.Learn(observations, targets); var predictions = model.Predict(observations); - + var evaluator = new MeanSquaredErrorRegressionMetric(); var actual = evaluator.Error(targets, predictions); diff --git a/src/SharpLearning.GradientBoost.Test/Loss/GradientBoostBinomialLossTest.cs b/src/SharpLearning.GradientBoost.Test/Loss/GradientBoostBinomialLossTest.cs index 9e591e75..fe6b46eb 100644 --- a/src/SharpLearning.GradientBoost.Test/Loss/GradientBoostBinomialLossTest.cs +++ b/src/SharpLearning.GradientBoost.Test/Loss/GradientBoostBinomialLossTest.cs @@ -40,7 +40,7 @@ public void GBMBinomialLoss_UpdateResiduals() public void GBMBinomialLoss_UpdateResiduals_Indexed() { var targets = new double[] { 1, 1, 1, 1, 0, 0, 0, 0, 0 }; - var predictions = new double[] { 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0 }; + var predictions = new double[] { 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0 }; var actual = new double[targets.Length]; var inSample = new bool[] { true, false, true, false, true, false, true, false, true }; var sut = new GradientBoostBinomialLoss(); @@ -60,7 +60,7 @@ public void GBMBinomialLoss_UpdateResiduals_Indexed() public void GBMBinomialLoss_UpdatedLeafValue() { var targets = new double[] { 1, 1, 1, 1, 0, 0, 0, 0, 0 }; - var predictions = new double[] { 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0 }; + var predictions = new double[] { 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0 }; var inSample = new bool[] { true, true, true, true, true, true, true, 
true, true }; var sut = new GradientBoostBinomialLoss(); @@ -72,7 +72,7 @@ public void GBMBinomialLoss_UpdatedLeafValue() public void GBMBinomialLoss_UpdatedLeafValue_Indexed() { var targets = new double[] { 1, 1, 1, 1, 0, 0, 0, 0, 0 }; - var predictions = new double[] { 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0 }; + var predictions = new double[] { 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0 }; var inSample = new bool[] { true, false, true, false, true, false, true, false, true }; var sut = new GradientBoostBinomialLoss(); diff --git a/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs b/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs index 71142e75..1001c8ce 100644 --- a/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs +++ b/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs @@ -21,7 +21,7 @@ public void ClassificationGradientBoostModel_Predict_Single() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, + var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, new GradientBoostBinomialLoss(), false); var sut = learner.Learn(observations, targets); @@ -44,7 +44,7 @@ public void ClassificationGradientBoostModel_Predict_Multiple() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, + var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, new GradientBoostBinomialLoss(), false); var sut = learner.Learn(observations, targets); @@ -62,7 +62,7 @@ public void ClassificationGradientBoostModel_PredictProbability_Single() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, + var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, new GradientBoostBinomialLoss(), false); var sut = learner.Learn(observations, targets); @@ -88,7 +88,7 @@ public void ClassificationGradientBoostModel_PredictProbability_Multiple() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, + var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, new GradientBoostBinomialLoss(), false); var sut = learner.Learn(observations, targets); @@ -108,16 +108,16 @@ public void ClassificationGradientBoostModel_GetVariableImportance() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, + var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, { "PreviousExperience_month", 1 } }; - var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, + var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, new GradientBoostBinomialLoss(), false); var sut = learner.Learn(observations, targets); var actual = sut.GetVariableImportance(featureNameToIndex); - var expected = new Dictionary { {"PreviousExperience_month", 100}, + var expected = new Dictionary { {"PreviousExperience_month", 100}, {"AptitudeTestScore", 56.81853305612 } }; Assert.AreEqual(expected.Count, actual.Count); @@ -135,7 +135,7 @@ public void 
ClassificationGradientBoostModel_GetRawVariableImportance() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, + var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, new GradientBoostBinomialLoss(), false); var sut = learner.Learn(observations, targets); @@ -204,6 +204,6 @@ void Write(ProbabilityPrediction[] predictions) value += "};"; Trace.WriteLine(value); - } + } } } diff --git a/src/SharpLearning.GradientBoost.Test/Models/RegressionGradientBoostModelTest.cs b/src/SharpLearning.GradientBoost.Test/Models/RegressionGradientBoostModelTest.cs index 91a1ed0b..9ac01f7a 100644 --- a/src/SharpLearning.GradientBoost.Test/Models/RegressionGradientBoostModelTest.cs +++ b/src/SharpLearning.GradientBoost.Test/Models/RegressionGradientBoostModelTest.cs @@ -19,7 +19,7 @@ public void RegressionGradientBoostModel_Predict_Single() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var learner = new RegressionGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1.0, 0, + var learner = new RegressionGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1.0, 0, new GradientBoostSquaredLoss(), false); var sut = learner.Learn(observations, targets); @@ -42,7 +42,7 @@ public void RegressionGradientBoostModel_Precit_Multiple() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var learner = new RegressionGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1.0, 0, + var learner = new RegressionGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1.0, 0, new GradientBoostSquaredLoss(), false); var sut = learner.Learn(observations, targets); @@ -60,16 +60,16 @@ public void RegressionGradientBoostModel_GetVariableImportance() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, + var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, { "PreviousExperience_month", 1 } }; - var learner = new RegressionGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1.0, 0, + var learner = new RegressionGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1.0, 0, new GradientBoostSquaredLoss(), false); var sut = learner.Learn(observations, targets); var actual = sut.GetVariableImportance(featureNameToIndex); - var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, + var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, { "AptitudeTestScore", 72.1682473281495 } }; Assert.AreEqual(expected.Count, actual.Count); @@ -87,7 +87,7 @@ public void RegressionGradientBoostModel_GetRawVariableImportance() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var learner = new RegressionGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1.0, 0, + var learner = new RegressionGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1.0, 0, new GradientBoostSquaredLoss(), false); var sut = learner.Learn(observations, targets); @@ -108,7 +108,7 @@ public void RegressionGradientBoostModel_Save() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var learner = new RegressionGradientBoostLearner(2, 0.1, 3, 1, 1e-6, 1.0, 0, + var learner = new RegressionGradientBoostLearner(2, 0.1, 3, 1, 1e-6, 1.0, 0, new GradientBoostSquaredLoss(), false); var sut = learner.Learn(observations, targets); diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs index 78495fd8..8207b9f3 
100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs +++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs @@ -34,11 +34,11 @@ public sealed class GBMDecisionTreeLearner /// loss function used /// Use multi threading to speed up execution public GBMDecisionTreeLearner( - int maximumTreeDepth, - int minimumSplitSize, - double minimumInformationGain, - int featuresPrSplit, - IGradientBoostLoss loss, + int maximumTreeDepth, + int minimumSplitSize, + double minimumInformationGain, + int featuresPrSplit, + IGradientBoostLoss loss, bool runParallel) { if (maximumTreeDepth <= 0) { throw new ArgumentException("maximum tree depth must be larger than 0"); } @@ -62,12 +62,12 @@ public GBMDecisionTreeLearner( /// The minimum improvement in information gain before a split is made /// Number of features used at each split in the tree. 0 means all will be used public GBMDecisionTreeLearner( - int maximumTreeDepth = 2000, - int minimumSplitSize = 1, - double minimumInformationGain = 1E-6, + int maximumTreeDepth = 2000, + int minimumSplitSize = 1, + double minimumInformationGain = 1E-6, int featuresPrSplit = 0) - : this(maximumTreeDepth, minimumSplitSize, - minimumInformationGain, featuresPrSplit, + : this(maximumTreeDepth, minimumSplitSize, + minimumInformationGain, featuresPrSplit, new GradientBoostSquaredLoss(), true) { } @@ -83,17 +83,17 @@ public GBMDecisionTreeLearner( /// Bool array containing the samples to use /// public GBMTree Learn( - F64Matrix observations, - double[] targets, - double[] residuals, + F64Matrix observations, + double[] targets, + double[] residuals, double[] predictions, - int[][] orderedElements, + int[][] orderedElements, bool[] inSample) { var rootValues = m_loss.InitSplit(targets, residuals, inSample); var bestConstant = rootValues.BestConstant; - if(m_loss.UpdateLeafValues()) + if (m_loss.UpdateLeafValues()) { bestConstant = m_loss.UpdatedLeafValue(bestConstant, targets, predictions, inSample); @@ -109,7 +109,7 @@ public GBMTree Learn( RightConstant = bestConstant, SampleCount = rootValues.Samples }; - + var nodes = new List { root }; var queue = new Queue(100); @@ -156,7 +156,7 @@ public GBMTree Learn( allFeatureIndices.Shuffle(m_random); Array.Copy(allFeatureIndices, featuresPrSplit, featuresPrSplit.Length); } - + if (!m_runParallel) { foreach (var i in featuresPrSplit) @@ -175,7 +175,7 @@ public GBMTree Learn( } void FindSplit() => SplitWorker( - observations, residuals, targets, predictions, + observations, residuals, targets, predictions, orderedElements, parentItem, parentInSample, workItems, splitResults); var workers = new List(); @@ -299,32 +299,32 @@ static void EmpytySplitResults(ConcurrentBag splitResults) while (splitResults.TryTake(out GBMSplitResult result)) ; } - void SplitWorker(F64Matrix observations, - double[] residuals, - double[] targets, - double[] predictions, - int[][] orderedElements, - GBMTreeCreationItem parentItem, - bool[] parentInSample, - ConcurrentQueue featureIndices, + void SplitWorker(F64Matrix observations, + double[] residuals, + double[] targets, + double[] predictions, + int[][] orderedElements, + GBMTreeCreationItem parentItem, + bool[] parentInSample, + ConcurrentQueue featureIndices, ConcurrentBag results) { int featureIndex = -1; while (featureIndices.TryDequeue(out featureIndex)) { - FindBestSplit(observations, residuals, targets, predictions, orderedElements, + FindBestSplit(observations, residuals, targets, predictions, orderedElements, parentItem, parentInSample, 
featureIndex, results); } } - void FindBestSplit(F64Matrix observations, - double[] residuals, - double[] targets, - double[] predictions, - int[][] orderedElements, - GBMTreeCreationItem parentItem, - bool[] parentInSample, - int featureIndex, + void FindBestSplit(F64Matrix observations, + double[] residuals, + double[] targets, + double[] predictions, + int[][] orderedElements, + GBMTreeCreationItem parentItem, + bool[] parentInSample, + int featureIndex, ConcurrentBag results) { var bestSplit = new GBMSplit @@ -355,12 +355,12 @@ void FindBestSplit(F64Matrix observations, } var currentIndex = orderedIndices[j]; - + m_loss.UpdateSplitConstants(ref left, ref right, targets[currentIndex], residuals[currentIndex]); var previousValue = observations.At(currentIndex, featureIndex); - - while(right.Samples > 0) + + while (right.Samples > 0) { j = NextAllowedIndex(j + 1, orderedIndices, parentInSample); currentIndex = orderedIndices[j]; @@ -393,7 +393,7 @@ void FindBestSplit(F64Matrix observations, previousValue = currentValue; } - if(bestSplit.FeatureIndex != -1) + if (bestSplit.FeatureIndex != -1) { results.Add(new GBMSplitResult { BestSplit = bestSplit, Left = bestLeft, Right = bestRight }); } diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMNode.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMNode.cs index 659961a2..9b3fc5a5 100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMNode.cs +++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMNode.cs @@ -33,12 +33,12 @@ public class GBMNode /// Left constant (fitted value) of the split /// public double LeftConstant; - + /// /// Right constant (fitted value) of the split /// public double RightConstant; - + /// /// Depth of the node in the decision tree /// @@ -48,12 +48,12 @@ public class GBMNode /// Index of the left child node the node in the decision tree array /// public int LeftIndex = -1; - + /// /// Index of the left child node the node in the decision tree array /// public int RightIndex = -1; - + /// /// The number of observations in the node /// diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitInfo.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitInfo.cs index 9276888b..142ffe2a 100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitInfo.cs +++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitInfo.cs @@ -21,7 +21,7 @@ public struct GBMSplitInfo /// Current sum of squares of the split /// public double SumOfSquares; - + /// /// Current cost of the split /// diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitResult.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitResult.cs index f5fd3b73..17e6202a 100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitResult.cs +++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitResult.cs @@ -10,12 +10,12 @@ public struct GBMSplitResult /// Best split found /// public GBMSplit BestSplit; - + /// /// Left values corresponding to best split /// public GBMSplitInfo Left; - + /// /// Right values corresponding to best split /// diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMTree.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMTree.cs index 611dbd4c..1b1df09f 100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMTree.cs +++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMTree.cs @@ -16,7 +16,7 @@ public class GBMTree /// /// public readonly List Nodes; - + /// /// Creates a GBMTree from the provided nodes /// @@ -93,7 +93,7 @@ 
public double Predict(double[] observation) /// public void AddRawVariableImportances(double[] rawVariableImportances) { - if(Nodes.Count == 1) { return; } // no splits no importance + if (Nodes.Count == 1) { return; } // no splits no importance var rootError = Nodes[0].LeftError; var totalSampleCount = Nodes[0].SampleCount; @@ -133,7 +133,7 @@ public void TraceNodesDepth() text += string.Format("{0:0.000} I:{1} ", node.Node.SplitValue, node.Node.FeatureIndex); - if(node.Node.LeftIndex == -1) + if (node.Node.LeftIndex == -1) { text += string.Format("L: {0:0.000} ", node.Node.LeftConstant); } diff --git a/src/SharpLearning.GradientBoost/Learners/ClassificationBinomialGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/ClassificationBinomialGradientBoostLearner.cs index cf27c86b..56e0561a 100644 --- a/src/SharpLearning.GradientBoost/Learners/ClassificationBinomialGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/ClassificationBinomialGradientBoostLearner.cs @@ -27,15 +27,15 @@ public class ClassificationBinomialGradientBoostLearner : ClassificationGradient /// Number of features used at each split in the tree. 0 means all will be used /// Use multi threading to speed up execution (default is true) public ClassificationBinomialGradientBoostLearner( - int iterations = 100, - double learningRate = 0.1, + int iterations = 100, + double learningRate = 0.1, int maximumTreeDepth = 3, - int minimumSplitSize = 1, - double minimumInformationGain = 0.000001, - double subSampleRatio = 1.0, - int featuresPrSplit = 0, + int minimumSplitSize = 1, + double minimumInformationGain = 0.000001, + double subSampleRatio = 1.0, + int featuresPrSplit = 0, bool runParallel = true) - : base(iterations, learningRate, maximumTreeDepth, minimumSplitSize, minimumInformationGain, + : base(iterations, learningRate, maximumTreeDepth, minimumSplitSize, minimumInformationGain, subSampleRatio, featuresPrSplit, new GradientBoostBinomialLoss(), runParallel) { } diff --git a/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs index ff419409..014c24d5 100644 --- a/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs @@ -18,7 +18,7 @@ namespace SharpLearning.GradientBoost.Learners /// The resulting models are ensembled together using addition. 
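The "ensembled together using addition" wording maps to a simple loop. A sketch (hypothetical helper, not part of this patch) of how a boosted prediction is assembled from the initial loss, the learning rate, and the per-iteration trees, using the GBMTree.Predict(double[]) method shown in the hunk above:

// Sketch of the additive model:
// prediction = initialLoss + learningRate * sum of tree outputs.
static double PredictAdditive(
    double initialLoss,
    double learningRate,
    GBMTree[] trees,
    double[] observation)
{
    var prediction = initialLoss; // start from the constant initial fit
    foreach (var tree in trees)
    {
        // Each tree was fitted to the residuals (negative gradient) left
        // by the trees before it, so its output is a small correction.
        prediction += learningRate * tree.Predict(observation);
    }
    return prediction;
}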
Implementation based on: /// http://gradientboostedmodels.googlecode.com/files/report.pdf /// - public class ClassificationGradientBoostLearner + public class ClassificationGradientBoostLearner : IIndexedLearner , IIndexedLearner , ILearner @@ -47,14 +47,14 @@ public class ClassificationGradientBoostLearner /// loss function used /// Use multi threading to speed up execution public ClassificationGradientBoostLearner( - int iterations, - double learningRate, + int iterations, + double learningRate, int maximumTreeDepth, - int minimumSplitSize, - double minimumInformationGain, - double subSampleRatio, - int featuresPrSplit, - IGradientBoostLoss loss, + int minimumSplitSize, + double minimumInformationGain, + double subSampleRatio, + int featuresPrSplit, + IGradientBoostLoss loss, bool runParallel) { if (iterations < 1) { throw new ArgumentException("Iterations must be at least 1"); } @@ -69,7 +69,7 @@ public ClassificationGradientBoostLearner( m_iterations = iterations; m_learningRate = learningRate; m_subSampleRatio = subSampleRatio; - m_learner = new GBMDecisionTreeLearner(maximumTreeDepth, minimumSplitSize, + m_learner = new GBMDecisionTreeLearner(maximumTreeDepth, minimumSplitSize, minimumInformationGain, featuresPrSplit, m_loss, runParallel); } @@ -87,12 +87,12 @@ public ClassificationGradientBoostLearner( /// This reduces variance in the ensemble and can help counter overfitting /// Number of features used at each split in the tree. 0 means all will be used public ClassificationGradientBoostLearner( - int iterations = 100, - double learningRate = 0.1, + int iterations = 100, + double learningRate = 0.1, int maximumTreeDepth = 3, - int minimumSplitSize = 1, - double minimumInformationGain = 0.000001, - double subSampleRatio = 1.0, + int minimumSplitSize = 1, + double minimumInformationGain = 0.000001, + double subSampleRatio = 1.0, int featuresPrSplit = 0) : this(iterations, learningRate, maximumTreeDepth, minimumSplitSize, minimumInformationGain, subSampleRatio, featuresPrSplit, new GradientBoostBinomialLoss(), true) @@ -118,7 +118,7 @@ public ClassificationGradientBoostModel Learn(F64Matrix observations, double[] t /// /// /// - public ClassificationGradientBoostModel Learn(F64Matrix observations, double[] targets, + public ClassificationGradientBoostModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -139,7 +139,7 @@ public ClassificationGradientBoostModel Learn(F64Matrix observations, double[] t double[][] residuals = null; GBMTree[][] trees = null; - if(uniqueTargets.Length == 2) // Binary case - only need to fit to one class and use (1.0 - probability) + if (uniqueTargets.Length == 2) // Binary case - only need to fit to one class and use (1.0 - probability) { trees = new GBMTree[][] { new GBMTree[m_iterations] }; predictions = new double[][] { targets.Select(_ => initialLoss).ToArray() }; @@ -172,7 +172,7 @@ public ClassificationGradientBoostModel Learn(F64Matrix observations, double[] t { for (int itarget = 0; itarget < trees.Length; itarget++) { - m_loss.UpdateResiduals(oneVsAllTargets[itarget], predictions[itarget], + m_loss.UpdateResiduals(oneVsAllTargets[itarget], predictions[itarget], residuals[itarget], inSample); var sampleSize = targets.Length; @@ -181,12 +181,12 @@ public ClassificationGradientBoostModel Learn(F64Matrix observations, double[] t sampleSize = (int)Math.Round(m_subSampleRatio * workIndices.Length); var currentInSample = Sample(sampleSize, workIndices, targets.Length); - 
trees[itarget][iteration] = m_learner.Learn(observations, oneVsAllTargets[itarget], + trees[itarget][iteration] = m_learner.Learn(observations, oneVsAllTargets[itarget], residuals[itarget], predictions[itarget], orderedElements, currentInSample); } else { - trees[itarget][iteration] = m_learner.Learn(observations, oneVsAllTargets[itarget], + trees[itarget][iteration] = m_learner.Learn(observations, oneVsAllTargets[itarget], residuals[itarget], predictions[itarget], orderedElements, inSample); } @@ -198,7 +198,7 @@ public ClassificationGradientBoostModel Learn(F64Matrix observations, double[] t } } - return new ClassificationGradientBoostModel(trees, uniqueTargets, m_learningRate, + return new ClassificationGradientBoostModel(trees, uniqueTargets, m_learningRate, initialLoss, observations.ColumnCount); } @@ -218,16 +218,16 @@ public ClassificationGradientBoostModel Learn(F64Matrix observations, double[] t /// This controls how often the validation error is checked to estimate the best number of iterations. /// ClassificationGradientBoostModel with early stopping. The number of iterations will equal the number of trees in the model public ClassificationGradientBoostModel LearnWithEarlyStopping( - F64Matrix trainingObservations, + F64Matrix trainingObservations, double[] trainingTargets, - F64Matrix validationObservations, + F64Matrix validationObservations, double[] validationTargets, - IMetric metric, + IMetric metric, int earlyStoppingRounds) { if (earlyStoppingRounds >= m_iterations) { - throw new ArgumentException("Number of iterations " + m_iterations + + throw new ArgumentException("Number of iterations " + m_iterations + " is smaller than earlyStoppingRounds " + earlyStoppingRounds); } @@ -286,7 +286,7 @@ public ClassificationGradientBoostModel LearnWithEarlyStopping( { for (int itarget = 0; itarget < trees.Length; itarget++) { - m_loss.UpdateResiduals(oneVsAllTargets[itarget], predictions[itarget], + m_loss.UpdateResiduals(oneVsAllTargets[itarget], predictions[itarget], residuals[itarget], inSample); var sampleSize = trainingTargets.Length; @@ -295,12 +295,12 @@ public ClassificationGradientBoostModel LearnWithEarlyStopping( sampleSize = (int)Math.Round(m_subSampleRatio * workIndices.Length); var currentInSample = Sample(sampleSize, workIndices, trainingTargets.Length); - trees[itarget][iteration] = m_learner.Learn(trainingObservations, oneVsAllTargets[itarget], + trees[itarget][iteration] = m_learner.Learn(trainingObservations, oneVsAllTargets[itarget], residuals[itarget], predictions[itarget], orderedElements, currentInSample); } else { - trees[itarget][iteration] = m_learner.Learn(trainingObservations, oneVsAllTargets[itarget], + trees[itarget][iteration] = m_learner.Learn(trainingObservations, oneVsAllTargets[itarget], residuals[itarget], predictions[itarget], orderedElements, inSample); } @@ -310,10 +310,10 @@ public ClassificationGradientBoostModel LearnWithEarlyStopping( predictions[itarget][i] += m_learningRate * predictWork[i]; } } - + // When using early stopping, Check that the validation error is not increasing between earlyStoppingRounds // If the validation error has increased, stop the learning and return the model with the best number of iterations (trees). 
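A usage sketch of the early-stopping variant whose signature appears above. The data split and the metric choice are assumptions; TotalErrorClassificationMetric<double> is one IMetric<double, double> implementation from SharpLearning.Metrics that should satisfy the stripped metric parameter here.

// Sketch only: trainingObservations/validationObservations (F64Matrix) and the
// matching double[] target arrays are assumed to be split beforehand.
var learner = new ClassificationGradientBoostLearner(iterations: 1000);
var metric = new TotalErrorClassificationMetric<double>();

var model = learner.LearnWithEarlyStopping(
    trainingObservations, trainingTargets,
    validationObservations, validationTargets,
    metric,
    earlyStoppingRounds: 20); // validation error checked every 20 iterations

// The returned model keeps only the best-scoring number of trees, so it can
// hold fewer than the 1000 iterations requested above.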
- if(iteration % earlyStoppingRounds == 0) + if (iteration % earlyStoppingRounds == 0) { var model = new ClassificationGradientBoostModel( trees.Select(t => t.Take(iteration).ToArray()).ToArray(), @@ -333,7 +333,7 @@ public ClassificationGradientBoostModel LearnWithEarlyStopping( } return new ClassificationGradientBoostModel( - trees.Select(t => t.Take(bestIterationCount).ToArray()).ToArray(), + trees.Select(t => t.Take(bestIterationCount).ToArray()).ToArray(), uniqueTargets, m_learningRate, initialLoss, trainingObservations.ColumnCount); } @@ -353,16 +353,16 @@ public ClassificationGradientBoostModel LearnWithEarlyStopping( /// This controls how often the validation error is checked to estimate the best number of iterations /// ClassificationGradientBoostModel with early stopping. The number of iterations will equal the number of trees in the model public ClassificationGradientBoostModel LearnWithEarlyStopping( - F64Matrix trainingObservations, + F64Matrix trainingObservations, double[] trainingTargets, - F64Matrix validationObservations, + F64Matrix validationObservations, double[] validationTargets, - IMetric metric, + IMetric metric, int earlyStoppingRounds) { if (earlyStoppingRounds >= m_iterations) { - throw new ArgumentException("Number of iterations " + m_iterations + + throw new ArgumentException("Number of iterations " + m_iterations + " is smaller than earlyStoppingRounds " + earlyStoppingRounds); } @@ -417,7 +417,7 @@ public ClassificationGradientBoostModel LearnWithEarlyStopping( { for (int itarget = 0; itarget < trees.Length; itarget++) { - m_loss.UpdateResiduals(oneVsAllTargets[itarget], predictions[itarget], + m_loss.UpdateResiduals(oneVsAllTargets[itarget], predictions[itarget], residuals[itarget], inSample); var sampleSize = trainingTargets.Length; @@ -426,12 +426,12 @@ public ClassificationGradientBoostModel LearnWithEarlyStopping( sampleSize = (int)Math.Round(m_subSampleRatio * workIndices.Length); var currentInSample = Sample(sampleSize, workIndices, trainingTargets.Length); - trees[itarget][iteration] = m_learner.Learn(trainingObservations, oneVsAllTargets[itarget], + trees[itarget][iteration] = m_learner.Learn(trainingObservations, oneVsAllTargets[itarget], residuals[itarget], predictions[itarget], orderedElements, currentInSample); } else { - trees[itarget][iteration] = m_learner.Learn(trainingObservations, oneVsAllTargets[itarget], + trees[itarget][iteration] = m_learner.Learn(trainingObservations, oneVsAllTargets[itarget], residuals[itarget], predictions[itarget], orderedElements, inSample); } diff --git a/src/SharpLearning.GradientBoost/Learners/RegressionAbsoluteLossGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/RegressionAbsoluteLossGradientBoostLearner.cs index 175483a4..bfdb7841 100644 --- a/src/SharpLearning.GradientBoost/Learners/RegressionAbsoluteLossGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/RegressionAbsoluteLossGradientBoostLearner.cs @@ -28,13 +28,13 @@ public class RegressionAbsoluteLossGradientBoostLearner : RegressionGradientBoos /// Number of features used at each split in the tree. 
0 means all will be used /// Use multi threading to speed up execution (default is true) public RegressionAbsoluteLossGradientBoostLearner( - int iterations = 100, - double learningRate = 0.1, + int iterations = 100, + double learningRate = 0.1, int maximumTreeDepth = 3, - int minimumSplitSize = 1, - double minimumInformationGain = 0.000001, - double subSampleRatio = 1.0, - int featuresPrSplit = 0, + int minimumSplitSize = 1, + double minimumInformationGain = 0.000001, + double subSampleRatio = 1.0, + int featuresPrSplit = 0, bool runParallel = true) : base(iterations, learningRate, maximumTreeDepth, minimumSplitSize, minimumInformationGain, subSampleRatio, featuresPrSplit, new GradientBoostAbsoluteLoss(), runParallel) diff --git a/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs index 7821d2db..7e579105 100644 --- a/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs @@ -45,14 +45,14 @@ public class RegressionGradientBoostLearner : IIndexedLearner, ILearner< /// loss function used /// Use multi threading to speed up execution public RegressionGradientBoostLearner( - int iterations, - double learningRate, + int iterations, + double learningRate, int maximumTreeDepth, - int minimumSplitSize, - double minimumInformationGain, - double subSampleRatio, - int featuresPrSplit, - IGradientBoostLoss loss, + int minimumSplitSize, + double minimumInformationGain, + double subSampleRatio, + int featuresPrSplit, + IGradientBoostLoss loss, bool runParallel) { if (iterations < 1) { throw new ArgumentException("Iterations must be at least 1"); } @@ -67,7 +67,7 @@ public RegressionGradientBoostLearner( m_iterations = iterations; m_learningRate = learningRate; m_subSampleRatio = subSampleRatio; - m_learner = new GBMDecisionTreeLearner(maximumTreeDepth, minimumSplitSize, + m_learner = new GBMDecisionTreeLearner(maximumTreeDepth, minimumSplitSize, minimumInformationGain, featuresPrSplit, m_loss, runParallel); } @@ -85,12 +85,12 @@ public RegressionGradientBoostLearner( /// This reduces variance in the ensemble and can help counter overfitting /// Number of features used at
0 means all will be used public RegressionGradientBoostLearner( - int iterations = 100, - double learningRate = 0.1, + int iterations = 100, + double learningRate = 0.1, int maximumTreeDepth = 3, - int minimumSplitSize = 1, - double minimumInformationGain = 0.000001, - double subSampleRatio = 1.0, + int minimumSplitSize = 1, + double minimumInformationGain = 0.000001, + double subSampleRatio = 1.0, int featuresPrSplit = 0) : this(iterations, learningRate, maximumTreeDepth, minimumSplitSize, minimumInformationGain, subSampleRatio, featuresPrSplit, new GradientBoostSquaredLoss(), true) @@ -116,7 +116,7 @@ public RegressionGradientBoostModel Learn(F64Matrix observations, double[] targe /// /// /// - public RegressionGradientBoostModel Learn(F64Matrix observations, double[] targets, + public RegressionGradientBoostModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -145,7 +145,7 @@ public RegressionGradientBoostModel Learn(F64Matrix observations, double[] targe { sampleSize = (int)Math.Round(m_subSampleRatio * workIndices.Length); var currentInSample = Sample(sampleSize, workIndices, targets.Length); - + trees[iteration] = m_learner.Learn(observations, targets, residuals, predictions, orderedElements, currentInSample); @@ -163,7 +163,7 @@ public RegressionGradientBoostModel Learn(F64Matrix observations, double[] targe } } - return new RegressionGradientBoostModel(trees, m_learningRate, initialLoss, + return new RegressionGradientBoostModel(trees, m_learningRate, initialLoss, observations.ColumnCount); } @@ -183,16 +183,16 @@ public RegressionGradientBoostModel Learn(F64Matrix observations, double[] targe /// This controls how often the validation error is checked to estimate the best number of iterations. /// RegressionGradientBoostModel with early stopping. The number of iterations will equal the number of trees in the model public RegressionGradientBoostModel LearnWithEarlyStopping( - F64Matrix trainingObservations, + F64Matrix trainingObservations, double[] trainingTargets, - F64Matrix validationObservations, + F64Matrix validationObservations, double[] validationTargets, - IMetric metric, + IMetric metric, int earlyStoppingRounds) { - if(earlyStoppingRounds >= m_iterations) + if (earlyStoppingRounds >= m_iterations) { - throw new ArgumentException("Number of iterations " + m_iterations + + throw new ArgumentException("Number of iterations " + m_iterations + " is smaller than earlyStoppingRounds " + earlyStoppingRounds); } @@ -248,7 +248,7 @@ public RegressionGradientBoostModel LearnWithEarlyStopping( // If the validation error has increased, stop the learning and return the model with the best number of iterations (trees). 
if (iteration % earlyStoppingRounds == 0) { - var model = new RegressionGradientBoostModel(trees.Take(iteration).ToArray(), + var model = new RegressionGradientBoostModel(trees.Take(iteration).ToArray(), m_learningRate, initialLoss, trainingObservations.ColumnCount); var validPredictions = model.Predict(validationObservations); diff --git a/src/SharpLearning.GradientBoost/Learners/RegressionHuberLossGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/RegressionHuberLossGradientBoostLearner.cs index 2757eb75..154722ca 100644 --- a/src/SharpLearning.GradientBoost/Learners/RegressionHuberLossGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/RegressionHuberLossGradientBoostLearner.cs @@ -29,14 +29,14 @@ public class RegressionHuberLossGradientBoostLearner : RegressionGradientBoostLe /// The quantile used for deciding when to switch between square and absolute loss /// Use multi threading to speed up execution (default is true) public RegressionHuberLossGradientBoostLearner( - int iterations = 100, - double learningRate = 0.1, + int iterations = 100, + double learningRate = 0.1, int maximumTreeDepth = 3, - int minimumSplitSize = 1, - double minimumInformationGain = 0.000001, - double subSampleRatio = 1.0, - int featuresPrSplit = 0, - double huberQuantile = 0.9, + int minimumSplitSize = 1, + double minimumInformationGain = 0.000001, + double subSampleRatio = 1.0, + int featuresPrSplit = 0, + double huberQuantile = 0.9, bool runParallel = true) : base(iterations, learningRate, maximumTreeDepth, minimumSplitSize, minimumInformationGain, subSampleRatio, featuresPrSplit, new GradientBoostHuberLoss(huberQuantile), runParallel) diff --git a/src/SharpLearning.GradientBoost/Learners/RegressionQuantileLossGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/RegressionQuantileLossGradientBoostLearner.cs index 9541ab9c..9681c1b1 100644 --- a/src/SharpLearning.GradientBoost/Learners/RegressionQuantileLossGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/RegressionQuantileLossGradientBoostLearner.cs @@ -30,14 +30,14 @@ public class RegressionQuantileLossGradientBoostLearner : RegressionGradientBoos /// 0.5 is the median and corresponds to absolute loss or LAD regression /// Use multi threading to speed up execution (default is true) public RegressionQuantileLossGradientBoostLearner( - int iterations = 100, - double learningRate = 0.1, + int iterations = 100, + double learningRate = 0.1, int maximumTreeDepth = 3, - int minimumSplitSize = 1, - double minimumInformationGain = 0.000001, - double subSampleRatio = 1.0, - int featuresPrSplit = 0, - double quantile = 0.9, + int minimumSplitSize = 1, + double minimumInformationGain = 0.000001, + double subSampleRatio = 1.0, + int featuresPrSplit = 0, + double quantile = 0.9, bool runParallel = true) : base(iterations, learningRate, maximumTreeDepth, minimumSplitSize, minimumInformationGain, subSampleRatio, featuresPrSplit, new GradientBoostQuantileLoss(quantile), runParallel) diff --git a/src/SharpLearning.GradientBoost/Learners/RegressionSquareLossGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/RegressionSquareLossGradientBoostLearner.cs index 6f7be033..3254b4d6 100644 --- a/src/SharpLearning.GradientBoost/Learners/RegressionSquareLossGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/RegressionSquareLossGradientBoostLearner.cs @@ -28,15 +28,15 @@ public class RegressionSquareLossGradientBoostLearner : RegressionGradientBoostL /// Number of features used at 
each split in the tree. 0 means all will be used /// Use multi threading to speed up execution (default is true) public RegressionSquareLossGradientBoostLearner( - int iterations = 100, - double learningRate = 0.1, + int iterations = 100, + double learningRate = 0.1, int maximumTreeDepth = 3, - int minimumSplitSize = 1, - double minimumInformationGain = 0.000001, - double subSampleRatio = 1.0, - int featuresPrSplit = 0, + int minimumSplitSize = 1, + double minimumInformationGain = 0.000001, + double subSampleRatio = 1.0, + int featuresPrSplit = 0, bool runParallel = true) - : base(iterations, learningRate, maximumTreeDepth, minimumSplitSize, minimumInformationGain, + : base(iterations, learningRate, maximumTreeDepth, minimumSplitSize, minimumInformationGain, subSampleRatio, featuresPrSplit, new GradientBoostSquaredLoss(), runParallel) { } diff --git a/src/SharpLearning.GradientBoost/Loss/GradientBoostAbsoluteLoss.cs b/src/SharpLearning.GradientBoost/Loss/GradientBoostAbsoluteLoss.cs index a04c7e9b..6a19cf48 100644 --- a/src/SharpLearning.GradientBoost/Loss/GradientBoostAbsoluteLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/GradientBoostAbsoluteLoss.cs @@ -95,12 +95,12 @@ public double NegativeGradient(double target, double prediction) /// /// /// - public void UpdateResiduals(double[] targets, double[] predictions, + public void UpdateResiduals(double[] targets, double[] predictions, double[] residuals, bool[] inSample) { for (int i = 0; i < residuals.Length; i++) { - if(inSample[i]) + if (inSample[i]) { residuals[i] = NegativeGradient(targets[i], predictions[i]); } @@ -114,7 +114,7 @@ public void UpdateResiduals(double[] targets, double[] predictions, /// /// /// - public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, + public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, double target, double residual) { var residual2 = residual * residual; @@ -138,7 +138,7 @@ public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, /// /// /// - public double UpdatedLeafValue(double currentLeafValue, double[] targets, + public double UpdatedLeafValue(double currentLeafValue, double[] targets, double[] predictions, bool[] inSample) { var values = new List(); diff --git a/src/SharpLearning.GradientBoost/Loss/GradientBoostBinomialLoss.cs b/src/SharpLearning.GradientBoost/Loss/GradientBoostBinomialLoss.cs index e643d56a..b5ab0a4f 100644 --- a/src/SharpLearning.GradientBoost/Loss/GradientBoostBinomialLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/GradientBoostBinomialLoss.cs @@ -93,7 +93,7 @@ public double NegativeGradient(double target, double prediction) /// /// /// - public void UpdateResiduals(double[] targets, double[] predictions, + public void UpdateResiduals(double[] targets, double[] predictions, double[] residuals, bool[] inSample) { for (int i = 0; i < residuals.Length; i++) @@ -112,7 +112,7 @@ public void UpdateResiduals(double[] targets, double[] predictions, /// /// /// - public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, + public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, double target, double residual) { var residual2 = residual * residual; @@ -153,7 +153,7 @@ double BinomialBestConstant(double sum, double binomialSum) /// /// /// - public double UpdatedLeafValue(double currentLeafValue, double[] targets, + public double UpdatedLeafValue(double currentLeafValue, double[] targets, double[] predictions, bool[] inSample) { // no update needed for binomial 
loss diff --git a/src/SharpLearning.GradientBoost/Loss/GradientBoostHuberLoss.cs b/src/SharpLearning.GradientBoost/Loss/GradientBoostHuberLoss.cs index 13685957..453c2783 100644 --- a/src/SharpLearning.GradientBoost/Loss/GradientBoostHuberLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/GradientBoostHuberLoss.cs @@ -96,7 +96,7 @@ public double NegativeGradient(double target, double prediction) /// /// /// - public void UpdateResiduals(double[] targets, double[] predictions, + public void UpdateResiduals(double[] targets, double[] predictions, double[] residuals, bool[] inSample) { var absDiff = new double[inSample.Length]; @@ -104,7 +104,7 @@ public void UpdateResiduals(double[] targets, double[] predictions, for (int i = 0; i < inSample.Length; i++) { - if(inSample[i]) + if (inSample[i]) { var value = targets[i] - predictions[i]; difference[i] = value; @@ -116,7 +116,7 @@ public void UpdateResiduals(double[] targets, double[] predictions, for (int i = 0; i < inSample.Length; i++) { - if(inSample[i]) + if (inSample[i]) { var diff = absDiff[i]; @@ -141,7 +141,7 @@ public void UpdateResiduals(double[] targets, double[] predictions, /// /// /// - public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, + public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, double target, double residual) { var residual2 = residual * residual; @@ -174,13 +174,13 @@ public bool UpdateLeafValues() /// /// /// - public double UpdatedLeafValue(double currentLeafValue, double[] targets, + public double UpdatedLeafValue(double currentLeafValue, double[] targets, double[] predictions, bool[] inSample) { var diff = new List(); for (int j = 0; j < inSample.Length; j++) { - if(inSample[j]) + if (inSample[j]) { diff.Add(targets[j] - predictions[j]); } @@ -194,7 +194,7 @@ public double UpdatedLeafValue(double currentLeafValue, double[] targets, var medianDiff = diff[j] - median; var sign = Math.Sign(medianDiff); - values[j] = sign * Math.Min(Math.Abs(medianDiff), m_gamma); + values[j] = sign * Math.Min(Math.Abs(medianDiff), m_gamma); } var newValue = median + values.Sum() / values.Length; diff --git a/src/SharpLearning.GradientBoost/Loss/GradientBoostQuantileLoss.cs b/src/SharpLearning.GradientBoost/Loss/GradientBoostQuantileLoss.cs index 333187cc..fa666003 100644 --- a/src/SharpLearning.GradientBoost/Loss/GradientBoostQuantileLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/GradientBoostQuantileLoss.cs @@ -101,7 +101,7 @@ public double NegativeGradient(double target, double prediction) /// /// /// - public void UpdateResiduals(double[] targets, double[] predictions, + public void UpdateResiduals(double[] targets, double[] predictions, double[] residuals, bool[] inSample) { for (int i = 0; i < residuals.Length; i++) @@ -120,7 +120,7 @@ public void UpdateResiduals(double[] targets, double[] predictions, /// /// /// - public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, + public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, double target, double residual) { var residual2 = residual * residual; @@ -153,7 +153,7 @@ public bool UpdateLeafValues() /// /// /// - public double UpdatedLeafValue(double currentLeafValue, double[] targets, + public double UpdatedLeafValue(double currentLeafValue, double[] targets, double[] predictions, bool[] inSample) { var values = new List(); diff --git a/src/SharpLearning.GradientBoost/Loss/GradientBoostSquaredLoss.cs b/src/SharpLearning.GradientBoost/Loss/GradientBoostSquaredLoss.cs index 
b68f2453..79e9f742 100644 --- a/src/SharpLearning.GradientBoost/Loss/GradientBoostSquaredLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/GradientBoostSquaredLoss.cs @@ -89,7 +89,7 @@ public double NegativeGradient(double target, double prediction) /// /// /// - public void UpdateResiduals(double[] targets, double[] predictions, + public void UpdateResiduals(double[] targets, double[] predictions, double[] residuals, bool[] inSample) { for (int i = 0; i < residuals.Length; i++) @@ -108,7 +108,7 @@ public void UpdateResiduals(double[] targets, double[] predictions, /// /// /// - public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, + public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, double target, double residual) { var residual2 = residual * residual; @@ -118,7 +118,7 @@ public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, left.SumOfSquares += residual2; left.Cost = left.SumOfSquares - (left.Sum * left.Sum / left.Samples); left.BestConstant = left.Sum / left.Samples; - + // Alternative update but gives slightly different results //var leftSamplesInv = 1.0 / left.Samples; //var leftAverage = left.Sum * leftSamplesInv; @@ -126,7 +126,7 @@ public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, //left.Cost = left.SumOfSquares - (left.Sum * leftAverage); //left.Cost = left.SumOfSquares - (left.Sum * left.Sum * leftSamplesInv); //left.BestConstant = left.Sum * leftSamplesInv; - + right.Samples--; right.Sum -= residual; @@ -151,7 +151,7 @@ public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, /// /// /// - public double UpdatedLeafValue(double currentLeafValue, double[] targets, + public double UpdatedLeafValue(double currentLeafValue, double[] targets, double[] predictions, bool[] inSample) { // no updates needed for square loss diff --git a/src/SharpLearning.GradientBoost/Loss/IGradientBoostLoss.cs b/src/SharpLearning.GradientBoost/Loss/IGradientBoostLoss.cs index d9a4a223..546276e5 100644 --- a/src/SharpLearning.GradientBoost/Loss/IGradientBoostLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/IGradientBoostLoss.cs @@ -48,7 +48,7 @@ public interface IGradientBoostLoss /// /// /// - void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, + void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, double target, double residual); /// diff --git a/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs b/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs index 680ceac3..5b0df592 100644 --- a/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs +++ b/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs @@ -14,7 +14,7 @@ namespace SharpLearning.GradientBoost.Models /// /// [Serializable] - public sealed class ClassificationGradientBoostModel + public sealed class ClassificationGradientBoostModel : IPredictorModel , IPredictorModel { @@ -22,22 +22,22 @@ public sealed class ClassificationGradientBoostModel /// /// public readonly GBMTree[][] Trees; - + /// /// /// public readonly double LearningRate; - + /// /// /// public readonly double InitialLoss; - + /// /// /// public readonly double[] TargetNames; - + /// /// /// @@ -52,10 +52,10 @@ public sealed class ClassificationGradientBoostModel /// /// public ClassificationGradientBoostModel( - GBMTree[][] trees, - double[] targetNames, - double learningRate, - double initialLoss, + GBMTree[][] trees, 
+ double[] targetNames, + double learningRate, + double initialLoss, int featureCount) { Trees = trees ?? throw new ArgumentNullException(nameof(trees)); @@ -73,7 +73,7 @@ public ClassificationGradientBoostModel( /// public double Predict(double[] observation) { - if(TargetNames.Length == 2) + if (TargetNames.Length == 2) { return BinaryPredict(observation); } diff --git a/src/SharpLearning.GradientBoost/Models/RegressionGradientBoostModel.cs b/src/SharpLearning.GradientBoost/Models/RegressionGradientBoostModel.cs index 12c946f4..34628429 100644 --- a/src/SharpLearning.GradientBoost/Models/RegressionGradientBoostModel.cs +++ b/src/SharpLearning.GradientBoost/Models/RegressionGradientBoostModel.cs @@ -19,17 +19,17 @@ public sealed class RegressionGradientBoostModel : IPredictorModel /// /// public readonly GBMTree[] Trees; - + /// /// /// public readonly double LearningRate; - + /// /// /// public readonly double InitialLoss; - + /// /// /// @@ -42,7 +42,7 @@ public sealed class RegressionGradientBoostModel : IPredictorModel /// /// /// - public RegressionGradientBoostModel(GBMTree[] trees, double learningRate, + public RegressionGradientBoostModel(GBMTree[] trees, double learningRate, double initialLoss, int featureCount) { Trees = trees ?? throw new ArgumentNullException(nameof(trees)); diff --git a/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs b/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs index 9097fd28..af868d5f 100644 --- a/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs +++ b/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs @@ -112,7 +112,7 @@ public void CsvParser_NoHeader_EnumerateRows_Value_Throw() CollectionAssert.AreEqual(Expected_NoHeader(), actual.ToList()); } - + List Expected_NoHeader() { var columnNameToIndex = new Dictionary { { "0", 0 }, { "1", 1 }, { "2", 2 } }; diff --git a/src/SharpLearning.InputOutput.Test/Csv/CsvRowExtensionsTest.cs b/src/SharpLearning.InputOutput.Test/Csv/CsvRowExtensionsTest.cs index 664f5f8a..c0793728 100644 --- a/src/SharpLearning.InputOutput.Test/Csv/CsvRowExtensionsTest.cs +++ b/src/SharpLearning.InputOutput.Test/Csv/CsvRowExtensionsTest.cs @@ -22,7 +22,7 @@ public class CsvRowExtensionsTest public void CsvRowExtensions_GetValues() { var sut = new CsvRow(m_columnNameToIndex, m_data); - var actual = sut.GetValues(new string[] {"1", "3"}); + var actual = sut.GetValues(new string[] { "1", "3" }); var expected = new string[] { "1", "3" }; CollectionAssert.AreEqual(expected, actual); } @@ -32,7 +32,7 @@ public void CsvRowExtensions_SetValue() { var sut = new CsvRow(m_columnNameToIndex, m_data.ToArray()); sut.SetValue("3", "33"); - + var actual = sut.GetValue("3"); Assert.AreEqual("33", actual); } @@ -53,7 +53,7 @@ public void CsvRowExtensions_Keep() var actual = sut.Keep("1", "2").ToList().First(); var expected = new CsvRow( - new Dictionary { { "1", 0 }, { "2", 1 } }, + new Dictionary { { "1", 0 }, { "2", 1 } }, new string[] { "1", "2" }); Assert.AreEqual(expected, actual); @@ -66,7 +66,7 @@ public void CsvRowExtensions_Remove() var actual = sut.Remove("3").ToList().First(); var expected = new CsvRow( - new Dictionary { { "1", 0 }, { "2", 1 }, { "4", 2 } }, + new Dictionary { { "1", 0 }, { "2", 1 }, { "4", 2 } }, new string[] { "1", "2", "4" }); Assert.AreEqual(expected, actual); @@ -148,7 +148,7 @@ public void CsvRowExtensions_KeyCombine_KeepRepeatedColumns() var actualColumnNameToIndex = rows.First().ColumnNameToIndex; - var expectedColumnNameToIndex = new Dictionary { {"Date", 0}, {"Open", 1}, {"High", 2}, 
{"Low", 3}, {"Close", 4}, {"Volume", 5}, {"Adj Close", 6}, {"Date_1", 7}, {"Open_1", 8}, {"High_1", 9}, {"Low_1", 10}, {"Close_1", 11}, {"Volume_1", 12}, {"Adj Close_1", 13}}; + var expectedColumnNameToIndex = new Dictionary { { "Date", 0 }, { "Open", 1 }, { "High", 2 }, { "Low", 3 }, { "Close", 4 }, { "Volume", 5 }, { "Adj Close", 6 }, { "Date_1", 7 }, { "Open_1", 8 }, { "High_1", 9 }, { "Low_1", 10 }, { "Close_1", 11 }, { "Volume_1", 12 }, { "Adj Close_1", 13 } }; CollectionAssert.AreEqual(expectedColumnNameToIndex, actualColumnNameToIndex); } @@ -172,7 +172,7 @@ public void CsvRowExtensions_KeyCombine() var actualColumnNameToIndex = rows.First().ColumnNameToIndex; - var expectedColumnNameToIndex = new Dictionary { {"Date", 0}, {"Open", 1}, {"High", 2}, {"Low", 3}, {"Close", 4}, {"Volume", 5}, {"Adj Close", 6}, {"OpenOther", 7}, {"CloseOther", 8} }; + var expectedColumnNameToIndex = new Dictionary { { "Date", 0 }, { "Open", 1 }, { "High", 2 }, { "Low", 3 }, { "Close", 4 }, { "Volume", 5 }, { "Adj Close", 6 }, { "OpenOther", 7 }, { "CloseOther", 8 } }; CollectionAssert.AreEqual(expectedColumnNameToIndex, actualColumnNameToIndex); } @@ -196,7 +196,7 @@ public void CsvRowExtensions_KeyCombine_KeepRepeatedColumns_Dict() var actualColumnNameToIndex = rows.First().ColumnNameToIndex; - var expectedColumnNameToIndex = new Dictionary { {"Date", 0}, {"Open", 1}, {"High", 2}, {"Low", 3}, {"Close", 4}, {"Volume", 5}, {"Adj Close", 6}, {"Date_1", 7}, {"Open_1", 8}, {"High_1", 9}, {"Low_1", 10}, {"Close_1", 11}, {"Volume_1", 12}, {"Adj Close_1", 13}}; + var expectedColumnNameToIndex = new Dictionary { { "Date", 0 }, { "Open", 1 }, { "High", 2 }, { "Low", 3 }, { "Close", 4 }, { "Volume", 5 }, { "Adj Close", 6 }, { "Date_1", 7 }, { "Open_1", 8 }, { "High_1", 9 }, { "Low_1", 10 }, { "Close_1", 11 }, { "Volume_1", 12 }, { "Adj Close_1", 13 } }; CollectionAssert.AreEqual(expectedColumnNameToIndex, actualColumnNameToIndex); } @@ -220,7 +220,7 @@ public void CsvRowExtensions_KeyCombine_Dict() var actualColumnNameToIndex = rows.First().ColumnNameToIndex; - var expectedColumnNameToIndex = new Dictionary { {"Date", 0}, {"Open", 1}, {"High", 2}, {"Low", 3}, {"Close", 4}, {"Volume", 5}, {"Adj Close", 6}, {"OpenOther", 7}, {"CloseOther", 8} }; + var expectedColumnNameToIndex = new Dictionary { { "Date", 0 }, { "Open", 1 }, { "High", 2 }, { "Low", 3 }, { "Close", 4 }, { "Volume", 5 }, { "Adj Close", 6 }, { "OpenOther", 7 }, { "CloseOther", 8 } }; CollectionAssert.AreEqual(expectedColumnNameToIndex, actualColumnNameToIndex); } diff --git a/src/SharpLearning.InputOutput.Test/Csv/CsvRowTest.cs b/src/SharpLearning.InputOutput.Test/Csv/CsvRowTest.cs index 76ca8654..708e2244 100644 --- a/src/SharpLearning.InputOutput.Test/Csv/CsvRowTest.cs +++ b/src/SharpLearning.InputOutput.Test/Csv/CsvRowTest.cs @@ -16,23 +16,23 @@ public class CsvRowTest public void CsvRow_Constructor_data_columnNames_does_not_match() { var row = new CsvRow( - new Dictionary { { "F1", 0 }, { "F2", 0 } }, - new string[] { "a", "b", "c" }); + new Dictionary { { "F1", 0 }, { "F2", 0 } }, + new string[] { "a", "b", "c" }); } [TestMethod] public void CsvRow_Equal() { var row = new CsvRow( - new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, + new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, new string[] { "a", "b", "c" }); var equal = new CsvRow( - new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, + new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, new string[] { "a", "b", "c" }); var notEqual = new CsvRow( - new 
Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, + new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, new string[] { "123", "b", "c" }); Assert.AreEqual(equal, row); @@ -43,15 +43,15 @@ public void CsvRow_Equal() public void CsvRow_Equal_Params() { var row = new CsvRow( - new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, + new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, "a", "b", "c"); var equal = new CsvRow( - new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, + new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, "a", "b", "c"); var notEqual = new CsvRow( - new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, + new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, "123", "b", "c"); Assert.AreEqual(equal, row); diff --git a/src/SharpLearning.InputOutput.Test/Csv/CsvWriterTest.cs b/src/SharpLearning.InputOutput.Test/Csv/CsvWriterTest.cs index 3fa84b27..ffef001f 100644 --- a/src/SharpLearning.InputOutput.Test/Csv/CsvWriterTest.cs +++ b/src/SharpLearning.InputOutput.Test/Csv/CsvWriterTest.cs @@ -33,7 +33,7 @@ public void CsvWriter_Write_Append() var writer = new StringWriter(); var sut = new CsvWriter(() => writer); - + sut.Write(data, false); var actual = writer.ToString(); diff --git a/src/SharpLearning.InputOutput.Test/Csv/DictionaryExtensionsTest.cs b/src/SharpLearning.InputOutput.Test/Csv/DictionaryExtensionsTest.cs index dc8e357d..d7bedf81 100644 --- a/src/SharpLearning.InputOutput.Test/Csv/DictionaryExtensionsTest.cs +++ b/src/SharpLearning.InputOutput.Test/Csv/DictionaryExtensionsTest.cs @@ -11,8 +11,8 @@ public class DictionaryExtensionsTest public void DictionaryExtensions_GetValues() { var sut = new Dictionary { { "F1", 0 }, { "F2", 1 } }; - var expected = new int[] {0, 1}; - var actual = sut.GetValues(new string[] {"F1", "F2" }); + var expected = new int[] { 0, 1 }; + var actual = sut.GetValues(new string[] { "F1", "F2" }); Assert.AreNotEqual(expected, actual); } } diff --git a/src/SharpLearning.InputOutput.Test/Serialization/GenericXmlDataContractSerializerTest.cs b/src/SharpLearning.InputOutput.Test/Serialization/GenericXmlDataContractSerializerTest.cs index 3b9f92c7..26810c2e 100644 --- a/src/SharpLearning.InputOutput.Test/Serialization/GenericXmlDataContractSerializerTest.cs +++ b/src/SharpLearning.InputOutput.Test/Serialization/GenericXmlDataContractSerializerTest.cs @@ -22,7 +22,7 @@ public void GenericXmlDataContractSerializer_Serialize() var writer = new StringWriter(); var sut = new GenericXmlDataContractSerializer(); - sut.Serialize(m_serializationData,() => writer); + sut.Serialize(m_serializationData, () => writer); Assert.AreEqual(m_serializationString, writer.ToString()); } diff --git a/src/SharpLearning.InputOutput/Csv/CsvParser.cs b/src/SharpLearning.InputOutput/Csv/CsvParser.cs index bca4834e..22320f58 100644 --- a/src/SharpLearning.InputOutput/Csv/CsvParser.cs +++ b/src/SharpLearning.InputOutput/Csv/CsvParser.cs @@ -27,9 +27,9 @@ public sealed class CsvParser /// /// /// - public CsvParser(Func reader, - char separator = DefaultDelimiter, - bool quoteInclosedColumns = false, + public CsvParser(Func reader, + char separator = DefaultDelimiter, + bool quoteInclosedColumns = false, bool hasHeader = true) { m_getReader = reader ?? throw new ArgumentException("reader"); @@ -45,9 +45,9 @@ public CsvParser(Func reader, /// public IEnumerable EnumerateRows(Func selectColumnNames) { - if(!m_hasHeader) + if (!m_hasHeader) { - throw new ArgumentException("CsvParser configured to use no header." 
+ + throw new ArgumentException("CsvParser configured to use no header." + " Column names cannot be selected in this made"); } @@ -80,7 +80,7 @@ public IEnumerable EnumerateRows(params string[] columnNames) { if (!m_hasHeader) { - throw new ArgumentException("CsvParser configured to use no header." + + throw new ArgumentException("CsvParser configured to use no header." + "Column names cannot be selected in this made"); } @@ -107,7 +107,7 @@ public IEnumerable EnumerateRows(params string[] columnNames) /// public IEnumerable EnumerateRows() { - if(m_hasHeader) + if (m_hasHeader) { return EnumerateRowsHeader(); } @@ -132,7 +132,7 @@ IEnumerable EnumerateRowsHeader() } } } - + IEnumerable EnumerateRowsNoHeader() { var columnNameToIndex = CreateHeaderForCsvFileWithout(); @@ -205,7 +205,7 @@ string[] Split(string line, int[] indices) { string[] splitAll = null; - if(m_quoteInclosedColumns) + if (m_quoteInclosedColumns) { splitAll = SplitText(line, m_separator); } @@ -213,7 +213,7 @@ string[] Split(string line, int[] indices) { splitAll = line.Split(m_separator); } - + var split = new string[indices.Length]; for (int i = 0; i < indices.Length; i++) @@ -235,13 +235,13 @@ string[] SplitText(string csvText, char separator) while (current < csvText.Length) { - var token = csvText[current]; - - if(token == '"') + var token = csvText[current]; + + if (token == '"') { inText = !inText; } - else if(token == separator) + else if (token == separator) { if (!inText) { diff --git a/src/SharpLearning.InputOutput/Csv/CsvRow.cs b/src/SharpLearning.InputOutput/Csv/CsvRow.cs index 29510097..32d6aa01 100644 --- a/src/SharpLearning.InputOutput/Csv/CsvRow.cs +++ b/src/SharpLearning.InputOutput/Csv/CsvRow.cs @@ -13,7 +13,7 @@ public class CsvRow /// Values /// public readonly string[] Values; - + /// /// Column name to index /// diff --git a/src/SharpLearning.InputOutput/Csv/CsvRowExtensions.cs b/src/SharpLearning.InputOutput/Csv/CsvRowExtensions.cs index 4c348c0f..25fee9f7 100644 --- a/src/SharpLearning.InputOutput/Csv/CsvRowExtensions.cs +++ b/src/SharpLearning.InputOutput/Csv/CsvRowExtensions.cs @@ -15,7 +15,7 @@ public static class CsvRowExtensions /// /// /// - public static readonly Converter DefaultF64Converter = ArrayExtensions.DefaultF64Converter; + public static readonly Converter DefaultF64Converter = ArrayExtensions.DefaultF64Converter; /// /// Gets the CsvRow value based on the supplied column name @@ -194,7 +194,7 @@ public static StringMatrix ToStringMatrix(this IEnumerable dataRows) /// /// /// - public static IEnumerable EnumerateCsvRows(this IMatrix matrix, + public static IEnumerable EnumerateCsvRows(this IMatrix matrix, Dictionary columnNameToIndex) { var rows = matrix.RowCount; @@ -222,9 +222,9 @@ public static IEnumerable EnumerateCsvRows(this IMatrix matrix, /// /// /// True and a header is added to the stream, false and the header is omitted - public static void Write(this IEnumerable dataRows, - Func writer, - char separator = CsvParser.DefaultDelimiter, + public static void Write(this IEnumerable dataRows, + Func writer, + char separator = CsvParser.DefaultDelimiter, bool writeHeader = true) { new CsvWriter(writer, separator).Write(dataRows, writeHeader); @@ -237,9 +237,9 @@ public static void Write(this IEnumerable dataRows, /// /// /// True and a header is added to the stream, false and the header is omitted - public static void WriteFile(this IEnumerable dataRows, - string filePath, - char separator = CsvParser.DefaultDelimiter, + public static void WriteFile(this IEnumerable dataRows, 
+ string filePath, + char separator = CsvParser.DefaultDelimiter, bool writeHeader = true) { Write(dataRows, () => new StreamWriter(filePath), separator, writeHeader); diff --git a/src/SharpLearning.InputOutput/Csv/CsvWriter.cs b/src/SharpLearning.InputOutput/Csv/CsvWriter.cs index d4acd64c..f73aca1f 100644 --- a/src/SharpLearning.InputOutput/Csv/CsvWriter.cs +++ b/src/SharpLearning.InputOutput/Csv/CsvWriter.cs @@ -29,9 +29,9 @@ public CsvWriter(Func writer, char separator = CsvParser.DefaultDeli /// /// the rows to write /// True and a header is added to the stream, false and the header is omitted - public void Write(IEnumerable rows, bool writeHeader=true) + public void Write(IEnumerable rows, bool writeHeader = true) { - using(var writer = m_writer()) + using (var writer = m_writer()) { if (writeHeader) { diff --git a/src/SharpLearning.InputOutput/Serialization/GenericBinarySerializer.cs b/src/SharpLearning.InputOutput/Serialization/GenericBinarySerializer.cs index 410a3d0d..c51710c7 100644 --- a/src/SharpLearning.InputOutput/Serialization/GenericBinarySerializer.cs +++ b/src/SharpLearning.InputOutput/Serialization/GenericBinarySerializer.cs @@ -54,12 +54,12 @@ public void Serialize(T data, Func writer) using (var baseWriter = writer()) { - if(baseWriter is StreamWriter) + if (baseWriter is StreamWriter) { var baseStream = ((StreamWriter)baseWriter).BaseStream; serializer.Serialize(baseStream, data); } - else if(baseWriter is StringWriter) + else if (baseWriter is StringWriter) { using (var memoryStream = new MemoryStream()) { diff --git a/src/SharpLearning.InputOutput/Serialization/GenericXmlDataContractSerializer.cs b/src/SharpLearning.InputOutput/Serialization/GenericXmlDataContractSerializer.cs index a9a7b687..d442b102 100644 --- a/src/SharpLearning.InputOutput/Serialization/GenericXmlDataContractSerializer.cs +++ b/src/SharpLearning.InputOutput/Serialization/GenericXmlDataContractSerializer.cs @@ -65,13 +65,13 @@ public void Serialize(T data, Func writer) using (var xmlWriter = XmlWriter.Create(texWriter, settings)) { var serializer = new DataContractSerializer(typeof(T), new DataContractSerializerSettings() - { - KnownTypes = m_knownTypes, - MaxItemsInObjectGraph = int.MaxValue, - IgnoreExtensionDataObject = false, - PreserveObjectReferences = m_preserveObjectReferences, - DataContractResolver = new GenericResolver() - }); + { + KnownTypes = m_knownTypes, + MaxItemsInObjectGraph = int.MaxValue, + IgnoreExtensionDataObject = false, + PreserveObjectReferences = m_preserveObjectReferences, + DataContractResolver = new GenericResolver() + }); serializer.WriteObject(xmlWriter, data); } @@ -86,20 +86,20 @@ public void Serialize(T data, Func writer) /// public T Deserialize(Func reader) { - using(var textReader = reader()) + using (var textReader = reader()) { using (var xmlReader = XmlReader.Create(textReader)) { var serializer = new DataContractSerializer(typeof(T), new DataContractSerializerSettings() - { - KnownTypes = m_knownTypes, - MaxItemsInObjectGraph = int.MaxValue, - IgnoreExtensionDataObject = false, - PreserveObjectReferences = m_preserveObjectReferences, - DataContractResolver = new GenericResolver() - }); - - return (T)serializer.ReadObject(xmlReader); + { + KnownTypes = m_knownTypes, + MaxItemsInObjectGraph = int.MaxValue, + IgnoreExtensionDataObject = false, + PreserveObjectReferences = m_preserveObjectReferences, + DataContractResolver = new GenericResolver() + }); + + return (T)serializer.ReadObject(xmlReader); } } } @@ -165,9 +165,9 @@ public static GenericResolver
Merge(GenericResolver resolver1, GenericResolver r return new GenericResolver(types.ToArray()); } - public override Type ResolveName(string typeName, - string typeNamespace, - Type declaredType, + public override Type ResolveName(string typeName, + string typeNamespace, + Type declaredType, DataContractResolver knownTypeResolver) { if (m_namesToType.ContainsKey(typeNamespace)) @@ -180,10 +180,10 @@ public override Type ResolveName(string typeName, return knownTypeResolver.ResolveName(typeName, typeNamespace, declaredType, null); } - public override bool TryResolveType(Type type, - Type declaredType, - DataContractResolver knownTypeResolver, - out XmlDictionaryString typeName, + public override bool TryResolveType(Type type, + Type declaredType, + DataContractResolver knownTypeResolver, + out XmlDictionaryString typeName, out XmlDictionaryString typeNamespace) { if (m_typeToNames.ContainsKey(type)) @@ -196,7 +196,7 @@ public override bool TryResolveType(Type type, } else { - return knownTypeResolver.TryResolveType(type, declaredType, null, + return knownTypeResolver.TryResolveType(type, declaredType, null, out typeName, out typeNamespace); } } diff --git a/src/SharpLearning.Metrics.Test/Classification/ClassificationMatrixStringConverterTest.cs b/src/SharpLearning.Metrics.Test/Classification/ClassificationMatrixStringConverterTest.cs index 32514534..2a9215ca 100644 --- a/src/SharpLearning.Metrics.Test/Classification/ClassificationMatrixStringConverterTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/ClassificationMatrixStringConverterTest.cs @@ -14,7 +14,7 @@ public void ClassificationMatrixStringConverter_Convert() var errorMatrix = new double[,] { { 1.0, 0.0 }, { 1.0, 0.0 } }; var uniqueTargets = new List { 1.0, 2.0 }; - var actual = ClassificationMatrixStringConverter.Convert(uniqueTargets, + var actual = ClassificationMatrixStringConverter.Convert(uniqueTargets, confusionMatrix, errorMatrix, 0.0); var expected = ";1;2;1;2\r\n1;10.000;0.000;100.000;0.000\r\n2;0.000;10.000;100.000;0.000\r\nError: 0.000\r\n"; @@ -29,7 +29,7 @@ public void ClassificationMatrixStringConverter_Convert_TargetStringMapping() var uniqueTargets = new List { 1.0, 2.0 }; var uniqueStringTargets = new List { "Positive", "Negative" }; - var actual = ClassificationMatrixStringConverter.Convert(uniqueStringTargets, + var actual = ClassificationMatrixStringConverter.Convert(uniqueStringTargets, confusionMatrix, errorMatrix, 0.0); var expected = ";Positive;Negative;Positive;Negative\r\nPositive;10.000;0.000;100.000;0.000\r\nNegative;0.000;10.000;100.000;0.000\r\nError: 0.000\r\n"; diff --git a/src/SharpLearning.Metrics.Test/Classification/ClassificationMatrixTest.cs b/src/SharpLearning.Metrics.Test/Classification/ClassificationMatrixTest.cs index 11669757..ecb00607 100644 --- a/src/SharpLearning.Metrics.Test/Classification/ClassificationMatrixTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/ClassificationMatrixTest.cs @@ -24,7 +24,7 @@ public void ClassificationMatrix_ConfusionMatrix() [TestMethod] public void ClassificationMatrix_ErrorMatrix() { - var uniqueTargets = new List {0, 1, 2}; + var uniqueTargets = new List { 0, 1, 2 }; var confusionmatrix = new int[,] { { 1, 0, 0 }, { 0, 1, 1 }, { 0, 0, 0 } }; var actual = ClassificationMatrix.ErrorMatrix(uniqueTargets, confusionmatrix); diff --git a/src/SharpLearning.Metrics.Test/Classification/F1ScoreMetricTest.cs b/src/SharpLearning.Metrics.Test/Classification/F1ScoreMetricTest.cs index cc77d00a..a58fe424 100644 --- 
a/src/SharpLearning.Metrics.Test/Classification/F1ScoreMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/F1ScoreMetricTest.cs @@ -57,7 +57,7 @@ public void F1ScoreMetric_All_Error() [TestMethod] public void F1ScoreMetric_Error() { - var targets = new double[] { 0, 1, 1, 1, 1, 0, 0, 1}; + var targets = new double[] { 0, 1, 1, 1, 1, 0, 0, 1 }; var predictions = new double[] { 1, 1, 1, 0, 0, 0, 1, 1 }; var sut = new F1ScoreMetric(1); diff --git a/src/SharpLearning.Metrics.Test/Classification/LogLossClassificationProbabilityMetricTest.cs b/src/SharpLearning.Metrics.Test/Classification/LogLossClassificationProbabilityMetricTest.cs index 803a12d8..c98697c4 100644 --- a/src/SharpLearning.Metrics.Test/Classification/LogLossClassificationProbabilityMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/LogLossClassificationProbabilityMetricTest.cs @@ -12,7 +12,7 @@ public class LogLossClassificationProbabilityMetricTest public void LogLossClassificationMetric_Error_1() { var sut = new LogLossClassificationProbabilityMetric(1e-15); - var predictions = new ProbabilityPrediction[] { + var predictions = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 1.0 }, { 1, 0.0 }, { 2, 0.0 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1, 1.0 }, { 2, 0.0 } }), new ProbabilityPrediction(2, new Dictionary { { 0, 0.0 }, { 1, 0.0 }, { 2, 1.0 } }), @@ -28,7 +28,7 @@ public void LogLossClassificationMetric_Error_1() public void LogLossClassificationMetric_Error_2() { var sut = new LogLossClassificationProbabilityMetric(1e-15); - var predictions = new ProbabilityPrediction[] { + var predictions = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 1.0 }, { 1, 1.0 }, { 2, 1.0 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1, 1.0 }, { 2, 0.0 } }), new ProbabilityPrediction(2, new Dictionary { { 0, 0.0 }, { 1, 0.0 }, { 2, 1.0 } }), @@ -44,7 +44,7 @@ public void LogLossClassificationMetric_Error_2() public void LogLossClassificationMetric_ErrorString() { var sut = new LogLossClassificationProbabilityMetric(1e-15); - var predictions = new ProbabilityPrediction[] { + var predictions = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 1.0 }, { 1, 1.0 }, { 2, 1.0 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1, 1.0 }, { 2, 0.0 } }), new ProbabilityPrediction(2, new Dictionary { { 0, 0.0 }, { 1, 0.0 }, { 2, 1.0 } }), @@ -54,7 +54,7 @@ public void LogLossClassificationMetric_ErrorString() var actual = sut.ErrorString(targets, predictions); var expected = ";0;1;2;0;1;2\r\n0;1.000;0.000;0.000;100.000;0.000;0.000\r\n1;0.000;1.000;0.000;0.000;100.000;0.000\r\n2;0.000;0.000;1.000;0.000;0.000;100.000\r\nError: 36.620\r\n"; - + Assert.AreEqual(expected, actual); } @@ -62,7 +62,7 @@ public void LogLossClassificationMetric_ErrorString() public void LogLossClassificationMetric_ErrorString_TargetStringMapping() { var sut = new LogLossClassificationProbabilityMetric(1e-15); - var predictions = new ProbabilityPrediction[] { + var predictions = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 1.0 }, { 1, 1.0 }, { 2, 1.0 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1, 1.0 }, { 2, 0.0 } }), new ProbabilityPrediction(2, new Dictionary { { 0, 0.0 }, { 1, 0.0 }, { 2, 1.0 } }), diff --git a/src/SharpLearning.Metrics.Test/Classification/PrecisionMetricTest.cs 
b/src/SharpLearning.Metrics.Test/Classification/PrecisionMetricTest.cs index 765a187c..edc8d38c 100644 --- a/src/SharpLearning.Metrics.Test/Classification/PrecisionMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/PrecisionMetricTest.cs @@ -57,7 +57,7 @@ public void PrecisionMetric_All_Error() [TestMethod] public void PrecisionMetric_Error() { - var targets = new double[] { 0, 1, 1, 1, 1, 0, 0, 1}; + var targets = new double[] { 0, 1, 1, 1, 1, 0, 0, 1 }; var predictions = new double[] { 1, 1, 1, 0, 0, 0, 1, 1 }; var sut = new PrecisionMetric(1); diff --git a/src/SharpLearning.Metrics.Test/Classification/RecallMetricTest.cs b/src/SharpLearning.Metrics.Test/Classification/RecallMetricTest.cs index e9ea2712..950fb82d 100644 --- a/src/SharpLearning.Metrics.Test/Classification/RecallMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/RecallMetricTest.cs @@ -57,7 +57,7 @@ public void RecallMetric_All_Error() [TestMethod] public void RecallMetric_Error() { - var targets = new double[] { 0, 1, 1, 1, 1, 0, 0, 1}; + var targets = new double[] { 0, 1, 1, 1, 1, 0, 0, 1 }; var predictions = new double[] { 1, 1, 1, 0, 1, 0, 1, 1 }; var sut = new RecallMetric(1); diff --git a/src/SharpLearning.Metrics.Test/Classification/RocAucClassificationProbabilityMetricTest.cs b/src/SharpLearning.Metrics.Test/Classification/RocAucClassificationProbabilityMetricTest.cs index 6d9332c5..fb51b376 100644 --- a/src/SharpLearning.Metrics.Test/Classification/RocAucClassificationProbabilityMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/RocAucClassificationProbabilityMetricTest.cs @@ -73,7 +73,7 @@ public void RocAucClassificationMetric_Error_Always_Negative() var targets = positives.ToArray(); var probabilities = targets - .Select(s => s == 0 ? new ProbabilityPrediction(0.0, new Dictionary { { 0.0, 0.0 }, { 1.0, 0.0 } } ) : + .Select(s => s == 0 ? new ProbabilityPrediction(0.0, new Dictionary { { 0.0, 0.0 }, { 1.0, 0.0 } }) : new ProbabilityPrediction(1.0, new Dictionary { { 0.0, 0.0 }, { 1.0, 0.0 } })) .ToArray(); @@ -92,7 +92,7 @@ public void RocAucClassificationMetric_Error_Always_Positve() var targets = positives.ToArray(); var probabilities = targets - .Select(s => s == 0 ? new ProbabilityPrediction(0.0, new Dictionary { { 0.0, 1.0 }, { 1.0, 1.0 } } ) : + .Select(s => s == 0 ? 
new ProbabilityPrediction(0.0, new Dictionary { { 0.0, 1.0 }, { 1.0, 1.0 } }) : new ProbabilityPrediction(1.0, new Dictionary { { 0.0, 1.0 }, { 1.0, 1.0 } })) .ToArray(); @@ -156,7 +156,7 @@ public void RocAucClassificationMetric_ErrorString_TargetStringMapping() { 0, "Negative" }, { 1, "Positive" } }; - + var actual = sut.ErrorString(targets, probabilities, targetStringMapping); var expected = ";Negative;Positive;Negative;Positive\r\nNegative;1.000;0.000;100.000;0.000\r\nPositive;0.000;1.000;0.000;100.000\r\nError: 0.000\r\n"; diff --git a/src/SharpLearning.Metrics.Test/Classification/TotalErrorClassificationMetricTest.cs b/src/SharpLearning.Metrics.Test/Classification/TotalErrorClassificationMetricTest.cs index 9e9a1b18..4d920945 100644 --- a/src/SharpLearning.Metrics.Test/Classification/TotalErrorClassificationMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/TotalErrorClassificationMetricTest.cs @@ -52,7 +52,7 @@ public void TotalErrorClassificationMetric_ErrorString() var sut = new TotalErrorClassificationMetric(); var actual = sut.ErrorString(targets, predictions); - var expected = ";0;1;2;3;4;0;1;2;3;4\r\n0;1.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000;0.000\r\n1;0.000;2.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000\r\n2;0.000;0.000;1.000;1.000;0.000;0.000;0.000;50.000;50.000;0.000\r\n3;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\n4;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\nError: 28.571\r\n";; + var expected = ";0;1;2;3;4;0;1;2;3;4\r\n0;1.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000;0.000\r\n1;0.000;2.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000\r\n2;0.000;0.000;1.000;1.000;0.000;0.000;0.000;50.000;50.000;0.000\r\n3;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\n4;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\nError: 28.571\r\n"; ; Assert.AreEqual(expected, actual); } @@ -67,7 +67,7 @@ public void TotalErrorClassificationMetric_ErrorString_TargetStringMapping() var actual = sut.ErrorString(targets, predictions, targetStringMapping); var expected = ";One;Two;Three;Four;Five;One;Two;Three;Four;Five\r\nOne;1.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000;0.000\r\nTwo;0.000;2.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000\r\nThree;0.000;0.000;1.000;1.000;0.000;0.000;0.000;50.000;50.000;0.000\r\nFour;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\nFive;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\nError: 28.571\r\n"; ; - + Assert.AreEqual(expected, actual); } } diff --git a/src/SharpLearning.Metrics.Test/Regression/MeanSquaredErrorRegressionMetricTest.cs b/src/SharpLearning.Metrics.Test/Regression/MeanSquaredErrorRegressionMetricTest.cs index 489c57a0..9578c451 100644 --- a/src/SharpLearning.Metrics.Test/Regression/MeanSquaredErrorRegressionMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Regression/MeanSquaredErrorRegressionMetricTest.cs @@ -9,7 +9,7 @@ public class MeanSquaredErrorRegressionMetricTest [TestMethod] public void MeanSquaredErrorRegressionMetric_Error_Zero_Error() { - var targets = new double[] { 0, 0, 0 ,0, 0, 0 }; + var targets = new double[] { 0, 0, 0, 0, 0, 0 }; var predictions = new double[] { 0, 0, 0, 0, 0, 0 }; var sut = new MeanSquaredErrorRegressionMetric(); diff --git a/src/SharpLearning.Metrics.Test/Regression/RootMeanSquarePercentageRegressionMetricTest.cs b/src/SharpLearning.Metrics.Test/Regression/RootMeanSquarePercentageRegressionMetricTest.cs index d56a7163..21164a30 100644 
--- a/src/SharpLearning.Metrics.Test/Regression/RootMeanSquarePercentageRegressionMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Regression/RootMeanSquarePercentageRegressionMetricTest.cs @@ -12,7 +12,7 @@ public void RootMeanSquarePercentageRegressionMetricTest_Error() var targets = new double[] { 1.0, 2.3, 3.1, 4.4, 5.8 }; var predictions = new double[] { 1.0, 2.0, 3.0, 4.0, 5.0 }; var sut = new RootMeanSquarePercentageRegressionMetric(); - + var result = sut.Error(targets, predictions); Assert.AreEqual(0.0952294579674858, result, 0.00001); } diff --git a/src/SharpLearning.Metrics.Test/Regression/RootMeanSquareRegressionMetricTest.cs b/src/SharpLearning.Metrics.Test/Regression/RootMeanSquareRegressionMetricTest.cs index d092121b..ab60de3b 100644 --- a/src/SharpLearning.Metrics.Test/Regression/RootMeanSquareRegressionMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Regression/RootMeanSquareRegressionMetricTest.cs @@ -12,7 +12,7 @@ public void RootMeanSquareRegressionMetric_Error() var targets = new double[] { 1.0, 2.3, 3.1, 4.4, 5.8 }; var predictions = new double[] { 1.0, 2.0, 3.0, 4.0, 5.0 }; var sut = new RootMeanSquareRegressionMetric(); - + var result = sut.Error(targets, predictions); Assert.AreEqual(0.42426406871192851, result, 0.00001); } diff --git a/src/SharpLearning.Metrics/Classification/ClassificationMatrix.cs b/src/SharpLearning.Metrics/Classification/ClassificationMatrix.cs index 5839bf42..6995f338 100644 --- a/src/SharpLearning.Metrics/Classification/ClassificationMatrix.cs +++ b/src/SharpLearning.Metrics/Classification/ClassificationMatrix.cs @@ -32,7 +32,7 @@ public static class ClassificationMatrix return confusionMatrix; } - + /// /// Creates an error matrix based on the provided confusion matrix /// diff --git a/src/SharpLearning.Metrics/Classification/ClassificationMatrixStringConverter.cs b/src/SharpLearning.Metrics/Classification/ClassificationMatrixStringConverter.cs index 4133b125..eb376437 100644 --- a/src/SharpLearning.Metrics/Classification/ClassificationMatrixStringConverter.cs +++ b/src/SharpLearning.Metrics/Classification/ClassificationMatrixStringConverter.cs @@ -21,16 +21,16 @@ public static class ClassificationMatrixStringConverter /// /// public static string Convert( - List uniqueTargets, - Dictionary targetStringMapping, - int[,] confusionMatrix, - double[,] errorMatrix, + List uniqueTargets, + Dictionary targetStringMapping, + int[,] confusionMatrix, + double[,] errorMatrix, double error) { var uniqueStringTargets = uniqueTargets.Select(t => targetStringMapping[t]).ToList(); return Convert(uniqueStringTargets, confusionMatrix, errorMatrix, error); } - + /// /// Creates a string representation of the classification matrix consisting of the provided confusion matrix and error matrix /// @@ -40,9 +40,9 @@ public static string Convert( /// /// public static string Convert( - List uniqueTargets, - int[,] confusionMatrix, - double[,] errorMatrix, + List uniqueTargets, + int[,] confusionMatrix, + double[,] errorMatrix, double error) { var combinedMatrix = CombineMatrices(confusionMatrix, errorMatrix); diff --git a/src/SharpLearning.Metrics/Classification/IClassificationMetric.cs b/src/SharpLearning.Metrics/Classification/IClassificationMetric.cs index aa37e47c..956c2579 100644 --- a/src/SharpLearning.Metrics/Classification/IClassificationMetric.cs +++ b/src/SharpLearning.Metrics/Classification/IClassificationMetric.cs @@ -16,7 +16,7 @@ public interface IClassificationMetric : IMetric /// /// new double Error(T[] targets, T[] predictions); - + /// /// 
Gives a string representation of the classification matrix /// diff --git a/src/SharpLearning.Metrics/Classification/IClassificationProbabilityMetric.cs b/src/SharpLearning.Metrics/Classification/IClassificationProbabilityMetric.cs index e1f1a396..f688e7ee 100644 --- a/src/SharpLearning.Metrics/Classification/IClassificationProbabilityMetric.cs +++ b/src/SharpLearning.Metrics/Classification/IClassificationProbabilityMetric.cs @@ -33,7 +33,7 @@ public interface IClassificationProbabilityMetric : IMetric /// /// - string ErrorString(double[] targets, ProbabilityPrediction[] predictions, + string ErrorString(double[] targets, ProbabilityPrediction[] predictions, Dictionary targetStringMapping); - } + } } diff --git a/src/SharpLearning.Metrics/Classification/LogLossClassificationProbabilityMetric.cs b/src/SharpLearning.Metrics/Classification/LogLossClassificationProbabilityMetric.cs index 91f54a4f..5c09c8ba 100644 --- a/src/SharpLearning.Metrics/Classification/LogLossClassificationProbabilityMetric.cs +++ b/src/SharpLearning.Metrics/Classification/LogLossClassificationProbabilityMetric.cs @@ -43,10 +43,10 @@ public double Error(double[] targets, ProbabilityPrediction[] predictions) var target = targets[i]; var probabilitySum = probabilities.Select(p => p.Value) .Sum(); - + foreach (var probability in probabilities) { - if(probability.Key == target) + if (probability.Key == target) { var prop = Math.Max(m_epsilon, probability.Value); prop = Math.Min(1.0 - m_epsilon, prop); @@ -81,8 +81,8 @@ public string ErrorString(double[] targets, ProbabilityPrediction[] probabilityP /// /// public string ErrorString( - double[] targets, - ProbabilityPrediction[] probabilityPredictions, + double[] targets, + ProbabilityPrediction[] probabilityPredictions, Dictionary targetStringMapping) { var error = Error(targets, probabilityPredictions); diff --git a/src/SharpLearning.Metrics/Classification/PrecisionMetric.cs b/src/SharpLearning.Metrics/Classification/PrecisionMetric.cs index aa993d9c..ba35c02f 100644 --- a/src/SharpLearning.Metrics/Classification/PrecisionMetric.cs +++ b/src/SharpLearning.Metrics/Classification/PrecisionMetric.cs @@ -49,19 +49,19 @@ double Precision(T[] targets, T[] predictions) for (int i = 0; i < targets.Length; i++) { - if (targets[i].Equals(m_positiveTarget) && + if (targets[i].Equals(m_positiveTarget) && predictions[i].Equals(m_positiveTarget)) { truePositives++; } - else if (!targets[i].Equals(m_positiveTarget) && + else if (!targets[i].Equals(m_positiveTarget) && predictions[i].Equals(m_positiveTarget)) { falsePositves++; } } - if(truePositives + falsePositves == 0) + if (truePositives + falsePositves == 0) { return 0.0; } diff --git a/src/SharpLearning.Metrics/Classification/RecallMetric.cs b/src/SharpLearning.Metrics/Classification/RecallMetric.cs index b02dd77a..ebe98f90 100644 --- a/src/SharpLearning.Metrics/Classification/RecallMetric.cs +++ b/src/SharpLearning.Metrics/Classification/RecallMetric.cs @@ -51,7 +51,7 @@ double Recall(T[] targets, T[] predictions) for (int i = 0; i < targets.Length; i++) { - if(targets[i].Equals(m_positiveTarget) && predictions[i].Equals(m_positiveTarget)) + if (targets[i].Equals(m_positiveTarget) && predictions[i].Equals(m_positiveTarget)) { truePositives++; } @@ -65,7 +65,7 @@ double Recall(T[] targets, T[] predictions) { return 0.0; } - + return (double)truePositives / ((double)truePositives + (double)falseNegatives); } diff --git a/src/SharpLearning.Metrics/Classification/RocAucClassificationProbabilityMetric.cs 
b/src/SharpLearning.Metrics/Classification/RocAucClassificationProbabilityMetric.cs index 3e6e2882..88081f44 100644 --- a/src/SharpLearning.Metrics/Classification/RocAucClassificationProbabilityMetric.cs +++ b/src/SharpLearning.Metrics/Classification/RocAucClassificationProbabilityMetric.cs @@ -37,7 +37,7 @@ public double Error(double[] targets, ProbabilityPrediction[] predictions) if (targets.Distinct().Count() == 1) { - throw new ArgumentException("Only one class present, " + + throw new ArgumentException("Only one class present, " + "Only binary classification problems supported."); } @@ -45,15 +45,15 @@ public double Error(double[] targets, ProbabilityPrediction[] predictions) .Select(p => p.Probabilities[m_positiveTarget]) .ToArray(); - var targetProbabilities = targets.Zip(positiveTargetProbabilities, + var targetProbabilities = targets.Zip(positiveTargetProbabilities, (l, s) => new { target = l, Probability = s }); targetProbabilities = targetProbabilities.OrderByDescending(l => l.Probability); var counts = targetProbabilities.GroupBy(l => l.target) .Select(l => new { Label = l.Key, Count = l.Count() }); - + int negativeCount = counts.Where(s => !s.Label.Equals(m_positiveTarget)) - .Select(s => s.Count).Sum();; + .Select(s => s.Count).Sum(); ; int positivesCount = counts.Where(s => s.Label.Equals(m_positiveTarget)) .Select(s => s.Count).Sum(); @@ -68,10 +68,10 @@ public double Error(double[] targets, ProbabilityPrediction[] predictions) if (probability != previousProbability) { - auc = auc + trapezoidArea( - fpCount * 1.0 / negativeCount, - previousFpCount * 1.0 / negativeCount, - tpCount * 1.0 / positivesCount, + auc = auc + trapezoidArea( + fpCount * 1.0 / negativeCount, + previousFpCount * 1.0 / negativeCount, + tpCount * 1.0 / positivesCount, previousTpCount * 1.0 / positivesCount); previousProbability = probability; @@ -85,13 +85,13 @@ public double Error(double[] targets, ProbabilityPrediction[] predictions) } auc = auc + trapezoidArea( - 1.0, previousFpCount * 1.0 / negativeCount, + 1.0, previousFpCount * 1.0 / negativeCount, 1.0, previousTpCount * 1.0 / positivesCount); return 1.0 - auc; } - + /// /// Calculate the trapezoidal area bound by the quad (X1,X2,Y1,Y2) /// diff --git a/src/SharpLearning.Metrics/Classification/TotalErrorClassificationMetric.cs b/src/SharpLearning.Metrics/Classification/TotalErrorClassificationMetric.cs index 014acf56..798ca049 100644 --- a/src/SharpLearning.Metrics/Classification/TotalErrorClassificationMetric.cs +++ b/src/SharpLearning.Metrics/Classification/TotalErrorClassificationMetric.cs @@ -32,7 +32,7 @@ double TotalError(List uniques, int[,] confusionMatrix) for (int row = 0; row < uniques.Count; ++row) { - errorSum -= confusionMatrix[row,row]; + errorSum -= confusionMatrix[row, row]; } return (double)errorSum / totalSum; diff --git a/src/SharpLearning.Metrics/Classification/Utilities.cs b/src/SharpLearning.Metrics/Classification/Utilities.cs index 6372a680..e298a8f3 100644 --- a/src/SharpLearning.Metrics/Classification/Utilities.cs +++ b/src/SharpLearning.Metrics/Classification/Utilities.cs @@ -22,7 +22,7 @@ internal static string ClassificationMatrixString(T[] targets, T[] prediction var confusionMatrix = ClassificationMatrix.ConfusionMatrix(uniques, targets, predictions); var errorMatrix = ClassificationMatrix.ErrorMatrix(uniques, confusionMatrix); - return ClassificationMatrixStringConverter.Convert(uniques, confusionMatrix, + return ClassificationMatrixStringConverter.Convert(uniques, confusionMatrix, errorMatrix, error); } diff --git 
a/src/SharpLearning.Metrics/Impurity/GiniImpurityMetric.cs b/src/SharpLearning.Metrics/Impurity/GiniImpurityMetric.cs index a31f7f04..ba959d3a 100644 --- a/src/SharpLearning.Metrics/Impurity/GiniImpurityMetric.cs +++ b/src/SharpLearning.Metrics/Impurity/GiniImpurityMetric.cs @@ -24,7 +24,7 @@ public double Impurity(double[] values) { var targetKey = (int)values[i]; - if(!m_dictionary.ContainsKey(targetKey)) + if (!m_dictionary.ContainsKey(targetKey)) { m_dictionary.Add(targetKey, 1); } diff --git a/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs b/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs index 61778f08..171ef398 100644 --- a/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs +++ b/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs @@ -27,12 +27,12 @@ public sealed class McNemarModelComparison /// public int[][] Compare(double[] model1Predictions, double[] model2Predictions, double[] targets) { - if (model1Predictions.Length != model2Predictions.Length || + if (model1Predictions.Length != model2Predictions.Length || model1Predictions.Length != targets.Length) { throw new ArgumentException("Model prediction lengths differ from target length. " + - $"Model1: {model1Predictions.Length}, " + - $"Model2: {model2Predictions.Length}, " + + $"Model1: {model1Predictions.Length}, " + + $"Model2: {model2Predictions.Length}, " + $"Targets: {targets.Length}"); } @@ -81,12 +81,12 @@ public int[][] Compare(double[] model1Predictions, double[] model2Predictions, d public string CompareString(double[] model1Predictions, double[] model2Predictions, double[] targets) { var mcNemarMatrix = Compare(model1Predictions, model2Predictions, targets); - + var builder = new StringBuilder(); builder.AppendLine(";Model1Wrong;Model1Right"); builder.AppendLine($"Model2Wrong;{mcNemarMatrix[0][0]};{mcNemarMatrix[0][1]}"); builder.Append($"Model2Right;{mcNemarMatrix[1][0]};{mcNemarMatrix[1][1]}"); - + return builder.ToString(); } } diff --git a/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs b/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs index aba767c0..b61ca499 100644 --- a/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs +++ b/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs @@ -46,7 +46,7 @@ public double Error(T[] targets, T[] predictions) for (int i = 0; i < length; i++) { var prediction = predictions[i]; - if(m_workTargets.Contains(prediction) && + if (m_workTargets.Contains(prediction) && !Contains(predictions, i, prediction)) { hits += 1.0; diff --git a/src/SharpLearning.Metrics/Regression/CoefficientOfDeterminationMetric.cs b/src/SharpLearning.Metrics/Regression/CoefficientOfDeterminationMetric.cs index 4cd667d8..b6af904e 100644 --- a/src/SharpLearning.Metrics/Regression/CoefficientOfDeterminationMetric.cs +++ b/src/SharpLearning.Metrics/Regression/CoefficientOfDeterminationMetric.cs @@ -29,8 +29,8 @@ public double Error(double[] targets, double[] predictions) { SSres += Math.Pow(targets[i] - predictions[i], 2); } - - return SStot != 0.0?1 - SSres / SStot:0; + + return SStot != 0.0 ? 
1 - SSres / SStot : 0; } } } diff --git a/src/SharpLearning.Metrics/Regression/MeanSquaredErrorRegressionMetric.cs b/src/SharpLearning.Metrics/Regression/MeanSquaredErrorRegressionMetric.cs index 9c885c83..90aee6e5 100644 --- a/src/SharpLearning.Metrics/Regression/MeanSquaredErrorRegressionMetric.cs +++ b/src/SharpLearning.Metrics/Regression/MeanSquaredErrorRegressionMetric.cs @@ -19,7 +19,7 @@ public double Error(double[] targets, double[] predictions) { throw new ArgumentException("targets and predictions length do not match"); } - + var meanSquareError = 0.0; for (int i = 0; i < targets.Length; ++i) { diff --git a/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs b/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs index 29c6cea1..69df068b 100644 --- a/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs +++ b/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs @@ -29,40 +29,40 @@ public double Error(double[] target, double[] predicted) /// double GiniCoefficient(double[] target, double[] predicted) { - if (target.Length != predicted.Length) - { throw new ArgumentException(); } - - var all = predicted.Zip(target, (prediction, actual) => new - { - actualValue = actual, - predictedValue = prediction - }) - .Zip(Enumerable.Range(1, target.Length), (ap, i) => new - { - ap.actualValue, - ap.predictedValue, - originalIndex = i - }) - .OrderByDescending(ap => ap.predictedValue) // important to sort descending by prediction - .ThenBy(ap => ap.originalIndex); // secondary sorts to ensure unambiguous orders - - var totalActualLosses = target.Sum(); + if (target.Length != predicted.Length) + { throw new ArgumentException(); } - double populationDelta = 1.0 / (double)target.Length; - double accumulatedPopulationPercentageSum = 0; - double accumulatedLossPercentageSum = 0; - - double giniSum = 0.0; - - foreach (var currentPair in all) - { - accumulatedLossPercentageSum += (currentPair.actualValue / totalActualLosses); - accumulatedPopulationPercentageSum += populationDelta; - giniSum += accumulatedLossPercentageSum - accumulatedPopulationPercentageSum; - } - - var gini = giniSum / (double)target.Length; - return gini; + var all = predicted.Zip(target, (prediction, actual) => new + { + actualValue = actual, + predictedValue = prediction + }) + .Zip(Enumerable.Range(1, target.Length), (ap, i) => new + { + ap.actualValue, + ap.predictedValue, + originalIndex = i + }) + .OrderByDescending(ap => ap.predictedValue) // important to sort descending by prediction + .ThenBy(ap => ap.originalIndex); // secondary sorts to ensure unambiguous orders + + var totalActualLosses = target.Sum(); + + double populationDelta = 1.0 / (double)target.Length; + double accumulatedPopulationPercentageSum = 0; + double accumulatedLossPercentageSum = 0; + + double giniSum = 0.0; + + foreach (var currentPair in all) + { + accumulatedLossPercentageSum += (currentPair.actualValue / totalActualLosses); + accumulatedPopulationPercentageSum += populationDelta; + giniSum += accumulatedLossPercentageSum - accumulatedPopulationPercentageSum; + } + + var gini = giniSum / (double)target.Length; + return gini; } } } diff --git a/src/SharpLearning.Neural.Test/ConvUtilsTest.cs b/src/SharpLearning.Neural.Test/ConvUtilsTest.cs index c3b551d9..fbb4e889 100644 --- a/src/SharpLearning.Neural.Test/ConvUtilsTest.cs +++ b/src/SharpLearning.Neural.Test/ConvUtilsTest.cs @@ -26,10 +26,10 @@ public void ConvUtils_Batch_Im2Cols() var 
random = new Random(42); var input = Matrix.Build.Random(batchSize, inputWidth * inputHeight * inputDepth, 42); - var filterGridWidth = ConvUtils.GetFilterGridLength(inputWidth, filterWidth, stride, + var filterGridWidth = ConvUtils.GetFilterGridLength(inputWidth, filterWidth, stride, padding, BorderMode.Valid); - var filterGridHeight = ConvUtils.GetFilterGridLength(inputHeight, filterHeight, stride, + var filterGridHeight = ConvUtils.GetFilterGridLength(inputHeight, filterHeight, stride, padding, BorderMode.Valid); var k = filterWidth * filterHeight * inputDepth; @@ -38,12 +38,12 @@ public void ConvUtils_Batch_Im2Cols() var actual = Matrix.Build.Dense(k, n); ConvUtils.Batch_Im2Col(input, inputDepth, inputHeight, inputWidth, filterHeight, filterWidth, - padding, padding, stride, stride, BorderMode.Valid, actual); + padding, padding, stride, stride, BorderMode.Valid, actual); Trace.WriteLine(actual.ToString()); Trace.WriteLine(string.Join(",", actual.ToColumnMajorArray())); - var expected = Matrix.Build.Dense(k, n, new float[] { 0.408388f, -0.5256838f, -1.416015f, -0.3205518f, 0.8964508f, -0.7706847f, 0.1228476f, 1.401819f, 0.02538049f, 0.4443011f, 0.3597376f, -0.8992839f, -0.5256838f, -0.8472909f, -0.3205518f, 0.168334f, -0.7706847f, -0.2688324f, 1.401819f, 0.5753565f, 0.4443011f, -0.8027026f, -0.8992839f, -0.6576554f, -1.416015f, -0.3205518f, 0.1622419f, -0.8718526f, 0.1228476f, 1.401819f, -0.8105127f, -1.366049f, 0.3597376f, -0.8992839f, -0.09693441f, 0.1117831f, -0.3205518f, 0.168334f, -0.8718526f, 2.464335f, 1.401819f, 0.5753565f, -1.366049f, 0.7328596f, -0.8992839f, -0.6576554f, 0.1117831f, -2.00572f, -0.8723587f, 1.785321f, 0.02021696f, -1.087396f, -0.7902505f, -0.06449615f, -0.4799407f, 0.7755837f, -0.08005979f, -0.163763f, 1.463557f, -0.5891034f, 1.785321f, -0.7747191f, -1.087396f, 1.942754f, -0.06449615f, 0.08791012f, 0.7755837f, 1.559499f, -0.163763f, 1.144407f, -0.5891034f, 1.486937f, 0.02021696f, -1.087396f, 1.386084f, -0.742821f, -0.4799407f, 0.7755837f, -0.93938f, 0.4403726f, 1.463557f, -0.5891034f, 0.2961742f, -1.676224f, -1.087396f, 1.942754f, -0.742821f, 0.3750592f, 0.7755837f, 1.559499f, 0.4403726f, 1.018316f, -0.5891034f, 1.486937f, -1.676224f, 0.5095494f, -1.069885f, 0.1028096f, -0.5383296f, -0.5273784f, -1.362978f, -2.817736f, -0.3506753f, -2.379571f, -0.205604f, -0.8553149f, 1.364009f, 1.960906f, 0.1028096f, 0.06300805f, -0.5273784f, 0.1655738f, -2.817736f, -0.2654593f, -2.379571f, 0.3019102f, -0.8553149f, 0.380102f, 1.960906f, -1.644088f, -0.5383296f, -0.5273784f, 1.407161f, 0.8093351f, -0.3506753f, -2.379571f, -0.1132597f, 0.00849107f, 1.364009f, 1.960906f, -1.907569f, 1.585406f, -0.5273784f, 0.1655738f, 0.8093351f, -0.5961999f, -2.379571f, 0.3019102f, 0.00849107f, -0.9973568f, 1.960906f, -1.644088f, 1.585406f, 0.1513373f, 0.06503697f, -0.6606446f, 1.281655f, 0.2639574f, -0.3281617f, 0.6252633f, -0.9870397f, -0.2739736f, 0.5706424f, -0.6933832f, -0.9226705f, 1.837471f, -0.6606446f, -2.021355f, 0.2639574f, -1.713513f, 0.6252633f, -0.6887951f, -0.2739736f, -0.1102718f, -0.6933832f, -0.2514778f, 1.837471f, 1.012506f, 1.281655f, 0.2639574f, -0.6539868f, -1.332823f, -0.9870397f, -0.2739736f, -0.6845301f, 0.3220822f, -0.9226705f, 1.837471f, 2.257283f, -0.2592173f, 0.2639574f, -1.713513f, -1.332823f, -0.1056926f, -0.2739736f, -0.1102718f, 0.3220822f, 0.02583288f, 1.837471f, 1.012506f, -0.2592173f, 0.5775524f, -0.734176f, 0.5288628f, 0.314957f, 1.331584f, 0.1659867f, -0.0002207408f, -0.3023876f, 0.5506561f, -1.365916f, -0.314546f, -0.6079422f, 0.3696074f, 0.5288628f, 
-0.7030032f, 1.331584f, 0.7429405f, -0.0002207408f, -2.21279f, 0.5506561f, 0.5057944f, -0.314546f, -1.749763f, 0.3696074f, -0.1464183f, 0.314957f, 1.331584f, 0.2864983f, 0.9384909f, -0.3023876f, 0.5506561f, 1.133461f, 1.134041f, -0.6079422f, 0.3696074f, 0.2236174f, -0.9724815f, 1.331584f, 0.7429405f, 0.9384909f, 1.441582f, 0.5506561f, 0.5057944f, 1.134041f, 0.2430595f, 0.3696074f, -0.1464183f, -0.9724815f, 0.7229092f }); + var expected = Matrix.Build.Dense(k, n, new float[] { 0.408388f, -0.5256838f, -1.416015f, -0.3205518f, 0.8964508f, -0.7706847f, 0.1228476f, 1.401819f, 0.02538049f, 0.4443011f, 0.3597376f, -0.8992839f, -0.5256838f, -0.8472909f, -0.3205518f, 0.168334f, -0.7706847f, -0.2688324f, 1.401819f, 0.5753565f, 0.4443011f, -0.8027026f, -0.8992839f, -0.6576554f, -1.416015f, -0.3205518f, 0.1622419f, -0.8718526f, 0.1228476f, 1.401819f, -0.8105127f, -1.366049f, 0.3597376f, -0.8992839f, -0.09693441f, 0.1117831f, -0.3205518f, 0.168334f, -0.8718526f, 2.464335f, 1.401819f, 0.5753565f, -1.366049f, 0.7328596f, -0.8992839f, -0.6576554f, 0.1117831f, -2.00572f, -0.8723587f, 1.785321f, 0.02021696f, -1.087396f, -0.7902505f, -0.06449615f, -0.4799407f, 0.7755837f, -0.08005979f, -0.163763f, 1.463557f, -0.5891034f, 1.785321f, -0.7747191f, -1.087396f, 1.942754f, -0.06449615f, 0.08791012f, 0.7755837f, 1.559499f, -0.163763f, 1.144407f, -0.5891034f, 1.486937f, 0.02021696f, -1.087396f, 1.386084f, -0.742821f, -0.4799407f, 0.7755837f, -0.93938f, 0.4403726f, 1.463557f, -0.5891034f, 0.2961742f, -1.676224f, -1.087396f, 1.942754f, -0.742821f, 0.3750592f, 0.7755837f, 1.559499f, 0.4403726f, 1.018316f, -0.5891034f, 1.486937f, -1.676224f, 0.5095494f, -1.069885f, 0.1028096f, -0.5383296f, -0.5273784f, -1.362978f, -2.817736f, -0.3506753f, -2.379571f, -0.205604f, -0.8553149f, 1.364009f, 1.960906f, 0.1028096f, 0.06300805f, -0.5273784f, 0.1655738f, -2.817736f, -0.2654593f, -2.379571f, 0.3019102f, -0.8553149f, 0.380102f, 1.960906f, -1.644088f, -0.5383296f, -0.5273784f, 1.407161f, 0.8093351f, -0.3506753f, -2.379571f, -0.1132597f, 0.00849107f, 1.364009f, 1.960906f, -1.907569f, 1.585406f, -0.5273784f, 0.1655738f, 0.8093351f, -0.5961999f, -2.379571f, 0.3019102f, 0.00849107f, -0.9973568f, 1.960906f, -1.644088f, 1.585406f, 0.1513373f, 0.06503697f, -0.6606446f, 1.281655f, 0.2639574f, -0.3281617f, 0.6252633f, -0.9870397f, -0.2739736f, 0.5706424f, -0.6933832f, -0.9226705f, 1.837471f, -0.6606446f, -2.021355f, 0.2639574f, -1.713513f, 0.6252633f, -0.6887951f, -0.2739736f, -0.1102718f, -0.6933832f, -0.2514778f, 1.837471f, 1.012506f, 1.281655f, 0.2639574f, -0.6539868f, -1.332823f, -0.9870397f, -0.2739736f, -0.6845301f, 0.3220822f, -0.9226705f, 1.837471f, 2.257283f, -0.2592173f, 0.2639574f, -1.713513f, -1.332823f, -0.1056926f, -0.2739736f, -0.1102718f, 0.3220822f, 0.02583288f, 1.837471f, 1.012506f, -0.2592173f, 0.5775524f, -0.734176f, 0.5288628f, 0.314957f, 1.331584f, 0.1659867f, -0.0002207408f, -0.3023876f, 0.5506561f, -1.365916f, -0.314546f, -0.6079422f, 0.3696074f, 0.5288628f, -0.7030032f, 1.331584f, 0.7429405f, -0.0002207408f, -2.21279f, 0.5506561f, 0.5057944f, -0.314546f, -1.749763f, 0.3696074f, -0.1464183f, 0.314957f, 1.331584f, 0.2864983f, 0.9384909f, -0.3023876f, 0.5506561f, 1.133461f, 1.134041f, -0.6079422f, 0.3696074f, 0.2236174f, -0.9724815f, 1.331584f, 0.7429405f, 0.9384909f, 1.441582f, 0.5506561f, 0.5057944f, 1.134041f, 0.2430595f, 0.3696074f, -0.1464183f, -0.9724815f, 0.7229092f }); MatrixAsserts.AreEqual(expected, actual); } @@ -52,7 +52,7 @@ public void ConvUtils_Batch_Im2Cols() public void ConvUtils_Batch_Col2Im() { var 
batchSize = 5; - + var filterHeight = 2; var filterWidth = 2; @@ -63,10 +63,10 @@ public void ConvUtils_Batch_Col2Im() var inputHeight = 3; var inputDepth = 3; - var filterGridWidth = ConvUtils.GetFilterGridLength(inputWidth, filterWidth, stride, + var filterGridWidth = ConvUtils.GetFilterGridLength(inputWidth, filterWidth, stride, padding, BorderMode.Valid); - var filterGridHeight = ConvUtils.GetFilterGridLength(inputHeight, filterHeight, stride, + var filterGridHeight = ConvUtils.GetFilterGridLength(inputHeight, filterHeight, stride, padding, BorderMode.Valid); var k = filterWidth * filterHeight * inputDepth; @@ -82,7 +82,7 @@ public void ConvUtils_Batch_Col2Im() Trace.WriteLine(actual.ToString()); Trace.WriteLine(string.Join(",", actual.ToColumnMajorArray())); - var expected = Matrix.Build.Dense(batchSize, fanIn, new float[] { 0.408388f, -0.3281617f, -0.163763f, -0.7540793f, -0.8690567f, -0.8093507f, 0.2888344f, -1.777985f, -2.136633f, 2.92046f, -2.021355f, -0.4799407f, -0.6079422f, 0.5664175f, 1.640147f, 0.2616988f, -0.4687745f, -0.7903177f, 1.407904f, 0.1495381f, -1.212453f, 0.6085976f, -0.7663184f, -0.05670342f, 1.895431f, -0.6066797f, -0.2541801f, -0.01155096f, 1.438064f, -1.349128f, 1.942754f, 0.5057944f, -1.907569f, -0.5227588f, 0.5727027f, -1.167249f, 0.2078037f, 2.980192f, 0.4892522f, -0.6720377f, 0.9384909f, -0.9973568f, 0.5546624f, 1.710745f, 1.995577f, -0.734176f, -2.817736f, -0.8027026f, -0.7883626f, -1.275902f, -0.5054669f, 0.3228757f, 3.105314f, -0.3089013f, 1.549119f, -0.5383296f, 1.401819f, 1.837471f, 0.1251182f, -0.7002729f, 0.07180786f, -0.9396007f, 0.6037194f, -0.7305622f, 1.063156f, 4.591741f, 0.4193244f, -1.031005f, -3.045349f, 0.4254266f, 0.6900162f, -2.136511f, -1.578628f, 0.7839373f, 1.781849f, 0.1622419f, -0.6845301f, -1.676224f, 1.028266f, 0.9345228f, 0.789884f, 1.158841f, 1.703116f, -0.8997472f, -1.423375f, -0.1056926f, -0.08005979f, 1.399474f, -0.05612089f, -0.722365f, -0.6606446f, 0.08791012f, -1.749763f, 0.685056f, 0.3641174f, 0.2083111f, -0.5394329f, 1.846675f, 0.5931945f, -1.26804f, -1.087396f, 0.5506561f, -1.644088f, -0.8753259f, -1.839462f, 0.5598704f, -2.054844f, 1.20434f, -3.263947f, 1.221963f, -0.5145022f, -1.402665f, 1.101824f, 0.4248552f, -2.63849f, 1.160408f, 2.130142f, 0.3172536f, 1.109406f, 0.9979748f, 0.2864983f, 0.00849107f, -2.00572f, 1.178588f, -0.3127078f, -1.662103f, -1.043834f, 1.065703f, -0.9702578f, -0.1781971f, -1.362978f, 0.4443011f, -1.050083f, 0.6755545f, -1.088875f }); + var expected = Matrix.Build.Dense(batchSize, fanIn, new float[] { 0.408388f, -0.3281617f, -0.163763f, -0.7540793f, -0.8690567f, -0.8093507f, 0.2888344f, -1.777985f, -2.136633f, 2.92046f, -2.021355f, -0.4799407f, -0.6079422f, 0.5664175f, 1.640147f, 0.2616988f, -0.4687745f, -0.7903177f, 1.407904f, 0.1495381f, -1.212453f, 0.6085976f, -0.7663184f, -0.05670342f, 1.895431f, -0.6066797f, -0.2541801f, -0.01155096f, 1.438064f, -1.349128f, 1.942754f, 0.5057944f, -1.907569f, -0.5227588f, 0.5727027f, -1.167249f, 0.2078037f, 2.980192f, 0.4892522f, -0.6720377f, 0.9384909f, -0.9973568f, 0.5546624f, 1.710745f, 1.995577f, -0.734176f, -2.817736f, -0.8027026f, -0.7883626f, -1.275902f, -0.5054669f, 0.3228757f, 3.105314f, -0.3089013f, 1.549119f, -0.5383296f, 1.401819f, 1.837471f, 0.1251182f, -0.7002729f, 0.07180786f, -0.9396007f, 0.6037194f, -0.7305622f, 1.063156f, 4.591741f, 0.4193244f, -1.031005f, -3.045349f, 0.4254266f, 0.6900162f, -2.136511f, -1.578628f, 0.7839373f, 1.781849f, 0.1622419f, -0.6845301f, -1.676224f, 1.028266f, 0.9345228f, 0.789884f, 1.158841f, 1.703116f, -0.8997472f, 
-1.423375f, -0.1056926f, -0.08005979f, 1.399474f, -0.05612089f, -0.722365f, -0.6606446f, 0.08791012f, -1.749763f, 0.685056f, 0.3641174f, 0.2083111f, -0.5394329f, 1.846675f, 0.5931945f, -1.26804f, -1.087396f, 0.5506561f, -1.644088f, -0.8753259f, -1.839462f, 0.5598704f, -2.054844f, 1.20434f, -3.263947f, 1.221963f, -0.5145022f, -1.402665f, 1.101824f, 0.4248552f, -2.63849f, 1.160408f, 2.130142f, 0.3172536f, 1.109406f, 0.9979748f, 0.2864983f, 0.00849107f, -2.00572f, 1.178588f, -0.3127078f, -1.662103f, -1.043834f, 1.065703f, -0.9702578f, -0.1781971f, -1.362978f, 0.4443011f, -1.050083f, 0.6755545f, -1.088875f }); MatrixAsserts.AreEqual(expected, actual); } @@ -102,10 +102,10 @@ public void ConvUtils_ReshapeConvolutionsToRowMajor() var inputHeight = 3; var inputDepth = 3; - var filterGridWidth = ConvUtils.GetFilterGridLength(inputWidth, filterWidth, stride, + var filterGridWidth = ConvUtils.GetFilterGridLength(inputWidth, filterWidth, stride, padding, BorderMode.Valid); - var filterGridHeight = ConvUtils.GetFilterGridLength(inputHeight, filterHeight, stride, + var filterGridHeight = ConvUtils.GetFilterGridLength(inputHeight, filterHeight, stride, padding, BorderMode.Valid); var k = filterDepth; @@ -115,10 +115,10 @@ public void ConvUtils_ReshapeConvolutionsToRowMajor() var convolutedInput = Matrix.Build.Dense(k, npq, new float[] { -6.260461f, 87.38299f, -7.173417f, 94.47046f, -8.999331f, 108.6454f, -9.912288f, 115.7329f, -6.260461f, 87.38299f, -7.173417f, 94.47046f, -8.999331f, 108.6454f, -9.912288f, 115.7329f, -6.260461f, 87.38299f, -7.173417f, 94.47046f, -8.999331f, 108.6454f, -9.912288f, 115.7329f, -6.260461f, 87.38299f, -7.173417f, 94.47046f, -8.999331f, 108.6454f, -9.912288f, 115.7329f, -6.260461f, 87.38299f, -7.173417f, 94.47046f, -8.999331f, 108.6454f, -9.912288f, 115.7329f }); var actual = Matrix.Build.Dense(batchSize, k * filterGridWidth * filterGridHeight); - ConvUtils.ReshapeConvolutionsToRowMajor(convolutedInput, inputDepth, inputHeight, inputWidth, + ConvUtils.ReshapeConvolutionsToRowMajor(convolutedInput, inputDepth, inputHeight, inputWidth, filterHeight, filterWidth, padding, padding, stride, stride, BorderMode.Valid, actual); - var expected = Matrix.Build.Dense(batchSize, k * filterGridWidth * filterGridHeight, new float[] { -6.260461f, -6.260461f, -6.260461f, -6.260461f, -6.260461f, -7.173417f, -7.173417f, -7.173417f, -7.173417f, -7.173417f, -8.999331f, -8.999331f, -8.999331f, -8.999331f, -8.999331f, -9.912288f, -9.912288f, -9.912288f, -9.912288f, -9.912288f, 87.38299f, 87.38299f, 87.38299f, 87.38299f, 87.38299f, 94.47046f, 94.47046f, 94.47046f, 94.47046f, 94.47046f, 108.6454f, 108.6454f, 108.6454f, 108.6454f, 108.6454f, 115.7329f, 115.7329f, 115.7329f, 115.7329f, 115.7329f }); + var expected = Matrix.Build.Dense(batchSize, k * filterGridWidth * filterGridHeight, new float[] { -6.260461f, -6.260461f, -6.260461f, -6.260461f, -6.260461f, -7.173417f, -7.173417f, -7.173417f, -7.173417f, -7.173417f, -8.999331f, -8.999331f, -8.999331f, -8.999331f, -8.999331f, -9.912288f, -9.912288f, -9.912288f, -9.912288f, -9.912288f, 87.38299f, 87.38299f, 87.38299f, 87.38299f, 87.38299f, 94.47046f, 94.47046f, 94.47046f, 94.47046f, 94.47046f, 108.6454f, 108.6454f, 108.6454f, 108.6454f, 108.6454f, 115.7329f, 115.7329f, 115.7329f, 115.7329f, 115.7329f }); MatrixAsserts.AreEqual(expected, actual); } @@ -138,10 +138,10 @@ public void ConvUtils_ReshapeRowMajorToConvolutionLayout() var inputHeight = 3; var inputDepth = 3; - var filterGridWidth = ConvUtils.GetFilterGridLength(inputWidth, filterWidth, stride, + 
var filterGridWidth = ConvUtils.GetFilterGridLength(inputWidth, filterWidth, stride, padding, BorderMode.Valid); - var filterGridHeight = ConvUtils.GetFilterGridLength(inputHeight, filterHeight, stride, + var filterGridHeight = ConvUtils.GetFilterGridLength(inputHeight, filterHeight, stride, padding, BorderMode.Valid); var k = filterDepth; @@ -151,7 +151,7 @@ public void ConvUtils_ReshapeRowMajorToConvolutionLayout() var rowMajor = Matrix.Build.Dense(batchSize, k * filterGridWidth * filterGridHeight, new float[] { -6.260461f, -6.260461f, -6.260461f, -6.260461f, -6.260461f, -7.173417f, -7.173417f, -7.173417f, -7.173417f, -7.173417f, -8.999331f, -8.999331f, -8.999331f, -8.999331f, -8.999331f, -9.912288f, -9.912288f, -9.912288f, -9.912288f, -9.912288f, 87.38299f, 87.38299f, 87.38299f, 87.38299f, 87.38299f, 94.47046f, 94.47046f, 94.47046f, 94.47046f, 94.47046f, 108.6454f, 108.6454f, 108.6454f, 108.6454f, 108.6454f, 115.7329f, 115.7329f, 115.7329f, 115.7329f, 115.7329f }); var actual = Matrix.Build.Dense(k, npq); - ConvUtils.ReshapeRowMajorToConvolutionLayout(rowMajor, inputDepth, inputHeight, inputWidth, + ConvUtils.ReshapeRowMajorToConvolutionLayout(rowMajor, inputDepth, inputHeight, inputWidth, filterHeight, filterWidth, padding, padding, stride, stride, BorderMode.Valid, actual); var expected = Matrix.Build.Dense(k, npq, new float[] { -6.260461f, 87.38299f, -7.173417f, 94.47046f, -8.999331f, 108.6454f, -9.912288f, 115.7329f, -6.260461f, 87.38299f, -7.173417f, 94.47046f, -8.999331f, 108.6454f, -9.912288f, 115.7329f, -6.260461f, 87.38299f, -7.173417f, 94.47046f, -8.999331f, 108.6454f, -9.912288f, 115.7329f, -6.260461f, 87.38299f, -7.173417f, 94.47046f, -8.999331f, 108.6454f, -9.912288f, 115.7329f, -6.260461f, 87.38299f, -7.173417f, 94.47046f, -8.999331f, 108.6454f, -9.912288f, 115.7329f }); diff --git a/src/SharpLearning.Neural.Test/GradientCheckerTool.cs b/src/SharpLearning.Neural.Test/GradientCheckerTool.cs index d4bc53c3..c92a941d 100644 --- a/src/SharpLearning.Neural.Test/GradientCheckerTool.cs +++ b/src/SharpLearning.Neural.Test/GradientCheckerTool.cs @@ -10,11 +10,11 @@ namespace SharpLearning.Neural.Test { public static class GradientCheckTools { - public static void CheckLayer(ILayer layer, int fanInWidth, int fanInHeight, int fanInDepth, + public static void CheckLayer(ILayer layer, int fanInWidth, int fanInHeight, int fanInDepth, int batchSize, float epsilon, Random random) { var accuracyCondition = 1e-2; - layer.Initialize(fanInWidth, fanInHeight, fanInDepth, batchSize, + layer.Initialize(fanInWidth, fanInHeight, fanInDepth, batchSize, Initialization.GlorotUniform, random); var fanIn = fanInWidth * fanInHeight * fanInDepth; diff --git a/src/SharpLearning.Neural.Test/Layers/BatchNormalizationLayerTest.cs b/src/SharpLearning.Neural.Test/Layers/BatchNormalizationLayerTest.cs index b629b61f..a9277945 100644 --- a/src/SharpLearning.Neural.Test/Layers/BatchNormalizationLayerTest.cs +++ b/src/SharpLearning.Neural.Test/Layers/BatchNormalizationLayerTest.cs @@ -81,10 +81,10 @@ public void BatchNormalizationLayer_Forward_SpatialInput() var inputHeight = 3; var inputDepth = 3; - var filterGridWidth = ConvUtils.GetFilterGridLength(inputWidth, filterWidth, + var filterGridWidth = ConvUtils.GetFilterGridLength(inputWidth, filterWidth, stride, padding, BorderMode.Valid); - var filterGridHeight = ConvUtils.GetFilterGridLength(inputHeight, filterHeight, + var filterGridHeight = ConvUtils.GetFilterGridLength(inputHeight, filterHeight, stride, padding, BorderMode.Valid); var k = filterDepth; @@ 
-95,14 +95,14 @@ public void BatchNormalizationLayer_Forward_SpatialInput() Trace.WriteLine(convInput); - ConvUtils.ReshapeConvolutionsToRowMajor(convInput, inputDepth, inputHeight, inputWidth, - filterHeight, filterWidth, padding, padding, stride, stride, + ConvUtils.ReshapeConvolutionsToRowMajor(convInput, inputDepth, inputHeight, inputWidth, + filterHeight, filterWidth, padding, padding, stride, stride, BorderMode.Valid, rowWiseInput); Trace.WriteLine(rowWiseInput); var sut = new BatchNormalizationLayer(); - sut.Initialize(filterGridWidth, filterGridHeight, filterDepth, batchSize, + sut.Initialize(filterGridWidth, filterGridHeight, filterDepth, batchSize, Initialization.GlorotUniform, new Random(232)); var actual = sut.Forward(rowWiseInput); diff --git a/src/SharpLearning.Neural.Test/Layers/Conv2DLayerTest.cs b/src/SharpLearning.Neural.Test/Layers/Conv2DLayerTest.cs index 3f045052..a793ce19 100644 --- a/src/SharpLearning.Neural.Test/Layers/Conv2DLayerTest.cs +++ b/src/SharpLearning.Neural.Test/Layers/Conv2DLayerTest.cs @@ -54,7 +54,7 @@ public void Conv2DLayer_CopyLayerForPredictionModel() public void Conv2DLayer_Initialize() { var batchSize = 1; - + var sut = new Conv2DLayer(2, 2, 2); sut.Initialize(3, 3, 1, batchSize, Initialization.GlorotUniform, new Random(232)); } @@ -159,7 +159,7 @@ public void Conv2DLayer_GradientCheck_BatchSize_1() var batchSize = 1; var sut = new Conv2DLayer(2, 2, 2, 1, 0, 0, Activation.Undefined); - GradientCheckTools.CheckLayer(sut, inputWidth, inputHeight, inputDepth, batchSize, + GradientCheckTools.CheckLayer(sut, inputWidth, inputHeight, inputDepth, batchSize, 1e-4f, new Random(21)); } @@ -172,7 +172,7 @@ public void Conv2DLayer_GradientCheck_BatchSize_11() var batchSize = 11; var sut = new Conv2DLayer(2, 2, 2, 1, 0, 0, Activation.Undefined); - GradientCheckTools.CheckLayer(sut, inputWidth, inputHeight, inputDepth, batchSize, + GradientCheckTools.CheckLayer(sut, inputWidth, inputHeight, inputDepth, batchSize, 1e-4f, new Random(21)); } } diff --git a/src/SharpLearning.Neural.Test/Layers/DenseLayerTest.cs b/src/SharpLearning.Neural.Test/Layers/DenseLayerTest.cs index 77ef614a..58147124 100644 --- a/src/SharpLearning.Neural.Test/Layers/DenseLayerTest.cs +++ b/src/SharpLearning.Neural.Test/Layers/DenseLayerTest.cs @@ -123,7 +123,7 @@ public void DenseLayer_MultipleBackwardsPasses() var delta = Matrix.Build.Dense(batchSize, neuronCount, 1.0f); var expected = Matrix.Build.Dense(batchSize, fanIn); sut.Backward(delta).CopyTo(expected); - + for (int i = 0; i < 20; i++) { var actual = sut.Backward(delta); diff --git a/src/SharpLearning.Neural.Test/Layers/MaxPool2DLayerTest.cs b/src/SharpLearning.Neural.Test/Layers/MaxPool2DLayerTest.cs index c863b0d6..c5f136aa 100644 --- a/src/SharpLearning.Neural.Test/Layers/MaxPool2DLayerTest.cs +++ b/src/SharpLearning.Neural.Test/Layers/MaxPool2DLayerTest.cs @@ -62,7 +62,7 @@ public void MaxPool2DLayer_Forward() const int width = 2; const int height = 2; var sut = new MaxPool2DLayer(width, height); - sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, + sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, Initialization.GlorotUniform, random); var input = Matrix.Build.Random(batchSize, fanIn, random.Next()); @@ -90,7 +90,7 @@ public void MaxPool2DLayer_Forward_2() const int width = 2; const int height = 2; var sut = new MaxPool2DLayer(width, height); - sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, + sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, Initialization.GlorotUniform, 
new Random(232)); var inputData = new float[] { 3, 0, 0, 6, 0, 2, 3, 0, 0, 8, 10, 0, 4, 6, 0, 7 }; @@ -117,7 +117,7 @@ public void MaxPool2DLayer_Forward_3() const int width = 2; const int height = 2; var sut = new MaxPool2DLayer(width, height); - sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, + sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, Initialization.GlorotUniform, new Random(232)); var inputData = new float[] { 3, 0, 0, 6, 0, 2, 3, 0, 0, 8, 10, 0, 4, 6, 0, 7, 4, 0, 2, 0, 0, 8, 3, 5, 10, 0, 12, 0, 6, 5, 3, 2 }; @@ -145,7 +145,7 @@ public void MaxPool2DLayer_Backward() const int width = 2; const int height = 2; var sut = new MaxPool2DLayer(width, height); - sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, + sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, Initialization.GlorotUniform, random); var input = Matrix.Build.Random(batchSize, fanIn, random.Next()); @@ -177,7 +177,7 @@ public void MaxPool2DLayer_Backward_2() const int width = 2; const int height = 2; var sut = new MaxPool2DLayer(width, height); - sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, + sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, Initialization.GlorotUniform, random); var inputData = new float[] { 3, 0, 0, 6, 0, 2, 3, 0, 0, 8, 10, 0, 4, 6, 0, 7 }; @@ -187,7 +187,7 @@ public void MaxPool2DLayer_Backward_2() var delta = Matrix.Build.Dense(batchSize, fanOut, 1); var actual = sut.Backward(delta); - + var expected = Matrix.Build.Dense(batchSize, fanIn, new float[] { 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0 }); MatrixAsserts.AreEqual(expected, actual); } @@ -208,7 +208,7 @@ public void MaxPool2DLayer_Backward_3() const int width = 2; const int height = 2; var sut = new MaxPool2DLayer(width, height); - sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, + sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, Initialization.GlorotUniform, random); var inputData = new float[] { 3, 0, 0, 6, 0, 2, 3, 0, 0, 8, 10, 0, 4, 6, 0, 7, 4, 0, 2, 0, 0, 8, 3, 5, 10, 0, 12, 0, 6, 5, 3, 2 }; @@ -239,7 +239,7 @@ public void MaxPool2DLayer_MultipleForwardsPasses() const int width = 2; const int height = 2; var sut = new MaxPool2DLayer(width, height); - sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, + sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, Initialization.GlorotUniform, random); var input = Matrix.Build.Random(batchSize, fanIn, random.Next()); @@ -270,7 +270,7 @@ public void MaxPool2DLayer_MultipleBackwardsPasses() const int width = 2; const int height = 2; var sut = new MaxPool2DLayer(width, height); - sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, + sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, Initialization.GlorotUniform, random); var input = Matrix.Build.Random(batchSize, fanIn, random.Next()); @@ -302,7 +302,7 @@ public void MaxPool2DLayer_GradientCheck_BatchSize_1() const int width = 2; const int height = 2; var sut = new MaxPool2DLayer(width, height); - GradientCheckTools.CheckLayer(sut, inputWidth, inputHeight, inputDepth, batchSize, + GradientCheckTools.CheckLayer(sut, inputWidth, inputHeight, inputDepth, batchSize, 1e-4f, new Random(21)); } @@ -321,7 +321,7 @@ public void MaxPool2DLayer_GradientCheck_BatchSize_10() const int width = 2; const int height = 2; var sut = new MaxPool2DLayer(width, height); - GradientCheckTools.CheckLayer(sut, inputWidth, inputHeight, inputDepth, batchSize, + GradientCheckTools.CheckLayer(sut, inputWidth, 
inputHeight, inputDepth, batchSize, 1e-4f, new Random(21)); } } diff --git a/src/SharpLearning.Neural.Test/Learners/RegressionNeuralNetLearnerTest.cs b/src/SharpLearning.Neural.Test/Learners/RegressionNeuralNetLearnerTest.cs index f25fb8b7..a744b2c9 100644 --- a/src/SharpLearning.Neural.Test/Learners/RegressionNeuralNetLearnerTest.cs +++ b/src/SharpLearning.Neural.Test/Learners/RegressionNeuralNetLearnerTest.cs @@ -20,7 +20,7 @@ public void RegressionNeuralNetLearner_Learn() var random = new Random(32); - var (observations, targets) = CreateData(numberOfObservations, + var (observations, targets) = CreateData(numberOfObservations, numberOfFeatures, random); var net = new NeuralNet(); @@ -47,10 +47,10 @@ public void RegressionNeuralNetLearner_Learn_Early_Stopping() var random = new Random(32); - var (observations, targets) = CreateData(numberOfObservations, + var (observations, targets) = CreateData(numberOfObservations, numberOfFeatures, random); - var (validationObservations, validationTargets) = CreateData(numberOfObservations, + var (validationObservations, validationTargets) = CreateData(numberOfObservations, numberOfFeatures, random); var net = new NeuralNet(); diff --git a/src/SharpLearning.Neural.Test/Loss/SquareLossTest.cs b/src/SharpLearning.Neural.Test/Loss/SquareLossTest.cs index b3798bb6..7fabafce 100644 --- a/src/SharpLearning.Neural.Test/Loss/SquareLossTest.cs +++ b/src/SharpLearning.Neural.Test/Loss/SquareLossTest.cs @@ -34,10 +34,10 @@ public void SquareLoss_Loss_1() [TestMethod] public void SquareLoss_Loss_Multi_Dimensional() { - var targets = Matrix.Build.Dense(3, 3, + var targets = Matrix.Build.Dense(3, 3, new float[] { 1.0f, 2.3f, 3.1f, 4.4f, 5.8f, 1.0f, 3.5f, 2f, 5f }); - var predictions = Matrix.Build.Dense(3, 3, + var predictions = Matrix.Build.Dense(3, 3, new float[] { 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 1.0f, 3.7f, 1.6f, 5.4f }); var sut = new SquareLoss(); diff --git a/src/SharpLearning.Neural.Test/MathNetExtensionsTest.cs b/src/SharpLearning.Neural.Test/MathNetExtensionsTest.cs index 1aafc144..58834821 100644 --- a/src/SharpLearning.Neural.Test/MathNetExtensionsTest.cs +++ b/src/SharpLearning.Neural.Test/MathNetExtensionsTest.cs @@ -76,7 +76,7 @@ public void MathNetExtensions_ColumnWiseMean() { var matrix = Matrix.Build.Dense(3, 3, new float[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 }); var actual = Vector.Build.Dense(3); - + matrix.ColumnWiseMean(actual); Trace.WriteLine(string.Join(", ", actual)); @@ -146,7 +146,7 @@ public void MathNetExtensions_Matrix_Data_Modify() var data = matrix.Data(); data[changeIndex] = value; - + var expected = new float[] { 1, 2, value, 4, 5, 6, 7, 8 }; var actual = matrix.Data(); diff --git a/src/SharpLearning.Neural.Test/Optimizers/NeuralNetOptimizerTest.cs b/src/SharpLearning.Neural.Test/Optimizers/NeuralNetOptimizerTest.cs index fa921f8a..e95af5fc 100644 --- a/src/SharpLearning.Neural.Test/Optimizers/NeuralNetOptimizerTest.cs +++ b/src/SharpLearning.Neural.Test/Optimizers/NeuralNetOptimizerTest.cs @@ -22,7 +22,7 @@ public void NeuralNetOptimizer_Reset_Does_Not_Throw() var sut = new NeuralNetOptimizer(0.001, 10, optimizerMethod: optimizer); sut.UpdateParameters(parametersAndGradients); sut.Reset(); - } + } } } } diff --git a/src/SharpLearning.Neural.Test/TargetEncoders/OneOfNTargetEncoderTest.cs b/src/SharpLearning.Neural.Test/TargetEncoders/OneOfNTargetEncoderTest.cs index 827fd681..9485a3a9 100644 --- a/src/SharpLearning.Neural.Test/TargetEncoders/OneOfNTargetEncoderTest.cs +++ 
b/src/SharpLearning.Neural.Test/TargetEncoders/OneOfNTargetEncoderTest.cs @@ -16,7 +16,7 @@ public void OneOfNTargetEncoder_Encode() var actual = sut.Encode(targets); var expected = Matrix.Build.Dense(7, 3, new float[] { 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1 }); - + Trace.WriteLine(expected.ToString()); Assert.AreEqual(expected, actual); } diff --git a/src/SharpLearning.Neural/Activations/SigmoidActivation.cs b/src/SharpLearning.Neural/Activations/SigmoidActivation.cs index 346b0d32..e9e0f6c4 100644 --- a/src/SharpLearning.Neural/Activations/SigmoidActivation.cs +++ b/src/SharpLearning.Neural/Activations/SigmoidActivation.cs @@ -40,8 +40,8 @@ float Sigmoid(float input) //this input should be already activated input = sigmmoid(x) float Derivative(float input) { - var de = input * (1- input); - + var de = input * (1 - input); + return de == 0 ? 1 : de; //this avoid the 0 multiplication when dx is 0. } } diff --git a/src/SharpLearning.Neural/ConvUtils.cs b/src/SharpLearning.Neural/ConvUtils.cs index 8addeab6..0d34494e 100644 --- a/src/SharpLearning.Neural/ConvUtils.cs +++ b/src/SharpLearning.Neural/ConvUtils.cs @@ -41,12 +41,12 @@ public static int PaddingFromBorderMode(int filterSize, BorderMode borderMode) /// /// /// - public static int GetFilterGridLength(int inputLength, int filterSize, + public static int GetFilterGridLength(int inputLength, int filterSize, int stride, int padding, BorderMode borderMode) { // BorderMode.Same pads with half the filter size on both sides (one less on // the second side for an even filter size) - if (borderMode == BorderMode.Same && filterSize % 2 == 0) + if (borderMode == BorderMode.Same && filterSize % 2 == 0) { return (int)Math.Floor((inputLength + (padding + padding - 1) - filterSize) / (double)stride + 1); } @@ -66,7 +66,7 @@ public static int GetFilterGridLength(int inputLength, int filterSize, /// /// /// - public static float GetValueFromIndex(this Matrix m, int n, int c, int h, int w, + public static float GetValueFromIndex(this Matrix m, int n, int c, int h, int w, int depth, int width, int height) { var indexInBatchItem = c * width * height + h * width + w; @@ -86,7 +86,7 @@ public static float GetValueFromIndex(this Matrix m, int n, int c, int h, /// /// /// - public static int GetDataIndex(this Matrix m, int n, int c, int h, int w, + public static int GetDataIndex(this Matrix m, int n, int c, int h, int w, int depth, int width, int height) { var indexInBatchItem = c * width * height + h * width + w; @@ -110,7 +110,7 @@ public static int GetDataIndex(this Matrix m, int n, int c, int h, int w, /// /// public static void Batch_Im2Col(Matrix data_im, int channels, int height, int width, - int kernel_h, int kernel_w, int pad_h, int pad_w, int stride_h, int stride_w, + int kernel_h, int kernel_w, int pad_h, int pad_w, int stride_h, int stride_w, BorderMode borderMode, Matrix data_col) { int height_col = GetFilterGridLength(height, kernel_h, stride_h, pad_h, borderMode); @@ -142,7 +142,7 @@ public static void Batch_Im2Col(Matrix data_im, int channels, int height, int w_pad = w * stride_w - pad_w + w_offset; var outColIndex = batchRowOffSet + rowOffSet + w; - var outputIndex = outColIndex * data_col.RowCount + c; + var outputIndex = outColIndex * data_col.RowCount + c; var inputColIndex = (cImRowOffSet + h_pad) * width + w_pad; var inputIndex = inputColIndex * batchSize + batchItem; @@ -176,9 +176,9 @@ public static void Batch_Im2Col(Matrix data_im, int channels, int height, /// /// /// /// - public static void 
ReshapeConvolutionsToRowMajor(Matrix convoluted, + public static void ReshapeConvolutionsToRowMajor(Matrix convoluted, int channels, int height, int width, - int kernel_h, int kernel_w, int pad_h, int pad_w, int stride_h, int stride_w, + int kernel_h, int kernel_w, int pad_h, int pad_w, int stride_h, int stride_w, BorderMode borderMode, Matrix data_convolutedRowMajor) { int height_col = GetFilterGridLength(height, kernel_h, stride_h, pad_h, borderMode); @@ -228,9 +228,9 @@ public static void ReshapeConvolutionsToRowMajor(Matrix convoluted, /// /// /// - public static void ReshapeRowMajorToConvolutionLayout(Matrix data_convolutedRowMajor, + public static void ReshapeRowMajorToConvolutionLayout(Matrix data_convolutedRowMajor, int channels, int height, int width, - int kernel_h, int kernel_w, int pad_h, int pad_w, int stride_h, int stride_w, + int kernel_h, int kernel_w, int pad_h, int pad_w, int stride_h, int stride_w, BorderMode borderMode, Matrix convoluted) { int height_col = GetFilterGridLength(height, kernel_h, stride_h, pad_h, borderMode); @@ -281,7 +281,7 @@ public static void ReshapeRowMajorToConvolutionLayout(Matrix data_convolu /// /// public static void Batch_Col2Im(Matrix data_col, int channels, int height, int width, - int patch_h, int patch_w, int pad_h, int pad_w, int stride_h, int stride_w, + int patch_h, int patch_w, int pad_h, int pad_w, int stride_h, int stride_w, BorderMode borderMode, Matrix data_im) { int height_col = GetFilterGridLength(height, patch_h, stride_h, pad_h, borderMode); @@ -310,7 +310,7 @@ public static void Batch_Col2Im(Matrix data_col, int channels, int height for (int w = 0; w < width_col; ++w) { - + int w_pad = w * stride_w - pad_w + w_offset; if (h_pad >= 0 && h_pad < height && w_pad >= 0 && w_pad < width) { diff --git a/src/SharpLearning.Neural/Initializations/WeightInitialization.cs b/src/SharpLearning.Neural/Initializations/WeightInitialization.cs index b8576788..c7744d4e 100644 --- a/src/SharpLearning.Neural/Initializations/WeightInitialization.cs +++ b/src/SharpLearning.Neural/Initializations/WeightInitialization.cs @@ -52,7 +52,7 @@ public static FanInFanOut GetFans(ILayer layer, int inputWidth, int inputHeight, /// /// /// - public static IContinuousDistribution GetWeightDistribution(Initialization initialization, + public static IContinuousDistribution GetWeightDistribution(Initialization initialization, FanInFanOut fans, Random random) { var bound = InitializationBound(initialization, fans); diff --git a/src/SharpLearning.Neural/Layers/ActivationLayer.cs b/src/SharpLearning.Neural/Layers/ActivationLayer.cs index e71700b3..cedf248c 100644 --- a/src/SharpLearning.Neural/Layers/ActivationLayer.cs +++ b/src/SharpLearning.Neural/Layers/ActivationLayer.cs @@ -110,7 +110,7 @@ public Matrix Forward(Matrix input) /// /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, + public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { Width = inputWidth; @@ -147,7 +147,7 @@ public void CopyLayerForPredictionModel(List layers) copy.Depth = this.Depth; var fanOut = Width * Height * Depth; - + copy.OutputActivations = Matrix.Build.Dense(batchSize, fanOut); copy.ActivationDerivative = Matrix.Build.Dense(batchSize, fanOut); diff --git a/src/SharpLearning.Neural/Layers/BatchNormalizationLayer.cs b/src/SharpLearning.Neural/Layers/BatchNormalizationLayer.cs index 640bc608..e4454ce4 100644 --- 
a/src/SharpLearning.Neural/Layers/BatchNormalizationLayer.cs +++ b/src/SharpLearning.Neural/Layers/BatchNormalizationLayer.cs @@ -239,7 +239,7 @@ public void AddParameresAndGradients(List parametersAndG /// /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, + public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { Width = inputWidth; diff --git a/src/SharpLearning.Neural/Layers/Conv2DLayer.cs b/src/SharpLearning.Neural/Layers/Conv2DLayer.cs index e363ea14..688e65e9 100644 --- a/src/SharpLearning.Neural/Layers/Conv2DLayer.cs +++ b/src/SharpLearning.Neural/Layers/Conv2DLayer.cs @@ -129,7 +129,7 @@ public sealed class Conv2DLayer : ILayer, IBatchNormalizable /// Zero padding for the width dimension (default is 0) /// Zero padding for the height dimension (default is 0) /// Type of activation function used (default is Relu) - public Conv2DLayer(int filterWidth, int filterHeight, int filterCount, int stride, + public Conv2DLayer(int filterWidth, int filterHeight, int filterCount, int stride, int padWidth, int padHeight, Activation activation = Activation.Relu) { if (filterWidth < 1) { throw new ArgumentException("filterWidth is less than 1: " + filterWidth); } @@ -163,7 +163,7 @@ public Conv2DLayer(int filterWidth, int filterHeight, int filterCount, int strid /// Border mode of the convolutional operation. /// This will set the width and height padding automatically based on the selected border mode: Valid, Same or Full (default is Valid) /// Type of activation function used (default is Relu) - public Conv2DLayer(int filterWidth, int filterHeight, int filterCount, int stride = 1, + public Conv2DLayer(int filterWidth, int filterHeight, int filterCount, int stride = 1, BorderMode borderMode = BorderMode.Valid, Activation activation = Activation.Relu) : this(filterWidth, filterHeight, filterCount, stride, ConvUtils.PaddingFromBorderMode(filterWidth, borderMode), @@ -195,7 +195,7 @@ public Matrix Backward(Matrix delta) // convert back to original layout m_delta.Clear(); ConvUtils.Batch_Col2Im(Im2Cols, InputDepth, InputHeight, InputWidth, - FilterHeight, FilterWidth, m_padHeight, m_padWidth, + FilterHeight, FilterWidth, m_padHeight, m_padWidth, m_stride, m_stride, BorderMode, m_delta); return m_delta; @@ -212,7 +212,7 @@ public Matrix Forward(Matrix input) // Arrange input item for GEMM version of convolution. 
ConvUtils.Batch_Im2Col(m_inputActivations, InputDepth, InputHeight, InputWidth, - FilterWidth, FilterHeight, m_padHeight, m_padWidth, + FilterWidth, FilterHeight, m_padHeight, m_padWidth, m_stride, m_stride, BorderMode, Im2Cols); // matrix multiplication for convolution @@ -221,7 +221,7 @@ public Matrix Forward(Matrix input) // Return the convolved data to row major and copy data to output ConvUtils.ReshapeConvolutionsToRowMajor(Conv, InputDepth, InputHeight, InputWidth, - FilterWidth, FilterHeight, m_padHeight, m_padWidth, + FilterWidth, FilterHeight, m_padHeight, m_padWidth, m_stride, m_stride, BorderMode, OutputActivations); return OutputActivations; @@ -237,16 +237,16 @@ public Matrix Forward(Matrix input) /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, + public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { InputHeight = inputHeight; InputWidth = inputWidth; - InputDepth = inputDepth; + InputDepth = inputDepth; - var filterGridWidth = ConvUtils.GetFilterGridLength(InputWidth, FilterWidth, + var filterGridWidth = ConvUtils.GetFilterGridLength(InputWidth, FilterWidth, m_stride, m_padWidth, BorderMode); - var filterGridHeight = ConvUtils.GetFilterGridLength(InputHeight, FilterHeight, + var filterGridHeight = ConvUtils.GetFilterGridLength(InputHeight, FilterHeight, m_stride, m_padHeight, BorderMode); // Calculations of dimensions based on: @@ -269,7 +269,7 @@ public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batc Im2Cols = Matrix.Build.Dense(filterCubeSize, filterGridSize * batchSize); Conv = Matrix.Build.Dense(FilterCount, filterGridSize * batchSize); - + OutputActivations = Matrix.Build.Dense(batchSize, FilterCount * filterGridSize); m_deltaInReshape = Matrix.Build.Dense(FilterCount, filterGridSize * batchSize); @@ -296,16 +296,16 @@ public void AddParameresAndGradients(List parametersAndG public void CopyLayerForPredictionModel(List layers) { var batchSize = 1; // prediction time only uses 1 item at a time. - var copy = new Conv2DLayer(FilterWidth, FilterHeight, FilterCount, + var copy = new Conv2DLayer(FilterWidth, FilterHeight, FilterCount, m_stride, m_padWidth, m_padHeight, ActivationFunc); copy.InputDepth = InputDepth; copy.InputWidth = InputWidth; copy.InputHeight = InputHeight; - var filterGridWidth = ConvUtils.GetFilterGridLength(InputWidth, FilterWidth, + var filterGridWidth = ConvUtils.GetFilterGridLength(InputWidth, FilterWidth, m_stride, m_padWidth, BorderMode); - var filterGridHeight = ConvUtils.GetFilterGridLength(InputHeight, FilterHeight, + var filterGridHeight = ConvUtils.GetFilterGridLength(InputHeight, FilterHeight, m_stride, m_padHeight, BorderMode); copy.BorderMode = BorderMode; @@ -319,7 +319,7 @@ public void CopyLayerForPredictionModel(List layers) copy.Depth = this.Depth; var fanOut = Width * Height * Depth; - + copy.Weights = Matrix.Build.Dense(Weights.RowCount, Weights.ColumnCount); copy.Bias = Vector.Build.Dense(Bias.Count); Array.Copy(Weights.Data(), copy.Weights.Data(), Weights.Data().Length); diff --git a/src/SharpLearning.Neural/Layers/DenseLayer.cs b/src/SharpLearning.Neural/Layers/DenseLayer.cs index bb4111d0..6ffdd988 100644 --- a/src/SharpLearning.Neural/Layers/DenseLayer.cs +++ b/src/SharpLearning.Neural/Layers/DenseLayer.cs @@ -56,7 +56,7 @@ public sealed class DenseLayer : ILayer, IBatchNormalizable /// Bias gradients. 
/// public Vector BiasGradients; - + /// /// Output activation /// @@ -88,10 +88,10 @@ public Matrix Backward(Matrix delta) // calculate gradients m_inputActivations.TransposeThisAndMultiply(delta, WeightsGradients); delta.SumColumns(BiasGradients); - + // calculate delta for next layer delta.TransposeAndMultiply(Weights, m_delta); - + return m_delta; } @@ -119,12 +119,12 @@ public Matrix Forward(Matrix input) /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, + public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { var fans = WeightInitialization.GetFans(this, inputWidth, inputHeight, inputDepth); var distribution = WeightInitialization.GetWeightDistribution(initializtion, fans, random); - + Weights = Matrix.Build.Random(fans.FanIn, fans.FanOut, distribution); Bias = Vector.Build.Dense(fans.FanOut, 0.0f); diff --git a/src/SharpLearning.Neural/Layers/DropoutLayer.cs b/src/SharpLearning.Neural/Layers/DropoutLayer.cs index bc0e1cd6..c3b0f05d 100644 --- a/src/SharpLearning.Neural/Layers/DropoutLayer.cs +++ b/src/SharpLearning.Neural/Layers/DropoutLayer.cs @@ -91,7 +91,7 @@ public Matrix Forward(Matrix input) /// /// Initialization type for layers with weights /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, + public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { m_random = new Random(random.Next()); diff --git a/src/SharpLearning.Neural/Layers/ILayer.cs b/src/SharpLearning.Neural/Layers/ILayer.cs index 681aa4f2..7a79a598 100644 --- a/src/SharpLearning.Neural/Layers/ILayer.cs +++ b/src/SharpLearning.Neural/Layers/ILayer.cs @@ -14,7 +14,7 @@ public interface ILayer /// /// Width of this layer /// - int Width { get; } + int Width { get; } /// /// Height of this layer @@ -53,7 +53,7 @@ public interface ILayer /// batch size /// Initialization type for layers with weights /// - void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, + void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random); /// diff --git a/src/SharpLearning.Neural/Layers/InputLayer.cs b/src/SharpLearning.Neural/Layers/InputLayer.cs index c82f9f0b..5bee66f9 100644 --- a/src/SharpLearning.Neural/Layers/InputLayer.cs +++ b/src/SharpLearning.Neural/Layers/InputLayer.cs @@ -37,7 +37,7 @@ public sealed class InputLayer : ILayer /// /// public InputLayer(int inputUnits) - :this(1, 1, inputUnits) + : this(1, 1, inputUnits) { } @@ -87,7 +87,7 @@ public Matrix Forward(Matrix input) /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, + public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { // input layer does not have anything to initialize. diff --git a/src/SharpLearning.Neural/Layers/MaxPool2DLayer.cs b/src/SharpLearning.Neural/Layers/MaxPool2DLayer.cs index e1eff310..15dd0ea5 100644 --- a/src/SharpLearning.Neural/Layers/MaxPool2DLayer.cs +++ b/src/SharpLearning.Neural/Layers/MaxPool2DLayer.cs @@ -117,7 +117,7 @@ public MaxPool2DLayer(int poolWidth, int poolHeight, int stride, int padWidth, i /// Controls the distance between each neighboring pool areas (default is 2) /// Border mode of the max pool operation. 
/// This will set the width and height padding automatically based on the selected border mode: Valid, Same or Full (default is Valid). - public MaxPool2DLayer(int poolWidth, int poolHeight, int stride = 2, + public MaxPool2DLayer(int poolWidth, int poolHeight, int stride = 2, BorderMode borderMode = BorderMode.Valid) : this(poolWidth, poolHeight, stride, ConvUtils.PaddingFromBorderMode(poolWidth, borderMode), @@ -215,12 +215,12 @@ void ForwardSingleItem(Matrix input, Matrix output, int batchItem) var outputColIndex = poolRowOffSet + pw + outputDeptOffSet; var outputIndex = outputColIndex * output.RowCount + batchItem; - outputData[outputIndex] = currentMax; + outputData[outputIndex] = currentMax; } } } } - + void BackwardSingleItem(Matrix inputGradient, Matrix outputGradient, int batchItem) { var batchSize = inputGradient.RowCount; @@ -237,7 +237,7 @@ void BackwardSingleItem(Matrix inputGradient, Matrix outputGradien var outputDeptOffSet = depth * Height * Width; var x = -this.m_padWidth; - // var y = -this.m_padHeight; + // var y = -this.m_padHeight; for (var ax = 0; ax < this.Width; x += this.m_stride, ax++) { var y = -this.m_padHeight; @@ -276,7 +276,7 @@ public void AddParameresAndGradients(List parametersAndG /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, + public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { InputWidth = inputWidth; @@ -286,10 +286,10 @@ public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batc // computed this.Depth = this.InputDepth; - this.Width = ConvUtils.GetFilterGridLength(InputWidth, m_poolWidth, + this.Width = ConvUtils.GetFilterGridLength(InputWidth, m_poolWidth, m_stride, m_padWidth, BorderMode); - this.Height = ConvUtils.GetFilterGridLength(InputHeight, m_poolHeight, + this.Height = ConvUtils.GetFilterGridLength(InputHeight, m_poolHeight, m_stride, m_padHeight, BorderMode); // store switches for x,y coordinates for where the max comes from, for each output neuron @@ -313,7 +313,7 @@ public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batc public void CopyLayerForPredictionModel(List layers) { var batchSize = 1; - var copy = new MaxPool2DLayer(m_poolWidth, m_poolHeight, + var copy = new MaxPool2DLayer(m_poolWidth, m_poolHeight, m_stride, m_padWidth, m_padHeight); copy.BorderMode = BorderMode; diff --git a/src/SharpLearning.Neural/Layers/SquaredErrorRegressionLayer.cs b/src/SharpLearning.Neural/Layers/SquaredErrorRegressionLayer.cs index 2ec7ca4c..8632dce3 100644 --- a/src/SharpLearning.Neural/Layers/SquaredErrorRegressionLayer.cs +++ b/src/SharpLearning.Neural/Layers/SquaredErrorRegressionLayer.cs @@ -95,7 +95,7 @@ public Matrix Forward(Matrix input) /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, + public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { OutputActivations = Matrix.Build.Dense(batchSize, NumberOfTargets); diff --git a/src/SharpLearning.Neural/Layers/SvmLayer.cs b/src/SharpLearning.Neural/Layers/SvmLayer.cs index a0c33928..0cd38f88 100644 --- a/src/SharpLearning.Neural/Layers/SvmLayer.cs +++ b/src/SharpLearning.Neural/Layers/SvmLayer.cs @@ -13,7 +13,7 @@ namespace SharpLearning.Neural.Layers /// This can be an advantage when the overall goal is the best possible accuracy. And probability estimates is less important. 
/// [Serializable] - public sealed class SvmLayer + public sealed class SvmLayer : ILayer , IOutputLayer , IClassificationLayer @@ -90,18 +90,18 @@ public Matrix Backward(Matrix delta) var maxTargetScore = OutputActivations.At(batchItem, maxTargetIndex); for (int i = 0; i < OutputActivations.ColumnCount; i++) { - if(i == maxTargetIndex) { continue; } + if (i == maxTargetIndex) { continue; } // The score of the target should be higher than he score of any other class, by a margin var diff = -maxTargetScore + OutputActivations.At(batchItem, i) + margin; - if(diff > 0) + if (diff > 0) { m_delta[batchItem, i] += 1; m_delta[batchItem, maxTargetIndex] -= 1; } } } - + return m_delta; } @@ -126,7 +126,7 @@ public Matrix Forward(Matrix input) /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, + public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { OutputActivations = Matrix.Build.Dense(batchSize, NumberOfClasses); diff --git a/src/SharpLearning.Neural/Learners/ClassificationNeuralNetLearner.cs b/src/SharpLearning.Neural/Learners/ClassificationNeuralNetLearner.cs index 3883fc1a..16fcc3a5 100644 --- a/src/SharpLearning.Neural/Learners/ClassificationNeuralNetLearner.cs +++ b/src/SharpLearning.Neural/Learners/ClassificationNeuralNetLearner.cs @@ -15,7 +15,7 @@ namespace SharpLearning.Neural.Learners /// ClassificationNeuralNet learner using mini-batch gradient descent. /// Several optimization methods is available through the constructor. /// - public sealed class ClassificationNeuralNetLearner + public sealed class ClassificationNeuralNetLearner : IIndexedLearner , IIndexedLearner , ILearner @@ -39,26 +39,26 @@ public sealed class ClassificationNeuralNetLearner /// Squared gradient moving average decay factor (Default is 0.95) /// Exponential decay rate for estimates of first moment vector, should be in range 0 to 1 (Default is 0.9) /// Exponential decay rate for estimates of second moment vector, should be in range 0 to 1 (Default is 0.999) - public ClassificationNeuralNetLearner(NeuralNet net, - ILoss loss, - double learningRate = 0.001, - int iterations = 100, - int batchSize = 128, + public ClassificationNeuralNetLearner(NeuralNet net, + ILoss loss, + double learningRate = 0.001, + int iterations = 100, + int batchSize = 128, double l1decay = 0, double l2decay = 0, - OptimizerMethod optimizerMethod = OptimizerMethod.RMSProp, - double momentum = 0.9, - double rho = 0.95, - double beta1 = 0.9, + OptimizerMethod optimizerMethod = OptimizerMethod.RMSProp, + double momentum = 0.9, + double rho = 0.95, + double beta1 = 0.9, double beta2 = 0.999) { if (!(net.Layers.Last() is IClassificationLayer)) { - throw new ArgumentException("Last layer must be a classification layer type. Was: " + throw new ArgumentException("Last layer must be a classification layer type. 
Was: " + net.Layers.Last().GetType().Name); } m_learner = new NeuralNetLearner(net, new OneOfNTargetEncoder(), loss, - learningRate, iterations, batchSize, l1decay, l2decay, + learningRate, iterations, batchSize, l1decay, l2decay, optimizerMethod, momentum, rho, beta1, beta2); } @@ -82,7 +82,7 @@ public ClassificationNeuralNetModel Learn(F64Matrix observations, double[] targe /// /// /// - public ClassificationNeuralNetModel Learn(F64Matrix observations, double[] targets, + public ClassificationNeuralNetModel Learn(F64Matrix observations, double[] targets, int[] indices) { var targetNames = GetOrderedTargetNames(targets); @@ -105,7 +105,7 @@ public ClassificationNeuralNetModel Learn(F64Matrix observations, double[] targe { var targetNames = GetOrderedTargetNames(targets); - var model = m_learner.Learn(observations, targets, + var model = m_learner.Learn(observations, targets, validationObservations, validationTargets); return new ClassificationNeuralNetModel(model, targetNames); diff --git a/src/SharpLearning.Neural/Learners/NeuralNetLearner.cs b/src/SharpLearning.Neural/Learners/NeuralNetLearner.cs index c66ff78f..164c09f7 100644 --- a/src/SharpLearning.Neural/Learners/NeuralNetLearner.cs +++ b/src/SharpLearning.Neural/Learners/NeuralNetLearner.cs @@ -51,17 +51,17 @@ public class NeuralNetLearner /// Exponential decay rate for estimates of first moment vector, should be in range 0 to 1 (Default is 0.9) /// Exponential decay rate for estimates of second moment vector, should be in range 0 to 1 (Default is 0.999) public NeuralNetLearner( - NeuralNet net, ITargetEncoder targetEncoder, - ILoss loss, - double learningRate = 0.001, - int iterations = 100, - int batchSize = 128, - double l1decay = 0, + NeuralNet net, ITargetEncoder targetEncoder, + ILoss loss, + double learningRate = 0.001, + int iterations = 100, + int batchSize = 128, + double l1decay = 0, double l2decay = 0, - OptimizerMethod optimizerMethod = OptimizerMethod.RMSProp, - double momentum = 0.9, - double rho = 0.95, - double beta1 = 0.9, + OptimizerMethod optimizerMethod = OptimizerMethod.RMSProp, + double momentum = 0.9, + double rho = 0.95, + double beta1 = 0.9, double beta2 = 0.999) { m_net = net ?? 
throw new ArgumentNullException(nameof(net)); @@ -82,8 +82,8 @@ public NeuralNetLearner( m_momentum = momentum; m_batchSize = batchSize; m_random = new Random(232); - - m_optimizer = new NeuralNetOptimizer(learningRate, batchSize, + + m_optimizer = new NeuralNetOptimizer(learningRate, batchSize, l1decay, l2decay, optimizerMethod, momentum, rho, beta1, beta2); SetupLinerAlgebraProvider(); @@ -109,7 +109,7 @@ public NeuralNet Learn(F64Matrix observations, double[] targets) /// /// /// - public NeuralNet Learn(F64Matrix observations, double[] targets, + public NeuralNet Learn(F64Matrix observations, double[] targets, int[] indices) { return Learn(observations, targets, indices, @@ -131,7 +131,7 @@ public NeuralNet Learn(F64Matrix observations, double[] targets, { var indices = Enumerable.Range(0, targets.Length).ToArray(); return Learn(observations, targets, indices, - validationObservations, validationTargets); + validationObservations, validationTargets); } /// @@ -175,7 +175,7 @@ public NeuralNet Learn(F64Matrix observations, double[] targets, int[] indices, } var currentLoss = 0.0; - + // initialize net m_net.Initialize(m_batchSize, m_random); @@ -248,7 +248,7 @@ public NeuralNet Learn(F64Matrix observations, double[] targets, int[] indices, currentLoss = accumulatedLoss / (double)indices.Length; - if(earlyStopping) + if (earlyStopping) { var candidate = m_net.CopyNetForPredictionModel(); candidate.Forward(floatValidationObservations, floatValidationPredictions); @@ -259,7 +259,7 @@ public NeuralNet Learn(F64Matrix observations, double[] targets, int[] indices, Trace.WriteLine(string.Format("Iteration: {0:000} - Loss {1:0.00000} - Validation: {2:0.00000} - Time (ms): {3}", (iteration + 1), currentLoss, validationLoss, timer.ElapsedMilliseconds)); - if(validationLoss < bestLoss) + if (validationLoss < bestLoss) { bestLoss = validationLoss; bestNeuralNet = candidate; @@ -280,7 +280,7 @@ public NeuralNet Learn(F64Matrix observations, double[] targets, int[] indices, } } - if(earlyStopping) + if (earlyStopping) { return bestNeuralNet; } diff --git a/src/SharpLearning.Neural/Learners/RegressionNeuralNetLearner.cs b/src/SharpLearning.Neural/Learners/RegressionNeuralNetLearner.cs index fe0015ef..1d32171e 100644 --- a/src/SharpLearning.Neural/Learners/RegressionNeuralNetLearner.cs +++ b/src/SharpLearning.Neural/Learners/RegressionNeuralNetLearner.cs @@ -34,17 +34,17 @@ public sealed class RegressionNeuralNetLearner : IIndexedLearner, ILearn /// Exponential decay rate for estimates of first moment vector, should be in range 0 to 1 (Default is 0.9) /// Exponential decay rate for estimates of second moment vector, should be in range 0 to 1 (Default is 0.999) public RegressionNeuralNetLearner( - NeuralNet net, - ILoss loss, - double learningRate = 0.001, - int iterations = 100, - int batchSize = 128, - double l1decay = 0, + NeuralNet net, + ILoss loss, + double learningRate = 0.001, + int iterations = 100, + int batchSize = 128, + double l1decay = 0, double l2decay = 0, - OptimizerMethod optimizerMethod = OptimizerMethod.RMSProp, - double momentum = 0.9, - double rho = 0.95, - double beta1 = 0.9, + OptimizerMethod optimizerMethod = OptimizerMethod.RMSProp, + double momentum = 0.9, + double rho = 0.95, + double beta1 = 0.9, double beta2 = 0.999) { if (!(net.Layers.Last() is IRegressionLayer)) @@ -53,7 +53,7 @@ public RegressionNeuralNetLearner( } m_learner = new NeuralNetLearner(net, new CopyTargetEncoder(), loss, - learningRate, iterations, batchSize, l1decay, l2decay, + learningRate, iterations, 
batchSize, l1decay, l2decay, optimizerMethod, momentum, rho, beta1, beta2); } @@ -76,7 +76,7 @@ public RegressionNeuralNetModel Learn(F64Matrix observations, double[] targets) /// /// /// - public RegressionNeuralNetModel Learn(F64Matrix observations, double[] targets, + public RegressionNeuralNetModel Learn(F64Matrix observations, double[] targets, int[] indices) { var model = m_learner.Learn(observations, targets, indices); diff --git a/src/SharpLearning.Neural/Loss/AccuracyLoss.cs b/src/SharpLearning.Neural/Loss/AccuracyLoss.cs index 79f8a347..66a64cbb 100644 --- a/src/SharpLearning.Neural/Loss/AccuracyLoss.cs +++ b/src/SharpLearning.Neural/Loss/AccuracyLoss.cs @@ -26,14 +26,14 @@ public float Loss(Matrix targets, Matrix predictions) for (int col = 0; col < targets.ColumnCount; col++) { var predictionValue = predictions.At(row, col); - if(predictionValue > max) + if (predictionValue > max) { max = predictionValue; maxIndex = col; } } - if(targets.At(row, maxIndex) == 1.0) + if (targets.At(row, maxIndex) == 1.0) { correctCount++; } diff --git a/src/SharpLearning.Neural/MathNetExtensions.cs b/src/SharpLearning.Neural/MathNetExtensions.cs index 16965098..270495fb 100644 --- a/src/SharpLearning.Neural/MathNetExtensions.cs +++ b/src/SharpLearning.Neural/MathNetExtensions.cs @@ -26,7 +26,7 @@ public static void AddRowWise(this Matrix m, Vector v, Matrix m, Vector v, Matrix if (v.Count != cols) { - throw new ArgumentException("matrix cols: " + cols + + throw new ArgumentException("matrix cols: " + cols + " differs from vector length: " + v.Count); } @@ -90,7 +90,7 @@ public static void AddColumnWise(this Matrix m, Vector v, Matrix m, Vector v, Matrix if (v.Count != cols) { - throw new ArgumentException("matrix cols: " + cols + + throw new ArgumentException("matrix cols: " + cols + " differs from vector length: " + v.Count); } @@ -149,13 +149,13 @@ public static float ElementWiseMultiplicationSum(this Matrix m1, Matrix m1, Matrix m, Vector v) { var rows = m.RowCount; - + var mData = m.Data(); var vData = v.Data(); diff --git a/src/SharpLearning.Neural/Models/ClassificationNeuralNetModel.cs b/src/SharpLearning.Neural/Models/ClassificationNeuralNetModel.cs index 7887b078..53098db6 100644 --- a/src/SharpLearning.Neural/Models/ClassificationNeuralNetModel.cs +++ b/src/SharpLearning.Neural/Models/ClassificationNeuralNetModel.cs @@ -14,7 +14,7 @@ namespace SharpLearning.Neural.Models /// Classification neural net model. 
/// [Serializable] - public sealed class ClassificationNeuralNetModel + public sealed class ClassificationNeuralNetModel : IPredictorModel , IPredictorModel { @@ -144,7 +144,7 @@ public ProbabilityPrediction[] PredictProbability(F64Matrix observations) /// /// /// - public Dictionary GetVariableImportance(Dictionary featureNameToIndex) + public Dictionary GetVariableImportance(Dictionary featureNameToIndex) => m_neuralNet.GetVariableImportance(featureNameToIndex); /// diff --git a/src/SharpLearning.Neural/Models/NeuralNet.cs b/src/SharpLearning.Neural/Models/NeuralNet.cs index 96a93da9..4a4fd1de 100644 --- a/src/SharpLearning.Neural/Models/NeuralNet.cs +++ b/src/SharpLearning.Neural/Models/NeuralNet.cs @@ -47,12 +47,12 @@ private NeuralNet(List layers) public void Add(ILayer layer) { var unitsOfPreviousLayer = 0; - if(Layers.Count > 0) + if (Layers.Count > 0) { unitsOfPreviousLayer = Layers[Layers.Count - 1].Width; } - if(layer is IOutputLayer) + if (layer is IOutputLayer) { var denseLayer = new DenseLayer(layer.Depth, Activation.Undefined); Layers.Add(denseLayer); @@ -60,15 +60,15 @@ public void Add(ILayer layer) Layers.Add(layer); - if(layer is IBatchNormalizable) // consider adding separate interface for batch normalization + if (layer is IBatchNormalizable) // consider adding separate interface for batch normalization { - if(((IBatchNormalizable)layer).BatchNormalization) + if (((IBatchNormalizable)layer).BatchNormalization) { Layers.Add(new BatchNormalizationLayer()); } } - if(layer.ActivationFunc != Activation.Undefined) + if (layer.ActivationFunc != Activation.Undefined) { Layers.Add(new ActivationLayer(layer.ActivationFunc)); } @@ -82,7 +82,7 @@ public void Backward(Matrix delta) { for (int i = Layers.Count; i-- > 0;) { - delta = Layers[i].Backward(delta); + delta = Layers[i].Backward(delta); } } @@ -132,20 +132,20 @@ public void Initialize(int batchSize, Random random) { if (!(Layers.First() is InputLayer)) { - throw new ArgumentException("First layer must be InputLayer. Was: " + + throw new ArgumentException("First layer must be InputLayer. Was: " + Layers.First().GetType().Name); } - if(!(Layers.Last() is IOutputLayer)) + if (!(Layers.Last() is IOutputLayer)) { - throw new ArgumentException("Last layer must be an output layer type. Was: " + + throw new ArgumentException("Last layer must be an output layer type. 
Was: " + Layers.Last().GetType().Name); } for (int i = 1; i < Layers.Count; i++) { var previousLayer = Layers[i - 1]; - Layers[i].Initialize(previousLayer.Width, previousLayer.Height, + Layers[i].Initialize(previousLayer.Width, previousLayer.Height, previousLayer.Depth, batchSize, m_initialization, random); } } @@ -221,7 +221,7 @@ public string GetLayerDimensions() var dimensions = string.Empty; foreach (var layer in Layers) { - dimensions += $"{layer.GetType().Name}: {layer.Width}x{layer.Height}x{layer.Depth}" + dimensions += $"{layer.GetType().Name}: {layer.Width}x{layer.Height}x{layer.Depth}" + Environment.NewLine; } diff --git a/src/SharpLearning.Neural/Models/RegressionNeuralNetModel.cs b/src/SharpLearning.Neural/Models/RegressionNeuralNetModel.cs index 9519dd7a..94477973 100644 --- a/src/SharpLearning.Neural/Models/RegressionNeuralNetModel.cs +++ b/src/SharpLearning.Neural/Models/RegressionNeuralNetModel.cs @@ -76,7 +76,7 @@ public double[] Predict(F64Matrix observations) /// /// /// - public Dictionary GetVariableImportance(Dictionary featureNameToIndex) + public Dictionary GetVariableImportance(Dictionary featureNameToIndex) => m_neuralNet.GetVariableImportance(featureNameToIndex); /// diff --git a/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs b/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs index c9b59573..469cb598 100644 --- a/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs +++ b/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs @@ -54,14 +54,14 @@ public sealed class NeuralNetOptimizer /// Exponential decay rate for estimates of first moment vector, should be in range 0 to 1 (Default is 0.9) /// Exponential decay rate for estimates of second moment vector, should be in range 0 to 1 (Default is 0.999) public NeuralNetOptimizer( - double learningRate, - int batchSize, - double l1decay = 0, + double learningRate, + int batchSize, + double l1decay = 0, double l2decay = 0, - OptimizerMethod optimizerMethod = OptimizerMethod.RMSProp, - double momentum = 0.9, - double rho = 0.95, - double beta1 = 0.9, + OptimizerMethod optimizerMethod = OptimizerMethod.RMSProp, + double momentum = 0.9, + double rho = 0.95, + double beta1 = 0.9, double beta2 = 0.999) { if (learningRate <= 0) { throw new ArgumentNullException("learning rate must be larger than 0. Was: " + learningRate); } @@ -95,7 +95,7 @@ public void UpdateParameters(List parametersAndGradients m_iterationCounter++; // initialize accumulators. 
Will only be done once on first iteration and if the optimizer method is not sgd - var useAccumulators = m_gsumWeights.Count == 0 && + var useAccumulators = m_gsumWeights.Count == 0 && (m_optimizerMethod != OptimizerMethod.Sgd || m_momentum > 0.0); if (useAccumulators) @@ -124,8 +124,8 @@ void InitializeAccumulators(List parametersAndGradients) for (var i = 0; i < parametersAndGradients.Count; i++) { m_gsumWeights.Add(new double[parametersAndGradients[i].Parameters.Length]); - if (m_optimizerMethod == OptimizerMethod.Adam || - m_optimizerMethod == OptimizerMethod.Adadelta || + if (m_optimizerMethod == OptimizerMethod.Adam || + m_optimizerMethod == OptimizerMethod.Adadelta || m_optimizerMethod == OptimizerMethod.AdaMax || m_optimizerMethod == OptimizerMethod.Nadam) { @@ -192,8 +192,8 @@ void UpdateParam(int i, float[] parameters, float[] gradients, double l2Decay, d if (m_momentum > 0.0) // sgd + momentum { var dx = m_momentum * gsumi[j] - m_learningRate * gij; - gsumi[j] = dx; - parameters[j] += (float)dx; + gsumi[j] = dx; + parameters[j] += (float)dx; } else // standard sgd { @@ -203,7 +203,7 @@ void UpdateParam(int i, float[] parameters, float[] gradients, double l2Decay, d break; case OptimizerMethod.Adam: { - gsumi[j] = m_beta1 * gsumi[j] + (1.0 - m_beta1) * gij; + gsumi[j] = m_beta1 * gsumi[j] + (1.0 - m_beta1) * gij; xsumi[j] = m_beta2 * xsumi[j] + (1.0 - m_beta2) * gij * gij; var dx = -m_learningRate * gsumi[j] / (Math.Sqrt(xsumi[j]) + m_eps); @@ -212,8 +212,8 @@ void UpdateParam(int i, float[] parameters, float[] gradients, double l2Decay, d break; case OptimizerMethod.AdaMax: { - gsumi[j] = m_beta1 * gsumi[j] + (1.0 - m_beta1) * gij; - xsumi[j] = Math.Max(m_beta2 * xsumi[j], Math.Abs(gij)); + gsumi[j] = m_beta1 * gsumi[j] + (1.0 - m_beta1) * gij; + xsumi[j] = Math.Max(m_beta2 * xsumi[j], Math.Abs(gij)); var dx = -m_learningRate * gsumi[j] / (xsumi[j] + m_eps); parameters[j] += (float)dx; @@ -250,9 +250,9 @@ void UpdateParam(int i, float[] parameters, float[] gradients, double l2Decay, d case OptimizerMethod.Adadelta: { gsumi[j] = m_rho * gsumi[j] + (1 - m_rho) * gij * gij; - + // learning rate multiplication left out since recommended default is 1.0. - var dx = - gij * Math.Sqrt(xsumi[j] + m_eps) / Math.Sqrt(gsumi[j] + m_eps); + var dx = -gij * Math.Sqrt(xsumi[j] + m_eps) / Math.Sqrt(gsumi[j] + m_eps); xsumi[j] = m_rho * xsumi[j] + (1 - m_rho) * dx * dx; parameters[j] += (float)dx; diff --git a/src/SharpLearning.Optimization.Test/ArrayAssert.cs b/src/SharpLearning.Optimization.Test/ArrayAssert.cs index 9e0bfe9e..8bd9822b 100644 --- a/src/SharpLearning.Optimization.Test/ArrayAssert.cs +++ b/src/SharpLearning.Optimization.Test/ArrayAssert.cs @@ -6,7 +6,7 @@ public static class ArrayAssert { const double m_defaultDelta = 0.000001; - public static void AssertAreEqual(double[] expected, double[] actual, + public static void AssertAreEqual(double[] expected, double[] actual, double delta = m_defaultDelta) { Assert.AreEqual(expected.Length, actual.Length); diff --git a/src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs b/src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs index 76bfdbda..dc291def 100644 --- a/src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs +++ b/src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs @@ -57,16 +57,16 @@ public void GlobalizedBoundedNelderMeadOptimizer_Optimize(int?
maxDegreeOfParall }; Assert.AreEqual(expected.First().Error, actual.First().Error, Delta); - Assert.AreEqual(expected.First().ParameterSet.First(), + Assert.AreEqual(expected.First().ParameterSet.First(), actual.First().ParameterSet.First(), Delta); Assert.AreEqual(expected.Last().Error, actual.Last().Error, Delta); - Assert.AreEqual(expected.Last().ParameterSet.First(), + Assert.AreEqual(expected.Last().ParameterSet.First(), actual.Last().ParameterSet.First(), Delta); } static GlobalizedBoundedNelderMeadOptimizer CreateSut( - int? maybeMaxDegreeOfParallelism, + int? maybeMaxDegreeOfParallelism, MinMaxParameterSpec[] parameters) { const int DefaultMaxDegreeOfParallelism = -1; diff --git a/src/SharpLearning.Optimization.Test/GridSearchOptimizationTest.cs b/src/SharpLearning.Optimization.Test/GridSearchOptimizationTest.cs index 83fa660f..14beff39 100644 --- a/src/SharpLearning.Optimization.Test/GridSearchOptimizationTest.cs +++ b/src/SharpLearning.Optimization.Test/GridSearchOptimizationTest.cs @@ -15,13 +15,13 @@ public class GridSearchOptimizerTest [DataRow(null)] public void GridSearchOptimizer_OptimizeBest(int? maxDegreeOfParallelism) { - var parameters = new GridParameterSpec[] + var parameters = new GridParameterSpec[] { new GridParameterSpec(10.0, 20.0, 30.0, 35.0, 37.5, 40.0, 50.0, 60.0) }; - var sut = maxDegreeOfParallelism.HasValue ? - new GridSearchOptimizer(parameters, true, maxDegreeOfParallelism.Value) : + var sut = maxDegreeOfParallelism.HasValue ? + new GridSearchOptimizer(parameters, true, maxDegreeOfParallelism.Value) : new GridSearchOptimizer(parameters); var actual = sut.OptimizeBest(MinimizeWeightFromHeight); @@ -37,25 +37,25 @@ public void GridSearchOptimizer_OptimizeBest(int? maxDegreeOfParallelism) [DataRow(null)] public void GridSearchOptimizer_Optimize(int? maxDegreeOfParallelism) { - var parameters = new GridParameterSpec[] + var parameters = new GridParameterSpec[] { new GridParameterSpec(10.0, 20.0, 30.0, 35.0, 37.5, 40.0, 50.0, 60.0) }; - var sut = maxDegreeOfParallelism.HasValue ? - new GridSearchOptimizer(parameters, true, maxDegreeOfParallelism.Value) : + var sut = maxDegreeOfParallelism.HasValue ? 
+ new GridSearchOptimizer(parameters, true, maxDegreeOfParallelism.Value) : new GridSearchOptimizer(parameters); var actual = sut.Optimize(MinimizeWeightFromHeight); - var expected = new OptimizerResult[] - { + var expected = new OptimizerResult[] + { new OptimizerResult(new double[] { 10 }, 31638.9579), - new OptimizerResult(new double[] { 60 }, 20500.6279) + new OptimizerResult(new double[] { 60 }, 20500.6279) }; Assert.AreEqual(expected.First().Error, actual.First().Error, Delta); - Assert.AreEqual(expected.First().ParameterSet.First(), + Assert.AreEqual(expected.First().ParameterSet.First(), actual.First().ParameterSet.First(), Delta); Assert.AreEqual(expected.Last().Error, actual.Last().Error, Delta); diff --git a/src/SharpLearning.Optimization.Test/HyperbandOptimizerTest.cs b/src/SharpLearning.Optimization.Test/HyperbandOptimizerTest.cs index e06dd7bd..ed19197a 100644 --- a/src/SharpLearning.Optimization.Test/HyperbandOptimizerTest.cs +++ b/src/SharpLearning.Optimization.Test/HyperbandOptimizerTest.cs @@ -55,9 +55,9 @@ OptimizerResult minimize(double[] p, double r) } var sut = new HyperbandOptimizer( - parameters, - maximumBudget: 81, - eta: 5, + parameters, + maximumBudget: 81, + eta: 5, skipLastIterationOfEachRound: false, seed: 34); diff --git a/src/SharpLearning.Optimization.Test/ParameterBoundsTest.cs b/src/SharpLearning.Optimization.Test/ParameterBoundsTest.cs index 01f554f1..2188b82d 100644 --- a/src/SharpLearning.Optimization.Test/ParameterBoundsTest.cs +++ b/src/SharpLearning.Optimization.Test/ParameterBoundsTest.cs @@ -10,7 +10,7 @@ public class ParameterBoundsTest [TestMethod] public void ParameterBounds_NextValue() { - var sut = new MinMaxParameterSpec(min: 20, max: 200, + var sut = new MinMaxParameterSpec(min: 20, max: 200, transform: Transform.Linear); var sampler = new RandomUniform(seed: 32); @@ -21,7 +21,7 @@ public void ParameterBounds_NextValue() actual[i] = sut.SampleValue(sampler: sampler); } - var expected = new double[] + var expected = new double[] { 99.8935983236384, 57.2098020451189, @@ -46,7 +46,7 @@ public void ParameterBounds_NextValue() [ExpectedException(typeof(ArgumentException))] public void ParameterBounds_Throw_On_Min_Larger_Than_Max() { - new MinMaxParameterSpec(min:30, max: 10); + new MinMaxParameterSpec(min: 30, max: 10); } [TestMethod] @@ -60,7 +60,7 @@ public void ParameterBounds_Throw_On_Min_Equals_Than_Max() [ExpectedException(typeof(ArgumentNullException))] public void ParameterBounds_Throw_On_Transform_Is_Null() { - new MinMaxParameterSpec(min: 10, max: 30, transform: null, + new MinMaxParameterSpec(min: 10, max: 30, transform: null, parameterType: ParameterType.Continuous); } } diff --git a/src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs b/src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs index 7cbac732..af5c1a4f 100644 --- a/src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs +++ b/src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs @@ -58,11 +58,11 @@ public void ParticleSwarmOptimizer_Optimize(int? 
maxDegreeOfParallelism) }; Assert.AreEqual(expected.First().Error, actual.First().Error, Delta); - Assert.AreEqual(expected.First().ParameterSet.First(), + Assert.AreEqual(expected.First().ParameterSet.First(), actual.First().ParameterSet.First(), Delta); Assert.AreEqual(expected.Last().Error, actual.Last().Error, Delta); - Assert.AreEqual(expected.Last().ParameterSet.First(), + Assert.AreEqual(expected.Last().ParameterSet.First(), actual.Last().ParameterSet.First(), Delta); } @@ -77,7 +77,7 @@ static ParticleSwarmOptimizer CreateSut( var sut = new ParticleSwarmOptimizer(parameters, maxIterations: 100, - numberOfParticles:10, + numberOfParticles: 10, c1: 2, c2: 2, seed: 42, diff --git a/src/SharpLearning.Optimization.Test/RandomSearchOptimizationTest.cs b/src/SharpLearning.Optimization.Test/RandomSearchOptimizationTest.cs index 708e0092..dbb24b41 100644 --- a/src/SharpLearning.Optimization.Test/RandomSearchOptimizationTest.cs +++ b/src/SharpLearning.Optimization.Test/RandomSearchOptimizationTest.cs @@ -20,8 +20,8 @@ public void RandomSearchOptimizer_OptimizeBest(int? maxDegreeOfParallelism) new MinMaxParameterSpec(0.0, 100.0, Transform.Linear) }; - var sut = maxDegreeOfParallelism.HasValue ? - new RandomSearchOptimizer(parameters, 100, 42, true, maxDegreeOfParallelism.Value) : + var sut = maxDegreeOfParallelism.HasValue ? + new RandomSearchOptimizer(parameters, 100, 42, true, maxDegreeOfParallelism.Value) : new RandomSearchOptimizer(parameters, 100); var actual = sut.OptimizeBest(MinimizeWeightFromHeight); @@ -42,8 +42,8 @@ public void RandomSearchOptimizer_Optimize(int? maxDegreeOfParallelism) new MinMaxParameterSpec(10.0, 37.5, Transform.Linear) }; - var sut = maxDegreeOfParallelism.HasValue ? - new RandomSearchOptimizer(parameters, 100, 42, true, maxDegreeOfParallelism.Value) : + var sut = maxDegreeOfParallelism.HasValue ? + new RandomSearchOptimizer(parameters, 100, 42, true, maxDegreeOfParallelism.Value) : new RandomSearchOptimizer(parameters, 100); var actual = sut.Optimize(MinimizeWeightFromHeight); @@ -55,11 +55,11 @@ public void RandomSearchOptimizer_Optimize(int? 
maxDegreeOfParallelism) }; Assert.AreEqual(expected.First().Error, actual.First().Error, Delta); - Assert.AreEqual(expected.First().ParameterSet.First(), + Assert.AreEqual(expected.First().ParameterSet.First(), actual.First().ParameterSet.First(), Delta); Assert.AreEqual(expected.Last().Error, actual.Last().Error, Delta); - Assert.AreEqual(expected.Last().ParameterSet.First(), + Assert.AreEqual(expected.Last().ParameterSet.First(), actual.Last().ParameterSet.First(), Delta); } diff --git a/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs b/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs index b8c97aab..7ed052d7 100644 --- a/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs +++ b/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs @@ -146,7 +146,7 @@ public void SmacOptimizer_ArgumentCheck_ParameterRanges() [ExpectedException(typeof(ArgumentException))] public void SmacOptimizer_ArgumentCheck_Iterations() { - var sut = new SmacOptimizer(new[] { new GridParameterSpec(0, 1, 2) }, + var sut = new SmacOptimizer(new[] { new GridParameterSpec(0, 1, 2) }, 0); } diff --git a/src/SharpLearning.Optimization.Test/Transforms/ExponentialAverageTransformTest.cs b/src/SharpLearning.Optimization.Test/Transforms/ExponentialAverageTransformTest.cs index 7909e667..38529f5f 100644 --- a/src/SharpLearning.Optimization.Test/Transforms/ExponentialAverageTransformTest.cs +++ b/src/SharpLearning.Optimization.Test/Transforms/ExponentialAverageTransformTest.cs @@ -16,8 +16,8 @@ public void ExponentialAverageTransform_Transform() var actual = new double[10]; for (int i = 0; i < actual.Length; i++) - { - actual[i] = sut.Transform(min: 0.9, max: 0.999, + { + actual[i] = sut.Transform(min: 0.9, max: 0.999, parameterType: ParameterType.Continuous, sampler: sampler); } diff --git a/src/SharpLearning.Optimization.Test/Transforms/LinearTransformTest.cs b/src/SharpLearning.Optimization.Test/Transforms/LinearTransformTest.cs index 1f015274..97506538 100644 --- a/src/SharpLearning.Optimization.Test/Transforms/LinearTransformTest.cs +++ b/src/SharpLearning.Optimization.Test/Transforms/LinearTransformTest.cs @@ -16,7 +16,7 @@ public void LinearTransform_Transform() var actual = new double[10]; for (int i = 0; i < actual.Length; i++) { - actual[i] = sut.Transform(min: 20, max: 200, + actual[i] = sut.Transform(min: 20, max: 200, parameterType: ParameterType.Continuous, sampler: sampler); } diff --git a/src/SharpLearning.Optimization.Test/Transforms/Log10TransformTest.cs b/src/SharpLearning.Optimization.Test/Transforms/Log10TransformTest.cs index 59aa725c..bdd06341 100644 --- a/src/SharpLearning.Optimization.Test/Transforms/Log10TransformTest.cs +++ b/src/SharpLearning.Optimization.Test/Transforms/Log10TransformTest.cs @@ -17,7 +17,7 @@ public void Log10Transform_Transform() var actual = new double[10]; for (int i = 0; i < actual.Length; i++) { - actual[i] = sut.Transform(min: 0.0001, max: 1, + actual[i] = sut.Transform(min: 0.0001, max: 1, parameterType: ParameterType.Continuous, sampler: sampler); } diff --git a/src/SharpLearning.Optimization/BayesianOptimizer.cs b/src/SharpLearning.Optimization/BayesianOptimizer.cs index f90f5c03..9f7b642a 100644 --- a/src/SharpLearning.Optimization/BayesianOptimizer.cs +++ b/src/SharpLearning.Optimization/BayesianOptimizer.cs @@ -204,7 +204,7 @@ public double[][] ProposeParameterSets(int parameterSetCount, var validParameterSets = previousResults .Where(v => !double.IsNaN(v.Error)) .OrderBy(v => v.Error); // TODO: This might still fail to provide same order if two 
different parameter sets yield the same error. - + var model = FitModel(validParameterSets); return GenerateCandidateParameterSets(parameterSetCount, validParameterSets.ToList(), model); @@ -236,7 +236,7 @@ double[][] GenerateCandidateParameterSets(int parameterSetCount, // not the minimum. var candidates = results .Where(v => !double.IsNaN(v.Error)) - .OrderByDescending(r => r.Error) + .OrderByDescending(r => r.Error) .Take(parameterSetCount) .Select(p => p.ParameterSet).ToArray(); diff --git a/src/SharpLearning.Optimization/GlobalizedBoundedNelderMeadOptimizer.cs b/src/SharpLearning.Optimization/GlobalizedBoundedNelderMeadOptimizer.cs index 365f4b14..31114825 100644 --- a/src/SharpLearning.Optimization/GlobalizedBoundedNelderMeadOptimizer.cs +++ b/src/SharpLearning.Optimization/GlobalizedBoundedNelderMeadOptimizer.cs @@ -57,17 +57,17 @@ public sealed class GlobalizedBoundedNelderMeadOptimizer : IOptimizer /// Coefficient for shrink part of the algorithm (default is 0.5) /// Seed for random restarts /// Maximum number of concurrent operations (default is -1 (unlimited)) - public GlobalizedBoundedNelderMeadOptimizer(IParameterSpec[] parameters, - int maxRestarts=8, - double noImprovementThreshold = 0.001, - int maxIterationsWithoutImprovement = 5, - int maxIterationsPrRestart = 0, + public GlobalizedBoundedNelderMeadOptimizer(IParameterSpec[] parameters, + int maxRestarts = 8, + double noImprovementThreshold = 0.001, + int maxIterationsWithoutImprovement = 5, + int maxIterationsPrRestart = 0, int maxFunctionEvaluations = 0, - double alpha = 1, - double gamma = 2, - double rho = -0.5, - double sigma = 0.5, - int seed = 324, + double alpha = 1, + double gamma = 2, + double rho = -0.5, + double sigma = 0.5, + int seed = 324, int maxDegreeOfParallelism = -1) { if (maxIterationsWithoutImprovement <= 0) { throw new ArgumentException("maxIterationsWithoutImprovement must be at least 1"); } @@ -277,7 +277,7 @@ public OptimizerResult[] Optimize(Func functionToMini return allResults.ToArray(); } - OptimizerResult EvaluateFunction(Func functionToMinimize, + OptimizerResult EvaluateFunction(Func functionToMinimize, double[] parameters) { m_totalFunctionEvaluations++; diff --git a/src/SharpLearning.Optimization/GridSearchOptimizer.cs b/src/SharpLearning.Optimization/GridSearchOptimizer.cs index 4d81bd81..3f0ba6df 100644 --- a/src/SharpLearning.Optimization/GridSearchOptimizer.cs +++ b/src/SharpLearning.Optimization/GridSearchOptimizer.cs @@ -21,8 +21,8 @@ public sealed class GridSearchOptimizer : IOptimizer /// A list of parameter specs, one for each optimization parameter /// Use multi threading to speed up execution (default is true) /// Maximum number of concurrent operations (default is -1 (unlimited)) - public GridSearchOptimizer(IParameterSpec[] parameters, - bool runParallel = true, + public GridSearchOptimizer(IParameterSpec[] parameters, + bool runParallel = true, int maxDegreeOfParallelism = -1) { m_parameters = parameters ?? throw new ArgumentNullException(nameof(parameters)); diff --git a/src/SharpLearning.Optimization/HyperbandOptimizer.cs b/src/SharpLearning.Optimization/HyperbandOptimizer.cs index 4dca1031..72206e1d 100644 --- a/src/SharpLearning.Optimization/HyperbandOptimizer.cs +++ b/src/SharpLearning.Optimization/HyperbandOptimizer.cs @@ -54,13 +54,13 @@ public sealed class HyperbandOptimizer /// True to skip the last, /// most computationally expensive, iteration of each round. Default is false. 
/// - public HyperbandOptimizer(IParameterSpec[] parameters, + public HyperbandOptimizer(IParameterSpec[] parameters, int maximumBudget = 81, int eta = 3, bool skipLastIterationOfEachRound = false, int seed = 34) { m_parameters = parameters ?? throw new ArgumentNullException(nameof(parameters)); - if(maximumBudget < 1) throw new ArgumentException(nameof(maximumBudget) + " must be at larger than 0"); + if (maximumBudget < 1) throw new ArgumentException(nameof(maximumBudget) + " must be larger than 0"); if (eta < 1) throw new ArgumentException(nameof(eta) + " must be larger than 0"); m_sampler = new RandomUniform(seed); @@ -69,7 +69,7 @@ public HyperbandOptimizer(IParameterSpec[] parameters, m_eta = eta; // This is called `s max` in the paper. - m_numberOfRounds = (int)(Math.Log(m_maximumBudget) / Math.Log(m_eta)); + m_numberOfRounds = (int)(Math.Log(m_maximumBudget) / Math.Log(m_eta)); // This is called `B` in the paper. m_totalBudgetPerRound = (m_numberOfRounds + 1) * m_maximumBudget; @@ -102,7 +102,7 @@ public OptimizerResult[] Optimize(HyperbandObjectiveFunction functionToMinimize) for (int rounds = m_numberOfRounds; rounds >= 0; rounds--) { // Initial configurations count. - var initialConfigurationCount = (int)Math.Ceiling((m_totalBudgetPerRound / m_maximumBudget) + var initialConfigurationCount = (int)Math.Ceiling((m_totalBudgetPerRound / m_maximumBudget) * (Math.Pow(m_eta, rounds) / (rounds + 1))); // Initial budget per parameter set. diff --git a/src/SharpLearning.Optimization/IOptimizer.cs b/src/SharpLearning.Optimization/IOptimizer.cs index 400409f4..4b2013bf 100644 --- a/src/SharpLearning.Optimization/IOptimizer.cs +++ b/src/SharpLearning.Optimization/IOptimizer.cs @@ -13,7 +13,7 @@ public interface IOptimizer /// /// OptimizerResult OptimizeBest(Func functionToMinimize); - + /// /// Returns all results ordered from best to worst (minimized). /// diff --git a/src/SharpLearning.Optimization/ParameterSpecs/MinMaxParameterSpec.cs b/src/SharpLearning.Optimization/ParameterSpecs/MinMaxParameterSpec.cs index d7d39d31..04e3c4c5 100644 --- a/src/SharpLearning.Optimization/ParameterSpecs/MinMaxParameterSpec.cs +++ b/src/SharpLearning.Optimization/ParameterSpecs/MinMaxParameterSpec.cs @@ -22,8 +22,8 @@ public class MinMaxParameterSpec : IParameterSpec /// Default is Linear. /// Selects the type of parameter. Should the parameter be sampled as discrete values, or as continuous values. /// Default is Continuous. - public MinMaxParameterSpec(double min, double max, - Transform transform = Transform.Linear, ParameterType parameterType = ParameterType.Continuous) + public MinMaxParameterSpec(double min, double max, + Transform transform = Transform.Linear, ParameterType parameterType = ParameterType.Continuous) { if (min >= max) { throw new ArgumentException($"min: {min} is larger than or equal to max: {max}"); } @@ -40,7 +40,7 @@ public MinMaxParameterSpec(double min, double max, /// maximum bound. /// Transform for controlling the scale of the parameter sampled between min and max bounds. /// Selects the type of parameter. Should the parameter be sampled as discrete values, or as continuous values.
- public MinMaxParameterSpec(double min, double max, + public MinMaxParameterSpec(double min, double max, ITransform transform, ParameterType parameterType) { if (min >= max) diff --git a/src/SharpLearning.Optimization/ParticleSwarmOptimizer.cs b/src/SharpLearning.Optimization/ParticleSwarmOptimizer.cs index a3940d74..1c8d0265 100644 --- a/src/SharpLearning.Optimization/ParticleSwarmOptimizer.cs +++ b/src/SharpLearning.Optimization/ParticleSwarmOptimizer.cs @@ -40,12 +40,12 @@ public sealed class ParticleSwarmOptimizer : IOptimizer /// Learning factor weighting global best solution. (default is 2) /// Seed for the random initialization and velocity corrections /// Maximum number of concurrent operations (default is -1 (unlimited)) - public ParticleSwarmOptimizer(IParameterSpec[] parameters, - int maxIterations, - int numberOfParticles = 10, - double c1 = 2, - double c2 = 2, - int seed = 42, + public ParticleSwarmOptimizer(IParameterSpec[] parameters, + int maxIterations, + int numberOfParticles = 10, + double c1 = 2, + double c2 = 2, + int seed = 42, int maxDegreeOfParallelism = -1) { if (maxIterations <= 0) { throw new ArgumentException("maxIterations must be at least 1"); } diff --git a/src/SharpLearning.Optimization/RandomSearchOptimizer.cs b/src/SharpLearning.Optimization/RandomSearchOptimizer.cs index 175ee5bd..66c5f939 100644 --- a/src/SharpLearning.Optimization/RandomSearchOptimizer.cs +++ b/src/SharpLearning.Optimization/RandomSearchOptimizer.cs @@ -63,16 +63,16 @@ public OptimizerResult OptimizeBest(Func functionToMi public OptimizerResult[] Optimize(Func functionToMinimize) { // Generate parameter sets. - var parameterSets = SampleRandomParameterSets(m_iterations, + var parameterSets = SampleRandomParameterSets(m_iterations, m_parameters, m_sampler); // Run parameter sets. var parameterIndexToResult = new ConcurrentDictionary(); - if(!m_runParallel) + if (!m_runParallel) { for (int index = 0; index < parameterSets.Length; index++) { - RunParameterSet(index, parameterSets, + RunParameterSet(index, parameterSets, functionToMinimize, parameterIndexToResult); } } @@ -80,7 +80,7 @@ public OptimizerResult[] Optimize(Func functionToMini { Parallel.For(0, parameterSets.Length, m_parallelOptions, (index, loopState) => { - RunParameterSet(index, parameterSets, + RunParameterSet(index, parameterSets, functionToMinimize, parameterIndexToResult); }); } @@ -99,7 +99,7 @@ public OptimizerResult[] Optimize(Func functionToMini /// /// /// - public static double[][] SampleRandomParameterSets(int parameterSetCount, + public static double[][] SampleRandomParameterSets(int parameterSetCount, IParameterSpec[] parameters, IParameterSampler sampler) { var parameterSets = new double[parameterSetCount][]; @@ -117,7 +117,7 @@ public static double[][] SampleRandomParameterSets(int parameterSetCount, /// /// /// - public static double[] SampleParameterSet(IParameterSpec[] parameters, + public static double[] SampleParameterSet(IParameterSpec[] parameters, IParameterSampler sampler) { var parameterSet = new double[parameters.Length]; diff --git a/src/SharpLearning.Optimization/SmacOptimizer.cs b/src/SharpLearning.Optimization/SmacOptimizer.cs index 133b9f60..0af8c11e 100644 --- a/src/SharpLearning.Optimization/SmacOptimizer.cs +++ b/src/SharpLearning.Optimization/SmacOptimizer.cs @@ -65,7 +65,7 @@ public SmacOptimizer(IParameterSpec[] parameters, { m_parameters = parameters ?? 
throw new ArgumentNullException(nameof(parameters)); - if(iterations < 1) throw new ArgumentException(nameof(iterations) + + if (iterations < 1) throw new ArgumentException(nameof(iterations) + " must be at least 1. Was: " + iterations); if (randomStartingPointCount < 1) throw new ArgumentException(nameof(randomStartingPointCount) + " must be at least 1. Was: " + randomStartingPointCount); @@ -139,7 +139,7 @@ public OptimizerResult[] Optimize(Func functionToMini /// /// /// - public List RunParameterSets(Func functionToMinimize, + public List RunParameterSets(Func functionToMinimize, double[][] parameterSets) { var results = new List(); @@ -161,7 +161,7 @@ public List RunParameterSets(Func fu /// These are used in the model for proposing new parameter sets. /// If no results are provided, random parameter sets will be returned. /// - public double[][] ProposeParameterSets(int parameterSetCount, + public double[][] ProposeParameterSets(int parameterSetCount, IReadOnlyList previousResults = null) { var previousParameterSetCount = previousResults == null ? 0 : previousResults.Count; @@ -176,7 +176,7 @@ public double[][] ProposeParameterSets(int parameterSetCount, return randomParameterSets; } - var validParameterSets = previousResults.Where(v => !double.IsNaN(v.Error)); + var validParameterSets = previousResults.Where(v => !double.IsNaN(v.Error)); var model = FitModel(validParameterSets); return GenerateCandidateParameterSets(parameterSetCount, validParameterSets.ToList(), model); @@ -194,7 +194,7 @@ RegressionForestModel FitModel(IEnumerable validParameterSets) return m_learner.Learn(observations, targets); } - double[][] GenerateCandidateParameterSets(int parameterSetCount, + double[][] GenerateCandidateParameterSets(int parameterSetCount, IReadOnlyList previousResults, RegressionForestModel model) { // Get top parameter sets from previous runs. @@ -215,7 +215,7 @@ double[][] GenerateCandidateParameterSets(int parameterSetCount, return InterLeaveModelBasedAndRandomParameterSets(challengers, randomChallengers); } - double[][] InterLeaveModelBasedAndRandomParameterSets(double[][] challengers, + double[][] InterLeaveModelBasedAndRandomParameterSets(double[][] challengers, double[][] randomChallengers) { var finalParameterSets = new double[challengers.Length + randomChallengers.Length][]; @@ -224,14 +224,14 @@ double[][] InterLeaveModelBasedAndRandomParameterSets(double[][] challengers, return finalParameterSets; } - double[][] GreedyPlusRandomSearch(double[][] parentParameterSets, RegressionForestModel model, + double[][] GreedyPlusRandomSearch(double[][] parentParameterSets, RegressionForestModel model, int parameterSetCount, IReadOnlyList previousResults) { // TODO: Handle maximization and minimization. Currently minimizes. var best = previousResults.Min(v => v.Error); var parameterSets = new List<(double[] parameterSet, double EI)>(); - + // Perform local search. foreach (var parameterSet in parentParameterSets) { @@ -259,7 +259,7 @@ double[][] GreedyPlusRandomSearch(double[][] parentParameterSets, RegressionFore /// Performs a local one-mutation neighborhood greedy search. /// Stop search when no neighbors increase expected improvement.
/// - (double[] parameterSet, double expectedImprovement) LocalSearch(double[][] parentParameterSets, + (double[] parameterSet, double expectedImprovement) LocalSearch(double[][] parentParameterSets, RegressionForestModel model, double bestScore, double epsilon) { var bestParameterSet = parentParameterSets.First(); diff --git a/src/SharpLearning.Optimization/Transforms/ExponentialAverageTransform.cs b/src/SharpLearning.Optimization/Transforms/ExponentialAverageTransform.cs index aba56247..33832fa3 100644 --- a/src/SharpLearning.Optimization/Transforms/ExponentialAverageTransform.cs +++ b/src/SharpLearning.Optimization/Transforms/ExponentialAverageTransform.cs @@ -22,7 +22,7 @@ public double Transform(double min, double max, ParameterType parameterType, IPa { if (min >= 1 || max >= 1) { - throw new ArgumentException("ExponentialAverage scale requires min: " + + throw new ArgumentException("ExponentialAverage scale requires min: " + $" {min} and max: {max} to be smaller than one"); } diff --git a/src/SharpLearning.Optimization/Transforms/Log10Transform.cs b/src/SharpLearning.Optimization/Transforms/Log10Transform.cs index e6b666fa..ab2472b6 100644 --- a/src/SharpLearning.Optimization/Transforms/Log10Transform.cs +++ b/src/SharpLearning.Optimization/Transforms/Log10Transform.cs @@ -20,7 +20,7 @@ public double Transform(double min, double max, ParameterType parameterType, IPa { if (min <= 0 || max <= 0) { - throw new ArgumentException("logarithmic scale requires min: " + + throw new ArgumentException("logarithmic scale requires min: " + $"{min} and max: {max} to be larger than zero"); } var a = Math.Log10(min); diff --git a/src/SharpLearning.RandomForest.Test/Learners/ClassificationExtremelyRandomizedTreesLearnerTest.cs b/src/SharpLearning.RandomForest.Test/Learners/ClassificationExtremelyRandomizedTreesLearnerTest.cs index af2cd99c..d650e226 100644 --- a/src/SharpLearning.RandomForest.Test/Learners/ClassificationExtremelyRandomizedTreesLearnerTest.cs +++ b/src/SharpLearning.RandomForest.Test/Learners/ClassificationExtremelyRandomizedTreesLearnerTest.cs @@ -73,8 +73,8 @@ public void ClassificationExtremelyRandomizedTreesLearner_Learn_Glass_100_Indice { var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); - var sut = new ClassificationExtremelyRandomizedTreesLearner(100, 1, 100, 1, 0.0001, 1.0, 42, false); - + var sut = new ClassificationExtremelyRandomizedTreesLearner(100, 1, 100, 1, 0.0001, 1.0, 42, false); + var indices = Enumerable.Range(0, targets.Length).ToArray(); indices.Shuffle(new Random(42)); indices = indices.Take((int)(targets.Length * 0.7)) @@ -107,12 +107,12 @@ public void ClassificationExtremelyRandomizedTreesLearner_Learn_Glass_100_Trees_ Assert.AreEqual(0.0560747663551402, error, m_delta); } - double ClassificationExtremelyRandomizedTreesLearner_Learn_Glass(int trees, + double ClassificationExtremelyRandomizedTreesLearner_Learn_Glass(int trees, double subSampleRatio = 1.0) { var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); - var sut = new ClassificationExtremelyRandomizedTreesLearner(trees, 1, 100, 1, 0.0001, + var sut = new ClassificationExtremelyRandomizedTreesLearner(trees, 1, 100, 1, 0.0001, subSampleRatio, 42, false); var model = sut.Learn(observations, targets); @@ -123,12 +123,12 @@ double ClassificationExtremelyRandomizedTreesLearner_Learn_Glass(int trees, return error; } - double ClassificationExtremelyRandomizedTreesLearner_Learn_Aptitude(int trees, + double ClassificationExtremelyRandomizedTreesLearner_Learn_Aptitude(int trees, double 
subSampleRatio = 1.0) { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var sut = new ClassificationExtremelyRandomizedTreesLearner(trees, 1, 100, 1, 0.0001, + var sut = new ClassificationExtremelyRandomizedTreesLearner(trees, 1, 100, 1, 0.0001, subSampleRatio, 42, false); var model = sut.Learn(observations, targets); diff --git a/src/SharpLearning.RandomForest.Test/Learners/ClassificationRandomForestLearnerTest.cs b/src/SharpLearning.RandomForest.Test/Learners/ClassificationRandomForestLearnerTest.cs index 540e4906..0fae8e6e 100644 --- a/src/SharpLearning.RandomForest.Test/Learners/ClassificationRandomForestLearnerTest.cs +++ b/src/SharpLearning.RandomForest.Test/Learners/ClassificationRandomForestLearnerTest.cs @@ -74,7 +74,7 @@ public void ClassificationRandomForestLearner_Learn_Glass_100_Indices() var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); var sut = new ClassificationRandomForestLearner(100, 1, 100, 1, 0.0001, 1.0, 42, false); - + var indices = Enumerable.Range(0, targets.Length).ToArray(); indices.Shuffle(new Random(42)); indices = indices.Take((int)(targets.Length * 0.7)) @@ -111,7 +111,7 @@ double ClassificationRandomForestLearner_Learn_Glass(int trees, double subSample { var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); - var sut = new ClassificationRandomForestLearner(trees, 1, 100, 1, 0.0001, + var sut = new ClassificationRandomForestLearner(trees, 1, 100, 1, 0.0001, subSampleRatio, 42, false); var model = sut.Learn(observations, targets); @@ -126,7 +126,7 @@ double ClassificationRandomLearner_Learn_Aptitude(int trees, double subSampleRat { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var sut = new ClassificationRandomForestLearner(trees, 5, 100, 1, 0.0001, + var sut = new ClassificationRandomForestLearner(trees, 5, 100, 1, 0.0001, subSampleRatio, 42, false); var model = sut.Learn(observations, targets); diff --git a/src/SharpLearning.RandomForest.Test/Learners/RegressionExtremelyRandomizedTreesLearnerTest.cs b/src/SharpLearning.RandomForest.Test/Learners/RegressionExtremelyRandomizedTreesLearnerTest.cs index 8b841023..c970a4ae 100644 --- a/src/SharpLearning.RandomForest.Test/Learners/RegressionExtremelyRandomizedTreesLearnerTest.cs +++ b/src/SharpLearning.RandomForest.Test/Learners/RegressionExtremelyRandomizedTreesLearnerTest.cs @@ -32,7 +32,7 @@ public void RegressionExtremelyRandomizedTreesLearnerTest_Learn_Aptitude_Trees_1 var error = RegressionExtremelyRandomizedTreesLearner_Learn_Aptitude(100); Assert.AreEqual(0.0879343920732049, error, m_delta); } - + [TestMethod] public void RegressionExtremelyRandomizedTreesLearnerTest_Learn_Aptitude_Trees_100_SubSample() { @@ -74,7 +74,7 @@ public void RegressionExtremelyRandomizedTreesLearnerTest_Learn_Glass_100_Indice var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); var sut = new RegressionExtremelyRandomizedTreesLearner(100, 1, 100, 1, 0.0001, 1.0, 42, false); - + var indices = Enumerable.Range(0, targets.Length).ToArray(); indices.Shuffle(new Random(42)); indices = indices.Take((int)(targets.Length * 0.7)) @@ -111,7 +111,7 @@ double RegressionExtremelyRandomizedTreesLearnerTest_Learn_Glass(int trees, doub { var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); - var sut = new RegressionExtremelyRandomizedTreesLearner(trees, 1, 100, 1, 0.0001, + var sut = new RegressionExtremelyRandomizedTreesLearner(trees, 1, 100, 1, 0.0001, subSampleRatio, 42, false); var model = sut.Learn(observations, targets); @@ -126,7 
+126,7 @@ double RegressionExtremelyRandomizedTreesLearner_Learn_Aptitude(int trees, doubl { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var sut = new RegressionExtremelyRandomizedTreesLearner(trees, 1, 100, 1, 0.0001, + var sut = new RegressionExtremelyRandomizedTreesLearner(trees, 1, 100, 1, 0.0001, subSampleRatio, 42, false); var model = sut.Learn(observations, targets); diff --git a/src/SharpLearning.RandomForest.Test/Learners/RegressionRandomForestLearnerTest.cs b/src/SharpLearning.RandomForest.Test/Learners/RegressionRandomForestLearnerTest.cs index ee48852a..ff79c25c 100644 --- a/src/SharpLearning.RandomForest.Test/Learners/RegressionRandomForestLearnerTest.cs +++ b/src/SharpLearning.RandomForest.Test/Learners/RegressionRandomForestLearnerTest.cs @@ -74,7 +74,7 @@ public void RegressionRandomForestLearnerTest_Learn_Glass_100_Indices() var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); var sut = new RegressionRandomForestLearner(100, 1, 100, 1, 0.0001, 1.0, 42, false); - + var indices = Enumerable.Range(0, targets.Length).ToArray(); indices.Shuffle(new Random(42)); indices = indices.Take((int)(targets.Length * 0.7)) @@ -110,7 +110,7 @@ double RegressionRandomForestLearnerTest_Learn_Glass(int trees, double subSample { var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); - var sut = new RegressionRandomForestLearner(trees, 1, 100, 1, 0.0001, + var sut = new RegressionRandomForestLearner(trees, 1, 100, 1, 0.0001, subSampleRatio, 42, false); var model = sut.Learn(observations, targets); @@ -125,7 +125,7 @@ double RegressionRandomForestLearner_Learn_Aptitude(int trees, double subSampleR { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var sut = new RegressionRandomForestLearner(trees, 5, 100, 1, 0.0001, + var sut = new RegressionRandomForestLearner(trees, 5, 100, 1, 0.0001, subSampleRatio, 42, false); var model = sut.Learn(observations, targets); diff --git a/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs b/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs index 76764950..d99defd5 100644 --- a/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs +++ b/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs @@ -125,7 +125,7 @@ public void ClassificationForestModel_GetVariableImportance() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, + var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, { "PreviousExperience_month", 1 } }; var learner = new ClassificationRandomForestLearner(100, 5, 100, 1, 0.0001, 1.0, 42, false); diff --git a/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs b/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs index 7b858a69..e1d7fa82 100644 --- a/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs +++ b/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs @@ -124,7 +124,7 @@ public void RegressionForestModel_GetVariableImportance() { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); - var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, + var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, { "PreviousExperience_month", 1 } }; var learner = new RegressionRandomForestLearner(100, 5, 100, 1, 0.0001, 1.0, 42, false); diff --git 
a/src/SharpLearning.RandomForest/Learners/ClassificationExtremelyRandomizedTreesLearner.cs b/src/SharpLearning.RandomForest/Learners/ClassificationExtremelyRandomizedTreesLearner.cs index 9acc9591..cc6f0f2e 100644 --- a/src/SharpLearning.RandomForest/Learners/ClassificationExtremelyRandomizedTreesLearner.cs +++ b/src/SharpLearning.RandomForest/Learners/ClassificationExtremelyRandomizedTreesLearner.cs @@ -18,7 +18,7 @@ namespace SharpLearning.RandomForest.Learners /// Learns a classification version of Extremely randomized trees /// http://www.montefiore.ulg.ac.be/~ernst/uploads/news/id63/extremely-randomized-trees.pdf /// - public sealed class ClassificationExtremelyRandomizedTreesLearner + public sealed class ClassificationExtremelyRandomizedTreesLearner : IIndexedLearner , IIndexedLearner , ILearner @@ -47,13 +47,13 @@ public sealed class ClassificationExtremelyRandomizedTreesLearner /// If below 1.0 the algorithm changes to random patches /// Seed for the random number generator /// Use multi threading to speed up execution (default is true) - public ClassificationExtremelyRandomizedTreesLearner(int trees = 100, - int minimumSplitSize = 1, - int maximumTreeDepth = 2000, - int featuresPrSplit = 0, - double minimumInformationGain = .000001, - double subSampleRatio = 1.0, - int seed = 42, + public ClassificationExtremelyRandomizedTreesLearner(int trees = 100, + int minimumSplitSize = 1, + int maximumTreeDepth = 2000, + int featuresPrSplit = 0, + double minimumInformationGain = .000001, + double subSampleRatio = 1.0, + int seed = 42, bool runParallel = true) { if (trees < 1) { throw new ArgumentException("trees must be at least 1"); } @@ -93,7 +93,7 @@ public ClassificationForestModel Learn(F64Matrix observations, double[] targets) /// /// /// - public ClassificationForestModel Learn(F64Matrix observations, double[] targets, + public ClassificationForestModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -118,7 +118,7 @@ public ClassificationForestModel Learn(F64Matrix observations, double[] targets, { foreach (var indexToRandom in treeIndexToRandomGenerators) { - var tree = CreateTree(observations, targets, + var tree = CreateTree(observations, targets, indices, indexToRandom.Random); results.TryAdd(indexToRandom.Index, tree); @@ -129,7 +129,7 @@ public ClassificationForestModel Learn(F64Matrix observations, double[] targets, var rangePartitioner = Partitioner.Create(treeIndexToRandomGenerators, true); Parallel.ForEach(rangePartitioner, (indexToRandom, loopState) => { - var tree = CreateTree(observations, targets, + var tree = CreateTree(observations, targets, indices, indexToRandom.Random); results.TryAdd(indexToRandom.Index, tree); @@ -198,7 +198,7 @@ double[] VariableImportance(ClassificationDecisionTreeModel[] models, int number return rawVariableImportance; } - ClassificationDecisionTreeModel CreateTree(F64Matrix observations, double[] targets, + ClassificationDecisionTreeModel CreateTree(F64Matrix observations, double[] targets, int[] indices, Random random) { var learner = new DecisionTreeLearner( diff --git a/src/SharpLearning.RandomForest/Learners/ClassificationRandomForestLearner.cs b/src/SharpLearning.RandomForest/Learners/ClassificationRandomForestLearner.cs index 34649a7b..8f70088b 100644 --- a/src/SharpLearning.RandomForest/Learners/ClassificationRandomForestLearner.cs +++ b/src/SharpLearning.RandomForest/Learners/ClassificationRandomForestLearner.cs @@ -16,12 +16,12 @@ namespace 
SharpLearning.RandomForest.Learners /// http://en.wikipedia.org/wiki/Random_forest /// http://www.stat.berkeley.edu/~breiman/RandomForests/cc_home.htm /// - public sealed class ClassificationRandomForestLearner + public sealed class ClassificationRandomForestLearner : IIndexedLearner , IIndexedLearner , ILearner , ILearner - + { readonly int m_trees; int m_featuresPrSplit; @@ -45,13 +45,13 @@ public sealed class ClassificationRandomForestLearner /// If below 1.0 the algorithm changes to random patches /// Seed for the random number generator /// Use multi threading to speed up execution (default is true) - public ClassificationRandomForestLearner(int trees = 100, - int minimumSplitSize = 1, + public ClassificationRandomForestLearner(int trees = 100, + int minimumSplitSize = 1, int maximumTreeDepth = 2000, - int featuresPrSplit = 0, - double minimumInformationGain = .000001, - double subSampleRatio = 1.0, - int seed = 42, + int featuresPrSplit = 0, + double minimumInformationGain = .000001, + double subSampleRatio = 1.0, + int seed = 42, bool runParallel = true) { if (trees < 1) { throw new ArgumentException("trees must be at least 1"); } @@ -91,7 +91,7 @@ public ClassificationForestModel Learn(F64Matrix observations, double[] targets) /// /// /// - public ClassificationForestModel Learn(F64Matrix observations, double[] targets, + public ClassificationForestModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -116,7 +116,7 @@ public ClassificationForestModel Learn(F64Matrix observations, double[] targets, { foreach (var indexToRandom in treeIndexToRandomGenerators) { - var tree = CreateTree(observations, targets, + var tree = CreateTree(observations, targets, indices, indexToRandom.Random); results.TryAdd(indexToRandom.Index, tree); @@ -124,10 +124,10 @@ public ClassificationForestModel Learn(F64Matrix observations, double[] targets, } else { - var rangePartitioner = Partitioner.Create(treeIndexToRandomGenerators, true); + var rangePartitioner = Partitioner.Create(treeIndexToRandomGenerators, true); Parallel.ForEach(rangePartitioner, (indexToRandom, loopState) => { - var tree = CreateTree(observations, targets, + var tree = CreateTree(observations, targets, indices, indexToRandom.Random); results.TryAdd(indexToRandom.Index, tree); @@ -195,7 +195,7 @@ double[] VariableImportance(ClassificationDecisionTreeModel[] models, int number return rawVariableImportance; } - ClassificationDecisionTreeModel CreateTree(F64Matrix observations, double[] targets, + ClassificationDecisionTreeModel CreateTree(F64Matrix observations, double[] targets, int[] indices, Random random) { var learner = new ClassificationDecisionTreeLearner(m_maximumTreeDepth, m_minimumSplitSize, m_featuresPrSplit, diff --git a/src/SharpLearning.RandomForest/Learners/RegressionExtremelyRandomizedTreesLearner.cs b/src/SharpLearning.RandomForest/Learners/RegressionExtremelyRandomizedTreesLearner.cs index 3c1428e1..8837a6ec 100644 --- a/src/SharpLearning.RandomForest/Learners/RegressionExtremelyRandomizedTreesLearner.cs +++ b/src/SharpLearning.RandomForest/Learners/RegressionExtremelyRandomizedTreesLearner.cs @@ -43,13 +43,13 @@ public sealed class RegressionExtremelyRandomizedTreesLearner : IIndexedLearner< /// If below 1.0 the algorithm changes to random patches /// Seed for the random number generator /// Use multi threading to speed up execution (default is true) - public RegressionExtremelyRandomizedTreesLearner(int trees = 100, - int minimumSplitSize 
= 1, + public RegressionExtremelyRandomizedTreesLearner(int trees = 100, + int minimumSplitSize = 1, int maximumTreeDepth = 2000, - int featuresPrSplit = 0, - double minimumInformationGain = .000001, - double subSampleRatio = 1.0 , - int seed = 42, + int featuresPrSplit = 0, + double minimumInformationGain = .000001, + double subSampleRatio = 1.0, + int seed = 42, bool runParallel = true) { if (trees < 1) { throw new ArgumentException("trees must be at least 1"); } @@ -57,7 +57,7 @@ public RegressionExtremelyRandomizedTreesLearner(int trees = 100, if (minimumSplitSize <= 0) { throw new ArgumentException("minimum split size must be larger than 0"); } if (maximumTreeDepth <= 0) { throw new ArgumentException("maximum tree depth must be larger than 0"); } if (minimumInformationGain <= 0) { throw new ArgumentException("minimum information gain must be larger than 0"); } - + m_trees = trees; m_minimumSplitSize = minimumSplitSize; m_maximumTreeDepth = maximumTreeDepth; @@ -88,7 +88,7 @@ public RegressionForestModel Learn(F64Matrix observations, double[] targets) /// /// /// - public RegressionForestModel Learn(F64Matrix observations, double[] targets, + public RegressionForestModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -113,7 +113,7 @@ public RegressionForestModel Learn(F64Matrix observations, double[] targets, { foreach (var indexToRandom in treeIndexToRandomGenerators) { - var tree = CreateTree(observations, targets, + var tree = CreateTree(observations, targets, indices, indexToRandom.Random); results.TryAdd(indexToRandom.Index, tree); @@ -124,7 +124,7 @@ public RegressionForestModel Learn(F64Matrix observations, double[] targets, var rangePartitioner = Partitioner.Create(treeIndexToRandomGenerators, true); Parallel.ForEach(rangePartitioner, (indexToRandom, loopState) => { - var tree = CreateTree(observations, targets, + var tree = CreateTree(observations, targets, indices, indexToRandom.Random); results.TryAdd(indexToRandom.Index, tree); @@ -173,7 +173,7 @@ double[] VariableImportance(RegressionDecisionTreeModel[] models, int numberOfFe return rawVariableImportance; } - RegressionDecisionTreeModel CreateTree(F64Matrix observations, double[] targets, + RegressionDecisionTreeModel CreateTree(F64Matrix observations, double[] targets, int[] indices, Random random) { var learner = new DecisionTreeLearner( diff --git a/src/SharpLearning.RandomForest/Learners/RegressionRandomForestLearner.cs b/src/SharpLearning.RandomForest/Learners/RegressionRandomForestLearner.cs index fa62708c..aeb9cf22 100644 --- a/src/SharpLearning.RandomForest/Learners/RegressionRandomForestLearner.cs +++ b/src/SharpLearning.RandomForest/Learners/RegressionRandomForestLearner.cs @@ -40,13 +40,13 @@ public sealed class RegressionRandomForestLearner : IIndexedLearner, ILe /// If below 1.0 the algorithm changes to random patches /// Seed for the random number generator /// Use multi threading to speed up execution (default is true) - public RegressionRandomForestLearner(int trees = 100, - int minimumSplitSize = 1, + public RegressionRandomForestLearner(int trees = 100, + int minimumSplitSize = 1, int maximumTreeDepth = 2000, - int featuresPrSplit = 0, - double minimumInformationGain = .000001, - double subSampleRatio = 1.0, - int seed = 42, + int featuresPrSplit = 0, + double minimumInformationGain = .000001, + double subSampleRatio = 1.0, + int seed = 42, bool runParallel = true) { if (trees < 1) { throw new ArgumentException("trees must be 
at least 1"); } @@ -85,7 +85,7 @@ public RegressionForestModel Learn(F64Matrix observations, double[] targets) /// /// /// - public RegressionForestModel Learn(F64Matrix observations, double[] targets, + public RegressionForestModel Learn(F64Matrix observations, double[] targets, int[] indices) { Checks.VerifyObservationsAndTargets(observations, targets); @@ -166,7 +166,7 @@ double[] VariableImportance(RegressionDecisionTreeModel[] models, int numberOfFe return rawVariableImportance; } - RegressionDecisionTreeModel CreateTree(F64Matrix observations, double[] targets, + RegressionDecisionTreeModel CreateTree(F64Matrix observations, double[] targets, int[] indices, Random random) { var learner = new RegressionDecisionTreeLearner(m_maximumTreeDepth, diff --git a/src/SharpLearning.RandomForest/Models/ClassificationForestModel.cs b/src/SharpLearning.RandomForest/Models/ClassificationForestModel.cs index 471315a3..4714fa8f 100644 --- a/src/SharpLearning.RandomForest/Models/ClassificationForestModel.cs +++ b/src/SharpLearning.RandomForest/Models/ClassificationForestModel.cs @@ -44,7 +44,7 @@ public double Predict(double[] observation) var prediction = Trees.Select(m => m.Predict(observation)) .GroupBy(p => p).OrderByDescending(g => g.Count()) .First().Key; - + return prediction; } @@ -82,7 +82,7 @@ public ProbabilityPrediction PredictProbability(double[] observation) { foreach (var probability in model) { - if(probabilities.ContainsKey(probability.Key)) + if (probabilities.ContainsKey(probability.Key)) { probabilities[probability.Key] += probability.Value; } @@ -174,7 +174,7 @@ public void Save(Func writer) /// /// /// - ProbabilityPrediction IPredictor.Predict(double[] observation) + ProbabilityPrediction IPredictor.Predict(double[] observation) => PredictProbability(observation); /// @@ -182,7 +182,7 @@ ProbabilityPrediction IPredictor.Predict(double[] observa /// /// /// - ProbabilityPrediction[] IPredictor.Predict(F64Matrix observations) + ProbabilityPrediction[] IPredictor.Predict(F64Matrix observations) => PredictProbability(observations); } } diff --git a/src/SharpLearning.RandomForest/Models/RegressionForestModel.cs b/src/SharpLearning.RandomForest/Models/RegressionForestModel.cs index 4d66953f..bfc46c12 100644 --- a/src/SharpLearning.RandomForest/Models/RegressionForestModel.cs +++ b/src/SharpLearning.RandomForest/Models/RegressionForestModel.cs @@ -43,7 +43,7 @@ public double Predict(double[] observation) { var prediction = Trees.Select(m => m.Predict(observation)) .Average(); - + return prediction; } diff --git a/src/SharpLearning.XGBoost.Test/Models/FeatureImportanceParserTest.cs b/src/SharpLearning.XGBoost.Test/Models/FeatureImportanceParserTest.cs index 009190db..09e3d913 100644 --- a/src/SharpLearning.XGBoost.Test/Models/FeatureImportanceParserTest.cs +++ b/src/SharpLearning.XGBoost.Test/Models/FeatureImportanceParserTest.cs @@ -32,7 +32,7 @@ public void FeatureImportanceParser_Parse() { Assert.AreEqual(expected[i], actual[i], m_delta); } - } + } readonly string[] m_textTrees = new string[] { m_tree1, m_tree2 }; diff --git a/src/SharpLearning.XGBoost.Test/Models/RegressionXGBoostModelTest.cs b/src/SharpLearning.XGBoost.Test/Models/RegressionXGBoostModelTest.cs index 01bce9d3..ee74d0a4 100644 --- a/src/SharpLearning.XGBoost.Test/Models/RegressionXGBoostModelTest.cs +++ b/src/SharpLearning.XGBoost.Test/Models/RegressionXGBoostModelTest.cs @@ -102,7 +102,7 @@ public void RegressionXGBoostModel_GetVariableImportance() { "f1", 9.00304680229703 }, { "f5", 7.10296482157573 }, { "f4", 
6.43327754840246 }, - { "f8", 4.61553313147666 }, + { "f8", 4.61553313147666 }, }; Assert.AreEqual(expected.Count, actual.Count); diff --git a/src/SharpLearning.XGBoost/Conversions.cs b/src/SharpLearning.XGBoost/Conversions.cs index fb6d8af5..52315ab0 100644 --- a/src/SharpLearning.XGBoost/Conversions.cs +++ b/src/SharpLearning.XGBoost/Conversions.cs @@ -49,7 +49,7 @@ public static float[][] ToFloatJaggedArray(this F64Matrix matrix, int[] rowIndic { var inputRow = rowIndices[outputRow]; var rowArray = new float[cols]; - + for (int col = 0; col < cols; col++) { rowArray[col] = (float)matrix.At(inputRow, col); @@ -78,7 +78,7 @@ public static float[] ToFloat(this double[] array) /// public static float[] ToFloat(this double[] array, int[] indices) { - return indices.Select(i => (float)array[i]).ToArray(); + return indices.Select(i => (float)array[i]).ToArray(); } /// diff --git a/src/SharpLearning.XGBoost/Learners/ArgumentChecks.cs b/src/SharpLearning.XGBoost/Learners/ArgumentChecks.cs index 6bf0f1ba..0b91fbb6 100644 --- a/src/SharpLearning.XGBoost/Learners/ArgumentChecks.cs +++ b/src/SharpLearning.XGBoost/Learners/ArgumentChecks.cs @@ -12,7 +12,7 @@ internal static void ThrowOnArgumentLessThan(string name, double value, double m } } - internal static void ThrowOnArgumentLessThanOrHigherThan(string name, double value, + internal static void ThrowOnArgumentLessThanOrHigherThan(string name, double value, double min, double max) { if (value < min || value > max) diff --git a/src/SharpLearning.XGBoost/Learners/ClassificationXGBoostLearner.cs b/src/SharpLearning.XGBoost/Learners/ClassificationXGBoostLearner.cs index cb92c4f5..adf542f6 100644 --- a/src/SharpLearning.XGBoost/Learners/ClassificationXGBoostLearner.cs +++ b/src/SharpLearning.XGBoost/Learners/ClassificationXGBoostLearner.cs @@ -11,7 +11,7 @@ namespace SharpLearning.XGBoost.Learners /// /// Classification learner for XGBoost /// - public sealed class ClassificationXGBoostLearner + public sealed class ClassificationXGBoostLearner : ILearner , IIndexedLearner , ILearner @@ -52,8 +52,8 @@ public sealed class ClassificationXGBoostLearner /// The initial prediction score of all instances, global bias. (default is 0.5) /// Random number seed. (default is 0) /// Value in the data which needs to be present as a missing value. 
(default is NaN) - public ClassificationXGBoostLearner(int maximumTreeDepth = 3, - double learningRate = 0.1, + public ClassificationXGBoostLearner(int maximumTreeDepth = 3, + double learningRate = 0.1, int estimators = 100, bool silent = true, ClassificationObjective objective = ClassificationObjective.Softmax, @@ -64,16 +64,16 @@ public ClassificationXGBoostLearner(int maximumTreeDepth = 3, double dropoutRate = 0.0, bool oneDrop = false, double skipDrop = 0.0, - int numberOfThreads = -1, - double gamma = 0, + int numberOfThreads = -1, + double gamma = 0, int minChildWeight = 1, - int maxDeltaStep = 0, - double subSample = 1, + int maxDeltaStep = 0, + double subSample = 1, double colSampleByTree = 1, - double colSampleByLevel = 1, - double l1Regularization = 0, + double colSampleByLevel = 1, + double l1Regularization = 0, double l2Reguralization = 1, - double scalePosWeight = 1, + double scalePosWeight = 1, double baseScore = 0.5, int seed = 0, double missing = double.NaN) { @@ -96,7 +96,7 @@ public ClassificationXGBoostLearner(int maximumTreeDepth = 3, m_parameters[ParameterNames.Estimators] = estimators; m_parameters[ParameterNames.Silent] = silent; - if(objective == ClassificationObjective.Softmax) + if (objective == ClassificationObjective.Softmax) { // SoftMax and SoftProp are the same objective, // but softprop returns probabilities. @@ -157,13 +157,13 @@ public ClassificationXGBoostModel Learn(F64Matrix observations, double[] targets { Checks.VerifyObservationsAndTargets(observations, targets); Checks.VerifyIndices(indices, observations, targets); - + var floatObservations = observations.ToFloatJaggedArray(indices); var floatTargets = targets.ToFloat(indices); // Only specify XGBoost number of classes if the objective is multi-class. var objective = (string)m_parameters[ParameterNames.objective]; - if (objective == ClassificationObjective.Softmax.ToXGBoostString() || + if (objective == ClassificationObjective.Softmax.ToXGBoostString() || objective == ClassificationObjective.SoftProb.ToXGBoostString()) { var numberOfClasses = floatTargets.Distinct().Count(); diff --git a/src/SharpLearning.XGBoost/Learners/RegressionXGBoostLearner.cs b/src/SharpLearning.XGBoost/Learners/RegressionXGBoostLearner.cs index b1513db4..3d0745bf 100644 --- a/src/SharpLearning.XGBoost/Learners/RegressionXGBoostLearner.cs +++ b/src/SharpLearning.XGBoost/Learners/RegressionXGBoostLearner.cs @@ -48,8 +48,8 @@ public sealed class RegressionXGBoostLearner : ILearner, IIndexedLearner /// Random number seed. (default is 0) /// Value in the data which needs to be present as a missing value. 
(default is NaN) public RegressionXGBoostLearner( - int maximumTreeDepth = 3, - double learningRate = 0.1, + int maximumTreeDepth = 3, + double learningRate = 0.1, int estimators = 100, bool silent = true, RegressionObjective objective = RegressionObjective.LinearRegression, @@ -60,16 +60,16 @@ public RegressionXGBoostLearner( double dropoutRate = 0.0, bool oneDrop = false, double skipDrop = 0.0, - int numberOfThreads = -1, - double gamma = 0, + int numberOfThreads = -1, + double gamma = 0, int minChildWeight = 1, - int maxDeltaStep = 0, - double subSample = 1, + int maxDeltaStep = 0, + double subSample = 1, double colSampleByTree = 1, - double colSampleByLevel = 1, - double l1Regularization = 0, + double colSampleByLevel = 1, + double l1Regularization = 0, double l2Reguralization = 1, - double scalePosWeight = 1, + double scalePosWeight = 1, double baseScore = 0.5, int seed = 0, double missing = double.NaN) @@ -155,7 +155,7 @@ public RegressionXGBoostModel Learn(F64Matrix observations, double[] targets, in { booster.Update(train, iteration); } - + return new RegressionXGBoostModel(booster); } } diff --git a/src/SharpLearning.XGBoost/Models/ClassificationXGBoostModel.cs b/src/SharpLearning.XGBoost/Models/ClassificationXGBoostModel.cs index ad17a4f3..9e0b53a6 100644 --- a/src/SharpLearning.XGBoost/Models/ClassificationXGBoostModel.cs +++ b/src/SharpLearning.XGBoost/Models/ClassificationXGBoostModel.cs @@ -11,7 +11,7 @@ namespace SharpLearning.XGBoost.Models /// /// /// - public sealed class ClassificationXGBoostModel + public sealed class ClassificationXGBoostModel : IDisposable , IPredictorModel , IPredictorModel @@ -162,7 +162,7 @@ public Dictionary GetVariableImportance(Dictionary /// /// /// - public static ClassificationXGBoostModel Load(string modelFilePath) + public static ClassificationXGBoostModel Load(string modelFilePath) => new ClassificationXGBoostModel(new Booster(modelFilePath)); /// @@ -182,10 +182,10 @@ public void Dispose() } } - ProbabilityPrediction IPredictor.Predict(double[] observation) + ProbabilityPrediction IPredictor.Predict(double[] observation) => PredictProbability(observation); - ProbabilityPrediction[] IPredictor.Predict(F64Matrix observations) + ProbabilityPrediction[] IPredictor.Predict(F64Matrix observations) => PredictProbability(observations); static ProbabilityPrediction PredictMultiClassProbability(float[] prediction) diff --git a/src/SharpLearning.XGBoost/Models/FeatureImportanceParser.cs b/src/SharpLearning.XGBoost/Models/FeatureImportanceParser.cs index 061c754a..5f4d8911 100644 --- a/src/SharpLearning.XGBoost/Models/FeatureImportanceParser.cs +++ b/src/SharpLearning.XGBoost/Models/FeatureImportanceParser.cs @@ -25,7 +25,7 @@ public static double[] ParseFromTreeDump(string[] textTrees, int numberOfFeature var lines = tree.Split(newLine, StringSplitOptions.RemoveEmptyEntries); foreach (var line in lines) { - if(!line.Contains("[") || line.Contains("booster")) + if (!line.Contains("[") || line.Contains("booster")) { // Continue if line does not contain a split. 
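// ----------------------------------------------------------------------------
// Note: the hunks in this patch are formatting-only. Two patterns repeat
// throughout: trailing whitespace is stripped from otherwise unchanged lines,
// and control-flow keywords gain a space before the opening parenthesis, e.g.:
//
//     if(probabilities.ContainsKey(probability.Key))   // before
//     if (probabilities.ContainsKey(probability.Key))  // after
//
// Neither pattern changes behavior; the diffs touch layout only.
// ----------------------------------------------------------------------------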
continue; @@ -38,7 +38,7 @@ public static double[] ParseFromTreeDump(string[] textTrees, int numberOfFeature var featureIndex = int.Parse(name.Split('f')[1]); // extract gain or cover - var gain = FloatingPointConversion.ToF64(line.Split(importanceType, + var gain = FloatingPointConversion.ToF64(line.Split(importanceType, StringSplitOptions.RemoveEmptyEntries)[1].Split(',')[0]); // add to featureImportance @@ -47,6 +47,6 @@ public static double[] ParseFromTreeDump(string[] textTrees, int numberOfFeature } return rawFeatureImportance; - } + } } } diff --git a/src/SharpLearning.XGBoost/Models/RegressionXGBoostModel.cs b/src/SharpLearning.XGBoost/Models/RegressionXGBoostModel.cs index 1ee242fe..b74f1baf 100644 --- a/src/SharpLearning.XGBoost/Models/RegressionXGBoostModel.cs +++ b/src/SharpLearning.XGBoost/Models/RegressionXGBoostModel.cs @@ -93,7 +93,7 @@ public Dictionary GetVariableImportance(Dictionary /// /// /// - public static RegressionXGBoostModel Load(string modelFilePath) + public static RegressionXGBoostModel Load(string modelFilePath) => new RegressionXGBoostModel(new Booster(modelFilePath)); /// From d85e0d7c94a37c49290f45e3c19749f58afbdec0 Mon Sep 17 00:00:00 2001 From: mdabros Date: Wed, 24 Jan 2024 21:24:38 +0100 Subject: [PATCH 03/15] Fix CA1052 --- .../AssemblyInitializeCultureTest.cs | 2 +- .../AssemblyInitializeCultureTest.cs | 2 +- .../AssemblyInitializeCultureTest.cs | 2 +- .../AssemblyInitializeCultureTest.cs | 4 ++-- .../AssemblyInitializeCultureTest.cs | 2 +- .../AssemblyInitializeCultureTest.cs | 2 +- .../AssemblyInitializeCultureTest.cs | 2 +- .../AssemblyInitializeCultureTest.cs | 2 +- .../AssemblyInitializeCultureTest.cs | 2 +- .../AssemblyInitializeCultureTest.cs | 2 +- .../AssemblyInitializeCultureTest.cs | 2 +- .../AssemblyInitializeCultureTest.cs | 2 +- .../AssemblyInitializeCultureTest.cs | 2 +- 13 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/SharpLearning.AdaBoost.Test/AssemblyInitializeCultureTest.cs b/src/SharpLearning.AdaBoost.Test/AssemblyInitializeCultureTest.cs index 99a58979..9c059165 100644 --- a/src/SharpLearning.AdaBoost.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.AdaBoost.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.AdaBoost.Test { [TestClass] - public class AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) diff --git a/src/SharpLearning.Containers.Test/AssemblyInitializeCultureTest.cs b/src/SharpLearning.Containers.Test/AssemblyInitializeCultureTest.cs index 3fb36c14..d1b92a01 100644 --- a/src/SharpLearning.Containers.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.Containers.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.Containers.Test { [TestClass] - public class AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) diff --git a/src/SharpLearning.CrossValidation.Test/AssemblyInitializeCultureTest.cs b/src/SharpLearning.CrossValidation.Test/AssemblyInitializeCultureTest.cs index 64731120..2f8c7821 100644 --- a/src/SharpLearning.CrossValidation.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.CrossValidation.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.CrossValidation.Test { [TestClass] - public class 
AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) diff --git a/src/SharpLearning.DecisionTrees.Test/AssemblyInitializeCultureTest.cs b/src/SharpLearning.DecisionTrees.Test/AssemblyInitializeCultureTest.cs index 4476317b..b64eda69 100644 --- a/src/SharpLearning.DecisionTrees.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.DecisionTrees.Test { [TestClass] - public class AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) @@ -17,4 +17,4 @@ public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) Thread.CurrentThread.CurrentUICulture = culture; } } -} \ No newline at end of file +} diff --git a/src/SharpLearning.Ensemble.Test/AssemblyInitializeCultureTest.cs b/src/SharpLearning.Ensemble.Test/AssemblyInitializeCultureTest.cs index 3c872b24..53f89420 100644 --- a/src/SharpLearning.Ensemble.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.Ensemble.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.Ensemble.Test { [TestClass] - public class AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) diff --git a/src/SharpLearning.FeatureTransformations.Test/AssemblyInitializeCultureTest.cs b/src/SharpLearning.FeatureTransformations.Test/AssemblyInitializeCultureTest.cs index cecd72d3..ddc2b0e3 100644 --- a/src/SharpLearning.FeatureTransformations.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.FeatureTransformations.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.FeatureTransformations.Test { [TestClass] - public class AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) diff --git a/src/SharpLearning.GradientBoost.Test/AssemblyInitializeCultureTest.cs b/src/SharpLearning.GradientBoost.Test/AssemblyInitializeCultureTest.cs index b7961876..d4b79e8e 100644 --- a/src/SharpLearning.GradientBoost.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.GradientBoost.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.GradientBoost.Test { [TestClass] - public class AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) diff --git a/src/SharpLearning.InputOutput.Test/AssemblyInitializeCultureTest.cs b/src/SharpLearning.InputOutput.Test/AssemblyInitializeCultureTest.cs index ed29ea50..9f69d01e 100644 --- a/src/SharpLearning.InputOutput.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.InputOutput.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.InputOutput.Test.Csv { [TestClass] - public class AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) diff --git a/src/SharpLearning.Metrics.Test/AssemblyInitializeCultureTest.cs 
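// ----------------------------------------------------------------------------
// Note on CA1052 ("static holder types should be static or NotInheritable"):
// each AssemblyInitializeCultureTest class contains only the static
// [AssemblyInitialize] method, so declaring the class static removes the
// implicit public constructor and documents that it is never instantiated.
// A minimal sketch of the pattern, with a hypothetical class name:
//
//     [TestClass]
//     public static class CultureFixture
//     {
//         [AssemblyInitialize]
//         public static void SetInvariantCulture(TestContext c)
//         {
//             Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;
//         }
//     }
//
// MSTest discovers [AssemblyInitialize] on static classes, so the attribute
// keeps working after the change.
// ----------------------------------------------------------------------------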
b/src/SharpLearning.Metrics.Test/AssemblyInitializeCultureTest.cs index 503e5a62..b142f1dc 100644 --- a/src/SharpLearning.Metrics.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.Metrics.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.Metrics.Test { [TestClass] - public class AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) diff --git a/src/SharpLearning.Neural.Test/AssemblyInitializeCultureTest.cs b/src/SharpLearning.Neural.Test/AssemblyInitializeCultureTest.cs index 6bd59143..6b7d3d50 100644 --- a/src/SharpLearning.Neural.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.Neural.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.Neural.Test { [TestClass] - public class AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) diff --git a/src/SharpLearning.Optimization.Test/AssemblyInitializeCultureTest.cs b/src/SharpLearning.Optimization.Test/AssemblyInitializeCultureTest.cs index 06807d69..5f9c6b31 100644 --- a/src/SharpLearning.Optimization.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.Optimization.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.Optimization.Test { [TestClass] - public class AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) diff --git a/src/SharpLearning.RandomForest.Test/AssemblyInitializeCultureTest.cs b/src/SharpLearning.RandomForest.Test/AssemblyInitializeCultureTest.cs index 71762cfd..a9680299 100644 --- a/src/SharpLearning.RandomForest.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.RandomForest.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.RandomForest.Test { [TestClass] - public class AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) diff --git a/src/SharpLearning.XGBoost.Test/AssemblyInitializeCultureTest.cs b/src/SharpLearning.XGBoost.Test/AssemblyInitializeCultureTest.cs index b5a75b9f..2d24ef3f 100644 --- a/src/SharpLearning.XGBoost.Test/AssemblyInitializeCultureTest.cs +++ b/src/SharpLearning.XGBoost.Test/AssemblyInitializeCultureTest.cs @@ -5,7 +5,7 @@ namespace SharpLearning.XGBoost.Test { [TestClass] - public class AssemblyInitializeCultureTest + public static class AssemblyInitializeCultureTest { [AssemblyInitialize] public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c) From 2661364be06498a78385ad7cb466938397672874 Mon Sep 17 00:00:00 2001 From: mdabros Date: Wed, 24 Jan 2024 21:25:50 +0100 Subject: [PATCH 04/15] Remove public constructor --- .../ClassificationImpurityCalculator.cs | 7 ------- 1 file changed, 7 deletions(-) diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs index e6a0b8c0..48a6186d 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs 
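// ----------------------------------------------------------------------------
// Note: the hunk below deletes an empty public constructor from the abstract
// ClassificationImpurityCalculator. Abstract types should not expose public
// constructors (cf. CA1012): callers cannot instantiate the type anyway, and
// with no explicit constructor the compiler supplies a protected parameterless
// one, which is the accessibility a derived class actually needs:
//
//     public abstract class ClassificationImpurityCalculator
//     {
//         // implicit: protected ClassificationImpurityCalculator() { }
//     }
// ----------------------------------------------------------------------------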
@@ -72,13 +72,6 @@ public abstract class ClassificationImpurityCalculator /// public double WeightedRight { get { return m_weightedRight; } } - /// - /// - /// - public ClassificationImpurityCalculator() - { - } - /// /// Initialize the calculator with targets, weights and work interval /// From 6381976986fede55dae8545c8bd3fc9630432007 Mon Sep 17 00:00:00 2001 From: mdabros Date: Wed, 24 Jan 2024 21:28:17 +0100 Subject: [PATCH 05/15] remove private/internal where not needed --- .../Views/F64MatrixViewTest.cs | 8 ++++---- src/SharpLearning.CrossValidation/ModelDisposer.cs | 2 +- .../Learners/RegressionDecisionTreeLearnerTest.cs | 4 ++-- .../ImpurityCalculators/TargetCounts.cs | 2 +- src/SharpLearning.Metrics/Classification/Utilities.cs | 2 +- src/SharpLearning.Neural/Models/NeuralNet.cs | 2 +- src/SharpLearning.XGBoost/Learners/ArgumentChecks.cs | 2 +- src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs | 2 +- src/SharpLearning.XGBoost/ParameterNames.cs | 2 +- 9 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs b/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs index 3d49df45..61600ff7 100644 --- a/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs +++ b/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs @@ -85,12 +85,12 @@ public void F64MatrixView_ColumnLargePointerOffset() /// A matrix which needs a byte pointer offset larger than /// int.MaxValue to access all records in the backing array - private F64Matrix LargeMatrix() + F64Matrix LargeMatrix() { return new F64Matrix(int.MaxValue / sizeof(double) + 2, 1); } - private F64Matrix Matrix() + F64Matrix Matrix() { var features = new double[9] { 1, 2, 3, 10, 20, 30, @@ -99,7 +99,7 @@ private F64Matrix Matrix() return new F64Matrix(features, 3, 3); } - private unsafe void AssertColumnView(double[] column, F64MatrixColumnView columnView) + unsafe void AssertColumnView(double[] column, F64MatrixColumnView columnView) { for (int i = 0; i < column.Length; i++) { @@ -108,7 +108,7 @@ private unsafe void AssertColumnView(double[] column, F64MatrixColumnView column } - private unsafe void AssertMatrixView(IMatrix matrix, F64MatrixView view) + unsafe void AssertMatrixView(IMatrix matrix, F64MatrixView view) { for (int i = 0; i < matrix.RowCount; i++) { diff --git a/src/SharpLearning.CrossValidation/ModelDisposer.cs b/src/SharpLearning.CrossValidation/ModelDisposer.cs index 370746ec..340f2bda 100644 --- a/src/SharpLearning.CrossValidation/ModelDisposer.cs +++ b/src/SharpLearning.CrossValidation/ModelDisposer.cs @@ -3,7 +3,7 @@ namespace SharpLearning.CrossValidation { - internal static class ModelDisposer + static class ModelDisposer { internal static void DisposeIfDisposable(IPredictorModel model) { diff --git a/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs b/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs index 3b6705f4..d84e0319 100644 --- a/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs @@ -82,7 +82,7 @@ public void RegressionDecisionTreeLearner_Learn_Depth_100_Weight_100() Assert.AreEqual(0.032256921590414704, error, 0.0000001); } - private static double RegressionDecisionTreeLearner_Learn(int treeDepth) + static double RegressionDecisionTreeLearner_Learn(int treeDepth) { var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); 
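// ----------------------------------------------------------------------------
// Note on removing redundant `private`/`internal` modifiers: in C#, members of
// a class default to private and top-level types default to internal, so these
// edits change no accessibility. For example (shapes taken from the hunks in
// this patch):
//
//     private F64Matrix Matrix() { ... }   // before
//     F64Matrix Matrix() { ... }           // after: still private
//
//     internal static class ModelDisposer  // before
//     static class ModelDisposer           // after: still internal
// ----------------------------------------------------------------------------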
@@ -97,7 +97,7 @@ private static double RegressionDecisionTreeLearner_Learn(int treeDepth) return error; } - private double RegressionDecisionTreeLearner_Learn_Weighted(int treeDepth, double weight) + double RegressionDecisionTreeLearner_Learn_Weighted(int treeDepth, double weight) { var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/TargetCounts.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/TargetCounts.cs index 4f5b355a..5d13eef0 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/TargetCounts.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/TargetCounts.cs @@ -7,7 +7,7 @@ namespace SharpLearning.DecisionTrees.ImpurityCalculators /// Offset is used for cases with negative target names like -1. /// This is a lot faster than mapping using a dictionary since this solution simply indexes into an array /// - internal class TargetCounts + class TargetCounts { double[] m_counts; diff --git a/src/SharpLearning.Metrics/Classification/Utilities.cs b/src/SharpLearning.Metrics/Classification/Utilities.cs index e298a8f3..f280c39d 100644 --- a/src/SharpLearning.Metrics/Classification/Utilities.cs +++ b/src/SharpLearning.Metrics/Classification/Utilities.cs @@ -3,7 +3,7 @@ namespace SharpLearning.Metrics.Classification { - internal static class Utilities + static class Utilities { static internal List UniqueTargetValues(T[] targets, T[] predictions) { diff --git a/src/SharpLearning.Neural/Models/NeuralNet.cs b/src/SharpLearning.Neural/Models/NeuralNet.cs index 4a4fd1de..d4a10009 100644 --- a/src/SharpLearning.Neural/Models/NeuralNet.cs +++ b/src/SharpLearning.Neural/Models/NeuralNet.cs @@ -35,7 +35,7 @@ public NeuralNet(Initialization initialization = Initialization.GlorotUniform) /// /// /// - private NeuralNet(List layers) + NeuralNet(List layers) { Layers = layers ?? 
throw new ArgumentNullException(nameof(layers)); } diff --git a/src/SharpLearning.XGBoost/Learners/ArgumentChecks.cs b/src/SharpLearning.XGBoost/Learners/ArgumentChecks.cs index 0b91fbb6..4728046e 100644 --- a/src/SharpLearning.XGBoost/Learners/ArgumentChecks.cs +++ b/src/SharpLearning.XGBoost/Learners/ArgumentChecks.cs @@ -2,7 +2,7 @@ namespace SharpLearning.XGBoost.Learners { - internal static class ArgumentChecks + static class ArgumentChecks { internal static void ThrowOnArgumentLessThan(string name, double value, double min) { diff --git a/src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs b/src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs index ad9b459a..4edf8291 100644 --- a/src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs +++ b/src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs @@ -43,7 +43,7 @@ public static GBMTree ConvertXGBoostTextTreeToGBMTree(string textTree) return new GBMTree(nodes); } - private static List ConvertXGBoostNodesToGBMNodes(string textTree) + static List ConvertXGBoostNodesToGBMNodes(string textTree) { var newLine = new string[] { "\n" }; var lines = textTree.Split(newLine, StringSplitOptions.RemoveEmptyEntries); diff --git a/src/SharpLearning.XGBoost/ParameterNames.cs b/src/SharpLearning.XGBoost/ParameterNames.cs index b1edcae0..6210315f 100644 --- a/src/SharpLearning.XGBoost/ParameterNames.cs +++ b/src/SharpLearning.XGBoost/ParameterNames.cs @@ -1,6 +1,6 @@ namespace SharpLearning.XGBoost { - internal static class ParameterNames + static class ParameterNames { /// /// Maximum tree depth for base learners From 5eeb4e5b9738c50270913a6cd2544fa1ab71f889 Mon Sep 17 00:00:00 2001 From: mdabros Date: Wed, 24 Jan 2024 21:29:03 +0100 Subject: [PATCH 06/15] Fix CA1802 --- .../Models/FeatureImportanceParserTest.cs | 4 ++-- .../Models/XGBoostTreeConverterTest.cs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/SharpLearning.XGBoost.Test/Models/FeatureImportanceParserTest.cs b/src/SharpLearning.XGBoost.Test/Models/FeatureImportanceParserTest.cs index 09e3d913..e94bbda8 100644 --- a/src/SharpLearning.XGBoost.Test/Models/FeatureImportanceParserTest.cs +++ b/src/SharpLearning.XGBoost.Test/Models/FeatureImportanceParserTest.cs @@ -36,7 +36,7 @@ public void FeatureImportanceParser_Parse() readonly string[] m_textTrees = new string[] { m_tree1, m_tree2 }; - static readonly string m_tree1 = @"booster[0] + const string m_tree1 = @"booster[0] 0:[f2<2.695] yes=1,no=2,missing=1,gain=343.922,cover=214 1:[f6<9.81] yes=3,no=4,missing=3,gain=74.1261,cover=61 3:[f8<0.13] yes=7,no=8,missing=7,gain=10.7401,cover=37 @@ -52,7 +52,7 @@ public void FeatureImportanceParser_Parse() 6:leaf=0.3,cover=2"; - static readonly string m_tree2 = @"booster[1] + const string m_tree2 = @"booster[1] 0:[f2<2.695] yes=1,no=2,missing=1,gain=280.77,cover=214 1:[f6<9.81] yes=3,no=4,missing=3,gain=60.1843,cover=61 3:[f8<0.13] yes=7,no=8,missing=7,gain=8.27457,cover=37 diff --git a/src/SharpLearning.XGBoost.Test/Models/XGBoostTreeConverterTest.cs b/src/SharpLearning.XGBoost.Test/Models/XGBoostTreeConverterTest.cs index 608dc37d..9ca054d2 100644 --- a/src/SharpLearning.XGBoost.Test/Models/XGBoostTreeConverterTest.cs +++ b/src/SharpLearning.XGBoost.Test/Models/XGBoostTreeConverterTest.cs @@ -127,7 +127,7 @@ void ArrayAssert(double[] expected, double[] actual) } } - static readonly string m_tree1 = @"booster[0] + const string m_tree1 = @"booster[0] 0:[f2<2.695] yes=1,no=2,missing=1,gain=343.922,cover=214 1:[f6<9.81] yes=3,no=4,missing=3,gain=74.1261,cover=61 
3:[f8<0.13] yes=7,no=8,missing=7,gain=10.7401,cover=37 @@ -213,7 +213,7 @@ void ArrayAssert(double[] expected, double[] actual) }, }; - static readonly string m_tree2 = @"booster[10] + const string m_tree2 = @"booster[10] 0:[f2<2.545] yes=1,no=2,missing=1,gain=46.9086,cover=214 1:[f1<13.785] yes=3,no=4,missing=3,gain=12.7152,cover=60 3:[f1<13.495] yes=7,no=8,missing=7,gain=4.75871,cover=24 From 3a3f1916f71d43a95861e1a648e63fa18b850012 Mon Sep 17 00:00:00 2001 From: mdabros Date: Wed, 24 Jan 2024 21:31:35 +0100 Subject: [PATCH 07/15] Fix CA1822 --- .../Models/ClassificationAdaBoostModelTest.cs | 3 +-- .../Matrices/F64MatrixTest.cs | 10 +++++----- .../Matrices/StringMatrixTest.cs | 10 +++++----- .../Views/F64MatrixViewTest.cs | 6 +++--- .../Views/F64VectorViewTest.cs | 2 +- src/SharpLearning.Containers/CertaintyPrediction.cs | 2 +- .../ProbabilityPrediction.cs | 2 +- .../CrossValidationUtilitiesTest.cs | 2 +- .../CrossValidators/CrossValidationTest.cs | 2 +- .../CrossValidators/NoShuffleCrossValidationTest.cs | 4 ++-- .../CrossValidators/RandomCrossValidationTest.cs | 4 ++-- .../CrossValidators/StratifiedCrossValidationTest.cs | 4 ++-- .../Augmentators/ContinuousMungeAugmentator.cs | 2 +- .../Augmentators/NominalMungeAugmentator.cs | 3 +-- .../GiniClassificationImpurityCalculatorTest.cs | 2 +- .../RegressionImpurityCalculatorTest.cs | 2 +- .../ClassificationDecisionTreeLearnerTest.cs | 6 +++--- .../Learners/RegressionDecisionTreeLearnerTest.cs | 2 +- .../Models/ClassificationDecisionTreeModelTest.cs | 2 +- .../SplitSearchers/LinearSplitSearcherTest.cs | 2 +- .../OnlyUniqueThresholdsSplitSearcherTest.cs | 2 +- .../ImpurityCalculators/ChildImpurities.cs | 2 +- .../SplitSearchers/SplitResult.cs | 2 +- .../ClassificationStackingEnsembleModelTest.cs | 2 +- .../Models/RegressionStackingEnsembleModelTest.cs | 4 ++-- .../Models/ClassificationStackingEnsembleModel.cs | 2 +- .../Models/RegressionStackingEnsembleModel.cs | 2 +- ...cMeanProbabilityClassificationEnsembleStrategy.cs | 2 +- .../Normalization/LinearNormalizerTest.cs | 8 ++++---- .../CsvRowTransforms/DateTimeFeatureTransformer.cs | 4 ++-- .../MatrixTransforms/MinMaxTransformer.cs | 4 ++-- .../Normalization/LinearNormalizer.cs | 2 +- .../Models/ClassificationGradientBoostModelTest.cs | 2 +- .../GBMDecisionTree/GBMDecisionTreeLearner.cs | 4 ++-- .../Learners/ClassificationGradientBoostLearner.cs | 2 +- .../Learners/RegressionGradientBoostLearner.cs | 2 +- .../Loss/GradientBoostBinomialLoss.cs | 2 +- .../Models/ClassificationGradientBoostModel.cs | 2 +- .../Csv/CsvParserTest.cs | 12 ++++++------ src/SharpLearning.InputOutput/Csv/CsvParser.cs | 2 +- .../ModelComparison/McNemarModelComparisonTest.cs | 2 +- .../RocAucClassificationProbabilityMetric.cs | 2 +- .../Classification/TotalErrorClassificationMetric.cs | 4 ++-- .../ModelComparison/McNemarModelComparison.cs | 2 +- .../Ranking/AveragePrecisionRankingMetric.cs | 2 +- .../NormalizedGiniCoefficientRegressionMetric.cs | 2 +- .../Learners/ClassificationNeuralNetLearnerTest.cs | 4 ++-- .../Learners/RegressionNeuralNetLearnerTest.cs | 2 +- .../Activations/ReluActivation.cs | 4 ++-- .../Activations/SigmoidActivation.cs | 4 ++-- .../Layers/BatchNormalizationLayer.cs | 2 +- src/SharpLearning.Neural/Layers/SoftMaxLayer.cs | 2 +- .../Learners/ClassificationNeuralNetLearner.cs | 2 +- .../Learners/NeuralNetLearner.cs | 6 +++--- .../BayesianOptimizerTest.cs | 3 +-- .../ParameterSamplers/RandomUniformTest.cs | 2 +- .../SmacOptimizerTest.cs | 6 +++--- src/SharpLearning.Optimization/BayesianOptimizer.cs | 2 
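// ----------------------------------------------------------------------------
// Note on CA1802 ("use literals where appropriate"): the m_tree1/m_tree2 tree
// dumps are compile-time constant strings, so `const` replaces
// `static readonly`. A const is substituted at compile time and requires no
// field initialization at runtime:
//
//     static readonly string m_tree1 = @"booster[0] ...";  // before
//     const string m_tree1 = @"booster[0] ...";            // after
//
// The trade-off: const values are baked into referencing assemblies at compile
// time, which is harmless here because the fields are private test data.
// ----------------------------------------------------------------------------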
+- .../ParticleSwarmOptimizer.cs | 2 +- src/SharpLearning.Optimization/SmacOptimizer.cs | 6 +++--- ...ssificationExtremelyRandomizedTreesLearnerTest.cs | 4 ++-- .../ClassificationRandomForestLearnerTest.cs | 4 ++-- .../RegressionExtremelyRandomizedTreesLearnerTest.cs | 4 ++-- .../Learners/RegressionRandomForestLearnerTest.cs | 4 ++-- .../Models/ClassificationForestModelTest.cs | 2 +- .../Models/RegressionForestModelTest.cs | 2 +- .../ClassificationExtremelyRandomizedTreesLearner.cs | 2 +- .../Learners/ClassificationRandomForestLearner.cs | 2 +- .../RegressionExtremelyRandomizedTreesLearner.cs | 2 +- .../Learners/RegressionRandomForestLearner.cs | 2 +- 70 files changed, 111 insertions(+), 114 deletions(-) diff --git a/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs b/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs index 316513c8..2b0300b5 100644 --- a/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs +++ b/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs @@ -169,8 +169,7 @@ public void ClassificationAdaBoostModel_Load() Assert.AreEqual(0.19230769230769232, error, 0.0000001); } - - void Write(ProbabilityPrediction[] predictions) + static void Write(ProbabilityPrediction[] predictions) { var value = "new ProbabilityPrediction[] {"; foreach (var item in predictions) diff --git a/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs b/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs index ccbb3479..386f4693 100644 --- a/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs +++ b/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs @@ -142,17 +142,17 @@ public void F64Matrix_Implicit_Conversion() Assert.AreEqual(3, actual.At(1, 1)); } - double[] GetExpectedColumn() + static double[] GetExpectedColumn() { return new double[3] { 2, 20, 200 }; } - double[] GetExpectedRow() + static double[] GetExpectedRow() { return new double[3] { 10, 20, 30 }; } - F64Matrix GetExpectedColSubMatrix() + static F64Matrix GetExpectedColSubMatrix() { var features = new double[6] { 1, 3, 10, 30, @@ -161,7 +161,7 @@ F64Matrix GetExpectedColSubMatrix() return new F64Matrix(features, 3, 2); } - F64Matrix GetExpectedRowSubMatrix() + static F64Matrix GetExpectedRowSubMatrix() { var features = new double[6] { 1, 2, 3, 100, 200, 300}; @@ -169,7 +169,7 @@ F64Matrix GetExpectedRowSubMatrix() return new F64Matrix(features, 2, 3); } - F64Matrix CreateFeatures() + static F64Matrix CreateFeatures() { var features = new double[9] { 1, 2, 3, 10, 20, 30, diff --git a/src/SharpLearning.Containers.Test/Matrices/StringMatrixTest.cs b/src/SharpLearning.Containers.Test/Matrices/StringMatrixTest.cs index 6f2a750c..cb017b7d 100644 --- a/src/SharpLearning.Containers.Test/Matrices/StringMatrixTest.cs +++ b/src/SharpLearning.Containers.Test/Matrices/StringMatrixTest.cs @@ -127,17 +127,17 @@ public void StringMatrix_Columns_Predefined() Assert.IsTrue(expected.Equals(actual)); } - string[] GetExpectedColumn() + static string[] GetExpectedColumn() { return new string[] { "2", "20", "200" }; } - string[] GetExpectedRow() + static string[] GetExpectedRow() { return new string[] { "10", "20", "30" }; } - StringMatrix GetExpectedColSubMatrix() + static StringMatrix GetExpectedColSubMatrix() { var features = new string[] { "1", "3", "10", "30", @@ -146,7 +146,7 @@ StringMatrix GetExpectedColSubMatrix() return new StringMatrix(features, 3, 2); } - StringMatrix GetExpectedRowSubMatrix() + static StringMatrix 
GetExpectedRowSubMatrix() { var features = new string[] { "1", "2", "3", "100", "200", "300"}; @@ -154,7 +154,7 @@ StringMatrix GetExpectedRowSubMatrix() return new StringMatrix(features, 2, 3); } - StringMatrix CreateFeatures() + static StringMatrix CreateFeatures() { var features = new string[] { "1", "2", "3", "10", "20", "30", diff --git a/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs b/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs index 61600ff7..e333fd13 100644 --- a/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs +++ b/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs @@ -85,12 +85,12 @@ public void F64MatrixView_ColumnLargePointerOffset() /// A matrix which needs a byte pointer offset larger than /// int.MaxValue to access all records in the backing array - F64Matrix LargeMatrix() + static F64Matrix LargeMatrix() { return new F64Matrix(int.MaxValue / sizeof(double) + 2, 1); } - F64Matrix Matrix() + static F64Matrix Matrix() { var features = new double[9] { 1, 2, 3, 10, 20, 30, @@ -99,7 +99,7 @@ F64Matrix Matrix() return new F64Matrix(features, 3, 3); } - unsafe void AssertColumnView(double[] column, F64MatrixColumnView columnView) + static unsafe void AssertColumnView(double[] column, F64MatrixColumnView columnView) { for (int i = 0; i < column.Length; i++) { diff --git a/src/SharpLearning.Containers.Test/Views/F64VectorViewTest.cs b/src/SharpLearning.Containers.Test/Views/F64VectorViewTest.cs index e7ee7bc6..150a010d 100644 --- a/src/SharpLearning.Containers.Test/Views/F64VectorViewTest.cs +++ b/src/SharpLearning.Containers.Test/Views/F64VectorViewTest.cs @@ -54,7 +54,7 @@ public void F64VectorView_SubView_End() } } - void AssertVectorView(double[] vector, F64VectorView view) + static void AssertVectorView(double[] vector, F64VectorView view) { for (int i = 0; i < vector.Length; i++) { diff --git a/src/SharpLearning.Containers/CertaintyPrediction.cs b/src/SharpLearning.Containers/CertaintyPrediction.cs index 56ce9e3f..910c319b 100644 --- a/src/SharpLearning.Containers/CertaintyPrediction.cs +++ b/src/SharpLearning.Containers/CertaintyPrediction.cs @@ -87,7 +87,7 @@ public override int GetHashCode() const double m_tolerence = 0.00001; - bool Equal(double a, double b) + static bool Equal(double a, double b) { var diff = Math.Abs(a * m_tolerence); if (Math.Abs(a - b) <= diff) diff --git a/src/SharpLearning.Containers/ProbabilityPrediction.cs b/src/SharpLearning.Containers/ProbabilityPrediction.cs index d51d891c..b001fa2f 100644 --- a/src/SharpLearning.Containers/ProbabilityPrediction.cs +++ b/src/SharpLearning.Containers/ProbabilityPrediction.cs @@ -98,7 +98,7 @@ public override int GetHashCode() const double m_tolerence = 0.00001; - bool Equal(double a, double b) + static bool Equal(double a, double b) { var diff = Math.Abs(a * m_tolerence); if (Math.Abs(a - b) <= diff) diff --git a/src/SharpLearning.CrossValidation.Test/CrossValidationUtilitiesTest.cs b/src/SharpLearning.CrossValidation.Test/CrossValidationUtilitiesTest.cs index f71a14b6..07a13107 100644 --- a/src/SharpLearning.CrossValidation.Test/CrossValidationUtilitiesTest.cs +++ b/src/SharpLearning.CrossValidation.Test/CrossValidationUtilitiesTest.cs @@ -92,7 +92,7 @@ public void CrossValidationUtilities_GetKFoldCrossValidationIndexSets_Handle_Rem } } - void TraceIndexSets(IReadOnlyList<(int[] trainingIndices, int[] validationIndices)> indexSets) + static void TraceIndexSets(IReadOnlyList<(int[] trainingIndices, int[] validationIndices)> indexSets) { const string Separator 
= ", "; foreach (var set in indexSets) diff --git a/src/SharpLearning.CrossValidation.Test/CrossValidators/CrossValidationTest.cs b/src/SharpLearning.CrossValidation.Test/CrossValidators/CrossValidationTest.cs index e1b79cfe..bca9b401 100644 --- a/src/SharpLearning.CrossValidation.Test/CrossValidators/CrossValidationTest.cs +++ b/src/SharpLearning.CrossValidation.Test/CrossValidators/CrossValidationTest.cs @@ -31,7 +31,7 @@ public void CrossValidation_CrossValidate_Too_Many_Folds() CrossValidate(2000); } - double CrossValidate(int folds) + static double CrossValidate(int folds) { var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); diff --git a/src/SharpLearning.CrossValidation.Test/CrossValidators/NoShuffleCrossValidationTest.cs b/src/SharpLearning.CrossValidation.Test/CrossValidators/NoShuffleCrossValidationTest.cs index 38c18406..df29d79c 100644 --- a/src/SharpLearning.CrossValidation.Test/CrossValidators/NoShuffleCrossValidationTest.cs +++ b/src/SharpLearning.CrossValidation.Test/CrossValidators/NoShuffleCrossValidationTest.cs @@ -45,7 +45,7 @@ public void NoShuffleCrossValidation_CrossValidate_Too_Many_Folds() CrossValidate(2000); } - double CrossValidate(int folds) + static double CrossValidate(int folds) { var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); @@ -57,7 +57,7 @@ double CrossValidate(int folds) return metric.Error(targets, predictions); } - double CrossValidate_Provide_Indices(int folds) + static double CrossValidate_Provide_Indices(int folds) { var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); diff --git a/src/SharpLearning.CrossValidation.Test/CrossValidators/RandomCrossValidationTest.cs b/src/SharpLearning.CrossValidation.Test/CrossValidators/RandomCrossValidationTest.cs index cf2c623e..a62958d6 100644 --- a/src/SharpLearning.CrossValidation.Test/CrossValidators/RandomCrossValidationTest.cs +++ b/src/SharpLearning.CrossValidation.Test/CrossValidators/RandomCrossValidationTest.cs @@ -45,7 +45,7 @@ public void RandomCrossValidation_CrossValidate_Too_Many_Folds() CrossValidate(2000); } - double CrossValidate(int folds) + static double CrossValidate(int folds) { var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); @@ -57,7 +57,7 @@ double CrossValidate(int folds) return metric.Error(targets, predictions); } - double CrossValidate_Provide_Indices(int folds) + static double CrossValidate_Provide_Indices(int folds) { var (observations, targets) = DataSetUtilities.LoadDecisionTreeDataSet(); diff --git a/src/SharpLearning.CrossValidation.Test/CrossValidators/StratifiedCrossValidationTest.cs b/src/SharpLearning.CrossValidation.Test/CrossValidators/StratifiedCrossValidationTest.cs index 9406a9e1..acf0925f 100644 --- a/src/SharpLearning.CrossValidation.Test/CrossValidators/StratifiedCrossValidationTest.cs +++ b/src/SharpLearning.CrossValidation.Test/CrossValidators/StratifiedCrossValidationTest.cs @@ -48,7 +48,7 @@ public void StratisfiedCrossValidation_CrossValidate_Too_Many_Folds() CrossValidate(200); } - double CrossValidate(int folds) + static double CrossValidate(int folds) { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); @@ -60,7 +60,7 @@ double CrossValidate(int folds) return metric.Error(targets, predictions); } - double CrossValidate_Provide_Indices(int folds) + static double CrossValidate_Provide_Indices(int folds) { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); diff --git 
a/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs b/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs index 4de7b2e1..7e681ce6 100644 --- a/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs +++ b/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs @@ -120,7 +120,7 @@ double SampleRandom(double mean, double std) return randNormal; } - double GetDistance(double[] p, double[] q) + static double GetDistance(double[] p, double[] q) { double distance = 0; double diff = 0; diff --git a/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs b/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs index b0d290e3..c2c8d71a 100644 --- a/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs +++ b/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs @@ -105,8 +105,7 @@ public F64Matrix Agument(F64Matrix dataset) return augmentation; } - - double GetHammingDistance(double[] a, double[] b) + static double GetHammingDistance(double[] a, double[] b) { if (a.Length != b.Length) throw new ArgumentOutOfRangeException("lengths are not equal"); int count = 0; diff --git a/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/GiniClassificationImpurityCalculatorTest.cs b/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/GiniClassificationImpurityCalculatorTest.cs index 1151f6ec..68e097ac 100644 --- a/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/GiniClassificationImpurityCalculatorTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/GiniClassificationImpurityCalculatorTest.cs @@ -109,7 +109,7 @@ public void GiniClassificationImpurityCalculator_LeafValue_Weighted() Assert.AreEqual(2.0, actual, 0.000001); } - double Weight(double t) + static double Weight(double t) { if (t == 2.0) return 10.0; diff --git a/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/RegressionImpurityCalculatorTest.cs b/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/RegressionImpurityCalculatorTest.cs index 6a4d1e07..b63e404c 100644 --- a/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/RegressionImpurityCalculatorTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/RegressionImpurityCalculatorTest.cs @@ -101,7 +101,7 @@ public void RegressionImpurityCalculator_LeafValue_Weighted() Assert.AreEqual(1.75, actual, 0.000001); } - double Weight(double t) + static double Weight(double t) { if (t == 2.0) return 10.0; diff --git a/src/SharpLearning.DecisionTrees.Test/Learners/ClassificationDecisionTreeLearnerTest.cs b/src/SharpLearning.DecisionTrees.Test/Learners/ClassificationDecisionTreeLearnerTest.cs index fad6b667..38682116 100644 --- a/src/SharpLearning.DecisionTrees.Test/Learners/ClassificationDecisionTreeLearnerTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/Learners/ClassificationDecisionTreeLearnerTest.cs @@ -146,7 +146,7 @@ public void ClassificationDecisionTreeLearner_Learn_Glass_Depth_5_Weight_10() Assert.AreEqual(0.14018691588785046, error, 0.0000001); } - double ClassificationDecisionTreeLearner_Learn_Glass(int treeDepth) + static double ClassificationDecisionTreeLearner_Learn_Glass(int treeDepth) { var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); @@ -160,7 +160,7 @@ double ClassificationDecisionTreeLearner_Learn_Glass(int treeDepth) return error; } - double ClassificationDecisionTreeLearner_Learn_Aptitude(int treeDepth) + static double 
ClassificationDecisionTreeLearner_Learn_Aptitude(int treeDepth) { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); @@ -205,7 +205,7 @@ double ClassificationDecisionTreeLearner_Learn_Aptitude_Weighted(int treeDepth, return error; } - public double Weight(double v, double targetToWeigh, double weight) + public static double Weight(double v, double targetToWeigh, double weight) { if (v == targetToWeigh) return weight; diff --git a/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs b/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs index d84e0319..2986dadc 100644 --- a/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs @@ -112,7 +112,7 @@ double RegressionDecisionTreeLearner_Learn_Weighted(int treeDepth, double weight return error; } - double Weight(double v, double weight) + static double Weight(double v, double weight) { if (v < 3.0) return weight; diff --git a/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs b/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs index 361902d1..9a5b5c03 100644 --- a/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs @@ -208,7 +208,7 @@ public void ClassificationDecisionTreeModel_Load() Assert.AreEqual(0.19230769230769232, error, 0.0000001); } - void Write(ProbabilityPrediction[] predictions) + static void Write(ProbabilityPrediction[] predictions) { var value = "new ProbabilityPrediction[] {"; foreach (var item in predictions) diff --git a/src/SharpLearning.DecisionTrees.Test/SplitSearchers/LinearSplitSearcherTest.cs b/src/SharpLearning.DecisionTrees.Test/SplitSearchers/LinearSplitSearcherTest.cs index 2d34ceb6..a0eff6b3 100644 --- a/src/SharpLearning.DecisionTrees.Test/SplitSearchers/LinearSplitSearcherTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/SplitSearchers/LinearSplitSearcherTest.cs @@ -121,7 +121,7 @@ public void LinearSplitSearcher_FindBestSplit_Large() Assert.AreEqual(expected, actual); } - double Weight(double v, double weight) + static double Weight(double v, double weight) { if (v == 1.0) return weight; diff --git a/src/SharpLearning.DecisionTrees.Test/SplitSearchers/OnlyUniqueThresholdsSplitSearcherTest.cs b/src/SharpLearning.DecisionTrees.Test/SplitSearchers/OnlyUniqueThresholdsSplitSearcherTest.cs index a7d51a04..393911f4 100644 --- a/src/SharpLearning.DecisionTrees.Test/SplitSearchers/OnlyUniqueThresholdsSplitSearcherTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/SplitSearchers/OnlyUniqueThresholdsSplitSearcherTest.cs @@ -121,7 +121,7 @@ public void OnlyUniqueThresholdsSplitSearcher_FindBestSplit_Large() Assert.AreEqual(expected, actual); } - double Weight(double v, double weight) + static double Weight(double v, double weight) { if (v == 1.0) return weight; diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ChildImpurities.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ChildImpurities.cs index d00713c6..b68b1468 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ChildImpurities.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ChildImpurities.cs @@ -86,7 +86,7 @@ public override int GetHashCode() const double m_tolerence = 0.00001; - bool Equal(double a, double b) + static bool Equal(double a, 
double b) { var diff = Math.Abs(a * m_tolerence); if (Math.Abs(a - b) <= diff) diff --git a/src/SharpLearning.DecisionTrees/SplitSearchers/SplitResult.cs b/src/SharpLearning.DecisionTrees/SplitSearchers/SplitResult.cs index 0b1a3aea..6f260566 100644 --- a/src/SharpLearning.DecisionTrees/SplitSearchers/SplitResult.cs +++ b/src/SharpLearning.DecisionTrees/SplitSearchers/SplitResult.cs @@ -124,7 +124,7 @@ public override int GetHashCode() const double m_tolerence = 0.00001; - bool Equal(double a, double b) + static bool Equal(double a, double b) { var diff = Math.Abs(a * m_tolerence); if (Math.Abs(a - b) <= diff) diff --git a/src/SharpLearning.Ensemble.Test/Models/ClassificationStackingEnsembleModelTest.cs b/src/SharpLearning.Ensemble.Test/Models/ClassificationStackingEnsembleModelTest.cs index 73167372..12d6ee76 100644 --- a/src/SharpLearning.Ensemble.Test/Models/ClassificationStackingEnsembleModelTest.cs +++ b/src/SharpLearning.Ensemble.Test/Models/ClassificationStackingEnsembleModelTest.cs @@ -199,7 +199,7 @@ public void ClassificationStackingEnsembleModel_GetRawVariableImportance() } } - void WriteImportances(Dictionary featureImportance) + static void WriteImportances(Dictionary featureImportance) { var result = "new Dictionary {"; foreach (var item in featureImportance) diff --git a/src/SharpLearning.Ensemble.Test/Models/RegressionStackingEnsembleModelTest.cs b/src/SharpLearning.Ensemble.Test/Models/RegressionStackingEnsembleModelTest.cs index 2636b571..425d1e9c 100644 --- a/src/SharpLearning.Ensemble.Test/Models/RegressionStackingEnsembleModelTest.cs +++ b/src/SharpLearning.Ensemble.Test/Models/RegressionStackingEnsembleModelTest.cs @@ -140,7 +140,7 @@ public void RegressionStackingEnsembleModel_GetRawVariableImportance() } } - void WriteRawImportances(double[] featureImportance) + static void WriteRawImportances(double[] featureImportance) { var result = "new double[] {"; foreach (var item in featureImportance) @@ -151,7 +151,7 @@ void WriteRawImportances(double[] featureImportance) Trace.WriteLine(result); } - void WriteImportances(Dictionary featureImportance) + static void WriteImportances(Dictionary featureImportance) { var result = "new Dictionary {"; foreach (var item in featureImportance) diff --git a/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs b/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs index 5bfa9eab..8858acb4 100644 --- a/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs +++ b/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs @@ -186,7 +186,7 @@ ProbabilityPrediction IPredictor.Predict(double[] observa ProbabilityPrediction[] IPredictor.Predict(F64Matrix observations) => PredictProbability(observations); - string GetNewFeatureName(string name, Dictionary ensembleFeatureNameToIndex) + static string GetNewFeatureName(string name, Dictionary ensembleFeatureNameToIndex) { if (ensembleFeatureNameToIndex.ContainsKey(name)) { diff --git a/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs b/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs index ea28a2bc..784a0a7a 100644 --- a/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs +++ b/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs @@ -118,7 +118,7 @@ public Dictionary GetVariableImportance(Dictionary return m_metaModel.GetVariableImportance(ensembleFeatureNameToIndex); } - string GetNewFeatureName(string name, Dictionary ensembleFeatureNameToIndex) + 
static string GetNewFeatureName(string name, Dictionary ensembleFeatureNameToIndex) { if (ensembleFeatureNameToIndex.ContainsKey(name)) { diff --git a/src/SharpLearning.Ensemble/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategy.cs b/src/SharpLearning.Ensemble/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategy.cs index 05bc49af..369698ef 100644 --- a/src/SharpLearning.Ensemble/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategy.cs +++ b/src/SharpLearning.Ensemble/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategy.cs @@ -49,7 +49,7 @@ public void Combine(ProbabilityPrediction[][] ensemblePredictions, ProbabilityPr } } - double GeometricMean(double[] values) + static double GeometricMean(double[] values) { var geoMean = 0.0; for (int i = 0; i < values.Length; i++) diff --git a/src/SharpLearning.FeatureTransformations.Test/Normalization/LinearNormalizerTest.cs b/src/SharpLearning.FeatureTransformations.Test/Normalization/LinearNormalizerTest.cs index ece1ef05..f98cf3cd 100644 --- a/src/SharpLearning.FeatureTransformations.Test/Normalization/LinearNormalizerTest.cs +++ b/src/SharpLearning.FeatureTransformations.Test/Normalization/LinearNormalizerTest.cs @@ -17,11 +17,11 @@ public void LinearNormalizer_Normalize() var sut = new LinearNormalizer(); - Assert.AreEqual(1.0, sut.Normalize(newMin, newMax, oldMin, oldMax, oldMax), 0.000001); - Assert.AreEqual(-1.0, sut.Normalize(newMin, newMax, oldMin, oldMax, oldMin), 0.000001); + Assert.AreEqual(1.0, LinearNormalizer.Normalize(newMin, newMax, oldMin, oldMax, oldMax), 0.000001); + Assert.AreEqual(-1.0, LinearNormalizer.Normalize(newMin, newMax, oldMin, oldMax, oldMin), 0.000001); - Assert.AreEqual(-0.833463643471462, sut.Normalize(newMin, newMax, oldMin, oldMax, 0.0), 0.000001); - Assert.AreEqual(0.730258014073495, sut.Normalize(newMin, newMax, oldMin, oldMax, 2000.0), 0.000001); + Assert.AreEqual(-0.833463643471462, LinearNormalizer.Normalize(newMin, newMax, oldMin, oldMax, 0.0), 0.000001); + Assert.AreEqual(0.730258014073495, LinearNormalizer.Normalize(newMin, newMax, oldMin, oldMax, 2000.0), 0.000001); } } } diff --git a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs index a2a503c5..b74a5139 100644 --- a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs +++ b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs @@ -49,7 +49,7 @@ public DateTimeFeatureTransformer(string dateTimeColumn, DateTime startDate) /// /// /// - public string[] FeatureNames + public static string[] FeatureNames { get => new[] { "Year", "Month", "WeekOfYear", "DayOfMonth", "DayOfWeek", "HourOfDay", "TotalDays", "TotalHours" }; } @@ -116,7 +116,7 @@ string[] CreateDateTimeFeatures(DateTime dateTime) return timeValues; } - string CreateKey(string key, Dictionary columnNameToIndex) + static string CreateKey(string key, Dictionary columnNameToIndex) { if (!columnNameToIndex.ContainsKey(key)) { diff --git a/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs b/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs index cbbeffca..fd1bd5a4 100644 --- a/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs +++ b/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs @@ -80,7 +80,7 @@ public void Transform(F64Matrix matrix, 
F64Matrix output) { var value = matrix[j, i]; var minMax = m_featureMinMax[i]; - var newValue = m_normalizer.Normalize(m_min, m_max, minMax.Min, minMax.Max, value); + var newValue = LinearNormalizer.Normalize(m_min, m_max, minMax.Min, minMax.Max, value); output[j, i] = newValue; } } @@ -141,7 +141,7 @@ public void Transform(double[] vector, double[] output) { var value = vector[i]; var minMax = m_featureMinMax[i]; - var newValue = m_normalizer.Normalize(m_min, m_max, minMax.Min, minMax.Max, value); + var newValue = LinearNormalizer.Normalize(m_min, m_max, minMax.Min, minMax.Max, value); output[i] = newValue; } } diff --git a/src/SharpLearning.FeatureTransformations/Normalization/LinearNormalizer.cs b/src/SharpLearning.FeatureTransformations/Normalization/LinearNormalizer.cs index 1015b101..ee5b0d53 100644 --- a/src/SharpLearning.FeatureTransformations/Normalization/LinearNormalizer.cs +++ b/src/SharpLearning.FeatureTransformations/Normalization/LinearNormalizer.cs @@ -15,7 +15,7 @@ public sealed class LinearNormalizer /// /// /// - public double Normalize(double newMin, double newMax, double oldMin, double oldMax, double value) + public static double Normalize(double newMin, double newMax, double oldMin, double oldMax, double value) { if (value == oldMin) { diff --git a/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs b/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs index 1001c8ce..e7cebf1f 100644 --- a/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs +++ b/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs @@ -189,7 +189,7 @@ public void ClassificationGradientBoostModel_Load() Assert.AreEqual(0.15384615384615385, error, 0.0000001); } - void Write(ProbabilityPrediction[] predictions) + static void Write(ProbabilityPrediction[] predictions) { var value = "new ProbabilityPrediction[] {"; foreach (var item in predictions) diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs index 8207b9f3..2d9a8007 100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs +++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs @@ -399,7 +399,7 @@ void FindBestSplit(F64Matrix observations, } } - int NextAllowedIndex(int start, int[] orderedIndexes, bool[] inSample) + static int NextAllowedIndex(int start, int[] orderedIndexes, bool[] inSample) { for (int i = start; i < orderedIndexes.Length; i++) @@ -412,7 +412,7 @@ int NextAllowedIndex(int start, int[] orderedIndexes, bool[] inSample) return (orderedIndexes.Length + 1); } - void SetParentLeafIndex(int nodeIndex, GBMTreeCreationItem parentItem) + static void SetParentLeafIndex(int nodeIndex, GBMTreeCreationItem parentItem) { if (parentItem.Values.Position == NodePositionType.Left) { diff --git a/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs index 014c24d5..c6d70a35 100644 --- a/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs @@ -512,7 +512,7 @@ IPredictorModel ILearner.Learn( /// /// /// - int[][] CreateOrderedElements(F64Matrix observations, int rows) + static int[][] CreateOrderedElements(F64Matrix observations, int rows) { var 
orderedElements = new int[observations.ColumnCount][]; diff --git a/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs index 7e579105..7f0c9ca3 100644 --- a/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs @@ -293,7 +293,7 @@ IPredictorModel ILearner.Learn( /// /// /// - int[][] CreateOrderedElements(F64Matrix observations, int rows) + static int[][] CreateOrderedElements(F64Matrix observations, int rows) { var orderedElements = new int[observations.ColumnCount][]; diff --git a/src/SharpLearning.GradientBoost/Loss/GradientBoostBinomialLoss.cs b/src/SharpLearning.GradientBoost/Loss/GradientBoostBinomialLoss.cs index b5ab0a4f..3831f9a7 100644 --- a/src/SharpLearning.GradientBoost/Loss/GradientBoostBinomialLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/GradientBoostBinomialLoss.cs @@ -133,7 +133,7 @@ public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, right.BestConstant = BinomialBestConstant(right.Sum, right.BinomialSum); } - double BinomialBestConstant(double sum, double binomialSum) + static double BinomialBestConstant(double sum, double binomialSum) { if (binomialSum != 0.0) { diff --git a/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs b/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs index 5b0df592..c5ec3647 100644 --- a/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs +++ b/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs @@ -266,7 +266,7 @@ double Probability(double[] observation, int targetIndex) return Sigmoid(prediction); } - double Sigmoid(double z) + static double Sigmoid(double z) { return 1.0 / (1.0 + Math.Exp(-z)); } diff --git a/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs b/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs index af868d5f..0d16a734 100644 --- a/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs +++ b/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs @@ -113,7 +113,7 @@ public void CsvParser_NoHeader_EnumerateRows_Value_Throw() CollectionAssert.AreEqual(Expected_NoHeader(), actual.ToList()); } - List Expected_NoHeader() + static List Expected_NoHeader() { var columnNameToIndex = new Dictionary { { "0", 0 }, { "1", 1 }, { "2", 2 } }; @@ -127,7 +127,7 @@ List Expected_NoHeader() return expected; } - List Expected() + static List Expected() { var columnNameToIndex = new Dictionary { @@ -146,7 +146,7 @@ List Expected() return expected; } - List Expected_ColumnNames() + static List Expected_ColumnNames() { var columnNameToIndex = new Dictionary { @@ -164,7 +164,7 @@ List Expected_ColumnNames() return expected; } - List Expected_Select_ColumnNames() + static List Expected_Select_ColumnNames() { var columnNameToIndex = new Dictionary { { "Pass", 0 } }; @@ -178,7 +178,7 @@ List Expected_Select_ColumnNames() return expected; } - List Expected_Quote_Inclosed_Columns() + static List Expected_Quote_Inclosed_Columns() { var columnNameToIndex = new Dictionary { { "c1", 0 }, { "c2", 1 }, { "c3", 2 } }; @@ -191,7 +191,7 @@ List Expected_Quote_Inclosed_Columns() return expected; } - List Expected_Quote_Inclosed_Columns_Separator_In_Text() + static List Expected_Quote_Inclosed_Columns_Separator_In_Text() { var columnNameToIndex = new Dictionary { { "c1", 0 }, { "c2", 1 }, { "c3", 2 } }; diff --git 
a/src/SharpLearning.InputOutput/Csv/CsvParser.cs b/src/SharpLearning.InputOutput/Csv/CsvParser.cs index 22320f58..4085627d 100644 --- a/src/SharpLearning.InputOutput/Csv/CsvParser.cs +++ b/src/SharpLearning.InputOutput/Csv/CsvParser.cs @@ -225,7 +225,7 @@ string[] Split(string line, int[] indices) return split; } - string[] SplitText(string csvText, char separator) + static string[] SplitText(string csvText, char separator) { List tokens = new List(); diff --git a/src/SharpLearning.Metrics.Test/ModelComparison/McNemarModelComparisonTest.cs b/src/SharpLearning.Metrics.Test/ModelComparison/McNemarModelComparisonTest.cs index 60c802ec..469124ad 100644 --- a/src/SharpLearning.Metrics.Test/ModelComparison/McNemarModelComparisonTest.cs +++ b/src/SharpLearning.Metrics.Test/ModelComparison/McNemarModelComparisonTest.cs @@ -13,7 +13,7 @@ public void McNemarModelComparison_Compare() var model2 = new double[] { 1, 1, 3, 4, 3, 4, 2, 3, 1, 1 }; var sut = new McNemarModelComparison(); - var actual = sut.Compare(model1, model2, targets); + var actual = McNemarModelComparison.Compare(model1, model2, targets); CollectionAssert.AreEqual(new int[] { 1, 2 }, actual[0]); CollectionAssert.AreEqual(new int[] { 3, 4 }, actual[1]); diff --git a/src/SharpLearning.Metrics/Classification/RocAucClassificationProbabilityMetric.cs b/src/SharpLearning.Metrics/Classification/RocAucClassificationProbabilityMetric.cs index 88081f44..c8efa499 100644 --- a/src/SharpLearning.Metrics/Classification/RocAucClassificationProbabilityMetric.cs +++ b/src/SharpLearning.Metrics/Classification/RocAucClassificationProbabilityMetric.cs @@ -100,7 +100,7 @@ public double Error(double[] targets, ProbabilityPrediction[] predictions) /// /// /// - double trapezoidArea(double X1, double X2, double Y1, double Y2) + static double trapezoidArea(double X1, double X2, double Y1, double Y2) { double b = Math.Abs(X1 - X2); double height = (Y1 + Y2) / 2.0; diff --git a/src/SharpLearning.Metrics/Classification/TotalErrorClassificationMetric.cs b/src/SharpLearning.Metrics/Classification/TotalErrorClassificationMetric.cs index 798ca049..7196d9c5 100644 --- a/src/SharpLearning.Metrics/Classification/TotalErrorClassificationMetric.cs +++ b/src/SharpLearning.Metrics/Classification/TotalErrorClassificationMetric.cs @@ -38,7 +38,7 @@ double TotalError(List uniques, int[,] confusionMatrix) return (double)errorSum / totalSum; } - int Sum(int[,] confusionMatrix) + static int Sum(int[,] confusionMatrix) { var rows = confusionMatrix.GetLength(0); var cols = confusionMatrix.GetLength(1); @@ -55,7 +55,7 @@ int Sum(int[,] confusionMatrix) return sum; } - List UniqueTargets(T[] targets, T[] predictions) + static List UniqueTargets(T[] targets, T[] predictions) { var uniquePredictions = predictions.Distinct(); var uniqueTargets = targets.Distinct(); diff --git a/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs b/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs index 171ef398..e0513955 100644 --- a/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs +++ b/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs @@ -25,7 +25,7 @@ public sealed class McNemarModelComparison /// /// /// - public int[][] Compare(double[] model1Predictions, double[] model2Predictions, double[] targets) + public static int[][] Compare(double[] model1Predictions, double[] model2Predictions, double[] targets) { if (model1Predictions.Length != model2Predictions.Length || model1Predictions.Length != targets.Length) diff --git 
a/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs b/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs index b61ca499..a64dc101 100644 --- a/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs +++ b/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs @@ -59,7 +59,7 @@ public double Error(T[] targets, T[] predictions) return 1.0 - score / minLength; } - bool Contains(T[] predictions, int i, T prediction) + static bool Contains(T[] predictions, int i, T prediction) { var predictionFound = false; for (int j = 0; j < i; j++) diff --git a/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs b/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs index 69df068b..a24311f2 100644 --- a/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs +++ b/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs @@ -27,7 +27,7 @@ public double Error(double[] target, double[] predicted) /// /// /// - double GiniCoefficient(double[] target, double[] predicted) + static double GiniCoefficient(double[] target, double[] predicted) { if (target.Length != predicted.Length) { throw new ArgumentException(); } diff --git a/src/SharpLearning.Neural.Test/Learners/ClassificationNeuralNetLearnerTest.cs b/src/SharpLearning.Neural.Test/Learners/ClassificationNeuralNetLearnerTest.cs index d2a3631f..ccca6fa5 100644 --- a/src/SharpLearning.Neural.Test/Learners/ClassificationNeuralNetLearnerTest.cs +++ b/src/SharpLearning.Neural.Test/Learners/ClassificationNeuralNetLearnerTest.cs @@ -110,7 +110,7 @@ public void ClassificationNeuralNetLearner_Constructor_Throw_On_Wrong_OutputLaye var sut = new ClassificationNeuralNetLearner(net, new AccuracyLoss()); } - (F64Matrix observations, double[] targets) CreateData( + static (F64Matrix observations, double[] targets) CreateData( int numberOfObservations, int numberOfFeatures, int numberOfClasses, Random random) { var observations = new F64Matrix(numberOfObservations, numberOfFeatures); @@ -120,7 +120,7 @@ public void ClassificationNeuralNetLearner_Constructor_Throw_On_Wrong_OutputLaye return (observations, targets); } - (double[][] observations, double[] targets) CreateArrayData( + static (double[][] observations, double[] targets) CreateArrayData( int numberOfObservations, int numberOfFeatures, int numberOfClasses, Random random) { var observations = Enumerable.Range(0, numberOfObservations).Select(i => Enumerable.Range(0, numberOfFeatures) diff --git a/src/SharpLearning.Neural.Test/Learners/RegressionNeuralNetLearnerTest.cs b/src/SharpLearning.Neural.Test/Learners/RegressionNeuralNetLearnerTest.cs index a744b2c9..ad191fda 100644 --- a/src/SharpLearning.Neural.Test/Learners/RegressionNeuralNetLearnerTest.cs +++ b/src/SharpLearning.Neural.Test/Learners/RegressionNeuralNetLearnerTest.cs @@ -82,7 +82,7 @@ public void RegressionNeuralNetLearner_Constructor_Throw_On_Wrong_OutputLayerTyp var sut = new RegressionNeuralNetLearner(net, new AccuracyLoss()); } - (F64Matrix observations, double[] targets) CreateData( + static (F64Matrix observations, double[] targets) CreateData( int numberOfObservations, int numberOfFeatures, Random random) { var observations = new F64Matrix(numberOfObservations, numberOfFeatures); diff --git a/src/SharpLearning.Neural/Activations/ReluActivation.cs b/src/SharpLearning.Neural/Activations/ReluActivation.cs index 13f6d212..02cfeb25 100644 --- 
a/src/SharpLearning.Neural/Activations/ReluActivation.cs +++ b/src/SharpLearning.Neural/Activations/ReluActivation.cs @@ -33,12 +33,12 @@ public void Derivative(float[] x, float[] output) } } - float Relu(float input) + static float Relu(float input) { return Math.Max(0, input); } - float Derivative(float input) + static float Derivative(float input) { if (input > 0.0) return 1.0f; diff --git a/src/SharpLearning.Neural/Activations/SigmoidActivation.cs b/src/SharpLearning.Neural/Activations/SigmoidActivation.cs index e9e0f6c4..ca464c2a 100644 --- a/src/SharpLearning.Neural/Activations/SigmoidActivation.cs +++ b/src/SharpLearning.Neural/Activations/SigmoidActivation.cs @@ -32,13 +32,13 @@ public void Derivative(float[] x, float[] output) } } - float Sigmoid(float input) + static float Sigmoid(float input) { return Convert.ToSingle(1 / (1 + Math.Pow(Math.E, -input))); } //this input should be already activated input = sigmmoid(x) - float Derivative(float input) + static float Derivative(float input) { var de = input * (1 - input); diff --git a/src/SharpLearning.Neural/Layers/BatchNormalizationLayer.cs b/src/SharpLearning.Neural/Layers/BatchNormalizationLayer.cs index e4454ce4..c2ed7ebb 100644 --- a/src/SharpLearning.Neural/Layers/BatchNormalizationLayer.cs +++ b/src/SharpLearning.Neural/Layers/BatchNormalizationLayer.cs @@ -211,7 +211,7 @@ public Matrix Forward(Matrix input) return OutputActivations; } - float MovingAverage(float currentValue, float value, float momentum = 0.99f) + static float MovingAverage(float currentValue, float value, float momentum = 0.99f) { var newValue = currentValue * momentum + value * (1.0f - momentum); return newValue; diff --git a/src/SharpLearning.Neural/Layers/SoftMaxLayer.cs b/src/SharpLearning.Neural/Layers/SoftMaxLayer.cs index 0e92940a..441395f1 100644 --- a/src/SharpLearning.Neural/Layers/SoftMaxLayer.cs +++ b/src/SharpLearning.Neural/Layers/SoftMaxLayer.cs @@ -110,7 +110,7 @@ public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batc /// Softmax activation for neural net. 
/// /// - public void SoftMax(Matrix x) + public static void SoftMax(Matrix x) { var xData = x.Data(); var rows = x.RowCount; diff --git a/src/SharpLearning.Neural/Learners/ClassificationNeuralNetLearner.cs b/src/SharpLearning.Neural/Learners/ClassificationNeuralNetLearner.cs index 16fcc3a5..d360afaa 100644 --- a/src/SharpLearning.Neural/Learners/ClassificationNeuralNetLearner.cs +++ b/src/SharpLearning.Neural/Learners/ClassificationNeuralNetLearner.cs @@ -149,7 +149,7 @@ IPredictorModel ILearner.Learn( IPredictorModel ILearner.Learn( F64Matrix observations, double[] targets) => Learn(observations, targets); - double[] GetOrderedTargetNames(double[] targets) + static double[] GetOrderedTargetNames(double[] targets) { return targets.Distinct().OrderBy(v => v).ToArray(); } diff --git a/src/SharpLearning.Neural/Learners/NeuralNetLearner.cs b/src/SharpLearning.Neural/Learners/NeuralNetLearner.cs index 164c09f7..6427fc83 100644 --- a/src/SharpLearning.Neural/Learners/NeuralNetLearner.cs +++ b/src/SharpLearning.Neural/Learners/NeuralNetLearner.cs @@ -290,7 +290,7 @@ public NeuralNet Learn(F64Matrix observations, double[] targets, int[] indices, } } - void SetupLinerAlgebraProvider() + static void SetupLinerAlgebraProvider() { if (Control.TryUseNativeMKL()) { @@ -308,7 +308,7 @@ void SetupLinerAlgebraProvider() } } - void CopyBatchTargets(Matrix targets, Matrix batch, int[] indices) + static void CopyBatchTargets(Matrix targets, Matrix batch, int[] indices) { var cols = targets.ColumnCount; var batchRow = 0; @@ -322,7 +322,7 @@ void CopyBatchTargets(Matrix targets, Matrix batch, int[] indices) } } - void CopyBatch(F64Matrix observations, Matrix batch, int[] indices) + static void CopyBatch(F64Matrix observations, Matrix batch, int[] indices) { var cols = observations.ColumnCount; var batchRow = 0; diff --git a/src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs b/src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs index 7dce6b9a..348b3bc6 100644 --- a/src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs +++ b/src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs @@ -210,8 +210,7 @@ static BayesianOptimizer CreateSut( return sut; } - - OptimizerResult RunOpenLoopOptimizationTest(List results) + static OptimizerResult RunOpenLoopOptimizationTest(List results) { var parameters = new MinMaxParameterSpec[] { diff --git a/src/SharpLearning.Optimization.Test/ParameterSamplers/RandomUniformTest.cs b/src/SharpLearning.Optimization.Test/ParameterSamplers/RandomUniformTest.cs index 5c81da47..79bf33f1 100644 --- a/src/SharpLearning.Optimization.Test/ParameterSamplers/RandomUniformTest.cs +++ b/src/SharpLearning.Optimization.Test/ParameterSamplers/RandomUniformTest.cs @@ -26,7 +26,7 @@ public void RandomUniform_Sample_Continous() } } - public void RandomUniformIntergers_Sample_Integer() + public static void RandomUniformIntergers_Sample_Integer() { var sut = new RandomUniform(32); diff --git a/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs b/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs index 7ed052d7..729f23f7 100644 --- a/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs +++ b/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs @@ -194,7 +194,7 @@ static SmacOptimizer CreateSut(MinMaxParameterSpec[] parameters) seed: 42); } - OptimizerResult RunOpenLoopOptimizationTest(List results) + static OptimizerResult RunOpenLoopOptimizationTest(List results) { var parameters = new MinMaxParameterSpec[] { @@ -218,13 +218,13 @@ OptimizerResult 
RunOpenLoopOptimizationTest(List results) // Using SmacOptimizer in an open loop. var initialParameterSets = sut.ProposeParameterSets(randomStartingPointsCount, results); - var initializationResults = sut.RunParameterSets(Minimize, initialParameterSets); + var initializationResults = SmacOptimizer.RunParameterSets(Minimize, initialParameterSets); results.AddRange(initializationResults); for (int i = 0; i < iterations; i++) { var parameterSets = sut.ProposeParameterSets(functionEvaluationsPerIterationCount, results); - var iterationResults = sut.RunParameterSets(Minimize, parameterSets); + var iterationResults = SmacOptimizer.RunParameterSets(Minimize, parameterSets); results.AddRange(iterationResults); } diff --git a/src/SharpLearning.Optimization/BayesianOptimizer.cs b/src/SharpLearning.Optimization/BayesianOptimizer.cs index 9f7b642a..aad52c2e 100644 --- a/src/SharpLearning.Optimization/BayesianOptimizer.cs +++ b/src/SharpLearning.Optimization/BayesianOptimizer.cs @@ -259,7 +259,7 @@ OptimizerResult[] FindNextCandidates(RegressionForestModel model, double bestSco return results.ToArray(); } - double ComputeExpectedImprovement(double best, double[] parameterSet, RegressionForestModel model) + static double ComputeExpectedImprovement(double best, double[] parameterSet, RegressionForestModel model) { var prediction = model.PredictCertainty(parameterSet); var mean = prediction.Prediction; diff --git a/src/SharpLearning.Optimization/ParticleSwarmOptimizer.cs b/src/SharpLearning.Optimization/ParticleSwarmOptimizer.cs index 1c8d0265..9351d866 100644 --- a/src/SharpLearning.Optimization/ParticleSwarmOptimizer.cs +++ b/src/SharpLearning.Optimization/ParticleSwarmOptimizer.cs @@ -169,7 +169,7 @@ public OptimizerResult[] Optimize(Func functionToMini return results.ToArray(); } - void BoundCheck(double[] newValues, double[] maxValues, double[] minValues) + static void BoundCheck(double[] newValues, double[] maxValues, double[] minValues) { for (int i = 0; i < newValues.Length; i++) { diff --git a/src/SharpLearning.Optimization/SmacOptimizer.cs b/src/SharpLearning.Optimization/SmacOptimizer.cs index 0af8c11e..0299b93e 100644 --- a/src/SharpLearning.Optimization/SmacOptimizer.cs +++ b/src/SharpLearning.Optimization/SmacOptimizer.cs @@ -139,7 +139,7 @@ public OptimizerResult[] Optimize(Func functionToMini /// /// /// - public List RunParameterSets(Func functionToMinimize, + public static List RunParameterSets(Func functionToMinimize, double[][] parameterSets) { var results = new List(); @@ -215,7 +215,7 @@ double[][] GenerateCandidateParameterSets(int parameterSetCount, return InterLeaveModelBasedAndRandomParameterSets(challengers, randomChallengers); } - double[][] InterLeaveModelBasedAndRandomParameterSets(double[][] challengers, + static double[][] InterLeaveModelBasedAndRandomParameterSets(double[][] challengers, double[][] randomChallengers) { var finalParameterSets = new double[challengers.Length + randomChallengers.Length][]; @@ -313,7 +313,7 @@ List GetOneMutationNeighborhood(double[] parentParameterSet) return neighbors; } - double ComputeExpectedImprovement(double best, double[] parameterSet, RegressionForestModel model) + static double ComputeExpectedImprovement(double best, double[] parameterSet, RegressionForestModel model) { var prediction = model.PredictCertainty(parameterSet); var mean = prediction.Prediction; diff --git a/src/SharpLearning.RandomForest.Test/Learners/ClassificationExtremelyRandomizedTreesLearnerTest.cs 
b/src/SharpLearning.RandomForest.Test/Learners/ClassificationExtremelyRandomizedTreesLearnerTest.cs index d650e226..c16c60a2 100644 --- a/src/SharpLearning.RandomForest.Test/Learners/ClassificationExtremelyRandomizedTreesLearnerTest.cs +++ b/src/SharpLearning.RandomForest.Test/Learners/ClassificationExtremelyRandomizedTreesLearnerTest.cs @@ -107,7 +107,7 @@ public void ClassificationExtremelyRandomizedTreesLearner_Learn_Glass_100_Trees_ Assert.AreEqual(0.0560747663551402, error, m_delta); } - double ClassificationExtremelyRandomizedTreesLearner_Learn_Glass(int trees, + static double ClassificationExtremelyRandomizedTreesLearner_Learn_Glass(int trees, double subSampleRatio = 1.0) { var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); @@ -123,7 +123,7 @@ double ClassificationExtremelyRandomizedTreesLearner_Learn_Glass(int trees, return error; } - double ClassificationExtremelyRandomizedTreesLearner_Learn_Aptitude(int trees, + static double ClassificationExtremelyRandomizedTreesLearner_Learn_Aptitude(int trees, double subSampleRatio = 1.0) { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); diff --git a/src/SharpLearning.RandomForest.Test/Learners/ClassificationRandomForestLearnerTest.cs b/src/SharpLearning.RandomForest.Test/Learners/ClassificationRandomForestLearnerTest.cs index 0fae8e6e..cc693598 100644 --- a/src/SharpLearning.RandomForest.Test/Learners/ClassificationRandomForestLearnerTest.cs +++ b/src/SharpLearning.RandomForest.Test/Learners/ClassificationRandomForestLearnerTest.cs @@ -107,7 +107,7 @@ public void ClassificationRandomForestLearner_Learn_Glass_100_Trees_Parallel() Assert.AreEqual(0.018691588785046728, error, m_delta); } - double ClassificationRandomForestLearner_Learn_Glass(int trees, double subSampleRatio = 1.0) + static double ClassificationRandomForestLearner_Learn_Glass(int trees, double subSampleRatio = 1.0) { var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); @@ -122,7 +122,7 @@ double ClassificationRandomForestLearner_Learn_Glass(int trees, double subSample return error; } - double ClassificationRandomLearner_Learn_Aptitude(int trees, double subSampleRatio = 1.0) + static double ClassificationRandomLearner_Learn_Aptitude(int trees, double subSampleRatio = 1.0) { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); diff --git a/src/SharpLearning.RandomForest.Test/Learners/RegressionExtremelyRandomizedTreesLearnerTest.cs b/src/SharpLearning.RandomForest.Test/Learners/RegressionExtremelyRandomizedTreesLearnerTest.cs index c970a4ae..310ec2c6 100644 --- a/src/SharpLearning.RandomForest.Test/Learners/RegressionExtremelyRandomizedTreesLearnerTest.cs +++ b/src/SharpLearning.RandomForest.Test/Learners/RegressionExtremelyRandomizedTreesLearnerTest.cs @@ -107,7 +107,7 @@ public void RegressionExtremelyRandomizedTreesLearnerTest_Learn_Glass_100_Trees_ Assert.AreEqual(0.33450356466453129, error, m_delta); } - double RegressionExtremelyRandomizedTreesLearnerTest_Learn_Glass(int trees, double subSampleRatio = 1.0) + static double RegressionExtremelyRandomizedTreesLearnerTest_Learn_Glass(int trees, double subSampleRatio = 1.0) { var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); @@ -122,7 +122,7 @@ double RegressionExtremelyRandomizedTreesLearnerTest_Learn_Glass(int trees, doub return error; } - double RegressionExtremelyRandomizedTreesLearner_Learn_Aptitude(int trees, double subSampleRatio = 1.0) + static double RegressionExtremelyRandomizedTreesLearner_Learn_Aptitude(int trees, double subSampleRatio 
= 1.0) { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); diff --git a/src/SharpLearning.RandomForest.Test/Learners/RegressionRandomForestLearnerTest.cs b/src/SharpLearning.RandomForest.Test/Learners/RegressionRandomForestLearnerTest.cs index ff79c25c..0f70f5bc 100644 --- a/src/SharpLearning.RandomForest.Test/Learners/RegressionRandomForestLearnerTest.cs +++ b/src/SharpLearning.RandomForest.Test/Learners/RegressionRandomForestLearnerTest.cs @@ -106,7 +106,7 @@ public void RegressionRandomForestLearnerTest_Learn_Glass_100_Trees_Parallel() Assert.AreEqual(0.22701441864756075, error, m_delta); } - double RegressionRandomForestLearnerTest_Learn_Glass(int trees, double subSampleRatio = 1.0) + static double RegressionRandomForestLearnerTest_Learn_Glass(int trees, double subSampleRatio = 1.0) { var (observations, targets) = DataSetUtilities.LoadGlassDataSet(); @@ -121,7 +121,7 @@ double RegressionRandomForestLearnerTest_Learn_Glass(int trees, double subSample return error; } - double RegressionRandomForestLearner_Learn_Aptitude(int trees, double subSampleRatio = 1.0) + static double RegressionRandomForestLearner_Learn_Aptitude(int trees, double subSampleRatio = 1.0) { var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); diff --git a/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs b/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs index d99defd5..fe0346e9 100644 --- a/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs +++ b/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs @@ -195,7 +195,7 @@ public void ClassificationForestModel_Load() Assert.AreEqual(0.42307692307692307, error, m_delta); } - void Write(ProbabilityPrediction[] predictions) + static void Write(ProbabilityPrediction[] predictions) { var value = "new ProbabilityPrediction[] {"; foreach (var item in predictions) diff --git a/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs b/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs index e1d7fa82..85ba5654 100644 --- a/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs +++ b/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs @@ -201,7 +201,7 @@ public void RegressionForestModel_Load() Assert.AreEqual(0.14547628738104926, error, m_delta); } - void Write(CertaintyPrediction[] predictions) + static void Write(CertaintyPrediction[] predictions) { var value = "new CertaintyPrediction[] {"; foreach (var item in predictions) diff --git a/src/SharpLearning.RandomForest/Learners/ClassificationExtremelyRandomizedTreesLearner.cs b/src/SharpLearning.RandomForest/Learners/ClassificationExtremelyRandomizedTreesLearner.cs index cc6f0f2e..2176abfd 100644 --- a/src/SharpLearning.RandomForest/Learners/ClassificationExtremelyRandomizedTreesLearner.cs +++ b/src/SharpLearning.RandomForest/Learners/ClassificationExtremelyRandomizedTreesLearner.cs @@ -182,7 +182,7 @@ IPredictorModel ILearner.Learn( IPredictorModel ILearner.Learn( F64Matrix observations, double[] targets) => Learn(observations, targets); - double[] VariableImportance(ClassificationDecisionTreeModel[] models, int numberOfFeatures) + static double[] VariableImportance(ClassificationDecisionTreeModel[] models, int numberOfFeatures) { var rawVariableImportance = new double[numberOfFeatures]; diff --git a/src/SharpLearning.RandomForest/Learners/ClassificationRandomForestLearner.cs 
b/src/SharpLearning.RandomForest/Learners/ClassificationRandomForestLearner.cs index 8f70088b..0d305bfc 100644 --- a/src/SharpLearning.RandomForest/Learners/ClassificationRandomForestLearner.cs +++ b/src/SharpLearning.RandomForest/Learners/ClassificationRandomForestLearner.cs @@ -179,7 +179,7 @@ IPredictorModel ILearner.Learn( IPredictorModel ILearner.Learn( F64Matrix observations, double[] targets) => Learn(observations, targets); - double[] VariableImportance(ClassificationDecisionTreeModel[] models, int numberOfFeatures) + static double[] VariableImportance(ClassificationDecisionTreeModel[] models, int numberOfFeatures) { var rawVariableImportance = new double[numberOfFeatures]; diff --git a/src/SharpLearning.RandomForest/Learners/RegressionExtremelyRandomizedTreesLearner.cs b/src/SharpLearning.RandomForest/Learners/RegressionExtremelyRandomizedTreesLearner.cs index 8837a6ec..e06f0e70 100644 --- a/src/SharpLearning.RandomForest/Learners/RegressionExtremelyRandomizedTreesLearner.cs +++ b/src/SharpLearning.RandomForest/Learners/RegressionExtremelyRandomizedTreesLearner.cs @@ -157,7 +157,7 @@ IPredictorModel IIndexedLearner.Learn( IPredictorModel ILearner.Learn( F64Matrix observations, double[] targets) => Learn(observations, targets); - double[] VariableImportance(RegressionDecisionTreeModel[] models, int numberOfFeatures) + static double[] VariableImportance(RegressionDecisionTreeModel[] models, int numberOfFeatures) { var rawVariableImportance = new double[numberOfFeatures]; diff --git a/src/SharpLearning.RandomForest/Learners/RegressionRandomForestLearner.cs b/src/SharpLearning.RandomForest/Learners/RegressionRandomForestLearner.cs index aeb9cf22..428a6da9 100644 --- a/src/SharpLearning.RandomForest/Learners/RegressionRandomForestLearner.cs +++ b/src/SharpLearning.RandomForest/Learners/RegressionRandomForestLearner.cs @@ -150,7 +150,7 @@ IPredictorModel IIndexedLearner.Learn( IPredictorModel ILearner.Learn( F64Matrix observations, double[] targets) => Learn(observations, targets); - double[] VariableImportance(RegressionDecisionTreeModel[] models, int numberOfFeatures) + static double[] VariableImportance(RegressionDecisionTreeModel[] models, int numberOfFeatures) { var rawVariableImportance = new double[numberOfFeatures]; From d0dba98e5dd809527f815230b3639e6f913a2321 Mon Sep 17 00:00:00 2001 From: mdabros Date: Wed, 24 Jan 2024 21:32:01 +0100 Subject: [PATCH 08/15] Fix CA1822 --- .../ModelComparison/McNemarModelComparison.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs b/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs index e0513955..6f7e98e3 100644 --- a/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs +++ b/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs @@ -78,7 +78,7 @@ public static int[][] Compare(double[] model1Predictions, double[] model2Predict /// /// /// - public string CompareString(double[] model1Predictions, double[] model2Predictions, double[] targets) + public static string CompareString(double[] model1Predictions, double[] model2Predictions, double[] targets) { var mcNemarMatrix = Compare(model1Predictions, model2Predictions, targets); From 1feb287f0bbf45d290399748404526bd5ce2dd20 Mon Sep 17 00:00:00 2001 From: mdabros Date: Wed, 24 Jan 2024 21:34:16 +0100 Subject: [PATCH 09/15] Fix IDE0044 --- .../Learners/ClassificationAdaBoostLearner.cs | 6 +++--- .../Learners/RegressionAdaBoostLearner.cs | 6 +++--- 
.../Models/ClassificationAdaBoostModel.cs | 2 +- src/SharpLearning.Containers/Matrices/F64Matrix.cs | 2 +- src/SharpLearning.Containers/Matrices/StringMatrix.cs | 2 +- .../Augmentators/ContinuousMungeAugmentator.cs | 2 +- .../Augmentators/NominalMungeAugmentator.cs | 2 +- .../MatrixTransforms/MinMaxTransformer.cs | 2 +- src/SharpLearning.Neural/Layers/ActivationLayer.cs | 2 +- .../Learners/ClassificationXGBoostLearner.cs | 2 +- 10 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs b/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs index e7ac6856..4596be6a 100644 --- a/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs +++ b/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs @@ -36,9 +36,9 @@ public sealed class ClassificationAdaBoostLearner readonly TotalErrorClassificationMetric m_errorMetric = new TotalErrorClassificationMetric(); - List m_modelErrors = new List(); - List m_modelWeights = new List(); - List m_models = + readonly List m_modelErrors = new List(); + readonly List m_modelWeights = new List(); + readonly List m_models = new List(); double[] m_workErrors = new double[0]; diff --git a/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs b/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs index db4188b2..aef71d9a 100644 --- a/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs +++ b/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs @@ -32,9 +32,9 @@ public sealed class RegressionAdaBoostLearner : IIndexedLearner, ILearne readonly MeanAbsolutErrorRegressionMetric m_errorMetric = new MeanAbsolutErrorRegressionMetric(); readonly AdaBoostRegressionLoss m_loss; - List m_modelErrors = new List(); - List m_modelWeights = new List(); - List m_models = new List(); + readonly List m_modelErrors = new List(); + readonly List m_modelWeights = new List(); + readonly List m_models = new List(); double[] m_workErrors = new double[0]; double[] m_sampleWeights = new double[0]; diff --git a/src/SharpLearning.AdaBoost/Models/ClassificationAdaBoostModel.cs b/src/SharpLearning.AdaBoost/Models/ClassificationAdaBoostModel.cs index da698ffb..5ffce94e 100644 --- a/src/SharpLearning.AdaBoost/Models/ClassificationAdaBoostModel.cs +++ b/src/SharpLearning.AdaBoost/Models/ClassificationAdaBoostModel.cs @@ -19,7 +19,7 @@ public sealed class ClassificationAdaBoostModel : IPredictorModel, IPred readonly double[] m_modelWeights; readonly ClassificationDecisionTreeModel[] m_models; readonly double[] m_rawVariableImportance; - Dictionary m_predictions = new Dictionary(); + readonly Dictionary m_predictions = new Dictionary(); /// /// AdaBoost classification model. 
Consist of a series of tree model and corresponding weights diff --git a/src/SharpLearning.Containers/Matrices/F64Matrix.cs b/src/SharpLearning.Containers/Matrices/F64Matrix.cs index cff8f816..0b311683 100644 --- a/src/SharpLearning.Containers/Matrices/F64Matrix.cs +++ b/src/SharpLearning.Containers/Matrices/F64Matrix.cs @@ -11,7 +11,7 @@ namespace SharpLearning.Containers.Matrices /// Can be implicitly converted from double[][] public sealed unsafe class F64Matrix : IMatrix, IEquatable { - double[] m_featureArray; + readonly double[] m_featureArray; /// /// Creates a zero-matrix with the specified number of rows and cols diff --git a/src/SharpLearning.Containers/Matrices/StringMatrix.cs b/src/SharpLearning.Containers/Matrices/StringMatrix.cs index 08b3395d..4036c21a 100644 --- a/src/SharpLearning.Containers/Matrices/StringMatrix.cs +++ b/src/SharpLearning.Containers/Matrices/StringMatrix.cs @@ -8,7 +8,7 @@ namespace SharpLearning.Containers.Matrices /// public sealed class StringMatrix : IMatrix, IEquatable { - string[] m_featureArray; + readonly string[] m_featureArray; /// /// Creates a empty string matrix with the specified number of rows and cols diff --git a/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs b/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs index 7e681ce6..74eb87d2 100644 --- a/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs +++ b/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs @@ -15,7 +15,7 @@ public sealed class ContinuousMungeAugmentator { readonly double m_probabilityParameter; readonly double m_localVariance; - Random m_random; + readonly Random m_random; /// /// Augmentates continuous data according to the MUNGE method: diff --git a/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs b/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs index c2c8d71a..aec4562b 100644 --- a/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs +++ b/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs @@ -14,7 +14,7 @@ namespace SharpLearning.CrossValidation.Augmentators public sealed class NominalMungeAugmentator { readonly double m_probabilityParameter; - Random m_random; + readonly Random m_random; /// /// Augmentates nominal data according to the MUNGE method: diff --git a/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs b/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs index fd1bd5a4..ae734879 100644 --- a/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs +++ b/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs @@ -22,7 +22,7 @@ class FeatureMinMax readonly double m_max; readonly LinearNormalizer m_normalizer = new LinearNormalizer(); - Dictionary m_featureMinMax; + readonly Dictionary m_featureMinMax; /// /// Normalizes features within the specified range (min, max) diff --git a/src/SharpLearning.Neural/Layers/ActivationLayer.cs b/src/SharpLearning.Neural/Layers/ActivationLayer.cs index cedf248c..82f63f29 100644 --- a/src/SharpLearning.Neural/Layers/ActivationLayer.cs +++ b/src/SharpLearning.Neural/Layers/ActivationLayer.cs @@ -24,7 +24,7 @@ public class ActivationLayer : ILayer public Matrix ActivationDerivative; Matrix m_delta; - IActivation m_activation; + readonly IActivation m_activation; /// /// diff --git 
a/src/SharpLearning.XGBoost/Learners/ClassificationXGBoostLearner.cs b/src/SharpLearning.XGBoost/Learners/ClassificationXGBoostLearner.cs index adf542f6..568ccc8e 100644 --- a/src/SharpLearning.XGBoost/Learners/ClassificationXGBoostLearner.cs +++ b/src/SharpLearning.XGBoost/Learners/ClassificationXGBoostLearner.cs @@ -17,7 +17,7 @@ public sealed class ClassificationXGBoostLearner , ILearner , IIndexedLearner { - IDictionary m_parameters = new Dictionary(); + readonly IDictionary m_parameters = new Dictionary(); /// /// Classification learner for XGBoost. For classification problems, From acc445c9ac80d7a7c4cf2adf324940450e34cd27 Mon Sep 17 00:00:00 2001 From: mdabros Date: Wed, 24 Jan 2024 21:35:57 +0100 Subject: [PATCH 10/15] Make static --- .../ModelComparison/McNemarModelComparisonTest.cs | 4 +--- .../ModelComparison/McNemarModelComparison.cs | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/SharpLearning.Metrics.Test/ModelComparison/McNemarModelComparisonTest.cs b/src/SharpLearning.Metrics.Test/ModelComparison/McNemarModelComparisonTest.cs index 469124ad..6f2f7436 100644 --- a/src/SharpLearning.Metrics.Test/ModelComparison/McNemarModelComparisonTest.cs +++ b/src/SharpLearning.Metrics.Test/ModelComparison/McNemarModelComparisonTest.cs @@ -12,7 +12,6 @@ public void McNemarModelComparison_Compare() var model1 = new double[] { 2, 2, 3, 4, 3, 2, 3, 2, 1, 1 }; var model2 = new double[] { 1, 1, 3, 4, 3, 4, 2, 3, 1, 1 }; - var sut = new McNemarModelComparison(); var actual = McNemarModelComparison.Compare(model1, model2, targets); CollectionAssert.AreEqual(new int[] { 1, 2 }, actual[0]); @@ -26,8 +25,7 @@ public void McNemarModelComparison_CompareString() var model1 = new double[] { 2, 2, 3, 4, 3, 2, 3, 2, 1, 1 }; var model2 = new double[] { 1, 1, 3, 4, 3, 4, 2, 3, 1, 1 }; - var sut = new McNemarModelComparison(); - var actual = sut.CompareString(model1, model2, targets); + var actual = McNemarModelComparison.CompareString(model1, model2, targets); var expected = ";Model1Wrong;Model1Right\r\nModel2Wrong;1;2\r\nModel2Right;3;4"; Assert.AreEqual(expected, actual); diff --git a/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs b/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs index 6f7e98e3..944e8773 100644 --- a/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs +++ b/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs @@ -10,7 +10,7 @@ namespace SharpLearning.Metrics /// A clear improvement between two models would be if this number is, say 1 to 10. /// https://en.wikipedia.org/wiki/McNemar%27s_test /// - public sealed class McNemarModelComparison + public static class McNemarModelComparison { /// /// Compares two model using the McNemar test. 
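PATCH 08 and PATCH 10 together complete a single refactor: CA1822 first marked Compare and CompareString as static, and the type then became public static class McNemarModelComparison, which is why the test setup line var sut = new McNemarModelComparison() is deleted rather than updated. A minimal sketch of the resulting call sites is below; the model1 and model2 arrays are taken from the test diff, while the targets values are placeholders, since the diff does not show them.

using SharpLearning.Metrics;

static class McNemarUsageSketch
{
    static void CompareModels()
    {
        var model1 = new double[] { 2, 2, 3, 4, 3, 2, 3, 2, 1, 1 };
        var model2 = new double[] { 1, 1, 3, 4, 3, 4, 2, 3, 1, 1 };
        var targets = new double[] { 1, 1, 3, 4, 3, 2, 3, 2, 1, 1 }; // placeholder values

        // Both entry points are now invoked on the type itself; the class can
        // no longer be instantiated, which documents that the comparison is stateless.
        // Per CompareString's layout, the 2x2 counts have rows Model2Wrong/Model2Right
        // and columns Model1Wrong/Model1Right.
        var mcNemarMatrix = McNemarModelComparison.Compare(model1, model2, targets);
        var table = McNemarModelComparison.CompareString(model1, model2, targets);
    }
}

Making the holder type static turns accidental instantiation into a compile-time error instead of a silent no-op allocation.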
From 97ae27fe54381ec0140a0a54bf7838d181a8b044 Mon Sep 17 00:00:00 2001 From: mdabros Date: Wed, 24 Jan 2024 21:40:46 +0100 Subject: [PATCH 11/15] Fix src links --- SharpLearning.sln | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/SharpLearning.sln b/SharpLearning.sln index 5d60d821..99af6458 100644 --- a/SharpLearning.sln +++ b/SharpLearning.sln @@ -75,12 +75,12 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "root", "root", "{D83436F7-2 EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{2204FA16-973A-48CF-A9FD-94FA72424BA4}" ProjectSection(SolutionItems) = preProject - Directory.Build.props = Directory.Build.props - Directory.Build.targets = Directory.Build.targets - OutputBuildProject.props = OutputBuildProject.props - OutputBuildProps.props = OutputBuildProps.props - OutputBuildTargets.props = OutputBuildTargets.props - SourceLink.GitHub.props = SourceLink.GitHub.props + src\Directory.Build.props = src\Directory.Build.props + src\Directory.Build.targets = src\Directory.Build.targets + src\OutputBuildProject.props = src\OutputBuildProject.props + src\OutputBuildProps.props = src\OutputBuildProps.props + src\OutputBuildTargets.props = src\OutputBuildTargets.props + src\SourceLink.GitHub.props = src\SourceLink.GitHub.props EndProjectSection EndProject Global From 974ba535db778928873c13a44e146f8e19b910ca Mon Sep 17 00:00:00 2001 From: mdabros Date: Wed, 24 Jan 2024 21:47:16 +0100 Subject: [PATCH 12/15] Fix IDE0090 --- .../Models/ClassificationAdaBoostModelTest.cs | 4 +- .../Learners/ClassificationAdaBoostLearner.cs | 10 +-- .../Learners/RegressionAdaBoostLearner.cs | 8 +- .../Models/ClassificationAdaBoostModel.cs | 2 +- .../CertaintyPredictionTest.cs | 8 +- .../ProbabilityPredictionTest.cs | 8 +- .../FloatingPointConversion.cs | 2 +- .../LearningCurvePointExtensionsTest.cs | 8 +- .../LearningCurvesCalculatorTest.cs | 8 +- .../NoShuffleLearningCurvesCalculatorTest.cs | 4 +- .../RandomLearningCurvesCalculatorTest.cs | 4 +- .../StratifiedLearningCurvesCalculatorTest.cs | 4 +- .../ClassificationDecisionTreeModelTest.cs | 6 +- .../Nodes/NodeExtensionsTest.cs | 2 +- .../ClassificationImpurityCalculator.cs | 6 +- ...ilityClassificationEnsembleStrategyTest.cs | 6 +- ...ilityClassificationEnsembleStrategyTest.cs | 6 +- .../MatrixTransforms/MinMaxTransformer.cs | 2 +- .../ClassificationGradientBoostModelTest.cs | 4 +- .../GBMDecisionTree/GBMDecisionTreeLearner.cs | 2 +- .../ClassificationGradientBoostLearner.cs | 2 +- .../RegressionGradientBoostLearner.cs | 2 +- .../Csv/CsvParserTest.cs | 32 +++---- .../Csv/CsvRowExtensionsTest.cs | 16 ++-- .../GenericBinarySerializerTest.cs | 2 +- .../GenericXmlDataContractSerializerTest.cs | 2 +- ...LossClassificationProbabilityMetricTest.cs | 24 ++--- ...cAucClassificationProbabilityMetricTest.cs | 8 +- .../Impurity/GiniImpurityMetric.cs | 2 +- .../Ranking/AveragePrecisionRankingMetric.cs | 2 +- .../Optimizers/NeuralNetOptimizerTest.cs | 4 +- .../Optimizers/NeuralNetOptimizer.cs | 4 +- .../BayesianOptimizerTest.cs | 82 ++++++++--------- ...lobalizedBoundedNelderMeadOptimizerTest.cs | 12 +-- .../GridSearchOptimizationTest.cs | 8 +- .../HyperbandOptimizerTest.cs | 88 +++++++++---------- .../ParticleSwarmOptimizerTest.cs | 12 +-- .../RandomSearchOptimizationTest.cs | 8 +- .../SmacOptimizerTest.cs | 82 ++++++++--------- .../Models/ClassificationForestModelTest.cs | 4 +- .../Models/RegressionForestModelTest.cs | 4 +- .../Models/ClassificationXGBoostModelTest.cs | 6 +- 
.../Models/XGBoostTreeConverterTest.cs | 4 +- .../Models/ClassificationXGBoostModel.cs | 2 +- .../Models/RegressionXGBoostModel.cs | 2 +- .../Models/XGBoostTreeConverter.cs | 3 +- 46 files changed, 259 insertions(+), 262 deletions(-) diff --git a/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs b/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs index 2b0300b5..06825399 100644 --- a/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs +++ b/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs @@ -72,7 +72,7 @@ public void ClassificationAdaBoostModel_PredictProbability_Single() Assert.AreEqual(0.038461538461538464, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 0.553917222019051 }, { 1, 0.446082777980949 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.564961572849738 }, { 1, 0.435038427150263 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.417527839140627 }, { 1, 0.582472160859373 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.409988559960094 }, { 1, 0.590011440039906 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.461264944069783 }, { 1, 0.538735055930217 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.549503146925505 }, { 1, 0.450496853074495 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.537653803214063 }, { 1, 0.462346196785938 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.37650723540928 }, { 1, 0.62349276459072 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.524371409810479 }, { 1, 0.475628590189522 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.471117379964633 }, { 1, 0.528882620035367 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.404976804073458 }, { 1, 0.595023195926542 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.553917222019051 }, { 1, 0.446082777980949 }, 
}), new(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary { { 0, 0.564961572849738 }, { 1, 0.435038427150263 }, }), new(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(1, new Dictionary { { 0, 0.417527839140627 }, { 1, 0.582472160859373 }, }), new(1, new Dictionary { { 0, 0.409988559960094 }, { 1, 0.590011440039906 }, }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary { { 0, 0.461264944069783 }, { 1, 0.538735055930217 }, }), new(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary { { 0, 0.549503146925505 }, { 1, 0.450496853074495 }, }), new(0, new Dictionary { { 0, 0.537653803214063 }, { 1, 0.462346196785938 }, }), new(1, new Dictionary { { 0, 0.37650723540928 }, { 1, 0.62349276459072 }, }), new(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary { { 0, 0.524371409810479 }, { 1, 0.475628590189522 }, }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary { { 0, 0.471117379964633 }, { 1, 0.528882620035367 }, }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary { { 0, 0.404976804073458 }, { 1, 0.595023195926542 }, }), new(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), }; CollectionAssert.AreEqual(expected, actual); } @@ -90,7 +90,7 @@ public void ClassificationAdaBoostModel_PredictProbability_Multiple() Assert.AreEqual(0.038461538461538464, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 0.553917222019051 }, { 1, 0.446082777980949 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.564961572849738 }, { 1, 0.435038427150263 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.417527839140627 }, { 1, 0.582472160859373 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.409988559960094 }, { 1, 0.590011440039906 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.461264944069783 }, { 1, 0.538735055930217 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.549503146925505 }, { 1, 
0.450496853074495 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.537653803214063 }, { 1, 0.462346196785938 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.37650723540928 }, { 1, 0.62349276459072 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.524371409810479 }, { 1, 0.475628590189522 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.471117379964633 }, { 1, 0.528882620035367 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.404976804073458 }, { 1, 0.595023195926542 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.553917222019051 }, { 1, 0.446082777980949 }, }), new(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary { { 0, 0.564961572849738 }, { 1, 0.435038427150263 }, }), new(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(1, new Dictionary { { 0, 0.417527839140627 }, { 1, 0.582472160859373 }, }), new(1, new Dictionary { { 0, 0.409988559960094 }, { 1, 0.590011440039906 }, }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary { { 0, 0.461264944069783 }, { 1, 0.538735055930217 }, }), new(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary { { 0, 0.549503146925505 }, { 1, 0.450496853074495 }, }), new(0, new Dictionary { { 0, 0.537653803214063 }, { 1, 0.462346196785938 }, }), new(1, new Dictionary { { 0, 0.37650723540928 }, { 1, 0.62349276459072 }, }), new(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary { { 0, 0.524371409810479 }, { 1, 0.475628590189522 }, }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary { { 0, 0.471117379964633 }, { 1, 0.528882620035367 }, }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary { { 0, 0.404976804073458 }, { 1, 0.595023195926542 }, }), new(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), }; CollectionAssert.AreEqual(expected, actual); } diff --git 
a/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs b/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs index 4596be6a..bc1ee341 100644 --- a/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs +++ b/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs @@ -33,13 +33,11 @@ public sealed class ClassificationAdaBoostLearner int m_uniqueTargetValues; ClassificationDecisionTreeLearner m_modelLearner; - readonly TotalErrorClassificationMetric m_errorMetric = - new TotalErrorClassificationMetric(); + readonly TotalErrorClassificationMetric m_errorMetric = new(); - readonly List m_modelErrors = new List(); - readonly List m_modelWeights = new List(); - readonly List m_models = - new List(); + readonly List m_modelErrors = new(); + readonly List m_modelWeights = new(); + readonly List m_models = new(); double[] m_workErrors = new double[0]; double[] m_sampleWeights = new double[0]; diff --git a/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs b/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs index aef71d9a..466a2987 100644 --- a/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs +++ b/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs @@ -29,12 +29,12 @@ public sealed class RegressionAdaBoostLearner : IIndexedLearner, ILearne RegressionDecisionTreeLearner m_modelLearner; - readonly MeanAbsolutErrorRegressionMetric m_errorMetric = new MeanAbsolutErrorRegressionMetric(); + readonly MeanAbsolutErrorRegressionMetric m_errorMetric = new(); readonly AdaBoostRegressionLoss m_loss; - readonly List m_modelErrors = new List(); - readonly List m_modelWeights = new List(); - readonly List m_models = new List(); + readonly List m_modelErrors = new(); + readonly List m_modelWeights = new(); + readonly List m_models = new(); double[] m_workErrors = new double[0]; double[] m_sampleWeights = new double[0]; diff --git a/src/SharpLearning.AdaBoost/Models/ClassificationAdaBoostModel.cs b/src/SharpLearning.AdaBoost/Models/ClassificationAdaBoostModel.cs index 5ffce94e..c0bae425 100644 --- a/src/SharpLearning.AdaBoost/Models/ClassificationAdaBoostModel.cs +++ b/src/SharpLearning.AdaBoost/Models/ClassificationAdaBoostModel.cs @@ -19,7 +19,7 @@ public sealed class ClassificationAdaBoostModel : IPredictorModel, IPred readonly double[] m_modelWeights; readonly ClassificationDecisionTreeModel[] m_models; readonly double[] m_rawVariableImportance; - readonly Dictionary m_predictions = new Dictionary(); + readonly Dictionary m_predictions = new(); /// /// AdaBoost classification model. 
Consist of a series of tree model and corresponding weights diff --git a/src/SharpLearning.Containers.Test/CertaintyPredictionTest.cs b/src/SharpLearning.Containers.Test/CertaintyPredictionTest.cs index bafc7753..6ebabfde 100644 --- a/src/SharpLearning.Containers.Test/CertaintyPredictionTest.cs +++ b/src/SharpLearning.Containers.Test/CertaintyPredictionTest.cs @@ -5,10 +5,10 @@ namespace SharpLearning.Containers.Test [TestClass] public class CertaintyPredictionTest { - readonly CertaintyPrediction m_sut = new CertaintyPrediction(1.0, 0.4); - readonly CertaintyPrediction m_equal = new CertaintyPrediction(1.0, 0.4); - readonly CertaintyPrediction m_notEqual1 = new CertaintyPrediction(0.0, 0.4); - readonly CertaintyPrediction m_notEqual2 = new CertaintyPrediction(1.0, 0.65); + readonly CertaintyPrediction m_sut = new(1.0, 0.4); + readonly CertaintyPrediction m_equal = new(1.0, 0.4); + readonly CertaintyPrediction m_notEqual1 = new(0.0, 0.4); + readonly CertaintyPrediction m_notEqual2 = new(1.0, 0.65); [TestMethod] public void CertaintyPrediction_Prediction_Equals() diff --git a/src/SharpLearning.Containers.Test/ProbabilityPredictionTest.cs b/src/SharpLearning.Containers.Test/ProbabilityPredictionTest.cs index b5a2ba56..09d1bdfd 100644 --- a/src/SharpLearning.Containers.Test/ProbabilityPredictionTest.cs +++ b/src/SharpLearning.Containers.Test/ProbabilityPredictionTest.cs @@ -6,10 +6,10 @@ namespace SharpLearning.Containers.Test [TestClass] public class ProbabilityPredictionTest { - readonly ProbabilityPrediction m_sut = new ProbabilityPrediction(1.0, new Dictionary { { 1.0, .9 }, { 0.0, 0.3 } }); - readonly ProbabilityPrediction m_equal = new ProbabilityPrediction(1.0, new Dictionary { { 1.0, .9 }, { 0.0, 0.3 } }); - readonly ProbabilityPrediction m_notEqual1 = new ProbabilityPrediction(0.0, new Dictionary { { 1.0, .3 }, { 0.0, 0.8 } }); - readonly ProbabilityPrediction m_notEqual2 = new ProbabilityPrediction(1.0, new Dictionary { { 1.0, .78 }, { 0.0, 0.22 } }); + readonly ProbabilityPrediction m_sut = new(1.0, new Dictionary { { 1.0, .9 }, { 0.0, 0.3 } }); + readonly ProbabilityPrediction m_equal = new(1.0, new Dictionary { { 1.0, .9 }, { 0.0, 0.3 } }); + readonly ProbabilityPrediction m_notEqual1 = new(0.0, new Dictionary { { 1.0, .3 }, { 0.0, 0.8 } }); + readonly ProbabilityPrediction m_notEqual2 = new(1.0, new Dictionary { { 1.0, .78 }, { 0.0, 0.22 } }); [TestMethod] diff --git a/src/SharpLearning.Containers/FloatingPointConversion.cs b/src/SharpLearning.Containers/FloatingPointConversion.cs index cfe13679..9de94dd1 100644 --- a/src/SharpLearning.Containers/FloatingPointConversion.cs +++ b/src/SharpLearning.Containers/FloatingPointConversion.cs @@ -16,7 +16,7 @@ public static class FloatingPointConversion /// /// /// - public static readonly NumberFormatInfo nfi = new NumberFormatInfo(); + public static readonly NumberFormatInfo nfi = new(); /// /// Default NumberStyle is Any. 
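Every hunk in this IDE0090 patch is the same mechanical rewrite: where the declaration already spells out the type, C# 9's target-typed new() drops the repeated type name from the constructor call. The constructed types and runtime behavior are unchanged; only the source text shrinks. A short sketch of the pattern, with hypothetical declarations in the style of the hunks above (KeyValuePair stands in for ProbabilityPrediction, whose probability map is assumed to be keyed and valued by double):

using System.Collections.Generic;
using System.Globalization;

class Ide0090Sketch
{
    // IDE0090 (use target-typed 'new'): the declared type is the target,
    // so the constructor call can omit the type name (C# 9 and later).
    readonly List<double> m_modelErrors = new();      // was: new List<double>()
    static readonly NumberFormatInfo nfi = new();     // was: new NumberFormatInfo()

    // Inside an array initializer the element type is the target, which is why
    // the test arrays shrink from 'new ProbabilityPrediction(...)' to 'new(...)'.
    readonly KeyValuePair<double, double>[] m_pairs =
    {
        new(0.0, 0.75), // was: new KeyValuePair<double, double>(0.0, 0.75)
        new(1.0, 0.25),
    };
}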
diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointExtensionsTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointExtensionsTest.cs
index 29a9ebed..0561e347 100644
--- a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointExtensionsTest.cs
+++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointExtensionsTest.cs
@@ -13,8 +13,8 @@ public class LearningCurvePointExtensionsTest
     [TestMethod]
     public void BiasVarianceLearningCurvePointExtensions_ToF64Matrix()
     {
-        var sut = new List<LearningCurvePoint> { new LearningCurvePoint(10, 0.0, 1.0),
-            new LearningCurvePoint(100, 3.0, 8.0), new LearningCurvePoint(1000, 4.0, 4.0) };
+        var sut = new List<LearningCurvePoint> { new(10, 0.0, 1.0),
+            new(100, 3.0, 8.0), new(1000, 4.0, 4.0) };

         var actual = sut.ToF64Matrix();
         var expected = new F64Matrix(new double[] { 10, 0.0, 1.0,
@@ -28,8 +28,8 @@ public void BiasVarianceLearningCurvePointExtensions_Write()
     {
-        var sut = new List<LearningCurvePoint> { new LearningCurvePoint(10, 0.0, 1.0),
-            new LearningCurvePoint(100, 3.0, 8.0), new LearningCurvePoint(1000, 4.0, 4.0) };
+        var sut = new List<LearningCurvePoint> { new(10, 0.0, 1.0),
+            new(100, 3.0, 8.0), new(1000, 4.0, 4.0) };

         var writer = new StringWriter();
         sut.Write(() => writer);
diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvesCalculatorTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvesCalculatorTest.cs
index dc24eca5..17af1071 100644
--- a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvesCalculatorTest.cs
+++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvesCalculatorTest.cs
@@ -29,8 +29,8 @@ public void LearningCurvesCalculator_Calculate()

         var expected = new List<LearningCurvePoint>()
         {
-            new LearningCurvePoint(32, 0, 0.141565953928265),
-            new LearningCurvePoint(128, 0.0, 0.068970597423950036)
+            new(32, 0, 0.141565953928265),
+            new(128, 0.0, 0.068970597423950036)
         };

         CollectionAssert.AreEqual(expected, actual);
@@ -55,8 +55,8 @@ public void LearningCurvesCalculator_Calculate_Indices_Provided()

         var expected = new List<LearningCurvePoint>()
         {
-            new LearningCurvePoint(32, 0, 0.141565953928265),
-            new LearningCurvePoint(128, 0.0, 0.068970597423950036)
+            new(32, 0, 0.141565953928265),
+            new(128, 0.0, 0.068970597423950036)
         };

         CollectionAssert.AreEqual(expected, actual);
diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/NoShuffleLearningCurvesCalculatorTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/NoShuffleLearningCurvesCalculatorTest.cs
index 419ca44d..fb080e6d 100644
--- a/src/SharpLearning.CrossValidation.Test/LearningCurves/NoShuffleLearningCurvesCalculatorTest.cs
+++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/NoShuffleLearningCurvesCalculatorTest.cs
@@ -24,8 +24,8 @@ public void NoShuffleLearningCurvesCalculator_Calculate()

         var expected = new List<LearningCurvePoint>()
         {
-            new LearningCurvePoint(32, 0, 0.12874833873980004),
-            new LearningCurvePoint(128, 0.0, 0.067720786718774989)
+            new(32, 0, 0.12874833873980004),
+            new(128, 0.0, 0.067720786718774989)
         };

         CollectionAssert.AreEqual(expected, actual);
diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/RandomLearningCurvesCalculatorTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/RandomLearningCurvesCalculatorTest.cs
index 0350564a..4d7a3fd3 100644
--- a/src/SharpLearning.CrossValidation.Test/LearningCurves/RandomLearningCurvesCalculatorTest.cs
+++ 
b/src/SharpLearning.CrossValidation.Test/LearningCurves/RandomLearningCurvesCalculatorTest.cs @@ -24,8 +24,8 @@ public void RandomLearningCurvesCalculator_Calculate() var expected = new List() { - new LearningCurvePoint(32, 0, 0.141565953928265), - new LearningCurvePoint(128, 0.0, 0.068970597423950036) + new(32, 0, 0.141565953928265), + new(128, 0.0, 0.068970597423950036) }; CollectionAssert.AreEqual(expected, actual); diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/StratifiedLearningCurvesCalculatorTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/StratifiedLearningCurvesCalculatorTest.cs index 0aecbff0..5db46d38 100644 --- a/src/SharpLearning.CrossValidation.Test/LearningCurves/StratifiedLearningCurvesCalculatorTest.cs +++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/StratifiedLearningCurvesCalculatorTest.cs @@ -24,8 +24,8 @@ public void StratifiedLearningCurvesCalculator_Calculate() var expected = new List() { - new LearningCurvePoint(4, 0, 0.39999999999999997), - new LearningCurvePoint(16, 0.0625, 0.33333333333333331) + new(4, 0, 0.39999999999999997), + new(16, 0.0625, 0.33333333333333331) }; CollectionAssert.AreEqual(expected, actual); diff --git a/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs b/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs index 9a5b5c03..118c2758 100644 --- a/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs @@ -92,7 +92,7 @@ public void ClassificationDecisionTreeModel_PredictProbability_Single() Assert.AreEqual(0.23076923076923078, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(1, new 
Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), }; CollectionAssert.AreEqual(expected, actual); } @@ -110,7 +110,7 @@ public void ClassificationDecisionTreeModel_PredictProbability_Multiple() Assert.AreEqual(0.23076923076923078, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new 
ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary 
{ { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), }; CollectionAssert.AreEqual(expected, actual); } @@ -131,7 +131,7 @@ public void ClassificationDecisionTreeModel_PredictProbability_Multiple_Indexed( Assert.AreEqual(0.1, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), }; CollectionAssert.AreEqual(expected, actual); } diff --git a/src/SharpLearning.DecisionTrees.Test/Nodes/NodeExtensionsTest.cs b/src/SharpLearning.DecisionTrees.Test/Nodes/NodeExtensionsTest.cs index f9aedc11..4c1de002 100644 --- a/src/SharpLearning.DecisionTrees.Test/Nodes/NodeExtensionsTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/Nodes/NodeExtensionsTest.cs @@ -10,7 +10,7 @@ public class NodeExtensionsTest [TestMethod] public void NodeExtensions_UpdateParent() { - var nodes = new List { new Node(-1, 2.0, -1, -1, 0, -1) }; + var nodes = new List { new(-1, 2.0, -1, -1, 0, -1) }; var left = new Node(1, 5.0, -1, -1, 1, -1); var right = new Node(1, 5.0, -1, -1, 2, -1); diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs index 48a6186d..e43924bf 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs @@ -33,9 +33,9 @@ public abstract class ClassificationImpurityCalculator /// protected double m_weightedRight = 0.0; - internal 
TargetCounts m_weightedTargetCount = new TargetCounts();
-    internal TargetCounts m_weightedTargetCountLeft = new TargetCounts();
-    internal TargetCounts m_weightedTargetCountRight = new TargetCounts();
+    internal TargetCounts m_weightedTargetCount = new();
+    internal TargetCounts m_weightedTargetCountLeft = new();
+    internal TargetCounts m_weightedTargetCountRight = new();

     /// <summary>
     ///
diff --git a/src/SharpLearning.Ensemble.Test/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategyTest.cs b/src/SharpLearning.Ensemble.Test/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategyTest.cs
index f40eabe3..1c618416 100644
--- a/src/SharpLearning.Ensemble.Test/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategyTest.cs
+++ b/src/SharpLearning.Ensemble.Test/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategyTest.cs
@@ -13,9 +13,9 @@ public void GeometricMeanProbabilityClassificationEnsembleStrategy_Combine()
 {
     var values = new ProbabilityPrediction[]
     {
-        new ProbabilityPrediction(1.0, new Dictionary<double, double> { {0.0, 0.3}, {1.0, 0.88} }),
-        new ProbabilityPrediction(0.0, new Dictionary<double, double> { {0.0, 0.66}, {1.0, 0.33} }),
-        new ProbabilityPrediction(1.0, new Dictionary<double, double> { {0.0, 0.01}, {1.0, 0.99} }),
+        new(1.0, new Dictionary<double, double> { {0.0, 0.3}, {1.0, 0.88} }),
+        new(0.0, new Dictionary<double, double> { {0.0, 0.66}, {1.0, 0.33} }),
+        new(1.0, new Dictionary<double, double> { {0.0, 0.01}, {1.0, 0.99} }),
     };

     var sut = new GeometricMeanProbabilityClassificationEnsembleStrategy();
diff --git a/src/SharpLearning.Ensemble.Test/Strategies/MeanProbabilityClassificationEnsembleStrategyTest.cs b/src/SharpLearning.Ensemble.Test/Strategies/MeanProbabilityClassificationEnsembleStrategyTest.cs
index 97040382..9df5c470 100644
--- a/src/SharpLearning.Ensemble.Test/Strategies/MeanProbabilityClassificationEnsembleStrategyTest.cs
+++ b/src/SharpLearning.Ensemble.Test/Strategies/MeanProbabilityClassificationEnsembleStrategyTest.cs
@@ -13,9 +13,9 @@ public void MeanProbabilityClassificationEnsembleStrategy_Combine()
 {
     var values = new ProbabilityPrediction[]
     {
-        new ProbabilityPrediction(1.0, new Dictionary<double, double> { {0.0, 0.3}, {1.0, 0.88} }),
-        new ProbabilityPrediction(0.0, new Dictionary<double, double> { {0.0, 0.66}, {1.0, 0.33} }),
-        new ProbabilityPrediction(1.0, new Dictionary<double, double> { {0.0, 0.01}, {1.0, 0.99} }),
+        new(1.0, new Dictionary<double, double> { {0.0, 0.3}, {1.0, 0.88} }),
+        new(0.0, new Dictionary<double, double> { {0.0, 0.66}, {1.0, 0.33} }),
+        new(1.0, new Dictionary<double, double> { {0.0, 0.01}, {1.0, 0.99} }),
     };

     var sut = new MeanProbabilityClassificationEnsembleStrategy();
diff --git a/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs b/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs
index ae734879..39c29ca2 100644
--- a/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs
+++ b/src/SharpLearning.FeatureTransformations/MatrixTransforms/MinMaxTransformer.cs
@@ -20,7 +20,7 @@ class FeatureMinMax
     readonly double m_min;
     readonly double m_max;

-    readonly LinearNormalizer m_normalizer = new LinearNormalizer();
+    readonly LinearNormalizer m_normalizer = new();

     readonly Dictionary<int, FeatureMinMax> m_featureMinMax;
diff --git a/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs b/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs
index e7cebf1f..aa739b04 100644
--- a/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs
+++ b/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs
@@ -79,7 +79,7 
@@ public void ClassificationGradientBoostModel_PredictProbability_Single() Assert.AreEqual(0.038461538461538464, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.987907185249206 }, { 0, 0.0120928147507945 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.982783250692275 }, { 0, 0.0172167493077254 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489364 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.995341658753364 }, { 0, 0.00465834124663571 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00048646805791186 }, { 0, 0.999513531942088 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.999891769651047 }, { 0, 0.000108230348952856 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.993419876193791 }, { 0, 0.00658012380620933 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.988568859753437 }, { 0, 0.0114311402465632 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 }, }), new(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 }, }), new(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 }, }), new(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 }, }), new(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 }, }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new(1, new Dictionary { { 1, 0.987907185249206 }, { 0, 0.0120928147507945 }, }), new(1, new Dictionary { { 1, 0.982783250692275 }, { 0, 
0.0172167493077254 }, }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489364 }, }), new(1, new Dictionary { { 1, 0.995341658753364 }, { 0, 0.00465834124663571 }, }), new(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 }, }), new(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 }, }), new(0, new Dictionary { { 1, 0.00048646805791186 }, { 0, 0.999513531942088 }, }), new(1, new Dictionary { { 1, 0.999891769651047 }, { 0, 0.000108230348952856 }, }), new(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 }, }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 }, }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 }, }), new(1, new Dictionary { { 1, 0.993419876193791 }, { 0, 0.00658012380620933 }, }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 }, }), new(1, new Dictionary { { 1, 0.988568859753437 }, { 0, 0.0114311402465632 }, }), new(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 }, }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), }; CollectionAssert.AreEqual(expected, actual); } @@ -99,7 +99,7 @@ public void ClassificationGradientBoostModel_PredictProbability_Multiple() Assert.AreEqual(0.038461538461538464, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.987907185249206 }, { 0, 0.0120928147507945 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.982783250692275 }, { 0, 0.0172167493077254 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489364 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.995341658753364 }, { 0, 0.00465834124663571 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00048646805791186 }, { 0, 0.999513531942088 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.999891769651047 }, { 0, 0.000108230348952856 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new ProbabilityPrediction(0, new Dictionary { { 
1, 0.0118633115475969 }, { 0, 0.988136688452403 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.993419876193791 }, { 0, 0.00658012380620933 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 }, }), new ProbabilityPrediction(1, new Dictionary { { 1, 0.988568859753437 }, { 0, 0.0114311402465632 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new ProbabilityPrediction(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 }, }), new(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 }, }), new(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 }, }), new(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 }, }), new(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 }, }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new(1, new Dictionary { { 1, 0.987907185249206 }, { 0, 0.0120928147507945 }, }), new(1, new Dictionary { { 1, 0.982783250692275 }, { 0, 0.0172167493077254 }, }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489364 }, }), new(1, new Dictionary { { 1, 0.995341658753364 }, { 0, 0.00465834124663571 }, }), new(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 }, }), new(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 }, }), new(0, new Dictionary { { 1, 0.00048646805791186 }, { 0, 0.999513531942088 }, }), new(1, new Dictionary { { 1, 0.999891769651047 }, { 0, 0.000108230348952856 }, }), new(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 }, }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 }, }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 }, }), new(1, new Dictionary { { 1, 0.993419876193791 }, { 0, 0.00658012380620933 }, }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 }, }), new(1, new Dictionary { { 1, 0.988568859753437 }, { 0, 0.0114311402465632 }, }), new(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 }, }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), }; CollectionAssert.AreEqual(expected, actual); } diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs index 2d9a8007..aea2d482 100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs +++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs @@ -22,7 +22,7 @@ public sealed class GBMDecisionTreeLearner readonly bool m_runParallel; 
readonly IGradientBoostLoss m_loss;
     int m_featuresPrSplit;

-    readonly Random m_random = new Random(234);
+    readonly Random m_random = new(234);

     /// <summary>
     /// Fits a regression decision tree using a set of presorted indices for each feature.
diff --git a/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs
index c6d70a35..c1ec3fe9 100644
--- a/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs
+++ b/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs
@@ -28,7 +28,7 @@ public class ClassificationGradientBoostLearner
     readonly double m_learningRate;
     readonly int m_iterations;
     readonly double m_subSampleRatio;
-    readonly Random m_random = new Random(42);
+    readonly Random m_random = new(42);
     readonly IGradientBoostLoss m_loss;

     /// <summary>
diff --git a/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs
index 7f0c9ca3..3f847dc4 100644
--- a/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs
+++ b/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs
@@ -26,7 +26,7 @@ public class RegressionGradientBoostLearner : IIndexedLearner, ILearner<
     readonly double m_learningRate;
     readonly int m_iterations;
     readonly double m_subSampleRatio;
-    readonly Random m_random = new Random(42);
+    readonly Random m_random = new(42);
     readonly IGradientBoostLoss m_loss;

     /// <summary>
diff --git a/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs b/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs
index 0d16a734..497881fd 100644
--- a/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs
+++ b/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs
@@ -119,9 +119,9 @@ static List Expected_NoHeader()

     var expected = new List<CsvRow>
     {
-        new CsvRow(columnNameToIndex, new string[] { "1", "15", "0"}),
-        new CsvRow(columnNameToIndex, new string[] { "1", "12", "0"}),
-        new CsvRow(columnNameToIndex, new string[] { "4", "6", "0"})
+        new(columnNameToIndex, new string[] { "1", "15", "0"}),
+        new(columnNameToIndex, new string[] { "1", "12", "0"}),
+        new(columnNameToIndex, new string[] { "4", "6", "0"})
     };

     return expected;
@@ -138,9 +138,9 @@ static List Expected()

     var expected = new List<CsvRow>
     {
-        new CsvRow(columnNameToIndex, new string[] { "5", "2", "1"}),
-        new CsvRow(columnNameToIndex, new string[] { "1", "12", "0"}),
-        new CsvRow(columnNameToIndex, new string[] { "3", "18", "0"})
+        new(columnNameToIndex, new string[] { "5", "2", "1"}),
+        new(columnNameToIndex, new string[] { "1", "12", "0"}),
+        new(columnNameToIndex, new string[] { "3", "18", "0"})
     };

     return expected;
@@ -156,9 +156,9 @@ static List Expected_ColumnNames()

     var expected = new List<CsvRow>
     {
-        new CsvRow(columnNameToIndex, new string[] { "2", "1"}),
-        new CsvRow(columnNameToIndex, new string[] { "12", "0"}),
-        new CsvRow(columnNameToIndex, new string[] { "18", "0"})
+        new(columnNameToIndex, new string[] { "2", "1"}),
+        new(columnNameToIndex, new string[] { "12", "0"}),
+        new(columnNameToIndex, new string[] { "18", "0"})
     };

     return expected;
@@ -170,9 +170,9 @@ static List Expected_Select_ColumnNames()

     var expected = new List<CsvRow>
     {
-        new CsvRow(columnNameToIndex, new string[] { "1" }),
-        new CsvRow(columnNameToIndex, new string[] { "0" }),
-        new CsvRow(columnNameToIndex, new string[] { "0" })
+        new(columnNameToIndex, new string[] { "1" }),
+        new(columnNameToIndex, new string[] { "0" }),
+        new(columnNameToIndex, new string[] { "0" })
     };

     return expected;
@@ -184,8 +184,8 @@ static List Expected_Quote_Inclosed_Columns()

     var expected = new List<CsvRow>
     {
-        new CsvRow(columnNameToIndex, new string[] { "1", "2", "3"}),
-        new CsvRow(columnNameToIndex, new string[] { "10", "20", "30"})
+        new(columnNameToIndex, new string[] { "1", "2", "3"}),
+        new(columnNameToIndex, new string[] { "10", "20", "30"})
     };

     return expected;
@@ -197,8 +197,8 @@ static List Expected_Quote_Inclosed_Columns_Separator_In_Text()

     var expected = new List<CsvRow>
     {
-        new CsvRow(columnNameToIndex, new string[] { "1", "the following dates;1. jan, 1. april", "3"}),
-        new CsvRow(columnNameToIndex, new string[] { "10", "20", "30"})
+        new(columnNameToIndex, new string[] { "1", "the following dates;1. jan, 1. april", "3"}),
+        new(columnNameToIndex, new string[] { "10", "20", "30"})
     };

     return expected;
diff --git a/src/SharpLearning.InputOutput.Test/Csv/CsvRowExtensionsTest.cs b/src/SharpLearning.InputOutput.Test/Csv/CsvRowExtensionsTest.cs
index c0793728..c6d6d484 100644
--- a/src/SharpLearning.InputOutput.Test/Csv/CsvRowExtensionsTest.cs
+++ b/src/SharpLearning.InputOutput.Test/Csv/CsvRowExtensionsTest.cs
@@ -12,9 +12,9 @@ namespace SharpLearning.InputOutput.Test.Csv
 public class CsvRowExtensionsTest
 {
     static readonly string[] m_data = new string[] { "1", "2", "3", "4" };
-    static readonly Dictionary<string, int> m_columnNameToIndex = new Dictionary<string, int> { { "1", 0 }, { "2", 1 }, { "3", 2 }, { "4", 3 } };
-    readonly F64Matrix m_expectedF64Matrix = new F64Matrix(m_data.Select(value => CsvRowExtensions.DefaultF64Converter(value)).ToArray(), 1, 4);
-    readonly StringMatrix m_expectedStringMatrix = new StringMatrix(m_data, 1, 4);
+    static readonly Dictionary<string, int> m_columnNameToIndex = new() { { "1", 0 }, { "2", 1 }, { "3", 2 }, { "4", 3 } };
+    readonly F64Matrix m_expectedF64Matrix = new(m_data.Select(value => CsvRowExtensions.DefaultF64Converter(value)).ToArray(), 1, 4);
+    readonly StringMatrix m_expectedStringMatrix = new(m_data, 1, 4);

     readonly string m_expectedWrite = "1;2;3;4\r\n1;2;3;4";

@@ -49,7 +49,7 @@ public void CsvRowExtensions_GetValue()
 [TestMethod]
 public void CsvRowExtensions_Keep()
 {
-    var sut = new List<CsvRow> { new CsvRow(m_columnNameToIndex, m_data) };
+    var sut = new List<CsvRow> { new(m_columnNameToIndex, m_data) };

     var actual = sut.Keep("1", "2").ToList().First();
     var expected = new CsvRow(
@@ -62,7 +62,7 @@ public void CsvRowExtensions_Keep()
 [TestMethod]
 public void CsvRowExtensions_Remove()
 {
-    var sut = new List<CsvRow> { new CsvRow(m_columnNameToIndex, m_data) };
+    var sut = new List<CsvRow> { new(m_columnNameToIndex, m_data) };

     var actual = sut.Remove("3").ToList().First();
     var expected = new CsvRow(
@@ -75,7 +75,7 @@ public void CsvRowExtensions_Remove()
 [TestMethod]
 public void CsvRowExtensions_ToF64Matrix()
 {
-    var sut = new List<CsvRow> { new CsvRow(m_columnNameToIndex, m_data) };
+    var sut = new List<CsvRow> { new(m_columnNameToIndex, m_data) };
     var actual = sut.ToF64Matrix();
     Assert.AreEqual(m_expectedF64Matrix, actual);
 }
@@ -83,7 +83,7 @@ public void CsvRowExtensions_ToF64Matrix()
 [TestMethod]
 public void CsvRowExtensions_ToStringMatrix()
 {
-    var sut = new List<CsvRow> { new CsvRow(m_columnNameToIndex, m_data) };
+    var sut = new List<CsvRow> { new(m_columnNameToIndex, m_data) };
     var actual = sut.ToStringMatrix();
     Assert.AreEqual(m_expectedStringMatrix, actual);
 }
@@ -120,7 +120,7 @@ public void CsvRowExtensions_ToStringVector()
 [TestMethod]
 public void CsvRowExtensions_Write()
 {
-    var sut = new List<CsvRow> { new CsvRow(m_columnNameToIndex, m_data) };
+    var sut = new List<CsvRow> { new(m_columnNameToIndex, m_data) };

     var writer = new StringWriter();
     sut.Write(() => writer);
diff --git a/src/SharpLearning.InputOutput.Test/Serialization/GenericBinarySerializerTest.cs b/src/SharpLearning.InputOutput.Test/Serialization/GenericBinarySerializerTest.cs
index 1188f44a..409bb9a2 100644
--- a/src/SharpLearning.InputOutput.Test/Serialization/GenericBinarySerializerTest.cs
+++ b/src/SharpLearning.InputOutput.Test/Serialization/GenericBinarySerializerTest.cs
@@ -8,7 +8,7 @@ namespace SharpLearning.InputOutput.Test.Serialization
 [TestClass]
 public class GenericBinarySerializerTest
 {
-    readonly Dictionary<string, int> m_serializationData = new Dictionary<string, int>
+    readonly Dictionary<string, int> m_serializationData = new()
     {
         {"Test1", 0}, {"Test2", 1}, {"Test3", 2}, {"Test4", 3}, {"Test5", 4}
     };
diff --git a/src/SharpLearning.InputOutput.Test/Serialization/GenericXmlDataContractSerializerTest.cs b/src/SharpLearning.InputOutput.Test/Serialization/GenericXmlDataContractSerializerTest.cs
index 26810c2e..81451bd7 100644
--- a/src/SharpLearning.InputOutput.Test/Serialization/GenericXmlDataContractSerializerTest.cs
+++ b/src/SharpLearning.InputOutput.Test/Serialization/GenericXmlDataContractSerializerTest.cs
@@ -8,7 +8,7 @@ namespace SharpLearning.InputOutput.Test.Serialization
 [TestClass]
 public class GenericXmlDataContractSerializerTest
 {
-    readonly Dictionary<string, int> m_serializationData = new Dictionary<string, int>
+    readonly Dictionary<string, int> m_serializationData = new()
     {
         {"Test1", 0}, {"Test2", 1}, {"Test3", 2}, {"Test4", 3}, {"Test5", 4}
     };
diff --git a/src/SharpLearning.Metrics.Test/Classification/LogLossClassificationProbabilityMetricTest.cs b/src/SharpLearning.Metrics.Test/Classification/LogLossClassificationProbabilityMetricTest.cs
index c98697c4..bed002de 100644
--- a/src/SharpLearning.Metrics.Test/Classification/LogLossClassificationProbabilityMetricTest.cs
+++ b/src/SharpLearning.Metrics.Test/Classification/LogLossClassificationProbabilityMetricTest.cs
@@ -13,9 +13,9 @@ public void LogLossClassificationMetric_Error_1()
 {
     var sut = new LogLossClassificationProbabilityMetric(1e-15);
     var predictions = new ProbabilityPrediction[] {
-        new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 1.0 }, { 1, 0.0 }, { 2, 0.0 } }),
-        new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.0 }, { 1, 1.0 }, { 2, 0.0 } }),
-        new ProbabilityPrediction(2, new Dictionary<double, double> { { 0, 0.0 }, { 1, 0.0 }, { 2, 1.0 } }),
+        new(0, new Dictionary<double, double> { { 0, 1.0 }, { 1, 0.0 }, { 2, 0.0 } }),
+        new(1, new Dictionary<double, double> { { 0, 0.0 }, { 1, 1.0 }, { 2, 0.0 } }),
+        new(2, new Dictionary<double, double> { { 0, 0.0 }, { 1, 0.0 }, { 2, 1.0 } }),
     };

     var targets = new double[] { 0, 1, 2 };
@@ -29,9 +29,9 @@ public void LogLossClassificationMetric_Error_2()
 {
     var sut = new LogLossClassificationProbabilityMetric(1e-15);
     var predictions = new ProbabilityPrediction[] {
-        new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 1.0 }, { 1, 1.0 }, { 2, 1.0 } }),
-        new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.0 }, { 1, 1.0 }, { 2, 0.0 } }),
-        new ProbabilityPrediction(2, new Dictionary<double, double> { { 0, 0.0 }, { 1, 0.0 }, { 2, 1.0 } }),
+        new(0, new Dictionary<double, double> { { 0, 1.0 }, { 1, 1.0 }, { 2, 1.0 } }),
+        new(1, new Dictionary<double, double> { { 0, 0.0 }, { 1, 1.0 }, { 2, 0.0 } }),
+        new(2, new Dictionary<double, double> { { 0, 0.0 }, { 1, 0.0 }, { 2, 1.0 } }),
     };

     var targets = new double[] { 0, 1, 2 };
@@ -45,9 +45,9 @@ public void LogLossClassificationMetric_ErrorString()
 {
     var sut = new LogLossClassificationProbabilityMetric(1e-15);
     var predictions = new ProbabilityPrediction[] {
-        new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 1.0 }, { 1, 1.0 }, { 
2, 1.0 } }), - new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1, 1.0 }, { 2, 0.0 } }), - new ProbabilityPrediction(2, new Dictionary { { 0, 0.0 }, { 1, 0.0 }, { 2, 1.0 } }), + new(0, new Dictionary { { 0, 1.0 }, { 1, 1.0 }, { 2, 1.0 } }), + new(1, new Dictionary { { 0, 0.0 }, { 1, 1.0 }, { 2, 0.0 } }), + new(2, new Dictionary { { 0, 0.0 }, { 1, 0.0 }, { 2, 1.0 } }), }; var targets = new double[] { 0, 1, 2 }; @@ -63,9 +63,9 @@ public void LogLossClassificationMetric_ErrorString_TargetStringMapping() { var sut = new LogLossClassificationProbabilityMetric(1e-15); var predictions = new ProbabilityPrediction[] { - new ProbabilityPrediction(0, new Dictionary { { 0, 1.0 }, { 1, 1.0 }, { 2, 1.0 } }), - new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1, 1.0 }, { 2, 0.0 } }), - new ProbabilityPrediction(2, new Dictionary { { 0, 0.0 }, { 1, 0.0 }, { 2, 1.0 } }), + new(0, new Dictionary { { 0, 1.0 }, { 1, 1.0 }, { 2, 1.0 } }), + new(1, new Dictionary { { 0, 0.0 }, { 1, 1.0 }, { 2, 0.0 } }), + new(2, new Dictionary { { 0, 0.0 }, { 1, 0.0 }, { 2, 1.0 } }), }; var targets = new double[] { 0, 1, 2 }; diff --git a/src/SharpLearning.Metrics.Test/Classification/RocAucClassificationProbabilityMetricTest.cs b/src/SharpLearning.Metrics.Test/Classification/RocAucClassificationProbabilityMetricTest.cs index fb51b376..8cc6227c 100644 --- a/src/SharpLearning.Metrics.Test/Classification/RocAucClassificationProbabilityMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/RocAucClassificationProbabilityMetricTest.cs @@ -25,7 +25,7 @@ public void RocAucClassificationMetric_Error_Not_Binary() public void RocAucClassificationMetric_Error_No_Error() { var targets = new double[] { 0, 1 }; - var probabilities = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 0 }, { 1.0, 0.0 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 1 } }) }; + var probabilities = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0 }, { 1.0, 0.0 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 1 } }) }; var sut = new RocAucClassificationProbabilityMetric(1); var actual = sut.Error(targets, probabilities); @@ -36,7 +36,7 @@ public void RocAucClassificationMetric_Error_No_Error() public void RocAucClassificationMetric_Error() { var targets = new double[] { 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1 }; - var probabilities = new ProbabilityPrediction[] { new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.052380952 } }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.993377483 } }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.111111111 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.193377483 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.793377483 } }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.012345679 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.885860173 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.714285714 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.985860173 } }), new 
ProbabilityPrediction(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.985860173 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.993377483 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.993377483 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.954545455 } }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.985860173 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.985860173 } }) }; + var probabilities = new ProbabilityPrediction[] { new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.052380952 } }), new(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.993377483 } }), new(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.111111111 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.193377483 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.793377483 } }), new(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.012345679 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.885860173 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.714285714 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.985860173 } }), new(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.985860173 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.993377483 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.993377483 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.954545455 } }), new(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new(0, new Dictionary { { 0, 0.0 }, { 1.0, 0.020725389 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.985860173 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 0.985860173 } }) }; var sut = new RocAucClassificationProbabilityMetric(1); var actual = sut.Error(targets, probabilities); @@ -136,7 +136,7 @@ public void RocAucClassificationMetric_Error_Only_Negative_Targets() public void RocAucClassificationMetric_ErrorString() { var targets = new double[] { 0, 1 }; - var probabilities = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 0 }, { 1.0, 0.0 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 1 } }) }; + var probabilities = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0 }, { 1.0, 0.0 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 1 } }) }; var sut = new RocAucClassificationProbabilityMetric(1); var actual = sut.ErrorString(targets, probabilities); @@ -149,7 +149,7 @@ public void RocAucClassificationMetric_ErrorString() public void RocAucClassificationMetric_ErrorString_TargetStringMapping() { var targets = new double[] { 0, 1 }; - var probabilities = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 0 }, { 1.0, 0.0 } }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.0 }, { 1.0, 1 } }) }; + var probabilities = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0 }, { 1.0, 0.0 } }), new(1, new Dictionary { { 0, 0.0 }, { 1.0, 1 } }) }; var sut = new RocAucClassificationProbabilityMetric(1); var targetStringMapping = new Dictionary { diff --git 
a/src/SharpLearning.Metrics/Impurity/GiniImpurityMetric.cs b/src/SharpLearning.Metrics/Impurity/GiniImpurityMetric.cs
index ba959d3a..27d1a876 100644
--- a/src/SharpLearning.Metrics/Impurity/GiniImpurityMetric.cs
+++ b/src/SharpLearning.Metrics/Impurity/GiniImpurityMetric.cs
@@ -8,7 +8,7 @@ namespace SharpLearning.Metrics.Impurity
 /// </summary>
 public sealed class GiniImpurityMetric : IImpurityMetric
 {
-    readonly Dictionary<double, int> m_dictionary = new Dictionary<double, int>();
+    readonly Dictionary<double, int> m_dictionary = new();

     /// <summary>
     /// Calculates the Gini impurity of a sample. Main use is for decision tree classification
diff --git a/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs b/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs
index a64dc101..6a5a7d94 100644
--- a/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs
+++ b/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs
@@ -10,7 +10,7 @@ namespace SharpLearning.Metrics.Ranking
 public sealed class AveragePrecisionRankingMetric : IRankingMetric
 {
     readonly int m_k;
-    readonly HashSet<double> m_workTargets = new HashSet<double>();
+    readonly HashSet<double> m_workTargets = new();

     /// <summary>
     /// Takes the top k predictions to consider
diff --git a/src/SharpLearning.Neural.Test/Optimizers/NeuralNetOptimizerTest.cs b/src/SharpLearning.Neural.Test/Optimizers/NeuralNetOptimizerTest.cs
index e95af5fc..95180dc3 100644
--- a/src/SharpLearning.Neural.Test/Optimizers/NeuralNetOptimizerTest.cs
+++ b/src/SharpLearning.Neural.Test/Optimizers/NeuralNetOptimizerTest.cs
@@ -13,8 +13,8 @@ public void NeuralNetOptimizer_Reset_Does_Not_Throw()
 {
     var parametersAndGradients = new List<ParametersAndGradients>
     {
-        new ParametersAndGradients(new float[10], new float[10]),
-        new ParametersAndGradients(new float[10], new float[10]),
+        new(new float[10], new float[10]),
+        new(new float[10], new float[10]),
     };

     foreach (OptimizerMethod optimizer in Enum.GetValues(typeof(OptimizerMethod)))
diff --git a/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs b/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs
index 469cb598..d7766577 100644
--- a/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs
+++ b/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs
@@ -17,8 +17,8 @@ public sealed class NeuralNetOptimizer
     readonly float m_momentum;
     readonly int m_batchSize;

-    readonly List<float[]> m_gsumWeights = new List<float[]>(); // last iteration gradients (used for momentum calculations)
-    readonly List<float[]> m_xsumWeights = new List<float[]>(); // used in adam or adadelta
+    readonly List<float[]> m_gsumWeights = new(); // last iteration gradients (used for momentum calculations)
+    readonly List<float[]> m_xsumWeights = new(); // used in adam or adadelta

     readonly OptimizerMethod m_optimizerMethod = OptimizerMethod.Sgd;
     readonly float m_rho = 0.95f;
diff --git a/src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs b/src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs
index 348b3bc6..cf60204a 100644
--- a/src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs
+++ b/src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs
@@ -18,7 +18,7 @@ public void BayesianOptimizer_OptimizeBest_SingleParameter(int? maxDegreeOfParal
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
+        new(0.0, 100.0, Transform.Linear)
     };

     var sut = CreateSut(maxDegreeOfParallelism, parameters);
@@ -38,9 +38,9 @@ public void BayesianOptimizer_OptimizeBest_MultipleParameters(int? 
maxDegreeOfPa
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
     };

     var sut = CreateSut(maxDegreeOfParallelism, parameters);
@@ -64,7 +64,7 @@ public void BayesianOptimizer_Optimize(int? maxDegreeOfParallelism)
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
+        new(0.0, 100.0, Transform.Linear)
     };

     var sut = CreateSut(maxDegreeOfParallelism, parameters);
@@ -74,8 +74,8 @@ public void BayesianOptimizer_Optimize(int? maxDegreeOfParallelism)

     var expected = new OptimizerResult[]
     {
-        new OptimizerResult(new double[] { 90.513222660177036 }, 114559.43191955783),
-        new OptimizerResult(new double[] { 41.752538896050559 }, 779.196560786838)
+        new(new double[] { 90.513222660177036 }, 114559.43191955783),
+        new(new double[] { 41.752538896050559 }, 779.196560786838)
     };

     Assert.AreEqual(expected.First().Error, actual.First().Error, Delta);
@@ -105,37 +105,37 @@ public void BayesianOptimizer_OptimizeBest_MultipleParameters_Open_Loop_Using_Pr
 {
     var previousResults = new List<OptimizerResult>()
     {
-        new OptimizerResult(new[] {-6.83357586936726,6.0834837966056,-0.0766206300242906}, -0.476143174040315),
-        new OptimizerResult(new[] {-7.29391428515963,6.0834837966056,1.01057317620636}, -0.41300737879641),
-        new OptimizerResult(new[] {-7.29391428515963,6.0834837966056,1.01057317620636}, -0.41300737879641),
-        new OptimizerResult(new[] {-8.05557010604794,-5.14662256238359,0.0363854738121798}, -0.397724266113204),
-        new OptimizerResult(new[] {-8.06241082868651,5.88012208038947,-1.5210571566229}, -0.356975377788698),
-        new OptimizerResult(new[] {4.42408777513732,0.472018332440413,1.7076749781648}, -0.315360461074171),
-        new OptimizerResult(new[] {-8.14483470197061,7.54724840519356,0.0363854738121798}, -0.279108605472165),
-        new OptimizerResult(new[] {-6.64746686660101,6.7109944004151,-0.214493549528761}, -0.266917186594653),
-        new OptimizerResult(new[] {5.34224593795009,-6.45170816986435,-2.1147669628797}, -0.255769932489526),
-        new OptimizerResult(new[] {-7.84876385603508,6.28409400409278,3.1447921661403}, -0.241263236969342),
-        new OptimizerResult(new[] {-7.84876385603508,4.96554990995934,-0.0766206300242906}, -0.232637166385485),
-        new OptimizerResult(new[] {-8.14041409554911,7.16927772256047,1.75166608381628}, -0.220476103560048),
-        new OptimizerResult(new[] {-7.84876385603508,6.0834837966056,-3.60210045874217}, -0.212970686239402),
-        new OptimizerResult(new[] {-7.29391428515963,5.22505613752876,1.01057317620636}, -0.206689239504653),
-        new OptimizerResult(new[] {-9.20479206331297,6.0834837966056,-0.0766206300242906}, -0.198657722521128),
-        new OptimizerResult(new[] {-8.25145286426481,5.27274844947865,-1.82163462593296}, -0.17367847378187),
-        new OptimizerResult(new[] {-7.84876385603508,6.0834837966056,5.3824106023565}, -0.153564625328103),
-        new OptimizerResult(new[] {-1.37364300497511,-1.35665034472786,-0.585322245296707}, -0.131453543138338),
-        new OptimizerResult(new[] {-7.84876385603508,7.74187722138216,-0.0766206300242906}, -0.103906821017427),
-        new OptimizerResult(new[] {9.20868899636375,-9.38389458664874,1.51842798642741}, -0.0850657757130275),
-        new OptimizerResult(new[] {-7.72406242681856,5.70825177044992,9.95585092341334}, -0.0759553721161318),
-        new OptimizerResult(new[] {1.65093947744506,-4.37866264692445,-4.29402069854272}, -0.0616761163702651),
-        new OptimizerResult(new[] {-9.37414173938993,6.28409400409278,0.0363854738121798}, -0.0488375857853505),
-        new OptimizerResult(new[] {3.38691201684387,5.42095644186295,-5.71318443664964}, -0.0235423806080941),
-        new OptimizerResult(new[] {-6.48224856540665,-7.13935053774125,7.05507751417117}, -0.0160884883078408),
-        new OptimizerResult(new[] {-9.68539061941457,7.96346846873102,-0.990608674935348}, -0.0141441279734299),
-        new OptimizerResult(new[] {-9.41382774124566,5.12580713030221,0.630654976996897}, -0.00269773409680873),
-        new OptimizerResult(new[] {6.7694738305963,1.56629731485913,-2.12145430600338}, 0.000673595210828553),
-        new OptimizerResult(new[] {-0.0282478006688169,2.87566112022645,-4.84997700660023}, 0.00465834522866944),
-        new OptimizerResult(new[] {3.50054986472267,8.01269467827524,7.36471213277649}, 0.00663762309484885),
-        new OptimizerResult(new[] {3.05129390817662,-6.16640157819092,7.49125691013935}, 0.0105475373675896),
+        new(new[] {-6.83357586936726,6.0834837966056,-0.0766206300242906}, -0.476143174040315),
+        new(new[] {-7.29391428515963,6.0834837966056,1.01057317620636}, -0.41300737879641),
+        new(new[] {-7.29391428515963,6.0834837966056,1.01057317620636}, -0.41300737879641),
+        new(new[] {-8.05557010604794,-5.14662256238359,0.0363854738121798}, -0.397724266113204),
+        new(new[] {-8.06241082868651,5.88012208038947,-1.5210571566229}, -0.356975377788698),
+        new(new[] {4.42408777513732,0.472018332440413,1.7076749781648}, -0.315360461074171),
+        new(new[] {-8.14483470197061,7.54724840519356,0.0363854738121798}, -0.279108605472165),
+        new(new[] {-6.64746686660101,6.7109944004151,-0.214493549528761}, -0.266917186594653),
+        new(new[] {5.34224593795009,-6.45170816986435,-2.1147669628797}, -0.255769932489526),
+        new(new[] {-7.84876385603508,6.28409400409278,3.1447921661403}, -0.241263236969342),
+        new(new[] {-7.84876385603508,4.96554990995934,-0.0766206300242906}, -0.232637166385485),
+        new(new[] {-8.14041409554911,7.16927772256047,1.75166608381628}, -0.220476103560048),
+        new(new[] {-7.84876385603508,6.0834837966056,-3.60210045874217}, -0.212970686239402),
+        new(new[] {-7.29391428515963,5.22505613752876,1.01057317620636}, -0.206689239504653),
+        new(new[] {-9.20479206331297,6.0834837966056,-0.0766206300242906}, -0.198657722521128),
+        new(new[] {-8.25145286426481,5.27274844947865,-1.82163462593296}, -0.17367847378187),
+        new(new[] {-7.84876385603508,6.0834837966056,5.3824106023565}, -0.153564625328103),
+        new(new[] {-1.37364300497511,-1.35665034472786,-0.585322245296707}, -0.131453543138338),
+        new(new[] {-7.84876385603508,7.74187722138216,-0.0766206300242906}, -0.103906821017427),
+        new(new[] {9.20868899636375,-9.38389458664874,1.51842798642741}, -0.0850657757130275),
+        new(new[] {-7.72406242681856,5.70825177044992,9.95585092341334}, -0.0759553721161318),
+        new(new[] {1.65093947744506,-4.37866264692445,-4.29402069854272}, -0.0616761163702651),
+        new(new[] {-9.37414173938993,6.28409400409278,0.0363854738121798}, -0.0488375857853505),
+        new(new[] {3.38691201684387,5.42095644186295,-5.71318443664964}, -0.0235423806080941),
+        new(new[] {-6.48224856540665,-7.13935053774125,7.05507751417117}, -0.0160884883078408),
+        new(new[] {-9.68539061941457,7.96346846873102,-0.990608674935348}, -0.0141441279734299),
+        new(new[] {-9.41382774124566,5.12580713030221,0.630654976996897}, -0.00269773409680873),
+        new(new[] {6.7694738305963,1.56629731485913,-2.12145430600338}, 0.000673595210828553),
+        new(new[] {-0.0282478006688169,2.87566112022645,-4.84997700660023}, 0.00465834522866944),
+        new(new[] {3.50054986472267,8.01269467827524,7.36471213277649}, 0.00663762309484885),
+        new(new[] {3.05129390817662,-6.16640157819092,7.49125691013935}, 0.0105475373675896),
     };

     OptimizerResult actual = RunOpenLoopOptimizationTest(previousResults);
@@ -214,9 +214,9 @@ static OptimizerResult RunOpenLoopOptimizationTest(List<OptimizerResult> results
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
     };

     var iterations = 80;
diff --git a/src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs b/src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs
index dc291def..d84dd1ca 100644
--- a/src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs
+++ b/src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs
@@ -16,9 +16,9 @@ public void GlobalizedBoundedNelderMeadOptimizer_OptimizeBest(int? maxDegreeOfPa
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
     };

     var sut = CreateSut(maxDegreeOfParallelism, parameters);
@@ -42,7 +42,7 @@ public void GlobalizedBoundedNelderMeadOptimizer_Optimize(int? maxDegreeOfParall
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
+        new(0.0, 100.0, Transform.Linear)
     };

     var sut = CreateSut(maxDegreeOfParallelism, parameters);
@@ -52,8 +52,8 @@ public void GlobalizedBoundedNelderMeadOptimizer_Optimize(int? maxDegreeOfParall

     var expected = new OptimizerResult[]
     {
-        new OptimizerResult(new double[] { 37.71314634450421 }, 109.3438139631394),
-        new OptimizerResult(new double[] { 37.713142445047254 }, 109.34381396345546)
+        new(new double[] { 37.71314634450421 }, 109.3438139631394),
+        new(new double[] { 37.713142445047254 }, 109.34381396345546)
     };

     Assert.AreEqual(expected.First().Error, actual.First().Error, Delta);
diff --git a/src/SharpLearning.Optimization.Test/GridSearchOptimizationTest.cs b/src/SharpLearning.Optimization.Test/GridSearchOptimizationTest.cs
index 14beff39..201b0e13 100644
--- a/src/SharpLearning.Optimization.Test/GridSearchOptimizationTest.cs
+++ b/src/SharpLearning.Optimization.Test/GridSearchOptimizationTest.cs
@@ -17,7 +17,7 @@ public void GridSearchOptimizer_OptimizeBest(int? maxDegreeOfParallelism)
 {
     var parameters = new GridParameterSpec[]
     {
-        new GridParameterSpec(10.0, 20.0, 30.0, 35.0, 37.5, 40.0, 50.0, 60.0)
+        new(10.0, 20.0, 30.0, 35.0, 37.5, 40.0, 50.0, 60.0)
     };

     var sut = maxDegreeOfParallelism.HasValue ?
@@ -39,7 +39,7 @@ public void GridSearchOptimizer_Optimize(int? maxDegreeOfParallelism)
 {
     var parameters = new GridParameterSpec[]
     {
-        new GridParameterSpec(10.0, 20.0, 30.0, 35.0, 37.5, 40.0, 50.0, 60.0)
+        new(10.0, 20.0, 30.0, 35.0, 37.5, 40.0, 50.0, 60.0)
     };

     var sut = maxDegreeOfParallelism.HasValue ?
@@ -50,8 +50,8 @@ public void GridSearchOptimizer_Optimize(int? maxDegreeOfParallelism)

     var expected = new OptimizerResult[]
     {
-        new OptimizerResult(new double[] { 10 }, 31638.9579),
-        new OptimizerResult(new double[] { 60 }, 20500.6279)
+        new(new double[] { 10 }, 31638.9579),
+        new(new double[] { 60 }, 20500.6279)
     };

     Assert.AreEqual(expected.First().Error, actual.First().Error, Delta);
diff --git a/src/SharpLearning.Optimization.Test/HyperbandOptimizerTest.cs b/src/SharpLearning.Optimization.Test/HyperbandOptimizerTest.cs
index ed19197a..b073e77b 100644
--- a/src/SharpLearning.Optimization.Test/HyperbandOptimizerTest.cs
+++ b/src/SharpLearning.Optimization.Test/HyperbandOptimizerTest.cs
@@ -92,50 +92,50 @@ static void AssertOptimizerResult(OptimizerResult expected, OptimizerResult actu

 static OptimizerResult[] Expected => new OptimizerResult[]
 {
-    new OptimizerResult(new [] { 183.050454, 0.103778, 10.521202 }, 0.815090),
-    new OptimizerResult(new [] { 242.035154, 0.160589, 14.928944 }, 0.319450),
-    new OptimizerResult(new [] { 217.110439, 0.121371, 9.134293 }, 0.287873),
-    new OptimizerResult(new [] { 205.828006, 0.026428, 13.831848 }, 0.213150),
-    new OptimizerResult(new [] { 81.318916, 0.028789, 13.468363 }, 0.833401),
-    new OptimizerResult(new [] { 183.050454, 0.103778, 10.521202 }, 0.138057),
-    new OptimizerResult(new [] { 280.115839, 0.043236, 14.109365 }, 0.315902),
-    new OptimizerResult(new [] { 199.842478, 0.023487, 12.218300 }, 0.858262),
-    new OptimizerResult(new [] { 89.288205, 0.029247, 12.503943 }, 0.960621),
-    new OptimizerResult(new [] { 238.527937, 0.023610, 14.521096 }, 0.998539),
-    new OptimizerResult(new [] { 103.184215, 0.048606, 11.929732 }, 0.391503),
-    new OptimizerResult(new [] { 217.110439, 0.121371, 9.134293 }, 0.125866),
-    new OptimizerResult(new [] { 80.598836, 0.039832, 8.388401 }, 0.962324),
-    new OptimizerResult(new [] { 89.359300, 0.042719, 10.902781 }, 0.655116),
-    new OptimizerResult(new [] { 183.050454, 0.103778, 10.521202 }, 0.045531),
-    new OptimizerResult(new [] { 242.035154, 0.160589, 14.928944 }, 0.241034),
-    new OptimizerResult(new [] { 205.828006, 0.026428, 13.831848 }, 0.072501),
-    new OptimizerResult(new [] { 137.807164, 0.080876, 9.133881 }, 0.917069),
-    new OptimizerResult(new [] { 122.739555, 0.071284, 9.159947 }, 0.428372),
-    new OptimizerResult(new [] { 265.007895, 0.065434, 9.655193 }, 0.252369),
-    new OptimizerResult(new [] { 242.616914, 0.051308, 14.785707 }, 0.990477),
-    new OptimizerResult(new [] { 245.944001, 0.173415, 11.243352 }, 0.755331),
-    new OptimizerResult(new [] { 87.069973, 0.049606, 9.162192 }, 0.412378),
-    new OptimizerResult(new [] { 121.689890, 0.109421, 14.372696 }, 0.519928),
-    new OptimizerResult(new [] { 211.466343, 0.060338, 10.341543 }, 0.589474),
-    new OptimizerResult(new [] { 138.097042, 0.028550, 8.527269 }, 0.305832),
-    new OptimizerResult(new [] { 81.318916, 0.028789, 13.468363 }, 0.065642),
-    new OptimizerResult(new [] { 258.473191, 0.043830, 8.081241 }, 0.769086),
-    new OptimizerResult(new [] { 110.790052, 0.063165, 9.287423 }, 0.520903),
-    new OptimizerResult(new [] { 259.348583, 0.072041, 9.899872 }, 0.459911),
-    new OptimizerResult(new [] { 187.514870, 0.124334, 11.735301 }, 0.918126),
-    new OptimizerResult(new [] { 80.806287, 0.028735, 9.547892 }, 0.824839),
-    new OptimizerResult(new [] { 212.130398, 0.142035, 8.342675 }, 0.713911),
-    new OptimizerResult(new [] { 212.130398, 0.142035, 8.342675 }, 0.082547),
-    new OptimizerResult(new [] { 80.806287, 0.028735, 9.547892, }, 0.135099),
-    new OptimizerResult(new [] { 119.813471, 0.074485, 13.382158 }, 0.154206),
-    new OptimizerResult(new [] { 202.034806, 0.137801, 9.508964 },0.627903),
-    new OptimizerResult(new [] { 102.696143, 0.099462, 8.557010 },0.410965),
-    new OptimizerResult(new [] { 118.759207, 0.038629, 9.560888 },0.587768),
-    new OptimizerResult(new [] { 96.998060, 0.039504, 11.428746 },0.225692),
-    new OptimizerResult(new [] { 117.955108, 0.082906, 12.319315 }, 0.801867),
-    new OptimizerResult(new [] { 246.662655, 0.027162, 14.963403 }, 0.088704),
-    new OptimizerResult(new [] { 156.214348, 0.167765, 12.516866 }, 0.365275),
-    new OptimizerResult(new [] { 278.337940, 0.098931, 13.177449 }, 0.009549),
+    new(new [] { 183.050454, 0.103778, 10.521202 }, 0.815090),
+    new(new [] { 242.035154, 0.160589, 14.928944 }, 0.319450),
+    new(new [] { 217.110439, 0.121371, 9.134293 }, 0.287873),
+    new(new [] { 205.828006, 0.026428, 13.831848 }, 0.213150),
+    new(new [] { 81.318916, 0.028789, 13.468363 }, 0.833401),
+    new(new [] { 183.050454, 0.103778, 10.521202 }, 0.138057),
+    new(new [] { 280.115839, 0.043236, 14.109365 }, 0.315902),
+    new(new [] { 199.842478, 0.023487, 12.218300 }, 0.858262),
+    new(new [] { 89.288205, 0.029247, 12.503943 }, 0.960621),
+    new(new [] { 238.527937, 0.023610, 14.521096 }, 0.998539),
+    new(new [] { 103.184215, 0.048606, 11.929732 }, 0.391503),
+    new(new [] { 217.110439, 0.121371, 9.134293 }, 0.125866),
+    new(new [] { 80.598836, 0.039832, 8.388401 }, 0.962324),
+    new(new [] { 89.359300, 0.042719, 10.902781 }, 0.655116),
+    new(new [] { 183.050454, 0.103778, 10.521202 }, 0.045531),
+    new(new [] { 242.035154, 0.160589, 14.928944 }, 0.241034),
+    new(new [] { 205.828006, 0.026428, 13.831848 }, 0.072501),
+    new(new [] { 137.807164, 0.080876, 9.133881 }, 0.917069),
+    new(new [] { 122.739555, 0.071284, 9.159947 }, 0.428372),
+    new(new [] { 265.007895, 0.065434, 9.655193 }, 0.252369),
+    new(new [] { 242.616914, 0.051308, 14.785707 }, 0.990477),
+    new(new [] { 245.944001, 0.173415, 11.243352 }, 0.755331),
+    new(new [] { 87.069973, 0.049606, 9.162192 }, 0.412378),
+    new(new [] { 121.689890, 0.109421, 14.372696 }, 0.519928),
+    new(new [] { 211.466343, 0.060338, 10.341543 }, 0.589474),
+    new(new [] { 138.097042, 0.028550, 8.527269 }, 0.305832),
+    new(new [] { 81.318916, 0.028789, 13.468363 }, 0.065642),
+    new(new [] { 258.473191, 0.043830, 8.081241 }, 0.769086),
+    new(new [] { 110.790052, 0.063165, 9.287423 }, 0.520903),
+    new(new [] { 259.348583, 0.072041, 9.899872 }, 0.459911),
+    new(new [] { 187.514870, 0.124334, 11.735301 }, 0.918126),
+    new(new [] { 80.806287, 0.028735, 9.547892 }, 0.824839),
+    new(new [] { 212.130398, 0.142035, 8.342675 }, 0.713911),
+    new(new [] { 212.130398, 0.142035, 8.342675 }, 0.082547),
+    new(new [] { 80.806287, 0.028735, 9.547892, }, 0.135099),
+    new(new [] { 119.813471, 0.074485, 13.382158 }, 0.154206),
+    new(new [] { 202.034806, 0.137801, 9.508964 },0.627903),
+    new(new [] { 102.696143, 0.099462, 8.557010 },0.410965),
+    new(new [] { 118.759207, 0.038629, 9.560888 },0.587768),
+    new(new [] { 96.998060, 0.039504, 11.428746 },0.225692),
+    new(new [] { 117.955108, 0.082906, 12.319315 }, 0.801867),
+    new(new [] { 246.662655, 0.027162, 14.963403 }, 0.088704),
+    new(new [] { 156.214348, 0.167765, 12.516866 }, 0.365275),
+    new(new [] { 278.337940, 0.098931, 13.177449 }, 0.009549),
 };
 }
}
diff --git a/src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs b/src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs
index af5c1a4f..71f50a29 100644
--- a/src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs
+++ b/src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs
@@ -16,9 +16,9 @@ public void ParticleSwarmOptimizer_OptimizeBest(int? maxDegreeOfParallelism)
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
     };

     var sut = CreateSut(maxDegreeOfParallelism, parameters);
@@ -42,7 +42,7 @@ public void ParticleSwarmOptimizer_Optimize(int? maxDegreeOfParallelism)
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
+        new(0.0, 100.0, Transform.Linear)
     };

     var sut = CreateSut(maxDegreeOfParallelism, parameters);
@@ -53,8 +53,8 @@ public void ParticleSwarmOptimizer_Optimize(int? maxDegreeOfParallelism)

     var expected = new OptimizerResult[]
     {
-        new OptimizerResult(new double[] { 38.1151505704492 }, 115.978346548015),
-        new OptimizerResult(new double[] { 37.2514904205637 }, 118.093289672808),
+        new(new double[] { 38.1151505704492 }, 115.978346548015),
+        new(new double[] { 37.2514904205637 }, 118.093289672808),
     };

     Assert.AreEqual(expected.First().Error, actual.First().Error, Delta);
diff --git a/src/SharpLearning.Optimization.Test/RandomSearchOptimizationTest.cs b/src/SharpLearning.Optimization.Test/RandomSearchOptimizationTest.cs
index dbb24b41..02d54fbb 100644
--- a/src/SharpLearning.Optimization.Test/RandomSearchOptimizationTest.cs
+++ b/src/SharpLearning.Optimization.Test/RandomSearchOptimizationTest.cs
@@ -17,7 +17,7 @@ public void RandomSearchOptimizer_OptimizeBest(int? maxDegreeOfParallelism)
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
+        new(0.0, 100.0, Transform.Linear)
     };

     var sut = maxDegreeOfParallelism.HasValue ?
@@ -39,7 +39,7 @@ public void RandomSearchOptimizer_Optimize(int? maxDegreeOfParallelism)
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(10.0, 37.5, Transform.Linear)
+        new(10.0, 37.5, Transform.Linear)
     };

     var sut = maxDegreeOfParallelism.HasValue ?
@@ -50,8 +50,8 @@ public void RandomSearchOptimizer_Optimize(int? maxDegreeOfParallelism)

     var expected = new OptimizerResult[]
     {
-        new OptimizerResult(new double[] { 28.3729278125674 }, 3690.81119818742),
-        new OptimizerResult(new double[] { 19.1529422843144 }, 14251.396910816733),
+        new(new double[] { 28.3729278125674 }, 3690.81119818742),
+        new(new double[] { 19.1529422843144 }, 14251.396910816733),
     };

     Assert.AreEqual(expected.First().Error, actual.First().Error, Delta);
diff --git a/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs b/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs
index 729f23f7..761dec28 100644
--- a/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs
+++ b/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs
@@ -14,7 +14,7 @@ public void SmacOptimizer_OptimizeBest_SingleParameter()
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
+        new(0.0, 100.0, Transform.Linear)
     };

     var sut = CreateSut(parameters);
@@ -30,9 +30,9 @@ public void SmacOptimizer_OptimizeBest_MultipleParameters()
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
     };

     var sut = CreateSut(parameters);
@@ -52,7 +52,7 @@ public void SmacOptimizer_Optimize()
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(0.0, 100.0, Transform.Linear)
+        new(0.0, 100.0, Transform.Linear)
     };

     var sut = CreateSut(parameters);
@@ -61,8 +61,8 @@ public void SmacOptimizer_Optimize()

     var expected = new OptimizerResult[]
     {
-        new OptimizerResult(new double[] { 90.513222660177 }, 114559.431919558),
-        new OptimizerResult(new double[] { 41.8333740634068 }, 806.274612132759),
+        new(new double[] { 90.513222660177 }, 114559.431919558),
+        new(new double[] { 41.8333740634068 }, 806.274612132759),
     };

     Assert.AreEqual(expected.First().Error, actual.First().Error, Delta);
@@ -92,37 +92,37 @@ public void SmacOptimizer_OptimizeBest_MultipleParameters_Open_Loop_Using_Previo
 {
     var previousResults = new List<OptimizerResult>()
     {
-        new OptimizerResult(new[] {-6.83357586936726,6.0834837966056,-0.0766206300242906}, -0.476143174040315),
-        new OptimizerResult(new[] {-7.29391428515963,6.0834837966056,1.01057317620636}, -0.41300737879641),
-        new OptimizerResult(new[] {-7.29391428515963,6.0834837966056,1.01057317620636}, -0.41300737879641),
-        new OptimizerResult(new[] {-8.05557010604794,-5.14662256238359,0.0363854738121798}, -0.397724266113204),
-        new OptimizerResult(new[] {-8.06241082868651,5.88012208038947,-1.5210571566229}, -0.356975377788698),
-        new OptimizerResult(new[] {4.42408777513732,0.472018332440413,1.7076749781648}, -0.315360461074171),
-        new OptimizerResult(new[] {-8.14483470197061,7.54724840519356,0.0363854738121798}, -0.279108605472165),
-        new OptimizerResult(new[] {-6.64746686660101,6.7109944004151,-0.214493549528761}, -0.266917186594653),
-        new OptimizerResult(new[] {5.34224593795009,-6.45170816986435,-2.1147669628797}, -0.255769932489526),
-        new OptimizerResult(new[] {-7.84876385603508,6.28409400409278,3.1447921661403}, -0.241263236969342),
-        new OptimizerResult(new[] {-7.84876385603508,4.96554990995934,-0.0766206300242906}, -0.232637166385485),
-        new OptimizerResult(new[] {-8.14041409554911,7.16927772256047,1.75166608381628}, -0.220476103560048),
-        new OptimizerResult(new[] {-7.84876385603508,6.0834837966056,-3.60210045874217}, -0.212970686239402),
-        new OptimizerResult(new[] {-7.29391428515963,5.22505613752876,1.01057317620636}, -0.206689239504653),
-        new OptimizerResult(new[] {-9.20479206331297,6.0834837966056,-0.0766206300242906}, -0.198657722521128),
-        new OptimizerResult(new[] {-8.25145286426481,5.27274844947865,-1.82163462593296}, -0.17367847378187),
-        new OptimizerResult(new[] {-7.84876385603508,6.0834837966056,5.3824106023565}, -0.153564625328103),
-        new OptimizerResult(new[] {-1.37364300497511,-1.35665034472786,-0.585322245296707}, -0.131453543138338),
-        new OptimizerResult(new[] {-7.84876385603508,7.74187722138216,-0.0766206300242906}, -0.103906821017427),
-        new OptimizerResult(new[] {9.20868899636375,-9.38389458664874,1.51842798642741}, -0.0850657757130275),
-        new OptimizerResult(new[] {-7.72406242681856,5.70825177044992,9.95585092341334}, -0.0759553721161318),
-        new OptimizerResult(new[] {1.65093947744506,-4.37866264692445,-4.29402069854272}, -0.0616761163702651),
-        new OptimizerResult(new[] {-9.37414173938993,6.28409400409278,0.0363854738121798}, -0.0488375857853505),
-        new OptimizerResult(new[] {3.38691201684387,5.42095644186295,-5.71318443664964}, -0.0235423806080941),
-        new OptimizerResult(new[] {-6.48224856540665,-7.13935053774125,7.05507751417117}, -0.0160884883078408),
-        new OptimizerResult(new[] {-9.68539061941457,7.96346846873102,-0.990608674935348}, -0.0141441279734299),
-        new OptimizerResult(new[] {-9.41382774124566,5.12580713030221,0.630654976996897}, -0.00269773409680873),
-        new OptimizerResult(new[] {6.7694738305963,1.56629731485913,-2.12145430600338}, 0.000673595210828553),
-        new OptimizerResult(new[] {-0.0282478006688169,2.87566112022645,-4.84997700660023}, 0.00465834522866944),
-        new OptimizerResult(new[] {3.50054986472267,8.01269467827524,7.36471213277649}, 0.00663762309484885),
-        new OptimizerResult(new[] {3.05129390817662,-6.16640157819092,7.49125691013935}, 0.0105475373675896),
+        new(new[] {-6.83357586936726,6.0834837966056,-0.0766206300242906}, -0.476143174040315),
+        new(new[] {-7.29391428515963,6.0834837966056,1.01057317620636}, -0.41300737879641),
+        new(new[] {-7.29391428515963,6.0834837966056,1.01057317620636}, -0.41300737879641),
+        new(new[] {-8.05557010604794,-5.14662256238359,0.0363854738121798}, -0.397724266113204),
+        new(new[] {-8.06241082868651,5.88012208038947,-1.5210571566229}, -0.356975377788698),
+        new(new[] {4.42408777513732,0.472018332440413,1.7076749781648}, -0.315360461074171),
+        new(new[] {-8.14483470197061,7.54724840519356,0.0363854738121798}, -0.279108605472165),
+        new(new[] {-6.64746686660101,6.7109944004151,-0.214493549528761}, -0.266917186594653),
+        new(new[] {5.34224593795009,-6.45170816986435,-2.1147669628797}, -0.255769932489526),
+        new(new[] {-7.84876385603508,6.28409400409278,3.1447921661403}, -0.241263236969342),
+        new(new[] {-7.84876385603508,4.96554990995934,-0.0766206300242906}, -0.232637166385485),
+        new(new[] {-8.14041409554911,7.16927772256047,1.75166608381628}, -0.220476103560048),
+        new(new[] {-7.84876385603508,6.0834837966056,-3.60210045874217}, -0.212970686239402),
+        new(new[] {-7.29391428515963,5.22505613752876,1.01057317620636}, -0.206689239504653),
+        new(new[] {-9.20479206331297,6.0834837966056,-0.0766206300242906}, -0.198657722521128),
+        new(new[] {-8.25145286426481,5.27274844947865,-1.82163462593296}, -0.17367847378187),
+        new(new[] {-7.84876385603508,6.0834837966056,5.3824106023565}, -0.153564625328103),
+        new(new[] {-1.37364300497511,-1.35665034472786,-0.585322245296707}, -0.131453543138338),
+        new(new[] {-7.84876385603508,7.74187722138216,-0.0766206300242906}, -0.103906821017427),
+        new(new[] {9.20868899636375,-9.38389458664874,1.51842798642741}, -0.0850657757130275),
+        new(new[] {-7.72406242681856,5.70825177044992,9.95585092341334}, -0.0759553721161318),
+        new(new[] {1.65093947744506,-4.37866264692445,-4.29402069854272}, -0.0616761163702651),
+        new(new[] {-9.37414173938993,6.28409400409278,0.0363854738121798}, -0.0488375857853505),
+        new(new[] {3.38691201684387,5.42095644186295,-5.71318443664964}, -0.0235423806080941),
+        new(new[] {-6.48224856540665,-7.13935053774125,7.05507751417117}, -0.0160884883078408),
+        new(new[] {-9.68539061941457,7.96346846873102,-0.990608674935348}, -0.0141441279734299),
+        new(new[] {-9.41382774124566,5.12580713030221,0.630654976996897}, -0.00269773409680873),
+        new(new[] {6.7694738305963,1.56629731485913,-2.12145430600338}, 0.000673595210828553),
+        new(new[] {-0.0282478006688169,2.87566112022645,-4.84997700660023}, 0.00465834522866944),
+        new(new[] {3.50054986472267,8.01269467827524,7.36471213277649}, 0.00663762309484885),
+        new(new[] {3.05129390817662,-6.16640157819092,7.49125691013935}, 0.0105475373675896),
     };

     OptimizerResult actual = RunOpenLoopOptimizationTest(previousResults);
@@ -198,9 +198,9 @@ static OptimizerResult RunOpenLoopOptimizationTest(List<OptimizerResult> results
 {
     var parameters = new MinMaxParameterSpec[]
     {
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
-        new MinMaxParameterSpec(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
+        new(-10.0, 10.0, Transform.Linear),
     };

     var iterations = 80;
diff --git a/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs b/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs
index fe0346e9..e5a2a832 100644
--- a/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs
+++ b/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs
@@ -98,7 +98,7 @@ public void ClassificationForestModel_PredictProbability_Single()

     Assert.AreEqual(0.076923076923076927, error, m_delta);

-    var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.650149027443145 }, { 1, 0.349850972556855 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.566943847818848 }, { 1, 0.433056152181152 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.726936489980608 }, { 1, 0.273063510019392 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.752781908451026 }, { 1, 0.247218091548974 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.566943847818848 }, { 1, 0.433056152181152 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.792506836300954 }, { 1, 0.207493163699046 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.491736055611056 }, { 1, 0.508263944388944 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.574583315377433 }, { 1, 0.425416684622567 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.838724674018791 }, { 1, 0.161275325981208 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.241480824730825 }, { 1, 0.758519175269175 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.385258186258186 }, { 1, 0.614741813741813 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.726936489980608 }, { 1, 0.273063510019392 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.706733044733045 }, { 1, 0.293266955266955 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.801266011766012 }, { 1, 0.198733988233988 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.294952297702298 }, { 1, 0.705047702297702 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.821706914001031 }, { 1, 0.178293085998968 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.780062391856509 }, { 1, 0.21993760814349 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.554444388944389 }, { 1, 0.445555611055611 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.261349872349872 }, { 1, 0.738650127650127 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.419758186258186 }, { 1, 0.580241813741813 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.71382231249143 }, { 1, 0.28617768750857 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.241480824730825 }, { 1, 0.758519175269175 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.47562148962149 }, { 1, 0.52437851037851 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.821706914001031 }, { 1, 0.178293085998968 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.792506836300954 }, { 1, 0.207493163699046 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.666244987039105 }, { 1, 0.333755012960895 }, }) };
+    var expected = new ProbabilityPrediction[] { new(0, new Dictionary<double, double> { { 0, 0.650149027443145 }, { 1, 0.349850972556855 }, }), new(0, new Dictionary<double, double> { { 0, 0.566943847818848 }, { 1, 0.433056152181152 }, }), new(0, new Dictionary<double, double> { { 0, 0.726936489980608 }, { 1, 0.273063510019392 }, }), new(0, new Dictionary<double, double> { { 0, 0.752781908451026 }, { 1, 0.247218091548974 }, }), new(0, new Dictionary<double, double> { { 0, 0.566943847818848 }, { 1, 0.433056152181152 }, }), new(0, new Dictionary<double, double> { { 0, 0.792506836300954 }, { 1, 0.207493163699046 }, }), new(1, new Dictionary<double, double> { { 0, 0.491736055611056 }, { 1, 0.508263944388944 }, }), new(0, new Dictionary<double, double> { { 0, 0.574583315377433 }, { 1, 0.425416684622567 }, }), new(0, new Dictionary<double, double> { { 0, 0.838724674018791 }, { 1, 0.161275325981208 }, }), new(1, new Dictionary<double, double> { { 0, 0.241480824730825 }, { 1, 0.758519175269175 }, }), new(1, new Dictionary<double, double> { { 0, 0.385258186258186 }, { 1, 0.614741813741813 }, }), new(0, new Dictionary<double, double> { { 0, 0.726936489980608 }, { 1, 0.273063510019392 }, }), new(0, new Dictionary<double, double> { { 0, 0.706733044733045 }, { 1, 0.293266955266955 }, }), new(0, new Dictionary<double, double> { { 0, 0.801266011766012 }, { 1, 0.198733988233988 }, }), new(1, new Dictionary<double, double> { { 0, 0.294952297702298 }, { 1, 0.705047702297702 }, }), new(0, new Dictionary<double, double> { { 0, 0.821706914001031 }, { 1, 0.178293085998968 }, }), new(0, new Dictionary<double, double> { { 0, 0.780062391856509 }, { 1, 0.21993760814349 }, }), new(0, new Dictionary<double, double> { { 0, 0.554444388944389 }, { 1, 0.445555611055611 }, }), new(1, new Dictionary<double, double> { { 0, 0.261349872349872 }, { 1, 0.738650127650127 }, }), new(1, new Dictionary<double, double> { { 0, 0.419758186258186 }, { 1, 0.580241813741813 }, }), new(0, new Dictionary<double, double> { { 0, 0.71382231249143 }, { 1, 0.28617768750857 }, }), new(1, new Dictionary<double, double> { { 0, 0.241480824730825 }, { 1, 0.758519175269175 }, }), new(1, new Dictionary<double, double> { { 0, 0.47562148962149 }, { 1, 0.52437851037851 }, }), new(0, new Dictionary<double, double> { { 0, 0.821706914001031 }, { 1, 0.178293085998968 }, }), new(0, new Dictionary<double, double> { { 0, 0.792506836300954 }, { 1, 0.207493163699046 }, }), new(0, new Dictionary<double, double> { { 0, 0.666244987039105 }, { 1, 0.333755012960895 }, }) };

     CollectionAssert.AreEqual(expected, actual);
 }
@@ -116,7 +116,7 @@ public void ClassificationForestModel_PredictProbability_Multiple()

     Assert.AreEqual(0.076923076923076927, error, m_delta);

-    var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.650149027443145 }, { 1, 0.349850972556855 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.566943847818848 }, { 1, 0.433056152181152 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.726936489980608 }, { 1, 0.273063510019392 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.752781908451026 }, { 1, 0.247218091548974 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.566943847818848 }, { 1, 0.433056152181152 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.792506836300954 }, { 1, 0.207493163699046 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.491736055611056 }, { 1, 0.508263944388944 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.574583315377433 }, { 1, 0.425416684622567 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.838724674018791 }, { 1, 0.161275325981208 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.241480824730825 }, { 1, 0.758519175269175 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.385258186258186 }, { 1, 0.614741813741813 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.726936489980608 }, { 1, 0.273063510019392 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.706733044733045 }, { 1, 0.293266955266955 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.801266011766012 }, { 1, 0.198733988233988 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.294952297702298 }, { 1, 0.705047702297702 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.821706914001031 }, { 1, 0.178293085998968 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.780062391856509 }, { 1, 0.21993760814349 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.554444388944389 }, { 1, 0.445555611055611 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.261349872349872 }, { 1, 0.738650127650127 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.419758186258186 }, { 1, 0.580241813741813 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.71382231249143 }, { 1, 0.28617768750857 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.241480824730825 }, { 1, 0.758519175269175 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.47562148962149 }, { 1, 0.52437851037851 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.821706914001031 }, { 1, 0.178293085998968 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.792506836300954 }, { 1, 0.207493163699046 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.666244987039105 }, { 1, 0.333755012960895 }, }) };
+    var expected = new ProbabilityPrediction[] { new(0, new Dictionary<double, double> { { 0, 0.650149027443145 }, { 1, 0.349850972556855 }, }), new(0, new Dictionary<double, double> { { 0, 0.566943847818848 }, { 1, 0.433056152181152 }, }), new(0, new Dictionary<double, double> { { 0, 0.726936489980608 }, { 1, 0.273063510019392 }, }), new(0, new Dictionary<double, double> { { 0, 0.752781908451026 }, { 1, 0.247218091548974 }, }), new(0, new Dictionary<double, double> { { 0, 0.566943847818848 }, { 1, 0.433056152181152 }, }), new(0, new Dictionary<double, double> { { 0, 0.792506836300954 }, { 1, 0.207493163699046 }, }), new(1, new Dictionary<double, double> { { 0, 0.491736055611056 }, { 1, 0.508263944388944 }, }), new(0, new Dictionary<double, double> { { 0, 0.574583315377433 }, { 1, 0.425416684622567 }, }), new(0, new Dictionary<double, double> { { 0, 0.838724674018791 }, { 1, 0.161275325981208 }, }), new(1, new Dictionary<double, double> { { 0, 0.241480824730825 }, { 1, 0.758519175269175 }, }), new(1, new Dictionary<double, double> { { 0, 0.385258186258186 }, { 1, 0.614741813741813 }, }), new(0, new Dictionary<double, double> { { 0, 0.726936489980608 }, { 1, 0.273063510019392 }, }), new(0, new Dictionary<double, double> { { 0, 0.706733044733045 }, { 1, 0.293266955266955 }, }), new(0, new Dictionary<double, double> { { 0, 0.801266011766012 }, { 1, 0.198733988233988 }, }), new(1, new Dictionary<double, double> { { 0, 0.294952297702298 }, { 1, 0.705047702297702 }, }), new(0, new Dictionary<double, double> { { 0, 0.821706914001031 }, { 1, 0.178293085998968 }, }), new(0, new Dictionary<double, double> { { 0, 0.780062391856509 }, { 1, 0.21993760814349 }, }), new(0, new Dictionary<double, double> { { 0, 0.554444388944389 }, { 1, 0.445555611055611 }, }), new(1, new Dictionary<double, double> { { 0, 0.261349872349872 }, { 1, 0.738650127650127 }, }), new(1, new Dictionary<double, double> { { 0, 0.419758186258186 }, { 1, 0.580241813741813 }, }), new(0, new Dictionary<double, double> { { 0, 0.71382231249143 }, { 1, 0.28617768750857 }, }), new(1, new Dictionary<double, double> { { 0, 0.241480824730825 }, { 1, 0.758519175269175 }, }), new(1, new Dictionary<double, double> { { 0, 0.47562148962149 }, { 1, 0.52437851037851 }, }), new(0, new Dictionary<double, double> { { 0, 0.821706914001031 }, { 1, 0.178293085998968 }, }), new(0, new Dictionary<double, double> { { 0, 0.792506836300954 }, { 1, 0.207493163699046 }, }), new(0, new Dictionary<double, double> { { 0, 0.666244987039105 }, { 1, 0.333755012960895 }, }) };

     CollectionAssert.AreEqual(expected, actual);
 }
diff --git a/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs b/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs
index 85ba5654..c1354135 100644
--- a/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs
+++ b/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs
@@ -74,7 +74,7 @@ public void RegressionForestModel_PredictCertainty_Single()

     Assert.AreEqual(0.15381141277554411, error, m_delta);

-    var expected = new CertaintyPrediction[] { new CertaintyPrediction(0.379151515151515, 0.0608255007215007), new CertaintyPrediction(0.411071351850763, 0.0831655436577049), new CertaintyPrediction(0.243420918950331, 0.0452827034233046), new CertaintyPrediction(0.302332251082251, 0.0699917594408057), new CertaintyPrediction(0.411071351850763, 0.0831655436577049), new CertaintyPrediction(0.175743762773174, 0.0354069437824887), new CertaintyPrediction(0.574083361083361, 0.0765858693929188), new CertaintyPrediction(0.259063776093188, 0.0491198812971218), new CertaintyPrediction(0.163878898878899, 0.0331543420321184), new CertaintyPrediction(0.671753996003996, 0.0624466591504497), new CertaintyPrediction(0.418472943722944, 0.0607014359023913), new CertaintyPrediction(0.243420918950331, 0.0452827034233046), new CertaintyPrediction(0.443779942279942, 0.0941961872991865), new CertaintyPrediction(0.156999361749362, 0.0435804333960299), new CertaintyPrediction(0.591222034501446, 0.0873624628347336), new CertaintyPrediction(0.123822406351818, 0.0283119805431255), new CertaintyPrediction(0.162873993653405, 0.0333697457759022), new CertaintyPrediction(0.596261932511932, 0.0695341060210394), new CertaintyPrediction(0.671753996003996, 0.0624466591504497), new CertaintyPrediction(0.418472943722944, 0.0607014359023913), new CertaintyPrediction(0.329000027750028, 0.0788869852405852), new CertaintyPrediction(0.671753996003996, 0.0624466591504497), new CertaintyPrediction(0.499770375049787, 0.0913884936411888), new CertaintyPrediction(0.140025508804921, 0.0309875116490099), new CertaintyPrediction(0.161207326986739, 0.0336321035325246), new CertaintyPrediction(0.389553418803419, 0.0744433596104835), };
+    var expected = new CertaintyPrediction[] { new(0.379151515151515, 0.0608255007215007), new(0.411071351850763, 0.0831655436577049), new(0.243420918950331, 0.0452827034233046), new(0.302332251082251, 0.0699917594408057), new(0.411071351850763, 0.0831655436577049), new(0.175743762773174, 0.0354069437824887), new(0.574083361083361, 0.0765858693929188), new(0.259063776093188, 0.0491198812971218), new(0.163878898878899, 0.0331543420321184), new(0.671753996003996, 0.0624466591504497), new(0.418472943722944, 0.0607014359023913), new(0.243420918950331, 0.0452827034233046), new(0.443779942279942, 0.0941961872991865), new(0.156999361749362, 0.0435804333960299), new(0.591222034501446, 0.0873624628347336), new(0.123822406351818, 0.0283119805431255), new(0.162873993653405, 0.0333697457759022), new(0.596261932511932, 0.0695341060210394), new(0.671753996003996, 0.0624466591504497), new(0.418472943722944, 0.0607014359023913), new(0.329000027750028, 0.0788869852405852), new(0.671753996003996, 0.0624466591504497), new(0.499770375049787, 0.0913884936411888), new(0.140025508804921, 0.0309875116490099), new(0.161207326986739, 0.0336321035325246), new(0.389553418803419, 0.0744433596104835), };

     CollectionAssert.AreEqual(expected, actual);
 }
@@ -92,7 +92,7 @@ public void RegressionForestModel_PredictProbability_Multiple()

     Assert.AreEqual(0.15381141277554411, error, m_delta);

-    var expected = new CertaintyPrediction[] { new CertaintyPrediction(0.379151515151515, 0.0608255007215007), new CertaintyPrediction(0.411071351850763, 0.0831655436577049), new CertaintyPrediction(0.243420918950331, 0.0452827034233046), new CertaintyPrediction(0.302332251082251, 0.0699917594408057), new CertaintyPrediction(0.411071351850763, 0.0831655436577049), new CertaintyPrediction(0.175743762773174, 0.0354069437824887), new CertaintyPrediction(0.574083361083361, 0.0765858693929188), new CertaintyPrediction(0.259063776093188, 0.0491198812971218), new CertaintyPrediction(0.163878898878899, 0.0331543420321184), new CertaintyPrediction(0.671753996003996, 0.0624466591504497), new CertaintyPrediction(0.418472943722944, 0.0607014359023913), new CertaintyPrediction(0.243420918950331, 0.0452827034233046), new CertaintyPrediction(0.443779942279942, 0.0941961872991865), new CertaintyPrediction(0.156999361749362, 0.0435804333960299), new CertaintyPrediction(0.591222034501446, 0.0873624628347336), new CertaintyPrediction(0.123822406351818, 0.0283119805431255), new CertaintyPrediction(0.162873993653405, 0.0333697457759022), new CertaintyPrediction(0.596261932511932, 0.0695341060210394), new CertaintyPrediction(0.671753996003996, 0.0624466591504497), new CertaintyPrediction(0.418472943722944, 0.0607014359023913), new CertaintyPrediction(0.329000027750028, 0.0788869852405852), new CertaintyPrediction(0.671753996003996, 0.0624466591504497), new CertaintyPrediction(0.499770375049787, 0.0913884936411888), new CertaintyPrediction(0.140025508804921, 0.0309875116490099), new CertaintyPrediction(0.161207326986739, 0.0336321035325246), new CertaintyPrediction(0.389553418803419, 0.0744433596104835), };
+    var expected = new CertaintyPrediction[] { new(0.379151515151515, 0.0608255007215007), new(0.411071351850763, 0.0831655436577049), new(0.243420918950331, 0.0452827034233046), new(0.302332251082251, 0.0699917594408057), new(0.411071351850763, 0.0831655436577049), new(0.175743762773174, 0.0354069437824887), new(0.574083361083361, 0.0765858693929188), new(0.259063776093188, 0.0491198812971218), new(0.163878898878899, 0.0331543420321184), new(0.671753996003996, 0.0624466591504497), new(0.418472943722944, 0.0607014359023913), new(0.243420918950331, 0.0452827034233046), new(0.443779942279942, 0.0941961872991865), new(0.156999361749362, 0.0435804333960299), new(0.591222034501446, 0.0873624628347336), new(0.123822406351818, 0.0283119805431255), new(0.162873993653405, 0.0333697457759022), new(0.596261932511932, 0.0695341060210394), new(0.671753996003996, 0.0624466591504497), new(0.418472943722944, 0.0607014359023913), new(0.329000027750028, 0.0788869852405852), new(0.671753996003996, 0.0624466591504497), new(0.499770375049787, 0.0913884936411888), new(0.140025508804921, 0.0309875116490099), new(0.161207326986739, 0.0336321035325246), new(0.389553418803419, 0.0744433596104835), };

     CollectionAssert.AreEqual(expected, actual);
 }
diff --git a/src/SharpLearning.XGBoost.Test/Models/ClassificationXGBoostModelTest.cs b/src/SharpLearning.XGBoost.Test/Models/ClassificationXGBoostModelTest.cs
index 0c7f94f2..584c64fa 100644
--- a/src/SharpLearning.XGBoost.Test/Models/ClassificationXGBoostModelTest.cs
+++ b/src/SharpLearning.XGBoost.Test/Models/ClassificationXGBoostModelTest.cs
@@ -104,7 +104,7 @@ public void ClassificationXGBoostModel_PredictProbability_Single()

     Assert.AreEqual(0.17757009345794392, error, m_delta);

-    var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.204421937465668 }, { 1, 0.165088519454002 }, { 2, 0.169509157538414 }, { 3, 0.15365232527256 }, { 4, 0.15364582836628 }, { 5, 0.153682202100754 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.171550869941711 }, { 1, 0.17407751083374 }, { 2, 0.168291121721268 }, { 3, 0.162018626928329 }, { 4, 0.162011757493019 }, { 5, 0.162050127983093 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.200587809085846 }, { 1, 0.200458511710167 }, { 2, 0.1540387570858 }, { 3, 0.1482974588871 }, { 4, 0.148291185498238 }, { 5, 0.14832629263401 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.209190428256989 }, { 1, 0.166168510913849 }, { 2, 0.160645022988319 }, { 3, 0.154657498002052 }, { 4, 0.154650956392288 }, { 5, 0.154687568545341 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.231333449482918 }, { 1, 0.158397167921066 }, { 2, 0.16797336935997 }, { 3, 0.147424504160881 }, { 4, 0.147418275475502 }, { 5, 0.147453173995018 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.222247675061226 }, { 1, 0.191451728343964 }, { 2, 0.161376118659973 }, { 3, 0.141634315252304 }, { 4, 0.141628324985504 }, { 5, 0.14166185259819 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.211069479584694 }, { 1, 0.217032581567764 }, { 2, 0.157594978809357 }, { 3, 0.134510651230812 }, { 4, 0.134504958987236 }, { 5, 0.145287364721298 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.221573829650879 }, { 1, 0.151714622974396 }, { 2, 0.20307545363903 }, { 3, 0.141204878687859 }, { 4, 0.141198918223381 }, { 5, 0.141232341527939 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.218772485852242 }, { 1, 0.18845808506012 }, { 2, 0.163346409797668 }, { 3, 0.13941964507103 }, { 4, 0.139413744211197 }, { 5, 0.150589644908905 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.196985200047493 }, { 1, 0.201361879706383 }, { 2, 0.154732927680016 }, { 3, 0.148965761065483 }, { 4, 0.148959457874298 }, { 5, 0.148994728922844 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.22473056614399 }, { 1, 0.153876096010208 }, { 2, 0.157996505498886 }, { 3, 0.143216624855995 }, { 4, 0.143210560083389 }, { 5, 0.176969602704048 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.228362649679184 }, { 1, 0.156363025307655 }, { 2, 0.170506909489632 }, { 3, 0.14553128182888 }, { 4, 0.153676524758339 }, { 5, 0.145559579133987 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.231693357229233 }, { 1, 0.15864360332489 }, { 2, 0.162881851196289 }, { 3, 0.151451021432877 }, { 4, 0.147647619247437 }, { 5, 0.147682577371597 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.18250384926796 }, { 1, 0.232570126652718 }, { 2, 0.151334419846535 }, { 3, 0.15921525657177 }, { 4, 0.137171939015388 }, { 5, 0.137204423546791 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.185695022344589 }, { 1, 0.238080278038979 }, { 2, 0.154919907450676 }, { 3, 0.140427812933922 }, { 4, 0.140421867370605 }, { 5, 0.14045512676239 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.161344453692436 }, { 1, 0.186508595943451 }, { 2, 0.161158725619316 }, { 3, 0.160697221755981 }, { 4, 0.160690426826477 }, { 5, 0.169600605964661 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.154577597975731 }, { 1, 0.20810940861702 }, { 2, 0.175417006015778 }, { 3, 0.153957515954971 }, { 4, 0.15395100414753 }, { 5, 0.15398745238781 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.14681002497673 }, { 1, 0.247902169823647 }, { 2, 0.166602239012718 }, { 3, 0.146221116185188 }, { 4, 0.146214917302132 }, { 5, 0.146249532699585 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.149800211191177 }, { 1, 0.252951383590698 }, { 2, 0.149627774953842 }, { 3, 0.149199306964874 }, { 4, 0.149192988872528 }, { 5, 0.149228319525719 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.164128586649895 }, { 1, 0.175637125968933 }, { 2, 0.169798880815506 }, { 3, 0.163470193743706 }, { 4, 0.163463264703751 }, { 5, 0.163501977920532 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.142483577132225 }, { 1, 0.228974625468254 }, { 2, 0.142319530248642 }, { 3, 0.164056032896042 }, { 4, 0.14190599322319 }, { 5, 0.18026028573513 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.143185585737228 }, { 1, 0.230102762579918 }, { 2, 0.143020734190941 }, { 3, 0.16486431658268 }, { 4, 0.142605155706406 }, { 5, 0.176221489906311 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 }, }), new ProbabilityPrediction(4, new Dictionary<double, double> { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.206467673182487 }, { 1, 0.166740626096725 }, { 2, 0.161198109388351 }, { 3, 0.155189976096153 }, { 4, 0.155183419585228 }, { 5, 0.155220150947571 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.164174765348434 }, { 1, 0.209589347243309 }, { 2, 0.16105517745018 }, { 3, 0.155052363872528 }, { 4, 0.155045807361603 }, { 5, 0.155082508921623 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.149210333824158 }, { 1, 0.200883388519287 }, { 2, 0.149038568139076 }, { 3, 0.203621536493301 }, { 4, 0.148605495691299 }, { 5, 0.148640677332878 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2,
0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.205285787582397 }, { 1, 0.171510457992554 }, { 2, 0.160275369882584 }, { 3, 0.154301628470421 }, { 4, 0.154295101761818 }, { 5, 0.154331624507904 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.19794899225235 }, { 1, 0.201120212674141 }, { 2, 0.154547214508057 }, { 3, 0.148786976933479 }, { 4, 0.148780673742294 }, { 5, 0.148815900087357 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.198042631149292 }, { 1, 0.202442809939384 }, { 2, 0.150195524096489 }, { 3, 0.14976541697979 }, { 4, 0.149759083986282 }, { 5, 0.149794533848763 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.220375582575798 }, { 1, 0.189839035272598 }, { 2, 0.145878404378891 }, { 3, 0.16300305724144 }, { 4, 0.140435323119164 }, { 5, 0.140468567609787 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 
0.155377939343452 }, { 5, 0.15541473031044 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.157011136412621 }, { 1, 0.211385697126389 }, { 2, 0.162435546517372 }, { 3, 0.156381294131279 }, { 4, 0.156374678015709 }, { 5, 0.156411692500114 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.200586974620819 }, { 1, 0.161991447210312 }, { 2, 0.176644444465637 }, { 3, 0.150769785046577 }, { 4, 0.159208223223686 }, { 5, 0.150799110531807 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(2, new Dictionary { { 0, 0.192420691251755 }, { 1, 0.161742717027664 }, { 2, 0.209272593259811 }, { 3, 0.145513951778412 }, { 4, 0.145507797598839 }, { 5, 0.145542249083519 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.203436717391014 }, { 1, 0.165295079350471 }, { 2, 0.169711023569107 }, { 3, 0.153844580054283 }, { 4, 0.153838068246841 }, { 5, 0.15387450158596 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.202308386564255 }, { 1, 0.164378300309181 }, { 2, 0.174316108226776 }, { 3, 0.152991309762001 }, { 4, 0.152984827756882 }, { 5, 0.153021052479744 }, }), new ProbabilityPrediction(2, new Dictionary { { 0, 0.193465068936348 }, { 1, 0.15719299018383 }, { 2, 0.210408434271812 }, { 3, 0.146303743124008 }, { 4, 0.146297559142113 }, { 5, 0.146332189440727 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new ProbabilityPrediction(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new ProbabilityPrediction(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.221237704157829 }, { 1, 0.190581694245338 }, { 2, 0.165187060832977 }, { 3, 0.140990674495697 }, { 4, 0.140984714031219 }, { 5, 0.141018092632294 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.216337636113167 }, { 1, 0.186360627412796 }, { 2, 0.161528438329697 }, { 3, 0.160016357898712 }, { 4, 0.137862130999565 }, { 5, 0.13789476454258 }, }), new ProbabilityPrediction(3, new Dictionary { { 0, 0.149128466844559 }, { 1, 0.163378983736038 }, { 2, 0.148956805467606 }, { 3, 0.206476286053658 }, { 4, 0.148523956537247 }, { 5, 0.183535546064377 }, }), new ProbabilityPrediction(3, new Dictionary { { 0, 0.156232386827469 }, { 1, 0.167187243700027 }, { 2, 0.156052529811859 }, { 3, 0.209292829036713 }, { 4, 0.15559908747673 }, { 5, 0.15563590824604 }, }), new ProbabilityPrediction(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new 
ProbabilityPrediction(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new ProbabilityPrediction(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new ProbabilityPrediction(3, new Dictionary { { 0, 0.14455483853817 }, { 1, 0.154690876603127 }, { 2, 0.144388437271118 }, { 3, 0.229516208171844 }, { 4, 0.143968880176544 }, { 5, 0.182880714535713 }, }), new ProbabilityPrediction(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new ProbabilityPrediction(3, new Dictionary { { 0, 0.153719380497932 }, { 1, 0.164498031139374 }, { 2, 0.153542414307594 }, { 3, 0.209774866700172 }, { 4, 0.153096258640289 }, { 5, 0.165369004011154 }, }), new ProbabilityPrediction(3, new Dictionary { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 }, }), new ProbabilityPrediction(3, new Dictionary { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 }, }), new ProbabilityPrediction(3, new Dictionary { { 0, 0.152724325656891 }, { 1, 0.160508275032043 }, { 2, 0.149818390607834 }, { 3, 0.238147541880608 }, { 4, 0.149383053183556 }, { 5, 0.14941842854023 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.152712091803551 }, { 1, 0.20559786260128 }, { 2, 0.152536302804947 }, { 3, 0.176534190773964 }, { 4, 0.152093067765236 }, { 5, 0.160526528954506 }, }), new ProbabilityPrediction(3, new Dictionary { { 0, 0.144403666257858 }, { 1, 0.194412112236023 }, { 2, 0.14423742890358 }, { 3, 0.229276165366173 }, { 4, 0.14381830394268 }, { 5, 0.143852367997169 }, }), new ProbabilityPrediction(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new ProbabilityPrediction(4, new Dictionary { { 0, 0.152389481663704 }, { 1, 0.160156399011612 }, { 2, 0.149489924311638 }, { 3, 0.149061858654022 }, { 4, 0.23981149494648 }, { 5, 0.149090841412544 }, }), new ProbabilityPrediction(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new ProbabilityPrediction(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new ProbabilityPrediction(4, new Dictionary { { 0, 0.149491384625435 }, { 1, 0.163839146494865 }, { 2, 0.149319306015968 }, { 3, 0.148891717195511 }, { 4, 0.23953777551651 }, { 5, 0.148920670151711 }, }), new ProbabilityPrediction(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new ProbabilityPrediction(4, new Dictionary { { 0, 0.142711848020554 }, { 1, 0.152718678116798 }, { 2, 0.142547562718391 }, { 3, 0.191180393099785 }, { 4, 0.228674560785294 }, { 5, 0.142167016863823 }, }), new ProbabilityPrediction(4, new Dictionary { { 0, 0.151807546615601 }, 
{ 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 }, }), new ProbabilityPrediction(4, new Dictionary { { 0, 0.151802018284798 }, { 1, 0.163394033908844 }, { 2, 0.148913636803627 }, { 3, 0.148487210273743 }, { 4, 0.238887012004852 }, { 5, 0.148516088724136 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.14544840157032 }, { 1, 0.159347251057625 }, { 2, 0.145280972123146 }, { 3, 0.144864946603775 }, { 4, 0.144858822226524 }, { 5, 0.26019960641861 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.158301413059235 }, { 1, 0.17342846095562 }, { 2, 0.158119171857834 }, { 3, 0.157666385173798 }, { 4, 0.157659709453583 }, { 5, 0.194824859499931 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.219101145863533 }, { 1, 0.15002153813839 }, { 2, 0.200809195637703 }, { 3, 0.139629080891609 }, { 4, 0.139623180031776 }, { 5, 0.15081587433815 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.158076956868172 }, { 1, 0.169161155819893 }, { 2, 0.157894983887672 }, { 3, 0.157442837953568 }, { 4, 0.157436162233353 }, { 5, 0.199987947940826 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.161370471119881 }, { 1, 0.16959510743618 }, { 2, 0.158300027251244 }, { 3, 0.157846719026566 }, { 4, 0.157840043306351 }, { 5, 0.195047691464424 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 
}, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.160495221614838 }, { 1, 0.168675243854523 }, { 2, 0.157441437244415 }, { 3, 0.156990587711334 }, { 4, 0.156983941793442 }, { 5, 0.199413493275642 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145015180110931 }, { 1, 0.158933326601982 }, { 2, 0.144848257303238 }, { 3, 0.144433453679085 }, { 4, 0.147345185279846 }, { 5, 0.259424567222595 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.146661177277565 }, { 1, 0.152337715029716 }, { 2, 0.1464923620224 }, { 3, 0.146072864532471 }, { 4, 0.146066680550575 }, { 5, 0.262369185686111 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.138151779770851 }, { 1, 0.147838860750198 }, { 2, 0.13799275457859 }, { 3, 0.191278502345085 }, { 4, 0.137591779232025 }, { 5, 0.24714632332325 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), };
+ var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.204421937465668 }, { 1, 0.165088519454002 }, { 2, 0.169509157538414 }, { 3, 0.15365232527256 }, { 4, 0.15364582836628 }, { 5, 0.153682202100754 }, }), new(1, new Dictionary { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1,
0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(1, new Dictionary { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(1, new Dictionary { { 0, 0.171550869941711 }, { 1, 0.17407751083374 }, { 2, 0.168291121721268 }, { 3, 0.162018626928329 }, { 4, 0.162011757493019 }, { 5, 0.162050127983093 }, }), new(0, new Dictionary { { 0, 0.200587809085846 }, { 1, 0.200458511710167 }, { 2, 0.1540387570858 }, { 3, 0.1482974588871 }, { 4, 0.148291185498238 }, { 5, 0.14832629263401 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 
0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.209190428256989 }, { 1, 0.166168510913849 }, { 2, 0.160645022988319 }, { 3, 0.154657498002052 }, { 4, 0.154650956392288 }, { 5, 0.154687568545341 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(0, new Dictionary { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.231333449482918 }, { 1, 0.158397167921066 }, { 2, 0.16797336935997 }, { 3, 0.147424504160881 }, { 4, 0.147418275475502 }, { 5, 0.147453173995018 }, }), new(0, new Dictionary { { 0, 
0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary { { 0, 0.222247675061226 }, { 1, 0.191451728343964 }, { 2, 0.161376118659973 }, { 3, 0.141634315252304 }, { 4, 0.141628324985504 }, { 5, 0.14166185259819 }, }), new(1, new Dictionary { { 0, 0.211069479584694 }, { 1, 0.217032581567764 }, { 2, 0.157594978809357 }, { 3, 0.134510651230812 }, { 4, 0.134504958987236 }, { 5, 0.145287364721298 }, }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary { { 0, 0.221573829650879 }, { 1, 0.151714622974396 }, { 2, 0.20307545363903 }, { 3, 0.141204878687859 }, { 4, 0.141198918223381 }, { 5, 0.141232341527939 }, }), new(0, new Dictionary { { 0, 0.218772485852242 }, { 1, 0.18845808506012 }, { 2, 0.163346409797668 }, { 3, 0.13941964507103 }, { 4, 0.139413744211197 }, { 5, 0.150589644908905 }, }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new(1, new Dictionary { { 0, 0.196985200047493 }, { 1, 0.201361879706383 }, { 2, 0.154732927680016 }, { 3, 0.148965761065483 }, { 4, 0.148959457874298 }, { 5, 0.148994728922844 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.22473056614399 }, { 1, 0.153876096010208 }, { 2, 0.157996505498886 }, { 3, 0.143216624855995 }, { 4, 0.143210560083389 }, { 5, 0.176969602704048 }, }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 
3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary { { 0, 0.228362649679184 }, { 1, 0.156363025307655 }, { 2, 0.170506909489632 }, { 3, 0.14553128182888 }, { 4, 0.153676524758339 }, { 5, 0.145559579133987 }, }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary { { 0, 0.231693357229233 }, { 1, 0.15864360332489 }, { 2, 0.162881851196289 }, { 3, 0.151451021432877 }, { 4, 0.147647619247437 }, { 5, 0.147682577371597 }, }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new(1, new Dictionary { { 0, 0.18250384926796 }, { 1, 0.232570126652718 }, { 2, 0.151334419846535 }, { 3, 0.15921525657177 }, { 4, 0.137171939015388 }, { 5, 0.137204423546791 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.185695022344589 }, { 1, 0.238080278038979 }, { 2, 0.154919907450676 }, { 3, 0.140427812933922 }, { 4, 0.140421867370605 }, { 5, 0.14045512676239 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 
0.148434519767761 }, }), new(1, new Dictionary { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.161344453692436 }, { 1, 0.186508595943451 }, { 2, 0.161158725619316 }, { 3, 0.160697221755981 }, { 4, 0.160690426826477 }, { 5, 0.169600605964661 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(1, new Dictionary { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 }, }), new(1, new Dictionary { { 0, 0.154577597975731 }, { 1, 0.20810940861702 }, { 2, 0.175417006015778 }, { 3, 0.153957515954971 }, { 4, 0.15395100414753 }, { 5, 0.15398745238781 }, }), new(1, new Dictionary { { 0, 0.14681002497673 }, { 1, 0.247902169823647 }, { 2, 0.166602239012718 }, { 3, 0.146221116185188 }, { 4, 0.146214917302132 }, { 5, 0.146249532699585 }, }), new(1, new Dictionary { { 0, 0.149800211191177 }, { 1, 0.252951383590698 }, { 2, 0.149627774953842 }, { 3, 0.149199306964874 }, { 4, 0.149192988872528 }, { 5, 0.149228319525719 }, }), new(1, new Dictionary { { 0, 0.164128586649895 }, { 1, 0.175637125968933 }, { 2, 0.169798880815506 }, { 3, 0.163470193743706 }, { 4, 0.163463264703751 }, { 5, 0.163501977920532 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new(1, new Dictionary { { 0, 
0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new(1, new Dictionary { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 }, }), new(1, new Dictionary { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 }, }), new(1, new Dictionary { { 0, 0.142483577132225 }, { 1, 0.228974625468254 }, { 2, 0.142319530248642 }, { 3, 0.164056032896042 }, { 4, 0.14190599322319 }, { 5, 0.18026028573513 }, }), new(1, new Dictionary { { 0, 0.143185585737228 }, { 1, 0.230102762579918 }, { 2, 0.143020734190941 }, { 3, 0.16486431658268 }, { 4, 0.142605155706406 }, { 5, 0.176221489906311 }, }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new(1, new Dictionary { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 }, }), new(4, new Dictionary { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 }, }), new(1, new Dictionary { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 }, }), new(1, new Dictionary { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 }, }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new(0, new Dictionary { { 0, 0.206467673182487 }, { 1, 0.166740626096725 }, { 2, 0.161198109388351 }, { 3, 0.155189976096153 }, { 4, 0.155183419585228 }, { 5, 0.155220150947571 }, }), new(1, new Dictionary { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 }, }), new(1, new Dictionary { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 }, }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, 
{ 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary { { 0, 0.164174765348434 }, { 1, 0.209589347243309 }, { 2, 0.16105517745018 }, { 3, 0.155052363872528 }, { 4, 0.155045807361603 }, { 5, 0.155082508921623 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new(3, new Dictionary { { 0, 0.149210333824158 }, { 1, 0.200883388519287 }, { 2, 0.149038568139076 }, { 3, 0.203621536493301 }, { 4, 0.148605495691299 }, { 5, 0.148640677332878 }, }), new(1, new Dictionary { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 }, }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new(0, new Dictionary { { 0, 0.205285787582397 }, { 1, 0.171510457992554 }, { 2, 0.160275369882584 }, { 3, 0.154301628470421 }, { 4, 0.154295101761818 }, { 5, 0.154331624507904 }, }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new(1, new Dictionary { { 0, 0.19794899225235 }, { 1, 0.201120212674141 }, { 2, 0.154547214508057 }, { 3, 0.148786976933479 }, { 4, 0.148780673742294 }, { 5, 0.148815900087357 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 
0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.198042631149292 }, { 1, 0.202442809939384 }, { 2, 0.150195524096489 }, { 3, 0.14976541697979 }, { 4, 0.149759083986282 }, { 5, 0.149794533848763 }, }), new(0, new Dictionary { { 0, 0.220375582575798 }, { 1, 0.189839035272598 }, { 2, 0.145878404378891 }, { 3, 0.16300305724144 }, { 4, 0.140435323119164 }, { 5, 0.140468567609787 }, }), new(0, new Dictionary { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(1, new Dictionary { { 0, 0.157011136412621 }, { 1, 0.211385697126389 }, { 2, 0.162435546517372 }, { 3, 0.156381294131279 }, { 4, 0.156374678015709 }, { 5, 0.156411692500114 }, }), new(0, new Dictionary { { 0, 0.200586974620819 }, { 1, 0.161991447210312 }, { 2, 0.176644444465637 }, { 3, 0.150769785046577 }, { 4, 0.159208223223686 }, { 5, 0.150799110531807 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(2, new Dictionary { { 0, 0.192420691251755 }, { 1, 0.161742717027664 }, { 2, 0.209272593259811 }, { 3, 0.145513951778412 }, { 4, 0.145507797598839 }, { 5, 0.145542249083519 }, }), new(0, new Dictionary { { 0, 0.203436717391014 }, { 1, 0.165295079350471 }, { 2, 0.169711023569107 }, { 3, 0.153844580054283 }, { 4, 0.153838068246841 }, { 5, 0.15387450158596 }, }), new(0, new Dictionary { { 0, 0.202308386564255 }, { 1, 0.164378300309181 }, { 2, 0.174316108226776 }, { 3, 0.152991309762001 }, { 4, 0.152984827756882 }, { 5, 0.153021052479744 }, }), new(2, new Dictionary { { 0, 0.193465068936348 }, { 1, 0.15719299018383 }, { 2, 0.210408434271812 }, { 3, 0.146303743124008 }, { 4, 0.146297559142113 }, { 5, 0.146332189440727 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 
1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new(0, new Dictionary { { 0, 0.221237704157829 }, { 1, 0.190581694245338 }, { 2, 0.165187060832977 }, { 3, 0.140990674495697 }, { 4, 0.140984714031219 }, { 5, 0.141018092632294 }, }), new(0, new Dictionary { { 0, 0.216337636113167 }, { 1, 0.186360627412796 }, { 2, 0.161528438329697 }, { 3, 0.160016357898712 }, { 4, 0.137862130999565 }, { 5, 0.13789476454258 }, }), new(3, new Dictionary { { 0, 0.149128466844559 }, { 1, 0.163378983736038 }, { 2, 0.148956805467606 }, { 3, 0.206476286053658 }, { 4, 0.148523956537247 }, { 5, 0.183535546064377 }, }), new(3, new Dictionary { { 0, 0.156232386827469 }, { 1, 0.167187243700027 }, { 2, 0.156052529811859 }, { 3, 0.209292829036713 }, { 4, 0.15559908747673 }, { 5, 0.15563590824604 }, }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary { { 0, 0.14455483853817 }, { 1, 0.154690876603127 }, { 2, 0.144388437271118 }, { 3, 0.229516208171844 }, { 4, 0.143968880176544 }, { 5, 0.182880714535713 }, }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary { { 0, 0.153719380497932 }, { 1, 0.164498031139374 }, { 2, 0.153542414307594 }, { 3, 0.209774866700172 }, { 4, 0.153096258640289 }, { 5, 0.165369004011154 }, }), new(3, new Dictionary { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 }, }), new(3, new Dictionary { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 }, }), new(3, new Dictionary { { 0, 0.152724325656891 }, { 1, 0.160508275032043 }, { 2, 0.149818390607834 }, { 3, 0.238147541880608 }, { 4, 0.149383053183556 }, { 5, 0.14941842854023 }, }), new(1, new Dictionary { { 0, 0.152712091803551 }, { 1, 0.20559786260128 }, { 2, 0.152536302804947 }, { 3, 0.176534190773964 }, { 4, 0.152093067765236 }, { 5, 0.160526528954506 }, }), new(3, new Dictionary { { 0, 0.144403666257858 }, { 1, 0.194412112236023 }, { 2, 0.14423742890358 }, { 3, 0.229276165366173 }, { 4, 0.14381830394268 }, { 5, 0.143852367997169 }, }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary { { 0, 0.152389481663704 }, { 1, 0.160156399011612 }, { 2, 0.149489924311638 }, { 3, 0.149061858654022 }, { 4, 0.23981149494648 }, { 5, 0.149090841412544 }, }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 
}, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary { { 0, 0.149491384625435 }, { 1, 0.163839146494865 }, { 2, 0.149319306015968 }, { 3, 0.148891717195511 }, { 4, 0.23953777551651 }, { 5, 0.148920670151711 }, }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary { { 0, 0.142711848020554 }, { 1, 0.152718678116798 }, { 2, 0.142547562718391 }, { 3, 0.191180393099785 }, { 4, 0.228674560785294 }, { 5, 0.142167016863823 }, }), new(4, new Dictionary { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 }, }), new(4, new Dictionary { { 0, 0.151802018284798 }, { 1, 0.163394033908844 }, { 2, 0.148913636803627 }, { 3, 0.148487210273743 }, { 4, 0.238887012004852 }, { 5, 0.148516088724136 }, }), new(5, new Dictionary { { 0, 0.14544840157032 }, { 1, 0.159347251057625 }, { 2, 0.145280972123146 }, { 3, 0.144864946603775 }, { 4, 0.144858822226524 }, { 5, 0.26019960641861 }, }), new(5, new Dictionary { { 0, 0.158301413059235 }, { 1, 0.17342846095562 }, { 2, 0.158119171857834 }, { 3, 0.157666385173798 }, { 4, 0.157659709453583 }, { 5, 0.194824859499931 }, }), new(0, new Dictionary { { 0, 0.219101145863533 }, { 1, 0.15002153813839 }, { 2, 0.200809195637703 }, { 3, 0.139629080891609 }, { 4, 0.139623180031776 }, { 5, 0.15081587433815 }, }), new(5, new Dictionary { { 0, 0.158076956868172 }, { 1, 0.169161155819893 }, { 2, 0.157894983887672 }, { 3, 0.157442837953568 }, { 4, 0.157436162233353 }, { 5, 0.199987947940826 }, }), new(5, new Dictionary { { 0, 0.161370471119881 }, { 1, 0.16959510743618 }, { 2, 0.158300027251244 }, { 3, 0.157846719026566 }, { 4, 0.157840043306351 }, { 5, 0.195047691464424 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { 
{ 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.160495221614838 }, { 1, 0.168675243854523 }, { 2, 0.157441437244415 }, { 3, 0.156990587711334 }, { 4, 0.156983941793442 }, { 5, 0.199413493275642 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145015180110931 }, { 1, 0.158933326601982 }, { 2, 0.144848257303238 }, { 3, 0.144433453679085 }, { 4, 0.147345185279846 }, { 5, 0.259424567222595 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.146661177277565 }, { 1, 0.152337715029716 }, { 2, 0.1464923620224 }, { 3, 0.146072864532471 }, { 4, 0.146066680550575 }, { 5, 0.262369185686111 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.138151779770851 }, { 1, 0.147838860750198 }, { 2, 0.13799275457859 }, { 3, 0.191278502345085 }, { 4, 0.137591779232025 }, { 5, 0.24714632332325 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), };
 CollectionAssert.AreEqual(expected, actual);
 }
 }
@@ -134,7 +134,7 @@ public void ClassificationXGBoostModel_PredictProbability_Single_BinaryLogistic(
 Assert.AreEqual(0.14953271028037382, error, m_delta);
- var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 },
}), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.505768656730652 }, { 1, 0.494231373071671 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.505768656730652 }, { 1, 0.494231373071671 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.505768656730652 }, { 1, 0.494231373071671 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 
0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 
0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.427482306957245 }, { 1, 0.572517693042755 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.436939477920532 }, { 1, 0.563060522079468 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.436939477920532 }, { 1, 0.563060522079468 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.436939477920532 }, { 1, 0.563060522079468 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(1, new 
Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.455988764762878 }, { 1, 0.544011235237122 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.455988764762878 }, { 1, 0.544011235237122 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.455988764762878 }, { 1, 0.544011235237122 }, }), new 
ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.427482306957245 }, { 1, 0.572517693042755 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.427482306957245 }, { 1, 0.572517693042755 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 
0.574372172355652 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 
0.414723873138428 }, { 1, 0.585276126861572 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), };
+ var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.505768656730652 }, { 1, 0.494231373071671 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.505768656730652 }, { 1, 0.494231373071671 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.505768656730652 }, { 1, 0.494231373071671 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1,
0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, 
new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.427482306957245 }, { 1, 0.572517693042755 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.436939477920532 }, { 1, 0.563060522079468 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.436939477920532 }, { 1, 0.563060522079468 }, }), new(1, new Dictionary { { 0, 0.436939477920532 }, { 1, 0.563060522079468 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.455988764762878 }, { 1, 0.544011235237122 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.455988764762878 }, { 1, 0.544011235237122 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 
0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.455988764762878 }, { 1, 0.544011235237122 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.427482306957245 }, { 1, 
0.572517693042755 }, }), new(1, new Dictionary { { 0, 0.427482306957245 }, { 1, 0.572517693042755 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), };
 CollectionAssert.AreEqual(expected, actual);
 }
 }
@@ -154,7 +154,7 @@ public void ClassificationXGBoostModel_PredictProbability_Multiple()
 Assert.AreEqual(0.17757009345794392, error, m_delta);
- var expected = new ProbabilityPrediction[] { new ProbabilityPrediction(0, new Dictionary { { 0, 0.204421937465668 }, { 1, 0.165088519454002 }, { 2, 0.169509157538414 }, { 3, 0.15365232527256 }, { 4,
0.15364582836628 }, { 5, 0.153682202100754 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new 
ProbabilityPrediction(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.171550869941711 }, { 1, 0.17407751083374 }, { 2, 0.168291121721268 }, { 3, 0.162018626928329 }, { 4, 0.162011757493019 }, { 5, 0.162050127983093 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.200587809085846 }, { 1, 0.200458511710167 }, { 2, 0.1540387570858 }, { 3, 0.1482974588871 }, { 4, 0.148291185498238 }, { 5, 0.14832629263401 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.209190428256989 }, { 1, 0.166168510913849 }, { 2, 0.160645022988319 }, { 3, 0.154657498002052 }, { 4, 0.154650956392288 }, { 5, 0.154687568545341 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 
0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.231333449482918 }, { 1, 0.158397167921066 }, { 2, 0.16797336935997 }, { 3, 0.147424504160881 }, { 4, 0.147418275475502 }, { 5, 0.147453173995018 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.222247675061226 }, { 1, 0.191451728343964 }, { 2, 0.161376118659973 }, { 3, 0.141634315252304 }, { 4, 0.141628324985504 }, { 5, 0.14166185259819 }, }), new ProbabilityPrediction(1, new Dictionary { { 0, 0.211069479584694 }, { 1, 0.217032581567764 }, { 2, 0.157594978809357 }, { 3, 0.134510651230812 }, { 4, 0.134504958987236 }, { 5, 0.145287364721298 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.221573829650879 }, { 1, 0.151714622974396 }, { 2, 0.20307545363903 }, { 3, 0.141204878687859 }, { 4, 0.141198918223381 }, { 5, 0.141232341527939 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.218772485852242 }, { 1, 0.18845808506012 }, { 2, 0.163346409797668 }, { 3, 0.13941964507103 }, { 4, 0.139413744211197 }, { 5, 0.150589644908905 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new ProbabilityPrediction(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 
4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.196985200047493 }, { 1, 0.201361879706383 }, { 2, 0.154732927680016 }, { 3, 0.148965761065483 }, { 4, 0.148959457874298 }, { 5, 0.148994728922844 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.22473056614399 }, { 1, 0.153876096010208 }, { 2, 0.157996505498886 }, { 3, 0.143216624855995 }, { 4, 0.143210560083389 }, { 5, 0.176969602704048 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.228362649679184 }, { 1, 0.156363025307655 }, { 2, 0.170506909489632 }, { 3, 0.14553128182888 }, { 4, 0.153676524758339 }, { 5, 0.145559579133987 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.231693357229233 }, { 1, 0.15864360332489 }, { 2, 0.162881851196289 }, { 3, 0.151451021432877 }, { 4, 0.147647619247437 }, { 5, 0.147682577371597 }, }), new
ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.18250384926796 }, { 1, 0.232570126652718 }, { 2, 0.151334419846535 }, { 3, 0.15921525657177 }, { 4, 0.137171939015388 }, { 5, 0.137204423546791 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.185695022344589 }, { 1, 0.238080278038979 }, { 2, 0.154919907450676 }, { 3, 0.140427812933922 }, { 4, 0.140421867370605 }, { 5, 0.14045512676239 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.161344453692436 }, { 1, 0.186508595943451 }, { 2, 0.161158725619316 }, { 3, 0.160697221755981 }, { 4, 0.160690426826477 }, { 5, 0.169600605964661 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0,
0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.154577597975731 }, { 1, 0.20810940861702 }, { 2, 0.175417006015778 }, { 3, 0.153957515954971 }, { 4, 0.15395100414753 }, { 5, 0.15398745238781 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.14681002497673 }, { 1, 0.247902169823647 }, { 2, 0.166602239012718 }, { 3, 0.146221116185188 }, { 4, 0.146214917302132 }, { 5, 0.146249532699585 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.149800211191177 }, { 1, 0.252951383590698 }, { 2, 0.149627774953842 }, { 3, 0.149199306964874 }, { 4, 0.149192988872528 }, { 5, 0.149228319525719 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.164128586649895 }, { 1, 0.175637125968933 }, { 2, 0.169798880815506 }, { 3, 0.163470193743706 }, { 4, 0.163463264703751 }, { 5, 0.163501977920532 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2,
0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.142483577132225 }, { 1, 0.228974625468254 }, { 2, 0.142319530248642 }, { 3, 0.164056032896042 }, { 4, 0.14190599322319 }, { 5, 0.18026028573513 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.143185585737228 }, { 1, 0.230102762579918 }, { 2, 0.143020734190941 }, { 3, 0.16486431658268 }, { 4, 0.142605155706406 }, { 5, 0.176221489906311 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 }, }), new ProbabilityPrediction(4, new Dictionary<double, double> { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.206467673182487 }, { 1, 0.166740626096725 }, { 2, 0.161198109388351 }, { 3, 0.155189976096153 }, { 4, 0.155183419585228 }, { 5, 0.155220150947571 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4,
0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.164174765348434 }, { 1, 0.209589347243309 }, { 2, 0.16105517745018 }, { 3, 0.155052363872528 }, { 4, 0.155045807361603 }, { 5, 0.155082508921623 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.149210333824158 }, { 1, 0.200883388519287 }, { 2, 0.149038568139076 }, { 3, 0.203621536493301 }, { 4, 0.148605495691299 }, { 5, 0.148640677332878 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.205285787582397 }, { 1, 0.171510457992554 }, { 2, 0.160275369882584 }, { 3, 0.154301628470421 }, { 4, 0.154295101761818 }, { 5, 0.154331624507904 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.19794899225235 }, { 1, 0.201120212674141 }, { 2, 0.154547214508057 }, { 3, 0.148786976933479 }, { 4, 0.148780673742294 }, { 5, 0.148815900087357 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new
ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.198042631149292 }, { 1, 0.202442809939384 }, { 2, 0.150195524096489 }, { 3, 0.14976541697979 }, { 4, 0.149759083986282 }, { 5, 0.149794533848763 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.220375582575798 }, { 1, 0.189839035272598 }, { 2, 0.145878404378891 }, { 3, 0.16300305724144 }, { 4, 0.140435323119164 }, { 5, 0.140468567609787 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.157011136412621 }, { 1, 0.211385697126389 }, { 2, 0.162435546517372 }, { 3, 0.156381294131279 }, { 4, 0.156374678015709 }, { 5, 0.156411692500114 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.200586974620819 }, { 1, 0.161991447210312 }, { 2, 0.176644444465637 }, { 3, 0.150769785046577 }, { 4, 0.159208223223686 }, { 5, 0.150799110531807 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(2, new Dictionary<double, double> { { 0, 0.192420691251755 }, { 1, 0.161742717027664 }, { 2, 0.209272593259811 }, { 3, 0.145513951778412 }, { 4, 0.145507797598839 }, { 5, 0.145542249083519 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.203436717391014 }, { 1, 0.165295079350471 }, { 2, 0.169711023569107 }, { 3, 0.153844580054283 }, { 4, 0.153838068246841 }, { 5, 0.15387450158596 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.202308386564255 }, { 1, 0.164378300309181 }, { 2, 0.174316108226776 }, { 3, 0.152991309762001 }, { 4, 0.152984827756882 }, { 5, 0.153021052479744 }, }), new ProbabilityPrediction(2, new Dictionary<double, double> { { 0, 0.193465068936348 },
{ 1, 0.15719299018383 }, { 2, 0.210408434271812 }, { 3, 0.146303743124008 }, { 4, 0.146297559142113 }, { 5, 0.146332189440727 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new ProbabilityPrediction(2, new Dictionary<double, double> { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new ProbabilityPrediction(2, new Dictionary<double, double> { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new ProbabilityPrediction(2, new Dictionary<double, double> { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.221237704157829 }, { 1, 0.190581694245338 }, { 2, 0.165187060832977 }, { 3, 0.140990674495697 }, { 4, 0.140984714031219 }, { 5, 0.141018092632294 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.216337636113167 }, { 1, 0.186360627412796 }, { 2, 0.161528438329697 }, { 3, 0.160016357898712 }, { 4, 0.137862130999565 }, { 5, 0.13789476454258 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.149128466844559 }, { 1, 0.163378983736038 }, { 2, 0.148956805467606 }, { 3, 0.206476286053658 }, { 4, 0.148523956537247 }, { 5, 0.183535546064377 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.156232386827469 }, { 1, 0.167187243700027 }, { 2, 0.156052529811859 }, { 3, 0.209292829036713 }, { 4, 0.15559908747673 }, { 5, 0.15563590824604 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.14455483853817 }, { 1, 0.154690876603127 }, { 2, 0.144388437271118 }, { 3, 0.229516208171844 }, { 4, 0.143968880176544 }, { 5, 0.182880714535713 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.153719380497932 }, { 1, 0.164498031139374 }, { 2, 0.153542414307594 }, { 3, 0.209774866700172 }, { 4, 0.153096258640289 }, { 5, 0.165369004011154 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.152724325656891 }, { 1, 0.160508275032043 }, { 2, 0.149818390607834 }, { 3,
0.238147541880608 }, { 4, 0.149383053183556 }, { 5, 0.14941842854023 }, }), new ProbabilityPrediction(1, new Dictionary<double, double> { { 0, 0.152712091803551 }, { 1, 0.20559786260128 }, { 2, 0.152536302804947 }, { 3, 0.176534190773964 }, { 4, 0.152093067765236 }, { 5, 0.160526528954506 }, }), new ProbabilityPrediction(3, new Dictionary<double, double> { { 0, 0.144403666257858 }, { 1, 0.194412112236023 }, { 2, 0.14423742890358 }, { 3, 0.229276165366173 }, { 4, 0.14381830394268 }, { 5, 0.143852367997169 }, }), new ProbabilityPrediction(4, new Dictionary<double, double> { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new ProbabilityPrediction(4, new Dictionary<double, double> { { 0, 0.152389481663704 }, { 1, 0.160156399011612 }, { 2, 0.149489924311638 }, { 3, 0.149061858654022 }, { 4, 0.23981149494648 }, { 5, 0.149090841412544 }, }), new ProbabilityPrediction(4, new Dictionary<double, double> { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new ProbabilityPrediction(4, new Dictionary<double, double> { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new ProbabilityPrediction(4, new Dictionary<double, double> { { 0, 0.149491384625435 }, { 1, 0.163839146494865 }, { 2, 0.149319306015968 }, { 3, 0.148891717195511 }, { 4, 0.23953777551651 }, { 5, 0.148920670151711 }, }), new ProbabilityPrediction(4, new Dictionary<double, double> { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new ProbabilityPrediction(4, new Dictionary<double, double> { { 0, 0.142711848020554 }, { 1, 0.152718678116798 }, { 2, 0.142547562718391 }, { 3, 0.191180393099785 }, { 4, 0.228674560785294 }, { 5, 0.142167016863823 }, }), new ProbabilityPrediction(4, new Dictionary<double, double> { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 }, }), new ProbabilityPrediction(4, new Dictionary<double, double> { { 0, 0.151802018284798 }, { 1, 0.163394033908844 }, { 2, 0.148913636803627 }, { 3, 0.148487210273743 }, { 4, 0.238887012004852 }, { 5, 0.148516088724136 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.14544840157032 }, { 1, 0.159347251057625 }, { 2, 0.145280972123146 }, { 3, 0.144864946603775 }, { 4, 0.144858822226524 }, { 5, 0.26019960641861 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.158301413059235 }, { 1, 0.17342846095562 }, { 2, 0.158119171857834 }, { 3, 0.157666385173798 }, { 4, 0.157659709453583 }, { 5, 0.194824859499931 }, }), new ProbabilityPrediction(0, new Dictionary<double, double> { { 0, 0.219101145863533 }, { 1, 0.15002153813839 }, { 2, 0.200809195637703 }, { 3, 0.139629080891609 }, { 4, 0.139623180031776 }, { 5, 0.15081587433815 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.158076956868172 }, { 1, 0.169161155819893 }, { 2, 0.157894983887672 }, { 3, 0.157442837953568 }, { 4, 0.157436162233353 }, { 5, 0.199987947940826 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.161370471119881 }, { 1, 0.16959510743618 }, { 2, 0.158300027251244 }, { 3, 0.157846719026566 }, { 4, 0.157840043306351 }, { 5, 0.195047691464424 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628
}, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.160495221614838 }, { 1, 0.168675243854523 }, { 2, 0.157441437244415 }, { 3, 0.156990587711334 }, { 4, 0.156983941793442 }, { 5, 0.199413493275642 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145015180110931 }, { 1, 0.158933326601982 }, { 2, 0.144848257303238 }, { 3, 0.144433453679085 }, { 4, 0.147345185279846 }, { 5, 0.259424567222595 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.146661177277565 }, { 1, 0.152337715029716 }, { 2, 0.1464923620224 }, { 3, 0.146072864532471 }, { 4, 0.146066680550575 }, { 5, 0.262369185686111 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462
}, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.138151779770851 }, { 1, 0.147838860750198 }, { 2, 0.13799275457859 }, { 3, 0.191278502345085 }, { 4, 0.137591779232025 }, { 5, 0.24714632332325 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new ProbabilityPrediction(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), };
+ var expected = new ProbabilityPrediction[] { new(0, new Dictionary<double, double> { { 0, 0.204421937465668 }, { 1, 0.165088519454002 }, { 2, 0.169509157538414 }, { 3, 0.15365232527256 }, { 4, 0.15364582836628 }, { 5, 0.153682202100754 }, }), new(1, new Dictionary<double, double> { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new(0, new Dictionary<double, double> { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(1, new Dictionary<double, double> { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(0, new Dictionary<double, double> { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(0, new Dictionary<double, double> { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(1, new Dictionary<double, double> { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary<double, double> { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary<double, double> { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4,
0.141705080866814 }, { 5, 0.141738638281822 }, }), new(0, new Dictionary<double, double> { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(1, new Dictionary<double, double> { { 0, 0.171550869941711 }, { 1, 0.17407751083374 }, { 2, 0.168291121721268 }, { 3, 0.162018626928329 }, { 4, 0.162011757493019 }, { 5, 0.162050127983093 }, }), new(0, new Dictionary<double, double> { { 0, 0.200587809085846 }, { 1, 0.200458511710167 }, { 2, 0.1540387570858 }, { 3, 0.1482974588871 }, { 4, 0.148291185498238 }, { 5, 0.14832629263401 }, }), new(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.209190428256989 }, { 1, 0.166168510913849 }, { 2, 0.160645022988319 }, { 3, 0.154657498002052 }, { 4, 0.154650956392288 }, { 5, 0.154687568545341 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0,
0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(0, new Dictionary<double, double> { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.231333449482918 }, { 1, 0.158397167921066 }, { 2, 0.16797336935997 }, { 3, 0.147424504160881 }, { 4, 0.147418275475502 }, { 5, 0.147453173995018 }, }), new(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary<double, double> { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary<double, double> { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary<double, double> { { 0, 0.222247675061226 }, { 1, 0.191451728343964 }, { 2, 0.161376118659973 }, { 3, 0.141634315252304 }, { 4, 0.141628324985504 }, { 5, 0.14166185259819 }, }), new(1, new Dictionary<double, double> { { 0, 0.211069479584694 }, { 1, 0.217032581567764 }, { 2, 0.157594978809357 }, { 3, 0.134510651230812 }, { 4, 0.134504958987236 }, { 5, 0.145287364721298 }, }), new(0, new Dictionary<double, double> { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary<double, double> { { 0, 0.221573829650879 }, { 1, 0.151714622974396 }, { 2, 0.20307545363903 }, { 3, 0.141204878687859 }, { 4, 0.141198918223381 }, { 5, 0.141232341527939 }, }), new(0, new Dictionary<double, double> { { 0, 0.218772485852242 }, { 1, 0.18845808506012 }, { 2, 0.163346409797668 }, { 3, 0.13941964507103 }, { 4, 0.139413744211197 }, { 5, 0.150589644908905 }, }), new(0, new Dictionary<double, double> { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3,
0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary<double, double> { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary<double, double> { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary<double, double> { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary<double, double> { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new(1, new Dictionary<double, double> { { 0, 0.196985200047493 }, { 1, 0.201361879706383 }, { 2, 0.154732927680016 }, { 3, 0.148965761065483 }, { 4, 0.148959457874298 }, { 5, 0.148994728922844 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(0, new Dictionary<double, double> { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary<double, double> { { 0, 0.22473056614399 }, { 1, 0.153876096010208 }, { 2, 0.157996505498886 }, { 3, 0.143216624855995 }, { 4, 0.143210560083389 }, { 5, 0.176969602704048 }, }), new(0, new Dictionary<double, double> { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary<double, double> { { 0, 0.228362649679184 }, { 1, 0.156363025307655 }, { 2, 0.170506909489632 }, { 3, 0.14553128182888 }, { 4, 0.153676524758339 }, { 5, 0.145559579133987 }, }), new(0, new Dictionary<double, double> { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary<double, double> { { 0, 0.231693357229233 }, { 1, 0.15864360332489 }, { 2, 0.162881851196289 }, { 3, 0.151451021432877 }, { 4, 0.147647619247437 }, { 5, 0.147682577371597 }, }), new(1, new Dictionary<double, double> { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493
}, }), new(1, new Dictionary<double, double> { { 0, 0.18250384926796 }, { 1, 0.232570126652718 }, { 2, 0.151334419846535 }, { 3, 0.15921525657177 }, { 4, 0.137171939015388 }, { 5, 0.137204423546791 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.185695022344589 }, { 1, 0.238080278038979 }, { 2, 0.154919907450676 }, { 3, 0.140427812933922 }, { 4, 0.140421867370605 }, { 5, 0.14045512676239 }, }), new(1, new Dictionary<double, double> { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 }, }), new(1, new Dictionary<double, double> { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.161344453692436 }, { 1, 0.186508595943451 }, { 2, 0.161158725619316 }, { 3, 0.160697221755981 }, { 4, 0.160690426826477 }, { 5, 0.169600605964661 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1,
0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(1, new Dictionary<double, double> { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 }, }), new(1, new Dictionary<double, double> { { 0, 0.154577597975731 }, { 1, 0.20810940861702 }, { 2, 0.175417006015778 }, { 3, 0.153957515954971 }, { 4, 0.15395100414753 }, { 5, 0.15398745238781 }, }), new(1, new Dictionary<double, double> { { 0, 0.14681002497673 }, { 1, 0.247902169823647 }, { 2, 0.166602239012718 }, { 3, 0.146221116185188 }, { 4, 0.146214917302132 }, { 5, 0.146249532699585 }, }), new(1, new Dictionary<double, double> { { 0, 0.149800211191177 }, { 1, 0.252951383590698 }, { 2, 0.149627774953842 }, { 3, 0.149199306964874 }, { 4, 0.149192988872528 }, { 5, 0.149228319525719 }, }), new(1, new Dictionary<double, double> { { 0, 0.164128586649895 }, { 1, 0.175637125968933 }, { 2, 0.169798880815506 }, { 3, 0.163470193743706 }, { 4, 0.163463264703751 }, { 5, 0.163501977920532 }, }), new(0, new Dictionary<double, double> { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary<double, double> { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new(1, new Dictionary<double, double> { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new(1, new Dictionary<double, double> { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new(1, new Dictionary<double, double> { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new(1, new Dictionary<double, double> { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new(0, new Dictionary<double, double> { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new(1, new Dictionary<double, double> { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 }, }), new(1, new Dictionary<double, double> { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 }, }), new(1, new Dictionary<double, double> { { 0, 0.142483577132225 }, { 1, 0.228974625468254 }, { 2, 0.142319530248642 }, { 3, 0.164056032896042 }, { 4, 0.14190599322319 }, { 5, 0.18026028573513 }, }), new(1, new Dictionary<double, double> { { 0, 0.143185585737228 }, { 1, 0.230102762579918 }, { 2, 0.143020734190941 }, { 3, 0.16486431658268 }, { 4, 0.142605155706406 }, { 5, 0.176221489906311 }, }), new(1, new Dictionary<double, double> { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new(1, new Dictionary<double, double> { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 }, }), new(4, new Dictionary<double, double> { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565
}, { 4, 0.23889571428299 }, { 5, 0.148521512746811 }, }), new(1, new Dictionary<double, double> { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 }, }), new(1, new Dictionary<double, double> { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 }, }), new(1, new Dictionary<double, double> { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new(1, new Dictionary<double, double> { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new(0, new Dictionary<double, double> { { 0, 0.206467673182487 }, { 1, 0.166740626096725 }, { 2, 0.161198109388351 }, { 3, 0.155189976096153 }, { 4, 0.155183419585228 }, { 5, 0.155220150947571 }, }), new(1, new Dictionary<double, double> { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 }, }), new(1, new Dictionary<double, double> { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 }, }), new(1, new Dictionary<double, double> { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(0, new Dictionary<double, double> { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary<double, double> { { 0, 0.164174765348434 }, { 1, 0.209589347243309 }, { 2, 0.16105517745018 }, { 3, 0.155052363872528 }, { 4, 0.155045807361603 }, { 5, 0.155082508921623 }, }), new(0, new Dictionary<double, double> { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(1, new Dictionary<double, double> { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new(1, new Dictionary<double, double> { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new(3, new
Dictionary<double, double> { { 0, 0.149210333824158 }, { 1, 0.200883388519287 }, { 2, 0.149038568139076 }, { 3, 0.203621536493301 }, { 4, 0.148605495691299 }, { 5, 0.148640677332878 }, }), new(1, new Dictionary<double, double> { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 }, }), new(1, new Dictionary<double, double> { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new(0, new Dictionary<double, double> { { 0, 0.205285787582397 }, { 1, 0.171510457992554 }, { 2, 0.160275369882584 }, { 3, 0.154301628470421 }, { 4, 0.154295101761818 }, { 5, 0.154331624507904 }, }), new(1, new Dictionary<double, double> { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new(1, new Dictionary<double, double> { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new(1, new Dictionary<double, double> { { 0, 0.19794899225235 }, { 1, 0.201120212674141 }, { 2, 0.154547214508057 }, { 3, 0.148786976933479 }, { 4, 0.148780673742294 }, { 5, 0.148815900087357 }, }), new(0, new Dictionary<double, double> { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(0, new Dictionary<double, double> { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary<double, double> { { 0, 0.198042631149292 }, { 1, 0.202442809939384 }, { 2, 0.150195524096489 }, { 3, 0.14976541697979 }, { 4, 0.149759083986282 }, { 5, 0.149794533848763 }, }), new(0, new Dictionary<double, double> { { 0, 0.220375582575798 }, { 1, 0.189839035272598 }, { 2, 0.145878404378891 }, { 3, 0.16300305724144 }, { 4, 0.140435323119164 }, { 5, 0.140468567609787 }, }), new(0, new Dictionary<double, double> { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 }, }), new(0, new Dictionary<double, double> { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(0, new Dictionary<double, double> { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2,
0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(0, new Dictionary<double, double> { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(1, new Dictionary<double, double> { { 0, 0.157011136412621 }, { 1, 0.211385697126389 }, { 2, 0.162435546517372 }, { 3, 0.156381294131279 }, { 4, 0.156374678015709 }, { 5, 0.156411692500114 }, }), new(0, new Dictionary<double, double> { { 0, 0.200586974620819 }, { 1, 0.161991447210312 }, { 2, 0.176644444465637 }, { 3, 0.150769785046577 }, { 4, 0.159208223223686 }, { 5, 0.150799110531807 }, }), new(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(2, new Dictionary<double, double> { { 0, 0.192420691251755 }, { 1, 0.161742717027664 }, { 2, 0.209272593259811 }, { 3, 0.145513951778412 }, { 4, 0.145507797598839 }, { 5, 0.145542249083519 }, }), new(0, new Dictionary<double, double> { { 0, 0.203436717391014 }, { 1, 0.165295079350471 }, { 2, 0.169711023569107 }, { 3, 0.153844580054283 }, { 4, 0.153838068246841 }, { 5, 0.15387450158596 }, }), new(0, new Dictionary<double, double> { { 0, 0.202308386564255 }, { 1, 0.164378300309181 }, { 2, 0.174316108226776 }, { 3, 0.152991309762001 }, { 4, 0.152984827756882 }, { 5, 0.153021052479744 }, }), new(2, new Dictionary<double, double> { { 0, 0.193465068936348 }, { 1, 0.15719299018383 }, { 2, 0.210408434271812 }, { 3, 0.146303743124008 }, { 4, 0.146297559142113 }, { 5, 0.146332189440727 }, }), new(0, new Dictionary<double, double> { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(2, new Dictionary<double, double> { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new(2, new Dictionary<double, double> { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new(2, new Dictionary<double, double> { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new(0, new Dictionary<double, double> { { 0, 0.221237704157829 }, { 1, 0.190581694245338 }, { 2, 0.165187060832977 }, { 3, 0.140990674495697 }, { 4, 0.140984714031219 }, { 5, 0.141018092632294 }, }), new(0, new Dictionary<double, double> { { 0, 0.216337636113167 }, { 1, 0.186360627412796 }, { 2, 0.161528438329697 }, { 3, 0.160016357898712 }, { 4, 0.137862130999565 }, { 5, 0.13789476454258 }, }), new(3, new Dictionary<double, double> { { 0, 0.149128466844559 }, { 1, 0.163378983736038 }, { 2, 0.148956805467606 }, { 3, 0.206476286053658 }, { 4, 0.148523956537247 }, { 5, 0.183535546064377 }, }), new(3, new Dictionary<double, double> { { 0, 0.156232386827469 }, { 1, 0.167187243700027 }, { 2, 0.156052529811859 }, { 3, 0.209292829036713 }, { 4, 0.15559908747673 }, { 5, 0.15563590824604 }, }), new(3, new Dictionary<double, double> { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary<double, double> { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary<double, double> { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 },
{ 5, 0.149827942252159 }, }), new(3, new Dictionary<double, double> { { 0, 0.14455483853817 }, { 1, 0.154690876603127 }, { 2, 0.144388437271118 }, { 3, 0.229516208171844 }, { 4, 0.143968880176544 }, { 5, 0.182880714535713 }, }), new(3, new Dictionary<double, double> { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary<double, double> { { 0, 0.153719380497932 }, { 1, 0.164498031139374 }, { 2, 0.153542414307594 }, { 3, 0.209774866700172 }, { 4, 0.153096258640289 }, { 5, 0.165369004011154 }, }), new(3, new Dictionary<double, double> { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 }, }), new(3, new Dictionary<double, double> { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 }, }), new(3, new Dictionary<double, double> { { 0, 0.152724325656891 }, { 1, 0.160508275032043 }, { 2, 0.149818390607834 }, { 3, 0.238147541880608 }, { 4, 0.149383053183556 }, { 5, 0.14941842854023 }, }), new(1, new Dictionary<double, double> { { 0, 0.152712091803551 }, { 1, 0.20559786260128 }, { 2, 0.152536302804947 }, { 3, 0.176534190773964 }, { 4, 0.152093067765236 }, { 5, 0.160526528954506 }, }), new(3, new Dictionary<double, double> { { 0, 0.144403666257858 }, { 1, 0.194412112236023 }, { 2, 0.14423742890358 }, { 3, 0.229276165366173 }, { 4, 0.14381830394268 }, { 5, 0.143852367997169 }, }), new(4, new Dictionary<double, double> { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary<double, double> { { 0, 0.152389481663704 }, { 1, 0.160156399011612 }, { 2, 0.149489924311638 }, { 3, 0.149061858654022 }, { 4, 0.23981149494648 }, { 5, 0.149090841412544 }, }), new(4, new Dictionary<double, double> { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary<double, double> { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary<double, double> { { 0, 0.149491384625435 }, { 1, 0.163839146494865 }, { 2, 0.149319306015968 }, { 3, 0.148891717195511 }, { 4, 0.23953777551651 }, { 5, 0.148920670151711 }, }), new(4, new Dictionary<double, double> { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary<double, double> { { 0, 0.142711848020554 }, { 1, 0.152718678116798 }, { 2, 0.142547562718391 }, { 3, 0.191180393099785 }, { 4, 0.228674560785294 }, { 5, 0.142167016863823 }, }), new(4, new Dictionary<double, double> { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 }, }), new(4, new Dictionary<double, double> { { 0, 0.151802018284798 }, { 1, 0.163394033908844 }, { 2, 0.148913636803627 }, { 3, 0.148487210273743 }, { 4, 0.238887012004852 }, { 5, 0.148516088724136 }, }), new(5, new Dictionary<double, double> { { 0, 0.14544840157032 }, { 1, 0.159347251057625 }, { 2, 0.145280972123146 }, { 3, 0.144864946603775 }, { 4, 0.144858822226524 }, { 5, 0.26019960641861 }, }), new(5, new Dictionary<double, double> { { 0, 0.158301413059235 }, { 1, 0.17342846095562 }, { 2, 0.158119171857834 }, { 3, 0.157666385173798 }, { 4, 0.157659709453583 }, { 5, 0.194824859499931 }, }), new(0, new Dictionary<double, double> { { 0, 0.219101145863533
}, { 1, 0.15002153813839 }, { 2, 0.200809195637703 }, { 3, 0.139629080891609 }, { 4, 0.139623180031776 }, { 5, 0.15081587433815 }, }), new(5, new Dictionary<double, double> { { 0, 0.158076956868172 }, { 1, 0.169161155819893 }, { 2, 0.157894983887672 }, { 3, 0.157442837953568 }, { 4, 0.157436162233353 }, { 5, 0.199987947940826 }, }), new(5, new Dictionary<double, double> { { 0, 0.161370471119881 }, { 1, 0.16959510743618 }, { 2, 0.158300027251244 }, { 3, 0.157846719026566 }, { 4, 0.157840043306351 }, { 5, 0.195047691464424 }, }), new(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary<double, double> { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary<double, double> { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary<double, double> { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary<double, double> { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary<double, double> { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary<double, double> { { 0, 0.160495221614838 }, { 1, 0.168675243854523 }, { 2, 0.157441437244415 }, { 3, 0.156990587711334 }, { 4, 0.156983941793442 }, { 5, 0.199413493275642 }, }), new(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary<double, double> { { 0, 0.145015180110931 }, { 1, 0.158933326601982 }, { 2, 0.144848257303238 }, { 3, 0.144433453679085 }, { 4, 0.147345185279846 }, { 5, 0.259424567222595 }, }), new(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary<double, double> { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4,
0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.146661177277565 }, { 1, 0.152337715029716 }, { 2, 0.1464923620224 }, { 3, 0.146072864532471 }, { 4, 0.146066680550575 }, { 5, 0.262369185686111 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.138151779770851 }, { 1, 0.147838860750198 }, { 2, 0.13799275457859 }, { 3, 0.191278502345085 }, { 4, 0.137591779232025 }, { 5, 0.24714632332325 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), }; CollectionAssert.AreEqual(expected, actual); } } diff --git a/src/SharpLearning.XGBoost.Test/Models/XGBoostTreeConverterTest.cs b/src/SharpLearning.XGBoost.Test/Models/XGBoostTreeConverterTest.cs index 9ca054d2..42f6c5cc 100644 --- a/src/SharpLearning.XGBoost.Test/Models/XGBoostTreeConverterTest.cs +++ b/src/SharpLearning.XGBoost.Test/Models/XGBoostTreeConverterTest.cs @@ -142,7 +142,7 @@ void ArrayAssert(double[] expected, double[] actual) 12:leaf=-0.0020202,cover=98 6:leaf=0.3,cover=2"; - static readonly List m_tree1Nodes = new List + static readonly List m_tree1Nodes = new() { /*-1*/ new GBMNode { @@ -228,7 +228,7 @@ void ArrayAssert(double[] expected, double[] actual) 11:leaf=0.0257847,cover=54 12:leaf=-0.00524031,cover=98"; - static readonly List m_tree2Nodes = new List + static readonly List m_tree2Nodes = new() { /*-1*/ new GBMNode { diff --git a/src/SharpLearning.XGBoost/Models/ClassificationXGBoostModel.cs b/src/SharpLearning.XGBoost/Models/ClassificationXGBoostModel.cs index 9e0b53a6..cf33a41b 100644 --- a/src/SharpLearning.XGBoost/Models/ClassificationXGBoostModel.cs +++ b/src/SharpLearning.XGBoost/Models/ClassificationXGBoostModel.cs @@ -163,7 +163,7 @@ public Dictionary GetVariableImportance(Dictionary /// /// public static ClassificationXGBoostModel Load(string modelFilePath) - => new ClassificationXGBoostModel(new Booster(modelFilePath)); + => new(new Booster(modelFilePath)); /// /// Saves the ClassificationXGBoostModel. diff --git a/src/SharpLearning.XGBoost/Models/RegressionXGBoostModel.cs b/src/SharpLearning.XGBoost/Models/RegressionXGBoostModel.cs index b74f1baf..50ef28fe 100644 --- a/src/SharpLearning.XGBoost/Models/RegressionXGBoostModel.cs +++ b/src/SharpLearning.XGBoost/Models/RegressionXGBoostModel.cs @@ -94,7 +94,7 @@ public Dictionary GetVariableImportance(Dictionary /// /// public static RegressionXGBoostModel Load(string modelFilePath) - => new RegressionXGBoostModel(new Booster(modelFilePath)); + => new(new Booster(modelFilePath)); /// /// Saves the RegressionXGBoostModel. 
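The two Load changes above rely on C# 9 target-typed "new" expressions: when the target type is already spelled out (here, the method's return type), the constructor call can omit the type name. A minimal self-contained sketch of the pattern follows; the types are stand-ins for the real SharpLearning classes, which wrap an XGBoost Booster, so the bodies here are illustrative only:

public class Booster
{
    // Stand-in for the XGBoost booster; the real type loads a trained model.
    public Booster(string modelFilePath) { }
}

public class Model
{
    readonly Booster m_booster;

    public Model(Booster booster) => m_booster = booster;

    // Before: => new Model(new Booster(modelFilePath));
    // After: the return type makes the constructed type unambiguous.
    public static Model Load(string modelFilePath)
        => new(new Booster(modelFilePath));
}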
diff --git a/src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs b/src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs
index 4edf8291..18778a99 100644
--- a/src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs
+++ b/src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs
@@ -51,8 +51,7 @@ static List<GBMNode> ConvertXGBoostNodesToGBMNodes(string textTree)
         var nodes = new List<GBMNode>
         {
             // Add special root node for sharplearning
-            new GBMNode
-            {
+            new() {
                 FeatureIndex = -1,
                 SplitValue = -1,
                 LeftConstant = 0.5,

From 5dd8e1b4a184b86cc850c2efced5659509e7fb26 Mon Sep 17 00:00:00 2001
From: mdabros
Date: Wed, 24 Jan 2024 21:49:07 +0100
Subject: [PATCH 13/15] Disable CS1668

---
 src/Directory.Build.props | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/Directory.Build.props b/src/Directory.Build.props
index acd51e1a..6e007acf 100644
--- a/src/Directory.Build.props
+++ b/src/Directory.Build.props
@@ -34,7 +34,7 @@
     true
     true
     true
-    <NoWarn>CS1591</NoWarn>
+    <NoWarn>CS1591;CS1668</NoWarn>

From fb47f056e19934d8ba0cfee340a58e552b0bcfa5 Mon Sep 17 00:00:00 2001
From: mdabros
Date: Wed, 24 Jan 2024 22:07:08 +0100
Subject: [PATCH 14/15] Fix CA2208

---
 src/SharpLearning.Containers/Views/Interval1D.cs | 6 +++++-
 .../Augmentators/NominalMungeAugmentator.cs | 7 ++++++-
 .../LearningCurves/LearningCurvesCalculator.cs | 8 ++++----
 .../ClassificationImpurityCalculator.cs | 6 +++---
 .../ImpurityCalculators/RegressionImpurityCalculator.cs | 4 ++--
 .../TreeBuilders/BestFirstTreeBuilder.cs | 4 ++--
 .../TreeBuilders/DepthFirstTreeBuilder.cs | 4 ++--
 .../Learners/ClassificationStackingEnsembleLearner.cs | 6 +++---
 .../Learners/RegressionStackingEnsembleLearner.cs | 6 +++---
 .../Models/ClassificationStackingEnsembleModel.cs | 4 ++--
 .../Models/RegressionStackingEnsembleModel.cs | 4 ++--
 .../CsvRowTransforms/DateTimeFeatureTransformer.cs | 2 +-
 .../CsvRowTransforms/ReplaceMissingValuesTransformer.cs | 4 ++--
 .../Models/ClassificationGradientBoostModel.cs | 2 +-
 src/SharpLearning.InputOutput/Csv/CsvParser.cs | 2 +-
 src/SharpLearning.InputOutput/Csv/CsvRow.cs | 2 +-
 src/SharpLearning.InputOutput/Csv/CsvWriter.cs | 2 +-
 src/SharpLearning.Metrics/Classification/F1ScoreMetric.cs | 2 +-
 .../Classification/PrecisionMetric.cs | 2 +-
 src/SharpLearning.Metrics/Classification/RecallMetric.cs | 2 +-
 .../NormalizedGiniCoefficientRegressionMetric.cs | 5 ++++-
 src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs | 2 +-
 src/SharpLearning.Optimization/OptimizerResult.cs | 2 +-
 .../Models/RegressionForestModel.cs | 4 ++--
 24 files changed, 52 insertions(+), 40 deletions(-)
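For context on the fixes that follow: CA2208 fires when the standard argument exceptions are constructed with their arguments misused. ArgumentException takes a descriptive message first (with the parameter name as an optional second argument), while ArgumentNullException takes the parameter name first, which is why the constructors below switch from ArgumentException(nameof(x)) to ArgumentNullException(nameof(x)). A minimal sketch of the corrected guard pattern; the type and parameter names here are illustrative, not from the codebase:

using System;

public sealed class Example
{
    readonly object m_dependency;

    public Example(object dependency, int count)
    {
        // ArgumentNullException: the first argument is the parameter name.
        m_dependency = dependency ?? throw new ArgumentNullException(nameof(dependency));

        // ArgumentException: the first argument is the message; the parameter
        // name goes in the optional second argument.
        if (count < 1)
        {
            throw new ArgumentException("count must be at least 1", nameof(count));
        }
    }
}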
diff --git a/src/SharpLearning.Containers/Views/Interval1D.cs b/src/SharpLearning.Containers/Views/Interval1D.cs
index ea52ecdf..48432812 100644
--- a/src/SharpLearning.Containers/Views/Interval1D.cs
+++ b/src/SharpLearning.Containers/Views/Interval1D.cs
@@ -31,7 +31,11 @@ public struct Interval1D : IEquatable
     ///
     public Interval1D(int fromInclusive, int toExclusive)
     {
-        if (fromInclusive >= toExclusive) { throw new ArgumentException(); }
+        if (fromInclusive >= toExclusive)
+        {
+            throw new ArgumentException($"FromInclusive: {fromInclusive} " +
+                $"is larger than or equal to toExclusive: {toExclusive}");
+        }
         FromInclusive = fromInclusive;
         ToExclusive = toExclusive;
         Length = toExclusive - fromInclusive;
diff --git a/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs b/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs
index aec4562b..69eca9b5 100644
--- a/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs
+++ b/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs
@@ -107,7 +107,12 @@ public F64Matrix Agument(F64Matrix dataset)
 
     static double GetHammingDistance(double[] a, double[] b)
     {
-        if (a.Length != b.Length) throw new ArgumentOutOfRangeException("lengths are not equal");
+        if (a.Length != b.Length)
+        {
+            throw new ArgumentException($"Length of a: {a.Length} " +
+                $"is different from length of b: {b.Length}");
+        }
+
         int count = 0;
         for (int i = 0; i < a.Length; i++)
         {
diff --git a/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvesCalculator.cs b/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvesCalculator.cs
index c5dfaaec..7a204eb8 100644
--- a/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvesCalculator.cs
+++ b/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvesCalculator.cs
@@ -43,12 +43,12 @@ public class LearningCurvesCalculator : ILearningCurvesCalculator
         trainingValidationIndexSplitter,
         IIndexSampler sampler, IMetric metric,
         double[] samplePercentages, int numberOfShufflesPrSample = 5)
     {
-        m_trainingValidationIndexSplitter = trainingValidationIndexSplitter ?? throw new ArgumentException(nameof(trainingValidationIndexSplitter));
-        m_indexedSampler = sampler ?? throw new ArgumentException(nameof(sampler));
+        m_trainingValidationIndexSplitter = trainingValidationIndexSplitter ?? throw new ArgumentNullException(nameof(trainingValidationIndexSplitter));
+        m_indexedSampler = sampler ?? throw new ArgumentNullException(nameof(sampler));
         m_metric = metric ?? throw new ArgumentNullException(nameof(metric));
-        if (samplePercentages == null) { throw new ArgumentNullException("samplePercentages"); }
+        if (samplePercentages == null) { throw new ArgumentNullException(nameof(samplePercentages)); }
         if (samplePercentages.Length < 1) { throw new ArgumentException("SamplePercentages length must be at least 1"); }
-        if (numberOfShufflesPrSample < 1) { throw new ArgumentNullException("numberOfShufflesPrSample must be at least 1"); }
+        if (numberOfShufflesPrSample < 1) { throw new ArgumentException("numberOfShufflesPrSample must be at least 1"); }
 
         m_samplePercentages = samplePercentages;
         m_numberOfShufflesPrSample = numberOfShufflesPrSample;
diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs
index e43924bf..21fe50d7 100644
--- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs
+++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs
@@ -81,9 +81,9 @@ public abstract class ClassificationImpurityCalculator
     ///
     public void Init(double[] targetNames, double[] targets, double[] weights, Interval1D interval)
     {
-        m_targets = targets ?? throw new ArgumentException(nameof(targets));
-        m_weights = weights ?? throw new ArgumentException(nameof(weights));
-        m_targetNames = targetNames ?? throw new ArgumentException(nameof(targetNames));
+        m_targets = targets ?? throw new ArgumentNullException(nameof(targets));
+        m_weights = weights ?? throw new ArgumentNullException(nameof(weights));
+        m_targetNames = targetNames ?? throw new ArgumentNullException(nameof(targetNames));
         m_interval = interval;
 
         SetMinMaxTargetNames();
diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/RegressionImpurityCalculator.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/RegressionImpurityCalculator.cs
index 41dc06a3..b49e3cdb 100644
--- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/RegressionImpurityCalculator.cs
+++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/RegressionImpurityCalculator.cs
@@ -60,8 +60,8 @@ public RegressionImpurityCalculator()
     ///
     public void Init(double[] uniqueTargets, double[] targets, double[] weights, Interval1D interval)
     {
-        m_targets = targets ?? throw new ArgumentException(nameof(targets));
-        m_weights = weights ?? throw new ArgumentException(nameof(weights));
+        m_targets = targets ?? throw new ArgumentNullException(nameof(targets));
+        m_weights = weights ?? throw new ArgumentNullException(nameof(weights));
         m_interval = interval;
 
         m_weightedTotal = 0.0;
diff --git a/src/SharpLearning.DecisionTrees/TreeBuilders/BestFirstTreeBuilder.cs b/src/SharpLearning.DecisionTrees/TreeBuilders/BestFirstTreeBuilder.cs
index 8a10ab4b..927c7111 100644
--- a/src/SharpLearning.DecisionTrees/TreeBuilders/BestFirstTreeBuilder.cs
+++ b/src/SharpLearning.DecisionTrees/TreeBuilders/BestFirstTreeBuilder.cs
@@ -68,8 +68,8 @@ public BestFirstTreeBuilder(int maximumTreeDepth,
         if (maximumLeafCount <= 1) { throw new ArgumentException("maximum leaf count must be larger than 1"); }
         if (minimumInformationGain <= 0) { throw new ArgumentException("minimum information gain must be larger than 0"); }
         if (featuresPrSplit < 0) { throw new ArgumentException("features pr split must be at least 0"); }
-        m_splitSearcher = splitSearcher ?? throw new ArgumentException(nameof(splitSearcher));
-        m_impurityCalculator = impurityCalculator ?? throw new ArgumentException(nameof(impurityCalculator));
+        m_splitSearcher = splitSearcher ?? throw new ArgumentNullException(nameof(splitSearcher));
+        m_impurityCalculator = impurityCalculator ?? throw new ArgumentNullException(nameof(impurityCalculator));
 
         m_maximumTreeDepth = maximumTreeDepth;
         m_maximumLeafCount = maximumLeafCount;
diff --git a/src/SharpLearning.DecisionTrees/TreeBuilders/DepthFirstTreeBuilder.cs b/src/SharpLearning.DecisionTrees/TreeBuilders/DepthFirstTreeBuilder.cs
index 4610db50..a9807e0e 100644
--- a/src/SharpLearning.DecisionTrees/TreeBuilders/DepthFirstTreeBuilder.cs
+++ b/src/SharpLearning.DecisionTrees/TreeBuilders/DepthFirstTreeBuilder.cs
@@ -63,8 +63,8 @@ public DepthFirstTreeBuilder(int maximumTreeDepth,
         if (maximumTreeDepth <= 0) { throw new ArgumentException("maximum tree depth must be larger than 0"); }
         if (minimumInformationGain <= 0) { throw new ArgumentException("minimum information gain must be larger than 0"); }
         if (featuresPrSplit < 0) { throw new ArgumentException("features pr split must be at least 0"); }
-        m_splitSearcher = splitSearcher ?? throw new ArgumentException(nameof(splitSearcher));
-        m_impurityCalculator = impurityCalculator ?? throw new ArgumentException(nameof(impurityCalculator));
+        m_splitSearcher = splitSearcher ?? throw new ArgumentNullException(nameof(splitSearcher));
+        m_impurityCalculator = impurityCalculator ?? throw new ArgumentNullException(nameof(impurityCalculator));
 
         m_maximumTreeDepth = maximumTreeDepth;
         m_featuresPrSplit = featuresPrSplit;
diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationStackingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationStackingEnsembleLearner.cs
index 385faede..158e3320 100644
--- a/src/SharpLearning.Ensemble/Learners/ClassificationStackingEnsembleLearner.cs
+++ b/src/SharpLearning.Ensemble/Learners/ClassificationStackingEnsembleLearner.cs
@@ -82,9 +82,9 @@ public ClassificationStackingEnsembleLearner(
         ICrossValidation crossValidation,
         bool includeOriginalFeaturesForMetaLearner = true)
     {
-        m_learners = learners ?? throw new ArgumentException(nameof(learners));
-        m_crossValidation = crossValidation ?? throw new ArgumentException(nameof(crossValidation));
-        m_metaLearner = metaLearner ?? throw new ArgumentException(nameof(metaLearner));
+        m_learners = learners ?? throw new ArgumentNullException(nameof(learners));
+        m_crossValidation = crossValidation ?? throw new ArgumentNullException(nameof(crossValidation));
+        m_metaLearner = metaLearner ?? throw new ArgumentNullException(nameof(metaLearner));
 
         m_includeOriginalFeaturesForMetaLearner = includeOriginalFeaturesForMetaLearner;
     }
diff --git a/src/SharpLearning.Ensemble/Learners/RegressionStackingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionStackingEnsembleLearner.cs
index 41209e51..dacfa1e3 100644
--- a/src/SharpLearning.Ensemble/Learners/RegressionStackingEnsembleLearner.cs
+++ b/src/SharpLearning.Ensemble/Learners/RegressionStackingEnsembleLearner.cs
@@ -76,9 +76,9 @@ public RegressionStackingEnsembleLearner(
         ICrossValidation crossValidation,
         bool includeOriginalFeaturesForMetaLearner = true)
     {
-        m_learners = learners ?? throw new ArgumentException(nameof(learners));
-        m_crossValidation = crossValidation ?? throw new ArgumentException(nameof(crossValidation));
-        m_metaLearner = metaLearner ?? throw new ArgumentException(nameof(metaLearner));
+        m_learners = learners ?? throw new ArgumentNullException(nameof(learners));
+        m_crossValidation = crossValidation ?? throw new ArgumentNullException(nameof(crossValidation));
+        m_metaLearner = metaLearner ?? throw new ArgumentNullException(nameof(metaLearner));
 
         m_includeOriginalFeaturesForMetaLearner = includeOriginalFeaturesForMetaLearner;
     }
diff --git a/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs b/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs
index 8858acb4..09c77a47 100644
--- a/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs
+++ b/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs
@@ -31,8 +31,8 @@ public ClassificationStackingEnsembleModel(IPredictorModel
diff --git a/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs b/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs
--- a/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs
+++ b/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs
@@ ... @@
     /// False; the meta learner only receives the output of the ensemble models as features
     public RegressionStackingEnsembleModel(IPredictorModel[] ensembleModels, IPredictorModel metaModel, bool includeOriginalFeaturesForMetaLearner)
     {
-        m_ensembleModels = ensembleModels ?? throw new ArgumentException(nameof(ensembleModels));
-        m_metaModel = metaModel ?? throw new ArgumentException(nameof(metaModel));
+        m_ensembleModels = ensembleModels ?? throw new ArgumentNullException(nameof(ensembleModels));
+        m_metaModel = metaModel ?? throw new ArgumentNullException(nameof(metaModel));
 
         m_includeOriginalFeaturesForMetaLearner = includeOriginalFeaturesForMetaLearner;
     }
diff --git a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs
index b74a5139..eb8defd1 100644
--- a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs
+++ b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs
@@ -41,7 +41,7 @@ public DateTimeFeatureTransformer(string dateTimeColumn)
     ///
     public DateTimeFeatureTransformer(string dateTimeColumn, DateTime startDate)
     {
-        if (startDate == null) { throw new ArgumentException("startDate"); }
+        if (startDate == null) { throw new ArgumentNullException(nameof(startDate)); }
         m_dateTimeColumn = dateTimeColumn;
         m_startDate = startDate;
     }
diff --git a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/ReplaceMissingValuesTransformer.cs b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/ReplaceMissingValuesTransformer.cs
index 538316b6..6aa1b36d 100644
--- a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/ReplaceMissingValuesTransformer.cs
+++ b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/ReplaceMissingValuesTransformer.cs
@@ -23,8 +23,8 @@ public sealed class ReplaceMissingValuesTransformer : ICsvRowTransformer
     ///
     public ReplaceMissingValuesTransformer(string replacementValue, params string[] missingValueIdentifiers)
     {
-        m_replacementValue = replacementValue ?? throw new ArgumentException(nameof(replacementValue));
-        if (missingValueIdentifiers == null) { throw new ArgumentException(nameof(missingValueIdentifiers)); }
+        m_replacementValue = replacementValue ?? throw new ArgumentNullException(nameof(replacementValue));
+        if (missingValueIdentifiers == null) { throw new ArgumentNullException(nameof(missingValueIdentifiers)); }
 
         m_missingValueIdentifiers = missingValueIdentifiers.ToDictionary(v => v, v => v);
     }
diff --git a/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs b/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs
index c5ec3647..d1df9e31 100644
--- a/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs
+++ b/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs
@@ -59,7 +59,7 @@ public ClassificationGradientBoostModel(
         int featureCount)
     {
         Trees = trees ?? throw new ArgumentNullException(nameof(trees));
-        TargetNames = targetNames ?? throw new ArgumentException(nameof(targetNames));
+        TargetNames = targetNames ?? throw new ArgumentNullException(nameof(targetNames));
         LearningRate = learningRate;
         InitialLoss = initialLoss;
diff --git a/src/SharpLearning.InputOutput/Csv/CsvParser.cs b/src/SharpLearning.InputOutput/Csv/CsvParser.cs
index 4085627d..78b144b6 100644
--- a/src/SharpLearning.InputOutput/Csv/CsvParser.cs
+++ b/src/SharpLearning.InputOutput/Csv/CsvParser.cs
@@ -32,7 +32,7 @@ public CsvParser(Func reader,
         bool quoteInclosedColumns = false,
         bool hasHeader = true)
     {
-        m_getReader = reader ?? throw new ArgumentException("reader");
+        m_getReader = reader ?? throw new ArgumentNullException(nameof(reader));
         m_separator = separator;
         m_quoteInclosedColumns = quoteInclosedColumns;
         m_hasHeader = hasHeader;
diff --git a/src/SharpLearning.InputOutput/Csv/CsvRow.cs b/src/SharpLearning.InputOutput/Csv/CsvRow.cs
index 32d6aa01..95f2c974 100644
--- a/src/SharpLearning.InputOutput/Csv/CsvRow.cs
+++ b/src/SharpLearning.InputOutput/Csv/CsvRow.cs
@@ -27,7 +27,7 @@ public class CsvRow
     public CsvRow(Dictionary columnNameToIndex, params string[] data)
     {
         if (data == null) { throw new ArgumentException("row"); }
-        if (columnNameToIndex == null) { throw new ArgumentException("columnNameToIndex"); }
+        if (columnNameToIndex == null) { throw new ArgumentNullException(nameof(columnNameToIndex)); }
         if (data.Length != columnNameToIndex.Count) { throw new ArgumentException("data and columNameToIndex lengths does not match"); }
         Values = data;
         ColumnNameToIndex = columnNameToIndex;
diff --git a/src/SharpLearning.InputOutput/Csv/CsvWriter.cs b/src/SharpLearning.InputOutput/Csv/CsvWriter.cs
index f73aca1f..a312ccfa 100644
--- a/src/SharpLearning.InputOutput/Csv/CsvWriter.cs
+++ b/src/SharpLearning.InputOutput/Csv/CsvWriter.cs
@@ -20,7 +20,7 @@ public class CsvWriter
     ///
     public CsvWriter(Func writer, char separator = CsvParser.DefaultDelimiter)
     {
-        m_writer = writer ?? throw new ArgumentException("writer");
+        m_writer = writer ?? throw new ArgumentNullException(nameof(writer));
         m_separator = separator;
     }
diff --git a/src/SharpLearning.Metrics/Classification/F1ScoreMetric.cs b/src/SharpLearning.Metrics/Classification/F1ScoreMetric.cs
index ae7883b4..f90556dc 100644
--- a/src/SharpLearning.Metrics/Classification/F1ScoreMetric.cs
+++ b/src/SharpLearning.Metrics/Classification/F1ScoreMetric.cs
@@ -18,7 +18,7 @@ public sealed class F1ScoreMetric : IClassificationMetric
     ///
     public F1ScoreMetric(T positiveTarget)
     {
-        if (positiveTarget == null) { throw new ArgumentNullException("positiveClassLabel"); }
+        if (positiveTarget == null) { throw new ArgumentNullException(nameof(positiveTarget)); }
         m_positiveTarget = positiveTarget;
     }
diff --git a/src/SharpLearning.Metrics/Classification/PrecisionMetric.cs b/src/SharpLearning.Metrics/Classification/PrecisionMetric.cs
index ba35c02f..78d4d75a 100644
--- a/src/SharpLearning.Metrics/Classification/PrecisionMetric.cs
+++ b/src/SharpLearning.Metrics/Classification/PrecisionMetric.cs
@@ -17,7 +17,7 @@ public sealed class PrecisionMetric : IClassificationMetric
     ///
     public PrecisionMetric(T positiveTarget)
     {
-        if (positiveTarget == null) { throw new ArgumentNullException("positiveClassLabel"); }
+        if (positiveTarget == null) { throw new ArgumentNullException(nameof(positiveTarget)); }
         m_positiveTarget = positiveTarget;
     }
diff --git a/src/SharpLearning.Metrics/Classification/RecallMetric.cs b/src/SharpLearning.Metrics/Classification/RecallMetric.cs
index ebe98f90..2689d150 100644
--- a/src/SharpLearning.Metrics/Classification/RecallMetric.cs
+++ b/src/SharpLearning.Metrics/Classification/RecallMetric.cs
@@ -17,7 +17,7 @@ public sealed class RecallMetric : IClassificationMetric
     ///
     public RecallMetric(T positiveTarget)
    {
-        if (positiveTarget == null) { throw new ArgumentNullException("positiveClassLabel"); }
+        if (positiveTarget == null) { throw new ArgumentNullException(nameof(positiveTarget)); }
         m_positiveTarget = positiveTarget;
     }
diff --git a/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs b/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs
index a24311f2..cd6d1fa2 100644
--- a/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs
+++ b/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs
@@ -30,7 +30,10 @@ public double Error(double[] target, double[] predicted)
     static double GiniCoefficient(double[] target, double[] predicted)
     {
         if (target.Length != predicted.Length)
-        { throw new ArgumentException(); }
+        {
+            throw new ArgumentException($"Target length: {target.Length} " +
+                $"differs from prediction length: {predicted.Length}");
+        }
 
         var all = predicted.Zip(target, (prediction, actual) => new
         {
diff --git a/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs b/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs
index d7766577..65947b66 100644
--- a/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs
+++ b/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs
@@ -267,7 +267,7 @@ void UpdateParam(int i, float[] parameters, float[] gradients, double l2Decay, d
                     }
                     break;
                 default:
-                    throw new ArgumentOutOfRangeException();
+                    throw new ArgumentException($"Unknown optimizer method {m_optimizerMethod}");
             }
 
             gradients[j] = 0.0f; // zero out gradient between each iteration
diff --git a/src/SharpLearning.Optimization/OptimizerResult.cs b/src/SharpLearning.Optimization/OptimizerResult.cs
index b17fe7e2..fc8f44a3 100644
--- a/src/SharpLearning.Optimization/OptimizerResult.cs
+++ b/src/SharpLearning.Optimization/OptimizerResult.cs
@@ -15,7 +15,7 @@ public sealed class OptimizerResult
     ///
     public OptimizerResult(double[] parameterSet, double error)
     {
-        ParameterSet = parameterSet ?? throw new ArgumentException(nameof(parameterSet));
+        ParameterSet = parameterSet ?? throw new ArgumentNullException(nameof(parameterSet));
         Error = error;
     }
diff --git a/src/SharpLearning.RandomForest/Models/RegressionForestModel.cs b/src/SharpLearning.RandomForest/Models/RegressionForestModel.cs
index bfc46c12..7a1e9899 100644
--- a/src/SharpLearning.RandomForest/Models/RegressionForestModel.cs
+++ b/src/SharpLearning.RandomForest/Models/RegressionForestModel.cs
@@ -25,8 +25,8 @@ public sealed class RegressionForestModel : IPredictorModel
     /// The summed variable importance from all decision trees
     public RegressionForestModel(RegressionDecisionTreeModel[] trees, double[] rawVariableImportance)
     {
-        Trees = trees ?? throw new ArgumentNullException("models");
-        m_rawVariableImportance = rawVariableImportance ?? throw new ArgumentNullException("rawVariableImportance");
+        Trees = trees ?? throw new ArgumentNullException(nameof(trees));
+        m_rawVariableImportance = rawVariableImportance ?? throw new ArgumentNullException(nameof(rawVariableImportance));
     }
 
     ///

From 607d40009cad43f944a3785c6d77feb5a06571df Mon Sep 17 00:00:00 2001
From: mdabros
Date: Sat, 13 Apr 2024 17:31:46 +0200
Subject: [PATCH 15/15] Fix link

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index ddfee786..111fa327 100644
--- a/README.md
+++ b/README.md
@@ -55,7 +55,7 @@ However, several optimization methods are available for hyperparameter tuning:
 
 License
 -------
-SharpLearning is covered under the terms of the [MIT](LICENSE.md) license. You may therefore link to it and use it in both opensource and proprietary software projects.
+SharpLearning is covered under the terms of the [MIT](LICENSE) license. You may therefore link to it and use it in both opensource and proprietary software projects.
 
 Documentation
 -------------