Skip to content

Code analysis: Add editorconfig and fix code according to rules #150

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 48 commits into from
Feb 9, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
48 commits
Select commit Hold shift + click to select a range
f26909a
Add and enable editor config
mdabros Jan 24, 2024
966e8e3
Fix IDE0055
mdabros Jan 24, 2024
d85e0d7
Fix CA1052
mdabros Jan 24, 2024
2661364
Remove public constructor
mdabros Jan 24, 2024
6381976
remove private/internal where not needed
mdabros Jan 24, 2024
5eeb4e5
Fix CA1802
mdabros Jan 24, 2024
3a3f191
Fix CA1822
mdabros Jan 24, 2024
d0dba98
Fix CA1822
mdabros Jan 24, 2024
1feb287
Fix IDE0044
mdabros Jan 24, 2024
acc445c
Make static
mdabros Jan 24, 2024
97ae27f
Fix src links
mdabros Jan 24, 2024
974ba53
Fix IDE0090
mdabros Jan 24, 2024
5dd8e1b
Disable CA1668
mdabros Jan 24, 2024
fb47f05
Fix CA2208
mdabros Jan 24, 2024
0ecaf89
Merge branch 'master' into add-editorconfig
mdabros Apr 13, 2024
607d400
Fix link
mdabros Apr 13, 2024
9296433
Switch to file scoped namespace
mdabros Feb 9, 2025
b427c4e
use var
mdabros Feb 9, 2025
5636163
Fix bug
mdabros Feb 9, 2025
64dca67
Remove unused
mdabros Feb 9, 2025
81ea114
remove unused
mdabros Feb 9, 2025
f0bcc9c
Fix usings
mdabros Feb 9, 2025
923a5dd
Fix static and unused
mdabros Feb 9, 2025
3f2614d
Use collection expression
mdabros Feb 9, 2025
7baf6bb
Fix tests
mdabros Feb 9, 2025
354e1aa
Simplify
mdabros Feb 9, 2025
ee54e46
Remove redundant
mdabros Feb 9, 2025
49c18d3
Remove unused
mdabros Feb 9, 2025
7c0c292
Remove unused
mdabros Feb 9, 2025
574d62f
Simplify collection initialization
mdabros Feb 9, 2025
33a556a
Use compound assignment
mdabros Feb 9, 2025
cd776ba
Simplify using
mdabros Feb 9, 2025
db6759f
use pattern matching
mdabros Feb 9, 2025
de51748
Fix namespace
mdabros Feb 9, 2025
339e533
Fix names
mdabros Feb 9, 2025
ab3381d
Fix error
mdabros Feb 9, 2025
310ace9
Fix namespace
mdabros Feb 9, 2025
7d95af2
Fix model format
mdabros Feb 9, 2025
902e4e4
dotnet format
mdabros Feb 9, 2025
dbf1312
Use Assert.ThrowsException
mdabros Feb 9, 2025
b7109c0
Use tryget
mdabros Feb 9, 2025
f45460e
dotnet format
mdabros Feb 9, 2025
914fc09
Fix warnings
mdabros Feb 9, 2025
71755d2
Fix warning
mdabros Feb 9, 2025
e8cad73
Fix warnings
mdabros Feb 9, 2025
f055ab6
Fix warnings
mdabros Feb 9, 2025
c953f7f
disable
mdabros Feb 9, 2025
6f6ae5d
fix order and enable
mdabros Feb 9, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
362 changes: 362 additions & 0 deletions .editorconfig

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ However, several optimization methods are available for hyperparameter tuning:
License
-------

SharpLearning is covered under the terms of the [MIT](LICENSE.md) license. You may therefore link to it and use it in both opensource and proprietary software projects.
SharpLearning is covered under the terms of the [MIT](LICENSE) license. You may therefore link to it and use it in both opensource and proprietary software projects.

Documentation
-------------
Expand Down
13 changes: 7 additions & 6 deletions SharpLearning.sln
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ci", "ci", "{45CBAF1D-3116-
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "root", "root", "{D83436F7-2EE6-4D59-ACC3-355C973BCDAF}"
ProjectSection(SolutionItems) = preProject
.editorconfig = .editorconfig
.gitattributes = .gitattributes
.gitignore = .gitignore
CONTRIBUTING.md = CONTRIBUTING.md
Expand All @@ -74,12 +75,12 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "root", "root", "{D83436F7-2
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{2204FA16-973A-48CF-A9FD-94FA72424BA4}"
ProjectSection(SolutionItems) = preProject
Directory.Build.props = Directory.Build.props
Directory.Build.targets = Directory.Build.targets
OutputBuildProject.props = OutputBuildProject.props
OutputBuildProps.props = OutputBuildProps.props
OutputBuildTargets.props = OutputBuildTargets.props
SourceLink.GitHub.props = SourceLink.GitHub.props
src\Directory.Build.props = src\Directory.Build.props
src\Directory.Build.targets = src\Directory.Build.targets
src\OutputBuildProject.props = src\OutputBuildProject.props
src\OutputBuildProps.props = src\OutputBuildProps.props
src\OutputBuildTargets.props = src\OutputBuildTargets.props
src\SourceLink.GitHub.props = src\SourceLink.GitHub.props
EndProjectSection
EndProject
Global
Expand Down
7 changes: 3 additions & 4 deletions src/Directory.Build.props
Original file line number Diff line number Diff line change
Expand Up @@ -29,13 +29,12 @@

<GenerateDocumentationFile>true</GenerateDocumentationFile>

<!--Disabled for now-->
<!--<AnalysisLevel>latest</AnalysisLevel>
<AnalysisLevel>latest</AnalysisLevel>
<EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
<RunAnalyzersDuringBuild>true</RunAnalyzersDuringBuild>
<EnableNETAnalyzers>true</EnableNETAnalyzers>
<CodeAnalysisTreatWarningsAsErrors>true</CodeAnalysisTreatWarningsAsErrors>-->
<NoWarn>CS1591</NoWarn>
<CodeAnalysisTreatWarningsAsErrors>true</CodeAnalysisTreatWarningsAsErrors>
<NoWarn>CS1591;CS1668</NoWarn>

</PropertyGroup>

Expand Down
25 changes: 12 additions & 13 deletions src/SharpLearning.AdaBoost.Test/AssemblyInitializeCultureTest.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,19 +2,18 @@
using System.Threading;
using Microsoft.VisualStudio.TestTools.UnitTesting;

namespace SharpLearning.AdaBoost.Test
namespace SharpLearning.AdaBoost.Test;

[TestClass]
public static class AssemblyInitializeCultureTest
{
[TestClass]
public class AssemblyInitializeCultureTest
[AssemblyInitialize]
public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c)
{
[AssemblyInitialize]
public static void AssemblyInitializeCultureTest_InvariantCulture(TestContext c)
{
CultureInfo culture = CultureInfo.InvariantCulture;
CultureInfo.DefaultThreadCurrentCulture = culture;
CultureInfo.DefaultThreadCurrentUICulture = culture;
Thread.CurrentThread.CurrentCulture = culture;
Thread.CurrentThread.CurrentUICulture = culture;
}
CultureInfo culture = CultureInfo.InvariantCulture;
CultureInfo.DefaultThreadCurrentCulture = culture;
CultureInfo.DefaultThreadCurrentUICulture = culture;
Thread.CurrentThread.CurrentCulture = culture;
Thread.CurrentThread.CurrentUICulture = culture;
}
}
}
41 changes: 20 additions & 21 deletions src/SharpLearning.AdaBoost.Test/DataSetUtilities.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,28 +2,28 @@
using SharpLearning.Containers.Matrices;
using SharpLearning.InputOutput.Csv;

namespace SharpLearning.AdaBoost.Test
namespace SharpLearning.AdaBoost.Test;

public static class DataSetUtilities
{
public static class DataSetUtilities
public static (F64Matrix observations, double[] targets) LoadAptitudeDataSet()
{
public static (F64Matrix observations, double[] targets) LoadAptitudeDataSet()
{
var parser = new CsvParser(() => new StringReader(AptitudeData));
var observations = parser.EnumerateRows(v => v != "Pass").ToF64Matrix();
var targets = parser.EnumerateRows("Pass").ToF64Vector();
return (observations, targets);
}
var parser = new CsvParser(() => new StringReader(AptitudeData));
var observations = parser.EnumerateRows(v => v != "Pass").ToF64Matrix();
var targets = parser.EnumerateRows("Pass").ToF64Vector();
return (observations, targets);
}

public static (F64Matrix observations, double[] targets) LoadGlassDataSet()
{
var parser = new CsvParser(() => new StringReader(GlassData));
var observations = parser.EnumerateRows(v => v != "Target").ToF64Matrix();
var targets = parser.EnumerateRows("Target").ToF64Vector();
return (observations, targets);
}
public static (F64Matrix observations, double[] targets) LoadGlassDataSet()
{
var parser = new CsvParser(() => new StringReader(GlassData));
var observations = parser.EnumerateRows(v => v != "Target").ToF64Matrix();
var targets = parser.EnumerateRows("Target").ToF64Vector();
return (observations, targets);
}

public const string AptitudeData =
@"AptitudeTestScore;PreviousExperience_month;Pass
public const string AptitudeData =
@"AptitudeTestScore;PreviousExperience_month;Pass
5;6;0
1;15;0
1;12;0
Expand Down Expand Up @@ -51,7 +51,7 @@ public static (F64Matrix observations, double[] targets) LoadGlassDataSet()
1;8;0
5;12;0";

public const string GlassData =
public const string GlassData =
@"F1;F2;F3;F4;F5;F6;F7;F8;F10;Target
1.52101;13.64;4.49;1.1;71.78;0.06;8.75;0;0;1
1.51761;13.89;3.6;1.36;72.73;0.48;7.83;0;0;1
Expand Down Expand Up @@ -268,5 +268,4 @@ public static (F64Matrix observations, double[] targets) LoadGlassDataSet()
1.51651;14.38;0;1.94;73.61;0;8.48;1.57;0;7
1.51711;14.23;0;2.08;73.36;0;8.62;1.67;0;7";

}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -5,83 +5,82 @@
using SharpLearning.Containers.Extensions;
using SharpLearning.Metrics.Classification;

namespace SharpLearning.AdaBoost.Test.Learners
namespace SharpLearning.AdaBoost.Test.Learners;

[TestClass]
public class ClassificationAdaBoostLearnerTest
{
[TestClass]
public class ClassificationAdaBoostLearnerTest
[TestMethod]
public void ClassificationAdaBoostLearner_Learn_AptitudeData()
{
[TestMethod]
public void ClassificationAdaBoostLearner_Learn_AptitudeData()
{
var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();
var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

var sut = new ClassificationAdaBoostLearner(10);

var model = sut.Learn(observations, targets);
var predictions = model.Predict(observations);
var sut = new ClassificationAdaBoostLearner(10);

var evaluator = new TotalErrorClassificationMetric<double>();
var actual = evaluator.Error(targets, predictions);
var model = sut.Learn(observations, targets);
var predictions = model.Predict(observations);

Assert.AreEqual(0.038461538461538464, actual);
}
var evaluator = new TotalErrorClassificationMetric<double>();
var actual = evaluator.Error(targets, predictions);

[TestMethod]
public void ClassificationAdaBoostLearner_Learn_Glass()
{
var (observations, targets) = DataSetUtilities.LoadGlassDataSet();
Assert.AreEqual(0.038461538461538464, actual);
}

var sut = new ClassificationAdaBoostLearner(10, 1, 5);
[TestMethod]
public void ClassificationAdaBoostLearner_Learn_Glass()
{
var (observations, targets) = DataSetUtilities.LoadGlassDataSet();

var model = sut.Learn(observations, targets);
var predictions = model.Predict(observations);
var sut = new ClassificationAdaBoostLearner(10, 1, 5);

var evaluator = new TotalErrorClassificationMetric<double>();
var actual = evaluator.Error(targets, predictions);
var model = sut.Learn(observations, targets);
var predictions = model.Predict(observations);

Assert.AreEqual(0.0, actual);
}
var evaluator = new TotalErrorClassificationMetric<double>();
var actual = evaluator.Error(targets, predictions);

[TestMethod]
public void ClassificationAdaBoostLearner_Learn_AptitudeData_SequenceContainNoItemIssue_Solved()
{
var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();
var indices = new int[] { 22, 6, 23, 12 };
Assert.AreEqual(0.0, actual);
}

var sut = new ClassificationAdaBoostLearner(10);

var model = sut.Learn(observations, targets, indices);
var predictions = model.Predict(observations);
var indexedPredictions = predictions.GetIndices(indices);
var indexedTargets = targets.GetIndices(indices);
[TestMethod]
public void ClassificationAdaBoostLearner_Learn_AptitudeData_SequenceContainNoItemIssue_Solved()
{
var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();
var indices = new int[] { 22, 6, 23, 12 };

var evaluator = new TotalErrorClassificationMetric<double>();
var actual = evaluator.Error(indexedTargets, indexedPredictions);
var sut = new ClassificationAdaBoostLearner(10);

Assert.AreEqual(0.0, actual);
}
var model = sut.Learn(observations, targets, indices);
var predictions = model.Predict(observations);
var indexedPredictions = predictions.GetIndices(indices);
var indexedTargets = targets.GetIndices(indices);

[TestMethod]
public void ClassificationAdaBoostLearner_Learn_Glass_Indexed()
{
var (observations, targets) = DataSetUtilities.LoadGlassDataSet();
var evaluator = new TotalErrorClassificationMetric<double>();
var actual = evaluator.Error(indexedTargets, indexedPredictions);

Assert.AreEqual(0.0, actual);
}

[TestMethod]
public void ClassificationAdaBoostLearner_Learn_Glass_Indexed()
{
var (observations, targets) = DataSetUtilities.LoadGlassDataSet();

var sut = new ClassificationAdaBoostLearner(10, 1, 5);
var sut = new ClassificationAdaBoostLearner(10, 1, 5);

var indices = Enumerable.Range(0, targets.Length).ToArray();
indices.Shuffle(new Random(42));
indices = indices.Take((int)(targets.Length * 0.7))
.ToArray();
var indices = Enumerable.Range(0, targets.Length).ToArray();
indices.Shuffle(new Random(42));
indices = indices.Take((int)(targets.Length * 0.7))
.ToArray();

var model = sut.Learn(observations, targets, indices);
var predictions = model.Predict(observations);
var indexedPredictions = predictions.GetIndices(indices);
var indexedTargets = targets.GetIndices(indices);
var model = sut.Learn(observations, targets, indices);
var predictions = model.Predict(observations);
var indexedPredictions = predictions.GetIndices(indices);
var indexedTargets = targets.GetIndices(indices);

var evaluator = new TotalErrorClassificationMetric<double>();
var actual = evaluator.Error(indexedTargets, indexedPredictions);
var evaluator = new TotalErrorClassificationMetric<double>();
var actual = evaluator.Error(indexedTargets, indexedPredictions);

Assert.AreEqual(0.0, actual);
}
Assert.AreEqual(0.0, actual);
}
}
Loading