.editorconfig: Update .editorconfig and add Roslynator.Analyzers/.Formatting.Analyzers (#159)
mdabros authored Feb 16, 2025
1 parent df625bc commit 6ec9a87
Showing 315 changed files with 2,536 additions and 2,059 deletions.
1,202 changes: 1,141 additions & 61 deletions .editorconfig

Large diffs are not rendered by default.

8 changes: 7 additions & 1 deletion src/Directory.Build.props
@@ -23,16 +23,22 @@
<PublishRelease>true</PublishRelease>
<PackRelease>true</PackRelease>

<!-- https://github.com/dotnet/roslyn/issues/41640 🤦 -->
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<NoWarn>$(NoWarn);CS1591;RCS1138;CS1668</NoWarn>

<AnalysisLevel>latest</AnalysisLevel>
<EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
<RunAnalyzersDuringBuild>true</RunAnalyzersDuringBuild>
<EnableNETAnalyzers>true</EnableNETAnalyzers>
<CodeAnalysisTreatWarningsAsErrors>true</CodeAnalysisTreatWarningsAsErrors>
<NoWarn>CS1591;CS1668</NoWarn>

</PropertyGroup>

<ItemGroup>
<PackageReference Include="Roslynator.Analyzers" Version="4.12.11" PrivateAssets="All"/>
<PackageReference Include="Roslynator.Formatting.Analyzers" Version="4.12.11" PrivateAssets="All"/>
</ItemGroup>

<Import Project="$(MSBuildThisFileDirectory)\OutputBuildProps.props" />

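The Roslynator packages above are build-time analyzers only; PrivateAssets="All" keeps them from flowing to consumers of the SharpLearning packages, and CodeAnalysisTreatWarningsAsErrors turns their diagnostics into build errors. Most of the file changes in this commit appear to be the mechanical style fixes such analyzers suggest; a small before/after sketch of the recurring pattern, mirroring the learner diffs further down (illustrative only, not verbatim from the commit):

    // Before: style flagged by the analyzers
    if (!Boost(observations, targets, indices, i))
        break;

    m_sampleWeights[index] = m_sampleWeights[index] / weightSum;

    // After: the form used throughout this commit
    if (!Boost(observations, targets, indices, i))
    {
        break;
    }

    m_sampleWeights[index] /= weightSum;
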
1 change: 0 additions & 1 deletion src/SharpLearning.AdaBoost.Test/DataSetUtilities.cs
@@ -267,5 +267,4 @@ public static (F64Matrix observations, double[] targets) LoadGlassDataSet()
1.52065;14.36;0;2.02;73.42;0;8.44;1.64;0;7
1.51651;14.38;0;1.94;73.61;0;8.48;1.57;0;7
1.51711;14.23;0;2.08;73.36;0;8.62;1.67;0;7";

}
@@ -72,7 +72,7 @@ public void ClassificationAdaBoostModel_PredictProbability_Single()

Assert.AreEqual(0.038461538461538464, error, 0.0000001);

var expected = new ProbabilityPrediction[] { new(0, new Dictionary<double, double> { { 0, 0.553917222019051 }, { 1, 0.446082777980949 }, }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary<double, double> { { 0, 0.564961572849738 }, { 1, 0.435038427150263 }, }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(1, new Dictionary<double, double> { { 0, 0.417527839140627 }, { 1, 0.582472160859373 }, }), new(1, new Dictionary<double, double> { { 0, 0.409988559960094 }, { 1, 0.590011440039906 }, }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary<double, double> { { 0, 0.461264944069783 }, { 1, 0.538735055930217 }, }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary<double, double> { { 0, 0.549503146925505 }, { 1, 0.450496853074495 }, }), new(0, new Dictionary<double, double> { { 0, 0.537653803214063 }, { 1, 0.462346196785938 }, }), new(1, new Dictionary<double, double> { { 0, 0.37650723540928 }, { 1, 0.62349276459072 }, }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary<double, double> { { 0, 0.524371409810479 }, { 1, 0.475628590189522 }, }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary<double, double> { { 0, 0.471117379964633 }, { 1, 0.528882620035367 }, }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary<double, double> { { 0, 0.404976804073458 }, { 1, 0.595023195926542 }, }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), };
var expected = new ProbabilityPrediction[] { new(0, new Dictionary<double, double> { { 0, 0.553917222019051 }, { 1, 0.446082777980949 } }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 } }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 } }), new(0, new Dictionary<double, double> { { 0, 0.564961572849738 }, { 1, 0.435038427150263 } }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 } }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(1, new Dictionary<double, double> { { 0, 0.417527839140627 }, { 1, 0.582472160859373 } }), new(1, new Dictionary<double, double> { { 0, 0.409988559960094 }, { 1, 0.590011440039906 } }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary<double, double> { { 0, 0.461264944069783 }, { 1, 0.538735055930217 } }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 } }), new(0, new Dictionary<double, double> { { 0, 0.549503146925505 }, { 1, 0.450496853074495 } }), new(0, new Dictionary<double, double> { { 0, 0.537653803214063 }, { 1, 0.462346196785938 } }), new(1, new Dictionary<double, double> { { 0, 0.37650723540928 }, { 1, 0.62349276459072 } }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 } }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(0, new Dictionary<double, double> { { 0, 0.524371409810479 }, { 1, 0.475628590189522 } }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary<double, double> { { 0, 0.471117379964633 }, { 1, 0.528882620035367 } }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary<double, double> { { 0, 0.404976804073458 }, { 1, 0.595023195926542 } }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 } }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }) };
CollectionAssert.AreEqual(expected, actual);
}

@@ -90,7 +90,7 @@ public void ClassificationAdaBoostModel_PredictProbability_Multiple()

Assert.AreEqual(0.038461538461538464, error, 0.0000001);

var expected = new ProbabilityPrediction[] { new(0, new Dictionary<double, double> { { 0, 0.553917222019051 }, { 1, 0.446082777980949 }, }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary<double, double> { { 0, 0.564961572849738 }, { 1, 0.435038427150263 }, }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(1, new Dictionary<double, double> { { 0, 0.417527839140627 }, { 1, 0.582472160859373 }, }), new(1, new Dictionary<double, double> { { 0, 0.409988559960094 }, { 1, 0.590011440039906 }, }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary<double, double> { { 0, 0.461264944069783 }, { 1, 0.538735055930217 }, }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary<double, double> { { 0, 0.549503146925505 }, { 1, 0.450496853074495 }, }), new(0, new Dictionary<double, double> { { 0, 0.537653803214063 }, { 1, 0.462346196785938 }, }), new(1, new Dictionary<double, double> { { 0, 0.37650723540928 }, { 1, 0.62349276459072 }, }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary<double, double> { { 0, 0.524371409810479 }, { 1, 0.475628590189522 }, }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary<double, double> { { 0, 0.471117379964633 }, { 1, 0.528882620035367 }, }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary<double, double> { { 0, 0.404976804073458 }, { 1, 0.595023195926542 }, }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), };
var expected = new ProbabilityPrediction[] { new(0, new Dictionary<double, double> { { 0, 0.553917222019051 }, { 1, 0.446082777980949 } }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 } }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 } }), new(0, new Dictionary<double, double> { { 0, 0.564961572849738 }, { 1, 0.435038427150263 } }), new(1, new Dictionary<double, double> { { 0, 0.455270122123639 }, { 1, 0.544729877876361 } }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(1, new Dictionary<double, double> { { 0, 0.417527839140627 }, { 1, 0.582472160859373 } }), new(1, new Dictionary<double, double> { { 0, 0.409988559960094 }, { 1, 0.590011440039906 } }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary<double, double> { { 0, 0.461264944069783 }, { 1, 0.538735055930217 } }), new(0, new Dictionary<double, double> { { 0, 0.590671208378385 }, { 1, 0.409328791621616 } }), new(0, new Dictionary<double, double> { { 0, 0.549503146925505 }, { 1, 0.450496853074495 } }), new(0, new Dictionary<double, double> { { 0, 0.537653803214063 }, { 1, 0.462346196785938 } }), new(1, new Dictionary<double, double> { { 0, 0.37650723540928 }, { 1, 0.62349276459072 } }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 } }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(0, new Dictionary<double, double> { { 0, 0.524371409810479 }, { 1, 0.475628590189522 } }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary<double, double> { { 0, 0.471117379964633 }, { 1, 0.528882620035367 } }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }), new(1, new Dictionary<double, double> { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary<double, double> { { 0, 0.404976804073458 }, { 1, 0.595023195926542 } }), new(0, new Dictionary<double, double> { { 0, 0.573579890413618 }, { 1, 0.426420109586382 } }), new(0, new Dictionary<double, double> { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(0, new Dictionary<double, double> { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }) };
CollectionAssert.AreEqual(expected, actual);
}

@@ -100,14 +100,14 @@ public void ClassificationAdaBoostModel_GetVariableImportance()
var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

var featureNameToIndex = new Dictionary<string, int> { { "AptitudeTestScore", 0 },
{ "PreviousExperience_month", 1 } };
{ "PreviousExperience_month", 1 }, };

var learner = new ClassificationAdaBoostLearner(10, 1, 3);
var sut = learner.Learn(observations, targets);

var actual = sut.GetVariableImportance(featureNameToIndex);
var expected = new Dictionary<string, double> { { "PreviousExperience_month", 100.0 },
{ "AptitudeTestScore", 24.0268096428771 } };
{ "AptitudeTestScore", 24.0268096428771 }, };

Assert.AreEqual(expected.Count, actual.Count);
var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a });
@@ -56,14 +56,14 @@ public void RegressionAdaBoostModel_GetVariableImportance()
var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

var featureNameToIndex = new Dictionary<string, int> { { "AptitudeTestScore", 0 },
{ "PreviousExperience_month", 1 } };
{ "PreviousExperience_month", 1 }, };

var learner = new RegressionAdaBoostLearner(10);
var sut = learner.Learn(observations, targets);

var actual = sut.GetVariableImportance(featureNameToIndex);
var expected = new Dictionary<string, double> { { "PreviousExperience_month", 100.0 },
{ "AptitudeTestScore", 33.8004886838701 } };
{ "AptitudeTestScore", 33.8004886838701 }, };

Assert.AreEqual(expected.Count, actual.Count);
var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a });
@@ -18,5 +18,5 @@ public enum AdaBoostRegressionLoss
/// <summary>
/// Exponential loss
/// </summary>
Exponential
Exponential,
}
@@ -49,8 +49,8 @@ public sealed class ClassificationAdaBoostLearner
/// </summary>
/// <param name="iterations">Number of iterations (models) to boost</param>
/// <param name="learningRate">How much each boost iteration should add (between 1.0 and 0.0)</param>
/// <param name="maximumTreeDepth">The maximum depth of the tree models.
/// for 2 class problem 1 is usually enough. For more classes or larger problems between 3 to 8 is recommended.
/// <param name="maximumTreeDepth">The maximum depth of the tree models.
/// for 2 class problem 1 is usually enough. For more classes or larger problems between 3 to 8 is recommended.
/// 0 will set the depth equal to the number of classes in the problem</param>
/// <param name="minimumSplitSize">minimum node split size in the trees 1 is default</param>
/// <param name="minimumInformationGain">The minimum improvement in information gain before a split is made</param>
@@ -139,27 +139,35 @@ public ClassificationAdaBoostModel Learn(F64Matrix observations, double[] target
for (var i = 0; i < m_iterations; i++)
{
if (!Boost(observations, targets, indices, i))
{
break;
}

var ensembleError = ErrorEstimate(observations, indices);

if (ensembleError == 0.0)
{
break;
}

if (m_modelErrors[i] == 0.0)
{
break;
}

var weightSum = m_sampleWeights.Sum(indices);
if (weightSum <= 0.0)
{
break;
}

if (i == m_iterations - 1)
{
// Normalize weights
for (var j = 0; j < indices.Length; j++)
{
var index = indices[j];
m_sampleWeights[index] = m_sampleWeights[index] / weightSum;
m_sampleWeights[index] /= weightSum;
}
}
}
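For orientation, a minimal usage sketch of this learner, assembled from the constructor documentation above and the test code earlier in this diff; it is an illustration, not part of the change:

    // Requires using System.Collections.Generic and the SharpLearning.AdaBoost namespaces.
    // 10 boosting iterations, learning rate 1.0, maximum tree depth 3
    // (3 to 8 is recommended for multi-class or larger problems).
    var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();
    var learner = new ClassificationAdaBoostLearner(10, 1, 3);
    var model = learner.Learn(observations, targets);

    // Per-feature importance; in the tests the most important feature scores 100.0.
    var featureNameToIndex = new Dictionary<string, int>
    {
        { "AptitudeTestScore", 0 },
        { "PreviousExperience_month", 1 },
    };
    var importance = model.GetVariableImportance(featureNameToIndex);
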
18 changes: 12 additions & 6 deletions src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs
@@ -14,7 +14,7 @@
namespace SharpLearning.AdaBoost.Learners;

/// <summary>
/// Regression AdaBoost learner using the R2 algorithm
/// Regression AdaBoost learner using the R2 algorithm
/// using weighted sampling to target the observations with largest error and
/// weighted median to ensemble the models.
/// </summary>
@@ -44,13 +44,13 @@ public sealed class RegressionAdaBoostLearner : IIndexedLearner<double>, ILearne
readonly WeightedRandomSampler m_sampler;

/// <summary>
/// Regression AdaBoost learner using the R2 algorithm
/// Regression AdaBoost learner using the R2 algorithm
/// using weighted sampling to target the observations with largest error and
/// weighted median to ensemble the models.
/// </summary>
/// <param name="iterations">Number of iterations (models) to boost</param>
/// <param name="learningRate">How much each boost iteration should add (between 1.0 and 0.0)</param>
/// <param name="maximumTreeDepth">The maximum depth of the tree models.
/// <param name="maximumTreeDepth">The maximum depth of the tree models.
/// 0 will set the depth to default 3</param>
/// <param name="loss">Type of loss used when boosting weights. Linear is default</param>
/// <param name="minimumSplitSize">minimum node split size in the trees 1 is default</param>
@@ -137,27 +137,35 @@ public RegressionAdaBoostModel Learn(F64Matrix observations, double[] targets,
for (var i = 0; i < m_iterations; i++)
{
if (!Boost(observations, targets, indices, i))
{
break;
}

var ensembleError = ErrorEstimate(observations, indices);

if (ensembleError == 0.0)
{
break;
}

if (m_modelErrors[i] == 0.0)
{
break;
}

var weightSum = m_sampleWeights.Sum(indices);
if (weightSum <= 0.0)
{
break;
}

if (i == m_iterations - 1)
{
// Normalize weights
for (var j = 0; j < indices.Length; j++)
{
var index = indices[j];
m_sampleWeights[index] = m_sampleWeights[index] / weightSum;
m_sampleWeights[index] /= weightSum;
}
}
}
@@ -195,7 +203,6 @@ bool Boost(F64Matrix observations, double[] targets, int[] indices, int iteratio
var model = m_modelLearner.Learn(observations, targets,
m_sampleIndices); // weighted sampling is used instead of weights in training


var predictions = model.Predict(observations, indices);

for (var i = 0; i < predictions.Length; i++)
@@ -208,7 +215,6 @@ bool Boost(F64Matrix observations, double[] targets, int[] indices, int iteratio

for (var i = 0; i < m_workErrors.Length; i++)
{

var error = m_workErrors[i];

if (maxError != 0.0)
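The Boost method is truncated in this view. For readers unfamiliar with the R2 algorithm named in the class summary, below is a generic sketch of the AdaBoost.R2 weight update (Drucker's formulation): each error is normalized by the round's maximum error and the weights of well-predicted observations are shrunk. This is an orientation aid under those assumptions, not this class's exact code:

    // Generic AdaBoost.R2-style weight update, for orientation only.
    // errors[i]  - absolute prediction error of observation i in this round
    // weights[i] - current sample weight of observation i
    // requires: using System; using System.Linq;
    static void UpdateWeights(double[] weights, double[] errors, double learningRate)
    {
        var maxError = errors.Max();
        var weightSum = weights.Sum();

        // Linear loss: each error normalized by the largest error this round.
        var losses = errors
            .Select(e => maxError != 0.0 ? e / maxError : 0.0)
            .ToArray();

        // Weighted average loss and the resulting beta
        // (beta < 1 when the weighted average loss is below 0.5).
        var avgLoss = losses.Select((loss, i) => loss * weights[i] / weightSum).Sum();
        var beta = avgLoss / (1.0 - avgLoss);

        // Well-predicted observations (small loss) get the largest exponent and
        // therefore the strongest shrinkage, so later rounds focus on hard cases.
        for (var i = 0; i < weights.Length; i++)
        {
            weights[i] *= Math.Pow(beta, learningRate * (1.0 - losses[i]));
        }
    }
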
8 changes: 4 additions & 4 deletions src/SharpLearning.AdaBoost/WeightedRandomSampler.cs
@@ -47,26 +47,26 @@ public void Sample(int[] indices, double[] weights, int[] outIndices)
var totalWeight = weights.Sum(indices);
var i = 0;

var index = indices.First();
var index = indices[0];
var weight = weights[index];

var samples = outIndices.Length;
var current = 0;

while (samples > 0)
{
var x = totalWeight * (1.0 - Math.Pow(m_random.NextDouble(), (1.0 / samples)));
var x = totalWeight * (1.0 - Math.Pow(m_random.NextDouble(), 1.0 / samples));
totalWeight -= x;
while (x > weight)
{
x -= weight;
i += 1;
i++;
index = indices[i];
weight = weights[index];
}
weight -= x;
outIndices[current++] = index;
samples -= 1;
samples--;
}
}
}
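A short usage sketch of the sampler. The seed-taking constructor is assumed from the m_random field, and the weights array is indexed by the values in indices, since the implementation above reads weights[index]:

    // Draw 4 indices from 0..5, favoring the heavily weighted ones.
    var sampler = new WeightedRandomSampler(32); // constructor taking a random seed is assumed
    var indices = new[] { 0, 1, 2, 3, 4, 5 };
    var weights = new[] { 0.05, 0.05, 0.1, 0.1, 0.2, 0.5 };
    var outIndices = new int[4];

    sampler.Sample(indices, weights, outIndices);
    // outIndices now holds 4 draws, emitted in the order the indices are traversed;
    // index 5, carrying half of the total weight, is expected to appear roughly twice.
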
2 changes: 1 addition & 1 deletion src/SharpLearning.Common.Interfaces/IIndexedLearner.cs
@@ -3,7 +3,7 @@
namespace SharpLearning.Common.Interfaces;

/// <summary>
/// Interface for indexed learner.
/// Interface for indexed learner.
/// Only the observations from the provided indices in the index array will be used for training
/// </summary>
/// <typeparam name="TPrediction">The prediction type of the resulting model.</typeparam>
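Since the interface only guarantees that the observations at the supplied indices are used for training, here is a minimal sketch of indexed training with one of the learners from this commit; the three-argument Learn overload mirrors the signatures shown in the learner diffs above:

    // requires: using System.Linq;
    var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

    // Train on the even-numbered rows only; the rest could serve as a hold-out set.
    var trainIndices = Enumerable.Range(0, targets.Length)
        .Where(i => i % 2 == 0)
        .ToArray();

    IIndexedLearner<double> learner = new RegressionAdaBoostLearner(10);
    var model = learner.Learn(observations, targets, trainIndices);
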