From 7cea436f374acfd8fb76d2e0bde34a35c469842b Mon Sep 17 00:00:00 2001
From: sayanshaw24 <52221015+sayanshaw24@users.noreply.github.com>
Date: Mon, 1 Jul 2019 21:39:38 -0700
Subject: [PATCH 1/4] Reformatted Regression samples
---
.../Dynamic/Trainers/Regression/FastForest.cs | 51 +++++----
.../Dynamic/Trainers/Regression/FastForest.tt | 11 +-
.../Regression/FastForestWithOptions.cs | 47 +++++---
.../Regression/FastForestWithOptions.tt | 6 +-
.../Dynamic/Trainers/Regression/FastTree.cs | 51 +++++----
.../Dynamic/Trainers/Regression/FastTree.tt | 10 +-
.../Trainers/Regression/FastTreeTweedie.cs | 51 +++++----
.../Trainers/Regression/FastTreeTweedie.tt | 10 +-
.../Regression/FastTreeTweedieWithOptions.cs | 51 +++++----
.../Regression/FastTreeTweedieWithOptions.tt | 10 +-
.../Regression/FastTreeWithOptions.cs | 55 ++++++----
.../Regression/FastTreeWithOptions.tt | 14 ++-
.../Dynamic/Trainers/Regression/Gam.cs | 51 +++++----
.../Dynamic/Trainers/Regression/Gam.tt | 11 +-
.../Trainers/Regression/GamAdvanced.cs | 99 ++++++++++-------
.../Trainers/Regression/GamWithOptions.cs | 50 +++++----
.../Trainers/Regression/GamWithOptions.tt | 6 +-
.../Regression/GamWithOptionsAdvanced.cs | 100 +++++++++++-------
.../Regression/LbfgsPoissonRegression.cs | 46 +++++---
.../Regression/LbfgsPoissonRegression.tt | 5 +-
.../LbfgsPoissonRegressionWithOptions.cs | 50 +++++----
.../LbfgsPoissonRegressionWithOptions.tt | 6 +-
.../Dynamic/Trainers/Regression/LightGbm.cs | 50 ++++++---
.../Dynamic/Trainers/Regression/LightGbm.tt | 11 +-
.../Trainers/Regression/LightGbmAdvanced.cs | 49 +++++----
.../Regression/LightGbmWithOptions.cs | 51 +++++----
.../Regression/LightGbmWithOptions.tt | 9 +-
.../Regression/LightGbmWithOptionsAdvanced.cs | 59 ++++++-----
.../Regression/OnlineGradientDescent.cs | 48 ++++++---
.../Regression/OnlineGradientDescent.tt | 8 +-
.../OnlineGradientDescentWithOptions.cs | 50 +++++----
.../OnlineGradientDescentWithOptions.tt | 6 +-
.../Regression/OrdinaryLeastSquares.cs | 45 +++++---
.../Regression/OrdinaryLeastSquares.tt | 4 +-
.../OrdinaryLeastSquaresAdvanced.cs | 38 ++++---
.../OrdinaryLeastSquaresWithOptions.cs | 47 +++++---
.../OrdinaryLeastSquaresWithOptions.tt | 3 +-
...OrdinaryLeastSquaresWithOptionsAdvanced.cs | 38 ++++---
.../PermutationFeatureImportance.cs | 59 +++++++----
.../RegressionSamplesTemplate.ttinclude | 44 +++++---
.../Dynamic/Trainers/Regression/Sdca.cs | 45 +++++---
.../Dynamic/Trainers/Regression/Sdca.tt | 4 +-
.../Trainers/Regression/SdcaWithOptions.cs | 52 +++++----
.../Trainers/Regression/SdcaWithOptions.tt | 8 +-
44 files changed, 972 insertions(+), 547 deletions(-)
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs
index bcb132a815..206faadca1 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs
@@ -8,37 +8,47 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class FastForestRegression
{
- // This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree.
+
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.FastForest(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features));
+ var pipeline = mlContext.Regression.Trainers.FastForest(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features));
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -60,7 +70,8 @@ public static void Example()
// RSquared: 0.96 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -70,12 +81,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -95,10 +108,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.tt
index fbc1d6253d..39d3a0920b 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.tt
@@ -1,12 +1,17 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"// This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree. ";
+string ClassHeader = @"
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="FastForestRegression";
string ExtraUsing = null;
-string Trainer = @"FastForest(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features))";
+string Trainer = @"FastForest(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features))";
+
string TrainerOptions = null;
string ExpectedOutputPerInstance= @"// Expected output:
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs
index 780545f68a..f6b620d3ad 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs
@@ -9,19 +9,23 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class FastForestWithOptionsRegression
{
- // This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree.
+
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define trainer options.
@@ -43,16 +47,20 @@ public static void Example()
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -74,7 +82,8 @@ public static void Example()
// RSquared: 0.95 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -84,12 +93,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -109,10 +120,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.tt
index 00b3740ddb..32cf15eefd 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.tt
@@ -1,8 +1,10 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"// This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree. ";
+string ClassHeader = @"
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="FastForestWithOptionsRegression";
string ExtraUsing = "using Microsoft.ML.Trainers.FastTree;";
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs
index 7a46027bc6..39fcb2185f 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs
@@ -8,37 +8,47 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class FastTreeRegression
{
- // This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree.
+
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.FastTree(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features));
+ var pipeline = mlContext.Regression.Trainers.FastTree(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features));
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -60,7 +70,8 @@ public static void Example()
// RSquared: 0.99 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -70,12 +81,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -95,10 +108,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.tt
index 5c06288114..0efbd8ab64 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.tt
@@ -1,12 +1,16 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"// This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree. ";
+string ClassHeader = @"
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="FastTreeRegression";
string ExtraUsing = null;
-string Trainer = @"FastTree(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features))";
+string Trainer = @"FastTree(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features))";
string TrainerOptions = null;
string ExpectedOutputPerInstance= @"// Expected output:
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs
index 22e6ff2e01..6202e60560 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs
@@ -8,37 +8,47 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class FastTreeTweedieRegression
{
- // This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree.
+
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.FastTreeTweedie(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features));
+ var pipeline = mlContext.Regression.Trainers.FastTreeTweedie(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features));
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -60,7 +70,8 @@ public static void Example()
// RSquared: 0.96 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -70,12 +81,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -95,10 +108,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.tt
index 4ecbbf1134..c1b8e16fff 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.tt
@@ -1,12 +1,16 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"// This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree. ";
+string ClassHeader = @"
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="FastTreeTweedieRegression";
string ExtraUsing = null;
-string Trainer = @"FastTreeTweedie(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features))";
+string Trainer = @"FastTreeTweedie(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features))";
string TrainerOptions = null;
string ExpectedOutputPerInstance= @"// Expected output:
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs
index a0f8230954..84ab42a74e 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs
@@ -9,19 +9,23 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class FastTreeTweedieWithOptionsRegression
{
- // This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree.
+
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define trainer options.
@@ -30,7 +34,9 @@ public static void Example()
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
// Use L2Norm for early stopping.
- EarlyStoppingMetric = Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
+ EarlyStoppingMetric =
+ Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
+
// Create a simpler model by penalizing usage of new features.
FeatureFirstUsePenalty = 0.1,
// Reduce the number of trees to 50.
@@ -43,16 +49,20 @@ public static void Example()
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -74,7 +84,8 @@ public static void Example()
// RSquared: 0.98 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -84,12 +95,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -109,10 +122,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.tt
index 98d4d5e6a5..79e27df9a5 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.tt
@@ -1,8 +1,10 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"// This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree. ";
+string ClassHeader = @"
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="FastTreeTweedieWithOptionsRegression";
string ExtraUsing = "using Microsoft.ML.Trainers.FastTree;";
@@ -12,7 +14,9 @@ string TrainerOptions = @"FastTreeTweedieTrainer.Options
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
// Use L2Norm for early stopping.
- EarlyStoppingMetric = Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
+ EarlyStoppingMetric =
+ Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
+
// Create a simpler model by penalizing usage of new features.
FeatureFirstUsePenalty = 0.1,
// Reduce the number of trees to 50.
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs
index 1ed25cff1b..61bc6af6dd 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs
@@ -9,19 +9,23 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class FastTreeWithOptionsRegression
{
- // This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree.
+
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define trainer options.
@@ -29,9 +33,11 @@ public static void Example()
{
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
- // Use L2-norm for early stopping. If the gradient's L2-norm is smaller than
- // an auto-computed value, training process will stop.
- EarlyStoppingMetric = Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
+ // Use L2-norm for early stopping. If the gradient's L2-norm is
+ // smaller than an auto-computed value, training process will stop.
+ EarlyStoppingMetric =
+ Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
+
// Create a simpler model by penalizing usage of new features.
FeatureFirstUsePenalty = 0.1,
// Reduce the number of trees to 50.
@@ -44,16 +50,20 @@ public static void Example()
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -75,7 +85,8 @@ public static void Example()
// RSquared: 0.99 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -85,12 +96,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -110,10 +123,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.tt
index df768cf53d..a30304f65e 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.tt
@@ -1,8 +1,10 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"// This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree. ";
+string ClassHeader = @"
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="FastTreeWithOptionsRegression";
string ExtraUsing = "using Microsoft.ML.Trainers.FastTree;";
@@ -11,9 +13,11 @@ string TrainerOptions = @"FastTreeRegressionTrainer.Options
{
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
- // Use L2-norm for early stopping. If the gradient's L2-norm is smaller than
- // an auto-computed value, training process will stop.
- EarlyStoppingMetric = Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
+ // Use L2-norm for early stopping. If the gradient's L2-norm is
+ // smaller than an auto-computed value, training process will stop.
+ EarlyStoppingMetric =
+ Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
+
// Create a simpler model by penalizing usage of new features.
FeatureFirstUsePenalty = 0.1,
// Reduce the number of trees to 50.
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs
index f58450d9f8..0b4efa799a 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs
@@ -8,37 +8,47 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class Gam
{
- // This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree.
+
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.Gam(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features));
+ var pipeline = mlContext.Regression.Trainers.Gam(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features));
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -60,7 +70,8 @@ public static void Example()
// RSquared: 0.99 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -70,12 +81,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -95,10 +108,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.tt
index e928c0bcc2..8f9e138b11 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.tt
@@ -1,12 +1,17 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"// This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree. ";
+string ClassHeader = @"
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="Gam";
string ExtraUsing = null;
-string Trainer = @"Gam(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features))";
+string Trainer = @"Gam(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features))";
+
string TrainerOptions = null;
string ExpectedOutputPerInstance= @"// Expected output:
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamAdvanced.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamAdvanced.cs
index 419ee531ee..98485d0697 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamAdvanced.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamAdvanced.cs
@@ -7,11 +7,13 @@ namespace Samples.Dynamic.Trainers.BinaryClassification
{
public static class GamAdvanced
{
- // This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree.
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging,
// as a catalog of available operations and as the source of randomness.
var mlContext = new MLContext();
@@ -27,30 +29,36 @@ public static void Example()
var validSet = dataSets.TestSet;
// Create a GAM trainer.
- // Use a small number of bins for this example. The setting below means for each feature,
- // we divide its range into 16 discrete regions for the training process. Note that these
- // regions are not evenly spaced, and that the final model may contain fewer bins, as
- // neighboring bins with identical values will be combined. In general, we recommend using
- // at least the default number of bins, as a small number of bins limits the capacity of
- // the model.
- var trainer = mlContext.BinaryClassification.Trainers.Gam(maximumBinCountPerFeature: 16);
-
- // Fit the model using both of training and validation sets. GAM can use a technique called
- // pruning to tune the model to the validation set after training to improve generalization.
+ // Use a small number of bins for this example. The setting below means
+ // for each feature, we divide its range into 16 discrete regions for
+ // the training process. Note that these regions are not evenly spaced,
+ // and that the final model may contain fewer bins, as neighboring bins
+ // with identical values will be combined. In general, we recommend
+ // using at least the default number of bins, as a small number of bins
+ // limits the capacity of the model.
+ var trainer = mlContext.BinaryClassification.Trainers.Gam(
+ maximumBinCountPerFeature: 16);
+
+ // Fit the model using both the training and validation sets. GAM can use
+ // a technique called pruning to tune the model to the validation set
+ // after training to improve generalization.
var model = trainer.Fit(trainSet, validSet);
// Extract the model parameters.
var gam = model.Model.SubModel;
- // Now we can inspect the parameters of the Generalized Additive Model to understand the fit
- // and potentially learn about our dataset.
- // First, we will look at the bias; the bias represents the average prediction for the training data.
+ // Now we can inspect the parameters of the Generalized Additive Model
+ // to understand the fit and potentially learn about our dataset.
+ // First, we will look at the bias; the bias represents the average
+ // prediction for the training data.
Console.WriteLine($"Average prediction: {gam.Bias:0.00}");
- // Now look at the shape functions that the model has learned. Similar to a linear model, we have
- // one response per feature, and they are independent. Unlike a linear model, this response is a
- // generic function instead of a line. Because we have included a bias term, each feature response
- // represents the deviation from the average prediction as a function of the feature value.
+ // Now look at the shape functions that the model has learned. Similar
+ // to a linear model, we have one response per feature, and they are
+ // independent. Unlike a linear model, this response is a generic
+ // function instead of a line. Because we have included a bias term,
+ // each feature response represents the deviation from the average
+ // prediction as a function of the feature value.
for (int i = 0; i < gam.NumberOfShapeFunctions; i++)
{
// Break a line.
@@ -62,11 +70,13 @@ public static void Example()
// Get the bin effects; these are the function values for each bin.
var binEffects = gam.GetBinEffects(i);
- // Now, write the function to the console. The function is a set of bins, and the corresponding
- // function values. You can think of GAMs as building a bar-chart or lookup table for each feature.
+ // Now, write the function to the console. The function is a set of
+ // bins, and the corresponding function values. You can think of
+ // GAMs as building a bar-chart or lookup table for each feature.
Console.WriteLine($"Feature{i}");
for (int j = 0; j < binUpperBounds.Count; j++)
- Console.WriteLine($"x < {binUpperBounds[j]:0.00} => {binEffects[j]:0.000}");
+ Console.WriteLine(
+ $"x < {binUpperBounds[j]:0.00} => {binEffects[j]:0.000}");
}
// Expected output:
@@ -91,18 +101,23 @@ public static void Example()
// x < 0.31 => -0.138
// x < ∞ => -0.188
- // Let's consider this output. To score a given example, we look up the first bin where the inequality
- // is satisfied for the feature value. We can look at the whole function to get a sense for how the
- // model responds to the variable on a global level.
- // The model can be seen to reconstruct the parabolic and step-wise function, shifted with respect to the average
- // expected output over the training set. Very few bins are used to model the second feature because the GAM model
- // discards unchanged bins to create smaller models.
- // One last thing to notice is that these feature functions can be noisy. While we know that Feature1 should be
- // symmetric, this is not captured in the model. This is due to noise in the data. Common practice is to use
- // resampling methods to estimate a confidence interval at each bin. This will help to determine if the effect is
- // real or just sampling noise. See for example:
- // Tan, Caruana, Hooker, and Lou. "Distill-and-Compare: Auditing Black-Box Models Using Transparent Model
- // Distillation." arXiv:1710.06169."
+ // Let's consider this output. To score a given example, we look up the
+ // first bin where the inequality is satisfied for the feature value.
+ // We can look at the whole function to get a sense for how the model
+ // responds to the variable on a global level. The model can be seen to
+ // reconstruct the parabolic and step-wise function, shifted with
+ // respect to the average expected output over the training set. Very
+ // few bins are used to model the second feature because the GAM model
+ // discards unchanged bins to create smaller models. One last thing to
+ // notice is that these feature functions can be noisy. While we know
+ // that Feature1 should be symmetric, this is not captured in the model.
+ // This is due to noise in the data. Common practice is to use
+ // resampling methods to estimate a confidence interval at each bin.
+ // This will help to determine if the effect is real or just sampling
+ // noise. See for example: Tan, Caruana, Hooker, and Lou.
+ // "Distill-and-Compare: Auditing Black-Box Models Using Transparent
+ // Model Distillation."
+ // arXiv:1710.06169.
}
private class Data
@@ -114,13 +129,16 @@ private class Data
}
///
- /// Creates a dataset, an IEnumerable of Data objects, for a GAM sample. Feature1 is a parabola centered around 0,
- /// while Feature2 is a simple piecewise function.
+ /// Creates a dataset, an IEnumerable of Data objects, for a GAM sample.
+ /// Feature1 is a parabola centered around 0, while Feature2 is a simple
+ /// piecewise function.
///
/// The number of examples to generate.
- /// The seed for the random number generator used to produce data.
+ /// The seed for the random number generator used to
+ /// produce data.
///
- private static IEnumerable GenerateData(int numExamples = 25000, int seed = 1)
+ private static IEnumerable GenerateData(int numExamples = 25000,
+ int seed = 1)
{
var rng = new Random(seed);
float centeredFloat() => (float)(rng.NextDouble() - 0.5);
@@ -132,7 +150,8 @@ private static IEnumerable GenerateData(int numExamples = 25000, int seed
Features = new float[2] { centeredFloat(), centeredFloat() }
};
// Compute the label from the shape functions and add noise.
- data.Label = Sigmoid(Parabola(data.Features[0]) + SimplePiecewise(data.Features[1]) + centeredFloat()) > 0.5;
+ data.Label = Sigmoid(Parabola(data.Features[0]) + SimplePiecewise(
+ data.Features[1]) + centeredFloat()) > 0.5;
yield return data;
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs
index 004f90fb20..70ab559289 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs
@@ -9,19 +9,23 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class GamWithOptions
{
- // This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree.
+
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define trainer options.
@@ -36,21 +40,26 @@ public static void Example()
};
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.Gam(options);
+ var pipeline =
+ mlContext.Regression.Trainers.Gam(options);
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -72,7 +81,8 @@ public static void Example()
// RSquared: 0.98 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -82,12 +92,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -107,10 +119,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.tt
index cd45b0442b..fbd15768af 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.tt
@@ -1,8 +1,10 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"// This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree. ";
+string ClassHeader = @"
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="GamWithOptions";
string ExtraUsing = "using Microsoft.ML.Trainers.FastTree;";
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptionsAdvanced.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptionsAdvanced.cs
index 167f7aec2e..1f5b546239 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptionsAdvanced.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptionsAdvanced.cs
@@ -8,12 +8,14 @@ namespace Samples.Dynamic.Trainers.BinaryClassification
{
public static class GamWithOptionsAdvanced
{
- // This example requires installation of additional NuGet package
- // Microsoft.ML.FastTree.
+ // This example requires installation of additional NuGet package for
+ // Microsoft.ML.FastTree found at
+ // https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness.
var mlContext = new MLContext();
// Create the dataset.
@@ -28,14 +30,15 @@ public static void Example()
var validSet = dataSets.TestSet;
// Create a GAM trainer.
- // Use a small number of bins for this example. The setting below means for each feature,
- // we divide its range into 16 discrete regions for the training process. Note that these
- // regions are not evenly spaced, and that the final model may contain fewer bins, as
- // neighboring bins with identical values will be combined. In general, we recommend using
- // at least the default number of bins, as a small number of bins limits the capacity of
- // the model.
- // Also, set the learning rate to half the default to slow down the gradient descent, and
- // double the number of iterations to compensate.
+ // Use a small number of bins for this example. The setting below means
+ // for each feature, we divide its range into 16 discrete regions for
+ // the training process. Note that these regions are not evenly spaced,
+ // and that the final model may contain fewer bins, as neighboring bins
+ // with identical values will be combined. In general, we recommend
+ // using at least the default number of bins, as a small number of bins
+ // limits the capacity of the model. Also, set the learning rate to half
+ // the default to slow down the gradient descent, and double the number
+ // of iterations to compensate.
var trainer = mlContext.BinaryClassification.Trainers.Gam(
new GamBinaryTrainer.Options
{
@@ -44,22 +47,26 @@ public static void Example()
LearningRate = 0.001
});
- // Fit the model using both of training and validation sets. GAM can use a technique called
- // pruning to tune the model to the validation set after training to improve generalization.
+ // Fit the model using both the training and validation sets. GAM can use
+ // a technique called pruning to tune the model to the validation set
+ // after training to improve generalization.
var model = trainer.Fit(trainSet, validSet);
// Extract the model parameters.
var gam = model.Model.SubModel;
- // Now we can inspect the parameters of the Generalized Additive Model to understand the fit
- // and potentially learn about our dataset.
- // First, we will look at the bias; the bias represents the average prediction for the training data.
+ // Now we can inspect the parameters of the Generalized Additive Model
+ // to understand the fit and potentially learn about our dataset.
+ // First, we will look at the bias; the bias represents the average
+ // prediction for the training data.
Console.WriteLine($"Average prediction: {gam.Bias:0.00}");
- // Now look at the shape functions that the model has learned. Similar to a linear model, we have
- // one response per feature, and they are independent. Unlike a linear model, this response is a
- // generic function instead of a line. Because we have included a bias term, each feature response
- // represents the deviation from the average prediction as a function of the feature value.
+ // Now look at the shape functions that the model has learned. Similar
+ // to a linear model, we have one response per feature, and they are
+ // independent. Unlike a linear model, this response is a generic
+ // function instead of a line. Because we have included a bias term,
+ // each feature response represents the deviation from the average
+ // prediction as a function of the feature value.
for (int i = 0; i < gam.NumberOfShapeFunctions; i++)
{
// Break a line.
@@ -71,11 +78,13 @@ public static void Example()
// Get the bin effects; these are the function values for each bin.
var binEffects = gam.GetBinEffects(i);
- // Now, write the function to the console. The function is a set of bins, and the corresponding
- // function values. You can think of GAMs as building a bar-chart or lookup table for each feature.
+ // Now, write the function to the console. The function is a set of
+ // bins, and the corresponding function values. You can think of
+ // GAMs as building a bar-chart or lookup table for each feature.
Console.WriteLine($"Feature{i}");
for (int j = 0; j < binUpperBounds.Count; j++)
- Console.WriteLine($"x < {binUpperBounds[j]:0.00} => {binEffects[j]:0.000}");
+ Console.WriteLine(
+ $"x < {binUpperBounds[j]:0.00} => {binEffects[j]:0.000}");
}
// Expected output:
@@ -100,18 +109,23 @@ public static void Example()
// x < 0.31 => -0.138
// x < ∞ => -0.188
- // Let's consider this output. To score a given example, we look up the first bin where the inequality
- // is satisfied for the feature value. We can look at the whole function to get a sense for how the
- // model responds to the variable on a global level.
- // The model can be seen to reconstruct the parabolic and step-wise function, shifted with respect to the average
- // expected output over the training set. Very few bins are used to model the second feature because the GAM model
- // discards unchanged bins to create smaller models.
- // One last thing to notice is that these feature functions can be noisy. While we know that Feature1 should be
- // symmetric, this is not captured in the model. This is due to noise in the data. Common practice is to use
- // resampling methods to estimate a confidence interval at each bin. This will help to determine if the effect is
- // real or just sampling noise. See for example:
- // Tan, Caruana, Hooker, and Lou. "Distill-and-Compare: Auditing Black-Box Models Using Transparent Model
- // Distillation." arXiv:1710.06169."
+ // Let's consider this output. To score a given example, we look up the
+ // first bin where the inequality is satisfied for the feature value.
+ // We can look at the whole function to get a sense for how the model
+ // responds to the variable on a global level. The model can be seen to
+ // reconstruct the parabolic and step-wise function, shifted with
+ // respect to the average expected output over the training set. Very
+ // few bins are used to model the second feature because the GAM model
+ // discards unchanged bins to create smaller models. One last thing to
+ // notice is that these feature functions can be noisy. While we know
+ // that Feature1 should be symmetric, this is not captured in the model.
+ // This is due to noise in the data. Common practice is to use
+ // resampling methods to estimate a confidence interval at each bin.
+ // This will help to determine if the effect is real or just sampling
+ // noise. See for example: Tan, Caruana, Hooker, and Lou.
+ // "Distill-and-Compare: Auditing Black-Box Models Using Transparent
+ // Model Distillation."
+ // arXiv:1710.06169.
}
private class Data
@@ -123,13 +137,16 @@ private class Data
}
///
- /// Creates a dataset, an IEnumerable of Data objects, for a GAM sample. Feature1 is a parabola centered around 0,
- /// while Feature2 is a simple piecewise function.
+ /// Creates a dataset, an IEnumerable of Data objects, for a GAM sample.
+ /// Feature1 is a parabola centered around 0, while Feature2 is a simple
+ /// piecewise function.
///
/// The number of examples to generate.
- /// The seed for the random number generator used to produce data.
+ /// The seed for the random number generator used to
+ /// produce data.
///
- private static IEnumerable GenerateData(int numExamples = 25000, int seed = 1)
+ private static IEnumerable GenerateData(int numExamples = 25000,
+ int seed = 1)
{
var rng = new Random(seed);
float centeredFloat() => (float)(rng.NextDouble() - 0.5);
@@ -141,7 +158,8 @@ private static IEnumerable GenerateData(int numExamples = 25000, int seed
Features = new float[2] { centeredFloat(), centeredFloat() }
};
// Compute the label from the shape functions and add noise.
- data.Label = Sigmoid(Parabola(data.Features[0]) + SimplePiecewise(data.Features[1]) + centeredFloat()) > 0.5;
+ data.Label = Sigmoid(Parabola(data.Features[0]) + SimplePiecewise(
+ data.Features[1]) + centeredFloat()) > 0.5;
yield return data;
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs
index 854cddc537..391d8f9e08 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs
@@ -10,33 +10,42 @@ public static class LbfgsPoissonRegression
{
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.LbfgsPoissonRegression(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features));
+ var pipeline = mlContext.Regression.Trainers.
+ LbfgsPoissonRegression(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features));
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -58,7 +67,8 @@ public static void Example()
// RSquared: 0.93 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -68,12 +78,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -93,10 +105,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.tt
index 08d6d22969..2c410cf954 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.tt
@@ -4,7 +4,10 @@
string ClassHeader = null;
string ClassName="LbfgsPoissonRegression";
string ExtraUsing = null;
-string Trainer = "LbfgsPoissonRegression(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features))";
+string Trainer = @"
+ LbfgsPoissonRegression(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features))";
string TrainerOptions = null;
string ExpectedOutputPerInstance= @"// Expected output:
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.cs
index b649699f9c..8e8c577c88 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.cs
@@ -11,15 +11,17 @@ public static class LbfgsPoissonRegressionWithOptions
{
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define trainer options.
@@ -27,30 +29,37 @@ public static void Example()
{
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
- // Reduce optimization tolerance to speed up training at the cost of accuracy.
+ // Reduce optimization tolerance to speed up training at the cost of
+ // accuracy.
OptimizationTolerance = 1e-4f,
- // Decrease history size to speed up training at the cost of accuracy.
+ // Decrease history size to speed up training at the cost of
+ // accuracy.
HistorySize = 30,
// Specify scale for initial weights.
InitialWeightsDiameter = 0.2f
};
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.LbfgsPoissonRegression(options);
+ var pipeline =
+ mlContext.Regression.Trainers.LbfgsPoissonRegression(options);
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -72,7 +81,8 @@ public static void Example()
// RSquared: 0.89 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -82,12 +92,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -107,10 +119,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.tt
index f1513d9868..bbe4dbaefa 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.tt
@@ -9,9 +9,11 @@ string TrainerOptions = @"LbfgsPoissonRegressionTrainer.Options
{
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
- // Reduce optimization tolerance to speed up training at the cost of accuracy.
+ // Reduce optimization tolerance to speed up training at the cost of
+ // accuracy.
OptimizationTolerance = 1e-4f,
- // Decrease history size to speed up training at the cost of accuracy.
+ // Decrease history size to speed up training at the cost of
+ // accuracy.
HistorySize = 30,
// Specify scale for initial weights.
InitialWeightsDiameter = 0.2f
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs
index 4579280109..ff5bc28a25 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs
@@ -8,37 +8,48 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class LightGbm
{
+
// This example requires installation of additional NuGet package
- // Microsoft.ML.LightGBM.
+ // for Microsoft.ML.LightGBM
+ // at https://www.nuget.org/packages/Microsoft.ML.LightGbm/
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.LightGbm(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features));
+ var pipeline = mlContext.Regression.Trainers.
+ LightGbm(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features));
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -60,7 +71,8 @@ public static void Example()
// RSquared: 0.89 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -70,12 +82,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -95,10 +109,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.tt
index cb7f481342..01edbe9e5c 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.tt
@@ -1,12 +1,17 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"// This example requires installation of additional NuGet package
- // Microsoft.ML.LightGBM. ";
+string ClassHeader = @"
+ // This example requires installation of additional NuGet package
+ // for Microsoft.ML.LightGBM
+ // at https://www.nuget.org/packages/Microsoft.ML.LightGbm/";
string ClassName="LightGbm";
string ExtraUsing = null;
-string Trainer = @"LightGbm(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features))";
+string Trainer = @"
+ LightGbm(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features))";
string TrainerOptions = null;
string ExpectedOutputPerInstance= @"// Expected output:
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmAdvanced.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmAdvanced.cs
index 363d746305..feba7ca9d3 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmAdvanced.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmAdvanced.cs
@@ -7,15 +7,18 @@ namespace Samples.Dynamic.Trainers.Regression
{
class LightGbmAdvanced
{
- // This example requires installation of additional nuget package Microsoft.ML.LightGBM.
+ // This example requires installation of additional NuGet package
+ // for Microsoft.ML.LightGBM
+ // at https://www.nuget.org/packages/Microsoft.ML.LightGbm/
public static void Example()
{
- // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging,
- // as well as the source of randomness.
+ // Create a new ML context, for ML.NET operations. It can be used for
+ // exception tracking and logging, as well as the source of randomness.
var mlContext = new MLContext();
// Download and load the housing dataset into an IDataView.
- var dataView = Microsoft.ML.SamplesUtils.DatasetUtils.LoadHousingRegressionDataset(mlContext);
+ var dataView = Microsoft.ML.SamplesUtils.DatasetUtils
+ .LoadHousingRegressionDataset(mlContext);
//////////////////// Data Preview ////////////////////
/// Only 6 columns are displayed here.
@@ -33,26 +36,32 @@ public static void Example()
.Select(column => column.Name) // Get the column names
.Where(name => name != labelName) // Drop the Label
.ToArray();
- var pipeline = mlContext.Transforms.Concatenate("Features", featureNames)
- .Append(mlContext.Regression.Trainers.LightGbm(
- labelColumnName: labelName,
- numberOfLeaves: 4,
- minimumExampleCountPerLeaf: 6,
- learningRate: 0.001));
+ var pipeline = mlContext.Transforms.Concatenate("Features",
+ featureNames)
+ .Append(mlContext.Regression.Trainers.LightGbm(
+ labelColumnName: labelName,
+ numberOfLeaves: 4,
+ minimumExampleCountPerLeaf: 6,
+ learningRate: 0.001));
// Fit this pipeline to the training data.
var model = pipeline.Fit(split.TrainSet);
- // Get the feature importance based on the information gain used during training.
+ // Get the feature importance based on the information gain used during
+ // training.
VBuffer weights = default;
model.LastTransformer.Model.GetFeatureWeights(ref weights);
var weightsValues = weights.DenseValues().ToArray();
- Console.WriteLine($"weight 0 - {weightsValues[0]}"); // CrimesPerCapita (weight 0) = 0.1898361
- Console.WriteLine($"weight 5 - {weightsValues[5]}"); // RoomsPerDwelling (weight 5) = 1
+ Console.WriteLine($"weight 0 - {weightsValues[0]}");
+ // CrimesPerCapita (weight 0) = 0.1898361
+ Console.WriteLine($"weight 5 - {weightsValues[5]}");
+ // RoomsPerDwelling (weight 5) = 1
// Evaluate how the model is doing on the test data.
var dataWithPredictions = model.Transform(split.TestSet);
- var metrics = mlContext.Regression.Evaluate(dataWithPredictions, labelColumnName: labelName);
+ var metrics = mlContext.Regression.Evaluate(
+ dataWithPredictions,
+ labelColumnName: labelName);
PrintMetrics(metrics);
// Expected output
@@ -63,12 +72,14 @@ public static void Example()
// RSquared: 0.08
}
- public static void PrintMetrics(RegressionMetrics metrics)
+ private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs
index 24caa23360..d79a784e3c 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs
@@ -9,19 +9,23 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class LightGbmWithOptions
{
+
// This example requires installation of additional NuGet package
- // Microsoft.ML.LightGBM.
+ // for Microsoft.ML.LightGBM
+ // at https://www.nuget.org/packages/Microsoft.ML.LightGbm/
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define trainer options.
@@ -34,7 +38,8 @@ public static void Example()
// Each leaf contains at least this number of training data points.
MinimumExampleCountPerLeaf = 6,
// The step size per update. Using a large value might reduce the
- // training time but also increase the algorithm's numerical stability.
+ // training time but also increase the algorithm's numerical
+ // instability.
LearningRate = 0.001,
Booster = new Microsoft.ML.Trainers.LightGbm.GossBooster.Options()
{
@@ -44,21 +49,26 @@ public static void Example()
};
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.LightGbm(options);
+ var pipeline =
+ mlContext.Regression.Trainers.LightGbm(options);
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -80,7 +90,8 @@ public static void Example()
// RSquared: 0.97 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -90,12 +101,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -115,10 +128,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.tt
index 22b6522baf..80b861d094 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.tt
@@ -1,8 +1,10 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"// This example requires installation of additional NuGet package
- // Microsoft.ML.LightGBM. ";
+string ClassHeader = @"
+ // This example requires installation of additional NuGet package
+ // for Microsoft.ML.LightGBM
+ // at https://www.nuget.org/packages/Microsoft.ML.LightGbm/";
string ClassName="LightGbmWithOptions";
string ExtraUsing = "using Microsoft.ML.Trainers.LightGbm;";
@@ -16,7 +18,8 @@ string TrainerOptions = @"LightGbmRegressionTrainer.Options
// Each leaf contains at least this number of training data points.
MinimumExampleCountPerLeaf = 6,
// The step size per update. Using a large value might reduce the
- // training time but also increase the algorithm's numerical stability.
+ // training time but also decrease the algorithm's numerical
+ // stability.
LearningRate = 0.001,
Booster = new Microsoft.ML.Trainers.LightGbm.GossBooster.Options()
{
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptionsAdvanced.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptionsAdvanced.cs
index acc48bbdf7..ea98c44670 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptionsAdvanced.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptionsAdvanced.cs
@@ -8,11 +8,13 @@ namespace Samples.Dynamic.Trainers.Regression
{
class LightGbmWithOptionsAdvanced
{
- // This example requires installation of additional nuget package Microsoft.ML.LightGBM.
+ // This example requires installation of additional NuGet package
+ // for Microsoft.ML.LightGBM
+ // at https://www.nuget.org/packages/Microsoft.ML.LightGbm/
public static void Example()
{
- // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging,
- // as well as the source of randomness.
+ // Create a new ML context, for ML.NET operations. It can be used for
+ // exception tracking and logging, as well as the source of randomness.
var mlContext = new MLContext();
// Download and load the housing dataset into an IDataView.
@@ -35,33 +37,40 @@ public static void Example()
.Select(column => column.Name) // Get the column names
.Where(name => name != labelName) // Drop the Label
.ToArray();
- var pipeline = mlContext.Transforms.Concatenate("Features", featureNames)
- .Append(mlContext.Regression.Trainers.LightGbm(new LightGbmRegressionTrainer.Options
- {
- LabelColumnName = labelName,
- NumberOfLeaves = 4,
- MinimumExampleCountPerLeaf = 6,
- LearningRate = 0.001,
- Booster = new GossBooster.Options()
- {
- TopRate = 0.3,
- OtherRate = 0.2
- }
- }));
+ var pipeline = mlContext.Transforms.Concatenate(
+ "Features", featureNames)
+ .Append(mlContext.Regression.Trainers.LightGbm(
+ new LightGbmRegressionTrainer.Options
+ {
+ LabelColumnName = labelName,
+ NumberOfLeaves = 4,
+ MinimumExampleCountPerLeaf = 6,
+ LearningRate = 0.001,
+ Booster = new GossBooster.Options()
+ {
+ TopRate = 0.3,
+ OtherRate = 0.2
+ }
+ }));
// Fit this pipeline to the training data.
var model = pipeline.Fit(split.TrainSet);
- // Get the feature importance based on the information gain used during training.
+ // Get the feature importance based on the information gain used during
+ // training.
VBuffer weights = default;
model.LastTransformer.Model.GetFeatureWeights(ref weights);
var weightsValues = weights.DenseValues().ToArray();
- Console.WriteLine($"weight 0 - {weightsValues[0]}"); // CrimesPerCapita (weight 0) = 0.1898361
- Console.WriteLine($"weight 5 - {weightsValues[5]}"); // RoomsPerDwelling (weight 5) = 1
+ Console.WriteLine($"weight 0 - {weightsValues[0]}");
+ // CrimesPerCapita (weight 0) = 0.1898361
+ Console.WriteLine($"weight 5 - {weightsValues[5]}");
+ // RoomsPerDwelling (weight 5) = 1
// Evaluate how the model is doing on the test data.
var dataWithPredictions = model.Transform(split.TestSet);
- var metrics = mlContext.Regression.Evaluate(dataWithPredictions, labelColumnName: labelName);
+ var metrics = mlContext.Regression.Evaluate(
+ dataWithPredictions,
+ labelColumnName: labelName);
PrintMetrics(metrics);
// Expected output
@@ -74,10 +83,12 @@ public static void Example()
public static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs
index 3914d43c03..1f8d176269 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs
@@ -10,37 +10,46 @@ public static class OnlineGradientDescent
{
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.OnlineGradientDescent(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features));
+ var pipeline = mlContext.Regression.Trainers.OnlineGradientDescent(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features));
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
- // This trainer is not numerically stable. Please see issue #2425.
+
+ // This trainer is not numerically stable. Please see issue #2425.
// Evaluate the overall metrics
var metrics = mlContext.Regression.Evaluate(transformedTestData);
@@ -49,7 +58,8 @@ public static void Example()
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -59,12 +69,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -84,10 +96,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.tt
index 783f535261..59873acd57 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.tt
@@ -4,9 +4,13 @@
string ClassHeader = null;
string ClassName="OnlineGradientDescent";
string ExtraUsing = null;
-string Trainer = "OnlineGradientDescent(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features))";
+string Trainer = @"OnlineGradientDescent(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features))";
string TrainerOptions = null;
-string ExpectedOutputPerInstance= @"// This trainer is not numerically stable. Please see issue #2425.";
+string ExpectedOutputPerInstance= @"
+ // This trainer is not numerically stable. Please see issue #2425.";
+
string ExpectedOutput = @"";
#>
\ No newline at end of file
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs
index 36e3d187ca..e8a0984db4 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs
@@ -11,15 +11,17 @@ public static class OnlineGradientDescentWithOptions
{
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define trainer options.
@@ -38,34 +40,42 @@ public static void Example()
};
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.OnlineGradientDescent(options);
+ var pipeline =
+ mlContext.Regression.Trainers.OnlineGradientDescent(options);
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
- // This trainer is not numerically stable. Please see issue #2425.
+
+// This trainer is not numerically stable. Please see issue #2425.
// Evaluate the overall metrics
var metrics = mlContext.Regression.Evaluate(transformedTestData);
PrintMetrics(metrics);
- // This trainer is not numerically stable. Please see issue #2425.
+
+// This trainer is not numerically stable. Please see issue #2425.
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -75,12 +85,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -100,10 +112,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.tt
index 1616bf8fcb..13571d1720 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.tt
@@ -19,6 +19,8 @@ string TrainerOptions = @" OnlineGradientDescentTrainer.Options
InitialWeightsDiameter = 0.2f
}";
-string ExpectedOutputPerInstance= @"// This trainer is not numerically stable. Please see issue #2425.";
-string ExpectedOutput = @"// This trainer is not numerically stable. Please see issue #2425.";
+string ExpectedOutputPerInstance= @"
+// This trainer is not numerically stable. Please see issue #2425.";
+string ExpectedOutput = @"
+// This trainer is not numerically stable. Please see issue #2425.";
#>
\ No newline at end of file
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs
index 31ca5cfd9d..8d5c2deee8 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs
@@ -10,33 +10,41 @@ public static class Ols
{
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.Ols(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features));
+ var pipeline = mlContext.Regression.Trainers.Ols(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features));
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -58,7 +66,8 @@ public static void Example()
// RSquared: 0.97 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -68,12 +77,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -93,10 +104,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.tt
index b2aed0bae7..909ef50ba0 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.tt
@@ -4,7 +4,9 @@
string ClassHeader = null;
string ClassName="Ols";
string ExtraUsing = null;
-string Trainer = "Ols(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features))";
+string Trainer = @"Ols(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features))";
string TrainerOptions = null;
string ExpectedOutputPerInstance= @"// Expected output:
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresAdvanced.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresAdvanced.cs
index c5c3b2c097..dd09f21fef 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresAdvanced.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresAdvanced.cs
@@ -7,23 +7,29 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class OrdinaryLeastSquaresAdvanced
{
- // This example requires installation of additional nuget package Microsoft.ML.Mkl.Components.
- // In this examples we will use the housing price dataset. The goal is to predict median home value.
- // For more details about this dataset, please see https://archive.ics.uci.edu/ml/machine-learning-databases/housing/
+ // This example requires installation of additional NuGet package
+ // for Microsoft.ML.Mkl.Components at
+ // "https://www.nuget.org/packages/Microsoft.ML.Mkl.Components/"
+ // In this example we will use the housing price dataset. The goal is to
+ // predict median home value. For more details about this dataset, please
+ // see https://archive.ics.uci.edu/ml/machine-learning-databases/housing/
public static void Example()
{
- // Downloading a regression dataset from github.com/dotnet/machinelearning
- string dataFile = Microsoft.ML.SamplesUtils.DatasetUtils.DownloadHousingRegressionDataset();
+ // Downloading a regression dataset from
+ // github.com/dotnet/machinelearning
+ string dataFile = Microsoft.ML.SamplesUtils.DatasetUtils
+ .DownloadHousingRegressionDataset();
- // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging,
- // as well as the source of randomness.
+ // Create a new ML context, for ML.NET operations. It can be used for
+ // exception tracking and logging, as well as the source of randomness.
var mlContext = new MLContext(seed: 3);
// Creating a data loader, based on the format of the data
// The data is tab separated with all numeric columns.
// The first column being the label and rest are numeric features
// Here only seven numeric columns are used as features
- var dataView = mlContext.Data.LoadFromTextFile(dataFile, new TextLoader.Options
+ var dataView = mlContext.Data.LoadFromTextFile(dataFile,
+ new TextLoader.Options
{
Separators = new[] { '\t' },
HasHeader = true,
@@ -50,8 +56,10 @@ public static void Example()
// Check the weights that the model learned
var weightsValues = model.Model.Weights;
- Console.WriteLine($"weight 0 - {weightsValues[0]}"); // CrimesPerCapita (weight 0) = -0.1682112
- Console.WriteLine($"weight 3 - {weightsValues[3]}"); // CharlesRiver (weight 1) = 3.663493
+ Console.WriteLine($"weight 0 - {weightsValues[0]}");
+ // CrimesPerCapita (weight 0) = -0.1682112
+ Console.WriteLine($"weight 3 - {weightsValues[3]}");
+ // CharlesRiver (weight 3) = 3.663493
var dataWithPredictions = model.Transform(split.TestSet);
var metrics = mlContext.Regression.Evaluate(dataWithPredictions);
@@ -67,10 +75,12 @@ public static void Example()
public static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.cs
index c56b71cd33..79ccb3dd49 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.cs
@@ -11,15 +11,17 @@ public static class OlsWithOptions
{
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define trainer options.
@@ -29,26 +31,32 @@ public static void Example()
FeatureColumnName = nameof(DataPoint.Features),
// Larger values leads to smaller (closer to zero) model parameters.
L2Regularization = 0.1f,
- // Whether to computate standard error and other statistics of model parameters.
+ // Whether to compute standard error and other statistics of model
+ // parameters.
CalculateStatistics = false
};
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.Ols(options);
+ var pipeline =
+ mlContext.Regression.Trainers.Ols(options);
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -70,7 +78,8 @@ public static void Example()
// RSquared: 0.97 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -80,12 +89,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -105,10 +116,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.tt
index d37761637e..527620b141 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.tt
@@ -11,7 +11,8 @@ string TrainerOptions = @"OlsTrainer.Options
FeatureColumnName = nameof(DataPoint.Features),
// Larger values leads to smaller (closer to zero) model parameters.
L2Regularization = 0.1f,
- // Whether to computate standard error and other statistics of model parameters.
+ // Whether to compute standard error and other statistics of model
+ // parameters.
CalculateStatistics = false
}";
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptionsAdvanced.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptionsAdvanced.cs
index 69728ac5b9..e53b14cce7 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptionsAdvanced.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptionsAdvanced.cs
@@ -8,23 +8,28 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class OrdinaryLeastSquaresWithOptionsAdvanced
{
- // This example requires installation of additional nuget package Microsoft.ML.Mkl.Components.
- // In this examples we will use the housing price dataset. The goal is to predict median home value.
- // For more details about this dataset, please see https://archive.ics.uci.edu/ml/machine-learning-databases/housing/
+ // This example requires installation of additional NuGet package
+ // for Microsoft.ML.Mkl.Components at
+ // "https://www.nuget.org/packages/Microsoft.ML.Mkl.Components/"
+ // In this example we will use the housing price dataset. The goal is to
+ // predict median home value. For more details about this dataset, please
+ // see https://archive.ics.uci.edu/ml/machine-learning-databases/housing/
public static void Example()
{
- // Downloading a regression dataset from github.com/dotnet/machinelearning
+ // Downloading a regression dataset from
+ // github.com/dotnet/machinelearning
string dataFile = DatasetUtils.DownloadHousingRegressionDataset();
- // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging,
- // as well as the source of randomness.
+ // Create a new ML context, for ML.NET operations. It can be used for
+ // exception tracking and logging, as well as the source of randomness.
var mlContext = new MLContext(seed: 3);
// Creating a data loader, based on the format of the data
// The data is tab separated with all numeric columns.
// The first column being the label and rest are numeric features
// Here only seven numeric columns are used as features
- var dataView = mlContext.Data.LoadFromTextFile(dataFile, new TextLoader.Options
+ var dataView = mlContext.Data.LoadFromTextFile(dataFile,
+ new TextLoader.Options
{
Separators = new[] { '\t' },
HasHeader = true,
@@ -45,7 +50,8 @@ public static void Example()
// Create the estimator, here we only need OrdinaryLeastSquares trainer
// as data is already processed in a form consumable by the trainer
- var pipeline = mlContext.Regression.Trainers.Ols(new OlsTrainer.Options()
+ var pipeline = mlContext.Regression.Trainers.Ols(
+ new OlsTrainer.Options()
{
L2Regularization = 0.1f,
CalculateStatistics = false
@@ -54,8 +60,10 @@ public static void Example()
// Check the weights that the model learned
var weightsValues = model.Model.Weights;
- Console.WriteLine($"weight 0 - {weightsValues[0]}"); // CrimesPerCapita (weight 0) = -0.1783206
- Console.WriteLine($"weight 3 - {weightsValues[3]}"); // CharlesRiver (weight 1) = 3.118422
+ Console.WriteLine($"weight 0 - {weightsValues[0]}");
+ // CrimesPerCapita (weight 0) = -0.1783206
+ Console.WriteLine($"weight 3 - {weightsValues[3]}");
+ // CharlesRiver (weight 3) = 3.118422
var dataWithPredictions = model.Transform(split.TestSet);
var metrics = mlContext.Regression.Evaluate(dataWithPredictions);
@@ -71,10 +79,12 @@ public static void Example()
public static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/PermutationFeatureImportance.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/PermutationFeatureImportance.cs
index 90cf94db2a..dfa04d1b76 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/PermutationFeatureImportance.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/PermutationFeatureImportance.cs
@@ -9,8 +9,9 @@ public static class PermutationFeatureImportance
{
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness.
var mlContext = new MLContext(seed:1);
// Create sample data.
@@ -19,10 +20,14 @@ public static void Example()
// Load the sample data as an IDataView.
var data = mlContext.Data.LoadFromEnumerable(samples);
- // Define a training pipeline that concatenates features into a vector, normalizes them, and then
- // trains a linear model.
- var featureColumns = new string[] { nameof(Data.Feature1), nameof(Data.Feature2) };
- var pipeline = mlContext.Transforms.Concatenate("Features", featureColumns)
+ // Define a training pipeline that concatenates features into a vector,
+ // normalizes them, and then trains a linear model.
+ var featureColumns = new string[] { nameof(Data.Feature1),
+ nameof(Data.Feature2) };
+
+ var pipeline = mlContext.Transforms.Concatenate(
+ "Features",
+ featureColumns)
.Append(mlContext.Transforms.NormalizeMinMax("Features"))
.Append(mlContext.Regression.Trainers.Ols());
@@ -35,18 +40,29 @@ public static void Example()
// Extract the predictor.
var linearPredictor = model.LastTransformer;
- // Compute the permutation metrics for the linear model using the normalized data.
- var permutationMetrics = mlContext.Regression.PermutationFeatureImportance(
+ // Compute the permutation metrics for the linear model using the
+ // normalized data.
+ var permutationMetrics = mlContext.Regression
+ .PermutationFeatureImportance(
linearPredictor, transformedData, permutationCount: 30);
- // Now let's look at which features are most important to the model overall.
- // Get the feature indices sorted by their impact on RMSE.
- var sortedIndices = permutationMetrics.Select((metrics, index) => new { index, metrics.RootMeanSquaredError})
- .OrderByDescending(feature => Math.Abs(feature.RootMeanSquaredError.Mean))
+ // Now let's look at which features are most important to the model
+ // overall. Get the feature indices sorted by their impact on RMSE.
+ var sortedIndices = permutationMetrics
+ .Select((metrics, index) => new { index,
+ metrics.RootMeanSquaredError})
+
+ .OrderByDescending(feature => Math.Abs(
+ feature.RootMeanSquaredError.Mean))
+
.Select(feature => feature.index);
- Console.WriteLine("Feature\tModel Weight\tChange in RMSE\t95% Confidence in the Mean Change in RMSE");
- var rmse = permutationMetrics.Select(x => x.RootMeanSquaredError).ToArray();
+ Console.WriteLine("Feature\tModel Weight\tChange in RMSE\t95% " +
+ "Confidence in the Mean Change in RMSE");
+
+ var rmse = permutationMetrics.Select(x => x.RootMeanSquaredError)
+ .ToArray();
+
foreach (int i in sortedIndices)
{
Console.WriteLine("{0}\t{1:0.00}\t{2:G4}\t{3:G4}",
@@ -76,10 +92,14 @@ private class Data
/// linear combination of the features.
///
/// The number of examples.
- /// The bias, or offset, in the calculation of the label.
- /// The weight to multiply the first feature with to compute the label.
- /// The weight to multiply the second feature with to compute the label.
- /// The seed for generating feature values and label noise.
+ /// The bias, or offset, in the calculation of the label.
+ ///
+ /// The weight to multiply the first feature with to
+ /// compute the label.
+ /// The weight to multiply the second feature with to
+ /// compute the label.
+ /// The seed for generating feature values and label
+ /// noise.
/// An enumerable of Data objects.
private static IEnumerable GenerateData(int nExamples = 10000,
double bias = 0, double weight1 = 1, double weight2 = 2, int seed = 1)
@@ -94,7 +114,8 @@ private static IEnumerable GenerateData(int nExamples = 10000,
};
// Create a noisy label.
- data.Label = (float)(bias + weight1 * data.Feature1 + weight2 * data.Feature2 + rng.NextDouble() - 0.5);
+ data.Label = (float)(bias + weight1 * data.Feature1 + weight2 *
+ data.Feature2 + rng.NextDouble() - 0.5);
yield return data;
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/RegressionSamplesTemplate.ttinclude b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/RegressionSamplesTemplate.ttinclude
index f4193eefd4..f409f22fc0 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/RegressionSamplesTemplate.ttinclude
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/RegressionSamplesTemplate.ttinclude
@@ -16,15 +16,17 @@ namespace Samples.Dynamic.Trainers.Regression
<# } #>
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
<# if (TrainerOptions == null) { #>
@@ -35,22 +37,27 @@ namespace Samples.Dynamic.Trainers.Regression
var options = new <#=TrainerOptions#>;
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.<#=Trainer#>(options);
+ var pipeline =
+ mlContext.Regression.Trainers.<#=Trainer#>(options);
<# } #>
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -63,7 +70,8 @@ namespace Samples.Dynamic.Trainers.Regression
<#=ExpectedOutput#>
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -73,12 +81,14 @@ namespace Samples.Dynamic.Trainers.Regression
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -98,10 +108,12 @@ namespace Samples.Dynamic.Trainers.Regression
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
\ No newline at end of file
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.cs
index 460334f283..4a166ecd0e 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.cs
@@ -10,33 +10,41 @@ public static class Sdca
{
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.Sdca(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features));
+ var pipeline = mlContext.Regression.Trainers.Sdca(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features));
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -58,7 +66,8 @@ public static void Example()
// RSquared: 0.97 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -68,12 +77,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -93,10 +104,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.tt
index 2a74a40116..bd1581da6b 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.tt
@@ -4,7 +4,9 @@
string ClassHeader = null;
string ClassName="Sdca";
string ExtraUsing = null;
-string Trainer = "Sdca(labelColumnName: nameof(DataPoint.Label), featureColumnName: nameof(DataPoint.Features))";
+string Trainer = @"Sdca(
+ labelColumnName: nameof(DataPoint.Label),
+ featureColumnName: nameof(DataPoint.Features))";
string TrainerOptions = null;
string ExpectedOutputPerInstance= @"// Expected output:
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.cs
index c5c06350e7..6d3ea55ad9 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.cs
@@ -11,15 +11,17 @@ public static class SdcaWithOptions
{
public static void Example()
{
- // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
- // as a catalog of available operations and as the source of randomness.
- // Setting the seed to a fixed number in this example to make outputs deterministic.
+ // Create a new context for ML.NET operations. It can be used for
+ // exception tracking and logging, as a catalog of available operations
+ // and as the source of randomness. Setting the seed to a fixed number
+ // in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
- // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+ // Convert the list of data points to an IDataView object, which is
+ // consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define trainer options.
@@ -27,31 +29,38 @@ public static void Example()
{
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
- // Make the convergence tolerance tighter. It effectively leads to more training iterations.
+ // Make the convergence tolerance tighter. It effectively leads to
+ // more training iterations.
ConvergenceTolerance = 0.02f,
- // Increase the maximum number of passes over training data. Similar to ConvergenceTolerance,
- // this value specifics the hard iteration limit on the training algorithm.
+ // Increase the maximum number of passes over training data. Similar
+ // to ConvergenceTolerance, this value specifies the hard iteration
+ // limit on the training algorithm.
MaximumNumberOfIterations = 30,
// Increase learning rate for bias.
BiasLearningRate = 0.1f
};
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.Sdca(options);
+ var pipeline =
+ mlContext.Regression.Trainers.Sdca(options);
// Train the model.
var model = pipeline.Fit(trainingData);
- // Create testing data. Use different random seed to make it different from training data.
- var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(5, seed: 123));
+ // Create testing data. Use different random seed to make it different
+ // from training data.
+ var testData = mlContext.Data.LoadFromEnumerable(
+ GenerateRandomDataPoints(5, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
- var predictions = mlContext.Data.CreateEnumerable(transformedTestData, reuseRowObject: false).ToList();
+ var predictions = mlContext.Data.CreateEnumerable(
+ transformedTestData, reuseRowObject: false).ToList();
- // Look at 5 predictions for the Label, side by side with the actual Label for comparison.
+ // Look at 5 predictions for the Label, side by side with the actual
+ // Label for comparison.
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
@@ -73,7 +82,8 @@ public static void Example()
// RSquared: 0.97 (closer to 1 is better. The worest case is 0)
}
- private static IEnumerable GenerateRandomDataPoints(int count, int seed=0)
+ private static IEnumerable GenerateRandomDataPoints(int count,
+ int seed=0)
{
var random = new Random(seed);
for (int i = 0; i < count; i++)
@@ -83,12 +93,14 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se
{
Label = label,
// Create random features that are correlated with the label.
- Features = Enumerable.Repeat(label, 50).Select(x => x + (float)random.NextDouble()).ToArray()
+ Features = Enumerable.Repeat(label, 50).Select(
+ x => x + (float)random.NextDouble()).ToArray()
};
}
}
- // Example with label and 50 feature values. A data set is a collection of such examples.
+ // Example with label and 50 feature values. A data set is a collection of
+ // such examples.
private class DataPoint
{
public float Label { get; set; }
@@ -108,10 +120,12 @@ private class Prediction
// Print some evaluation metrics to regression problems.
private static void PrintMetrics(RegressionMetrics metrics)
{
- Console.WriteLine($"Mean Absolute Error: {metrics.MeanAbsoluteError:F2}");
- Console.WriteLine($"Mean Squared Error: {metrics.MeanSquaredError:F2}");
- Console.WriteLine($"Root Mean Squared Error: {metrics.RootMeanSquaredError:F2}");
- Console.WriteLine($"RSquared: {metrics.RSquared:F2}");
+ Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
+ Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
+ Console.WriteLine(
+ "Root Mean Squared Error: " + metrics.RootMeanSquaredError);
+
+ Console.WriteLine("RSquared: " + metrics.RSquared);
}
}
}
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.tt
index 083efe8cf6..471db62c26 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.tt
@@ -9,10 +9,12 @@ string TrainerOptions = @"SdcaRegressionTrainer.Options
{
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
- // Make the convergence tolerance tighter. It effectively leads to more training iterations.
+ // Make the convergence tolerance tighter. It effectively leads to
+ // more training iterations.
ConvergenceTolerance = 0.02f,
- // Increase the maximum number of passes over training data. Similar to ConvergenceTolerance,
- // this value specifics the hard iteration limit on the training algorithm.
+ // Increase the maximum number of passes over training data. Similar
+ // to ConvergenceTolerance, this value specifies the hard iteration
+ // limit on the training algorithm.
MaximumNumberOfIterations = 30,
// Increase learning rate for bias.
BiasLearningRate = 0.1f
From 1805d8c22362ce40466e3a26007e58a02ee4739f Mon Sep 17 00:00:00 2001
From: sayanshaw24 <52221015+sayanshaw24@users.noreply.github.com>
Date: Tue, 2 Jul 2019 08:55:10 -0700
Subject: [PATCH 2/4] Untabified comments in tt files.
---
.../Dynamic/Trainers/Regression/FastForest.cs | 2 +-
.../Dynamic/Trainers/Regression/FastForestWithOptions.cs | 5 +++--
.../Dynamic/Trainers/Regression/FastTree.cs | 2 +-
.../Dynamic/Trainers/Regression/FastTreeTweedie.cs | 2 +-
.../Trainers/Regression/FastTreeTweedieWithOptions.cs | 5 +++--
.../Dynamic/Trainers/Regression/FastTreeWithOptions.cs | 7 ++++---
.../Dynamic/Trainers/Regression/FastTreeWithOptions.tt | 2 +-
.../Dynamic/Trainers/Regression/Gam.cs | 2 +-
.../Dynamic/Trainers/Regression/GamWithOptions.cs | 2 +-
.../Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs | 2 +-
.../Regression/LbfgsPoissonRegressionWithOptions.cs | 2 +-
.../Dynamic/Trainers/Regression/LightGbm.cs | 2 +-
.../Dynamic/Trainers/Regression/LightGbmWithOptions.cs | 2 +-
.../Dynamic/Trainers/Regression/OnlineGradientDescent.cs | 2 +-
.../Regression/OnlineGradientDescentWithOptions.cs | 2 +-
.../Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs | 2 +-
.../Regression/OrdinaryLeastSquaresWithOptions.cs | 4 ++--
.../Regression/OrdinaryLeastSquaresWithOptions.tt | 2 +-
.../Regression/RegressionSamplesTemplate.ttinclude | 2 +-
.../Dynamic/Trainers/Regression/Sdca.cs | 2 +-
.../Dynamic/Trainers/Regression/SdcaWithOptions.cs | 8 ++++----
.../Dynamic/Trainers/Regression/SdcaWithOptions.tt | 6 +++---
22 files changed, 35 insertions(+), 32 deletions(-)
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs
index 206faadca1..0287d0c89a 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs
@@ -36,7 +36,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs
index f6b620d3ad..79e3c5bfb4 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs
@@ -42,13 +42,14 @@ public static void Example()
};
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.FastForest(options);
+ var pipeline =
+ mlContext.Regression.Trainers.FastForest(options);
// Train the model.
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs
index 39fcb2185f..6084a0738e 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs
@@ -36,7 +36,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs
index 6202e60560..01bcfa7122 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs
@@ -36,7 +36,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs
index 84ab42a74e..6d91bfe5f2 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs
@@ -44,13 +44,14 @@ public static void Example()
};
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.FastTreeTweedie(options);
+ var pipeline =
+ mlContext.Regression.Trainers.FastTreeTweedie(options);
// Train the model.
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs
index 61bc6af6dd..28225b7357 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs
@@ -34,7 +34,7 @@ public static void Example()
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
// Use L2-norm for early stopping. If the gradient's L2-norm is
- // smaller than an auto-computed value, training process will stop.
+ // smaller than an auto-computed value, training process will stop.
EarlyStoppingMetric =
Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
@@ -45,13 +45,14 @@ public static void Example()
};
// Define the trainer.
- var pipeline = mlContext.Regression.Trainers.FastTree(options);
+ var pipeline =
+ mlContext.Regression.Trainers.FastTree(options);
// Train the model.
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.tt
index a30304f65e..dbfe1d1907 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.tt
@@ -14,7 +14,7 @@ string TrainerOptions = @"FastTreeRegressionTrainer.Options
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
// Use L2-norm for early stopping. If the gradient's L2-norm is
- // smaller than an auto-computed value, training process will stop.
+ // smaller than an auto-computed value, training process will stop.
EarlyStoppingMetric =
Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs
index 0b4efa799a..701211cf5b 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs
@@ -36,7 +36,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs
index 70ab559289..6850fb27c6 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs
@@ -47,7 +47,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs
index 391d8f9e08..bf6978f300 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs
@@ -33,7 +33,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.cs
index 8e8c577c88..3c5a7da604 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.cs
@@ -47,7 +47,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs
index ff5bc28a25..39a856ea80 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs
@@ -37,7 +37,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs
index d79a784e3c..42c8d9c566 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs
@@ -56,7 +56,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs
index 1f8d176269..67c0fceb64 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs
@@ -32,7 +32,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs
index e8a0984db4..4130317368 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs
@@ -47,7 +47,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs
index 8d5c2deee8..1b50d80443 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs
@@ -32,7 +32,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.cs
index 79ccb3dd49..04b55ecfbe 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.cs
@@ -32,7 +32,7 @@ public static void Example()
// Larger values leads to smaller (closer to zero) model parameters.
L2Regularization = 0.1f,
// Whether to computate standard error and other statistics of model
- // parameters.
+ // parameters.
CalculateStatistics = false
};
@@ -44,7 +44,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.tt
index 527620b141..03c4fd961e 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.tt
@@ -12,7 +12,7 @@ string TrainerOptions = @"OlsTrainer.Options
// Larger values leads to smaller (closer to zero) model parameters.
L2Regularization = 0.1f,
// Whether to computate standard error and other statistics of model
- // parameters.
+ // parameters.
CalculateStatistics = false
}";
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/RegressionSamplesTemplate.ttinclude b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/RegressionSamplesTemplate.ttinclude
index f409f22fc0..6b8d3ff2b4 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/RegressionSamplesTemplate.ttinclude
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/RegressionSamplesTemplate.ttinclude
@@ -45,7 +45,7 @@ namespace Samples.Dynamic.Trainers.Regression
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.cs
index 4a166ecd0e..4247c1447e 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.cs
@@ -32,7 +32,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.cs
index 6d3ea55ad9..55e05ab3a1 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.cs
@@ -30,11 +30,11 @@ public static void Example()
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
// Make the convergence tolerance tighter. It effectively leads to
- // more training iterations.
+ // more training iterations.
ConvergenceTolerance = 0.02f,
// Increase the maximum number of passes over training data. Similar
- // to ConvergenceTolerance, this value specifics the hard iteration
- // limit on the training algorithm.
+ // to ConvergenceTolerance, this value specifies the hard iteration
+ // limit on the training algorithm.
MaximumNumberOfIterations = 30,
// Increase learning rate for bias.
BiasLearningRate = 0.1f
@@ -48,7 +48,7 @@ public static void Example()
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
- // from training data.
+ // from training data.
var testData = mlContext.Data.LoadFromEnumerable(
GenerateRandomDataPoints(5, seed: 123));
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.tt
index 471db62c26..5c350caf66 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.tt
@@ -10,11 +10,11 @@ string TrainerOptions = @"SdcaRegressionTrainer.Options
LabelColumnName = nameof(DataPoint.Label),
FeatureColumnName = nameof(DataPoint.Features),
// Make the convergence tolerance tighter. It effectively leads to
- // more training iterations.
+ // more training iterations.
ConvergenceTolerance = 0.02f,
// Increase the maximum number of passes over training data. Similar
- // to ConvergenceTolerance, this value specifics the hard iteration
- // limit on the training algorithm.
+ // to ConvergenceTolerance, this value specifies the hard iteration
+ // limit on the training algorithm.
MaximumNumberOfIterations = 30,
// Increase learning rate for bias.
BiasLearningRate = 0.1f
From d272b43b025195d4235ab814f42737b819df77e8 Mon Sep 17 00:00:00 2001
From: sayanshaw24 <52221015+sayanshaw24@users.noreply.github.com>
Date: Tue, 2 Jul 2019 15:39:01 -0700
Subject: [PATCH 3/4] Removed extra lines
---
.../Dynamic/Trainers/Regression/FastForest.cs | 5 ++---
.../Dynamic/Trainers/Regression/FastForest.tt | 5 ++---
.../Dynamic/Trainers/Regression/FastForestWithOptions.cs | 5 ++---
.../Dynamic/Trainers/Regression/FastForestWithOptions.tt | 5 ++---
.../Dynamic/Trainers/Regression/FastTree.cs | 5 ++---
.../Dynamic/Trainers/Regression/FastTree.tt | 5 ++---
.../Dynamic/Trainers/Regression/FastTreeTweedie.cs | 5 ++---
.../Dynamic/Trainers/Regression/FastTreeTweedie.tt | 5 ++---
.../Trainers/Regression/FastTreeTweedieWithOptions.cs | 7 +++----
.../Trainers/Regression/FastTreeTweedieWithOptions.tt | 7 +++----
.../Dynamic/Trainers/Regression/FastTreeWithOptions.cs | 7 +++----
.../Dynamic/Trainers/Regression/FastTreeWithOptions.tt | 7 +++----
.../Dynamic/Trainers/Regression/Gam.cs | 5 ++---
.../Dynamic/Trainers/Regression/Gam.tt | 5 ++---
.../Dynamic/Trainers/Regression/GamWithOptions.cs | 5 ++---
.../Dynamic/Trainers/Regression/GamWithOptions.tt | 5 ++---
.../Dynamic/Trainers/Regression/LightGbm.cs | 5 ++---
.../Dynamic/Trainers/Regression/LightGbm.tt | 5 ++---
.../Dynamic/Trainers/Regression/LightGbmWithOptions.cs | 5 ++---
.../Dynamic/Trainers/Regression/LightGbmWithOptions.tt | 5 ++---
20 files changed, 44 insertions(+), 64 deletions(-)
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs
index 0287d0c89a..1cb2c7c466 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs
@@ -8,9 +8,8 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class FastForestRegression
{
-
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+ // This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.tt
index 39d3a0920b..92b968f921 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.tt
@@ -1,9 +1,8 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+string ClassHeader = @"// This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="FastForestRegression";
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs
index 79e3c5bfb4..5482db46be 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs
@@ -9,9 +9,8 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class FastForestWithOptionsRegression
{
-
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+ // This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.tt
index 32cf15eefd..b32a5920b0 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.tt
@@ -1,9 +1,8 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+string ClassHeader = @"// This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="FastForestWithOptionsRegression";
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs
index 6084a0738e..24f4626727 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs
@@ -8,9 +8,8 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class FastTreeRegression
{
-
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+ // This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.tt
index 0efbd8ab64..b23398c9fd 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.tt
@@ -1,9 +1,8 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+string ClassHeader = @"// This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="FastTreeRegression";
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs
index 01bcfa7122..497919cdcb 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs
@@ -8,9 +8,8 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class FastTreeTweedieRegression
{
-
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+ // This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.tt
index c1b8e16fff..9befcb25e1 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.tt
@@ -1,9 +1,8 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+string ClassHeader = @"// This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="FastTreeTweedieRegression";
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs
index 6d91bfe5f2..eb4de40868 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs
@@ -9,9 +9,8 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class FastTreeTweedieWithOptionsRegression
{
-
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+ // This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
@@ -35,7 +34,7 @@ public static void Example()
FeatureColumnName = nameof(DataPoint.Features),
// Use L2Norm for early stopping.
EarlyStoppingMetric =
- Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
+ Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
// Create a simpler model by penalizing usage of new features.
FeatureFirstUsePenalty = 0.1,
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.tt
index 79e27df9a5..1c37bbe506 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.tt
@@ -1,9 +1,8 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+string ClassHeader = @"// This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="FastTreeTweedieWithOptionsRegression";
@@ -15,7 +14,7 @@ string TrainerOptions = @"FastTreeTweedieTrainer.Options
FeatureColumnName = nameof(DataPoint.Features),
// Use L2Norm for early stopping.
EarlyStoppingMetric =
- Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
+ Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
// Create a simpler model by penalizing usage of new features.
FeatureFirstUsePenalty = 0.1,
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs
index 28225b7357..235a298d82 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs
@@ -9,9 +9,8 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class FastTreeWithOptionsRegression
{
-
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+ // This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
@@ -36,7 +35,7 @@ public static void Example()
// Use L2-norm for early stopping. If the gradient's L2-norm is
// smaller than an auto-computed value, training process will stop.
EarlyStoppingMetric =
- Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
+ Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
// Create a simpler model by penalizing usage of new features.
FeatureFirstUsePenalty = 0.1,
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.tt
index dbfe1d1907..cb80ea4566 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.tt
@@ -1,9 +1,8 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+string ClassHeader = @"// This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="FastTreeWithOptionsRegression";
@@ -16,7 +15,7 @@ string TrainerOptions = @"FastTreeRegressionTrainer.Options
// Use L2-norm for early stopping. If the gradient's L2-norm is
// smaller than an auto-computed value, training process will stop.
EarlyStoppingMetric =
- Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
+ Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm,
// Create a simpler model by penalizing usage of new features.
FeatureFirstUsePenalty = 0.1,
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs
index 701211cf5b..f1c568ce9c 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs
@@ -8,9 +8,8 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class Gam
{
-
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+ // This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.tt
index 8f9e138b11..06a0d9bf0b 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.tt
@@ -1,9 +1,8 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+string ClassHeader = @"// This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="Gam";
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs
index 6850fb27c6..1b69ae8764 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs
@@ -9,9 +9,8 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class GamWithOptions
{
-
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+ // This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/
public static void Example()
{
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.tt
index fbd15768af..c15a436f13 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.tt
@@ -1,9 +1,8 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"
- // This example requires installation of additional NuGet package for
- // Microsoft.ML.FastTree found at
+string ClassHeader = @"// This example requires installation of additional NuGet
+ // package for Microsoft.ML.FastTree found at
// https://www.nuget.org/packages/Microsoft.ML.FastTree/";
string ClassName="GamWithOptions";
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs
index 39a856ea80..4502682f26 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs
@@ -8,9 +8,8 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class LightGbm
{
-
- // This example requires installation of additional NuGet package
- // for Microsoft.ML.LightGBM
+ // This example requires installation of additional NuGet
+ // package for Microsoft.ML.LightGBM
// at https://www.nuget.org/packages/Microsoft.ML.LightGbm/
public static void Example()
{
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.tt
index 01edbe9e5c..c3af40c187 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.tt
@@ -1,9 +1,8 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"
- // This example requires installation of additional NuGet package
- // for Microsoft.ML.LightGBM
+string ClassHeader = @"// This example requires installation of additional NuGet
+ // package for Microsoft.ML.LightGBM
// at https://www.nuget.org/packages/Microsoft.ML.LightGbm/";
string ClassName="LightGbm";
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs
index 42c8d9c566..fb9b783631 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs
@@ -9,9 +9,8 @@ namespace Samples.Dynamic.Trainers.Regression
{
public static class LightGbmWithOptions
{
-
- // This example requires installation of additional NuGet package
- // for Microsoft.ML.LightGBM
+ // This example requires installation of additional NuGet
+ // package for Microsoft.ML.LightGBM
// at https://www.nuget.org/packages/Microsoft.ML.LightGbm/
public static void Example()
{
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.tt
index 80b861d094..b1105186db 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.tt
@@ -1,9 +1,8 @@
<#@ include file="RegressionSamplesTemplate.ttinclude"#>
<#+
-string ClassHeader = @"
- // This example requires installation of additional NuGet package
- // for Microsoft.ML.LightGBM
+string ClassHeader = @"// This example requires installation of additional NuGet
+ // package for Microsoft.ML.LightGBM
// at https://www.nuget.org/packages/Microsoft.ML.LightGbm/";
string ClassName="LightGbmWithOptions";
From e418be160ff18dec08be2522c030f87cd84971d8 Mon Sep 17 00:00:00 2001
From: sayanshaw24 <52221015+sayanshaw24@users.noreply.github.com>
Date: Tue, 2 Jul 2019 16:24:26 -0700
Subject: [PATCH 4/4] Removed extra lines and added necessary indents
---
.../Dynamic/Trainers/Regression/OnlineGradientDescent.cs | 4 ++--
.../Dynamic/Trainers/Regression/OnlineGradientDescent.tt | 4 ++--
.../Regression/OnlineGradientDescentWithOptions.cs | 8 ++++----
.../Regression/OnlineGradientDescentWithOptions.tt | 8 ++++----
4 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs
index 67c0fceb64..e02d4c626f 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs
@@ -48,8 +48,8 @@ public static void Example()
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
-
- // This trainer is not numerically stable. Please see issue #2425.
+ // This trainer is not numerically stable.
+ // Please see issue #2425.
// Evaluate the overall metrics
var metrics = mlContext.Regression.Evaluate(transformedTestData);
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.tt
index 59873acd57..e9579e9121 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.tt
@@ -9,8 +9,8 @@ string Trainer = @"OnlineGradientDescent(
featureColumnName: nameof(DataPoint.Features))";
string TrainerOptions = null;
-string ExpectedOutputPerInstance= @"
- // This trainer is not numerically stable. Please see issue #2425.";
+string ExpectedOutputPerInstance= @"// This trainer is not numerically stable.
+ // Please see issue #2425.";
string ExpectedOutput = @"";
#>
\ No newline at end of file
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs
index 4130317368..59a7d1fdbc 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs
@@ -63,15 +63,15 @@ public static void Example()
foreach (var p in predictions)
Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");
-
-// This trainer is not numerically stable. Please see issue #2425.
+ // This trainer is not numerically stable.
+ // Please see issue #2425.
// Evaluate the overall metrics
var metrics = mlContext.Regression.Evaluate(transformedTestData);
PrintMetrics(metrics);
-
-// This trainer is not numerically stable. Please see issue #2425.
+ // This trainer is not numerically stable. Please see
+ // issue #2425.
}
private static IEnumerable GenerateRandomDataPoints(int count,
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.tt
index 13571d1720..c794a418a2 100644
--- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.tt
+++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.tt
@@ -19,8 +19,8 @@ string TrainerOptions = @" OnlineGradientDescentTrainer.Options
InitialWeightsDiameter = 0.2f
}";
-string ExpectedOutputPerInstance= @"
-// This trainer is not numerically stable. Please see issue #2425.";
-string ExpectedOutput = @"
-// This trainer is not numerically stable. Please see issue #2425.";
+string ExpectedOutputPerInstance= @"// This trainer is not numerically stable.
+ // Please see issue #2425.";
+string ExpectedOutput = @"// This trainer is not numerically stable. Please see
+ // issue #2425.";
#>
\ No newline at end of file