diff --git a/Microsoft.ML.AutoML.sln b/Microsoft.ML.AutoML.sln
index bbe5810a67..79992506c3 100644
--- a/Microsoft.ML.AutoML.sln
+++ b/Microsoft.ML.AutoML.sln
@@ -12,6 +12,10 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "mlnet.Tests", "test\mlnet.T
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.AutoML.Samples", "docs\samples\Microsoft.ML.AutoML.Samples\Microsoft.ML.AutoML.Samples.csproj", "{6E84E7C5-FECE-45A9-AD4C-4B0F39F78904}"
EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.InternalCodeAnalyzer", "tools-local\Microsoft.ML.InternalCodeAnalyzer\Microsoft.ML.InternalCodeAnalyzer.csproj", "{954F14B0-BBDF-4A30-811E-2D86FDB5B399}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tools-local", "tools-local", "{2DEB692B-D982-4301-A074-DAA095658230}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -82,10 +86,25 @@ Global
{6E84E7C5-FECE-45A9-AD4C-4B0F39F78904}.Release-Intrinsics|Any CPU.Build.0 = Release-Intrinsics|Any CPU
{6E84E7C5-FECE-45A9-AD4C-4B0F39F78904}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
{6E84E7C5-FECE-45A9-AD4C-4B0F39F78904}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399}.Debug-Intrinsics|Any CPU.ActiveCfg = Debug-Intrinsics|Any CPU
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399}.Debug-Intrinsics|Any CPU.Build.0 = Debug-Intrinsics|Any CPU
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399}.Release|Any CPU.Build.0 = Release|Any CPU
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399}.Release-Intrinsics|Any CPU.ActiveCfg = Release-Intrinsics|Any CPU
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399}.Release-Intrinsics|Any CPU.Build.0 = Release-Intrinsics|Any CPU
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
+ GlobalSection(NestedProjects) = preSolution
+ {954F14B0-BBDF-4A30-811E-2D86FDB5B399} = {2DEB692B-D982-4301-A074-DAA095658230}
+ EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {8C1BC26C-B87E-47CD-928E-00EFE4353B40}
EndGlobalSection
diff --git a/src/Microsoft.ML.AutoML/API/AutoCatalog.cs b/src/Microsoft.ML.AutoML/API/AutoCatalog.cs
index 803b3cca65..30a4d7b803 100644
--- a/src/Microsoft.ML.AutoML/API/AutoCatalog.cs
+++ b/src/Microsoft.ML.AutoML/API/AutoCatalog.cs
@@ -27,9 +27,9 @@ internal AutoCatalog(MLContext context)
/// See for a more detailed code example of an AutoML regression experiment.
/// An experiment may run for longer than .
/// This is because once AutoML starts training an ML.NET model, AutoML lets the
- /// model train to completion. For instance, if the first model
+ /// model train to completion. For instance, if the first model
/// AutoML trains takes 4 hours, and the second model trained takes 5 hours,
- /// but was the number of seconds in 6 hours,
+ /// but was the number of seconds in 6 hours,
/// the experiment will run for 4 + 5 = 9 hours (not 6 hours).
///
public RegressionExperiment CreateRegressionExperiment(uint maxExperimentTimeInSeconds)
@@ -62,9 +62,9 @@ public RegressionExperiment CreateRegressionExperiment(RegressionExperimentSetti
/// See for a more detailed code example of an AutoML binary classification experiment.
/// An experiment may run for longer than .
/// This is because once AutoML starts training an ML.NET model, AutoML lets the
- /// model train to completion. For instance, if the first model
+ /// model train to completion. For instance, if the first model
/// AutoML trains takes 4 hours, and the second model trained takes 5 hours,
- /// but was the number of seconds in 6 hours,
+ /// but was the number of seconds in 6 hours,
/// the experiment will run for 4 + 5 = 9 hours (not 6 hours).
///
public BinaryClassificationExperiment CreateBinaryClassificationExperiment(uint maxExperimentTimeInSeconds)
@@ -97,9 +97,9 @@ public BinaryClassificationExperiment CreateBinaryClassificationExperiment(Binar
/// See for a more detailed code example of an AutoML multiclass classification experiment.
/// An experiment may run for longer than .
/// This is because once AutoML starts training an ML.NET model, AutoML lets the
- /// model train to completion. For instance, if the first model
+ /// model train to completion. For instance, if the first model
/// AutoML trains takes 4 hours, and the second model trained takes 5 hours,
- /// but was the number of seconds in 6 hours,
+ /// but was the number of seconds in 6 hours,
/// the experiment will run for 4 + 5 = 9 hours (not 6 hours).
///
public MulticlassClassificationExperiment CreateMulticlassClassificationExperiment(uint maxExperimentTimeInSeconds)
@@ -136,15 +136,15 @@ public MulticlassClassificationExperiment CreateMulticlassClassificationExperime
/// Information inferred about the columns in the provided dataset.
///
/// Infers information about the name, data type, and purpose of each column.
- /// The returned can be used to
- /// instantiate a . The can be used to
- /// obtain an that can be fed into an AutoML experiment,
+ /// The returned can be used to
+ /// instantiate a . The can be used to
+ /// obtain an that can be fed into an AutoML experiment,
/// or used elsewhere in the ML.NET ecosystem (ie in .
/// The contains the inferred purpose of each column in the dataset.
/// (For instance, is the column categorical, numeric, or text data? Should the column be ignored? Etc.)
/// The can be inspected and modified (or kept as is) and used by an AutoML experiment.
///
- public ColumnInferenceResults InferColumns(string path, string labelColumnName = DefaultColumnNames.Label, char? separatorChar = null, bool? allowQuoting = null,
+ public ColumnInferenceResults InferColumns(string path, string labelColumnName = DefaultColumnNames.Label, char? separatorChar = null, bool? allowQuoting = null,
bool? allowSparse = null, bool trimWhitespace = false, bool groupColumns = true)
{
UserInputValidationUtil.ValidateInferColumnsArgs(path, labelColumnName);
@@ -164,9 +164,9 @@ public ColumnInferenceResults InferColumns(string path, string labelColumnName =
/// Information inferred about the columns in the provided dataset.
///
/// Infers information about the name, data type, and purpose of each column.
- /// The returned can be used to
- /// instantiate a . The can be used to
- /// obtain an that can be fed into an AutoML experiment,
+ /// The returned can be used to
+ /// instantiate a . The can be used to
+ /// obtain an that can be fed into an AutoML experiment,
/// or used elsewhere in the ML.NET ecosystem (ie in .
/// The contains the inferred purpose of each column in the dataset.
/// (For instance, is the column categorical, numeric, or text data? Should the column be ignored? Etc.)
@@ -194,15 +194,15 @@ public ColumnInferenceResults InferColumns(string path, ColumnInformation column
/// Information inferred about the columns in the provided dataset.
///
/// Infers information about the name, data type, and purpose of each column.
- /// The returned can be used to
- /// instantiate a . The can be used to
- /// obtain an that can be fed into an AutoML experiment,
+ /// The returned can be used to
+ /// instantiate a . The can be used to
+ /// obtain an that can be fed into an AutoML experiment,
/// or used elsewhere in the ML.NET ecosystem (ie in .
/// The contains the inferred purpose of each column in the dataset.
/// (For instance, is the column categorical, numeric, or text data? Should the column be ignored? Etc.)
/// The can be inspected and modified (or kept as is) and used by an AutoML experiment.
///
- public ColumnInferenceResults InferColumns(string path, uint labelColumnIndex, bool hasHeader = false, char? separatorChar = null,
+ public ColumnInferenceResults InferColumns(string path, uint labelColumnIndex, bool hasHeader = false, char? separatorChar = null,
bool? allowQuoting = null, bool? allowSparse = null, bool trimWhitespace = false, bool groupColumns = true)
{
UserInputValidationUtil.ValidateInferColumnsArgs(path);
diff --git a/src/Microsoft.ML.AutoML/API/BinaryClassificationExperiment.cs b/src/Microsoft.ML.AutoML/API/BinaryClassificationExperiment.cs
index e268528507..84f539c932 100644
--- a/src/Microsoft.ML.AutoML/API/BinaryClassificationExperiment.cs
+++ b/src/Microsoft.ML.AutoML/API/BinaryClassificationExperiment.cs
@@ -13,23 +13,30 @@
namespace Microsoft.ML.AutoML
{
///
- /// Settings for AutoML experiments on binary classification datasets.
+ /// Settings for AutoML experiments on binary classification datasets.
///
public sealed class BinaryExperimentSettings : ExperimentSettings
{
///
/// Metric that AutoML will try to optimize over the course of the experiment.
///
- public BinaryClassificationMetric OptimizingMetric { get; set; } = BinaryClassificationMetric.Accuracy;
+ /// The default value is .
+ public BinaryClassificationMetric OptimizingMetric { get; set; }
///
/// Collection of trainers the AutoML experiment can leverage.
///
- ///
- /// The collection is auto-populated with all possible trainers (all values of ).
- ///
- public ICollection Trainers { get; } =
- Enum.GetValues(typeof(BinaryClassificationTrainer)).OfType().ToList();
+ /// The default value is a collection auto-populated with all possible trainers (all values of ).
+ public ICollection Trainers { get; }
+
+ ///
+ /// Initializes a new instance of .
+ ///
+ public BinaryExperimentSettings()
+ {
+ OptimizingMetric = BinaryClassificationMetric.Accuracy;
+ Trainers = Enum.GetValues(typeof(BinaryClassificationTrainer)).OfType().ToList();
+ }
}
///
diff --git a/src/Microsoft.ML.AutoML/API/ColumnInference.cs b/src/Microsoft.ML.AutoML/API/ColumnInference.cs
index 602bd9227b..0c7d834613 100644
--- a/src/Microsoft.ML.AutoML/API/ColumnInference.cs
+++ b/src/Microsoft.ML.AutoML/API/ColumnInference.cs
@@ -20,18 +20,18 @@ public sealed class ColumnInferenceResults
/// Can be used to instantiate a new to load
/// data into an .
///
- public TextLoader.Options TextLoaderOptions { get; internal set; } = new TextLoader.Options();
+ public TextLoader.Options TextLoaderOptions { get; internal set; }
///
/// Information about the inferred columns in the dataset.
///
///
/// Contains the inferred purposes of each column. See for more details.
- /// This can be fed to the AutoML API when running an experiment.
+ /// This can be fed to the AutoML API when running an experiment.
/// See
/// for example.
///
- public ColumnInformation ColumnInformation { get; internal set; } = new ColumnInformation();
+ public ColumnInformation ColumnInformation { get; internal set; }
}
///
@@ -39,7 +39,7 @@ public sealed class ColumnInferenceResults
///
///
/// Contains information about the purpose of each column in the dataset. For instance,
- /// it enumerates the dataset columns that AutoML should treat as categorical,
+ /// it enumerates the dataset columns that AutoML should treat as categorical,
/// the columns AutoML should ignore, which column is the label, etc.
/// can be fed to the AutoML API when running an experiment.
/// See
@@ -50,7 +50,8 @@ public sealed class ColumnInformation
///
/// The dataset column to use as the label.
///
- public string LabelColumnName { get; set; } = DefaultColumnNames.Label;
+ /// The default value is "Label".
+ public string LabelColumnName { get; set; }
///
/// The dataset column to use for example weight.
@@ -58,9 +59,9 @@ public sealed class ColumnInformation
public string ExampleWeightColumnName { get; set; }
///
- /// The dataset column to use for grouping rows.
+ /// The dataset column to use for grouping rows.
/// If two examples share the same sampling key column name,
- /// they are guaranteed to appear in the same subset (train or test).
+ /// they are guaranteed to appear in the same subset (train or test).
/// This can be used to ensure no label leakage from the train to the test set.
/// If , no row grouping will be performed.
///
@@ -69,24 +70,37 @@ public sealed class ColumnInformation
///
/// The dataset columns that are categorical.
///
+ /// The default value is a new, empty .
///
/// Categorical data columns should generally be columns that contain a small number of unique values.
///
- public ICollection CategoricalColumnNames { get; } = new Collection();
+ public ICollection CategoricalColumnNames { get; }
///
/// The dataset columns that are numeric.
///
- public ICollection NumericColumnNames { get; } = new Collection();
+ /// The default value is a new, empty .
+ public ICollection NumericColumnNames { get; }
///
/// The dataset columns that are text.
///
- public ICollection TextColumnNames { get; } = new Collection();
+ /// The default value is a new, empty .
+ public ICollection TextColumnNames { get; }
///
/// The dataset columns that AutoML should ignore.
///
- public ICollection IgnoredColumnNames { get; } = new Collection();
+ /// The default value is a new, empty .
+ public ICollection IgnoredColumnNames { get; }
+
+ public ColumnInformation()
+ {
+ LabelColumnName = DefaultColumnNames.Label;
+ CategoricalColumnNames = new Collection();
+ NumericColumnNames = new Collection();
+ TextColumnNames = new Collection();
+ IgnoredColumnNames = new Collection();
+ }
}
}
\ No newline at end of file
diff --git a/src/Microsoft.ML.AutoML/API/ExperimentBase.cs b/src/Microsoft.ML.AutoML/API/ExperimentBase.cs
index 05601342dc..beef7952cd 100644
--- a/src/Microsoft.ML.AutoML/API/ExperimentBase.cs
+++ b/src/Microsoft.ML.AutoML/API/ExperimentBase.cs
@@ -13,7 +13,7 @@ namespace Microsoft.ML.AutoML
///
/// Metrics type used by task-specific AutoML experiments.
/// Experiment settings type.
- public abstract class ExperimentBase
+ public abstract class ExperimentBase
where TMetrics : class
where TExperimentSettings : ExperimentSettings
{
@@ -47,15 +47,15 @@ internal ExperimentBase(MLContext context,
///
/// The training data used by the AutoML experiment.
/// The dataset column used as the label.
- /// The dataset column used as the sampling key column.
+ /// The dataset column used as the sampling key column.
/// See for more information.
/// Pre-featurizer that AutoML will apply to the data during an
- /// experiment. (The pre-featurizer will be fit only on the training data split to produce a
- /// trained transform. Then, the trained transform will be applied to both the training
+ /// experiment. (The pre-featurizer will be fit only on the training data split to produce a
+ /// trained transform. Then, the trained transform will be applied to both the training
/// data split and corresponding validation data split.)
- /// A user-defined object that implements
+ /// A user-defined object that implements
/// the interface. AutoML will invoke the method
- /// after each model it produces during the
+ /// after each model it produces during the
/// course of the experiment.
///
/// The experiment result.
@@ -79,19 +79,19 @@ public ExperimentResult Execute(IDataView trainData, string labelColum
/// The training data to be used by the AutoML experiment.
/// Column information for the dataset.
/// Pre-featurizer that AutoML will apply to the data during an
- /// experiment. (The pre-featurizer will be fit only on the training data split to produce a
- /// trained transform. Then, the trained transform will be applied to both the training
+ /// experiment. (The pre-featurizer will be fit only on the training data split to produce a
+ /// trained transform. Then, the trained transform will be applied to both the training
/// data split and corresponding validation data split.)
- /// A user-defined object that implements
+ /// A user-defined object that implements
/// the interface. AutoML will invoke the method
- /// after each model it produces during the
+ /// after each model it produces during the
/// course of the experiment.
///
/// The experiment result.
///
/// Depending on the size of your data, the AutoML experiment could take a long time to execute.
///
- public ExperimentResult Execute(IDataView trainData, ColumnInformation columnInformation,
+ public ExperimentResult Execute(IDataView trainData, ColumnInformation columnInformation,
IEstimator preFeaturizer = null, IProgress> progressHandler = null)
{
// Cross val threshold for # of dataset rows --
@@ -121,12 +121,12 @@ public ExperimentResult Execute(IDataView trainData, ColumnInformation
/// The validation data to be used by the AutoML experiment.
/// The name of the label column.
/// Pre-featurizer that AutoML will apply to the data during an
- /// experiment. (The pre-featurizer will be fit only on the training data split to produce a
- /// trained transform. Then, the trained transform will be applied to both the training
+ /// experiment. (The pre-featurizer will be fit only on the training data split to produce a
+ /// trained transform. Then, the trained transform will be applied to both the training
/// data split and corresponding validation data split.)
- /// A user-defined object that implements
+ /// A user-defined object that implements
/// the interface. AutoML will invoke the method
- /// after each model it produces during the
+ /// after each model it produces during the
/// course of the experiment.
///
/// The experiment result.
@@ -146,12 +146,12 @@ public ExperimentResult Execute(IDataView trainData, IDataView validat
/// The validation data to be used by the AutoML experiment.
/// Column information for the dataset.
/// Pre-featurizer that AutoML will apply to the data during an
- /// experiment. (The pre-featurizer will be fit only on the training data split to produce a
- /// trained transform. Then, the trained transform will be applied to both the training
+ /// experiment. (The pre-featurizer will be fit only on the training data split to produce a
+ /// trained transform. Then, the trained transform will be applied to both the training
/// data split and corresponding validation data split.)
- /// A user-defined object that implements
+ /// A user-defined object that implements
/// the interface. AutoML will invoke the method
- /// after each model it produces during the
+ /// after each model it produces during the
/// course of the experiment.
///
/// The experiment result.
@@ -174,12 +174,12 @@ public ExperimentResult Execute(IDataView trainData, IDataView validat
/// The number of cross validation folds into which the training data should be divided when fitting a model.
/// Column information for the dataset.
/// Pre-featurizer that AutoML will apply to the data during an
- /// experiment. (The pre-featurizer will be fit only on the training data split to produce a
- /// trained transform. Then, the trained transform will be applied to both the training
+ /// experiment. (The pre-featurizer will be fit only on the training data split to produce a
+ /// trained transform. Then, the trained transform will be applied to both the training
/// data split and corresponding validation data split.)
- /// A user-defined object that implements
+ /// A user-defined object that implements
/// the interface. AutoML will invoke the method
- /// after each model it produces during the
+ /// after each model it produces during the
/// course of the experiment.
///
/// The cross validation experiment result.
@@ -201,21 +201,21 @@ public CrossValidationExperimentResult Execute(IDataView trainData, ui
/// The name of the label column.
/// The name of the sampling key column.
/// Pre-featurizer that AutoML will apply to the data during an
- /// experiment. (The pre-featurizer will be fit only on the training data split to produce a
- /// trained transform. Then, the trained transform will be applied to both the training
+ /// experiment. (The pre-featurizer will be fit only on the training data split to produce a
+ /// trained transform. Then, the trained transform will be applied to both the training
/// data split and corresponding validation data split.)
- /// A user-defined object that implements
+ /// A user-defined object that implements
/// the interface. AutoML will invoke the method
- /// after each model it produces during the
+ /// after each model it produces during the
/// course of the experiment.
///
/// The cross validation experiment result.
///
/// Depending on the size of your data, the AutoML experiment could take a long time to execute.
///
- public CrossValidationExperimentResult Execute(IDataView trainData,
+ public CrossValidationExperimentResult Execute(IDataView trainData,
uint numberOfCVFolds, string labelColumnName = DefaultColumnNames.Label,
- string samplingKeyColumn = null, IEstimator preFeaturizer = null,
+ string samplingKeyColumn = null, IEstimator preFeaturizer = null,
Progress> progressHandler = null)
{
var columnInformation = new ColumnInformation()
diff --git a/src/Microsoft.ML.AutoML/API/ExperimentSettings.cs b/src/Microsoft.ML.AutoML/API/ExperimentSettings.cs
index 6322d327f6..21d08eca30 100644
--- a/src/Microsoft.ML.AutoML/API/ExperimentSettings.cs
+++ b/src/Microsoft.ML.AutoML/API/ExperimentSettings.cs
@@ -16,15 +16,16 @@ public abstract class ExperimentSettings
///
/// Maximum time in seconds the experiment is allowed to run.
///
+ /// The default value is 86,400, the number of seconds in one day.
///
/// An experiment may run for longer than .
/// This is because once AutoML starts training an ML.NET model, AutoML lets the
- /// model train to completion. For instance, if the first model
+ /// model train to completion. For instance, if the first model
/// AutoML trains takes 4 hours, and the second model trained takes 5 hours,
- /// but was the number of seconds in 6 hours,
+ /// but was the number of seconds in 6 hours,
/// the experiment will run for 4 + 5 = 9 hours (not 6 hours).
///
- public uint MaxExperimentTimeInSeconds { get; set; } = 24 * 60 * 60;
+ public uint MaxExperimentTimeInSeconds { get; set; }
///
/// Cancellation token for the AutoML experiment. It propagates the notification
@@ -33,28 +34,42 @@ public abstract class ExperimentSettings
///
/// An experiment may not immediately stop after cancellation.
/// This is because once AutoML starts training an ML.NET model, AutoML lets the
- /// model train to completion. For instance, if the first model
+ /// model train to completion. For instance, if the first model
/// AutoML trains takes 4 hours, and the second model trained takes 5 hours,
- /// but cancellation is requested after 6 hours,
+ /// but cancellation is requested after 6 hours,
/// the experiment will stop after 4 + 5 = 9 hours (not 6 hours).
///
- public CancellationToken CancellationToken { get; set; } = default;
+ public CancellationToken CancellationToken { get; set; }
///
/// This is a pointer to a directory where all models trained during the AutoML experiment will be saved.
/// If , models will be kept in memory instead of written to disk.
- /// (Please note: for an experiment with high runtime operating on a large dataset, opting to keep models in
+ /// (Please note: for an experiment with high runtime operating on a large dataset, opting to keep models in
/// memory could cause a system to run out of memory.)
///
- public DirectoryInfo CacheDirectory { get; set; } = new DirectoryInfo(Path.Combine(Path.GetTempPath(), "Microsoft.ML.AutoML"));
+ /// The default value is the directory named "Microsoft.ML.AutoML" in the current user's temporary folder.
+ public DirectoryInfo CacheDirectory { get; set; }
///
/// Whether AutoML should cache before ML.NET trainers.
/// See for more information on caching.
///
- public CacheBeforeTrainer CacheBeforeTrainer = CacheBeforeTrainer.Auto;
-
- internal int MaxModels = int.MaxValue;
+ /// The default value is .
+ public CacheBeforeTrainer CacheBeforeTrainer { get; set; }
+
+ internal int MaxModels;
+
+ ///
+ /// Initializes a new instance of .
+ ///
+ public ExperimentSettings()
+ {
+ MaxExperimentTimeInSeconds = 24 * 60 * 60;
+ CancellationToken = default;
+ CacheDirectory = new DirectoryInfo(Path.Combine(Path.GetTempPath(), "Microsoft.ML.AutoML"));
+ CacheBeforeTrainer = CacheBeforeTrainer.Auto;
+ MaxModels = int.MaxValue;
+ }
}
///
diff --git a/src/Microsoft.ML.AutoML/API/InferenceException.cs b/src/Microsoft.ML.AutoML/API/InferenceException.cs
index 03e49eb29b..667309d7f4 100644
--- a/src/Microsoft.ML.AutoML/API/InferenceException.cs
+++ b/src/Microsoft.ML.AutoML/API/InferenceException.cs
@@ -31,7 +31,7 @@ public sealed class InferenceException : Exception
/// Type of AutoML exception that occurred.
///
public InferenceExceptionType InferenceExceptionType;
-
+
internal InferenceException(InferenceExceptionType inferenceType, string message)
: base(message)
{
diff --git a/src/Microsoft.ML.AutoML/API/MulticlassClassificationExperiment.cs b/src/Microsoft.ML.AutoML/API/MulticlassClassificationExperiment.cs
index 2c368ae603..6f45e6f54a 100644
--- a/src/Microsoft.ML.AutoML/API/MulticlassClassificationExperiment.cs
+++ b/src/Microsoft.ML.AutoML/API/MulticlassClassificationExperiment.cs
@@ -20,16 +20,25 @@ public sealed class MulticlassExperimentSettings : ExperimentSettings
///
/// Metric that AutoML will try to optimize over the course of the experiment.
///
- public MulticlassClassificationMetric OptimizingMetric { get; set; } = MulticlassClassificationMetric.MicroAccuracy;
+ /// The default value is .
+ public MulticlassClassificationMetric OptimizingMetric { get; set; }
///
/// Collection of trainers the AutoML experiment can leverage.
///
- ///
- /// The collection is auto-populated with all possible trainers (all values of ).
- ///
- public ICollection Trainers { get; } =
- Enum.GetValues(typeof(MulticlassClassificationTrainer)).OfType().ToList();
+ ///
+ /// The default value is a collection auto-populated with all possible trainers (all values of ).
+ ///
+ public ICollection Trainers { get; }
+
+ ///
+        /// Initializes a new instance of .
+ ///
+ public MulticlassExperimentSettings()
+ {
+ OptimizingMetric = MulticlassClassificationMetric.MicroAccuracy;
+ Trainers = Enum.GetValues(typeof(MulticlassClassificationTrainer)).OfType().ToList();
+ }
}
///
@@ -71,17 +80,17 @@ public enum MulticlassClassificationTrainer
///
/// using .
///
- AveragedPerceptronOVA,
+ AveragedPerceptronOva,
///
/// using .
///
- FastForestOVA,
+ FastForestOva,
///
/// using .
///
- FastTreeOVA,
+ FastTreeOva,
///
/// See .
@@ -91,7 +100,7 @@ public enum MulticlassClassificationTrainer
///
/// using .
///
- LinearSupportVectorMachinesOVA,
+ LinearSupportVectorMachinesOva,
///
/// See .
@@ -101,7 +110,7 @@ public enum MulticlassClassificationTrainer
///
/// using .
///
- LbfgsLogisticRegressionOVA,
+ LbfgsLogisticRegressionOva,
///
/// See .
@@ -111,12 +120,12 @@ public enum MulticlassClassificationTrainer
///
/// using .
///
- SgdCalibratedOVA,
+ SgdCalibratedOva,
///
/// using .
///
- SymbolicSgdLogisticRegressionOVA,
+ SymbolicSgdLogisticRegressionOva,
}
///
diff --git a/src/Microsoft.ML.AutoML/API/Pipeline.cs b/src/Microsoft.ML.AutoML/API/Pipeline.cs
index 8bf6c34632..28674fd2b9 100644
--- a/src/Microsoft.ML.AutoML/API/Pipeline.cs
+++ b/src/Microsoft.ML.AutoML/API/Pipeline.cs
@@ -21,7 +21,7 @@ public Pipeline(PipelineNode[] nodes, bool cacheBeforeTrainer = false)
internal Pipeline()
{
}
-
+
public IEstimator ToEstimator(MLContext context)
{
var inferredPipeline = SuggestedPipeline.FromPipeline(context, this);
@@ -48,7 +48,7 @@ public PipelineNode(string name, PipelineNodeType nodeType,
Properties = properties ?? new Dictionary();
}
- public PipelineNode(string name, PipelineNodeType nodeType,
+ public PipelineNode(string name, PipelineNodeType nodeType,
string inColumn, string outColumn, IDictionary properties = null) :
this(name, nodeType, new string[] { inColumn }, new string[] { outColumn }, properties)
{
diff --git a/src/Microsoft.ML.AutoML/API/RegressionExperiment.cs b/src/Microsoft.ML.AutoML/API/RegressionExperiment.cs
index 394618e3c1..438260a1eb 100644
--- a/src/Microsoft.ML.AutoML/API/RegressionExperiment.cs
+++ b/src/Microsoft.ML.AutoML/API/RegressionExperiment.cs
@@ -20,16 +20,22 @@ public sealed class RegressionExperimentSettings : ExperimentSettings
///
/// Metric that AutoML will try to optimize over the course of the experiment.
///
- public RegressionMetric OptimizingMetric { get; set; } = RegressionMetric.RSquared;
+ /// The default value is .
+ public RegressionMetric OptimizingMetric { get; set; }
///
/// Collection of trainers the AutoML experiment can leverage.
///
- ///
- /// The collection is auto-populated with all possible trainers (all values of ).
- ///
- public ICollection Trainers { get; } =
- Enum.GetValues(typeof(RegressionTrainer)).OfType().ToList();
+ ///
+ /// The default value is a collection auto-populated with all possible trainers (all values of ).
+ ///
+ public ICollection Trainers { get; }
+
+ public RegressionExperimentSettings()
+ {
+ OptimizingMetric = RegressionMetric.RSquared;
+ Trainers = Enum.GetValues(typeof(RegressionTrainer)).OfType().ToList();
+ }
}
///
@@ -58,7 +64,6 @@ public enum RegressionMetric
RSquared
}
-
///
/// Enumeration of ML.NET multiclass classification trainers used by AutoML.
///
@@ -116,7 +121,7 @@ public enum RegressionTrainer
///
public sealed class RegressionExperiment : ExperimentBase
{
- internal RegressionExperiment(MLContext context, RegressionExperimentSettings settings)
+ internal RegressionExperiment(MLContext context, RegressionExperimentSettings settings)
: base(context,
new RegressionMetricsAgent(context, settings.OptimizingMetric),
new OptimizingMetricInfo(settings.OptimizingMetric),
diff --git a/src/Microsoft.ML.AutoML/API/RunDetails/CrossValidationRunDetail.cs b/src/Microsoft.ML.AutoML/API/RunDetails/CrossValidationRunDetail.cs
index 924132b725..b83de334c9 100644
--- a/src/Microsoft.ML.AutoML/API/RunDetails/CrossValidationRunDetail.cs
+++ b/src/Microsoft.ML.AutoML/API/RunDetails/CrossValidationRunDetail.cs
@@ -13,7 +13,7 @@ namespace Microsoft.ML.AutoML
/// Metrics type for the run.
///
/// Over the course of an experiment, many models are evaluated on a dataset
- /// using cross validation. This object contains information about each model
+ /// using cross validation. This object contains information about each model
/// evaluated during the AutoML experiment.
///
public sealed class CrossValidationRunDetail : RunDetail
@@ -54,7 +54,7 @@ public sealed class TrainResult
public ITransformer Model { get { return _modelContainer.GetModel(); } }
///
- /// Exception encountered while training the fold. This property is
+ /// Exception encountered while training the fold. This property is
/// if no exception was encountered.
///
///
diff --git a/src/Microsoft.ML.AutoML/API/RunDetails/RunDetail.cs b/src/Microsoft.ML.AutoML/API/RunDetails/RunDetail.cs
index 0460b32d61..bce89ecf1f 100644
--- a/src/Microsoft.ML.AutoML/API/RunDetails/RunDetail.cs
+++ b/src/Microsoft.ML.AutoML/API/RunDetails/RunDetail.cs
@@ -29,7 +29,6 @@ public sealed class RunDetail : RunDetail
///
public TMetrics ValidationMetrics { get; private set; }
-
///
/// Model trained during the run.
///
@@ -81,7 +80,7 @@ public abstract class RunDetail
/// Runtime in seconds.
///
///
- /// Runtime includes model training time. Depending on the size of the data,
+ /// Runtime includes model training time. Depending on the size of the data,
/// the runtime may be quite long.
///
public double RuntimeInSeconds { get; internal set; }
@@ -90,7 +89,7 @@ public abstract class RunDetail
/// An ML.NET that represents the pipeline in this run.
///
///
- /// You can call on
+ /// You can call on
/// this estimator to re-train your pipeline on any .
///
public IEstimator Estimator { get; private set; }
diff --git a/src/Microsoft.ML.AutoML/AutoMlUtils.cs b/src/Microsoft.ML.AutoML/AutoMlUtils.cs
index 3a6917ec69..69ccc0489c 100644
--- a/src/Microsoft.ML.AutoML/AutoMlUtils.cs
+++ b/src/Microsoft.ML.AutoML/AutoMlUtils.cs
@@ -9,7 +9,7 @@ namespace Microsoft.ML.AutoML
{
internal static class AutoMlUtils
{
- public static readonly ThreadLocal random = new ThreadLocal(() => new Random());
+ public static readonly ThreadLocal Random = new ThreadLocal(() => new Random());
public static void Assert(bool boolVal, string message = null)
{
diff --git a/src/Microsoft.ML.AutoML/ColumnInference/ColumnGroupingInference.cs b/src/Microsoft.ML.AutoML/ColumnInference/ColumnGroupingInference.cs
index 626b287e2e..9dc2018016 100644
--- a/src/Microsoft.ML.AutoML/ColumnInference/ColumnGroupingInference.cs
+++ b/src/Microsoft.ML.AutoML/ColumnInference/ColumnGroupingInference.cs
@@ -83,7 +83,7 @@ into g
private static int GetPurposeGroupId(int columnIndex, ColumnPurpose purpose)
{
- if (purpose == ColumnPurpose.CategoricalFeature ||
+ if (purpose == ColumnPurpose.CategoricalFeature ||
purpose == ColumnPurpose.TextFeature ||
purpose == ColumnPurpose.Ignore)
return columnIndex;
diff --git a/src/Microsoft.ML.AutoML/ColumnInference/ColumnTypeInference.cs b/src/Microsoft.ML.AutoML/ColumnInference/ColumnTypeInference.cs
index ef377c5abc..d3f40a4902 100644
--- a/src/Microsoft.ML.AutoML/ColumnInference/ColumnTypeInference.cs
+++ b/src/Microsoft.ML.AutoML/ColumnInference/ColumnTypeInference.cs
@@ -74,7 +74,7 @@ public IntermediateColumn(ReadOnlyMemory[] data, int columnId)
public bool HasAllBooleanValues()
{
- if (this.RawData.Skip(1)
+ if (RawData.Skip(1)
.All(x =>
{
bool value;
diff --git a/src/Microsoft.ML.AutoML/ColumnInference/PurposeInference.cs b/src/Microsoft.ML.AutoML/ColumnInference/PurposeInference.cs
index 59edf16568..cc568abdcb 100644
--- a/src/Microsoft.ML.AutoML/ColumnInference/PurposeInference.cs
+++ b/src/Microsoft.ML.AutoML/ColumnInference/PurposeInference.cs
@@ -87,7 +87,7 @@ public IReadOnlyList> GetColumnData()
var results = new List>();
var column = _data.Schema[_columnId];
-
+
using (var cursor = _data.GetRowCursor(new[] { column }))
{
var getter = cursor.GetGetter>(column);
diff --git a/src/Microsoft.ML.AutoML/ColumnInference/TextFileContents.cs b/src/Microsoft.ML.AutoML/ColumnInference/TextFileContents.cs
index e439494afb..fe0066ab6e 100644
--- a/src/Microsoft.ML.AutoML/ColumnInference/TextFileContents.cs
+++ b/src/Microsoft.ML.AutoML/ColumnInference/TextFileContents.cs
@@ -78,7 +78,7 @@ from _sep in separatorCandidates
return foundAny ? result : new ColumnSplitResult(false, null, true, true, 0);
}
- private static bool TryParseFile(MLContext context, TextLoader.Options options, IMultiStreamSource source,
+ private static bool TryParseFile(MLContext context, TextLoader.Options options, IMultiStreamSource source,
out ColumnSplitResult result)
{
result = null;
diff --git a/src/Microsoft.ML.AutoML/ColumnInference/TextFileSample.cs b/src/Microsoft.ML.AutoML/ColumnInference/TextFileSample.cs
index 8cedc3b99b..5d9510f720 100644
--- a/src/Microsoft.ML.AutoML/ColumnInference/TextFileSample.cs
+++ b/src/Microsoft.ML.AutoML/ColumnInference/TextFileSample.cs
@@ -97,7 +97,7 @@ public static TextFileSample CreateFromFullStream(Stream stream)
return CreateFromHead(stream);
}
var fileSize = stream.Length;
-
+
if (fileSize <= 2 * BufferSizeMb * (1 << 20))
{
return CreateFromHead(stream);
@@ -139,7 +139,7 @@ public static TextFileSample CreateFromFullStream(Stream stream)
long fileSizeRemaining = fileSize - firstChunk.Length - ((long)chunkSize) * chunkCount;
var chunkStartIndices = Enumerable.Range(0, chunkCount)
- .Select(x => AutoMlUtils.random.Value.NextDouble() * fileSizeRemaining)
+ .Select(x => AutoMlUtils.Random.Value.NextDouble() * fileSizeRemaining)
.OrderBy(x => x)
.Select((spot, i) => (long)(spot + firstChunk.Length + i * chunkSize))
.ToArray();
diff --git a/src/Microsoft.ML.AutoML/DatasetDimensions/DatasetDimensionsApi.cs b/src/Microsoft.ML.AutoML/DatasetDimensions/DatasetDimensionsApi.cs
index 73dca65e5d..4282610fc4 100644
--- a/src/Microsoft.ML.AutoML/DatasetDimensions/DatasetDimensionsApi.cs
+++ b/src/Microsoft.ML.AutoML/DatasetDimensions/DatasetDimensionsApi.cs
@@ -36,8 +36,8 @@ public static ColumnDimensions[] CalcColumnDimensions(MLContext context, IDataVi
// If numeric feature, discover missing values
if (itemType == NumberDataViewType.Single)
{
- hasMissing = column.Type.IsVector() ?
- DatasetDimensionsUtil.HasMissingNumericVector(data, column) :
+ hasMissing = column.Type.IsVector() ?
+ DatasetDimensionsUtil.HasMissingNumericVector(data, column) :
DatasetDimensionsUtil.HasMissingNumericSingleValue(data, column);
}
diff --git a/src/Microsoft.ML.AutoML/Experiment/Experiment.cs b/src/Microsoft.ML.AutoML/Experiment/Experiment.cs
index a0503b00a5..fca8aeda95 100644
--- a/src/Microsoft.ML.AutoML/Experiment/Experiment.cs
+++ b/src/Microsoft.ML.AutoML/Experiment/Experiment.cs
@@ -22,10 +22,9 @@ internal class Experiment where TRunDetail : RunDetail
private readonly DirectoryInfo _modelDirectory;
private readonly DatasetColumnInfo[] _datasetColumnInfo;
private readonly IRunner _runner;
- private readonly IList _history = new List();
+ private readonly IList _history;
private readonly AutoMLLogger _logger;
-
public Experiment(MLContext context,
TaskKind task,
OptimizingMetricInfo metricInfo,
@@ -38,6 +37,7 @@ public Experiment(MLContext context,
AutoMLLogger logger)
{
_context = context;
+ _history = new List();
_optimizingMetricInfo = metricInfo;
_task = task;
_progressCallback = progressCallback;
diff --git a/src/Microsoft.ML.AutoML/Experiment/MetricsAgents/MultiMetricsAgent.cs b/src/Microsoft.ML.AutoML/Experiment/MetricsAgents/MultiMetricsAgent.cs
index b7ce3bddca..abd584ae53 100644
--- a/src/Microsoft.ML.AutoML/Experiment/MetricsAgents/MultiMetricsAgent.cs
+++ b/src/Microsoft.ML.AutoML/Experiment/MetricsAgents/MultiMetricsAgent.cs
@@ -65,7 +65,7 @@ public bool IsModelPerfect(double score)
throw MetricsAgentUtil.BuildMetricNotSupportedException(_optimizingMetric);
}
}
-
+
public MulticlassClassificationMetrics EvaluateMetrics(IDataView data, string labelColumn)
{
return _mlContext.MulticlassClassification.Evaluate(data, labelColumn);
diff --git a/src/Microsoft.ML.AutoML/Experiment/Runners/CrossValRunner.cs b/src/Microsoft.ML.AutoML/Experiment/Runners/CrossValRunner.cs
index 6ba85a2ead..1ee6ea84f4 100644
--- a/src/Microsoft.ML.AutoML/Experiment/Runners/CrossValRunner.cs
+++ b/src/Microsoft.ML.AutoML/Experiment/Runners/CrossValRunner.cs
@@ -41,7 +41,7 @@ public CrossValRunner(MLContext context,
_modelInputSchema = trainDatasets[0].Schema;
}
- public (SuggestedPipelineRunDetail suggestedPipelineRunDetail, CrossValidationRunDetail runDetail)
+ public (SuggestedPipelineRunDetail suggestedPipelineRunDetail, CrossValidationRunDetail runDetail)
Run(SuggestedPipeline pipeline, DirectoryInfo modelDirectory, int iterationNum)
{
var trainResults = new List>();
diff --git a/src/Microsoft.ML.AutoML/Experiment/Runners/CrossValSummaryRunner.cs b/src/Microsoft.ML.AutoML/Experiment/Runners/CrossValSummaryRunner.cs
index b393bc9520..60520613bd 100644
--- a/src/Microsoft.ML.AutoML/Experiment/Runners/CrossValSummaryRunner.cs
+++ b/src/Microsoft.ML.AutoML/Experiment/Runners/CrossValSummaryRunner.cs
@@ -54,7 +54,7 @@ public CrossValSummaryRunner(MLContext context,
{
var modelFileInfo = RunnerUtil.GetModelFileInfo(modelDirectory, iterationNum, i + 1);
var trainResult = RunnerUtil.TrainAndScorePipeline(_context, pipeline, _trainDatasets[i], _validDatasets[i],
- _labelColumn, _metricsAgent, _preprocessorTransforms?.ElementAt(i), modelFileInfo, _modelInputSchema,
+ _labelColumn, _metricsAgent, _preprocessorTransforms?.ElementAt(i), modelFileInfo, _modelInputSchema,
_logger);
trainResults.Add(trainResult);
}
diff --git a/src/Microsoft.ML.AutoML/Experiment/Runners/IRunner.cs b/src/Microsoft.ML.AutoML/Experiment/Runners/IRunner.cs
index b66c43168a..8d417c1f45 100644
--- a/src/Microsoft.ML.AutoML/Experiment/Runners/IRunner.cs
+++ b/src/Microsoft.ML.AutoML/Experiment/Runners/IRunner.cs
@@ -8,7 +8,7 @@ namespace Microsoft.ML.AutoML
{
internal interface IRunner where TRunDetail : RunDetail
{
- (SuggestedPipelineRunDetail suggestedPipelineRunDetail, TRunDetail runDetail)
+ (SuggestedPipelineRunDetail suggestedPipelineRunDetail, TRunDetail runDetail)
Run (SuggestedPipeline pipeline, DirectoryInfo modelDirectory, int iterationNum);
}
}
\ No newline at end of file
diff --git a/src/Microsoft.ML.AutoML/Experiment/Runners/RunnerUtil.cs b/src/Microsoft.ML.AutoML/Experiment/Runners/RunnerUtil.cs
index 475ac9f535..c9615c3254 100644
--- a/src/Microsoft.ML.AutoML/Experiment/Runners/RunnerUtil.cs
+++ b/src/Microsoft.ML.AutoML/Experiment/Runners/RunnerUtil.cs
@@ -9,9 +9,9 @@ namespace Microsoft.ML.AutoML
{
internal static class RunnerUtil
{
- public static (ModelContainer model, TMetrics metrics, Exception exception, double score)
+ public static (ModelContainer model, TMetrics metrics, Exception exception, double score)
TrainAndScorePipeline(MLContext context,
- SuggestedPipeline pipeline,
+ SuggestedPipeline pipeline,
IDataView trainData,
IDataView validData,
string labelColumn,
@@ -29,7 +29,7 @@ public static (ModelContainer model, TMetrics metrics, Exception exception, doub
var scoredData = model.Transform(validData);
var metrics = metricsAgent.EvaluateMetrics(scoredData, labelColumn);
var score = metricsAgent.GetScore(metrics);
-
+
if (preprocessorTransform != null)
{
model = preprocessorTransform.Append(model);
diff --git a/src/Microsoft.ML.AutoML/Experiment/Runners/TrainValidateRunner.cs b/src/Microsoft.ML.AutoML/Experiment/Runners/TrainValidateRunner.cs
index 5a9b4527d5..a7eba29fc1 100644
--- a/src/Microsoft.ML.AutoML/Experiment/Runners/TrainValidateRunner.cs
+++ b/src/Microsoft.ML.AutoML/Experiment/Runners/TrainValidateRunner.cs
@@ -39,7 +39,7 @@ public TrainValidateRunner(MLContext context,
_modelInputSchema = trainData.Schema;
}
- public (SuggestedPipelineRunDetail suggestedPipelineRunDetail, RunDetail runDetail)
+ public (SuggestedPipelineRunDetail suggestedPipelineRunDetail, RunDetail runDetail)
Run(SuggestedPipeline pipeline, DirectoryInfo modelDirectory, int iterationNum)
{
var modelFileInfo = GetModelFileInfo(modelDirectory, iterationNum);
@@ -57,7 +57,7 @@ public TrainValidateRunner(MLContext context,
private static FileInfo GetModelFileInfo(DirectoryInfo modelDirectory, int iterationNum)
{
- return modelDirectory == null ?
+ return modelDirectory == null ?
null :
new FileInfo(Path.Combine(modelDirectory.FullName, $"Model{iterationNum}.zip"));
}
diff --git a/src/Microsoft.ML.AutoML/Experiment/SuggestedPipeline.cs b/src/Microsoft.ML.AutoML/Experiment/SuggestedPipeline.cs
index 27f6380652..72c314fe93 100644
--- a/src/Microsoft.ML.AutoML/Experiment/SuggestedPipeline.cs
+++ b/src/Microsoft.ML.AutoML/Experiment/SuggestedPipeline.cs
@@ -34,8 +34,8 @@ public SuggestedPipeline(IEnumerable transforms,
_context = context;
_cacheBeforeTrainer = cacheBeforeTrainer;
}
-
- public override string ToString() => $"{string.Join(" ", Transforms.Select(t => $"xf={t}"))} tr={this.Trainer} {string.Join(" ", TransformsPostTrainer.Select(t => $"xf={t}"))} cache={(_cacheBeforeTrainer ? "+" : "-")}";
+
+ public override string ToString() => $"{string.Join(" ", Transforms.Select(t => $"xf={t}"))} tr={Trainer} {string.Join(" ", TransformsPostTrainer.Select(t => $"xf={t}"))} cache={(_cacheBeforeTrainer ? "+" : "-")}";
public override bool Equals(object obj)
{
@@ -44,7 +44,7 @@ public override bool Equals(object obj)
{
return false;
}
- return pipeline.ToString() == this.ToString();
+ return pipeline.ToString() == ToString();
}
public override int GetHashCode()
diff --git a/src/Microsoft.ML.AutoML/Microsoft.ML.AutoML.csproj b/src/Microsoft.ML.AutoML/Microsoft.ML.AutoML.csproj
index b831f4e4d4..85a641601c 100644
--- a/src/Microsoft.ML.AutoML/Microsoft.ML.AutoML.csproj
+++ b/src/Microsoft.ML.AutoML/Microsoft.ML.AutoML.csproj
@@ -3,9 +3,6 @@
netstandard2.0
7.3
Microsoft.ML.AutoML
-
- false
- false
Microsoft.ML.AutoML
diff --git a/src/Microsoft.ML.AutoML/PipelineSuggesters/PipelineSuggester.cs b/src/Microsoft.ML.AutoML/PipelineSuggesters/PipelineSuggester.cs
index 3b8d600907..dce6f53f6a 100644
--- a/src/Microsoft.ML.AutoML/PipelineSuggesters/PipelineSuggester.cs
+++ b/src/Microsoft.ML.AutoML/PipelineSuggesters/PipelineSuggester.cs
@@ -32,7 +32,7 @@ public static SuggestedPipeline GetNextInferredPipeline(MLContext context,
CacheBeforeTrainer cacheBeforeTrainer,
IEnumerable trainerWhitelist = null)
{
- var availableTrainers = RecipeInference.AllowedTrainers(context, task,
+ var availableTrainers = RecipeInference.AllowedTrainers(context, task,
ColumnInformationUtil.BuildColumnInfo(columns), trainerWhitelist);
var transforms = TransformInferenceApi.InferTransforms(context, task, columns).ToList();
var transformsPostTrainer = TransformInferenceApi.InferTransformsPostTrainer(context, task, columns).ToList();
@@ -83,11 +83,11 @@ public static SuggestedPipeline GetNextInferredPipeline(MLContext context,
return null;
}
-
+
///
/// Get top trainers from first stage
///
- private static IEnumerable GetTopTrainers(IEnumerable history,
+ private static IEnumerable GetTopTrainers(IEnumerable history,
IEnumerable availableTrainers,
bool isMaximizingMetric)
{
diff --git a/src/Microsoft.ML.AutoML/Sweepers/Parameters.cs b/src/Microsoft.ML.AutoML/Sweepers/Parameters.cs
index c5939f3b7d..7b1579bae5 100644
--- a/src/Microsoft.ML.AutoML/Sweepers/Parameters.cs
+++ b/src/Microsoft.ML.AutoML/Sweepers/Parameters.cs
@@ -9,44 +9,51 @@ namespace Microsoft.ML.AutoML
{
internal abstract class BaseParamArguments
{
- //[Argument(ArgumentType.Required, HelpText = "Parameter name", ShortName = "n")]
+ // Parameter name
public string Name;
}
internal abstract class NumericParamArguments : BaseParamArguments
{
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "Number of steps for grid runthrough.", ShortName = "steps")]
- public int NumSteps = 100;
+ // Number of steps for grid runthrough.
+ public int NumSteps;
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "Amount of increment between steps (multiplicative if log).", ShortName = "inc")]
- public Double? StepSize = null;
+ // Amount of increment between steps (multiplicative if log).
+ public Double? StepSize;
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "Log scale.", ShortName = "log")]
- public bool LogBase = false;
+ // Log scale.
+ public bool LogBase;
+
+ public NumericParamArguments()
+ {
+ NumSteps = 100;
+ StepSize = null;
+ LogBase = false;
+ }
}
internal class FloatParamArguments : NumericParamArguments
{
- //[Argument(ArgumentType.Required, HelpText = "Minimum value")]
+ // Minimum value
public float Min;
- //[Argument(ArgumentType.Required, HelpText = "Maximum value")]
+ // Maximum value
public float Max;
}
internal class LongParamArguments : NumericParamArguments
{
- //[Argument(ArgumentType.Required, HelpText = "Minimum value")]
+ // Minimum value
public long Min;
- //[Argument(ArgumentType.Required, HelpText = "Maximum value")]
+ // Maximum value
public long Max;
}
internal class DiscreteParamArguments : BaseParamArguments
{
- //[Argument(ArgumentType.Multiple, HelpText = "Values", ShortName = "v")]
- public string[] Values = null;
+ // Values
+ public string[] Values;
}
internal sealed class LongParameterValue : IParameterValue
diff --git a/src/Microsoft.ML.AutoML/Sweepers/Random.cs b/src/Microsoft.ML.AutoML/Sweepers/Random.cs
index 573979d66d..c045436b23 100644
--- a/src/Microsoft.ML.AutoML/Sweepers/Random.cs
+++ b/src/Microsoft.ML.AutoML/Sweepers/Random.cs
@@ -23,7 +23,7 @@ public UniformRandomSweeper(ArgumentsBase args, IValueGenerator[] sweepParameter
protected override ParameterSet CreateParamSet()
{
- return new ParameterSet(SweepParameters.Select(sweepParameter => sweepParameter.CreateFromNormalized(AutoMlUtils.random.Value.NextDouble())));
+ return new ParameterSet(SweepParameters.Select(sweepParameter => sweepParameter.CreateFromNormalized(AutoMlUtils.Random.Value.NextDouble())));
}
}
}
diff --git a/src/Microsoft.ML.AutoML/Sweepers/SmacSweeper.cs b/src/Microsoft.ML.AutoML/Sweepers/SmacSweeper.cs
index 354978500b..3ef3fb9c76 100644
--- a/src/Microsoft.ML.AutoML/Sweepers/SmacSweeper.cs
+++ b/src/Microsoft.ML.AutoML/Sweepers/SmacSweeper.cs
@@ -18,38 +18,51 @@ internal sealed class SmacSweeper : ISweeper
{
public sealed class Arguments
{
- //[Argument(ArgumentType.Multiple | ArgumentType.Required, HelpText = "Swept parameters", ShortName = "p", SignatureType = typeof(SignatureSweeperParameter))]
+ // Swept parameters
public IValueGenerator[] SweptParameters;
- //[Argument(ArgumentType.AtMostOnce, HelpText = "Seed for the random number generator for the first batch sweeper", ShortName = "seed")]
+ // Seed for the random number generator for the first batch sweeper
public int RandomSeed;
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "If iteration point is outside parameter definitions, should it be projected?", ShortName = "project")]
- public bool ProjectInBounds = true;
+ // If iteration point is outside parameter definitions, should it be projected?
+ public bool ProjectInBounds;
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "Number of regression trees in forest", ShortName = "numtrees")]
- public int NumOfTrees = 10;
+ // Number of regression trees in forest
+ public int NumOfTrees;
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "Minimum number of data points required to be in a node if it is to be split further", ShortName = "nmin")]
- public int NMinForSplit = 2;
+ // Minimum number of data points required to be in a node if it is to be split further
+ public int NMinForSplit;
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "Number of points to use for random initialization", ShortName = "nip")]
- public int NumberInitialPopulation = 20;
+ // Number of points to use for random initialization
+ public int NumberInitialPopulation;
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "Number of search parents to use for local search in maximizing EI acquisition function", ShortName = "lsp")]
- public int LocalSearchParentCount = 10;
+ // Number of search parents to use for local search in maximizing EI acquisition function
+ public int LocalSearchParentCount;
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "Number of random configurations when maximizing EI acquisition function", ShortName = "nrcan")]
- public int NumRandomEISearchConfigurations = 10000;
+ // Number of random configurations when maximizing EI acquisition function
+ public int NumRandomEISearchConfigurations;
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "Fraction of eligible dimensions to split on (i.e., split ratio)", ShortName = "sr")]
- public Float SplitRatio = (Float)0.8;
+ // Fraction of eligible dimensions to split on (i.e., split ratio)
+ public Float SplitRatio;
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "Epsilon threshold for ending local searches", ShortName = "eps")]
- public Float Epsilon = (Float)0.00001;
+ // Epsilon threshold for ending local searches
+ public Float Epsilon;
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "Number of neighbors to sample for locally searching each numerical parameter", ShortName = "nnnp")]
- public int NumNeighborsForNumericalParams = 4;
+ // Number of neighbors to sample for locally searching each numerical parameter
+ public int NumNeighborsForNumericalParams;
+
+ public Arguments()
+ {
+ ProjectInBounds = true;
+ NumOfTrees = 10;
+ NMinForSplit = 2;
+ NumberInitialPopulation = 20;
+ LocalSearchParentCount = 10;
+ NumRandomEISearchConfigurations = 10000;
+ SplitRatio = 0.8f;
+ Epsilon = 0.00001f;
+ NumNeighborsForNumericalParams = 4;
+ }
}
private readonly ISweeper _randomSweeper;
@@ -328,21 +341,21 @@ private double[][] GetForestRegressionLeafValues(FastForestRegressionModelParame
return datasetLeafValues.ToArray();
}
- // Todo: Remove the reflection below for TreeTreeEnsembleModelParameters methods GetLeaf and GetLeafValue.
+ // Todo: Remove the reflection below for TreeTreeEnsembleModelParameters methods GetLeaf and GetLeafValue.
// Long-term, replace with tree featurizer once it becomes available
// Tracking issue -- https://github.com/dotnet/machinelearning-automl/issues/342
- private static MethodInfo GetLeafMethod = typeof(TreeEnsembleModelParameters).GetMethod("GetLeaf", BindingFlags.NonPublic | BindingFlags.Instance);
- private static MethodInfo GetLeafValueMethod = typeof(TreeEnsembleModelParameters).GetMethod("GetLeafValue", BindingFlags.NonPublic | BindingFlags.Instance);
+ private static MethodInfo _getLeafMethod = typeof(TreeEnsembleModelParameters).GetMethod("GetLeaf", BindingFlags.NonPublic | BindingFlags.Instance);
+ private static MethodInfo _getLeafValueMethod = typeof(TreeEnsembleModelParameters).GetMethod("GetLeafValue", BindingFlags.NonPublic | BindingFlags.Instance);
private static int GetLeaf(TreeEnsembleModelParameters model, int treeId, VBuffer features)
{
List path = null;
- return (int)GetLeafMethod.Invoke(model, new object[] { treeId, features, path });
+ return (int)_getLeafMethod.Invoke(model, new object[] { treeId, features, path });
}
private static float GetLeafValue(TreeEnsembleModelParameters model, int treeId, int leafId)
{
- return (float)GetLeafValueMethod.Invoke(model, new object[] { treeId, leafId });
+ return (float)_getLeafValueMethod.Invoke(model, new object[] { treeId, leafId });
}
///
diff --git a/src/Microsoft.ML.AutoML/Sweepers/SweeperBase.cs b/src/Microsoft.ML.AutoML/Sweepers/SweeperBase.cs
index 5d38d241bb..3f3da31477 100644
--- a/src/Microsoft.ML.AutoML/Sweepers/SweeperBase.cs
+++ b/src/Microsoft.ML.AutoML/Sweepers/SweeperBase.cs
@@ -19,11 +19,15 @@ internal abstract class SweeperBase : ISweeper
{
internal class ArgumentsBase
{
- //[Argument(ArgumentType.Multiple, HelpText = "Swept parameters", ShortName = "p", SignatureType = typeof(SignatureSweeperParameter))]
public IValueGenerator[] SweptParameters;
- //[Argument(ArgumentType.LastOccurenceWins, HelpText = "Number of tries to generate distinct parameter sets.", ShortName = "r")]
- public int Retries = 10;
+ // Number of tries to generate distinct parameter sets.
+ public int Retries;
+
+ public ArgumentsBase()
+ {
+ Retries = 10;
+ }
}
private readonly ArgumentsBase _args;
diff --git a/src/Microsoft.ML.AutoML/Sweepers/SweeperProbabilityUtils.cs b/src/Microsoft.ML.AutoML/Sweepers/SweeperProbabilityUtils.cs
index 5200a394c4..b32870f4c8 100644
--- a/src/Microsoft.ML.AutoML/Sweepers/SweeperProbabilityUtils.cs
+++ b/src/Microsoft.ML.AutoML/Sweepers/SweeperProbabilityUtils.cs
@@ -34,8 +34,8 @@ public double[] NormalRVs(int numRVs, double mu, double sigma)
for (int i = 0; i < numRVs; i++)
{
- u1 = AutoMlUtils.random.Value.NextDouble();
- u2 = AutoMlUtils.random.Value.NextDouble();
+ u1 = AutoMlUtils.Random.Value.NextDouble();
+ u2 = AutoMlUtils.Random.Value.NextDouble();
rvs.Add(mu + sigma * Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Sin(2.0 * Math.PI * u2));
}
diff --git a/src/Microsoft.ML.AutoML/TrainerExtensions/TrainerExtensionUtil.cs b/src/Microsoft.ML.AutoML/TrainerExtensions/TrainerExtensionUtil.cs
index a13101a253..a700291794 100644
--- a/src/Microsoft.ML.AutoML/TrainerExtensions/TrainerExtensionUtil.cs
+++ b/src/Microsoft.ML.AutoML/TrainerExtensions/TrainerExtensionUtil.cs
@@ -311,25 +311,25 @@ public static TrainerName GetTrainerName(MulticlassClassificationTrainer multiTr
{
switch (multiTrainer)
{
- case MulticlassClassificationTrainer.AveragedPerceptronOVA:
+ case MulticlassClassificationTrainer.AveragedPerceptronOva:
return TrainerName.AveragedPerceptronOva;
- case MulticlassClassificationTrainer.FastForestOVA:
+ case MulticlassClassificationTrainer.FastForestOva:
return TrainerName.FastForestOva;
- case MulticlassClassificationTrainer.FastTreeOVA:
+ case MulticlassClassificationTrainer.FastTreeOva:
return TrainerName.FastTreeOva;
case MulticlassClassificationTrainer.LightGbm:
return TrainerName.LightGbmMulti;
- case MulticlassClassificationTrainer.LinearSupportVectorMachinesOVA:
+ case MulticlassClassificationTrainer.LinearSupportVectorMachinesOva:
return TrainerName.LinearSvmOva;
case MulticlassClassificationTrainer.LbfgsMaximumEntropy:
return TrainerName.LbfgsMaximumEntropyMulti;
- case MulticlassClassificationTrainer.LbfgsLogisticRegressionOVA:
+ case MulticlassClassificationTrainer.LbfgsLogisticRegressionOva:
return TrainerName.LbfgsLogisticRegressionOva;
case MulticlassClassificationTrainer.SdcaMaximumEntropy:
return TrainerName.SdcaMaximumEntropyMulti;
- case MulticlassClassificationTrainer.SgdCalibratedOVA:
+ case MulticlassClassificationTrainer.SgdCalibratedOva:
return TrainerName.SgdCalibratedOva;
- case MulticlassClassificationTrainer.SymbolicSgdLogisticRegressionOVA:
+ case MulticlassClassificationTrainer.SymbolicSgdLogisticRegressionOva:
return TrainerName.SymbolicSgdLogisticRegressionOva;
}
diff --git a/src/Microsoft.ML.AutoML/TransformInference/TransformInference.cs b/src/Microsoft.ML.AutoML/TransformInference/TransformInference.cs
index 0192c58c39..fd8e9d3c48 100644
--- a/src/Microsoft.ML.AutoML/TransformInference/TransformInference.cs
+++ b/src/Microsoft.ML.AutoML/TransformInference/TransformInference.cs
@@ -299,7 +299,7 @@ public override IEnumerable Apply(IntermediateColumn[] colum
&& column.Purpose == ColumnPurpose.NumericFeature
&& column.Dimensions.HasMissing == true)
{
- columnsWithMissing.Add(column.ColumnName);
+ columnsWithMissing.Add(column.ColumnName);
}
}
if (columnsWithMissing.Any())
@@ -338,7 +338,7 @@ public static SuggestedTransform[] InferTransforms(MLContext context, TaskKind t
return suggestedTransforms.ToArray();
}
-
+
///
/// Build final features concat transform, using output of all suggested experts.
/// Take the output columns from all suggested experts (except for 'Label'), and concatenate them
diff --git a/src/Microsoft.ML.AutoML/TransformInference/TransformPostTrainerInference.cs b/src/Microsoft.ML.AutoML/TransformInference/TransformPostTrainerInference.cs
index 09bde9046e..ca98152e6d 100644
--- a/src/Microsoft.ML.AutoML/TransformInference/TransformPostTrainerInference.cs
+++ b/src/Microsoft.ML.AutoML/TransformInference/TransformPostTrainerInference.cs
@@ -17,7 +17,7 @@ public static IEnumerable InferTransforms(MLContext context,
return suggestedTransforms;
}
- private static IEnumerable InferLabelTransforms(MLContext context, TaskKind task,
+ private static IEnumerable InferLabelTransforms(MLContext context, TaskKind task,
DatasetColumnInfo[] columns)
{
var inferredTransforms = new List();
@@ -29,7 +29,7 @@ private static IEnumerable InferLabelTransforms(MLContext co
// If label column type wasn't originally key type,
// convert predicted label column back from key to value.
- // (Non-key label column was converted to key, b/c multiclass trainers only
+ // (Non-key label column was converted to key, b/c multiclass trainers only
// accept label columns that are key type)
var labelColumn = columns.First(c => c.Purpose == ColumnPurpose.Label);
if (!labelColumn.Type.IsKey())
diff --git a/src/Microsoft.ML.AutoML/Utils/MLNetUtils/ArrayDataViewBuilder.cs b/src/Microsoft.ML.AutoML/Utils/MLNetUtils/ArrayDataViewBuilder.cs
index ace65288c5..d22087c6a4 100644
--- a/src/Microsoft.ML.AutoML/Utils/MLNetUtils/ArrayDataViewBuilder.cs
+++ b/src/Microsoft.ML.AutoML/Utils/MLNetUtils/ArrayDataViewBuilder.cs
@@ -182,7 +182,7 @@ private sealed class DataView : IDataView
public DataView(IHostEnvironment env, ArrayDataViewBuilder builder, int rowCount)
{
_host = env.Register("ArrayDataView");
-
+
_columns = builder._columns.ToArray();
var schemaBuilder = new DataViewSchema.Builder();
@@ -205,14 +205,14 @@ public DataView(IHostEnvironment env, ArrayDataViewBuilder builder, int rowCount
public DataViewRowCursor GetRowCursor(IEnumerable columnsNeeded, Random rand = null)
{
var predicate = RowCursorUtils.FromColumnsToPredicate(columnsNeeded, Schema);
-
+
return new Cursor(_host, this, predicate, rand);
}
public DataViewRowCursor[] GetRowCursorSet(IEnumerable columnsNeeded, int n, Random rand = null)
{
var predicate = RowCursorUtils.FromColumnsToPredicate(columnsNeeded, Schema);
-
+
return new DataViewRowCursor[] { new Cursor(_host, this, predicate, rand) };
}
diff --git a/src/Microsoft.ML.AutoML/Utils/MLNetUtils/VBufferUtils.cs b/src/Microsoft.ML.AutoML/Utils/MLNetUtils/VBufferUtils.cs
index d007b418e5..54a7884210 100644
--- a/src/Microsoft.ML.AutoML/Utils/MLNetUtils/VBufferUtils.cs
+++ b/src/Microsoft.ML.AutoML/Utils/MLNetUtils/VBufferUtils.cs
@@ -2,8 +2,8 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
-using Microsoft.ML.Data;
using System;
+using Microsoft.ML.Data;
namespace Microsoft.ML.AutoML
{
diff --git a/src/Microsoft.ML.AutoML/Utils/SplitUtil.cs b/src/Microsoft.ML.AutoML/Utils/SplitUtil.cs
index cc77f0fbc9..aa98cfa0a8 100644
--- a/src/Microsoft.ML.AutoML/Utils/SplitUtil.cs
+++ b/src/Microsoft.ML.AutoML/Utils/SplitUtil.cs
@@ -1,6 +1,6 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-// See the LICENSE file in the project root for more information.
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
@@ -10,14 +10,14 @@ namespace Microsoft.ML.AutoML
{
internal static class SplitUtil
{
- public static (IDataView[] trainDatasets, IDataView[] validationDatasets) CrossValSplit(MLContext context,
+ public static (IDataView[] trainDatasets, IDataView[] validationDatasets) CrossValSplit(MLContext context,
IDataView trainData, uint numFolds, string samplingKeyColumn)
{
var originalColumnNames = trainData.Schema.Select(c => c.Name);
var splits = context.Data.CrossValidationSplit(trainData, (int)numFolds, samplingKeyColumnName: samplingKeyColumn);
var trainDatasets = new List();
var validationDatasets = new List();
-
+
foreach (var split in splits)
{
if (DatasetDimensionsUtil.IsDataViewEmpty(split.TrainSet) ||
@@ -46,7 +46,7 @@ public static (IDataView[] trainDatasets, IDataView[] validationDatasets) CrossV
///
/// Split the data into a single train/test split.
///
- public static (IDataView trainData, IDataView validationData) TrainValidateSplit(MLContext context, IDataView trainData,
+ public static (IDataView trainData, IDataView validationData) TrainValidateSplit(MLContext context, IDataView trainData,
string samplingKeyColumn)
{
var originalColumnNames = trainData.Schema.Select(c => c.Name);
diff --git a/src/mlnet/AutoML/AutoMLEngine.cs b/src/mlnet/AutoML/AutoMLEngine.cs
index 7ad28b3cd4..c8d1f9a38b 100644
--- a/src/mlnet/AutoML/AutoMLEngine.cs
+++ b/src/mlnet/AutoML/AutoMLEngine.cs
@@ -14,31 +14,31 @@ namespace Microsoft.ML.CLI.CodeGenerator
{
internal class AutoMLEngine : IAutoMLEngine
{
- private NewCommandSettings settings;
- private TaskKind taskKind;
- private CacheBeforeTrainer cacheBeforeTrainer;
- private static Logger logger = LogManager.GetCurrentClassLogger();
+ private NewCommandSettings _settings;
+ private TaskKind _taskKind;
+ private CacheBeforeTrainer _cacheBeforeTrainer;
+ private static Logger _logger = LogManager.GetCurrentClassLogger();
public AutoMLEngine(NewCommandSettings settings)
{
- this.settings = settings;
- this.taskKind = Utils.GetTaskKind(settings.MlTask);
- this.cacheBeforeTrainer = Utils.GetCacheSettings(settings.Cache);
+ _settings = settings;
+ _taskKind = Utils.GetTaskKind(settings.MlTask);
+ _cacheBeforeTrainer = Utils.GetCacheSettings(settings.Cache);
}
public ColumnInferenceResults InferColumns(MLContext context, ColumnInformation columnInformation)
{
// Check what overload method of InferColumns needs to be called.
- logger.Log(LogLevel.Trace, Strings.InferColumns);
+ _logger.Log(LogLevel.Trace, Strings.InferColumns);
ColumnInferenceResults columnInference = null;
- var dataset = settings.Dataset.FullName;
+ var dataset = _settings.Dataset.FullName;
if (columnInformation.LabelColumnName != null)
{
columnInference = context.Auto().InferColumns(dataset, columnInformation, groupColumns: false);
}
else
{
- columnInference = context.Auto().InferColumns(dataset, settings.LabelColumnIndex, hasHeader: settings.HasHeader, groupColumns: false);
+ columnInference = context.Auto().InferColumns(dataset, _settings.LabelColumnIndex, hasHeader: _settings.HasHeader, groupColumns: false);
}
return columnInference;
@@ -49,13 +49,13 @@ void IAutoMLEngine.ExploreBinaryClassificationModels(MLContext context, IDataVie
ExperimentResult result = context.Auto()
.CreateBinaryClassificationExperiment(new BinaryExperimentSettings()
{
- MaxExperimentTimeInSeconds = settings.MaxExplorationTime,
- CacheBeforeTrainer = this.cacheBeforeTrainer,
+ MaxExperimentTimeInSeconds = _settings.MaxExplorationTime,
+ CacheBeforeTrainer = _cacheBeforeTrainer,
OptimizingMetric = optimizationMetric
})
.Execute(trainData, validationData, columnInformation, progressHandler: handler);
- logger.Log(LogLevel.Trace, Strings.RetrieveBestPipeline);
+ _logger.Log(LogLevel.Trace, Strings.RetrieveBestPipeline);
}
void IAutoMLEngine.ExploreRegressionModels(MLContext context, IDataView trainData, IDataView validationData, ColumnInformation columnInformation, RegressionMetric optimizationMetric, ProgressHandlers.RegressionHandler handler, ProgressBar progressBar)
@@ -63,12 +63,12 @@ void IAutoMLEngine.ExploreRegressionModels(MLContext context, IDataView trainDat
ExperimentResult result = context.Auto()
.CreateRegressionExperiment(new RegressionExperimentSettings()
{
- MaxExperimentTimeInSeconds = settings.MaxExplorationTime,
+ MaxExperimentTimeInSeconds = _settings.MaxExplorationTime,
OptimizingMetric = optimizationMetric,
- CacheBeforeTrainer = this.cacheBeforeTrainer
+ CacheBeforeTrainer = _cacheBeforeTrainer
}).Execute(trainData, validationData, columnInformation, progressHandler: handler);
- logger.Log(LogLevel.Trace, Strings.RetrieveBestPipeline);
+ _logger.Log(LogLevel.Trace, Strings.RetrieveBestPipeline);
}
void IAutoMLEngine.ExploreMultiClassificationModels(MLContext context, IDataView trainData, IDataView validationData, ColumnInformation columnInformation, MulticlassClassificationMetric optimizationMetric, ProgressHandlers.MulticlassClassificationHandler handler, ProgressBar progressBar)
@@ -76,12 +76,12 @@ void IAutoMLEngine.ExploreMultiClassificationModels(MLContext context, IDataView
ExperimentResult result = context.Auto()
.CreateMulticlassClassificationExperiment(new MulticlassExperimentSettings()
{
- MaxExperimentTimeInSeconds = settings.MaxExplorationTime,
- CacheBeforeTrainer = this.cacheBeforeTrainer,
+ MaxExperimentTimeInSeconds = _settings.MaxExplorationTime,
+ CacheBeforeTrainer = _cacheBeforeTrainer,
OptimizingMetric = optimizationMetric
}).Execute(trainData, validationData, columnInformation, progressHandler: handler);
- logger.Log(LogLevel.Trace, Strings.RetrieveBestPipeline);
+ _logger.Log(LogLevel.Trace, Strings.RetrieveBestPipeline);
}
}
diff --git a/src/mlnet/CodeGenerator/CSharp/CodeGenerator.cs b/src/mlnet/CodeGenerator/CSharp/CodeGenerator.cs
index c9ba930c21..e32b8180f9 100644
--- a/src/mlnet/CodeGenerator/CSharp/CodeGenerator.cs
+++ b/src/mlnet/CodeGenerator/CSharp/CodeGenerator.cs
@@ -16,25 +16,24 @@ namespace Microsoft.ML.CLI.CodeGenerator.CSharp
{
internal class CodeGenerator : IProjectGenerator
{
- private readonly Pipeline pipeline;
- private readonly CodeGeneratorSettings settings;
- private readonly ColumnInferenceResults columnInferenceResult;
- private readonly HashSet LightGBMTrainers = new HashSet() { TrainerName.LightGbmBinary.ToString(), TrainerName.LightGbmMulti.ToString(), TrainerName.LightGbmRegression.ToString() };
- private readonly HashSet mklComponentsTrainers = new HashSet() { TrainerName.OlsRegression.ToString(), TrainerName.SymbolicSgdLogisticRegressionBinary.ToString() };
- private readonly HashSet FastTreeTrainers = new HashSet() { TrainerName.FastForestBinary.ToString(), TrainerName.FastForestRegression.ToString(), TrainerName.FastTreeBinary.ToString(), TrainerName.FastTreeRegression.ToString(), TrainerName.FastTreeTweedieRegression.ToString() };
-
+ private readonly Pipeline _pipeline;
+ private readonly CodeGeneratorSettings _settings;
+ private readonly ColumnInferenceResults _columnInferenceResult;
+ private static readonly HashSet _lightGbmTrainers = new HashSet() { TrainerName.LightGbmBinary.ToString(), TrainerName.LightGbmMulti.ToString(), TrainerName.LightGbmRegression.ToString() };
+ private static readonly HashSet _mklComponentsTrainers = new HashSet() { TrainerName.OlsRegression.ToString(), TrainerName.SymbolicSgdLogisticRegressionBinary.ToString() };
+ private static readonly HashSet _fastTreeTrainers = new HashSet() { TrainerName.FastForestBinary.ToString(), TrainerName.FastForestRegression.ToString(), TrainerName.FastTreeBinary.ToString(), TrainerName.FastTreeRegression.ToString(), TrainerName.FastTreeTweedieRegression.ToString() };
internal CodeGenerator(Pipeline pipeline, ColumnInferenceResults columnInferenceResult, CodeGeneratorSettings settings)
{
- this.pipeline = pipeline;
- this.columnInferenceResult = columnInferenceResult;
- this.settings = settings;
+ _pipeline = pipeline;
+ _columnInferenceResult = columnInferenceResult;
+ _settings = settings;
}
public void GenerateOutput()
{
// Get the extra nuget packages to be included in the generated project.
- var trainerNodes = pipeline.Nodes.Where(t => t.NodeType == PipelineNodeType.Trainer);
+ var trainerNodes = _pipeline.Nodes.Where(t => t.NodeType == PipelineNodeType.Trainer);
bool includeLightGbmPackage = false;
bool includeMklComponentsPackage = false;
@@ -42,17 +41,17 @@ public void GenerateOutput()
SetRequiredNugetPackages(trainerNodes, ref includeLightGbmPackage, ref includeMklComponentsPackage, ref includeFastTreeePackage);
// Get Namespace
- var namespaceValue = Utils.Normalize(settings.OutputName);
- var labelType = columnInferenceResult.TextLoaderOptions.Columns.Where(t => t.Name == settings.LabelName).First().DataKind;
+ var namespaceValue = Utils.Normalize(_settings.OutputName);
+ var labelType = _columnInferenceResult.TextLoaderOptions.Columns.Where(t => t.Name == _settings.LabelName).First().DataKind;
Type labelTypeCsharp = Utils.GetCSharpType(labelType);
// Generate Model Project
var modelProjectContents = GenerateModelProjectContents(namespaceValue, labelTypeCsharp, includeLightGbmPackage, includeMklComponentsPackage, includeFastTreeePackage);
- // Write files to disk.
- var modelprojectDir = Path.Combine(settings.OutputBaseDir, $"{settings.OutputName}.Model");
+ // Write files to disk.
+ var modelprojectDir = Path.Combine(_settings.OutputBaseDir, $"{_settings.OutputName}.Model");
var dataModelsDir = Path.Combine(modelprojectDir, "DataModels");
- var modelProjectName = $"{settings.OutputName}.Model.csproj";
+ var modelProjectName = $"{_settings.OutputName}.Model.csproj";
Utils.WriteOutputToFiles(modelProjectContents.ModelInputCSFileContent, "ModelInput.cs", dataModelsDir);
Utils.WriteOutputToFiles(modelProjectContents.ModelOutputCSFileContent, "ModelOutput.cs", dataModelsDir);
@@ -61,19 +60,19 @@ public void GenerateOutput()
// Generate ConsoleApp Project
var consoleAppProjectContents = GenerateConsoleAppProjectContents(namespaceValue, labelTypeCsharp, includeLightGbmPackage, includeMklComponentsPackage, includeFastTreeePackage);
- // Write files to disk.
- var consoleAppProjectDir = Path.Combine(settings.OutputBaseDir, $"{settings.OutputName}.ConsoleApp");
- var consoleAppProjectName = $"{settings.OutputName}.ConsoleApp.csproj";
+ // Write files to disk.
+ var consoleAppProjectDir = Path.Combine(_settings.OutputBaseDir, $"{_settings.OutputName}.ConsoleApp");
+ var consoleAppProjectName = $"{_settings.OutputName}.ConsoleApp.csproj";
Utils.WriteOutputToFiles(consoleAppProjectContents.ConsoleAppProgramCSFileContent, "Program.cs", consoleAppProjectDir);
Utils.WriteOutputToFiles(consoleAppProjectContents.modelBuilderCSFileContent, "ModelBuilder.cs", consoleAppProjectDir);
Utils.WriteOutputToFiles(consoleAppProjectContents.ConsoleAppProjectFileContent, consoleAppProjectName, consoleAppProjectDir);
// New solution file.
- Utils.CreateSolutionFile(settings.OutputName, settings.OutputBaseDir);
+ Utils.CreateSolutionFile(_settings.OutputName, _settings.OutputBaseDir);
// Add projects to solution
- var solutionPath = Path.Combine(settings.OutputBaseDir, $"{settings.OutputName}.sln");
+ var solutionPath = Path.Combine(_settings.OutputBaseDir, $"{_settings.OutputName}.sln");
Utils.AddProjectsToSolution(modelprojectDir, modelProjectName, consoleAppProjectDir, consoleAppProjectName, solutionPath);
}
@@ -87,15 +86,15 @@ private void SetRequiredNugetPackages(IEnumerable trainerNodes, re
currentNode = (PipelineNode)currentNode.Properties["BinaryTrainer"];
}
- if (LightGBMTrainers.Contains(currentNode.Name))
+ if (_lightGbmTrainers.Contains(currentNode.Name))
{
includeLightGbmPackage = true;
}
- else if (mklComponentsTrainers.Contains(currentNode.Name))
+ else if (_mklComponentsTrainers.Contains(currentNode.Name))
{
includeMklComponentsPackage = true;
}
- else if (FastTreeTrainers.Contains(currentNode.Name))
+ else if (_fastTreeTrainers.Contains(currentNode.Name))
{
includeFastTreePackage = true;
}
@@ -110,7 +109,7 @@ private void SetRequiredNugetPackages(IEnumerable trainerNodes, re
var predictProjectFileContent = GeneratPredictProjectFileContent(namespaceValue, includeLightGbmPackage, includeMklComponentsPackage, includeFastTreePackage);
var transformsAndTrainers = GenerateTransformsAndTrainers();
- var modelBuilderCSFileContent = GenerateModelBuilderCSFileContent(transformsAndTrainers.Usings, transformsAndTrainers.TrainerMethod, transformsAndTrainers.PreTrainerTransforms, transformsAndTrainers.PostTrainerTransforms, namespaceValue, pipeline.CacheBeforeTrainer, labelTypeCsharp.Name);
+ var modelBuilderCSFileContent = GenerateModelBuilderCSFileContent(transformsAndTrainers.Usings, transformsAndTrainers.TrainerMethod, transformsAndTrainers.PreTrainerTransforms, transformsAndTrainers.PostTrainerTransforms, namespaceValue, _pipeline.CacheBeforeTrainer, labelTypeCsharp.Name);
modelBuilderCSFileContent = Utils.FormatCode(modelBuilderCSFileContent);
return (predictProgramCSFileContent, predictProjectFileContent, modelBuilderCSFileContent);
@@ -118,7 +117,7 @@ private void SetRequiredNugetPackages(IEnumerable trainerNodes, re
internal (string ModelInputCSFileContent, string ModelOutputCSFileContent, string ModelProjectFileContent) GenerateModelProjectContents(string namespaceValue, Type labelTypeCsharp, bool includeLightGbmPackage, bool includeMklComponentsPackage, bool includeFastTreePackage)
{
- var classLabels = this.GenerateClassLabels();
+ var classLabels = GenerateClassLabels();
var modelInputCSFileContent = GenerateModelInputCSFileContent(namespaceValue, classLabels);
modelInputCSFileContent = Utils.FormatCode(modelInputCSFileContent);
var modelOutputCSFileContent = GenerateModelOutputCSFileContent(labelTypeCsharp.Name, namespaceValue);
@@ -133,17 +132,17 @@ private void SetRequiredNugetPackages(IEnumerable trainerNodes, re
var usings = new List();
// Get pre-trainer transforms
- var nodes = pipeline.Nodes.TakeWhile(t => t.NodeType == PipelineNodeType.Transform);
- var preTrainerTransformsAndUsings = this.GenerateTransformsAndUsings(nodes);
+ var nodes = _pipeline.Nodes.TakeWhile(t => t.NodeType == PipelineNodeType.Transform);
+ var preTrainerTransformsAndUsings = GenerateTransformsAndUsings(nodes);
// Get post trainer transforms
- nodes = pipeline.Nodes.SkipWhile(t => t.NodeType == PipelineNodeType.Transform)
+ nodes = _pipeline.Nodes.SkipWhile(t => t.NodeType == PipelineNodeType.Transform)
.SkipWhile(t => t.NodeType == PipelineNodeType.Trainer) //skip the trainer
.TakeWhile(t => t.NodeType == PipelineNodeType.Transform); //post trainer transforms
- var postTrainerTransformsAndUsings = this.GenerateTransformsAndUsings(nodes);
+ var postTrainerTransformsAndUsings = GenerateTransformsAndUsings(nodes);
//Get trainer code and its associated usings.
- (string trainerMethod, string[] trainerUsings) = this.GenerateTrainerAndUsings();
+ (string trainerMethod, string[] trainerUsings) = GenerateTrainerAndUsings();
if (trainerUsings != null)
{
usings.AddRange(trainerUsings);
@@ -183,9 +182,9 @@ private void SetRequiredNugetPackages(IEnumerable trainerNodes, re
internal (string, string[]) GenerateTrainerAndUsings()
{
- if (pipeline == null)
- throw new ArgumentNullException(nameof(pipeline));
- var node = pipeline.Nodes.Where(t => t.NodeType == PipelineNodeType.Trainer).First();
+ if (_pipeline == null)
+ throw new ArgumentNullException(nameof(_pipeline));
+ var node = _pipeline.Nodes.Where(t => t.NodeType == PipelineNodeType.Trainer).First();
if (node == null)
throw new ArgumentException($"The trainer was not found.");
@@ -198,7 +197,7 @@ private void SetRequiredNugetPackages(IEnumerable trainerNodes, re
internal IList GenerateClassLabels()
{
IList result = new List();
- foreach (var column in columnInferenceResult.TextLoaderOptions.Columns)
+ foreach (var column in _columnInferenceResult.TextLoaderOptions.Columns)
{
StringBuilder sb = new StringBuilder();
int range = (column.Source[0].Max - column.Source[0].Min).Value;
@@ -263,7 +262,7 @@ private static string GenerateModelProjectFileContent(bool includeLightGbmPackag
private string GenerateModelOutputCSFileContent(string predictionLabelType, string namespaceValue)
{
- ModelOutputClass modelOutputClass = new ModelOutputClass() { TaskType = settings.MlTask.ToString(), PredictionLabelType = predictionLabelType, Namespace = namespaceValue };
+ ModelOutputClass modelOutputClass = new ModelOutputClass() { TaskType = _settings.MlTask.ToString(), PredictionLabelType = predictionLabelType, Namespace = namespaceValue };
return modelOutputClass.TransformText();
}
@@ -285,15 +284,15 @@ private string GeneratePredictProgramCSFileContent(string namespaceValue)
{
PredictProgram predictProgram = new PredictProgram()
{
- TaskType = settings.MlTask.ToString(),
- LabelName = settings.LabelName,
+ TaskType = _settings.MlTask.ToString(),
+ LabelName = _settings.LabelName,
Namespace = namespaceValue,
- TestDataPath = settings.TestDataset,
- TrainDataPath = settings.TrainDataset,
- HasHeader = columnInferenceResult.TextLoaderOptions.HasHeader,
- Separator = columnInferenceResult.TextLoaderOptions.Separators.FirstOrDefault(),
- AllowQuoting = columnInferenceResult.TextLoaderOptions.AllowQuoting,
- AllowSparse = columnInferenceResult.TextLoaderOptions.AllowSparse,
+ TestDataPath = _settings.TestDataset,
+ TrainDataPath = _settings.TrainDataset,
+ HasHeader = _columnInferenceResult.TextLoaderOptions.HasHeader,
+ Separator = _columnInferenceResult.TextLoaderOptions.Separators.FirstOrDefault(),
+ AllowQuoting = _columnInferenceResult.TextLoaderOptions.AllowQuoting,
+ AllowSparse = _columnInferenceResult.TextLoaderOptions.AllowSparse,
};
return predictProgram.TransformText();
}
@@ -310,17 +309,17 @@ private string GenerateModelBuilderCSFileContent(string usings,
{
PreTrainerTransforms = preTrainerTransforms,
PostTrainerTransforms = postTrainerTransforms,
- HasHeader = columnInferenceResult.TextLoaderOptions.HasHeader,
- Separator = columnInferenceResult.TextLoaderOptions.Separators.FirstOrDefault(),
- AllowQuoting = columnInferenceResult.TextLoaderOptions.AllowQuoting,
- AllowSparse = columnInferenceResult.TextLoaderOptions.AllowSparse,
+ HasHeader = _columnInferenceResult.TextLoaderOptions.HasHeader,
+ Separator = _columnInferenceResult.TextLoaderOptions.Separators.FirstOrDefault(),
+ AllowQuoting = _columnInferenceResult.TextLoaderOptions.AllowQuoting,
+ AllowSparse = _columnInferenceResult.TextLoaderOptions.AllowSparse,
Trainer = trainerMethod,
GeneratedUsings = usings,
- Path = settings.TrainDataset,
- TestPath = settings.TestDataset,
- TaskType = settings.MlTask.ToString(),
+ Path = _settings.TrainDataset,
+ TestPath = _settings.TestDataset,
+ TaskType = _settings.MlTask.ToString(),
Namespace = namespaceValue,
- LabelName = settings.LabelName,
+ LabelName = _settings.LabelName,
CacheBeforeTrainer = cacheBeforeTrainer,
};
diff --git a/src/mlnet/CodeGenerator/CSharp/TrainerGeneratorBase.cs b/src/mlnet/CodeGenerator/CSharp/TrainerGeneratorBase.cs
index c5bfc33c30..760ae4519e 100644
--- a/src/mlnet/CodeGenerator/CSharp/TrainerGeneratorBase.cs
+++ b/src/mlnet/CodeGenerator/CSharp/TrainerGeneratorBase.cs
@@ -16,10 +16,9 @@ namespace Microsoft.ML.CLI.CodeGenerator.CSharp
///
internal abstract class TrainerGeneratorBase : ITrainerGenerator
{
- private PipelineNode node;
- private Dictionary arguments = new Dictionary();
- private bool hasAdvancedSettings = false;
- private string seperator = null;
+ private Dictionary _arguments;
+ private bool _hasAdvancedSettings;
+ private string _seperator;
//abstract properties
internal abstract string OptionsName { get; }
@@ -38,12 +37,12 @@ protected TrainerGeneratorBase(PipelineNode node)
private void Initialize(PipelineNode node)
{
- this.node = node;
+ _arguments = new Dictionary();
if (NamedParameters != null)
{
- hasAdvancedSettings = node.Properties.Keys.Any(t => !NamedParameters.ContainsKey(t));
+ _hasAdvancedSettings = node.Properties.Keys.Any(t => !NamedParameters.ContainsKey(t));
}
- seperator = hasAdvancedSettings ? "=" : ":";
+ _seperator = _hasAdvancedSettings ? "=" : ":";
if (!node.Properties.ContainsKey("LabelColumnName"))
{
node.Properties.Add("LabelColumnName", "Label");
@@ -102,11 +101,11 @@ private void Initialize(PipelineNode node)
if (NamedParameters != null)
{
- arguments.Add(hasAdvancedSettings ? kv.Key : NamedParameters[kv.Key], value);
+ _arguments.Add(_hasAdvancedSettings ? kv.Key : NamedParameters[kv.Key], value);
}
else
{
- arguments.Add(kv.Key, value);
+ _arguments.Add(kv.Key, value);
}
}
@@ -148,14 +147,14 @@ public virtual string GenerateTrainer()
StringBuilder sb = new StringBuilder();
sb.Append(MethodName);
sb.Append("(");
- if (hasAdvancedSettings)
+ if (_hasAdvancedSettings)
{
- var paramString = BuildComplexParameter(OptionsName, arguments, "=");
+ var paramString = BuildComplexParameter(OptionsName, _arguments, "=");
sb.Append(paramString);
}
else
{
- sb.Append(AppendArguments(arguments, ":"));
+ sb.Append(AppendArguments(_arguments, ":"));
}
sb.Append(")");
return sb.ToString();
@@ -163,7 +162,7 @@ public virtual string GenerateTrainer()
public virtual string[] GenerateUsings()
{
- if (hasAdvancedSettings)
+ if (_hasAdvancedSettings)
return Usings;
return null;
diff --git a/src/mlnet/CodeGenerator/CSharp/TrainerGenerators.cs b/src/mlnet/CodeGenerator/CSharp/TrainerGenerators.cs
index 3e0d0861c5..c2d08a4b84 100644
--- a/src/mlnet/CodeGenerator/CSharp/TrainerGenerators.cs
+++ b/src/mlnet/CodeGenerator/CSharp/TrainerGenerators.cs
@@ -514,8 +514,8 @@ public SymbolicSgdLogisticRegressionBinary(PipelineNode node) : base(node)
internal class OneVersusAll : TrainerGeneratorBase
{
- private PipelineNode node;
- private string[] binaryTrainerUsings = null;
+ private PipelineNode _node;
+ private string[] _binaryTrainerUsings;
//ClassName of the trainer
internal override string MethodName => "OneVersusAll";
@@ -530,7 +530,7 @@ internal class OneVersusAll : TrainerGeneratorBase
public OneVersusAll(PipelineNode node) : base(node)
{
- this.node = node;
+ _node = node;
}
public override string GenerateTrainer()
@@ -539,13 +539,13 @@ public override string GenerateTrainer()
sb.Append(MethodName);
sb.Append("(");
sb.Append("mlContext.BinaryClassification.Trainers."); // This is dependent on the name of the MLContext object in template.
- var trainerGenerator = TrainerGeneratorFactory.GetInstance((PipelineNode)this.node.Properties["BinaryTrainer"]);
- binaryTrainerUsings = trainerGenerator.GenerateUsings();
+ var trainerGenerator = TrainerGeneratorFactory.GetInstance((PipelineNode)_node.Properties["BinaryTrainer"]);
+ _binaryTrainerUsings = trainerGenerator.GenerateUsings();
sb.Append(trainerGenerator.GenerateTrainer());
sb.Append(",");
sb.Append("labelColumnName:");
sb.Append("\"");
- sb.Append(node.Properties["LabelColumnName"]);
+ sb.Append(_node.Properties["LabelColumnName"]);
sb.Append("\"");
sb.Append(")");
return sb.ToString();
@@ -553,7 +553,7 @@ public override string GenerateTrainer()
public override string[] GenerateUsings()
{
- return binaryTrainerUsings;
+ return _binaryTrainerUsings;
}
}
}
diff --git a/src/mlnet/CodeGenerator/CSharp/TransformGeneratorBase.cs b/src/mlnet/CodeGenerator/CSharp/TransformGeneratorBase.cs
index 9c77bab4aa..671d4a9f58 100644
--- a/src/mlnet/CodeGenerator/CSharp/TransformGeneratorBase.cs
+++ b/src/mlnet/CodeGenerator/CSharp/TransformGeneratorBase.cs
@@ -17,9 +17,9 @@ internal abstract class TransformGeneratorBase : ITransformGenerator
internal virtual string[] Usings => null;
- protected string[] inputColumns;
+ protected string[] InputColumns;
- protected string[] outputColumns;
+ protected string[] OutputColumns;
///
/// Generates an instance of TrainerGenerator
@@ -32,17 +32,17 @@ protected TransformGeneratorBase(PipelineNode node)
private void Initialize(PipelineNode node)
{
- inputColumns = new string[node.InColumns.Length];
- outputColumns = new string[node.OutColumns.Length];
+ InputColumns = new string[node.InColumns.Length];
+ OutputColumns = new string[node.OutColumns.Length];
int i = 0;
foreach (var column in node.InColumns)
{
- inputColumns[i++] = "\"" + column + "\"";
+ InputColumns[i++] = "\"" + column + "\"";
}
i = 0;
foreach (var column in node.OutColumns)
{
- outputColumns[i++] = "\"" + column + "\"";
+ OutputColumns[i++] = "\"" + column + "\"";
}
}
diff --git a/src/mlnet/CodeGenerator/CSharp/TransformGeneratorFactory.cs b/src/mlnet/CodeGenerator/CSharp/TransformGeneratorFactory.cs
index 2eb670aa01..b49db3b9b2 100644
--- a/src/mlnet/CodeGenerator/CSharp/TransformGeneratorFactory.cs
+++ b/src/mlnet/CodeGenerator/CSharp/TransformGeneratorFactory.cs
@@ -5,7 +5,6 @@
using System;
using Microsoft.ML.AutoML;
-
namespace Microsoft.ML.CLI.CodeGenerator.CSharp
{
internal interface ITransformGenerator
diff --git a/src/mlnet/CodeGenerator/CSharp/TransformGenerators.cs b/src/mlnet/CodeGenerator/CSharp/TransformGenerators.cs
index 550e3c32c2..0daf980671 100644
--- a/src/mlnet/CodeGenerator/CSharp/TransformGenerators.cs
+++ b/src/mlnet/CodeGenerator/CSharp/TransformGenerators.cs
@@ -20,8 +20,8 @@ public Normalizer(PipelineNode node) : base(node)
public override string GenerateTransformer()
{
StringBuilder sb = new StringBuilder();
- string inputColumn = inputColumns.Count() > 0 ? inputColumns[0] : "\"Features\"";
- string outputColumn = outputColumns.Count() > 0 ? outputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
+ string inputColumn = InputColumns.Count() > 0 ? InputColumns[0] : "\"Features\"";
+ string outputColumn = OutputColumns.Count() > 0 ? OutputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
sb.Append(MethodName);
sb.Append("(");
sb.Append(outputColumn);
@@ -40,7 +40,7 @@ public OneHotEncoding(PipelineNode node) : base(node)
internal override string MethodName => "Categorical.OneHotEncoding";
- private string ArgumentsName = "InputOutputColumnPair";
+ private const string ArgumentsName = "InputOutputColumnPair";
public override string GenerateTransformer()
{
@@ -48,14 +48,14 @@ public override string GenerateTransformer()
sb.Append(MethodName);
sb.Append("(");
sb.Append("new []{");
- for (int i = 0; i < inputColumns.Length; i++)
+ for (int i = 0; i < InputColumns.Length; i++)
{
sb.Append("new ");
sb.Append(ArgumentsName);
sb.Append("(");
- sb.Append(outputColumns[i]);
+ sb.Append(OutputColumns[i]);
sb.Append(",");
- sb.Append(inputColumns[i]);
+ sb.Append(InputColumns[i]);
sb.Append(")");
sb.Append(",");
}
@@ -78,14 +78,14 @@ public ColumnConcat(PipelineNode node) : base(node)
public override string GenerateTransformer()
{
StringBuilder sb = new StringBuilder();
- string inputColumn = inputColumns.Count() > 0 ? inputColumns[0] : "\"Features\"";
- string outputColumn = outputColumns.Count() > 0 ? outputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
+ string inputColumn = InputColumns.Count() > 0 ? InputColumns[0] : "\"Features\"";
+ string outputColumn = OutputColumns.Count() > 0 ? OutputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
sb.Append(MethodName);
sb.Append("(");
sb.Append(outputColumn);
sb.Append(",");
sb.Append("new []{");
- foreach (var col in inputColumns)
+ foreach (var col in InputColumns)
{
sb.Append(col);
sb.Append(",");
@@ -108,8 +108,8 @@ public ColumnCopying(PipelineNode node) : base(node)
public override string GenerateTransformer()
{
StringBuilder sb = new StringBuilder();
- string inputColumn = inputColumns.Count() > 0 ? inputColumns[0] : "\"Features\"";
- string outputColumn = outputColumns.Count() > 0 ? outputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
+ string inputColumn = InputColumns.Count() > 0 ? InputColumns[0] : "\"Features\"";
+ string outputColumn = OutputColumns.Count() > 0 ? OutputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
sb.Append(MethodName);
sb.Append("(");
sb.Append(outputColumn);
@@ -131,8 +131,8 @@ public KeyToValueMapping(PipelineNode node) : base(node)
public override string GenerateTransformer()
{
StringBuilder sb = new StringBuilder();
- string inputColumn = inputColumns.Count() > 0 ? inputColumns[0] : "\"Features\"";
- string outputColumn = outputColumns.Count() > 0 ? outputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
+ string inputColumn = InputColumns.Count() > 0 ? InputColumns[0] : "\"Features\"";
+ string outputColumn = OutputColumns.Count() > 0 ? OutputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
sb.Append(MethodName);
sb.Append("(");
sb.Append(outputColumn);
@@ -151,24 +151,24 @@ public MissingValueIndicator(PipelineNode node) : base(node)
internal override string MethodName => "IndicateMissingValues";
- private string ArgumentsName = "InputOutputColumnPair";
+ private const string ArgumentsName = "InputOutputColumnPair";
public override string GenerateTransformer()
{
StringBuilder sb = new StringBuilder();
- string inputColumn = inputColumns.Count() > 0 ? inputColumns[0] : "\"Features\"";
- string outputColumn = outputColumns.Count() > 0 ? outputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
+ string inputColumn = InputColumns.Count() > 0 ? InputColumns[0] : "\"Features\"";
+ string outputColumn = OutputColumns.Count() > 0 ? OutputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
sb.Append(MethodName);
sb.Append("(");
sb.Append("new []{");
- for (int i = 0; i < inputColumns.Length; i++)
+ for (int i = 0; i < InputColumns.Length; i++)
{
sb.Append("new ");
sb.Append(ArgumentsName);
sb.Append("(");
- sb.Append(outputColumns[i]);
+ sb.Append(OutputColumns[i]);
sb.Append(",");
- sb.Append(inputColumns[i]);
+ sb.Append(InputColumns[i]);
sb.Append(")");
sb.Append(",");
}
@@ -187,7 +187,7 @@ public MissingValueReplacer(PipelineNode node) : base(node)
internal override string MethodName => "ReplaceMissingValues";
- private string ArgumentsName = "InputOutputColumnPair";
+ private const string ArgumentsName = "InputOutputColumnPair";
public override string GenerateTransformer()
{
@@ -195,14 +195,14 @@ public override string GenerateTransformer()
sb.Append(MethodName);
sb.Append("(");
sb.Append("new []{");
- for (int i = 0; i < inputColumns.Length; i++)
+ for (int i = 0; i < InputColumns.Length; i++)
{
sb.Append("new ");
sb.Append(ArgumentsName);
sb.Append("(");
- sb.Append(outputColumns[i]);
+ sb.Append(OutputColumns[i]);
sb.Append(",");
- sb.Append(inputColumns[i]);
+ sb.Append(InputColumns[i]);
sb.Append(")");
sb.Append(",");
}
@@ -222,7 +222,7 @@ public OneHotHashEncoding(PipelineNode node) : base(node)
internal override string MethodName => "Categorical.OneHotHashEncoding";
- private string ArgumentsName = "InputOutputColumnPair";
+ private const string ArgumentsName = "InputOutputColumnPair";
public override string GenerateTransformer()
{
@@ -230,14 +230,14 @@ public override string GenerateTransformer()
sb.Append(MethodName);
sb.Append("(");
sb.Append("new []{");
- for (int i = 0; i < inputColumns.Length; i++)
+ for (int i = 0; i < InputColumns.Length; i++)
{
sb.Append("new ");
sb.Append(ArgumentsName);
sb.Append("(");
- sb.Append(outputColumns[i]);
+ sb.Append(OutputColumns[i]);
sb.Append(",");
- sb.Append(inputColumns[i]);
+ sb.Append(InputColumns[i]);
sb.Append(")");
sb.Append(",");
}
@@ -260,8 +260,8 @@ public TextFeaturizing(PipelineNode node) : base(node)
public override string GenerateTransformer()
{
StringBuilder sb = new StringBuilder();
- string inputColumn = inputColumns.Count() > 0 ? inputColumns[0] : "\"Features\"";
- string outputColumn = outputColumns.Count() > 0 ? outputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
+ string inputColumn = InputColumns.Count() > 0 ? InputColumns[0] : "\"Features\"";
+ string outputColumn = OutputColumns.Count() > 0 ? OutputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
sb.Append(MethodName);
sb.Append("(");
sb.Append(outputColumn);
@@ -280,7 +280,7 @@ public TypeConverting(PipelineNode node) : base(node)
internal override string MethodName => "Conversion.ConvertType";
- private string ArgumentsName = "InputOutputColumnPair";
+ private const string ArgumentsName = "InputOutputColumnPair";
public override string GenerateTransformer()
{
@@ -288,14 +288,14 @@ public override string GenerateTransformer()
sb.Append(MethodName);
sb.Append("(");
sb.Append("new []{");
- for (int i = 0; i < inputColumns.Length; i++)
+ for (int i = 0; i < InputColumns.Length; i++)
{
sb.Append("new ");
sb.Append(ArgumentsName);
sb.Append("(");
- sb.Append(outputColumns[i]);
+ sb.Append(OutputColumns[i]);
sb.Append(",");
- sb.Append(inputColumns[i]);
+ sb.Append(InputColumns[i]);
sb.Append(")");
sb.Append(",");
}
@@ -318,8 +318,8 @@ public ValueToKeyMapping(PipelineNode node) : base(node)
public override string GenerateTransformer()
{
StringBuilder sb = new StringBuilder();
- string inputColumn = inputColumns.Count() > 0 ? inputColumns[0] : "\"Features\"";
- string outputColumn = outputColumns.Count() > 0 ? outputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
+ string inputColumn = InputColumns.Count() > 0 ? InputColumns[0] : "\"Features\"";
+ string outputColumn = OutputColumns.Count() > 0 ? OutputColumns[0] : throw new Exception($"output columns for the suggested transform: {MethodName} are null");
sb.Append(MethodName);
sb.Append("(");
sb.Append(outputColumn);
diff --git a/src/mlnet/CodeGenerator/CodeGenerationHelper.cs b/src/mlnet/CodeGenerator/CodeGenerationHelper.cs
index a58088ad78..aae4f4303a 100644
--- a/src/mlnet/CodeGenerator/CodeGenerationHelper.cs
+++ b/src/mlnet/CodeGenerator/CodeGenerationHelper.cs
@@ -21,41 +21,41 @@ namespace Microsoft.ML.CLI.CodeGenerator
internal class CodeGenerationHelper
{
- private IAutoMLEngine automlEngine;
- private NewCommandSettings settings;
- private static Logger logger = LogManager.GetCurrentClassLogger();
- private TaskKind taskKind;
+ private IAutoMLEngine _automlEngine;
+ private NewCommandSettings _settings;
+ private static Logger _logger = LogManager.GetCurrentClassLogger();
+ private TaskKind _taskKind;
public CodeGenerationHelper(IAutoMLEngine automlEngine, NewCommandSettings settings)
{
- this.automlEngine = automlEngine;
- this.settings = settings;
- this.taskKind = Utils.GetTaskKind(settings.MlTask);
+ _automlEngine = automlEngine;
+ _settings = settings;
+ _taskKind = Utils.GetTaskKind(settings.MlTask);
}
public void GenerateCode()
{
Stopwatch watch = Stopwatch.StartNew();
var context = new MLContext();
- context.Log += ConsumeAutoMLSDKLog;
+ context.Log += ConsumeAutoMLSdkLog;
- var verboseLevel = Utils.GetVerbosity(settings.Verbosity);
+ var verboseLevel = Utils.GetVerbosity(_settings.Verbosity);
// Infer columns
ColumnInferenceResults columnInference = null;
try
{
var inputColumnInformation = new ColumnInformation();
- inputColumnInformation.LabelColumnName = settings.LabelColumnName;
- foreach (var value in settings.IgnoreColumns)
+ inputColumnInformation.LabelColumnName = _settings.LabelColumnName;
+ foreach (var value in _settings.IgnoreColumns)
{
inputColumnInformation.IgnoredColumnNames.Add(value);
}
- columnInference = automlEngine.InferColumns(context, inputColumnInformation);
+ columnInference = _automlEngine.InferColumns(context, inputColumnInformation);
}
catch (Exception)
{
- logger.Log(LogLevel.Error, $"{Strings.InferColumnError}");
+ _logger.Log(LogLevel.Error, $"{Strings.InferColumnError}");
throw;
}
@@ -71,7 +71,7 @@ public void GenerateCode()
// Explore the models
- // The reason why we are doing this way of defining 3 different results is because of the AutoML API
+ // The reason why we are doing this way of defining 3 different results is because of the AutoML API
// i.e there is no common class/interface to handle all three tasks together.
List> completedBinaryRuns = new List>();
@@ -86,7 +86,7 @@ public void GenerateCode()
{
Console.Write($"{Strings.ExplorePipeline}: ");
Console.ForegroundColor = ConsoleColor.Yellow;
- Console.WriteLine($"{settings.MlTask}");
+ Console.WriteLine($"{_settings.MlTask}");
Console.ResetColor();
Console.Write($"{Strings.FurtherLearning}: ");
Console.ForegroundColor = ConsoleColor.Yellow;
@@ -94,8 +94,8 @@ public void GenerateCode()
Console.ResetColor();
}
- logger.Log(LogLevel.Trace, $"{Strings.ExplorePipeline}: {settings.MlTask}");
- logger.Log(LogLevel.Trace, $"{Strings.FurtherLearning}: {Strings.LearningHttpLink}");
+ _logger.Log(LogLevel.Trace, $"{Strings.ExplorePipeline}: {_settings.MlTask}");
+ _logger.Log(LogLevel.Trace, $"{Strings.FurtherLearning}: {Strings.LearningHttpLink}");
// TODO the below region needs more refactoring to be done especially with so many switch cases.
@@ -110,7 +110,7 @@ public void GenerateCode()
ProgressCharacter = '\u2593',
BackgroundCharacter = '─',
};
- var wait = TimeSpan.FromSeconds(settings.MaxExplorationTime);
+ var wait = TimeSpan.FromSeconds(_settings.MaxExplorationTime);
if (verboseLevel > LogLevel.Trace && !Console.IsOutputRedirected)
{
@@ -119,30 +119,30 @@ public void GenerateCode()
{
pbar.Message = Strings.WaitingForFirstIteration;
Thread t = default;
- switch (taskKind)
+ switch (_taskKind)
{
// TODO: It may be a good idea to convert the below Threads to Tasks or get rid of this progress bar all together and use an existing one in opensource.
case TaskKind.BinaryClassification:
binaryHandler = new ProgressHandlers.BinaryClassificationHandler(new BinaryExperimentSettings().OptimizingMetric, completedBinaryRuns, pbar);
- t = new Thread(() => SafeExecute(() => automlEngine.ExploreBinaryClassificationModels(context, trainData, validationData, columnInformation, new BinaryExperimentSettings().OptimizingMetric, binaryHandler, pbar), out ex, pbar));
+ t = new Thread(() => SafeExecute(() => _automlEngine.ExploreBinaryClassificationModels(context, trainData, validationData, columnInformation, new BinaryExperimentSettings().OptimizingMetric, binaryHandler, pbar), out ex, pbar));
break;
case TaskKind.Regression:
regressionHandler = new ProgressHandlers.RegressionHandler(new RegressionExperimentSettings().OptimizingMetric, completedRegressionRuns, pbar);
- t = new Thread(() => SafeExecute(() => automlEngine.ExploreRegressionModels(context, trainData, validationData, columnInformation, new RegressionExperimentSettings().OptimizingMetric, regressionHandler, pbar), out ex, pbar));
+ t = new Thread(() => SafeExecute(() => _automlEngine.ExploreRegressionModels(context, trainData, validationData, columnInformation, new RegressionExperimentSettings().OptimizingMetric, regressionHandler, pbar), out ex, pbar));
break;
case TaskKind.MulticlassClassification:
multiClassHandler = new ProgressHandlers.MulticlassClassificationHandler(new MulticlassExperimentSettings().OptimizingMetric, completedMulticlassRuns, pbar);
- t = new Thread(() => SafeExecute(() => automlEngine.ExploreMultiClassificationModels(context, trainData, validationData, columnInformation, new MulticlassExperimentSettings().OptimizingMetric, multiClassHandler, pbar), out ex, pbar));
+ t = new Thread(() => SafeExecute(() => _automlEngine.ExploreMultiClassificationModels(context, trainData, validationData, columnInformation, new MulticlassExperimentSettings().OptimizingMetric, multiClassHandler, pbar), out ex, pbar));
break;
default:
- logger.Log(LogLevel.Error, Strings.UnsupportedMlTask);
+ _logger.Log(LogLevel.Error, Strings.UnsupportedMlTask);
break;
}
t.Start();
pbar.CompletedHandle.WaitOne(wait);
- context.Log -= ConsumeAutoMLSDKLog;
- switch (taskKind)
+ context.Log -= ConsumeAutoMLSdkLog;
+ switch (_taskKind)
{
case TaskKind.BinaryClassification:
binaryHandler.Stop();
@@ -154,7 +154,7 @@ public void GenerateCode()
multiClassHandler.Stop();
break;
default:
- logger.Log(LogLevel.Error, Strings.UnsupportedMlTask);
+ _logger.Log(LogLevel.Error, Strings.UnsupportedMlTask);
break;
}
@@ -168,29 +168,29 @@ public void GenerateCode()
{
Exception ex = null;
Thread t = default;
- switch (taskKind)
+ switch (_taskKind)
{
// TODO: It may be a good idea to convert the below Threads to Tasks or get rid of this progress bar all together and use an existing one in opensource.
case TaskKind.BinaryClassification:
binaryHandler = new ProgressHandlers.BinaryClassificationHandler(new BinaryExperimentSettings().OptimizingMetric, completedBinaryRuns, null);
- t = new Thread(() => SafeExecute(() => automlEngine.ExploreBinaryClassificationModels(context, trainData, validationData, columnInformation, new BinaryExperimentSettings().OptimizingMetric, binaryHandler, null), out ex, null));
+ t = new Thread(() => SafeExecute(() => _automlEngine.ExploreBinaryClassificationModels(context, trainData, validationData, columnInformation, new BinaryExperimentSettings().OptimizingMetric, binaryHandler, null), out ex, null));
break;
case TaskKind.Regression:
regressionHandler = new ProgressHandlers.RegressionHandler(new RegressionExperimentSettings().OptimizingMetric, completedRegressionRuns, null);
- t = new Thread(() => SafeExecute(() => automlEngine.ExploreRegressionModels(context, trainData, validationData, columnInformation, new RegressionExperimentSettings().OptimizingMetric, regressionHandler, null), out ex, null));
+ t = new Thread(() => SafeExecute(() => _automlEngine.ExploreRegressionModels(context, trainData, validationData, columnInformation, new RegressionExperimentSettings().OptimizingMetric, regressionHandler, null), out ex, null));
break;
case TaskKind.MulticlassClassification:
multiClassHandler = new ProgressHandlers.MulticlassClassificationHandler(new MulticlassExperimentSettings().OptimizingMetric, completedMulticlassRuns, null);
- t = new Thread(() => SafeExecute(() => automlEngine.ExploreMultiClassificationModels(context, trainData, validationData, columnInformation, new MulticlassExperimentSettings().OptimizingMetric, multiClassHandler, null), out ex, null));
+ t = new Thread(() => SafeExecute(() => _automlEngine.ExploreMultiClassificationModels(context, trainData, validationData, columnInformation, new MulticlassExperimentSettings().OptimizingMetric, multiClassHandler, null), out ex, null));
break;
default:
- logger.Log(LogLevel.Error, Strings.UnsupportedMlTask);
+ _logger.Log(LogLevel.Error, Strings.UnsupportedMlTask);
break;
}
t.Start();
Thread.Sleep(wait);
- context.Log -= ConsumeAutoMLSDKLog;
- switch (taskKind)
+ context.Log -= ConsumeAutoMLSdkLog;
+ switch (_taskKind)
{
case TaskKind.BinaryClassification:
binaryHandler.Stop();
@@ -202,7 +202,7 @@ public void GenerateCode()
multiClassHandler.Stop();
break;
default:
- logger.Log(LogLevel.Error, Strings.UnsupportedMlTask);
+ _logger.Log(LogLevel.Error, Strings.UnsupportedMlTask);
break;
}
@@ -214,12 +214,12 @@ public void GenerateCode()
}
catch (Exception)
{
- logger.Log(LogLevel.Error, $"{Strings.ExplorePipelineException}:");
+ _logger.Log(LogLevel.Error, $"{Strings.ExplorePipelineException}:");
throw;
}
finally
{
- context.Log -= ConsumeAutoMLSDKLog;
+ context.Log -= ConsumeAutoMLSdkLog;
}
var elapsedTime = watch.Elapsed.TotalSeconds;
@@ -229,7 +229,7 @@ public void GenerateCode()
ITransformer bestModel = null;
try
{
- switch (taskKind)
+ switch (_taskKind)
{
case TaskKind.BinaryClassification:
if (completedBinaryRuns.Count > 0)
@@ -238,13 +238,13 @@ public void GenerateCode()
var bestBinaryIteration = BestResultUtil.GetBestRun(completedBinaryRuns, binaryMetric);
bestPipeline = bestBinaryIteration.Pipeline;
bestModel = bestBinaryIteration.Model;
- ConsolePrinter.ExperimentResultsHeader(LogLevel.Info, settings.MlTask, settings.Dataset.Name, columnInformation.LabelColumnName, elapsedTime.ToString("F2"), completedBinaryRuns.Count());
+ ConsolePrinter.ExperimentResultsHeader(LogLevel.Info, _settings.MlTask, _settings.Dataset.Name, columnInformation.LabelColumnName, elapsedTime.ToString("F2"), completedBinaryRuns.Count());
ConsolePrinter.PrintIterationSummary(completedBinaryRuns, binaryMetric, 5);
}
else
{
- logger.Log(LogLevel.Error, string.Format(Strings.CouldNotFinshOnTime, settings.MaxExplorationTime));
- logger.Log(LogLevel.Info, Strings.Exiting);
+ _logger.Log(LogLevel.Error, string.Format(Strings.CouldNotFinshOnTime, _settings.MaxExplorationTime));
+ _logger.Log(LogLevel.Info, Strings.Exiting);
return;
}
break;
@@ -255,13 +255,13 @@ public void GenerateCode()
var bestRegressionIteration = BestResultUtil.GetBestRun(completedRegressionRuns, regressionMetric);
bestPipeline = bestRegressionIteration.Pipeline;
bestModel = bestRegressionIteration.Model;
- ConsolePrinter.ExperimentResultsHeader(LogLevel.Info, settings.MlTask, settings.Dataset.Name, columnInformation.LabelColumnName, elapsedTime.ToString("F2"), completedRegressionRuns.Count());
+ ConsolePrinter.ExperimentResultsHeader(LogLevel.Info, _settings.MlTask, _settings.Dataset.Name, columnInformation.LabelColumnName, elapsedTime.ToString("F2"), completedRegressionRuns.Count());
ConsolePrinter.PrintIterationSummary(completedRegressionRuns, regressionMetric, 5);
}
else
{
- logger.Log(LogLevel.Error, string.Format(Strings.CouldNotFinshOnTime, settings.MaxExplorationTime));
- logger.Log(LogLevel.Info, Strings.Exiting);
+ _logger.Log(LogLevel.Error, string.Format(Strings.CouldNotFinshOnTime, _settings.MaxExplorationTime));
+ _logger.Log(LogLevel.Info, Strings.Exiting);
return;
}
break;
@@ -272,13 +272,13 @@ public void GenerateCode()
var bestMulticlassIteration = BestResultUtil.GetBestRun(completedMulticlassRuns, muliclassMetric);
bestPipeline = bestMulticlassIteration.Pipeline;
bestModel = bestMulticlassIteration.Model;
- ConsolePrinter.ExperimentResultsHeader(LogLevel.Info, settings.MlTask, settings.Dataset.Name, columnInformation.LabelColumnName, elapsedTime.ToString("F2"), completedMulticlassRuns.Count());
+ ConsolePrinter.ExperimentResultsHeader(LogLevel.Info, _settings.MlTask, _settings.Dataset.Name, columnInformation.LabelColumnName, elapsedTime.ToString("F2"), completedMulticlassRuns.Count());
ConsolePrinter.PrintIterationSummary(completedMulticlassRuns, muliclassMetric, 5);
}
else
{
- logger.Log(LogLevel.Error, string.Format(Strings.CouldNotFinshOnTime, settings.MaxExplorationTime));
- logger.Log(LogLevel.Info, Strings.Exiting);
+ _logger.Log(LogLevel.Error, string.Format(Strings.CouldNotFinshOnTime, _settings.MaxExplorationTime));
+ _logger.Log(LogLevel.Info, Strings.Exiting);
return;
}
break;
@@ -286,24 +286,24 @@ public void GenerateCode()
}
catch (Exception)
{
- logger.Log(LogLevel.Info, Strings.ErrorBestPipeline);
+ _logger.Log(LogLevel.Info, Strings.ErrorBestPipeline);
throw;
}
#endregion
// Save the model
- var modelprojectDir = Path.Combine(settings.OutputPath.FullName, $"{settings.Name}.Model");
+ var modelprojectDir = Path.Combine(_settings.OutputPath.FullName, $"{_settings.Name}.Model");
var modelPath = new FileInfo(Path.Combine(modelprojectDir, "MLModel.zip"));
try
{
Utils.SaveModel(bestModel, modelPath, context, trainData.Schema);
Console.ForegroundColor = ConsoleColor.Yellow;
- logger.Log(LogLevel.Info, $"{Strings.SavingBestModel}: {modelPath}");
+ _logger.Log(LogLevel.Info, $"{Strings.SavingBestModel}: {modelPath}");
}
catch (Exception)
{
- logger.Log(LogLevel.Info, Strings.ErrorSavingModel);
+ _logger.Log(LogLevel.Info, Strings.ErrorSavingModel);
throw;
}
finally
@@ -316,13 +316,13 @@ public void GenerateCode()
{
GenerateProject(columnInference, bestPipeline, columnInformation.LabelColumnName, modelPath);
Console.ForegroundColor = ConsoleColor.Yellow;
- logger.Log(LogLevel.Info, $"{Strings.GenerateModelConsumption}: { Path.Combine(settings.OutputPath.FullName, $"{settings.Name}.ConsoleApp")}");
- logger.Log(LogLevel.Info, $"{Strings.SeeLogFileForMoreInfo}: {settings.LogFilePath}");
+ _logger.Log(LogLevel.Info, $"{Strings.GenerateModelConsumption}: { Path.Combine(_settings.OutputPath.FullName, $"{_settings.Name}.ConsoleApp")}");
+ _logger.Log(LogLevel.Info, $"{Strings.SeeLogFileForMoreInfo}: {_settings.LogFilePath}");
}
catch (Exception)
{
- logger.Log(LogLevel.Info, Strings.ErrorGeneratingProject);
+ _logger.Log(LogLevel.Info, Strings.ErrorGeneratingProject);
throw;
}
finally
@@ -339,11 +339,11 @@ internal void GenerateProject(ColumnInferenceResults columnInference, Pipeline p
columnInference,
new CodeGeneratorSettings()
{
- TrainDataset = settings.Dataset.FullName,
- MlTask = taskKind,
- TestDataset = settings.TestDataset?.FullName,
- OutputName = settings.Name,
- OutputBaseDir = settings.OutputPath.FullName,
+ TrainDataset = _settings.Dataset.FullName,
+ MlTask = _taskKind,
+ TestDataset = _settings.TestDataset?.FullName,
+ OutputName = _settings.Name,
+ OutputBaseDir = _settings.OutputPath.FullName,
LabelName = labelName,
ModelPath = modelPath.FullName
});
@@ -352,22 +352,22 @@ internal void GenerateProject(ColumnInferenceResults columnInference, Pipeline p
internal (IDataView, IDataView) LoadData(MLContext context, TextLoader.Options textLoaderOptions)
{
- logger.Log(LogLevel.Trace, Strings.CreateDataLoader);
+ _logger.Log(LogLevel.Trace, Strings.CreateDataLoader);
var textLoader = context.Data.CreateTextLoader(textLoaderOptions);
- logger.Log(LogLevel.Trace, Strings.LoadData);
- var trainData = textLoader.Load(settings.Dataset.FullName);
- var validationData = settings.ValidationDataset == null ? null : textLoader.Load(settings.ValidationDataset.FullName);
+ _logger.Log(LogLevel.Trace, Strings.LoadData);
+ var trainData = textLoader.Load(_settings.Dataset.FullName);
+ var validationData = _settings.ValidationDataset == null ? null : textLoader.Load(_settings.ValidationDataset.FullName);
return (trainData, validationData);
}
- private static void ConsumeAutoMLSDKLog(object sender, LoggingEventArgs args)
+ private static void ConsumeAutoMLSdkLog(object sender, LoggingEventArgs args)
{
var logMessage = args.Message;
if (logMessage.Contains(AutoMLLogger.ChannelName))
{
- logger.Trace(args.Message);
+ _logger.Trace(args.Message);
}
}
diff --git a/src/mlnet/Commands/New/NewCommandHandler.cs b/src/mlnet/Commands/New/NewCommandHandler.cs
index 2b6091849b..27b2a7a52f 100644
--- a/src/mlnet/Commands/New/NewCommandHandler.cs
+++ b/src/mlnet/Commands/New/NewCommandHandler.cs
@@ -10,20 +10,20 @@ namespace Microsoft.ML.CLI.Commands.New
{
internal class NewCommand : ICommand
{
- private readonly NewCommandSettings settings;
- private readonly MlTelemetry telemetry;
+ private readonly NewCommandSettings _settings;
+ private readonly MlTelemetry _telemetry;
internal NewCommand(NewCommandSettings settings, MlTelemetry telemetry)
{
- this.settings = settings;
- this.telemetry = telemetry;
+ _settings = settings;
+ _telemetry = telemetry;
}
public void Execute()
{
- telemetry.LogAutoTrainMlCommand(settings.Dataset.Name, settings.MlTask.ToString(), settings.Dataset.Length);
+ _telemetry.LogAutoTrainMlCommand(_settings.Dataset.Name, _settings.MlTask.ToString(), _settings.Dataset.Length);
- CodeGenerationHelper codeGenerationHelper = new CodeGenerationHelper(new AutoMLEngine(settings), settings); // Needs to be improved.
+ CodeGenerationHelper codeGenerationHelper = new CodeGenerationHelper(new AutoMLEngine(_settings), _settings); // Needs to be improved.
codeGenerationHelper.GenerateCode();
}
}
diff --git a/src/mlnet/Commands/New/NewCommandSettings.cs b/src/mlnet/Commands/New/NewCommandSettings.cs
index 22fb7c19d4..555b11c028 100644
--- a/src/mlnet/Commands/New/NewCommandSettings.cs
+++ b/src/mlnet/Commands/New/NewCommandSettings.cs
@@ -33,9 +33,13 @@ public class NewCommandSettings
public string Cache { get; set; }
- public List IgnoreColumns { get; set; } = new List();
+ public List IgnoreColumns { get; set; }
public string LogFilePath { get; set; }
+ public NewCommandSettings()
+ {
+ IgnoreColumns = new List();
+ }
}
}
diff --git a/src/mlnet/Program.cs b/src/mlnet/Program.cs
index 568eaa4d4a..731ce5aca3 100644
--- a/src/mlnet/Program.cs
+++ b/src/mlnet/Program.cs
@@ -17,9 +17,9 @@
namespace Microsoft.ML.CLI
{
- class Program
+ public class Program
{
- private static Logger logger = LogManager.GetCurrentClassLogger();
+ private static Logger _logger = LogManager.GetCurrentClassLogger();
public static void Main(string[] args)
{
var telemetry = new MlTelemetry();
@@ -67,10 +67,10 @@ public static void Main(string[] args)
}
catch (Exception e)
{
- logger.Log(LogLevel.Error, e.Message);
- logger.Log(LogLevel.Debug, e.ToString());
- logger.Log(LogLevel.Info, Strings.LookIntoLogFile);
- logger.Log(LogLevel.Error, Strings.Exiting);
+ _logger.Log(LogLevel.Error, e.Message);
+ _logger.Log(LogLevel.Debug, e.ToString());
+ _logger.Log(LogLevel.Info, Strings.LookIntoLogFile);
+ _logger.Log(LogLevel.Error, Strings.Exiting);
}
});
diff --git a/src/mlnet/ProgressBar/ChildProgressBar.cs b/src/mlnet/ProgressBar/ChildProgressBar.cs
index 99476c77b7..6867ce4e68 100644
--- a/src/mlnet/ProgressBar/ChildProgressBar.cs
+++ b/src/mlnet/ProgressBar/ChildProgressBar.cs
@@ -11,20 +11,19 @@ public class ChildProgressBar : ProgressBarBase, IProgressBar
private readonly Action _scheduleDraw;
private readonly Action _growth;
- public DateTime StartDate { get; } = DateTime.Now;
-
protected override void DisplayProgress() => _scheduleDraw?.Invoke();
internal ChildProgressBar(int maxTicks, string message, Action scheduleDraw, ProgressBarOptions options = null, Action growth = null)
: base(maxTicks, message, options)
{
+ _callOnce = new object();
_scheduleDraw = scheduleDraw;
_growth = growth;
_growth?.Invoke(ProgressBarHeight.Increment);
}
private bool _calledDone;
- private readonly object _callOnce = new object();
+ private readonly object _callOnce;
protected override void OnDone()
{
@@ -33,10 +32,10 @@ protected override void OnDone()
{
if (_calledDone) return;
- if (this.EndTime == null)
- this.EndTime = DateTime.Now;
+ if (EndTime == null)
+ EndTime = DateTime.Now;
- if (this.Collapse)
+ if (Collapse)
_growth?.Invoke(ProgressBarHeight.Decrement);
_calledDone = true;
@@ -46,7 +45,7 @@ protected override void OnDone()
public void Dispose()
{
OnDone();
- foreach (var c in this.Children) c.Dispose();
+ foreach (var c in Children) c.Dispose();
}
}
}
diff --git a/src/mlnet/ProgressBar/FixedDurationBar.cs b/src/mlnet/ProgressBar/FixedDurationBar.cs
index 7b4879ef18..8e44b09f36 100644
--- a/src/mlnet/ProgressBar/FixedDurationBar.cs
+++ b/src/mlnet/ProgressBar/FixedDurationBar.cs
@@ -11,31 +11,30 @@ public class FixedDurationBar : ProgressBar
{
public bool IsCompleted { get; private set; }
- private readonly ManualResetEvent _completedHandle = new ManualResetEvent(false);
+ private readonly ManualResetEvent _completedHandle;
public WaitHandle CompletedHandle => _completedHandle;
- public FixedDurationBar(TimeSpan duration, string message, ConsoleColor color) : this(duration, message, new ProgressBarOptions { ForegroundColor = color }) { }
-
public FixedDurationBar(TimeSpan duration, string message, ProgressBarOptions options = null) : base((int)Math.Ceiling(duration.TotalSeconds), message, options)
{
- if (!this.Options.DisplayTimeInRealTime)
+ _completedHandle = new ManualResetEvent(false);
+ if (!Options.DisplayTimeInRealTime)
throw new ArgumentException(
$"{nameof(ProgressBarOptions)}.{nameof(ProgressBarOptions.DisplayTimeInRealTime)} has to be true for {nameof(FixedDurationBar)}", nameof(options)
);
}
- private long _seenTicks = 0;
+ private long _seenTicks;
protected override void OnTimerTick()
{
Interlocked.Increment(ref _seenTicks);
- if (_seenTicks % 2 == 0) this.Tick();
+ if (_seenTicks % 2 == 0) Tick();
base.OnTimerTick();
}
protected override void OnDone()
{
- this.IsCompleted = true;
- this._completedHandle.Set();
+ IsCompleted = true;
+ _completedHandle.Set();
}
}
}
diff --git a/src/mlnet/ProgressBar/ProgressBar.cs b/src/mlnet/ProgressBar/ProgressBar.cs
index 5e72ad2f69..80dcc9264b 100644
--- a/src/mlnet/ProgressBar/ProgressBar.cs
+++ b/src/mlnet/ProgressBar/ProgressBar.cs
@@ -13,7 +13,7 @@ namespace Microsoft.ML.CLI.ShellProgressBar
{
public class ProgressBar : ProgressBarBase, IProgressBar
{
- private static readonly bool IsWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
+ private static readonly bool _isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
private readonly ConsoleColor _originalColor;
private readonly int _originalCursorTop;
@@ -21,7 +21,7 @@ public class ProgressBar : ProgressBarBase, IProgressBar
private int _isDisposed;
private Timer _timer;
- private int _visibleDescendants = 0;
+ private int _visibleDescendants;
private readonly AutoResetEvent _displayProgressEvent;
private readonly Task _displayProgress;
@@ -41,10 +41,10 @@ public ProgressBar(int maxTicks, string message, ProgressBarOptions options = nu
Console.CursorVisible = false;
- if (this.Options.EnableTaskBarProgress)
+ if (Options.EnableTaskBarProgress)
TaskbarProgress.SetState(TaskbarProgress.TaskbarStates.Normal);
- if (this.Options.DisplayTimeInRealTime)
+ if (Options.DisplayTimeInRealTime)
_timer = new Timer((s) => OnTimerTick(), null, 500, 500);
else //draw once
_timer = new Timer((s) =>
@@ -94,8 +94,8 @@ private struct Indentation
{
public Indentation(ConsoleColor color, bool lastChild)
{
- this.ConsoleColor = color;
- this.LastChild = lastChild;
+ ConsoleColor = color;
+ LastChild = lastChild;
}
public string Glyph => !LastChild ? "├─" : "└─";
@@ -198,26 +198,26 @@ protected override void DisplayProgress()
private void UpdateProgress()
{
Console.CursorVisible = false;
- var indentation = new[] { new Indentation(this.ForeGroundColor, true) };
- var mainPercentage = this.Percentage;
+ var indentation = new[] { new Indentation(ForeGroundColor, true) };
+ var mainPercentage = Percentage;
var cursorTop = _originalCursorTop;
- Console.ForegroundColor = this.ForeGroundColor;
+ Console.ForegroundColor = ForeGroundColor;
void TopHalf()
{
ProgressBarTopHalf(mainPercentage,
- this.Options.ProgressCharacter,
- this.Options.BackgroundCharacter,
- this.Options.BackgroundColor,
+ Options.ProgressCharacter,
+ Options.BackgroundCharacter,
+ Options.BackgroundColor,
indentation,
- this.Options.ProgressBarOnBottom
+ Options.ProgressBarOnBottom
);
}
- if (this.Options.ProgressBarOnBottom)
+ if (Options.ProgressBarOnBottom)
{
- ProgressBarBottomHalf(mainPercentage, this._startDate, null, this.Message, indentation, this.Options.ProgressBarOnBottom);
+ ProgressBarBottomHalf(mainPercentage, StartDate, null, Message, indentation, Options.ProgressBarOnBottom);
Console.SetCursorPosition(0, ++cursorTop);
TopHalf();
}
@@ -225,13 +225,13 @@ void TopHalf()
{
TopHalf();
Console.SetCursorPosition(0, ++cursorTop);
- ProgressBarBottomHalf(mainPercentage, this._startDate, null, this.Message, indentation, this.Options.ProgressBarOnBottom);
+ ProgressBarBottomHalf(mainPercentage, StartDate, null, Message, indentation, Options.ProgressBarOnBottom);
}
- if (this.Options.EnableTaskBarProgress)
+ if (Options.EnableTaskBarProgress)
TaskbarProgress.SetValue(mainPercentage, 100);
- DrawChildren(this.Children, indentation, ref cursorTop);
+ DrawChildren(Children, indentation, ref cursorTop);
ResetToBottom(ref cursorTop);
@@ -327,10 +327,10 @@ public void Dispose()
// of System.Console
UpdateProgress();
- if (this.EndTime == null) this.EndTime = DateTime.Now;
+ if (EndTime == null) EndTime = DateTime.Now;
var openDescendantsPadding = (_visibleDescendants * 2);
- if (this.Options.EnableTaskBarProgress)
+ if (Options.EnableTaskBarProgress)
TaskbarProgress.SetState(TaskbarProgress.TaskbarStates.NoProgress);
try
@@ -355,7 +355,7 @@ public void Dispose()
Console.WriteLine();
_timer?.Dispose();
_timer = null;
- foreach (var c in this.Children) c.Dispose();
+ foreach (var c in Children) c.Dispose();
OnDone();
}
}
diff --git a/src/mlnet/ProgressBar/ProgressBarBase.cs b/src/mlnet/ProgressBar/ProgressBarBase.cs
index d47985f1e5..020ababfc9 100644
--- a/src/mlnet/ProgressBar/ProgressBarBase.cs
+++ b/src/mlnet/ProgressBar/ProgressBarBase.cs
@@ -16,20 +16,22 @@ static ProgressBarBase()
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
}
- protected readonly DateTime _startDate = DateTime.Now;
+ public readonly DateTime StartDate;
private int _maxTicks;
private int _currentTick;
private string _message;
protected ProgressBarBase(int maxTicks, string message, ProgressBarOptions options)
{
- this._maxTicks = Math.Max(0, maxTicks);
- this._message = message;
- this.Options = options ?? ProgressBarOptions.Default;
+ _maxTicks = Math.Max(0, maxTicks);
+ _message = message;
+ Options = options ?? ProgressBarOptions.Default;
+ StartDate = DateTime.Now;
+ Children = new ConcurrentBag();
}
internal ProgressBarOptions Options { get; }
- internal ConcurrentBag Children { get; } = new ConcurrentBag();
+ internal ConcurrentBag Children { get; }
protected abstract void DisplayProgress();
@@ -44,7 +46,7 @@ protected virtual void OnDone()
public DateTime? EndTime { get; protected set; }
public ConsoleColor ForeGroundColor =>
- EndTime.HasValue ? this.Options.ForegroundColorDone ?? this.Options.ForegroundColor : this.Options.ForegroundColor;
+ EndTime.HasValue ? Options.ForegroundColorDone ?? Options.ForegroundColor : Options.ForegroundColor;
public int CurrentTick => _currentTick;
@@ -72,19 +74,19 @@ public double Percentage
{
get
{
- var percentage = Math.Max(0, Math.Min(100, (100.0 / this._maxTicks) * this._currentTick));
+ var percentage = Math.Max(0, Math.Min(100, (100.0 / _maxTicks) * _currentTick));
// Gracefully handle if the percentage is NaN due to division by 0
if (double.IsNaN(percentage) || percentage < 0) percentage = 100;
return percentage;
}
}
- public bool Collapse => this.EndTime.HasValue && this.Options.CollapseWhenFinished;
+ public bool Collapse => EndTime.HasValue && Options.CollapseWhenFinished;
public ChildProgressBar Spawn(int maxTicks, string message, ProgressBarOptions options = null)
{
- var pbar = new ChildProgressBar(maxTicks, message, DisplayProgress, options, this.Grow);
- this.Children.Add(pbar);
+ var pbar = new ChildProgressBar(maxTicks, message, DisplayProgress, options, Grow);
+ Children.Add(pbar);
DisplayProgress();
return pbar;
}
@@ -110,8 +112,8 @@ private void FinishTick(string message)
if (_currentTick >= _maxTicks)
{
- this.EndTime = DateTime.Now;
- this.OnDone();
+ EndTime = DateTime.Now;
+ OnDone();
}
DisplayProgress();
}
diff --git a/src/mlnet/ProgressBar/ProgressBarOptions.cs b/src/mlnet/ProgressBar/ProgressBarOptions.cs
index e8de881af0..d44032e998 100644
--- a/src/mlnet/ProgressBar/ProgressBarOptions.cs
+++ b/src/mlnet/ProgressBar/ProgressBarOptions.cs
@@ -16,7 +16,7 @@ public class ProgressBarOptions
public static readonly ProgressBarOptions Default = new ProgressBarOptions();
/// The foreground color of the progress bar, message and time
- public ConsoleColor ForegroundColor { get; set; } = ConsoleColor.Green;
+ public ConsoleColor ForegroundColor { get; set; }
/// The foreground color the progressbar has reached a 100 percent
public ConsoleColor? ForegroundColorDone { get; set; }
@@ -25,7 +25,7 @@ public class ProgressBarOptions
public ConsoleColor? BackgroundColor { get; set; }
/// The character to use to draw the progressbar
- public char ProgressCharacter { get; set; } = '\u2588';
+ public char ProgressCharacter { get; set; }
///
/// The character to use for the background of the progress defaults to
@@ -37,13 +37,13 @@ public class ProgressBarOptions
/// is called.
/// Defaults to true
///
- public bool DisplayTimeInRealTime { get; set; } = true;
+ public bool DisplayTimeInRealTime { get; set; }
///
/// Collapse the progressbar when done, very useful for child progressbars
/// Defaults to true
///
- public bool CollapseWhenFinished { get; set; } = true;
+ public bool CollapseWhenFinished { get; set; }
///
/// By default the text and time information is displayed at the bottom and the progress bar at the top.
@@ -68,5 +68,13 @@ public bool EnableTaskBarProgress
_enableTaskBarProgress = value;
}
}
+
+ public ProgressBarOptions()
+ {
+ ForegroundColor = ConsoleColor.Green;
+ ProgressCharacter = '\u2588';
+ DisplayTimeInRealTime = true;
+ CollapseWhenFinished = true;
+ }
}
}
diff --git a/src/mlnet/ProgressBar/TaskbarProgress.cs b/src/mlnet/ProgressBar/TaskbarProgress.cs
index 19dbab3c70..c21b800247 100644
--- a/src/mlnet/ProgressBar/TaskbarProgress.cs
+++ b/src/mlnet/ProgressBar/TaskbarProgress.cs
@@ -58,7 +58,7 @@ private class TaskbarInstance
{ }
[DllImport("kernel32.dll")]
- static extern IntPtr GetConsoleWindow();
+ private static extern IntPtr GetConsoleWindow();
private static readonly ITaskbarList3 _taskbarInstance = (ITaskbarList3)new TaskbarInstance();
private static readonly bool _taskbarSupported = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
diff --git a/src/mlnet/Telemetry/DotNetAppInsights/CollectionsExtensions.cs b/src/mlnet/Telemetry/DotNetAppInsights/CollectionsExtensions.cs
index 08779f65e0..d48767d963 100644
--- a/src/mlnet/Telemetry/DotNetAppInsights/CollectionsExtensions.cs
+++ b/src/mlnet/Telemetry/DotNetAppInsights/CollectionsExtensions.cs
@@ -10,7 +10,7 @@ public static class CollectionsExtensions
{
public static IEnumerable OrEmptyIfNull(this IEnumerable enumerable)
{
- return enumerable == null
+ return enumerable == null
? Enumerable.Empty()
: enumerable;
}
diff --git a/src/mlnet/Telemetry/DotNetAppInsights/EnvironmentProvider.cs b/src/mlnet/Telemetry/DotNetAppInsights/EnvironmentProvider.cs
index f0a9fefbe9..398a14eed8 100644
--- a/src/mlnet/Telemetry/DotNetAppInsights/EnvironmentProvider.cs
+++ b/src/mlnet/Telemetry/DotNetAppInsights/EnvironmentProvider.cs
@@ -11,10 +11,10 @@ namespace Microsoft.DotNet.Cli.Utils
{
public class EnvironmentProvider : IEnvironmentProvider
{
- private static char[] s_pathSeparator = new char[] { Path.PathSeparator };
- private static char[] s_quote = new char[] { '"' };
+ private static char[] _sPathSeparator = new char[] { Path.PathSeparator };
+ private static char[] _sQuote = new char[] { '"' };
private IEnumerable _searchPaths;
- private readonly Lazy _userHomeDirectory = new Lazy(() => Environment.GetEnvironmentVariable("HOME") ?? string.Empty);
+ private readonly Lazy _userHomeDirectory;
private IEnumerable _executableExtensions;
public IEnumerable ExecutableExtensions
@@ -45,8 +45,8 @@ private IEnumerable SearchPaths
searchPaths.AddRange(Environment
.GetEnvironmentVariable("PATH")
- .Split(s_pathSeparator)
- .Select(p => p.Trim(s_quote))
+ .Split(_sPathSeparator)
+ .Select(p => p.Trim(_sQuote))
.Where(p => !string.IsNullOrWhiteSpace(p))
.Select(p => ExpandTildeSlash(p)));
@@ -76,6 +76,7 @@ public EnvironmentProvider(
{
_executableExtensions = extensionsOverride;
_searchPaths = searchPathsOverride;
+ _userHomeDirectory = new Lazy(() => Environment.GetEnvironmentVariable("HOME") ?? string.Empty);
}
public string GetCommandPath(string commandName, params string[] extensions)
diff --git a/src/mlnet/Telemetry/DotNetAppInsights/ExceptionExtensions.cs b/src/mlnet/Telemetry/DotNetAppInsights/ExceptionExtensions.cs
index 03f984404c..0a9bf1b9d1 100644
--- a/src/mlnet/Telemetry/DotNetAppInsights/ExceptionExtensions.cs
+++ b/src/mlnet/Telemetry/DotNetAppInsights/ExceptionExtensions.cs
@@ -7,16 +7,16 @@ namespace Microsoft.DotNet.Cli.Utils
{
internal static class ExceptionExtensions
{
- public static TException DisplayAsError(this TException exception)
- where TException : Exception
+ public static TException DisplayAsError(this TException exception)
+ where TException : Exception
{
- exception.Data.Add(CLI_User_Displayed_Exception, true);
+ exception.Data.Add(CliUserDisplayedException, true);
return exception;
}
public static bool ShouldBeDisplayedAsError(this Exception e) =>
- e.Data.Contains(CLI_User_Displayed_Exception);
+ e.Data.Contains(CliUserDisplayedException);
- internal const string CLI_User_Displayed_Exception = "CLI_User_Displayed_Exception";
+ internal const string CliUserDisplayedException = "CLI_User_Displayed_Exception";
}
}
diff --git a/src/mlnet/Telemetry/DotNetAppInsights/FileSystemWrapper.cs b/src/mlnet/Telemetry/DotNetAppInsights/FileSystemWrapper.cs
index 8818074310..01ab61d582 100644
--- a/src/mlnet/Telemetry/DotNetAppInsights/FileSystemWrapper.cs
+++ b/src/mlnet/Telemetry/DotNetAppInsights/FileSystemWrapper.cs
@@ -7,8 +7,14 @@ internal class FileSystemWrapper : IFileSystem
{
public static IFileSystem Default { get; } = new FileSystemWrapper();
- public IFile File { get; } = new FileWrapper();
+ public IFile File { get; }
- public IDirectory Directory { get; } = new DirectoryWrapper();
+ public IDirectory Directory { get; }
+
+ public FileSystemWrapper()
+ {
+ File = new FileWrapper();
+ Directory = new DirectoryWrapper();
+ }
}
}
diff --git a/src/mlnet/Telemetry/DotNetAppInsights/FirstTimeUseNoticeSentinel.cs b/src/mlnet/Telemetry/DotNetAppInsights/FirstTimeUseNoticeSentinel.cs
index 0ea9dfd9dd..faea7ce391 100644
--- a/src/mlnet/Telemetry/DotNetAppInsights/FirstTimeUseNoticeSentinel.cs
+++ b/src/mlnet/Telemetry/DotNetAppInsights/FirstTimeUseNoticeSentinel.cs
@@ -9,14 +9,14 @@ namespace Microsoft.DotNet.Configurer
{
public class FirstTimeUseNoticeSentinel : IFirstTimeUseNoticeSentinel
{
- public static readonly string SENTINEL = $"{Product.Version}.MLNET.dotnetFirstUseSentinel";
+ public static readonly string Sentinel = $"{Product.Version}.MLNET.dotnetFirstUseSentinel";
private readonly IFile _file;
private readonly IDirectory _directory;
private string _dotnetUserProfileFolderPath;
- private string SentinelPath => Path.Combine(_dotnetUserProfileFolderPath, SENTINEL);
+ private string SentinelPath => Path.Combine(_dotnetUserProfileFolderPath, Sentinel);
public FirstTimeUseNoticeSentinel() :
this(
diff --git a/src/mlnet/Telemetry/DotNetAppInsights/MacAddressGetter.cs b/src/mlnet/Telemetry/DotNetAppInsights/MacAddressGetter.cs
index 1106fbdc90..e6cebe6a3d 100644
--- a/src/mlnet/Telemetry/DotNetAppInsights/MacAddressGetter.cs
+++ b/src/mlnet/Telemetry/DotNetAppInsights/MacAddressGetter.cs
@@ -1,14 +1,13 @@
// Copyright (c) .NET Foundation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
-using System;
-using System.Linq;
-using System.Diagnostics;
using System.Collections.Generic;
+using System.ComponentModel;
+using System.Diagnostics;
+using System.Linq;
+using System.Net.NetworkInformation;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
-using System.Net.NetworkInformation;
-using System.ComponentModel;
using Microsoft.DotNet.Cli.Utils;
namespace Microsoft.DotNet.Cli.Telemetry
@@ -28,7 +27,7 @@ public static string GetMacAddress()
return null;
}
- return ParseMACAddress(shelloutput);
+ return ParseMacAddress(shelloutput);
}
catch (Win32Exception e)
{
@@ -43,7 +42,7 @@ public static string GetMacAddress()
}
}
- private static string ParseMACAddress(string shelloutput)
+ private static string ParseMacAddress(string shelloutput)
{
string macAddress = null;
foreach (Match match in Regex.Matches(shelloutput, MacRegex, RegexOptions.IgnoreCase))
diff --git a/src/mlnet/Telemetry/DotNetAppInsights/NativeMethods.cs b/src/mlnet/Telemetry/DotNetAppInsights/NativeMethods.cs
index a22fcd8378..8d198d24a2 100644
--- a/src/mlnet/Telemetry/DotNetAppInsights/NativeMethods.cs
+++ b/src/mlnet/Telemetry/DotNetAppInsights/NativeMethods.cs
@@ -73,8 +73,8 @@ internal static class Posix
[DllImport("libc", SetLastError = true)]
internal static extern int kill(int pid, int sig);
- internal const int SIGINT = 2;
- internal const int SIGTERM = 15;
+ internal const int SigInt = 2;
+ internal const int SigTerm = 15;
}
}
}
diff --git a/src/mlnet/Telemetry/DotNetAppInsights/ProcessReaper.cs b/src/mlnet/Telemetry/DotNetAppInsights/ProcessReaper.cs
index e33f2bd658..35b6095566 100644
--- a/src/mlnet/Telemetry/DotNetAppInsights/ProcessReaper.cs
+++ b/src/mlnet/Telemetry/DotNetAppInsights/ProcessReaper.cs
@@ -144,7 +144,7 @@ private void HandleProcessExit(object sender, EventArgs args)
// this one does.
_shutdownMutex.WaitOne();
- if (!_process.WaitForExit(0) && NativeMethods.Posix.kill(processId, NativeMethods.Posix.SIGTERM) != 0)
+ if (!_process.WaitForExit(0) && NativeMethods.Posix.kill(processId, NativeMethods.Posix.SigTerm) != 0)
{
// Couldn't send the signal, don't wait
return;
diff --git a/src/mlnet/Telemetry/DotNetAppInsights/StreamForwarder.cs b/src/mlnet/Telemetry/DotNetAppInsights/StreamForwarder.cs
index e3d96d07e8..b9a4fdb1c0 100644
--- a/src/mlnet/Telemetry/DotNetAppInsights/StreamForwarder.cs
+++ b/src/mlnet/Telemetry/DotNetAppInsights/StreamForwarder.cs
@@ -11,8 +11,8 @@ namespace Microsoft.DotNet.Cli.Utils
{
public sealed class StreamForwarder
{
- private static readonly char[] s_ignoreCharacters = new char[] { '\r' };
- private static readonly char s_flushBuilderCharacter = '\n';
+ private static readonly char[] _sIgnoreCharacters = new char[] { '\r' };
+ private static readonly char _sFlushBuilderCharacter = '\n';
private StringBuilder _builder;
private StringWriter _capture;
@@ -67,11 +67,11 @@ public void Read(TextReader reader)
{
currentCharacter = buffer[0];
- if (currentCharacter == s_flushBuilderCharacter)
+ if (currentCharacter == _sFlushBuilderCharacter)
{
WriteBuilder();
}
- else if (!s_ignoreCharacters.Contains(currentCharacter))
+ else if (!_sIgnoreCharacters.Contains(currentCharacter))
{
_builder.Append(currentCharacter);
}
diff --git a/src/mlnet/Telemetry/DotNetAppInsights/TelemetryCommonProperties.cs b/src/mlnet/Telemetry/DotNetAppInsights/TelemetryCommonProperties.cs
index 44fcf64ec9..6d6b4b482c 100644
--- a/src/mlnet/Telemetry/DotNetAppInsights/TelemetryCommonProperties.cs
+++ b/src/mlnet/Telemetry/DotNetAppInsights/TelemetryCommonProperties.cs
@@ -3,8 +3,8 @@
using System;
using System.Collections.Generic;
-using Microsoft.DotNet.AutoML;
using System.IO;
+using Microsoft.DotNet.AutoML;
using Microsoft.DotNet.Configurer;
using RuntimeEnvironment = Microsoft.DotNet.PlatformAbstractions.RuntimeEnvironment;
using RuntimeInformation = System.Runtime.InteropServices.RuntimeInformation;
@@ -16,18 +16,18 @@ internal class TelemetryCommonProperties
public TelemetryCommonProperties(
Func getCurrentDirectory = null,
Func hasher = null,
- Func getMACAddress = null,
+ Func getMacAddress = null,
IUserLevelCacheWriter userLevelCacheWriter = null)
{
_getCurrentDirectory = getCurrentDirectory ?? Directory.GetCurrentDirectory;
_hasher = hasher ?? Sha256Hasher.Hash;
- _getMACAddress = getMACAddress ?? MacAddressGetter.GetMacAddress;
+ _getMacAddress = getMacAddress ?? MacAddressGetter.GetMacAddress;
_userLevelCacheWriter = userLevelCacheWriter ?? new UserLevelCacheWriter();
}
private Func _getCurrentDirectory;
private Func _hasher;
- private Func _getMACAddress;
+ private Func _getMacAddress;
private IUserLevelCacheWriter _userLevelCacheWriter;
private const string OSPlatform = "OS Platform";
private const string ProductVersion = "Product Version";
@@ -53,7 +53,7 @@ public Dictionary GetTelemetryCommonProperties()
private string GetMachineId()
{
- var macAddress = _getMACAddress();
+ var macAddress = _getMacAddress();
if (macAddress != null)
{
return _hasher(macAddress);
diff --git a/src/mlnet/Telemetry/DotNetAppInsights/TemporaryDirectory.cs b/src/mlnet/Telemetry/DotNetAppInsights/TemporaryDirectory.cs
index d43683e156..598dd87db8 100644
--- a/src/mlnet/Telemetry/DotNetAppInsights/TemporaryDirectory.cs
+++ b/src/mlnet/Telemetry/DotNetAppInsights/TemporaryDirectory.cs
@@ -1,8 +1,8 @@
// Copyright (c) .NET Foundation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
-using Microsoft.Extensions.EnvironmentAbstractions;
using System.IO;
+using Microsoft.Extensions.EnvironmentAbstractions;
namespace Microsoft.DotNet.InternalAbstractions
{
diff --git a/src/mlnet/Telemetry/MlTelemetry.cs b/src/mlnet/Telemetry/MlTelemetry.cs
index 3eeed79df2..a9ae092e04 100644
--- a/src/mlnet/Telemetry/MlTelemetry.cs
+++ b/src/mlnet/Telemetry/MlTelemetry.cs
@@ -9,11 +9,16 @@ namespace Microsoft.DotNet.Cli.Telemetry
{
public class MlTelemetry
{
- private bool _firstTimeUse = false;
- private bool _enabled = false;
- private List _parameters = new List();
+ private bool _firstTimeUse;
+ private bool _enabled;
+ private List _parameters;
private string _command;
+ public MlTelemetry()
+ {
+ _parameters = new List();
+ }
+
public void SetCommandAndParameters(string command, IEnumerable parameters)
{
if (parameters != null)
diff --git a/src/mlnet/Telemetry/Telemetry.cs b/src/mlnet/Telemetry/Telemetry.cs
index 71d5fda541..4d046303c0 100644
--- a/src/mlnet/Telemetry/Telemetry.cs
+++ b/src/mlnet/Telemetry/Telemetry.cs
@@ -13,18 +13,19 @@ namespace Microsoft.DotNet.Cli.Telemetry
{
public class Telemetry : ITelemetry
{
- private TelemetryClient _client = null;
- private Dictionary _commonProperties = new Dictionary();
- private Task _trackEventTask = null;
+ private TelemetryClient _client;
+ private Dictionary _commonProperties;
+ private Task _trackEventTask;
private const string InstrumentationKey = "c059917c-818d-489a-bfcb-351eaab73f2a";
private const string MlTelemetryOptout = "MLDOTNET_CLI_TELEMETRY_OPTOUT";
private const string MachineId = "MachineId";
public bool Enabled { get; }
-
+
public Telemetry()
{
+ _commonProperties = new Dictionary();
var optedOut = Env.GetEnvironmentVariableAsBool(MlTelemetryOptout, false);
Enabled = !optedOut;
@@ -33,13 +34,13 @@ public Telemetry()
{
return;
}
-
+
//initialize in task to offload to parallel thread
_trackEventTask = Task.Factory.StartNew(() => InitializeTelemetry());
}
public void TrackEvent(
- string eventName,
+ string eventName,
IDictionary properties,
IDictionary measurements)
{
@@ -72,9 +73,9 @@ private void InitializeTelemetry()
_client.InstrumentationKey = InstrumentationKey;
_client.Context.Device.OperatingSystem = RuntimeEnvironment.OperatingSystem;
- // we don't want hostname etc to be sent in plain text.
+ // we don't want hostname etc to be sent in plain text.
// these need to be set to some non-empty values to override default behavior.
- _client.Context.Cloud.RoleInstance = "-";
+ _client.Context.Cloud.RoleInstance = "-";
_client.Context.Cloud.RoleName = "-";
_commonProperties = new TelemetryCommonProperties().GetTelemetryCommonProperties();
@@ -101,7 +102,7 @@ private void TrackEventTask(
{
var eventProperties = GetEventProperties(properties);
var eventMeasurements = GetEventMeasures(measurements);
-
+
_client.TrackEvent(eventName, eventProperties, eventMeasurements);
_client.Flush();
}
diff --git a/src/mlnet/Templates/Console/ModelBuilder.cs b/src/mlnet/Templates/Console/ModelBuilder.cs
index feb2a993f4..6a7fee6d07 100644
--- a/src/mlnet/Templates/Console/ModelBuilder.cs
+++ b/src/mlnet/Templates/Console/ModelBuilder.cs
@@ -27,7 +27,7 @@ public partial class ModelBuilder : ModelBuilderBase
///
public virtual string TransformText()
{
- this.Write(@"//*****************************************************************************************
+ Write(@"//*****************************************************************************************
//* *
//* This is an auto-generated file by Microsoft ML.NET CLI (Command-Line Interface) tool. *
//* *
@@ -40,23 +40,23 @@ public virtual string TransformText()
using Microsoft.ML;
using Microsoft.ML.Data;
using ");
- this.Write(this.ToStringHelper.ToStringWithCulture(Namespace));
- this.Write(".Model.DataModels;\r\n");
- this.Write(this.ToStringHelper.ToStringWithCulture(GeneratedUsings));
- this.Write("\r\nnamespace ");
- this.Write(this.ToStringHelper.ToStringWithCulture(Namespace));
- this.Write(".ConsoleApp\r\n{\r\n public static class ModelBuilder\r\n {\r\n private stat" +
+ Write(ToStringHelper.ToStringWithCulture(Namespace));
+ Write(".Model.DataModels;\r\n");
+ Write(ToStringHelper.ToStringWithCulture(GeneratedUsings));
+ Write("\r\nnamespace ");
+ Write(ToStringHelper.ToStringWithCulture(Namespace));
+ Write(".ConsoleApp\r\n{\r\n public static class ModelBuilder\r\n {\r\n private stat" +
"ic string TRAIN_DATA_FILEPATH = @\"");
- this.Write(this.ToStringHelper.ToStringWithCulture(Path));
- this.Write("\";\r\n");
+ Write(ToStringHelper.ToStringWithCulture(Path));
+ Write("\";\r\n");
if(!string.IsNullOrEmpty(TestPath)){
- this.Write(" private static string TEST_DATA_FILEPATH = @\"");
- this.Write(this.ToStringHelper.ToStringWithCulture(TestPath));
- this.Write("\";\r\n");
+ Write(" private static string TEST_DATA_FILEPATH = @\"");
+ Write(ToStringHelper.ToStringWithCulture(TestPath));
+ Write("\";\r\n");
}
- this.Write(" private static string MODEL_FILEPATH = @\"../../../../");
- this.Write(this.ToStringHelper.ToStringWithCulture(Namespace));
- this.Write(@".Model/MLModel.zip"";
+ Write(" private static string MODEL_FILEPATH = @\"../../../../");
+ Write(ToStringHelper.ToStringWithCulture(Namespace));
+ Write(@".Model/MLModel.zip"";
// Create MLContext to be shared across the model creation workflow objects
// Set a random seed for repeatable/deterministic results across multiple trainings.
@@ -68,44 +68,44 @@ public static void CreateModel()
IDataView trainingDataView = mlContext.Data.LoadFromTextFile(
path: TRAIN_DATA_FILEPATH,
hasHeader : ");
- this.Write(this.ToStringHelper.ToStringWithCulture(HasHeader.ToString().ToLowerInvariant()));
- this.Write(",\r\n separatorChar : \'");
- this.Write(this.ToStringHelper.ToStringWithCulture(Regex.Escape(Separator.ToString())));
- this.Write("\',\r\n allowQuoting : ");
- this.Write(this.ToStringHelper.ToStringWithCulture(AllowQuoting.ToString().ToLowerInvariant()));
- this.Write(",\r\n allowSparse: ");
- this.Write(this.ToStringHelper.ToStringWithCulture(AllowSparse.ToString().ToLowerInvariant()));
- this.Write(");\r\n\r\n");
+ Write(ToStringHelper.ToStringWithCulture(HasHeader.ToString().ToLowerInvariant()));
+ Write(",\r\n separatorChar : \'");
+ Write(ToStringHelper.ToStringWithCulture(Regex.Escape(Separator.ToString())));
+ Write("\',\r\n allowQuoting : ");
+ Write(ToStringHelper.ToStringWithCulture(AllowQuoting.ToString().ToLowerInvariant()));
+ Write(",\r\n allowSparse: ");
+ Write(ToStringHelper.ToStringWithCulture(AllowSparse.ToString().ToLowerInvariant()));
+ Write(");\r\n\r\n");
if(!string.IsNullOrEmpty(TestPath)){
- this.Write(" IDataView testDataView = mlContext.Data.LoadFromTextFile(" +
+ Write(" IDataView testDataView = mlContext.Data.LoadFromTextFile(" +
"\r\n path: TEST_DATA_FILEPATH,\r\n " +
" hasHeader : ");
- this.Write(this.ToStringHelper.ToStringWithCulture(HasHeader.ToString().ToLowerInvariant()));
- this.Write(",\r\n separatorChar : \'");
- this.Write(this.ToStringHelper.ToStringWithCulture(Regex.Escape(Separator.ToString())));
- this.Write("\',\r\n allowQuoting : ");
- this.Write(this.ToStringHelper.ToStringWithCulture(AllowQuoting.ToString().ToLowerInvariant()));
- this.Write(",\r\n allowSparse: ");
- this.Write(this.ToStringHelper.ToStringWithCulture(AllowSparse.ToString().ToLowerInvariant()));
- this.Write(");\r\n");
+ Write(ToStringHelper.ToStringWithCulture(HasHeader.ToString().ToLowerInvariant()));
+ Write(",\r\n separatorChar : \'");
+ Write(ToStringHelper.ToStringWithCulture(Regex.Escape(Separator.ToString())));
+ Write("\',\r\n allowQuoting : ");
+ Write(ToStringHelper.ToStringWithCulture(AllowQuoting.ToString().ToLowerInvariant()));
+ Write(",\r\n allowSparse: ");
+ Write(ToStringHelper.ToStringWithCulture(AllowSparse.ToString().ToLowerInvariant()));
+ Write(");\r\n");
}
- this.Write(" // Build training pipeline\r\n IEstimator trai" +
+ Write(" // Build training pipeline\r\n IEstimator trai" +
"ningPipeline = BuildTrainingPipeline(mlContext);\r\n\r\n");
if(string.IsNullOrEmpty(TestPath)){
- this.Write(" // Evaluate quality of Model\r\n Evaluate(mlContext, trainin" +
+ Write(" // Evaluate quality of Model\r\n Evaluate(mlContext, trainin" +
"gDataView, trainingPipeline);\r\n\r\n");
}
- this.Write(" // Train Model\r\n ITransformer mlModel = TrainModel(mlConte" +
+ Write(" // Train Model\r\n ITransformer mlModel = TrainModel(mlConte" +
"xt, trainingDataView, trainingPipeline);\r\n");
if(!string.IsNullOrEmpty(TestPath)){
- this.Write("\r\n // Evaluate quality of Model\r\n EvaluateModel(mlContext, " +
+ Write("\r\n // Evaluate quality of Model\r\n EvaluateModel(mlContext, " +
"mlModel, testDataView);\r\n");
}
- this.Write("\r\n // Save model\r\n SaveModel(mlContext, mlModel, MODEL_FILE" +
+ Write("\r\n // Save model\r\n SaveModel(mlContext, mlModel, MODEL_FILE" +
"PATH, trainingDataView.Schema);\r\n }\r\n\r\n public static IEstimator BuildTrainingPipeline(MLContext mlContext)\r\n {\r\n");
if(PreTrainerTransforms.Count >0 ) {
- this.Write(" // Data process configuration with pipeline data transformations \r\n " +
+ Write(" // Data process configuration with pipeline data transformations \r\n " +
" var dataProcessPipeline = ");
for(int i=0;i0 ) {
- this.Write(" var trainingPipeline = dataProcessPipeline.Append(trainer);\r\n");
+ Write(" var trainingPipeline = dataProcessPipeline.Append(trainer);\r\n");
}
else{
- this.Write(" var trainingPipeline = trainer;\r\n");
+ Write(" var trainingPipeline = trainer;\r\n");
}
- this.Write(@"
+ Write(@"
return trainingPipeline;
}
@@ -156,71 +156,71 @@ public static ITransformer TrainModel(MLContext mlContext, IDataView trainingDat
");
if(!string.IsNullOrEmpty(TestPath)){
- this.Write(@" private static void EvaluateModel(MLContext mlContext, ITransformer mlModel, IDataView testDataView)
+ Write(@" private static void EvaluateModel(MLContext mlContext, ITransformer mlModel, IDataView testDataView)
{
// Evaluate the model and show accuracy stats
Console.WriteLine(""===== Evaluating Model's accuracy with Test data ====="");
IDataView predictions = mlModel.Transform(testDataView);
");
if("BinaryClassification".Equals(TaskType)){
- this.Write(" var metrics = mlContext.");
- this.Write(this.ToStringHelper.ToStringWithCulture(TaskType));
- this.Write(".EvaluateNonCalibrated(predictions, \"");
- this.Write(this.ToStringHelper.ToStringWithCulture(LabelName));
- this.Write("\", \"Score\");\r\n PrintBinaryClassificationMetrics(metrics);\r\n");
+ Write(" var metrics = mlContext.");
+ Write(ToStringHelper.ToStringWithCulture(TaskType));
+ Write(".EvaluateNonCalibrated(predictions, \"");
+ Write(ToStringHelper.ToStringWithCulture(LabelName));
+ Write("\", \"Score\");\r\n PrintBinaryClassificationMetrics(metrics);\r\n");
} if("MulticlassClassification".Equals(TaskType)){
- this.Write(" var metrics = mlContext.");
- this.Write(this.ToStringHelper.ToStringWithCulture(TaskType));
- this.Write(".Evaluate(predictions, \"");
- this.Write(this.ToStringHelper.ToStringWithCulture(LabelName));
- this.Write("\", \"Score\");\r\n PrintMulticlassClassificationMetrics(metrics);\r\n");
+ Write(" var metrics = mlContext.");
+ Write(ToStringHelper.ToStringWithCulture(TaskType));
+ Write(".Evaluate(predictions, \"");
+ Write(ToStringHelper.ToStringWithCulture(LabelName));
+ Write("\", \"Score\");\r\n PrintMulticlassClassificationMetrics(metrics);\r\n");
}if("Regression".Equals(TaskType)){
- this.Write(" var metrics = mlContext.");
- this.Write(this.ToStringHelper.ToStringWithCulture(TaskType));
- this.Write(".Evaluate(predictions, \"");
- this.Write(this.ToStringHelper.ToStringWithCulture(LabelName));
- this.Write("\", \"Score\");\r\n PrintRegressionMetrics(metrics);\r\n");
+ Write(" var metrics = mlContext.");
+ Write(ToStringHelper.ToStringWithCulture(TaskType));
+ Write(".Evaluate(predictions, \"");
+ Write(ToStringHelper.ToStringWithCulture(LabelName));
+ Write("\", \"Score\");\r\n PrintRegressionMetrics(metrics);\r\n");
}
- this.Write(" }\r\n");
+ Write(" }\r\n");
}else{
- this.Write(@" private static void Evaluate(MLContext mlContext, IDataView trainingDataView, IEstimator trainingPipeline)
+ Write(@" private static void Evaluate(MLContext mlContext, IDataView trainingDataView, IEstimator trainingPipeline)
{
// Cross-Validate with single dataset (since we don't have two datasets, one for training and for evaluate)
// in order to evaluate and get the model's accuracy metrics
Console.WriteLine(""=============== Cross-validating to get model's accuracy metrics ==============="");
");
if("BinaryClassification".Equals(TaskType)){
- this.Write(" var crossValidationResults = mlContext.");
- this.Write(this.ToStringHelper.ToStringWithCulture(TaskType));
- this.Write(".CrossValidateNonCalibrated(trainingDataView, trainingPipeline, numberOfFolds: ");
- this.Write(this.ToStringHelper.ToStringWithCulture(Kfolds));
- this.Write(", labelColumnName:\"");
- this.Write(this.ToStringHelper.ToStringWithCulture(LabelName));
- this.Write("\");\r\n PrintBinaryClassificationFoldsAverageMetrics(crossValidationResu" +
+ Write(" var crossValidationResults = mlContext.");
+ Write(ToStringHelper.ToStringWithCulture(TaskType));
+ Write(".CrossValidateNonCalibrated(trainingDataView, trainingPipeline, numberOfFolds: ");
+ Write(ToStringHelper.ToStringWithCulture(Kfolds));
+ Write(", labelColumnName:\"");
+ Write(ToStringHelper.ToStringWithCulture(LabelName));
+ Write("\");\r\n PrintBinaryClassificationFoldsAverageMetrics(crossValidationResu" +
"lts);\r\n");
}
if("MulticlassClassification".Equals(TaskType)){
- this.Write(" var crossValidationResults = mlContext.");
- this.Write(this.ToStringHelper.ToStringWithCulture(TaskType));
- this.Write(".CrossValidate(trainingDataView, trainingPipeline, numberOfFolds: ");
- this.Write(this.ToStringHelper.ToStringWithCulture(Kfolds));
- this.Write(", labelColumnName:\"");
- this.Write(this.ToStringHelper.ToStringWithCulture(LabelName));
- this.Write("\");\r\n PrintMulticlassClassificationFoldsAverageMetrics(crossValidation" +
+ Write(" var crossValidationResults = mlContext.");
+ Write(ToStringHelper.ToStringWithCulture(TaskType));
+ Write(".CrossValidate(trainingDataView, trainingPipeline, numberOfFolds: ");
+ Write(ToStringHelper.ToStringWithCulture(Kfolds));
+ Write(", labelColumnName:\"");
+ Write(ToStringHelper.ToStringWithCulture(LabelName));
+ Write("\");\r\n PrintMulticlassClassificationFoldsAverageMetrics(crossValidation" +
"Results);\r\n");
}
if("Regression".Equals(TaskType)){
- this.Write(" var crossValidationResults = mlContext.");
- this.Write(this.ToStringHelper.ToStringWithCulture(TaskType));
- this.Write(".CrossValidate(trainingDataView, trainingPipeline, numberOfFolds: ");
- this.Write(this.ToStringHelper.ToStringWithCulture(Kfolds));
- this.Write(", labelColumnName:\"");
- this.Write(this.ToStringHelper.ToStringWithCulture(LabelName));
- this.Write("\");\r\n PrintRegressionFoldsAverageMetrics(crossValidationResults);\r\n");
+ Write(" var crossValidationResults = mlContext.");
+ Write(ToStringHelper.ToStringWithCulture(TaskType));
+ Write(".CrossValidate(trainingDataView, trainingPipeline, numberOfFolds: ");
+ Write(ToStringHelper.ToStringWithCulture(Kfolds));
+ Write(", labelColumnName:\"");
+ Write(ToStringHelper.ToStringWithCulture(LabelName));
+ Write("\");\r\n PrintRegressionFoldsAverageMetrics(crossValidationResults);\r\n");
}
- this.Write(" }\r\n");
+ Write(" }\r\n");
}
- this.Write(@" private static void SaveModel(MLContext mlContext, ITransformer mlModel, string modelRelativePath, DataViewSchema modelInputSchema)
+ Write(@" private static void SaveModel(MLContext mlContext, ITransformer mlModel, string modelRelativePath, DataViewSchema modelInputSchema)
{
// Save/persist the trained model to a .ZIP file
Console.WriteLine($""=============== Saving the model ==============="");
@@ -240,7 +240,7 @@ public static string GetAbsolutePath(string relativePath)
");
if("Regression".Equals(TaskType)){
- this.Write(" public static void PrintRegressionMetrics(RegressionMetrics metrics)\r\n " +
+ Write(" public static void PrintRegressionMetrics(RegressionMetrics metrics)\r\n " +
" {\r\n Console.WriteLine($\"****************************************" +
"*********\");\r\n Console.WriteLine($\"* Metrics for regression mod" +
"el \");\r\n Console.WriteLine($\"*----------------------------------" +
@@ -272,7 +272,7 @@ public static string GetAbsolutePath(string relativePath)
"\r\n Console.WriteLine($\"**********************************************" +
"***************************************************************\");\r\n }\r\n");
} if("BinaryClassification".Equals(TaskType)){
- this.Write(" public static void PrintBinaryClassificationMetrics(BinaryClassificationM" +
+ Write(" public static void PrintBinaryClassificationMetrics(BinaryClassificationM" +
"etrics metrics)\r\n {\r\n Console.WriteLine($\"********************" +
"****************************************\");\r\n Console.WriteLine($\"* " +
" Metrics for binary classification model \");\r\n Console.Write" +
@@ -307,7 +307,7 @@ public static string GetAbsolutePath(string relativePath)
"95 = 1.96 * CalculateStandardDeviation(values) / Math.Sqrt((values.Count() - 1))" +
";\r\n return confidenceInterval95;\r\n }\r\n");
} if("MulticlassClassification".Equals(TaskType)){
- this.Write(" public static void PrintMulticlassClassificationMetrics(MulticlassClassif" +
+ Write(" public static void PrintMulticlassClassificationMetrics(MulticlassClassif" +
"icationMetrics metrics)\r\n {\r\n Console.WriteLine($\"************" +
"************************************************\");\r\n Console.WriteLi" +
"ne($\"* Metrics for multi-class classification model \");\r\n Consol" +
@@ -371,8 +371,8 @@ public static string GetAbsolutePath(string relativePath)
"ndardDeviation(values) / Math.Sqrt((values.Count() - 1));\r\n return co" +
"nfidenceInterval95;\r\n }\r\n");
}
- this.Write(" }\r\n}\r\n");
- return this.GenerationEnvironment.ToString();
+ Write(" }\r\n}\r\n");
+ return GenerationEnvironment.ToString();
}
public string Path {get;set;}
@@ -415,15 +415,15 @@ protected System.Text.StringBuilder GenerationEnvironment
{
get
{
- if ((this.generationEnvironmentField == null))
+ if ((generationEnvironmentField == null))
{
- this.generationEnvironmentField = new global::System.Text.StringBuilder();
+ generationEnvironmentField = new global::System.Text.StringBuilder();
}
- return this.generationEnvironmentField;
+ return generationEnvironmentField;
}
set
{
- this.generationEnvironmentField = value;
+ generationEnvironmentField = value;
}
}
///
@@ -433,11 +433,11 @@ public System.CodeDom.Compiler.CompilerErrorCollection Errors
{
get
{
- if ((this.errorsField == null))
+ if ((errorsField == null))
{
- this.errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection();
+ errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection();
}
- return this.errorsField;
+ return errorsField;
}
}
///
@@ -447,11 +447,11 @@ private System.Collections.Generic.List indentLengths
{
get
{
- if ((this.indentLengthsField == null))
+ if ((indentLengthsField == null))
{
- this.indentLengthsField = new global::System.Collections.Generic.List();
+ indentLengthsField = new global::System.Collections.Generic.List();
}
- return this.indentLengthsField;
+ return indentLengthsField;
}
}
///
@@ -461,7 +461,7 @@ public string CurrentIndent
{
get
{
- return this.currentIndentField;
+ return currentIndentField;
}
}
///
@@ -471,11 +471,11 @@ public string CurrentIndent
{
get
{
- return this.sessionField;
+ return sessionField;
}
set
{
- this.sessionField = value;
+ sessionField = value;
}
}
#endregion
@@ -491,35 +491,35 @@ public void Write(string textToAppend)
}
// If we're starting off, or if the previous text ended with a newline,
// we have to append the current indent first.
- if (((this.GenerationEnvironment.Length == 0)
- || this.endsWithNewline))
+ if (((GenerationEnvironment.Length == 0)
+ || endsWithNewline))
{
- this.GenerationEnvironment.Append(this.currentIndentField);
- this.endsWithNewline = false;
+ GenerationEnvironment.Append(currentIndentField);
+ endsWithNewline = false;
}
// Check if the current text ends with a newline
if (textToAppend.EndsWith(global::System.Environment.NewLine, global::System.StringComparison.CurrentCulture))
{
- this.endsWithNewline = true;
+ endsWithNewline = true;
}
// This is an optimization. If the current indent is "", then we don't have to do any
// of the more complex stuff further down.
- if ((this.currentIndentField.Length == 0))
+ if ((currentIndentField.Length == 0))
{
- this.GenerationEnvironment.Append(textToAppend);
+ GenerationEnvironment.Append(textToAppend);
return;
}
// Everywhere there is a newline in the text, add an indent after it
- textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + this.currentIndentField));
+ textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + currentIndentField));
// If the text ends with a newline, then we should strip off the indent added at the very end
// because the appropriate indent will be added when the next time Write() is called
- if (this.endsWithNewline)
+ if (endsWithNewline)
{
- this.GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - this.currentIndentField.Length));
+ GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - currentIndentField.Length));
}
else
{
- this.GenerationEnvironment.Append(textToAppend);
+ GenerationEnvironment.Append(textToAppend);
}
}
///
@@ -527,23 +527,23 @@ public void Write(string textToAppend)
///
public void WriteLine(string textToAppend)
{
- this.Write(textToAppend);
- this.GenerationEnvironment.AppendLine();
- this.endsWithNewline = true;
+ Write(textToAppend);
+ GenerationEnvironment.AppendLine();
+ endsWithNewline = true;
}
///
/// Write formatted text directly into the generated output
///
public void Write(string format, params object[] args)
{
- this.Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
+ Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
}
///
/// Write formatted text directly into the generated output
///
public void WriteLine(string format, params object[] args)
{
- this.WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
+ WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
}
///
/// Raise an error
@@ -552,7 +552,7 @@ public void Error(string message)
{
System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError();
error.ErrorText = message;
- this.Errors.Add(error);
+ Errors.Add(error);
}
///
/// Raise a warning
@@ -562,7 +562,7 @@ public void Warning(string message)
System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError();
error.ErrorText = message;
error.IsWarning = true;
- this.Errors.Add(error);
+ Errors.Add(error);
}
///
/// Increase the indent
@@ -573,8 +573,8 @@ public void PushIndent(string indent)
{
throw new global::System.ArgumentNullException("indent");
}
- this.currentIndentField = (this.currentIndentField + indent);
- this.indentLengths.Add(indent.Length);
+ currentIndentField = (currentIndentField + indent);
+ indentLengths.Add(indent.Length);
}
///
/// Remove the last indent that was added with PushIndent
@@ -582,14 +582,14 @@ public void PushIndent(string indent)
public string PopIndent()
{
string returnValue = "";
- if ((this.indentLengths.Count > 0))
+ if ((indentLengths.Count > 0))
{
- int indentLength = this.indentLengths[(this.indentLengths.Count - 1)];
- this.indentLengths.RemoveAt((this.indentLengths.Count - 1));
+ int indentLength = indentLengths[(indentLengths.Count - 1)];
+ indentLengths.RemoveAt((indentLengths.Count - 1));
if ((indentLength > 0))
{
- returnValue = this.currentIndentField.Substring((this.currentIndentField.Length - indentLength));
- this.currentIndentField = this.currentIndentField.Remove((this.currentIndentField.Length - indentLength));
+ returnValue = currentIndentField.Substring((currentIndentField.Length - indentLength));
+ currentIndentField = currentIndentField.Remove((currentIndentField.Length - indentLength));
}
}
return returnValue;
@@ -599,8 +599,8 @@ public string PopIndent()
///
public void ClearIndent()
{
- this.indentLengths.Clear();
- this.currentIndentField = "";
+ indentLengths.Clear();
+ currentIndentField = "";
}
#endregion
#region ToString Helpers
@@ -617,13 +617,13 @@ public System.IFormatProvider FormatProvider
{
get
{
- return this.formatProviderField ;
+ return formatProviderField ;
}
set
{
if ((value != null))
{
- this.formatProviderField = value;
+ formatProviderField = value;
}
}
}
@@ -646,7 +646,7 @@ public string ToStringWithCulture(object objectToConvert)
else
{
return ((string)(method.Invoke(objectToConvert, new object[] {
- this.formatProviderField })));
+ formatProviderField })));
}
}
}
@@ -658,7 +658,7 @@ public ToStringInstanceHelper ToStringHelper
{
get
{
- return this.toStringHelperField;
+ return toStringHelperField;
}
}
#endregion
diff --git a/src/mlnet/Templates/Console/ModelInputClass.cs b/src/mlnet/Templates/Console/ModelInputClass.cs
index 6205ffc683..b136882a82 100644
--- a/src/mlnet/Templates/Console/ModelInputClass.cs
+++ b/src/mlnet/Templates/Console/ModelInputClass.cs
@@ -25,7 +25,7 @@ public partial class ModelInputClass : ModelInputClassBase
///
public virtual string TransformText()
{
- this.Write(@"//*****************************************************************************************
+ Write(@"//*****************************************************************************************
//* *
//* This is an auto-generated file by Microsoft ML.NET CLI (Command-Line Interface) tool. *
//* *
@@ -34,15 +34,15 @@ public virtual string TransformText()
using Microsoft.ML.Data;
namespace ");
- this.Write(this.ToStringHelper.ToStringWithCulture(Namespace));
- this.Write(".Model.DataModels\r\n{\r\n public class ModelInput\r\n {\r\n");
+ Write(ToStringHelper.ToStringWithCulture(Namespace));
+ Write(".Model.DataModels\r\n{\r\n public class ModelInput\r\n {\r\n");
foreach(var label in ClassLabels){
- this.Write(" ");
- this.Write(this.ToStringHelper.ToStringWithCulture(label));
- this.Write("\r\n");
+ Write(" ");
+ Write(ToStringHelper.ToStringWithCulture(label));
+ Write("\r\n");
}
- this.Write("}\r\n}\r\n");
- return this.GenerationEnvironment.ToString();
+ Write("}\r\n}\r\n");
+ return GenerationEnvironment.ToString();
}
public IList ClassLabels {get;set;}
@@ -72,15 +72,15 @@ protected System.Text.StringBuilder GenerationEnvironment
{
get
{
- if ((this.generationEnvironmentField == null))
+ if ((generationEnvironmentField == null))
{
- this.generationEnvironmentField = new global::System.Text.StringBuilder();
+ generationEnvironmentField = new global::System.Text.StringBuilder();
}
- return this.generationEnvironmentField;
+ return generationEnvironmentField;
}
set
{
- this.generationEnvironmentField = value;
+ generationEnvironmentField = value;
}
}
///
@@ -90,11 +90,11 @@ public System.CodeDom.Compiler.CompilerErrorCollection Errors
{
get
{
- if ((this.errorsField == null))
+ if ((errorsField == null))
{
- this.errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection();
+ errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection();
}
- return this.errorsField;
+ return errorsField;
}
}
///
@@ -104,11 +104,11 @@ private System.Collections.Generic.List indentLengths
{
get
{
- if ((this.indentLengthsField == null))
+ if ((indentLengthsField == null))
{
- this.indentLengthsField = new global::System.Collections.Generic.List();
+ indentLengthsField = new global::System.Collections.Generic.List();
}
- return this.indentLengthsField;
+ return indentLengthsField;
}
}
///
@@ -118,7 +118,7 @@ public string CurrentIndent
{
get
{
- return this.currentIndentField;
+ return currentIndentField;
}
}
///
@@ -128,11 +128,11 @@ public string CurrentIndent
{
get
{
- return this.sessionField;
+ return sessionField;
}
set
{
- this.sessionField = value;
+ sessionField = value;
}
}
#endregion
@@ -148,35 +148,35 @@ public void Write(string textToAppend)
}
// If we're starting off, or if the previous text ended with a newline,
// we have to append the current indent first.
- if (((this.GenerationEnvironment.Length == 0)
- || this.endsWithNewline))
+ if (((GenerationEnvironment.Length == 0)
+ || endsWithNewline))
{
- this.GenerationEnvironment.Append(this.currentIndentField);
- this.endsWithNewline = false;
+ GenerationEnvironment.Append(currentIndentField);
+ endsWithNewline = false;
}
// Check if the current text ends with a newline
if (textToAppend.EndsWith(global::System.Environment.NewLine, global::System.StringComparison.CurrentCulture))
{
- this.endsWithNewline = true;
+ endsWithNewline = true;
}
// This is an optimization. If the current indent is "", then we don't have to do any
// of the more complex stuff further down.
- if ((this.currentIndentField.Length == 0))
+ if ((currentIndentField.Length == 0))
{
- this.GenerationEnvironment.Append(textToAppend);
+ GenerationEnvironment.Append(textToAppend);
return;
}
// Everywhere there is a newline in the text, add an indent after it
- textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + this.currentIndentField));
+ textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + currentIndentField));
// If the text ends with a newline, then we should strip off the indent added at the very end
// because the appropriate indent will be added when the next time Write() is called
- if (this.endsWithNewline)
+ if (endsWithNewline)
{
- this.GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - this.currentIndentField.Length));
+ GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - currentIndentField.Length));
}
else
{
- this.GenerationEnvironment.Append(textToAppend);
+ GenerationEnvironment.Append(textToAppend);
}
}
///
@@ -184,23 +184,23 @@ public void Write(string textToAppend)
///
public void WriteLine(string textToAppend)
{
- this.Write(textToAppend);
- this.GenerationEnvironment.AppendLine();
- this.endsWithNewline = true;
+ Write(textToAppend);
+ GenerationEnvironment.AppendLine();
+ endsWithNewline = true;
}
///
/// Write formatted text directly into the generated output
///
public void Write(string format, params object[] args)
{
- this.Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
+ Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
}
///
/// Write formatted text directly into the generated output
///
public void WriteLine(string format, params object[] args)
{
- this.WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
+ WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
}
///
/// Raise an error
@@ -209,7 +209,7 @@ public void Error(string message)
{
System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError();
error.ErrorText = message;
- this.Errors.Add(error);
+ Errors.Add(error);
}
///
/// Raise a warning
@@ -219,7 +219,7 @@ public void Warning(string message)
System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError();
error.ErrorText = message;
error.IsWarning = true;
- this.Errors.Add(error);
+ Errors.Add(error);
}
///
/// Increase the indent
@@ -230,8 +230,8 @@ public void PushIndent(string indent)
{
throw new global::System.ArgumentNullException("indent");
}
- this.currentIndentField = (this.currentIndentField + indent);
- this.indentLengths.Add(indent.Length);
+ currentIndentField = (currentIndentField + indent);
+ indentLengths.Add(indent.Length);
}
///
/// Remove the last indent that was added with PushIndent
@@ -239,14 +239,14 @@ public void PushIndent(string indent)
public string PopIndent()
{
string returnValue = "";
- if ((this.indentLengths.Count > 0))
+ if ((indentLengths.Count > 0))
{
- int indentLength = this.indentLengths[(this.indentLengths.Count - 1)];
- this.indentLengths.RemoveAt((this.indentLengths.Count - 1));
+ int indentLength = indentLengths[(indentLengths.Count - 1)];
+ indentLengths.RemoveAt((indentLengths.Count - 1));
if ((indentLength > 0))
{
- returnValue = this.currentIndentField.Substring((this.currentIndentField.Length - indentLength));
- this.currentIndentField = this.currentIndentField.Remove((this.currentIndentField.Length - indentLength));
+ returnValue = currentIndentField.Substring((currentIndentField.Length - indentLength));
+ currentIndentField = currentIndentField.Remove((currentIndentField.Length - indentLength));
}
}
return returnValue;
@@ -256,8 +256,8 @@ public string PopIndent()
///
public void ClearIndent()
{
- this.indentLengths.Clear();
- this.currentIndentField = "";
+ indentLengths.Clear();
+ currentIndentField = "";
}
#endregion
#region ToString Helpers
@@ -274,13 +274,13 @@ public System.IFormatProvider FormatProvider
{
get
{
- return this.formatProviderField ;
+ return formatProviderField ;
}
set
{
if ((value != null))
{
- this.formatProviderField = value;
+ formatProviderField = value;
}
}
}
@@ -303,7 +303,7 @@ public string ToStringWithCulture(object objectToConvert)
else
{
return ((string)(method.Invoke(objectToConvert, new object[] {
- this.formatProviderField })));
+ formatProviderField })));
}
}
}
@@ -315,7 +315,7 @@ public ToStringInstanceHelper ToStringHelper
{
get
{
- return this.toStringHelperField;
+ return toStringHelperField;
}
}
#endregion
diff --git a/src/mlnet/Templates/Console/ModelOutputClass.cs b/src/mlnet/Templates/Console/ModelOutputClass.cs
index 767ae5d0da..01c5b5b1c1 100644
--- a/src/mlnet/Templates/Console/ModelOutputClass.cs
+++ b/src/mlnet/Templates/Console/ModelOutputClass.cs
@@ -25,7 +25,7 @@ public partial class ModelOutputClass : ModelOutputClassBase
///
public virtual string TransformText()
{
- this.Write(@"//*****************************************************************************************
+ Write(@"//*****************************************************************************************
//* *
//* This is an auto-generated file by Microsoft ML.NET CLI (Command-Line Interface) tool. *
//* *
@@ -35,26 +35,26 @@ public virtual string TransformText()
using Microsoft.ML.Data;
namespace ");
- this.Write(this.ToStringHelper.ToStringWithCulture(Namespace));
- this.Write(".Model.DataModels\r\n{\r\n public class ModelOutput\r\n {\r\n");
+ Write(ToStringHelper.ToStringWithCulture(Namespace));
+ Write(".Model.DataModels\r\n{\r\n public class ModelOutput\r\n {\r\n");
if("BinaryClassification".Equals(TaskType)){
- this.Write(" // ColumnName attribute is used to change the column name from\r\n /" +
+ Write(" // ColumnName attribute is used to change the column name from\r\n /" +
"/ its default value, which is the name of the field.\r\n [ColumnName(\"Predi" +
"ctedLabel\")]\r\n public bool Prediction { get; set; }\r\n\r\n");
} if("MulticlassClassification".Equals(TaskType)){
- this.Write(" // ColumnName attribute is used to change the column name from\r\n /" +
+ Write(" // ColumnName attribute is used to change the column name from\r\n /" +
"/ its default value, which is the name of the field.\r\n [ColumnName(\"Predi" +
"ctedLabel\")]\r\n public ");
- this.Write(this.ToStringHelper.ToStringWithCulture(PredictionLabelType));
- this.Write(" Prediction { get; set; }\r\n");
+ Write(ToStringHelper.ToStringWithCulture(PredictionLabelType));
+ Write(" Prediction { get; set; }\r\n");
}
if("MulticlassClassification".Equals(TaskType)){
- this.Write(" public float[] Score { get; set; }\r\n");
+ Write(" public float[] Score { get; set; }\r\n");
}else{
- this.Write(" public float Score { get; set; }\r\n");
+ Write(" public float Score { get; set; }\r\n");
}
- this.Write(" }\r\n}\r\n");
- return this.GenerationEnvironment.ToString();
+ Write(" }\r\n}\r\n");
+ return GenerationEnvironment.ToString();
}
public string TaskType {get;set;}
@@ -85,15 +85,15 @@ protected System.Text.StringBuilder GenerationEnvironment
{
get
{
- if ((this.generationEnvironmentField == null))
+ if ((generationEnvironmentField == null))
{
- this.generationEnvironmentField = new global::System.Text.StringBuilder();
+ generationEnvironmentField = new global::System.Text.StringBuilder();
}
- return this.generationEnvironmentField;
+ return generationEnvironmentField;
}
set
{
- this.generationEnvironmentField = value;
+ generationEnvironmentField = value;
}
}
///
@@ -103,11 +103,11 @@ public System.CodeDom.Compiler.CompilerErrorCollection Errors
{
get
{
- if ((this.errorsField == null))
+ if ((errorsField == null))
{
- this.errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection();
+ errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection();
}
- return this.errorsField;
+ return errorsField;
}
}
///
@@ -117,11 +117,11 @@ private System.Collections.Generic.List indentLengths
{
get
{
- if ((this.indentLengthsField == null))
+ if ((indentLengthsField == null))
{
- this.indentLengthsField = new global::System.Collections.Generic.List();
+ indentLengthsField = new global::System.Collections.Generic.List();
}
- return this.indentLengthsField;
+ return indentLengthsField;
}
}
///
@@ -131,7 +131,7 @@ public string CurrentIndent
{
get
{
- return this.currentIndentField;
+ return currentIndentField;
}
}
///
@@ -141,11 +141,11 @@ public string CurrentIndent
{
get
{
- return this.sessionField;
+ return sessionField;
}
set
{
- this.sessionField = value;
+ sessionField = value;
}
}
#endregion
@@ -161,35 +161,35 @@ public void Write(string textToAppend)
}
// If we're starting off, or if the previous text ended with a newline,
// we have to append the current indent first.
- if (((this.GenerationEnvironment.Length == 0)
- || this.endsWithNewline))
+ if (((GenerationEnvironment.Length == 0)
+ || endsWithNewline))
{
- this.GenerationEnvironment.Append(this.currentIndentField);
- this.endsWithNewline = false;
+ GenerationEnvironment.Append(currentIndentField);
+ endsWithNewline = false;
}
// Check if the current text ends with a newline
if (textToAppend.EndsWith(global::System.Environment.NewLine, global::System.StringComparison.CurrentCulture))
{
- this.endsWithNewline = true;
+ endsWithNewline = true;
}
// This is an optimization. If the current indent is "", then we don't have to do any
// of the more complex stuff further down.
- if ((this.currentIndentField.Length == 0))
+ if ((currentIndentField.Length == 0))
{
- this.GenerationEnvironment.Append(textToAppend);
+ GenerationEnvironment.Append(textToAppend);
return;
}
// Everywhere there is a newline in the text, add an indent after it
- textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + this.currentIndentField));
+ textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + currentIndentField));
// If the text ends with a newline, then we should strip off the indent added at the very end
// because the appropriate indent will be added when the next time Write() is called
- if (this.endsWithNewline)
+ if (endsWithNewline)
{
- this.GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - this.currentIndentField.Length));
+ GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - currentIndentField.Length));
}
else
{
- this.GenerationEnvironment.Append(textToAppend);
+ GenerationEnvironment.Append(textToAppend);
}
}
///
@@ -197,23 +197,23 @@ public void Write(string textToAppend)
///
public void WriteLine(string textToAppend)
{
- this.Write(textToAppend);
- this.GenerationEnvironment.AppendLine();
- this.endsWithNewline = true;
+ Write(textToAppend);
+ GenerationEnvironment.AppendLine();
+ endsWithNewline = true;
}
///
/// Write formatted text directly into the generated output
///
public void Write(string format, params object[] args)
{
- this.Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
+ Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
}
///
/// Write formatted text directly into the generated output
///
public void WriteLine(string format, params object[] args)
{
- this.WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
+ WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
}
///
/// Raise an error
@@ -222,7 +222,7 @@ public void Error(string message)
{
System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError();
error.ErrorText = message;
- this.Errors.Add(error);
+ Errors.Add(error);
}
///
/// Raise a warning
@@ -232,7 +232,7 @@ public void Warning(string message)
System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError();
error.ErrorText = message;
error.IsWarning = true;
- this.Errors.Add(error);
+ Errors.Add(error);
}
///
/// Increase the indent
@@ -243,8 +243,8 @@ public void PushIndent(string indent)
{
throw new global::System.ArgumentNullException("indent");
}
- this.currentIndentField = (this.currentIndentField + indent);
- this.indentLengths.Add(indent.Length);
+ currentIndentField = (currentIndentField + indent);
+ indentLengths.Add(indent.Length);
}
///
/// Remove the last indent that was added with PushIndent
@@ -252,14 +252,14 @@ public void PushIndent(string indent)
public string PopIndent()
{
string returnValue = "";
- if ((this.indentLengths.Count > 0))
+ if ((indentLengths.Count > 0))
{
- int indentLength = this.indentLengths[(this.indentLengths.Count - 1)];
- this.indentLengths.RemoveAt((this.indentLengths.Count - 1));
+ int indentLength = indentLengths[(indentLengths.Count - 1)];
+ indentLengths.RemoveAt((indentLengths.Count - 1));
if ((indentLength > 0))
{
- returnValue = this.currentIndentField.Substring((this.currentIndentField.Length - indentLength));
- this.currentIndentField = this.currentIndentField.Remove((this.currentIndentField.Length - indentLength));
+ returnValue = currentIndentField.Substring((currentIndentField.Length - indentLength));
+ currentIndentField = currentIndentField.Remove((currentIndentField.Length - indentLength));
}
}
return returnValue;
@@ -269,8 +269,8 @@ public string PopIndent()
///
public void ClearIndent()
{
- this.indentLengths.Clear();
- this.currentIndentField = "";
+ indentLengths.Clear();
+ currentIndentField = "";
}
#endregion
#region ToString Helpers
@@ -287,13 +287,13 @@ public System.IFormatProvider FormatProvider
{
get
{
- return this.formatProviderField ;
+ return formatProviderField ;
}
set
{
if ((value != null))
{
- this.formatProviderField = value;
+ formatProviderField = value;
}
}
}
@@ -316,7 +316,7 @@ public string ToStringWithCulture(object objectToConvert)
else
{
return ((string)(method.Invoke(objectToConvert, new object[] {
- this.formatProviderField })));
+ formatProviderField })));
}
}
}
@@ -328,7 +328,7 @@ public ToStringInstanceHelper ToStringHelper
{
get
{
- return this.toStringHelperField;
+ return toStringHelperField;
}
}
#endregion
diff --git a/src/mlnet/Templates/Console/ModelProject.cs b/src/mlnet/Templates/Console/ModelProject.cs
index e16f29c915..7c26ec3d76 100644
--- a/src/mlnet/Templates/Console/ModelProject.cs
+++ b/src/mlnet/Templates/Console/ModelProject.cs
@@ -25,22 +25,22 @@ public partial class ModelProject : ModelProjectBase
///
public virtual string TransformText()
{
- this.Write("\r\n\r\n \r\n netc" +
+ Write("\r\n\r\n \r\n netc" +
"oreapp2.1\r\n \r\n \r\n \r\n");
if (IncludeLightGBMPackage) {
- this.Write(" \r\n");
+ Write(" \r\n");
}
if (IncludeMklComponentsPackage){
- this.Write(" \r\n");
+ Write(" \r\n");
}
if (IncludeFastTreePackage){
- this.Write(" \r\n");
+ Write(" \r\n");
}
- this.Write(" \r\n\r\n \r\n \r\n \r\n\r\n \r\n \r\n PreserveNewest\r\n \r\n \r\n \r\n\r\n");
- return this.GenerationEnvironment.ToString();
+ return GenerationEnvironment.ToString();
}
public bool IncludeLightGBMPackage {get;set;}
@@ -71,15 +71,15 @@ protected System.Text.StringBuilder GenerationEnvironment
{
get
{
- if ((this.generationEnvironmentField == null))
+ if ((generationEnvironmentField == null))
{
- this.generationEnvironmentField = new global::System.Text.StringBuilder();
+ generationEnvironmentField = new global::System.Text.StringBuilder();
}
- return this.generationEnvironmentField;
+ return generationEnvironmentField;
}
set
{
- this.generationEnvironmentField = value;
+ generationEnvironmentField = value;
}
}
///
@@ -89,11 +89,11 @@ public System.CodeDom.Compiler.CompilerErrorCollection Errors
{
get
{
- if ((this.errorsField == null))
+ if ((errorsField == null))
{
- this.errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection();
+ errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection();
}
- return this.errorsField;
+ return errorsField;
}
}
///
@@ -103,11 +103,11 @@ private System.Collections.Generic.List indentLengths
{
get
{
- if ((this.indentLengthsField == null))
+ if ((indentLengthsField == null))
{
- this.indentLengthsField = new global::System.Collections.Generic.List();
+ indentLengthsField = new global::System.Collections.Generic.List();
}
- return this.indentLengthsField;
+ return indentLengthsField;
}
}
///
@@ -117,7 +117,7 @@ public string CurrentIndent
{
get
{
- return this.currentIndentField;
+ return currentIndentField;
}
}
///
@@ -127,11 +127,11 @@ public string CurrentIndent
{
get
{
- return this.sessionField;
+ return sessionField;
}
set
{
- this.sessionField = value;
+ sessionField = value;
}
}
#endregion
@@ -147,35 +147,35 @@ public void Write(string textToAppend)
}
// If we're starting off, or if the previous text ended with a newline,
// we have to append the current indent first.
- if (((this.GenerationEnvironment.Length == 0)
- || this.endsWithNewline))
+ if (((GenerationEnvironment.Length == 0)
+ || endsWithNewline))
{
- this.GenerationEnvironment.Append(this.currentIndentField);
- this.endsWithNewline = false;
+ GenerationEnvironment.Append(currentIndentField);
+ endsWithNewline = false;
}
// Check if the current text ends with a newline
if (textToAppend.EndsWith(global::System.Environment.NewLine, global::System.StringComparison.CurrentCulture))
{
- this.endsWithNewline = true;
+ endsWithNewline = true;
}
// This is an optimization. If the current indent is "", then we don't have to do any
// of the more complex stuff further down.
- if ((this.currentIndentField.Length == 0))
+ if ((currentIndentField.Length == 0))
{
- this.GenerationEnvironment.Append(textToAppend);
+ GenerationEnvironment.Append(textToAppend);
return;
}
// Everywhere there is a newline in the text, add an indent after it
- textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + this.currentIndentField));
+ textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + currentIndentField));
// If the text ends with a newline, then we should strip off the indent added at the very end
// because the appropriate indent will be added when the next time Write() is called
- if (this.endsWithNewline)
+ if (endsWithNewline)
{
- this.GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - this.currentIndentField.Length));
+ GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - currentIndentField.Length));
}
else
{
- this.GenerationEnvironment.Append(textToAppend);
+ GenerationEnvironment.Append(textToAppend);
}
}
///
@@ -183,23 +183,23 @@ public void Write(string textToAppend)
///
public void WriteLine(string textToAppend)
{
- this.Write(textToAppend);
- this.GenerationEnvironment.AppendLine();
- this.endsWithNewline = true;
+ Write(textToAppend);
+ GenerationEnvironment.AppendLine();
+ endsWithNewline = true;
}
///
/// Write formatted text directly into the generated output
///
public void Write(string format, params object[] args)
{
- this.Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
+ Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
}
///
/// Write formatted text directly into the generated output
///
public void WriteLine(string format, params object[] args)
{
- this.WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
+ WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
}
///
/// Raise an error
@@ -208,7 +208,7 @@ public void Error(string message)
{
System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError();
error.ErrorText = message;
- this.Errors.Add(error);
+ Errors.Add(error);
}
///
/// Raise a warning
@@ -218,7 +218,7 @@ public void Warning(string message)
System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError();
error.ErrorText = message;
error.IsWarning = true;
- this.Errors.Add(error);
+ Errors.Add(error);
}
///
/// Increase the indent
@@ -229,8 +229,8 @@ public void PushIndent(string indent)
{
throw new global::System.ArgumentNullException("indent");
}
- this.currentIndentField = (this.currentIndentField + indent);
- this.indentLengths.Add(indent.Length);
+ currentIndentField = (currentIndentField + indent);
+ indentLengths.Add(indent.Length);
}
///
/// Remove the last indent that was added with PushIndent
@@ -238,14 +238,14 @@ public void PushIndent(string indent)
public string PopIndent()
{
string returnValue = "";
- if ((this.indentLengths.Count > 0))
+ if ((indentLengths.Count > 0))
{
- int indentLength = this.indentLengths[(this.indentLengths.Count - 1)];
- this.indentLengths.RemoveAt((this.indentLengths.Count - 1));
+ int indentLength = indentLengths[(indentLengths.Count - 1)];
+ indentLengths.RemoveAt((indentLengths.Count - 1));
if ((indentLength > 0))
{
- returnValue = this.currentIndentField.Substring((this.currentIndentField.Length - indentLength));
- this.currentIndentField = this.currentIndentField.Remove((this.currentIndentField.Length - indentLength));
+ returnValue = currentIndentField.Substring((currentIndentField.Length - indentLength));
+ currentIndentField = currentIndentField.Remove((currentIndentField.Length - indentLength));
}
}
return returnValue;
@@ -255,8 +255,8 @@ public string PopIndent()
///
public void ClearIndent()
{
- this.indentLengths.Clear();
- this.currentIndentField = "";
+ indentLengths.Clear();
+ currentIndentField = "";
}
#endregion
#region ToString Helpers
@@ -273,13 +273,13 @@ public System.IFormatProvider FormatProvider
{
get
{
- return this.formatProviderField ;
+ return formatProviderField ;
}
set
{
if ((value != null))
{
- this.formatProviderField = value;
+ formatProviderField = value;
}
}
}
@@ -302,7 +302,7 @@ public string ToStringWithCulture(object objectToConvert)
else
{
return ((string)(method.Invoke(objectToConvert, new object[] {
- this.formatProviderField })));
+ formatProviderField })));
}
}
}
@@ -314,7 +314,7 @@ public ToStringInstanceHelper ToStringHelper
{
get
{
- return this.toStringHelperField;
+ return toStringHelperField;
}
}
#endregion
diff --git a/src/mlnet/Templates/Console/PredictProgram.cs b/src/mlnet/Templates/Console/PredictProgram.cs
index 22d7d0ebee..8af0aaef22 100644
--- a/src/mlnet/Templates/Console/PredictProgram.cs
+++ b/src/mlnet/Templates/Console/PredictProgram.cs
@@ -27,7 +27,7 @@ public partial class PredictProgram : PredictProgramBase
///
public virtual string TransformText()
{
- this.Write(@"//*****************************************************************************************
+ Write(@"//*****************************************************************************************
//* *
//* This is an auto-generated file by Microsoft ML.NET CLI (Command-Line Interface) tool. *
//* *
@@ -38,22 +38,22 @@ public virtual string TransformText()
using System.Linq;
using Microsoft.ML;
using ");
- this.Write(this.ToStringHelper.ToStringWithCulture(Namespace));
- this.Write(".Model.DataModels;\r\n\r\n\r\nnamespace ");
- this.Write(this.ToStringHelper.ToStringWithCulture(Namespace));
- this.Write(".ConsoleApp\r\n{\r\n class Program\r\n {\r\n //Machine Learning model to loa" +
+ Write(ToStringHelper.ToStringWithCulture(Namespace));
+ Write(".Model.DataModels;\r\n\r\n\r\nnamespace ");
+ Write(ToStringHelper.ToStringWithCulture(Namespace));
+ Write(".ConsoleApp\r\n{\r\n class Program\r\n {\r\n //Machine Learning model to loa" +
"d and use for predictions\r\n private const string MODEL_FILEPATH = @\"MLMod" +
"el.zip\";\r\n\r\n //Dataset to use for predictions \r\n");
if(string.IsNullOrEmpty(TestDataPath)){
- this.Write(" private const string DATA_FILEPATH = @\"");
- this.Write(this.ToStringHelper.ToStringWithCulture(TrainDataPath));
- this.Write("\";\r\n");
+ Write(" private const string DATA_FILEPATH = @\"");
+ Write(ToStringHelper.ToStringWithCulture(TrainDataPath));
+ Write("\";\r\n");
} else{
- this.Write(" private const string DATA_FILEPATH = @\"");
- this.Write(this.ToStringHelper.ToStringWithCulture(TestDataPath));
- this.Write("\";\r\n");
+ Write(" private const string DATA_FILEPATH = @\"");
+ Write(ToStringHelper.ToStringWithCulture(TestDataPath));
+ Write("\";\r\n");
}
- this.Write(@"
+ Write(@"
static void Main(string[] args)
{
MLContext mlContext = new MLContext();
@@ -72,20 +72,20 @@ static void Main(string[] args)
");
if("BinaryClassification".Equals(TaskType)){
- this.Write(" Console.WriteLine($\"Single Prediction --> Actual value: {sampleData.");
- this.Write(this.ToStringHelper.ToStringWithCulture(Utils.Normalize(LabelName)));
- this.Write("} | Predicted value: {predictionResult.Prediction}\");\r\n");
+ Write(" Console.WriteLine($\"Single Prediction --> Actual value: {sampleData.");
+ Write(ToStringHelper.ToStringWithCulture(Utils.Normalize(LabelName)));
+ Write("} | Predicted value: {predictionResult.Prediction}\");\r\n");
}else if("Regression".Equals(TaskType)){
- this.Write(" Console.WriteLine($\"Single Prediction --> Actual value: {sampleData.");
- this.Write(this.ToStringHelper.ToStringWithCulture(Utils.Normalize(LabelName)));
- this.Write("} | Predicted value: {predictionResult.Score}\");\r\n");
+ Write(" Console.WriteLine($\"Single Prediction --> Actual value: {sampleData.");
+ Write(ToStringHelper.ToStringWithCulture(Utils.Normalize(LabelName)));
+ Write("} | Predicted value: {predictionResult.Score}\");\r\n");
} else if("MulticlassClassification".Equals(TaskType)){
- this.Write(" Console.WriteLine($\"Single Prediction --> Actual value: {sampleData.");
- this.Write(this.ToStringHelper.ToStringWithCulture(Utils.Normalize(LabelName)));
- this.Write("} | Predicted value: {predictionResult.Prediction} | Predicted scores: [{String.J" +
+ Write(" Console.WriteLine($\"Single Prediction --> Actual value: {sampleData.");
+ Write(ToStringHelper.ToStringWithCulture(Utils.Normalize(LabelName)));
+ Write("} | Predicted value: {predictionResult.Prediction} | Predicted scores: [{String.J" +
"oin(\",\", predictionResult.Score)}]\");\r\n");
}
- this.Write(@"
+ Write(@"
Console.WriteLine(""=============== End of process, hit any key to finish ==============="");
Console.ReadKey();
}
@@ -98,14 +98,14 @@ private static ModelInput CreateSingleDataSample(MLContext mlContext, string dat
IDataView dataView = mlContext.Data.LoadFromTextFile(
path: dataFilePath,
hasHeader : ");
- this.Write(this.ToStringHelper.ToStringWithCulture(HasHeader.ToString().ToLowerInvariant()));
- this.Write(",\r\n separatorChar : \'");
- this.Write(this.ToStringHelper.ToStringWithCulture(Regex.Escape(Separator.ToString())));
- this.Write("\',\r\n allowQuoting : ");
- this.Write(this.ToStringHelper.ToStringWithCulture(AllowQuoting.ToString().ToLowerInvariant()));
- this.Write(",\r\n allowSparse: ");
- this.Write(this.ToStringHelper.ToStringWithCulture(AllowSparse.ToString().ToLowerInvariant()));
- this.Write(@");
+ Write(ToStringHelper.ToStringWithCulture(HasHeader.ToString().ToLowerInvariant()));
+ Write(",\r\n separatorChar : \'");
+ Write(ToStringHelper.ToStringWithCulture(Regex.Escape(Separator.ToString())));
+ Write("\',\r\n allowQuoting : ");
+ Write(ToStringHelper.ToStringWithCulture(AllowQuoting.ToString().ToLowerInvariant()));
+ Write(",\r\n allowSparse: ");
+ Write(ToStringHelper.ToStringWithCulture(AllowSparse.ToString().ToLowerInvariant()));
+ Write(@");
// Here (ModelInput object) you could provide new test data, hardcoded or from the end-user application, instead of the row from the file.
ModelInput sampleForPrediction = mlContext.Data.CreateEnumerable(dataView, false)
@@ -125,7 +125,7 @@ public static string GetAbsolutePath(string relativePath)
}
}
");
- return this.GenerationEnvironment.ToString();
+ return GenerationEnvironment.ToString();
}
public string TaskType {get;set;}
@@ -162,15 +162,15 @@ protected System.Text.StringBuilder GenerationEnvironment
{
get
{
- if ((this.generationEnvironmentField == null))
+ if ((generationEnvironmentField == null))
{
- this.generationEnvironmentField = new global::System.Text.StringBuilder();
+ generationEnvironmentField = new global::System.Text.StringBuilder();
}
- return this.generationEnvironmentField;
+ return generationEnvironmentField;
}
set
{
- this.generationEnvironmentField = value;
+ generationEnvironmentField = value;
}
}
///
@@ -180,11 +180,11 @@ public System.CodeDom.Compiler.CompilerErrorCollection Errors
{
get
{
- if ((this.errorsField == null))
+ if ((errorsField == null))
{
- this.errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection();
+ errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection();
}
- return this.errorsField;
+ return errorsField;
}
}
///
@@ -194,11 +194,11 @@ private System.Collections.Generic.List indentLengths
{
get
{
- if ((this.indentLengthsField == null))
+ if ((indentLengthsField == null))
{
- this.indentLengthsField = new global::System.Collections.Generic.List();
+ indentLengthsField = new global::System.Collections.Generic.List();
}
- return this.indentLengthsField;
+ return indentLengthsField;
}
}
///
@@ -208,7 +208,7 @@ public string CurrentIndent
{
get
{
- return this.currentIndentField;
+ return currentIndentField;
}
}
///
@@ -218,11 +218,11 @@ public string CurrentIndent
{
get
{
- return this.sessionField;
+ return sessionField;
}
set
{
- this.sessionField = value;
+ sessionField = value;
}
}
#endregion
@@ -238,35 +238,35 @@ public void Write(string textToAppend)
}
// If we're starting off, or if the previous text ended with a newline,
// we have to append the current indent first.
- if (((this.GenerationEnvironment.Length == 0)
- || this.endsWithNewline))
+ if (((GenerationEnvironment.Length == 0)
+ || endsWithNewline))
{
- this.GenerationEnvironment.Append(this.currentIndentField);
- this.endsWithNewline = false;
+ GenerationEnvironment.Append(currentIndentField);
+ endsWithNewline = false;
}
// Check if the current text ends with a newline
if (textToAppend.EndsWith(global::System.Environment.NewLine, global::System.StringComparison.CurrentCulture))
{
- this.endsWithNewline = true;
+ endsWithNewline = true;
}
// This is an optimization. If the current indent is "", then we don't have to do any
// of the more complex stuff further down.
- if ((this.currentIndentField.Length == 0))
+ if ((currentIndentField.Length == 0))
{
- this.GenerationEnvironment.Append(textToAppend);
+ GenerationEnvironment.Append(textToAppend);
return;
}
// Everywhere there is a newline in the text, add an indent after it
- textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + this.currentIndentField));
+ textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + currentIndentField));
// If the text ends with a newline, then we should strip off the indent added at the very end
// because the appropriate indent will be added when the next time Write() is called
- if (this.endsWithNewline)
+ if (endsWithNewline)
{
- this.GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - this.currentIndentField.Length));
+ GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - currentIndentField.Length));
}
else
{
- this.GenerationEnvironment.Append(textToAppend);
+ GenerationEnvironment.Append(textToAppend);
}
}
///
@@ -274,23 +274,23 @@ public void Write(string textToAppend)
///
public void WriteLine(string textToAppend)
{
- this.Write(textToAppend);
- this.GenerationEnvironment.AppendLine();
- this.endsWithNewline = true;
+ Write(textToAppend);
+ GenerationEnvironment.AppendLine();
+ endsWithNewline = true;
}
///
/// Write formatted text directly into the generated output
///
public void Write(string format, params object[] args)
{
- this.Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
+ Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
}
///
/// Write formatted text directly into the generated output
///
public void WriteLine(string format, params object[] args)
{
- this.WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
+ WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
}
///
/// Raise an error
@@ -299,7 +299,7 @@ public void Error(string message)
{
System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError();
error.ErrorText = message;
- this.Errors.Add(error);
+ Errors.Add(error);
}
///
/// Raise a warning
@@ -309,7 +309,7 @@ public void Warning(string message)
System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError();
error.ErrorText = message;
error.IsWarning = true;
- this.Errors.Add(error);
+ Errors.Add(error);
}
///
/// Increase the indent
@@ -320,8 +320,8 @@ public void PushIndent(string indent)
{
throw new global::System.ArgumentNullException("indent");
}
- this.currentIndentField = (this.currentIndentField + indent);
- this.indentLengths.Add(indent.Length);
+ currentIndentField = (currentIndentField + indent);
+ indentLengths.Add(indent.Length);
}
///
/// Remove the last indent that was added with PushIndent
@@ -329,14 +329,14 @@ public void PushIndent(string indent)
public string PopIndent()
{
string returnValue = "";
- if ((this.indentLengths.Count > 0))
+ if ((indentLengths.Count > 0))
{
- int indentLength = this.indentLengths[(this.indentLengths.Count - 1)];
- this.indentLengths.RemoveAt((this.indentLengths.Count - 1));
+ int indentLength = indentLengths[(indentLengths.Count - 1)];
+ indentLengths.RemoveAt((indentLengths.Count - 1));
if ((indentLength > 0))
{
- returnValue = this.currentIndentField.Substring((this.currentIndentField.Length - indentLength));
- this.currentIndentField = this.currentIndentField.Remove((this.currentIndentField.Length - indentLength));
+ returnValue = currentIndentField.Substring((currentIndentField.Length - indentLength));
+ currentIndentField = currentIndentField.Remove((currentIndentField.Length - indentLength));
}
}
return returnValue;
@@ -346,8 +346,8 @@ public string PopIndent()
///
public void ClearIndent()
{
- this.indentLengths.Clear();
- this.currentIndentField = "";
+ indentLengths.Clear();
+ currentIndentField = "";
}
#endregion
#region ToString Helpers
@@ -364,13 +364,13 @@ public System.IFormatProvider FormatProvider
{
get
{
- return this.formatProviderField ;
+ return formatProviderField ;
}
set
{
if ((value != null))
{
- this.formatProviderField = value;
+ formatProviderField = value;
}
}
}
@@ -393,7 +393,7 @@ public string ToStringWithCulture(object objectToConvert)
else
{
return ((string)(method.Invoke(objectToConvert, new object[] {
- this.formatProviderField })));
+ formatProviderField })));
}
}
}
@@ -405,7 +405,7 @@ public ToStringInstanceHelper ToStringHelper
{
get
{
- return this.toStringHelperField;
+ return toStringHelperField;
}
}
#endregion
diff --git a/src/mlnet/Templates/Console/PredictProject.cs b/src/mlnet/Templates/Console/PredictProject.cs
index e5a2c7d112..9bc5edb2a7 100644
--- a/src/mlnet/Templates/Console/PredictProject.cs
+++ b/src/mlnet/Templates/Console/PredictProject.cs
@@ -26,25 +26,25 @@ public partial class PredictProject : PredictProjectBase
///
public virtual string TransformText()
{
- this.Write("\r\n\r\n \r\n Exe\r\n\r\n \r\n Exe\r\n netcoreapp2.1\r\n \r\n \r\n \r\n");
if (IncludeLightGBMPackage){
- this.Write(" \r\n");
+ Write(" \r\n");
}
if (IncludeMklComponentsPackage){
- this.Write(" \r\n");
+ Write(" \r\n");
}
if (IncludeFastTreePackage){
- this.Write(" \r\n");
+ Write(" \r\n");
}
- this.Write(" \r\n \r\n \r\n \r\n\r\n");
- return this.GenerationEnvironment.ToString();
+ Write(" \r\n \r\n \r\n \r\n\r\n");
+ return GenerationEnvironment.ToString();
}
public string Namespace {get;set;}
@@ -76,15 +76,15 @@ protected System.Text.StringBuilder GenerationEnvironment
{
get
{
- if ((this.generationEnvironmentField == null))
+ if ((generationEnvironmentField == null))
{
- this.generationEnvironmentField = new global::System.Text.StringBuilder();
+ generationEnvironmentField = new global::System.Text.StringBuilder();
}
- return this.generationEnvironmentField;
+ return generationEnvironmentField;
}
set
{
- this.generationEnvironmentField = value;
+ generationEnvironmentField = value;
}
}
///
@@ -94,11 +94,11 @@ public System.CodeDom.Compiler.CompilerErrorCollection Errors
{
get
{
- if ((this.errorsField == null))
+ if ((errorsField == null))
{
- this.errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection();
+ errorsField = new global::System.CodeDom.Compiler.CompilerErrorCollection();
}
- return this.errorsField;
+ return errorsField;
}
}
///
@@ -108,11 +108,11 @@ private System.Collections.Generic.List indentLengths
{
get
{
- if ((this.indentLengthsField == null))
+ if ((indentLengthsField == null))
{
- this.indentLengthsField = new global::System.Collections.Generic.List();
+ indentLengthsField = new global::System.Collections.Generic.List();
}
- return this.indentLengthsField;
+ return indentLengthsField;
}
}
///
@@ -122,7 +122,7 @@ public string CurrentIndent
{
get
{
- return this.currentIndentField;
+ return currentIndentField;
}
}
///
@@ -132,11 +132,11 @@ public string CurrentIndent
{
get
{
- return this.sessionField;
+ return sessionField;
}
set
{
- this.sessionField = value;
+ sessionField = value;
}
}
#endregion
@@ -152,35 +152,35 @@ public void Write(string textToAppend)
}
// If we're starting off, or if the previous text ended with a newline,
// we have to append the current indent first.
- if (((this.GenerationEnvironment.Length == 0)
- || this.endsWithNewline))
+ if (((GenerationEnvironment.Length == 0)
+ || endsWithNewline))
{
- this.GenerationEnvironment.Append(this.currentIndentField);
- this.endsWithNewline = false;
+ GenerationEnvironment.Append(currentIndentField);
+ endsWithNewline = false;
}
// Check if the current text ends with a newline
if (textToAppend.EndsWith(global::System.Environment.NewLine, global::System.StringComparison.CurrentCulture))
{
- this.endsWithNewline = true;
+ endsWithNewline = true;
}
// This is an optimization. If the current indent is "", then we don't have to do any
// of the more complex stuff further down.
- if ((this.currentIndentField.Length == 0))
+ if ((currentIndentField.Length == 0))
{
- this.GenerationEnvironment.Append(textToAppend);
+ GenerationEnvironment.Append(textToAppend);
return;
}
// Everywhere there is a newline in the text, add an indent after it
- textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + this.currentIndentField));
+ textToAppend = textToAppend.Replace(global::System.Environment.NewLine, (global::System.Environment.NewLine + currentIndentField));
// If the text ends with a newline, then we should strip off the indent added at the very end
// because the appropriate indent will be added when the next time Write() is called
- if (this.endsWithNewline)
+ if (endsWithNewline)
{
- this.GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - this.currentIndentField.Length));
+ GenerationEnvironment.Append(textToAppend, 0, (textToAppend.Length - currentIndentField.Length));
}
else
{
- this.GenerationEnvironment.Append(textToAppend);
+ GenerationEnvironment.Append(textToAppend);
}
}
///
@@ -188,23 +188,23 @@ public void Write(string textToAppend)
///
public void WriteLine(string textToAppend)
{
- this.Write(textToAppend);
- this.GenerationEnvironment.AppendLine();
- this.endsWithNewline = true;
+ Write(textToAppend);
+ GenerationEnvironment.AppendLine();
+ endsWithNewline = true;
}
///
/// Write formatted text directly into the generated output
///
public void Write(string format, params object[] args)
{
- this.Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
+ Write(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
}
///
/// Write formatted text directly into the generated output
///
public void WriteLine(string format, params object[] args)
{
- this.WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
+ WriteLine(string.Format(global::System.Globalization.CultureInfo.CurrentCulture, format, args));
}
///
/// Raise an error
@@ -213,7 +213,7 @@ public void Error(string message)
{
System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError();
error.ErrorText = message;
- this.Errors.Add(error);
+ Errors.Add(error);
}
///
/// Raise a warning
@@ -223,7 +223,7 @@ public void Warning(string message)
System.CodeDom.Compiler.CompilerError error = new global::System.CodeDom.Compiler.CompilerError();
error.ErrorText = message;
error.IsWarning = true;
- this.Errors.Add(error);
+ Errors.Add(error);
}
///
/// Increase the indent
@@ -234,8 +234,8 @@ public void PushIndent(string indent)
{
throw new global::System.ArgumentNullException("indent");
}
- this.currentIndentField = (this.currentIndentField + indent);
- this.indentLengths.Add(indent.Length);
+ currentIndentField = (currentIndentField + indent);
+ indentLengths.Add(indent.Length);
}
///
/// Remove the last indent that was added with PushIndent
@@ -243,14 +243,14 @@ public void PushIndent(string indent)
public string PopIndent()
{
string returnValue = "";
- if ((this.indentLengths.Count > 0))
+ if ((indentLengths.Count > 0))
{
- int indentLength = this.indentLengths[(this.indentLengths.Count - 1)];
- this.indentLengths.RemoveAt((this.indentLengths.Count - 1));
+ int indentLength = indentLengths[(indentLengths.Count - 1)];
+ indentLengths.RemoveAt((indentLengths.Count - 1));
if ((indentLength > 0))
{
- returnValue = this.currentIndentField.Substring((this.currentIndentField.Length - indentLength));
- this.currentIndentField = this.currentIndentField.Remove((this.currentIndentField.Length - indentLength));
+ returnValue = currentIndentField.Substring((currentIndentField.Length - indentLength));
+ currentIndentField = currentIndentField.Remove((currentIndentField.Length - indentLength));
}
}
return returnValue;
@@ -260,8 +260,8 @@ public string PopIndent()
///
public void ClearIndent()
{
- this.indentLengths.Clear();
- this.currentIndentField = "";
+ indentLengths.Clear();
+ currentIndentField = "";
}
#endregion
#region ToString Helpers
@@ -278,13 +278,13 @@ public System.IFormatProvider FormatProvider
{
get
{
- return this.formatProviderField ;
+ return formatProviderField ;
}
set
{
if ((value != null))
{
- this.formatProviderField = value;
+ formatProviderField = value;
}
}
}
@@ -307,7 +307,7 @@ public string ToStringWithCulture(object objectToConvert)
else
{
return ((string)(method.Invoke(objectToConvert, new object[] {
- this.formatProviderField })));
+ formatProviderField })));
}
}
}
@@ -319,7 +319,7 @@ public ToStringInstanceHelper ToStringHelper
{
get
{
- return this.toStringHelperField;
+ return toStringHelperField;
}
}
#endregion
diff --git a/src/mlnet/Utilities/ConsolePrinter.cs b/src/mlnet/Utilities/ConsolePrinter.cs
index 177231607f..8020556192 100644
--- a/src/mlnet/Utilities/ConsolePrinter.cs
+++ b/src/mlnet/Utilities/ConsolePrinter.cs
@@ -14,53 +14,53 @@ namespace Microsoft.ML.CLI.Utilities
internal class ConsolePrinter
{
private const int Width = 114;
- private static NLog.Logger logger = NLog.LogManager.GetCurrentClassLogger();
- internal static readonly string TABLESEPERATOR = "------------------------------------------------------------------------------------------------------------------";
+ private static NLog.Logger _logger = NLog.LogManager.GetCurrentClassLogger();
+ internal static readonly string TableSeparator = "------------------------------------------------------------------------------------------------------------------";
internal static void PrintMetrics(int iteration, string trainerName, BinaryClassificationMetrics metrics, double bestMetric, double? runtimeInSeconds, LogLevel logLevel, int iterationNumber = -1)
{
- logger.Log(logLevel, CreateRow($"{iteration,-4} {trainerName,-35} {metrics?.Accuracy ?? double.NaN,9:F4} {metrics?.AreaUnderRocCurve ?? double.NaN,8:F4} {metrics?.AreaUnderPrecisionRecallCurve ?? double.NaN,8:F4} {metrics?.F1Score ?? double.NaN,9:F4} {runtimeInSeconds.Value,9:F1} {iterationNumber + 1,10}", Width));
+ _logger.Log(logLevel, CreateRow($"{iteration,-4} {trainerName,-35} {metrics?.Accuracy ?? double.NaN,9:F4} {metrics?.AreaUnderRocCurve ?? double.NaN,8:F4} {metrics?.AreaUnderPrecisionRecallCurve ?? double.NaN,8:F4} {metrics?.F1Score ?? double.NaN,9:F4} {runtimeInSeconds.Value,9:F1} {iterationNumber + 1,10}", Width));
}
internal static void PrintMetrics(int iteration, string trainerName, MulticlassClassificationMetrics metrics, double bestMetric, double? runtimeInSeconds, LogLevel logLevel, int iterationNumber = -1)
{
- logger.Log(logLevel, CreateRow($"{iteration,-4} {trainerName,-35} {metrics?.MicroAccuracy ?? double.NaN,14:F4} {metrics?.MacroAccuracy ?? double.NaN,14:F4} {runtimeInSeconds.Value,9:F1} {iterationNumber + 1,10}", Width));
+ _logger.Log(logLevel, CreateRow($"{iteration,-4} {trainerName,-35} {metrics?.MicroAccuracy ?? double.NaN,14:F4} {metrics?.MacroAccuracy ?? double.NaN,14:F4} {runtimeInSeconds.Value,9:F1} {iterationNumber + 1,10}", Width));
}
internal static void PrintMetrics(int iteration, string trainerName, RegressionMetrics metrics, double bestMetric, double? runtimeInSeconds, LogLevel logLevel, int iterationNumber = -1)
{
- logger.Log(logLevel, CreateRow($"{iteration,-4} {trainerName,-35} {metrics?.RSquared ?? double.NaN,8:F4} {metrics?.MeanAbsoluteError ?? double.NaN,13:F2} {metrics?.MeanSquaredError ?? double.NaN,12:F2} {metrics?.RootMeanSquaredError ?? double.NaN,8:F2} {runtimeInSeconds.Value,9:F1} {iterationNumber + 1,10}", Width));
+ _logger.Log(logLevel, CreateRow($"{iteration,-4} {trainerName,-35} {metrics?.RSquared ?? double.NaN,8:F4} {metrics?.MeanAbsoluteError ?? double.NaN,13:F2} {metrics?.MeanSquaredError ?? double.NaN,12:F2} {metrics?.RootMeanSquaredError ?? double.NaN,8:F2} {runtimeInSeconds.Value,9:F1} {iterationNumber + 1,10}", Width));
}
internal static void PrintBinaryClassificationMetricsHeader(LogLevel logLevel)
{
- logger.Log(logLevel, CreateRow($"{"",-4} {"Trainer",-35} {"Accuracy",9} {"AUC",8} {"AUPRC",8} {"F1-score",9} {"Duration",9} {"#Iteration",10}", Width));
+ _logger.Log(logLevel, CreateRow($"{"",-4} {"Trainer",-35} {"Accuracy",9} {"AUC",8} {"AUPRC",8} {"F1-score",9} {"Duration",9} {"#Iteration",10}", Width));
}
internal static void PrintMulticlassClassificationMetricsHeader(LogLevel logLevel)
{
- logger.Log(logLevel, CreateRow($"{"",-4} {"Trainer",-35} {"MicroAccuracy",14} {"MacroAccuracy",14} {"Duration",9} {"#Iteration",10}", Width));
+ _logger.Log(logLevel, CreateRow($"{"",-4} {"Trainer",-35} {"MicroAccuracy",14} {"MacroAccuracy",14} {"Duration",9} {"#Iteration",10}", Width));
}
internal static void PrintRegressionMetricsHeader(LogLevel logLevel)
{
- logger.Log(logLevel, CreateRow($"{"",-4} {"Trainer",-35} {"RSquared",8} {"Absolute-loss",13} {"Squared-loss",12} {"RMS-loss",8} {"Duration",9} {"#Iteration",10}", Width));
+ _logger.Log(logLevel, CreateRow($"{"",-4} {"Trainer",-35} {"RSquared",8} {"Absolute-loss",13} {"Squared-loss",12} {"RMS-loss",8} {"Duration",9} {"#Iteration",10}", Width));
}
internal static void ExperimentResultsHeader(LogLevel logLevel, string mltask, string datasetName, string labelName, string time, int numModelsExplored)
{
- logger.Log(logLevel, string.Empty);
- logger.Log(logLevel, $"===============================================Experiment Results=================================================");
- logger.Log(logLevel, TABLESEPERATOR);
+ _logger.Log(logLevel, string.Empty);
+ _logger.Log(logLevel, $"===============================================Experiment Results=================================================");
+ _logger.Log(logLevel, TableSeparator);
var header = "Summary";
- logger.Log(logLevel, CreateRow(header.PadLeft((Width / 2) + header.Length / 2), Width));
- logger.Log(logLevel, TABLESEPERATOR);
- logger.Log(logLevel, CreateRow($"{"ML Task",-7}: {mltask,-20}", Width));
- logger.Log(logLevel, CreateRow($"{"Dataset",-7}: {datasetName,-25}", Width));
- logger.Log(logLevel, CreateRow($"{"Label",-6}: {labelName,-25}", Width));
- logger.Log(logLevel, CreateRow($"{"Total experiment time",-22}: {time} Secs", Width));
- logger.Log(logLevel, CreateRow($"{"Total number of models explored",-30}: {numModelsExplored}", Width));
- logger.Log(logLevel, TABLESEPERATOR);
+ _logger.Log(logLevel, CreateRow(header.PadLeft((Width / 2) + header.Length / 2), Width));
+ _logger.Log(logLevel, TableSeparator);
+ _logger.Log(logLevel, CreateRow($"{"ML Task",-7}: {mltask,-20}", Width));
+ _logger.Log(logLevel, CreateRow($"{"Dataset",-7}: {datasetName,-25}", Width));
+ _logger.Log(logLevel, CreateRow($"{"Label",-6}: {labelName,-25}", Width));
+ _logger.Log(logLevel, CreateRow($"{"Total experiment time",-22}: {time} Secs", Width));
+ _logger.Log(logLevel, CreateRow($"{"Total number of models explored",-30}: {numModelsExplored}", Width));
+ _logger.Log(logLevel, TableSeparator);
}
internal static string CreateRow(string message, int width)
@@ -73,8 +73,8 @@ internal static void PrintIterationSummary(IEnumerable> results, RegressionMetric optimizationMetric, int count)
@@ -99,8 +99,8 @@ internal static void PrintIterationSummary(IEnumerable> results, MulticlassClassificationMetric optimizationMetric, int count)
@@ -125,8 +125,8 @@ internal static void PrintIterationSummary(IEnumerable>
{
- private readonly bool isMaximizing;
- private readonly Func, double> GetScore;
- private RunDetail bestResult;
- private int iterationIndex;
- private List> completedIterations;
- private ProgressBar progressBar;
- private string optimizationMetric = string.Empty;
- private bool isStopped;
+ private readonly bool _isMaximizing;
+ private readonly Func, double> _getScore;
+ private RunDetail _bestResult;
+ private int _iterationIndex;
+ private List> _completedIterations;
+ private ProgressBar _progressBar;
+ private string _optimizationMetric;
+ private bool _isStopped;
public RegressionHandler(RegressionMetric optimizationMetric, List> completedIterations, ShellProgressBar.ProgressBar progressBar)
{
- this.isMaximizing = new OptimizingMetricInfo(optimizationMetric).IsMaximizing;
- this.optimizationMetric = optimizationMetric.ToString();
- this.completedIterations = completedIterations;
- this.progressBar = progressBar;
- GetScore = (RunDetail result) => new RegressionMetricsAgent(null, optimizationMetric).GetScore(result?.ValidationMetrics);
+ _isMaximizing = new OptimizingMetricInfo(optimizationMetric).IsMaximizing;
+ _optimizationMetric = optimizationMetric.ToString();
+ _completedIterations = completedIterations;
+ _progressBar = progressBar;
+ _getScore = (RunDetail result) => new RegressionMetricsAgent(null, optimizationMetric).GetScore(result?.ValidationMetrics);
ConsolePrinter.PrintRegressionMetricsHeader(LogLevel.Trace);
}
@@ -43,15 +43,15 @@ public void Report(RunDetail iterationResult)
{
lock (this)
{
- if (this.isStopped)
+ if (_isStopped)
return;
- iterationIndex++;
- completedIterations.Add(iterationResult);
+ _iterationIndex++;
+ _completedIterations.Add(iterationResult);
UpdateBestResult(iterationResult);
- if (progressBar != null)
- progressBar.Message = $"Best quality({this.optimizationMetric}): {GetScore(bestResult):F4}, Best Algorithm: {bestResult?.TrainerName}, Last Algorithm: {iterationResult?.TrainerName}";
- ConsolePrinter.PrintMetrics(iterationIndex, iterationResult?.TrainerName, iterationResult?.ValidationMetrics, GetScore(bestResult), iterationResult?.RuntimeInSeconds, LogLevel.Trace);
+ if (_progressBar != null)
+ _progressBar.Message = $"Best quality({_optimizationMetric}): {_getScore(_bestResult):F4}, Best Algorithm: {_bestResult?.TrainerName}, Last Algorithm: {iterationResult?.TrainerName}";
+ ConsolePrinter.PrintMetrics(_iterationIndex, iterationResult?.TrainerName, iterationResult?.ValidationMetrics, _getScore(_bestResult), iterationResult?.RuntimeInSeconds, LogLevel.Trace);
if (iterationResult.Exception != null)
{
ConsolePrinter.PrintException(iterationResult.Exception, LogLevel.Trace);
@@ -62,37 +62,37 @@ public void Stop()
{
lock (this)
{
- this.isStopped = true;
+ _isStopped = true;
}
}
private void UpdateBestResult(RunDetail iterationResult)
{
- if (MetricComparator(GetScore(iterationResult), GetScore(bestResult), isMaximizing) > 0)
+ if (MetricComparator(_getScore(iterationResult), _getScore(_bestResult), _isMaximizing) > 0)
{
- bestResult = iterationResult;
+ _bestResult = iterationResult;
}
}
}
internal class BinaryClassificationHandler : IProgress>
{
- private readonly bool isMaximizing;
- private readonly Func, double> GetScore;
- private RunDetail bestResult;
- private int iterationIndex;
- private ProgressBar progressBar;
- private BinaryClassificationMetric optimizationMetric;
- private List> completedIterations;
- private bool isStopped;
+ private readonly bool _isMaximizing;
+ private readonly Func, double> _getScore;
+ private RunDetail _bestResult;
+ private int _iterationIndex;
+ private ProgressBar _progressBar;
+ private BinaryClassificationMetric _optimizationMetric;
+ private List> _completedIterations;
+ private bool _isStopped;
public BinaryClassificationHandler(BinaryClassificationMetric optimizationMetric, List> completedIterations, ProgressBar progressBar)
{
- this.isMaximizing = new OptimizingMetricInfo(optimizationMetric).IsMaximizing;
- this.optimizationMetric = optimizationMetric;
- this.completedIterations = completedIterations;
- this.progressBar = progressBar;
- GetScore = (RunDetail result) => new BinaryMetricsAgent(null, optimizationMetric).GetScore(result?.ValidationMetrics);
+ _isMaximizing = new OptimizingMetricInfo(optimizationMetric).IsMaximizing;
+ _optimizationMetric = optimizationMetric;
+ _completedIterations = completedIterations;
+ _progressBar = progressBar;
+ _getScore = (RunDetail result) => new BinaryMetricsAgent(null, optimizationMetric).GetScore(result?.ValidationMetrics);
ConsolePrinter.PrintBinaryClassificationMetricsHeader(LogLevel.Trace);
}
@@ -100,14 +100,14 @@ public void Report(RunDetail iterationResult)
{
lock (this)
{
- if (this.isStopped)
+ if (_isStopped)
return;
- iterationIndex++;
- completedIterations.Add(iterationResult);
+ _iterationIndex++;
+ _completedIterations.Add(iterationResult);
UpdateBestResult(iterationResult);
- if (progressBar != null)
- progressBar.Message = GetProgressBarMessage(iterationResult);
- ConsolePrinter.PrintMetrics(iterationIndex, iterationResult?.TrainerName, iterationResult?.ValidationMetrics, GetScore(bestResult), iterationResult?.RuntimeInSeconds, LogLevel.Trace);
+ if (_progressBar != null)
+ _progressBar.Message = GetProgressBarMessage(iterationResult);
+ ConsolePrinter.PrintMetrics(_iterationIndex, iterationResult?.TrainerName, iterationResult?.ValidationMetrics, _getScore(_bestResult), iterationResult?.RuntimeInSeconds, LogLevel.Trace);
if (iterationResult.Exception != null)
{
ConsolePrinter.PrintException(iterationResult.Exception, LogLevel.Trace);
@@ -117,49 +117,49 @@ public void Report(RunDetail iterationResult)
private string GetProgressBarMessage(RunDetail iterationResult)
{
- if (optimizationMetric == BinaryClassificationMetric.Accuracy)
+ if (_optimizationMetric == BinaryClassificationMetric.Accuracy)
{
- return $"Best Accuracy: {GetScore(bestResult) * 100:F2}%, Best Algorithm: {bestResult?.TrainerName}, Last Algorithm: {iterationResult?.TrainerName}";
+ return $"Best Accuracy: {_getScore(_bestResult) * 100:F2}%, Best Algorithm: {_bestResult?.TrainerName}, Last Algorithm: {iterationResult?.TrainerName}";
}
- return $"Best {this.optimizationMetric}: {GetScore(bestResult):F4}, Best Algorithm: {bestResult?.TrainerName}, Last Algorithm: {iterationResult?.TrainerName}";
+ return $"Best {_optimizationMetric}: {_getScore(_bestResult):F4}, Best Algorithm: {_bestResult?.TrainerName}, Last Algorithm: {iterationResult?.TrainerName}";
}
public void Stop()
{
lock (this)
{
- this.isStopped = true;
+ _isStopped = true;
}
}
private void UpdateBestResult(RunDetail iterationResult)
{
- if (MetricComparator(GetScore(iterationResult), GetScore(bestResult), isMaximizing) > 0)
+ if (MetricComparator(_getScore(iterationResult), _getScore(_bestResult), _isMaximizing) > 0)
{
- bestResult = iterationResult;
+ _bestResult = iterationResult;
}
}
}
internal class MulticlassClassificationHandler : IProgress>
{
- private readonly bool isMaximizing;
- private readonly Func, double> GetScore;
- private RunDetail bestResult;
- private int iterationIndex;
- private ProgressBar progressBar;
- private MulticlassClassificationMetric optimizationMetric;
- private List> completedIterations;
- private bool isStopped;
+ private readonly bool _isMaximizing;
+ private readonly Func, double> _getScore;
+ private RunDetail _bestResult;
+ private int _iterationIndex;
+ private ProgressBar _progressBar;
+ private MulticlassClassificationMetric _optimizationMetric;
+ private List> _completedIterations;
+ private bool _isStopped;
public MulticlassClassificationHandler(MulticlassClassificationMetric optimizationMetric, List> completedIterations, ProgressBar progressBar)
{
- this.isMaximizing = new OptimizingMetricInfo(optimizationMetric).IsMaximizing;
- this.optimizationMetric = optimizationMetric;
- this.completedIterations = completedIterations;
- this.progressBar = progressBar;
- GetScore = (RunDetail result) => new MultiMetricsAgent(null, optimizationMetric).GetScore(result?.ValidationMetrics);
+ _isMaximizing = new OptimizingMetricInfo(optimizationMetric).IsMaximizing;
+ _optimizationMetric = optimizationMetric;
+ _completedIterations = completedIterations;
+ _progressBar = progressBar;
+ _getScore = (RunDetail result) => new MultiMetricsAgent(null, optimizationMetric).GetScore(result?.ValidationMetrics);
ConsolePrinter.PrintMulticlassClassificationMetricsHeader(LogLevel.Trace);
}
@@ -167,17 +167,17 @@ public void Report(RunDetail iterationResult)
{
lock (this)
{
- if (this.isStopped)
+ if (_isStopped)
{
return;
}
- iterationIndex++;
- completedIterations.Add(iterationResult);
+ _iterationIndex++;
+ _completedIterations.Add(iterationResult);
UpdateBestResult(iterationResult);
- if (progressBar != null)
- progressBar.Message = GetProgressBarMessage(iterationResult);
- ConsolePrinter.PrintMetrics(iterationIndex, iterationResult?.TrainerName, iterationResult?.ValidationMetrics, GetScore(bestResult), iterationResult?.RuntimeInSeconds, LogLevel.Trace);
+ if (_progressBar != null)
+ _progressBar.Message = GetProgressBarMessage(iterationResult);
+ ConsolePrinter.PrintMetrics(_iterationIndex, iterationResult?.TrainerName, iterationResult?.ValidationMetrics, _getScore(_bestResult), iterationResult?.RuntimeInSeconds, LogLevel.Trace);
if (iterationResult.Exception != null)
{
ConsolePrinter.PrintException(iterationResult.Exception, LogLevel.Trace);
@@ -189,26 +189,26 @@ public void Stop()
{
lock (this)
{
- this.isStopped = true;
+ _isStopped = true;
}
}
private void UpdateBestResult(RunDetail iterationResult)
{
- if (MetricComparator(GetScore(iterationResult), GetScore(bestResult), isMaximizing) > 0)
+ if (MetricComparator(_getScore(iterationResult), _getScore(_bestResult), _isMaximizing) > 0)
{
- bestResult = iterationResult;
+ _bestResult = iterationResult;
}
}
private string GetProgressBarMessage(RunDetail iterationResult)
{
- if (optimizationMetric == MulticlassClassificationMetric.MicroAccuracy)
+ if (_optimizationMetric == MulticlassClassificationMetric.MicroAccuracy)
{
- return $"Best Accuracy: {GetScore(bestResult) * 100:F2}%, Best Algorithm: {bestResult?.TrainerName}, Last Algorithm: {iterationResult?.TrainerName}";
+ return $"Best Accuracy: {_getScore(_bestResult) * 100:F2}%, Best Algorithm: {_bestResult?.TrainerName}, Last Algorithm: {iterationResult?.TrainerName}";
}
- return $"Best {this.optimizationMetric}: {GetScore(bestResult):F4}, Best Algorithm: {bestResult?.TrainerName}, Last Algorithm: {iterationResult?.TrainerName}";
+ return $"Best {_optimizationMetric}: {_getScore(_bestResult):F4}, Best Algorithm: {_bestResult?.TrainerName}, Last Algorithm: {iterationResult?.TrainerName}";
}
}
diff --git a/src/mlnet/Utilities/Utils.cs b/src/mlnet/Utilities/Utils.cs
index cf48b897fe..94b6747030 100644
--- a/src/mlnet/Utilities/Utils.cs
+++ b/src/mlnet/Utilities/Utils.cs
@@ -171,7 +171,6 @@ internal static string FormatCode(string trainProgramCSFileContent)
return trainProgramCSFileContent;
}
-
internal static int AddProjectsToSolution(string modelprojectDir,
string modelProjectName,
string consoleAppProjectDir,
diff --git a/src/mlnet/mlnet.csproj b/src/mlnet/mlnet.csproj
index c2c8ff89ca..c1fb8cc954 100644
--- a/src/mlnet/mlnet.csproj
+++ b/src/mlnet/mlnet.csproj
@@ -4,9 +4,6 @@
Exe
netcoreapp2.1
Microsoft.ML.CLI
-
- false
- false
diff --git a/test/Microsoft.ML.AutoML.Tests/GetNextPipelineTests.cs b/test/Microsoft.ML.AutoML.Tests/GetNextPipelineTests.cs
index f736dad80d..ecc3b29e24 100644
--- a/test/Microsoft.ML.AutoML.Tests/GetNextPipelineTests.cs
+++ b/test/Microsoft.ML.AutoML.Tests/GetNextPipelineTests.cs
@@ -57,7 +57,7 @@ public void GetNextPipelineMock()
break;
}
- var result = new PipelineScore(pipeline, AutoMlUtils.random.Value.NextDouble(), true);
+ var result = new PipelineScore(pipeline, AutoMlUtils.Random.Value.NextDouble(), true);
history.Add(result);
}
diff --git a/test/Microsoft.ML.AutoML.Tests/Microsoft.ML.AutoML.Tests.csproj b/test/Microsoft.ML.AutoML.Tests/Microsoft.ML.AutoML.Tests.csproj
index 016dda9488..179beff81a 100644
--- a/test/Microsoft.ML.AutoML.Tests/Microsoft.ML.AutoML.Tests.csproj
+++ b/test/Microsoft.ML.AutoML.Tests/Microsoft.ML.AutoML.Tests.csproj
@@ -2,11 +2,6 @@
false
-
-
- false
- false
-
Microsoft.ML.AutoML.Test
diff --git a/test/Microsoft.ML.AutoML.Tests/TextFileSampleTests.cs b/test/Microsoft.ML.AutoML.Tests/TextFileSampleTests.cs
index bd4fadf959..1ae50f76e1 100644
--- a/test/Microsoft.ML.AutoML.Tests/TextFileSampleTests.cs
+++ b/test/Microsoft.ML.AutoML.Tests/TextFileSampleTests.cs
@@ -22,7 +22,7 @@ public void CanParseLargeRandomStream()
for (var i = 0; i < numRows; i++)
{
var row = new byte[rowSize];
- AutoMlUtils.random.Value.NextBytes(row);
+ AutoMlUtils.Random.Value.NextBytes(row);
// ensure byte array has no 0s, so text file sampler doesn't
// think file is encoded with UTF-16 or UTF-32 without a BOM
diff --git a/test/mlnet.Tests/mlnet.Tests.csproj b/test/mlnet.Tests/mlnet.Tests.csproj
index c906d577f8..6c23891667 100644
--- a/test/mlnet.Tests/mlnet.Tests.csproj
+++ b/test/mlnet.Tests/mlnet.Tests.csproj
@@ -2,9 +2,6 @@
false
-
- false
- false