diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs b/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs
index f6e8851f51..446dff327a 100644
--- a/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs
+++ b/src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs
@@ -271,10 +271,10 @@ public static NormalizeTransform Create(IHostEnvironment env, MinMaxArguments ar
/// <param name="name">Name of the output column.</param>
/// <param name="source">Name of the column to be transformed. If this is null '<paramref name="name"/>' will be used.</param>
/// <param name="useCdf">Whether to use CDF as the output.</param>
- public static NormalizeTransform CreateMeanVarNormalizer(IHostEnvironment env,
- IDataView input,
- string name,
- string source=null,
+ public static NormalizeTransform CreateMeanVarNormalizer(IHostEnvironment env,
+ IDataView input,
+ string name,
+ string source = null,
bool useCdf = Defaults.MeanVarCdf)
{
var args = new MeanVarArguments()
@@ -313,10 +313,10 @@ public static NormalizeTransform Create(IHostEnvironment env, MeanVarArguments a
/// <param name="name">Name of the output column.</param>
/// <param name="source">Name of the column to be transformed. If this is null '<paramref name="name"/>' will be used.</param>
/// <param name="useCdf">Whether to use CDF as the output.</param>
- public static NormalizeTransform CreateLogMeanVarNormalizer(IHostEnvironment env,
- IDataView input,
- string name,
- string source=null,
+ public static NormalizeTransform CreateLogMeanVarNormalizer(IHostEnvironment env,
+ IDataView input,
+ string name,
+ string source = null,
bool useCdf = Defaults.LogMeanVarCdf)
{
var args = new LogMeanVarArguments()
@@ -347,10 +347,10 @@ public static NormalizeTransform Create(IHostEnvironment env, LogMeanVarArgument
return func;
}
- public static NormalizeTransform CreateBinningNormalizer(IHostEnvironment env,
- IDataView input,
- string name,
- string source=null,
+ public static NormalizeTransform CreateBinningNormalizer(IHostEnvironment env,
+ IDataView input,
+ string name,
+ string source = null,
int numBins = Defaults.NumBins)
{
var args = new BinArguments()
@@ -381,12 +381,12 @@ public static NormalizeTransform Create(IHostEnvironment env, BinArguments args,
return func;
}
- public static NormalizeTransform CreateSupervisedBinningNormalizer(IHostEnvironment env,
- IDataView input,
- string labelColumn,
- string name,
- string source = null,
- int numBins = Defaults.NumBins,
+ public static NormalizeTransform CreateSupervisedBinningNormalizer(IHostEnvironment env,
+ IDataView input,
+ string labelColumn,
+ string name,
+ string source = null,
+ int numBins = Defaults.NumBins,
int minBinSize = Defaults.MinBinSize)
{
var args = new SupervisedBinArguments()
diff --git a/src/Microsoft.ML.FastTree/FastTreeClassification.cs b/src/Microsoft.ML.FastTree/FastTreeClassification.cs
index edbdd47a03..f694236166 100644
--- a/src/Microsoft.ML.FastTree/FastTreeClassification.cs
+++ b/src/Microsoft.ML.FastTree/FastTreeClassification.cs
@@ -338,10 +338,10 @@ public void AdjustTreeOutputs(IChannel ch, RegressionTree tree,
public static partial class FastTree
{
- [TlcModule.EntryPoint(Name = "Trainers.FastTreeBinaryClassifier",
- Desc = FastTreeBinaryClassificationTrainer.Summary,
- Remarks = FastTreeBinaryClassificationTrainer.Remarks,
- UserName = FastTreeBinaryClassificationTrainer.UserNameValue,
+ [TlcModule.EntryPoint(Name = "Trainers.FastTreeBinaryClassifier",
+ Desc = FastTreeBinaryClassificationTrainer.Summary,
+ Remarks = FastTreeBinaryClassificationTrainer.Remarks,
+ UserName = FastTreeBinaryClassificationTrainer.UserNameValue,
ShortName = FastTreeBinaryClassificationTrainer.ShortName)]
public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, FastTreeBinaryClassificationTrainer.Arguments input)
{
diff --git a/src/Microsoft.ML.FastTree/FastTreeRanking.cs b/src/Microsoft.ML.FastTree/FastTreeRanking.cs
index a689408748..70919fbdea 100644
--- a/src/Microsoft.ML.FastTree/FastTreeRanking.cs
+++ b/src/Microsoft.ML.FastTree/FastTreeRanking.cs
@@ -1096,10 +1096,10 @@ public static FastTreeRankingPredictor Create(IHostEnvironment env, ModelLoadCon
public static partial class FastTree
{
- [TlcModule.EntryPoint(Name = "Trainers.FastTreeRanker",
- Desc = FastTreeRankingTrainer.Summary,
+ [TlcModule.EntryPoint(Name = "Trainers.FastTreeRanker",
+ Desc = FastTreeRankingTrainer.Summary,
Remarks = FastTreeRankingTrainer.Remarks,
- UserName = FastTreeRankingTrainer.UserNameValue,
+ UserName = FastTreeRankingTrainer.UserNameValue,
ShortName = FastTreeRankingTrainer.ShortName)]
public static CommonOutputs.RankingOutput TrainRanking(IHostEnvironment env, FastTreeRankingTrainer.Arguments input)
{
diff --git a/src/Microsoft.ML.FastTree/FastTreeRegression.cs b/src/Microsoft.ML.FastTree/FastTreeRegression.cs
index 40ee906b5b..9c50fe75fe 100644
--- a/src/Microsoft.ML.FastTree/FastTreeRegression.cs
+++ b/src/Microsoft.ML.FastTree/FastTreeRegression.cs
@@ -449,9 +449,9 @@ public static FastTreeRegressionPredictor Create(IHostEnvironment env, ModelLoad
public static partial class FastTree
{
[TlcModule.EntryPoint(Name = "Trainers.FastTreeRegressor",
- Desc = FastTreeRegressionTrainer.Summary,
- Remarks = FastTreeRegressionTrainer.Remarks,
- UserName = FastTreeRegressionTrainer.UserNameValue,
+ Desc = FastTreeRegressionTrainer.Summary,
+ Remarks = FastTreeRegressionTrainer.Remarks,
+ UserName = FastTreeRegressionTrainer.UserNameValue,
ShortName = FastTreeRegressionTrainer.ShortName)]
public static CommonOutputs.RegressionOutput TrainRegression(IHostEnvironment env, FastTreeRegressionTrainer.Arguments input)
{
diff --git a/src/Microsoft.ML.FastTree/FastTreeTweedie.cs b/src/Microsoft.ML.FastTree/FastTreeTweedie.cs
index 00c29afd94..6d9bd58273 100644
--- a/src/Microsoft.ML.FastTree/FastTreeTweedie.cs
+++ b/src/Microsoft.ML.FastTree/FastTreeTweedie.cs
@@ -463,9 +463,9 @@ protected override void Map(ref VBuffer<float> src, ref float dst)
public static partial class FastTree
{
- [TlcModule.EntryPoint(Name = "Trainers.FastTreeTweedieRegressor",
- Desc = FastTreeTweedieTrainer.Summary,
- UserName = FastTreeTweedieTrainer.UserNameValue,
+ [TlcModule.EntryPoint(Name = "Trainers.FastTreeTweedieRegressor",
+ Desc = FastTreeTweedieTrainer.Summary,
+ UserName = FastTreeTweedieTrainer.UserNameValue,
ShortName = FastTreeTweedieTrainer.ShortName)]
public static CommonOutputs.RegressionOutput TrainTweedieRegression(IHostEnvironment env, FastTreeTweedieTrainer.Arguments input)
{
diff --git a/src/Microsoft.ML.FastTree/RandomForestClassification.cs b/src/Microsoft.ML.FastTree/RandomForestClassification.cs
index e085996747..e3e265cf13 100644
--- a/src/Microsoft.ML.FastTree/RandomForestClassification.cs
+++ b/src/Microsoft.ML.FastTree/RandomForestClassification.cs
@@ -208,10 +208,10 @@ protected override void GetGradientInOneQuery(int query, int threadIndex)
public static partial class FastForest
{
- [TlcModule.EntryPoint(Name = "Trainers.FastForestBinaryClassifier",
- Desc = FastForestClassification.Summary,
- Remarks = FastForestClassification.Remarks,
- UserName = FastForestClassification.UserNameValue,
+ [TlcModule.EntryPoint(Name = "Trainers.FastForestBinaryClassifier",
+ Desc = FastForestClassification.Summary,
+ Remarks = FastForestClassification.Remarks,
+ UserName = FastForestClassification.UserNameValue,
ShortName = FastForestClassification.ShortName)]
public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, FastForestClassification.Arguments input)
{
diff --git a/src/Microsoft.ML.FastTree/RandomForestRegression.cs b/src/Microsoft.ML.FastTree/RandomForestRegression.cs
index 74bf8c2a1c..ef9b3e5e9b 100644
--- a/src/Microsoft.ML.FastTree/RandomForestRegression.cs
+++ b/src/Microsoft.ML.FastTree/RandomForestRegression.cs
@@ -280,10 +280,10 @@ public BasicImpl(Dataset trainData, Arguments args)
public static partial class FastForest
{
- [TlcModule.EntryPoint(Name = "Trainers.FastForestRegressor",
- Desc = FastForestRegression.Summary,
+ [TlcModule.EntryPoint(Name = "Trainers.FastForestRegressor",
+ Desc = FastForestRegression.Summary,
Remarks = FastForestRegression.Remarks,
- UserName = FastForestRegression.LoadNameValue,
+ UserName = FastForestRegression.LoadNameValue,
ShortName = FastForestRegression.ShortName)]
public static CommonOutputs.RegressionOutput TrainRegression(IHostEnvironment env, FastForestRegression.Arguments input)
{
diff --git a/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs b/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs
index 3e47c595cd..df386d0dc1 100644
--- a/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs
+++ b/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs
@@ -233,10 +233,10 @@ private static int ComputeNumThreads(IHost host, int? argNumThreads)
return Math.Max(1, maxThreads);
}
- [TlcModule.EntryPoint(Name = "Trainers.KMeansPlusPlusClusterer",
+ [TlcModule.EntryPoint(Name = "Trainers.KMeansPlusPlusClusterer",
Desc = Summary,
Remarks = Remarks,
- UserName = UserNameValue,
+ UserName = UserNameValue,
ShortName = ShortName)]
public static CommonOutputs.ClusteringOutput TrainKMeans(IHostEnvironment env, Arguments input)
{
diff --git a/src/Microsoft.ML.PCA/PcaTrainer.cs b/src/Microsoft.ML.PCA/PcaTrainer.cs
index 6a6efd1ea6..6c114ef14d 100644
--- a/src/Microsoft.ML.PCA/PcaTrainer.cs
+++ b/src/Microsoft.ML.PCA/PcaTrainer.cs
@@ -284,10 +284,10 @@ private static void PostProcess(VBuffer<Float>[] y, Float[] sigma, Float[] z, in
}
}
- [TlcModule.EntryPoint(Name = "Trainers.PcaAnomalyDetector",
+ [TlcModule.EntryPoint(Name = "Trainers.PcaAnomalyDetector",
Desc = "Train an PCA Anomaly model.",
Remarks = PcaPredictor.Remarks,
- UserName = UserNameValue,
+ UserName = UserNameValue,
ShortName = ShortName)]
public static CommonOutputs.AnomalyDetectionOutput TrainPcaAnomaly(IHostEnvironment env, Arguments input)
{
diff --git a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs
index 08190acb74..7a0a099031 100644
--- a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs
+++ b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs
@@ -11,9 +11,7 @@
using Microsoft.ML.Runtime.EntryPoints;
using Microsoft.ML.Runtime.FactorizationMachine;
using Microsoft.ML.Runtime.Internal.CpuMath;
-using Microsoft.ML.Runtime.Internal.Internallearn;
using Microsoft.ML.Runtime.Internal.Utilities;
-using Microsoft.ML.Runtime.Model;
using Microsoft.ML.Runtime.Training;
[assembly: LoadableClass(FieldAwareFactorizationMachineTrainer.Summary, typeof(FieldAwareFactorizationMachineTrainer), typeof(FieldAwareFactorizationMachineTrainer.Arguments),
@@ -413,10 +411,10 @@ public override FieldAwareFactorizationMachinePredictor CreatePredictor()
return _pred;
}
- [TlcModule.EntryPoint(Name = "Trainers.FieldAwareFactorizationMachineBinaryClassifier",
- Desc = Summary,
+ [TlcModule.EntryPoint(Name = "Trainers.FieldAwareFactorizationMachineBinaryClassifier",
+ Desc = Summary,
Remarks = Remarks,
- UserName = UserName,
+ UserName = UserName,
ShortName = ShortName)]
public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, Arguments input)
{
diff --git a/src/Microsoft.ML.StandardLearners/Standard/LinearClassificationTrainer.cs b/src/Microsoft.ML.StandardLearners/Standard/LinearClassificationTrainer.cs
index 93a7cc2b32..738c0197cb 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/LinearClassificationTrainer.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/LinearClassificationTrainer.cs
@@ -189,7 +189,7 @@ public abstract class ArgumentsBase : LearnerInputBaseWithLabel
public int? MaxIterations;
[Argument(ArgumentType.AtMostOnce, HelpText = "Shuffle data every epoch?", ShortName = "shuf")]
- [TlcModule.SweepableDiscreteParamAttribute("Shuffle", null, isBool:true)]
+ [TlcModule.SweepableDiscreteParamAttribute("Shuffle", null, isBool: true)]
public bool Shuffle = true;
[Argument(ArgumentType.AtMostOnce, HelpText = "Convergence check frequency (in terms of number of iterations). Set as negative or zero for not checking at all. If left blank, it defaults to check after every 'numThreads' iterations.", NullName = "", ShortName = "checkFreq")]
@@ -1507,7 +1507,7 @@ public sealed class Arguments : LearnerInputBaseWithWeight
public Double InitLearningRate = 0.01;
[Argument(ArgumentType.AtMostOnce, HelpText = "Shuffle data every epoch?", ShortName = "shuf")]
- [TlcModule.SweepableDiscreteParamAttribute("Shuffle", null, isBool:true)]
+ [TlcModule.SweepableDiscreteParamAttribute("Shuffle", null, isBool: true)]
public bool Shuffle = true;
[Argument(ArgumentType.AtMostOnce, HelpText = "Apply weight to the positive class, for imbalanced data", ShortName = "piw")]
@@ -1795,10 +1795,10 @@ public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironm
///
public static partial class Sdca
{
- [TlcModule.EntryPoint(Name = "Trainers.StochasticDualCoordinateAscentBinaryClassifier",
+ [TlcModule.EntryPoint(Name = "Trainers.StochasticDualCoordinateAscentBinaryClassifier",
Desc = "Train an SDCA binary model.",
Remarks = LinearClassificationTrainer.Remarks,
- UserName = LinearClassificationTrainer.UserNameValue,
+ UserName = LinearClassificationTrainer.UserNameValue,
ShortName = LinearClassificationTrainer.LoadNameValue)]
public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, LinearClassificationTrainer.Arguments input)
{
diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs
index 0e85cd6712..f1d35950ba 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs
@@ -386,10 +386,10 @@ public override ParameterMixingCalibratedPredictor CreatePredictor()
new PlattCalibrator(Host, -1, 0));
}
- [TlcModule.EntryPoint(Name = "Trainers.LogisticRegressionBinaryClassifier",
- Desc = Summary,
+ [TlcModule.EntryPoint(Name = "Trainers.LogisticRegressionBinaryClassifier",
+ Desc = Summary,
Remarks = Remarks,
- UserName = UserNameValue,
+ UserName = UserNameValue,
ShortName = ShortName)]
public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, Arguments input)
{
diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs
index f2fad63794..367c99356d 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs
@@ -961,10 +961,10 @@ public IRow GetStatsIRowOrNull(RoleMappedSchema schema)
///
public partial class LogisticRegression
{
- [TlcModule.EntryPoint(Name = "Trainers.LogisticRegressionClassifier",
- Desc = Summary,
+ [TlcModule.EntryPoint(Name = "Trainers.LogisticRegressionClassifier",
+ Desc = Summary,
Remarks = MulticlassLogisticRegression.Remarks,
- UserName = MulticlassLogisticRegression.UserNameValue,
+ UserName = MulticlassLogisticRegression.UserNameValue,
ShortName = MulticlassLogisticRegression.ShortName)]
public static CommonOutputs.MulticlassClassificationOutput TrainMultiClass(IHostEnvironment env, MulticlassLogisticRegression.Arguments input)
{
diff --git a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MultiClassNaiveBayesTrainer.cs b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MultiClassNaiveBayesTrainer.cs
index 239392b085..94efc8cf05 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MultiClassNaiveBayesTrainer.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/MultiClass/MultiClassNaiveBayesTrainer.cs
@@ -130,8 +130,8 @@ public override MultiClassNaiveBayesPredictor CreatePredictor()
return _predictor;
}
- [TlcModule.EntryPoint(Name = "Trainers.NaiveBayesClassifier",
- Desc = "Train a MultiClassNaiveBayesTrainer.",
+ [TlcModule.EntryPoint(Name = "Trainers.NaiveBayesClassifier",
+ Desc = "Train a MultiClassNaiveBayesTrainer.",
UserName = UserName, ShortName = ShortName)]
public static CommonOutputs.MulticlassClassificationOutput TrainMultiClassNaiveBayesTrainer(IHostEnvironment env, Arguments input)
{
diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs
index 138b3f0485..ac259f66db 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/Online/AveragedPerceptron.cs
@@ -110,10 +110,10 @@ public override LinearBinaryPredictor CreatePredictor()
return new LinearBinaryPredictor(Host, ref weights, bias);
}
- [TlcModule.EntryPoint(Name = "Trainers.AveragedPerceptronBinaryClassifier",
+ [TlcModule.EntryPoint(Name = "Trainers.AveragedPerceptronBinaryClassifier",
Desc = Summary,
Remarks = Remarks,
- UserName = UserNameValue,
+ UserName = UserNameValue,
ShortName = ShortName)]
public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, Arguments input)
{
diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs
index 1af080a76b..dee63ccf37 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineGradientDescent.cs
@@ -94,7 +94,7 @@ public override TPredictor CreatePredictor()
return new LinearRegressionPredictor(Host, ref weights, bias);
}
- [TlcModule.EntryPoint(Name = "Trainers.OnlineGradientDescentRegressor",
+ [TlcModule.EntryPoint(Name = "Trainers.OnlineGradientDescentRegressor",
Desc = "Train a Online gradient descent perceptron.",
Remarks = Remarks,
UserName = UserNameValue,
diff --git a/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs
index c5ad4b4495..4e1739fb28 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs
@@ -32,7 +32,7 @@ public sealed class PoissonRegression : LbfgsTrainerBase
+ internal new const string Remarks = @"
Poisson regression is a parameterized regression method.
It assumes that the log of the conditional mean of the dependent variable follows a linear function of the independent variables.
Assuming that the dependent variable follows a Poisson distribution, the parameters of the regressor can be estimated by maximizing the likelihood of the obtained observations.
diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs
index f0a250e4b8..9b00251139 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaMultiClass.cs
@@ -386,10 +386,10 @@ protected override Float GetInstanceWeight(FloatLabelCursor cursor)
///
public static partial class Sdca
{
- [TlcModule.EntryPoint(Name = "Trainers.StochasticDualCoordinateAscentClassifier",
- Desc = SdcaMultiClassTrainer.Summary,
+ [TlcModule.EntryPoint(Name = "Trainers.StochasticDualCoordinateAscentClassifier",
+ Desc = SdcaMultiClassTrainer.Summary,
Remarks = SdcaMultiClassTrainer.Remarks,
- UserName = SdcaMultiClassTrainer.UserNameValue,
+ UserName = SdcaMultiClassTrainer.UserNameValue,
ShortName = SdcaMultiClassTrainer.ShortName)]
public static CommonOutputs.MulticlassClassificationOutput TrainMultiClass(IHostEnvironment env, SdcaMultiClassTrainer.Arguments input)
{
diff --git a/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs
index 55a021ebb7..422b63f397 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/SdcaRegression.cs
@@ -131,10 +131,10 @@ protected override Float TuneDefaultL2(IChannel ch, int maxIterations, long rowC
///
public static partial class Sdca
{
- [TlcModule.EntryPoint(Name = "Trainers.StochasticDualCoordinateAscentRegressor",
+ [TlcModule.EntryPoint(Name = "Trainers.StochasticDualCoordinateAscentRegressor",
Desc = SdcaRegressionTrainer.Summary,
Remarks = SdcaRegressionTrainer.Remarks,
- UserName = SdcaRegressionTrainer.UserNameValue,
+ UserName = SdcaRegressionTrainer.UserNameValue,
ShortName = SdcaRegressionTrainer.ShortName)]
public static CommonOutputs.RegressionOutput TrainRegression(IHostEnvironment env, SdcaRegressionTrainer.Arguments input)
{
diff --git a/src/Microsoft.ML.Transforms/BootstrapSampleTransform.cs b/src/Microsoft.ML.Transforms/BootstrapSampleTransform.cs
index 72e7030604..91106bd445 100644
--- a/src/Microsoft.ML.Transforms/BootstrapSampleTransform.cs
+++ b/src/Microsoft.ML.Transforms/BootstrapSampleTransform.cs
@@ -92,11 +92,11 @@ public BootstrapSampleTransform(IHostEnvironment env, Arguments args, IDataView
/// <param name="seed">The random seed. If unspecified random state will be instead derived from the environment.</param>
/// <param name="shuffleInput">Whether we should attempt to shuffle the source data. By default on, but can be turned off for efficiency.</param>
/// <param name="poolSize">When shuffling the output, the number of output rows to keep in that pool. Note that shuffling of output is completely distinct from shuffling of input.</param>
- public BootstrapSampleTransform(IHostEnvironment env,
- IDataView input,
- bool complement = Defaults.Complement,
- uint? seed = null,
- bool shuffleInput = Defaults.ShuffleInput,
+ public BootstrapSampleTransform(IHostEnvironment env,
+ IDataView input,
+ bool complement = Defaults.Complement,
+ uint? seed = null,
+ bool shuffleInput = Defaults.ShuffleInput,
int poolSize = Defaults.PoolSize)
: this(env, new Arguments() { Complement = complement, Seed = seed, ShuffleInput = shuffleInput, PoolSize = poolSize }, input)
{
diff --git a/src/Microsoft.ML.Transforms/CategoricalHashTransform.cs b/src/Microsoft.ML.Transforms/CategoricalHashTransform.cs
index 5aec5658b2..42f506930a 100644
--- a/src/Microsoft.ML.Transforms/CategoricalHashTransform.cs
+++ b/src/Microsoft.ML.Transforms/CategoricalHashTransform.cs
@@ -139,12 +139,12 @@ public sealed class Arguments : TransformInputBase
/// <param name="hashBits">Number of bits to hash into. Must be between 1 and 30, inclusive.</param>
/// <param name="invertHash">Limit the number of keys used to generate the slot name to this many. 0 means no invert hashing, -1 means no limit.</param>
/// <param name="outputKind">The type of output expected.</param>
- public static IDataTransform Create(IHostEnvironment env,
- IDataView input,
- string name,
- string source =null,
- int hashBits = Defaults.HashBits,
- int invertHash = Defaults.InvertHash,
+ public static IDataTransform Create(IHostEnvironment env,
+ IDataView input,
+ string name,
+ string source = null,
+ int hashBits = Defaults.HashBits,
+ int invertHash = Defaults.InvertHash,
CategoricalTransform.OutputKind outputKind = Defaults.OutputKind)
{
var args = new Arguments()
@@ -201,7 +201,7 @@ public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataV
}
return CategoricalTransform.CreateTransformCore(
- args.OutputKind,args.Column,
+ args.OutputKind, args.Column,
args.Column.Select(col => col.OutputKind).ToList(),
new HashTransform(h, hashArgs, input),
h,
diff --git a/src/Microsoft.ML.Transforms/GcnTransform.cs b/src/Microsoft.ML.Transforms/GcnTransform.cs
index fdb1d26c25..a7a69ff1f4 100644
--- a/src/Microsoft.ML.Transforms/GcnTransform.cs
+++ b/src/Microsoft.ML.Transforms/GcnTransform.cs
@@ -256,12 +256,12 @@ private static VersionInfo GetVersionInfo()
/// <param name="subMean">Subtract mean from each value before normalizing.</param>
/// <param name="useStdDev">Normalize by standard deviation rather than L2 norm.</param>
/// <param name="scale">Scale features by this value.</param>
- public static IDataTransform CreateGlobalContrastNormalizer(IHostEnvironment env,
- IDataView input,
- string name,
- string source = null,
- bool subMean = Defaults.GcnSubMean,
- bool useStdDev = Defaults.UseStdDev,
+ public static IDataTransform CreateGlobalContrastNormalizer(IHostEnvironment env,
+ IDataView input,
+ string name,
+ string source = null,
+ bool subMean = Defaults.GcnSubMean,
+ bool useStdDev = Defaults.UseStdDev,
Float scale = Defaults.Scale)
{
var args = new GcnArguments()
@@ -313,11 +313,11 @@ public LpNormNormalizerTransform(IHostEnvironment env, GcnArguments args, IDataV
/// <param name="source">Name of the column to be transformed. If this is null '<paramref name="name"/>' will be used.</param>
/// <param name="normKind">The norm to use to normalize each sample.</param>
/// <param name="subMean">Subtract mean from each value before normalizing.</param>
- public static IDataTransform CreateLpNormNormalizer(IHostEnvironment env,
- IDataView input,
- string name,
- string source = null,
- NormalizerKind normKind = Defaults.NormKind,
+ public static IDataTransform CreateLpNormNormalizer(IHostEnvironment env,
+ IDataView input,
+ string name,
+ string source = null,
+ NormalizerKind normKind = Defaults.NormKind,
bool subMean = Defaults.LpSubMean)
{
var args = new Arguments()
@@ -439,14 +439,61 @@ protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, ou
{
switch (ex.NormKind)
{
+ case NormalizerKind.StdDev:
+ del =
+ (ref VBuffer<Float> dst) =>
+ {
+ getSrc(ref src);
+ Float mean = Mean(src.Values, src.Count, src.Length);
+ Float divisor = StdDev(src.Values, src.Count, src.Length, mean);
+ FillValues(Host, ref src, ref dst, divisor, scale, mean);
+ };
+ return del;
+ case NormalizerKind.L2Norm:
+ del =
+ (ref VBuffer<Float> dst) =>
+ {
+ getSrc(ref src);
+ Float mean = Mean(src.Values, src.Count, src.Length);
+ Float divisor = L2Norm(src.Values, src.Count, mean);
+ FillValues(Host, ref src, ref dst, divisor, scale, mean);
+ };
+ return del;
+ case NormalizerKind.L1Norm:
+ del =
+ (ref VBuffer<Float> dst) =>
+ {
+ getSrc(ref src);
+ Float mean = Mean(src.Values, src.Count, src.Length);
+ Float divisor = L1Norm(src.Values, src.Count, mean);
+ FillValues(Host, ref src, ref dst, divisor, scale, mean);
+ };
+ return del;
+ case NormalizerKind.LInf:
+ del =
+ (ref VBuffer<Float> dst) =>
+ {
+ getSrc(ref src);
+ Float mean = Mean(src.Values, src.Count, src.Length);
+ Float divisor = LInfNorm(src.Values, src.Count, mean);
+ FillValues(Host, ref src, ref dst, divisor, scale, mean);
+ };
+ return del;
+ default:
+ Host.Assert(false, "Unsupported normalizer type");
+ goto case NormalizerKind.L2Norm;
+ }
+ }
+
+ switch (ex.NormKind)
+ {
case NormalizerKind.StdDev:
del =
(ref VBuffer<Float> dst) =>
{
getSrc(ref src);
- Float mean = Mean(src.Values, src.Count, src.Length);
- Float divisor = StdDev(src.Values, src.Count, src.Length, mean);
- FillValues(Host, ref src, ref dst, divisor, scale, mean);
+ Float divisor = StdDev(src.Values, src.Count, src.Length);
+ FillValues(Host, ref src, ref dst, divisor, scale);
};
return del;
case NormalizerKind.L2Norm:
@@ -454,9 +501,8 @@ protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, ou
(ref VBuffer<Float> dst) =>
{
getSrc(ref src);
- Float mean = Mean(src.Values, src.Count, src.Length);
- Float divisor = L2Norm(src.Values, src.Count, mean);
- FillValues(Host, ref src, ref dst, divisor, scale, mean);
+ Float divisor = L2Norm(src.Values, src.Count);
+ FillValues(Host, ref src, ref dst, divisor, scale);
};
return del;
case NormalizerKind.L1Norm:
@@ -464,9 +510,8 @@ protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, ou
(ref VBuffer<Float> dst) =>
{
getSrc(ref src);
- Float mean = Mean(src.Values, src.Count, src.Length);
- Float divisor = L1Norm(src.Values, src.Count, mean);
- FillValues(Host, ref src, ref dst, divisor, scale, mean);
+ Float divisor = L1Norm(src.Values, src.Count);
+ FillValues(Host, ref src, ref dst, divisor, scale);
};
return del;
case NormalizerKind.LInf:
@@ -474,58 +519,13 @@ protected override Delegate GetGetterCore(IChannel ch, IRow input, int iinfo, ou
(ref VBuffer<Float> dst) =>
{
getSrc(ref src);
- Float mean = Mean(src.Values, src.Count, src.Length);
- Float divisor = LInfNorm(src.Values, src.Count, mean);
- FillValues(Host, ref src, ref dst, divisor, scale, mean);
+ Float divisor = LInfNorm(src.Values, src.Count);
+ FillValues(Host, ref src, ref dst, divisor, scale);
};
return del;
default:
Host.Assert(false, "Unsupported normalizer type");
goto case NormalizerKind.L2Norm;
- }
- }
-
- switch (ex.NormKind)
- {
- case NormalizerKind.StdDev:
- del =
- (ref VBuffer<Float> dst) =>
- {
- getSrc(ref src);
- Float divisor = StdDev(src.Values, src.Count, src.Length);
- FillValues(Host, ref src, ref dst, divisor, scale);
- };
- return del;
- case NormalizerKind.L2Norm:
- del =
- (ref VBuffer<Float> dst) =>
- {
- getSrc(ref src);
- Float divisor = L2Norm(src.Values, src.Count);
- FillValues(Host, ref src, ref dst, divisor, scale);
- };
- return del;
- case NormalizerKind.L1Norm:
- del =
- (ref VBuffer<Float> dst) =>
- {
- getSrc(ref src);
- Float divisor = L1Norm(src.Values, src.Count);
- FillValues(Host, ref src, ref dst, divisor, scale);
- };
- return del;
- case NormalizerKind.LInf:
- del =
- (ref VBuffer<Float> dst) =>
- {
- getSrc(ref src);
- Float divisor = LInfNorm(src.Values, src.Count);
- FillValues(Host, ref src, ref dst, divisor, scale);
- };
- return del;
- default:
- Host.Assert(false, "Unsupported normalizer type");
- goto case NormalizerKind.L2Norm;
}
}