You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
/// <summary>
/// Convenience overload: creates a fresh Output handle set for the given
/// FieldAwareFactorizationMachineBinaryClassifier node, registers the node via the
/// two-argument Add overload, and returns the handles.
/// </summary>
public Microsoft.ML.Trainers.FieldAwareFactorizationMachineBinaryClassifier.Output Add(Microsoft.ML.Trainers.FieldAwareFactorizationMachineBinaryClassifier input)
{
    // Delegate the actual registration to the (input, output) overload.
    var result = new Microsoft.ML.Trainers.FieldAwareFactorizationMachineBinaryClassifier.Output();
    Add(input, result);
    return result;
}
487
+
488
+
public void Add(Microsoft.ML.Trainers.FieldAwareFactorizationMachineBinaryClassifier input, Microsoft.ML.Trainers.FieldAwareFactorizationMachineBinaryClassifier.Output output)
public Microsoft.ML.Trainers.GeneralizedAdditiveModelBinaryClassifier.Output Add(Microsoft.ML.Trainers.GeneralizedAdditiveModelBinaryClassifier input)
482
494
{
483
495
var output = new Microsoft.ML.Trainers.GeneralizedAdditiveModelBinaryClassifier.Output();
@@ -5987,6 +5999,130 @@ public FastTreeTweedieRegressorPipelineStep(Output output)
5987
5999
}
5988
6000
}
5989
6001
6002
+
namespace Trainers
6003
+
{
6004
+
6005
+
/// <summary>
6006
+
/// Train a field-aware factorization machine for binary classification
6007
+
/// </summary>
6008
+
public sealed partial class FieldAwareFactorizationMachineBinaryClassifier : Microsoft.ML.Runtime.EntryPoints.CommonInputs.ITrainerInputWithLabel, Microsoft.ML.Runtime.EntryPoints.CommonInputs.ITrainerInput, Microsoft.ML.ILearningPipelineItem
public string LabelColumn { get; set; } = "Label";
6067
+
6068
+
/// <summary>
6069
+
/// The data to be used for training
6070
+
/// </summary>
6071
+
public Var<Microsoft.ML.Runtime.Data.IDataView> TrainingData { get; set; } = new Var<Microsoft.ML.Runtime.Data.IDataView>();
6072
+
6073
+
/// <summary>
6074
+
/// Column to use for features
6075
+
/// </summary>
6076
+
public string FeatureColumn { get; set; } = "Features";
6077
+
6078
+
/// <summary>
6079
+
/// Normalize option for the feature column
6080
+
/// </summary>
6081
+
public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto;
6082
+
6083
+
/// <summary>
6084
+
/// Whether learner should cache input training data
6085
+
/// </summary>
6086
+
public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto;
6087
+
6088
+
6089
+
/// <summary>
/// Output handles produced when this trainer node is added to an experiment graph.
/// </summary>
public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBinaryClassificationOutput, Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITrainerOutput
{
    /// <summary>
    /// The trained model
    /// </summary>
    public Var<Microsoft.ML.Runtime.EntryPoints.IPredictorModel> PredictorModel { get; set; } = new Var<Microsoft.ML.Runtime.EntryPoints.IPredictorModel>();
}
6097
+
/// <summary>
/// Returns the data variable this trainer consumes (the <see cref="TrainingData"/> property).
/// </summary>
public Var<IDataView> GetInputData() => TrainingData;
6098
+
6099
+
/// <summary>
/// Wires this trainer into a learning pipeline: binds the previous step's data output to
/// <see cref="TrainingData"/>, registers the node with the experiment, and wraps the
/// resulting output handles in a predictor pipeline step.
/// </summary>
/// <param name="previousStep">The preceding pipeline step; must be a data step (or null for the first step).</param>
/// <param name="experiment">The experiment graph this node is added to.</param>
public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment)
{
    // Only a data-producing step can feed this trainer; anything else is a pipeline misuse.
    if (previousStep is ILearningPipelineDataStep dataStep)
    {
        TrainingData = dataStep.Data;
    }
    else if (previousStep != null)
    {
        throw new InvalidOperationException($"{ nameof(FieldAwareFactorizationMachineBinaryClassifier)} only supports an { nameof(ILearningPipelineDataStep)} as an input.");
    }

    Output trainerOutput = experiment.Add(this);
    return new FieldAwareFactorizationMachineBinaryClassifierPipelineStep(trainerOutput);
}
6113
+
6114
+
private class FieldAwareFactorizationMachineBinaryClassifierPipelineStep : ILearningPipelinePredictorStep
6115
+
{
6116
+
public FieldAwareFactorizationMachineBinaryClassifierPipelineStep(Output output)
Copy file name to clipboardExpand all lines: src/Native/FactorizationMachineNative/FactorizationMachineCore.cpp
+1-10Lines changed: 1 addition & 10 deletions
Original file line number
Diff line number
Diff line change
@@ -2,20 +2,11 @@
2
2
#include<cstring>
3
3
#include<limits>
4
4
#include<pmmintrin.h>
5
+
#include"../Stdafx.h"
5
6
6
7
#defineUNUSED(x) (void)(x)
7
8
#defineDEBUG_ONLY(x) (void)(x)
8
9
9
-
#ifdef COMPILER_GCC
10
-
11
-
#include"UnixSal.h"
12
-
#defineEXPORT_API(ret) extern"C" __attribute__((visibility("default"))) ret
13
-
14
-
#else
15
-
#include<intrin.h>
16
-
#defineEXPORT_API(ret) extern"C" __declspec(dllexport) ret __stdcall
17
-
#endif
18
-
19
10
EXPORT_API(void) CalculateIntermediateVariablesNative(int fieldCount, int latentDim, int count, _In_ int * fieldIndices, _In_ int * featureIndices, _In_ float * featureValues,
Copy file name to clipboardExpand all lines: test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv
+1Lines changed: 1 addition & 0 deletions
Original file line number
Diff line number
Diff line change
@@ -36,6 +36,7 @@ Trainers.FastTreeBinaryClassifier Uses a logit-boost boosted tree learner to per
36
36
Trainers.FastTreeRankerTrains gradient boosted decision trees to the LambdaRank quasi-gradient.Microsoft.ML.Runtime.FastTree.FastTreeTrainRankingMicrosoft.ML.Runtime.FastTree.FastTreeRankingTrainer+ArgumentsMicrosoft.ML.Runtime.EntryPoints.CommonOutputs+RankingOutput
37
37
Trainers.FastTreeRegressorTrains gradient boosted decision trees to fit target values using least-squares.Microsoft.ML.Runtime.FastTree.FastTreeTrainRegressionMicrosoft.ML.Runtime.FastTree.FastTreeRegressionTrainer+ArgumentsMicrosoft.ML.Runtime.EntryPoints.CommonOutputs+RegressionOutput
38
38
Trainers.FastTreeTweedieRegressorTrains gradient boosted decision trees to fit target values using a Tweedie loss function. This learner is a generalization of Poisson, compound Poisson, and gamma regression.Microsoft.ML.Runtime.FastTree.FastTreeTrainTweedieRegressionMicrosoft.ML.Runtime.FastTree.FastTreeTweedieTrainer+ArgumentsMicrosoft.ML.Runtime.EntryPoints.CommonOutputs+RegressionOutput
39
+
Trainers.FieldAwareFactorizationMachineBinaryClassifierTrain a field-aware factorization machine for binary classificationMicrosoft.ML.Runtime.FactorizationMachine.FieldAwareFactorizationMachineTrainerTrainBinaryMicrosoft.ML.Runtime.FactorizationMachine.FieldAwareFactorizationMachineTrainer+ArgumentsMicrosoft.ML.Runtime.EntryPoints.CommonOutputs+BinaryClassificationOutput
39
40
Trainers.GeneralizedAdditiveModelBinaryClassifierTrains a gradient boosted stump per feature, on all features simultaneously, to fit target values using least-squares. It maintains no interactions between features.Microsoft.ML.Runtime.FastTree.GamTrainBinaryMicrosoft.ML.Runtime.FastTree.BinaryClassificationGamTrainer+ArgumentsMicrosoft.ML.Runtime.EntryPoints.CommonOutputs+BinaryClassificationOutput
40
41
Trainers.GeneralizedAdditiveModelRegressorTrains a gradient boosted stump per feature, on all features simultaneously, to fit target values using least-squares. It maintains no interactions between features.Microsoft.ML.Runtime.FastTree.GamTrainRegressionMicrosoft.ML.Runtime.FastTree.RegressionGamTrainer+ArgumentsMicrosoft.ML.Runtime.EntryPoints.CommonOutputs+RegressionOutput
41
42
Trainers.KMeansPlusPlusClustererK-means is a popular clustering algorithm. With K-means, the data is clustered into a specified number of clusters in order to minimize the within-cluster sum of squares. K-means++ improves upon K-means by using a better method for choosing the initial cluster centers.Microsoft.ML.Runtime.KMeans.KMeansPlusPlusTrainerTrainKMeansMicrosoft.ML.Runtime.KMeans.KMeansPlusPlusTrainer+ArgumentsMicrosoft.ML.Runtime.EntryPoints.CommonOutputs+ClusteringOutput
0 commit comments