diff --git a/src/Microsoft.ML.CpuMath/AssemblyInfo.cs b/src/Microsoft.ML.CpuMath/AssemblyInfo.cs
new file mode 100644
index 0000000000..cb45bf5608
--- /dev/null
+++ b/src/Microsoft.ML.CpuMath/AssemblyInfo.cs
@@ -0,0 +1,9 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+[assembly: InternalsVisibleTo("Microsoft.ML.StandardLearners, PublicKey=00240000048000009400000006020000002400005253413100040000010001004b86c4cb78549b34bab61a3b1800e23bfeb5b3ec390074041536a7e3cbd97f5f04cf0f857155a8928eaa29ebfd11cfbbad3ba70efea7bda3226c6a8d370a4cd303f714486b6ebc225985a638471e6ef571cc92a4613c00b8fa65d61ccee0cbe5f36330c9a01f4183559f1bef24cc2917c6d913e3a541333a1d05d9bed22b38cb")]
\ No newline at end of file
diff --git a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineInterface.cs b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineInterface.cs
new file mode 100644
index 0000000000..b5fdbd0262
--- /dev/null
+++ b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineInterface.cs
@@ -0,0 +1,94 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using Microsoft.ML.Runtime.Internal.CpuMath;
+using Microsoft.ML.Runtime.Internal.Utilities;
+using System.Runtime.InteropServices;
+
+using System.Security;
+
+namespace Microsoft.ML.Runtime.FactorizationMachine
+{
+ internal unsafe static class FieldAwareFactorizationMachineInterface
+ {
+ internal const string NativePath = "FactorizationMachineNative";
+ public const int CbAlign = 16;
+
+ private static bool Compat(AlignedArray a)
+ {
+ Contracts.AssertValue(a);
+ Contracts.Assert(a.Size > 0);
+ return a.CbAlign == CbAlign;
+ }
+
+ private unsafe static float* Ptr(AlignedArray a, float* p)
+ {
+ Contracts.AssertValue(a);
+ float* q = p + a.GetBase((long)p);
+ Contracts.Assert(((long)q & (CbAlign - 1)) == 0);
+ return q;
+ }
+
+ [DllImport(NativePath), SuppressUnmanagedCodeSecurity]
+ public static extern void CalculateIntermediateVariablesNative(int fieldCount, int latentDim, int count, int* /*const*/ fieldIndices, int* /*const*/ featureIndices,
+ float* /*const*/ featureValues, float* /*const*/ linearWeights, float* /*const*/ latentWeights, float* latentSum, float* response);
+
+ [DllImport(NativePath), SuppressUnmanagedCodeSecurity]
+ public static extern void CalculateGradientAndUpdateNative(float lambdaLinear, float lambdaLatent, float learningRate, int fieldCount, int latentDim, float weight,
+ int count, int* /*const*/ fieldIndices, int* /*const*/ featureIndices, float* /*const*/ featureValues, float* /*const*/ latentSum, float slope,
+ float* linearWeights, float* latentWeights, float* linearAccumulatedSquaredGrads, float* latentAccumulatedSquaredGrads);
+
+ public static void CalculateIntermediateVariables(int fieldCount, int latentDim, int count, int[] fieldIndices, int[] featureIndices, float[] featureValues,
+ float[] linearWeights, AlignedArray latentWeights, AlignedArray latentSum, ref float response)
+ {
+ Contracts.AssertNonEmpty(fieldIndices);
+ Contracts.AssertNonEmpty(featureValues);
+ Contracts.AssertNonEmpty(featureIndices);
+ Contracts.AssertNonEmpty(linearWeights);
+ Contracts.Assert(Compat(latentWeights));
+ Contracts.Assert(Compat(latentSum));
+
+ unsafe
+ {
+ fixed (int* pf = &fieldIndices[0])
+ fixed (int* pi = &featureIndices[0])
+ fixed (float* px = &featureValues[0])
+ fixed (float* pw = &linearWeights[0])
+ fixed (float* pv = &latentWeights.Items[0])
+ fixed (float* pq = &latentSum.Items[0])
+ fixed (float* pr = &response)
+ CalculateIntermediateVariablesNative(fieldCount, latentDim, count, pf, pi, px, pw, Ptr(latentWeights, pv), Ptr(latentSum, pq), pr);
+ }
+ }
+
+ public static void CalculateGradientAndUpdate(float lambdaLinear, float lambdaLatent, float learningRate, int fieldCount, int latentDim,
+ float weight, int count, int[] fieldIndices, int[] featureIndices, float[] featureValues, AlignedArray latentSum, float slope,
+ float[] linearWeights, AlignedArray latentWeights, float[] linearAccumulatedSquaredGrads, AlignedArray latentAccumulatedSquaredGrads)
+ {
+ Contracts.AssertNonEmpty(fieldIndices);
+ Contracts.AssertNonEmpty(featureIndices);
+ Contracts.AssertNonEmpty(featureValues);
+ Contracts.Assert(Compat(latentSum));
+ Contracts.AssertNonEmpty(linearWeights);
+ Contracts.Assert(Compat(latentWeights));
+ Contracts.AssertNonEmpty(linearAccumulatedSquaredGrads);
+ Contracts.Assert(Compat(latentAccumulatedSquaredGrads));
+
+ unsafe
+ {
+ fixed (int* pf = &fieldIndices[0])
+ fixed (int* pi = &featureIndices[0])
+ fixed (float* px = &featureValues[0])
+ fixed (float* pq = &latentSum.Items[0])
+ fixed (float* pw = &linearWeights[0])
+ fixed (float* pv = &latentWeights.Items[0])
+ fixed (float* phw = &linearAccumulatedSquaredGrads[0])
+ fixed (float* phv = &latentAccumulatedSquaredGrads.Items[0])
+ CalculateGradientAndUpdateNative(lambdaLinear, lambdaLatent, learningRate, fieldCount, latentDim, weight, count, pf, pi, px,
+ Ptr(latentSum, pq), slope, pw, Ptr(latentWeights, pv), phw, Ptr(latentAccumulatedSquaredGrads, phv));
+ }
+
+ }
+ }
+}
diff --git a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs
new file mode 100644
index 0000000000..87d72471d9
--- /dev/null
+++ b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FactorizationMachineTrainer.cs
@@ -0,0 +1,418 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.ML.Runtime;
+using Microsoft.ML.Runtime.CommandLine;
+using Microsoft.ML.Runtime.Data;
+using Microsoft.ML.Runtime.EntryPoints;
+using Microsoft.ML.Runtime.FactorizationMachine;
+using Microsoft.ML.Runtime.Internal.CpuMath;
+using Microsoft.ML.Runtime.Internal.Internallearn;
+using Microsoft.ML.Runtime.Internal.Utilities;
+using Microsoft.ML.Runtime.Model;
+using Microsoft.ML.Runtime.Training;
+
+[assembly: LoadableClass(FieldAwareFactorizationMachineTrainer.Summary, typeof(FieldAwareFactorizationMachineTrainer), typeof(FieldAwareFactorizationMachineTrainer.Arguments),
+ new[] { typeof(SignatureBinaryClassifierTrainer), typeof(SignatureTrainer) }, FieldAwareFactorizationMachineTrainer.UserName, FieldAwareFactorizationMachineTrainer.LoadName,
+ FieldAwareFactorizationMachineTrainer.ShortName, DocName = "trainer/FactorizationMachine.md")]
+
+[assembly: LoadableClass(typeof(void), typeof(FieldAwareFactorizationMachineTrainer), null, typeof(SignatureEntryPointModule), FieldAwareFactorizationMachineTrainer.LoadName)]
+
+namespace Microsoft.ML.Runtime.FactorizationMachine
+{
+    /// <summary>
+    /// Train a field-aware factorization machine using ADAGRAD (an advanced stochastic gradient method). See references below
+    /// for details. This trainer is essentially faster than the one introduced in [2] because of some implementation tricks in [3].
+    /// [1] http://jmlr.org/papers/volume12/duchi11a/duchi11a.pdf
+    /// [2] http://www.csie.ntu.edu.tw/~cjlin/papers/ffm.pdf
+    /// [3] https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf
+    /// </summary>
+    public sealed class FieldAwareFactorizationMachineTrainer : TrainerBase<FieldAwareFactorizationMachinePredictor>,
+        IIncrementalTrainer<RoleMappedData, FieldAwareFactorizationMachinePredictor>, IValidatingTrainer<RoleMappedData>,
+        IIncrementalValidatingTrainer<RoleMappedData, FieldAwareFactorizationMachinePredictor>
+ {
+ public const string Summary = "Train a field-aware factorization machine for binary classification";
+ public const string UserName = "Field-aware Factorization Machine";
+ public const string LoadName = "FieldAwareFactorizationMachine";
+ public const string ShortName = "ffm";
+
+ public sealed class Arguments : LearnerInputBaseWithLabel
+ {
+ [Argument(ArgumentType.AtMostOnce, HelpText = "Initial learning rate", ShortName = "lr", SortOrder = 1)]
+ [TlcModule.SweepableFloatParam(0.001f, 1.0f, isLogScale: true)]
+ public float LearningRate = (float)0.1;
+
+ [Argument(ArgumentType.AtMostOnce, HelpText = "Number of training iterations", ShortName = "iter", SortOrder = 2)]
+ [TlcModule.SweepableLongParam(1, 100)]
+ public int Iters = 5;
+
+ [Argument(ArgumentType.AtMostOnce, HelpText = "Latent space dimension", ShortName = "d", SortOrder = 3)]
+ [TlcModule.SweepableLongParam(4, 100)]
+ public int LatentDim = 20;
+
+ [Argument(ArgumentType.AtMostOnce, HelpText = "Regularization coefficient of linear weights", ShortName = "lambdaLinear", SortOrder = 4)]
+ [TlcModule.SweepableFloatParam(1e-8f, 1f, isLogScale: true)]
+ public float LambdaLinear = 0.0001f;
+
+ [Argument(ArgumentType.AtMostOnce, HelpText = "Regularization coefficient of latent weights", ShortName = "lambdaLatent", SortOrder = 5)]
+ [TlcModule.SweepableFloatParam(1e-8f, 1f, isLogScale: true)]
+ public float LambdaLatent = 0.0001f;
+
+ [Argument(ArgumentType.AtMostOnce, HelpText = "Whether to normalize the input vectors so that the concatenation of all fields' feature vectors is unit-length", ShortName = "norm", SortOrder = 6)]
+ public bool Norm = true;
+
+ [Argument(ArgumentType.AtMostOnce, HelpText = "Whether to shuffle for each training iteration", ShortName = "shuf", SortOrder = 90)]
+ public bool Shuffle = true;
+
+            [Argument(ArgumentType.AtMostOnce, HelpText = "Report training progress or not", ShortName = "verbose", SortOrder = 91)]
+ public bool Verbose = true;
+
+ [Argument(ArgumentType.AtMostOnce, HelpText = "Radius of initial latent factors", ShortName = "rad", SortOrder = 110)]
+ [TlcModule.SweepableFloatParam(0.1f, 1f)]
+ public float Radius = 0.5f;
+ }
+
+ public override PredictionKind PredictionKind => PredictionKind.BinaryClassification;
+ public override bool NeedNormalization => true;
+ public override bool NeedCalibration => false;
+ public override bool WantCaching => true;
+ private readonly int _latentDim;
+ private readonly int _latentDimAligned;
+ private readonly float _lambdaLinear;
+ private readonly float _lambdaLatent;
+ private readonly float _learningRate;
+ private readonly int _numIterations;
+ private readonly bool _norm;
+ private readonly bool _shuffle;
+ private readonly bool _verbose;
+ private readonly float _radius;
+ private FieldAwareFactorizationMachinePredictor _pred;
+
+ public FieldAwareFactorizationMachineTrainer(IHostEnvironment env, Arguments args) : base(env, LoadName)
+ {
+ Host.CheckUserArg(args.LatentDim > 0, nameof(args.LatentDim), "Must be positive");
+ Host.CheckUserArg(args.LambdaLinear >= 0, nameof(args.LambdaLinear), "Must be non-negative");
+ Host.CheckUserArg(args.LambdaLatent >= 0, nameof(args.LambdaLatent), "Must be non-negative");
+ Host.CheckUserArg(args.LearningRate > 0, nameof(args.LearningRate), "Must be positive");
+ Host.CheckUserArg(args.Iters >= 0, nameof(args.Iters), "Must be non-negative");
+ _latentDim = args.LatentDim;
+ _latentDimAligned = FieldAwareFactorizationMachineUtils.GetAlignedVectorLength(_latentDim);
+ _lambdaLinear = args.LambdaLinear;
+ _lambdaLatent = args.LambdaLatent;
+ _learningRate = args.LearningRate;
+ _numIterations = args.Iters;
+ _norm = args.Norm;
+ _shuffle = args.Shuffle;
+ _verbose = args.Verbose;
+ _radius = args.Radius;
+ }
+
+ private void InitializeTrainingState(int fieldCount, int featureCount, FieldAwareFactorizationMachinePredictor predictor, out float[] linearWeights,
+ out AlignedArray latentWeightsAligned, out float[] linearAccumulatedSquaredGrads, out AlignedArray latentAccumulatedSquaredGradsAligned)
+ {
+ linearWeights = new float[featureCount];
+ latentWeightsAligned = new AlignedArray(featureCount * fieldCount * _latentDimAligned, 16);
+ linearAccumulatedSquaredGrads = new float[featureCount];
+ latentAccumulatedSquaredGradsAligned = new AlignedArray(featureCount * fieldCount * _latentDimAligned, 16);
+
+ if (predictor == null)
+ {
+ var rng = Host.Rand;
+ for (int j = 0; j < featureCount; j++)
+ {
+ linearWeights[j] = 0;
+ linearAccumulatedSquaredGrads[j] = 1;
+ for (int f = 0; f < fieldCount; f++)
+ {
+ int vBias = j * fieldCount * _latentDimAligned + f * _latentDimAligned;
+ for (int k = 0; k < _latentDimAligned; k++)
+ {
+ if (k < _latentDim)
+ latentWeightsAligned[vBias + k] = _radius * (float)rng.NextDouble();
+ else
+ latentWeightsAligned[vBias + k] = 0;
+ latentAccumulatedSquaredGradsAligned[vBias + k] = 1;
+ }
+ }
+ }
+ }
+ else
+ {
+ predictor.CopyLinearWeightsTo(linearWeights);
+ predictor.CopyLatentWeightsTo(latentWeightsAligned);
+ for (int j = 0; j < featureCount; j++)
+ {
+ linearAccumulatedSquaredGrads[j] = 1;
+ for (int f = 0; f < fieldCount; f++)
+ {
+ int vBias = j * fieldCount * _latentDimAligned + f * _latentDimAligned;
+ for (int k = 0; k < _latentDimAligned; k++)
+ latentAccumulatedSquaredGradsAligned[vBias + k] = 1;
+ }
+ }
+ }
+ }
+
+ private static float CalculateLoss(float label, float modelResponse)
+ {
+ float margin = label > 0 ? modelResponse : -modelResponse;
+ if (margin > 0)
+ return MathUtils.Log(1 + MathUtils.ExpSlow(-margin));
+ else
+ return -margin + MathUtils.Log(1 + MathUtils.ExpSlow(margin));
+ }
+
+ private static float CalculateLossSlope(float label, float modelResponse)
+ {
+ float sign = label > 0 ? 1 : -1;
+ float margin = sign * modelResponse;
+ return -sign * MathUtils.Sigmoid(-margin);
+ }
+
+ private static double CalculateAvgLoss(IChannel ch, RoleMappedData data, bool norm, float[] linearWeights, AlignedArray latentWeightsAligned,
+            int latentDimAligned, AlignedArray latentSum, int[] featureFieldBuffer, int[] featureIndexBuffer, float[] featureValueBuffer, VBuffer<float> buffer, ref long badExampleCount)
+ {
+ var featureColumns = data.Schema.GetColumns(RoleMappedSchema.ColumnRole.Feature);
+            Func<int, bool> pred = c => featureColumns.Select(ci => ci.Index).Contains(c) || c == data.Schema.Label.Index || (data.Schema.Weight != null && c == data.Schema.Weight.Index);
+            var getters = new ValueGetter<VBuffer<float>>[featureColumns.Count];
+ float label = 0;
+ float weight = 1;
+ double loss = 0;
+ float modelResponse = 0;
+ long exampleCount = 0;
+ badExampleCount = 0;
+ int count = 0;
+ using (var cursor = data.Data.GetRowCursor(pred))
+ {
+                var labelGetter = cursor.GetGetter<float>(data.Schema.Label.Index);
+                var weightGetter = data.Schema.Weight == null ? null : cursor.GetGetter<float>(data.Schema.Weight.Index);
+                for (int f = 0; f < featureColumns.Count; f++)
+                    getters[f] = cursor.GetGetter<VBuffer<float>>(featureColumns[f].Index);
+ while (cursor.MoveNext())
+ {
+ labelGetter(ref label);
+ weightGetter?.Invoke(ref weight);
+ float annihilation = label - label + weight - weight;
+ if (!FloatUtils.IsFinite(annihilation))
+ {
+ badExampleCount++;
+ continue;
+ }
+ if (!FieldAwareFactorizationMachineUtils.LoadOneExampleIntoBuffer(getters, buffer, norm, ref count,
+ featureFieldBuffer, featureIndexBuffer, featureValueBuffer))
+ {
+ badExampleCount++;
+ continue;
+ }
+ FieldAwareFactorizationMachineInterface.CalculateIntermediateVariables(featureColumns.Count, latentDimAligned, count,
+ featureFieldBuffer, featureIndexBuffer, featureValueBuffer, linearWeights, latentWeightsAligned, latentSum, ref modelResponse);
+ loss += weight * CalculateLoss(label, modelResponse);
+ exampleCount++;
+ }
+ }
+ return loss / exampleCount;
+ }
+
+ private void TrainCore(IChannel ch, IProgressChannel pch, RoleMappedData data, RoleMappedData validData, FieldAwareFactorizationMachinePredictor predictor)
+ {
+ Host.AssertValue(ch);
+ Host.AssertValue(pch);
+
+ data.CheckBinaryLabel();
+ var featureColumns = data.Schema.GetColumns(RoleMappedSchema.ColumnRole.Feature);
+ int fieldCount = featureColumns.Count;
+ int totalFeatureCount = 0;
+ int[] fieldColumnIndexes = new int[fieldCount];
+ for (int f = 0; f < fieldCount; f++)
+ {
+ var col = featureColumns[f];
+                if (col == null)
+                    throw ch.ExceptParam(nameof(data), "Empty feature column not allowed");
+                Host.Assert(col.Type.AsVector.VectorSize > 0);
+ Host.Assert(!data.Schema.Schema.IsHidden(col.Index));
+ if (!col.Type.IsKnownSizeVector || col.Type.ItemType != NumberType.Float)
+ throw ch.ExceptParam(nameof(data), "Training feature column '{0}' must be a known-size vector of R4, but has type: {1}.", col.Name, col.Type);
+ fieldColumnIndexes[f] = col.Index;
+ totalFeatureCount += col.Type.AsVector.VectorSize;
+ }
+ ch.Check(checked(totalFeatureCount * fieldCount * _latentDimAligned) <= Utils.ArrayMaxSize, "Latent dimension or the number of fields too large");
+ if (predictor != null)
+ {
+ ch.Check(predictor.FeatureCount == totalFeatureCount, "Input model's feature count mismatches training feature count");
+ ch.Check(predictor.LatentDim == _latentDim, "Input model's latent dimension mismatches trainer's");
+ }
+ if (validData != null)
+ {
+ validData.CheckBinaryLabel();
+                var validFeatureColumns = validData.Schema.GetColumns(RoleMappedSchema.ColumnRole.Feature);
+ Host.Assert(fieldCount == validFeatureColumns.Count);
+ for (int f = 0; f < fieldCount; f++)
+ Host.Assert(featureColumns[f] == validFeatureColumns[f]);
+ }
+ bool shuffle = _shuffle;
+ if (shuffle && !data.Data.CanShuffle)
+ {
+ ch.Warning("Training data does not support shuffling, so ignoring request to shuffle");
+ shuffle = false;
+ }
+ var rng = shuffle ? Host.Rand : null;
+            var featureGetters = new ValueGetter<VBuffer<float>>[fieldCount];
+            var featureBuffer = new VBuffer<float>();
+ var featureValueBuffer = new float[totalFeatureCount];
+ var featureIndexBuffer = new int[totalFeatureCount];
+ var featureFieldBuffer = new int[totalFeatureCount];
+ var latentSum = new AlignedArray(fieldCount * fieldCount * _latentDimAligned, 16);
+            var metricNames = new List<string>() { "Training-loss" };
+ if (validData != null)
+ metricNames.Add("Validation-loss");
+ int iter = 0;
+ long exampleCount = 0;
+ long badExampleCount = 0;
+ long validBadExampleCount = 0;
+ double loss = 0;
+ double validLoss = 0;
+ pch.SetHeader(new ProgressHeader(metricNames.ToArray(), new string[] { "iterations", "examples" }), entry =>
+ {
+ entry.SetProgress(0, iter, _numIterations);
+ entry.SetProgress(1, exampleCount);
+ });
+            Func<int, bool> pred = c => fieldColumnIndexes.Contains(c) || c == data.Schema.Label.Index || (data.Schema.Weight != null && c == data.Schema.Weight.Index);
+ InitializeTrainingState(fieldCount, totalFeatureCount, predictor, out float[] linearWeights,
+ out AlignedArray latentWeightsAligned, out float[] linearAccSqGrads, out AlignedArray latentAccSqGradsAligned);
+
+ // refer to Algorithm 3 in https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf
+ while (iter++ < _numIterations)
+ {
+ using (var cursor = data.Data.GetRowCursor(pred, rng))
+ {
+ var labelGetter = RowCursorUtils.GetLabelGetter(cursor, data.Schema.Label.Index);
+                    var weightGetter = data.Schema.Weight == null ? null : RowCursorUtils.GetGetterAs<float>(NumberType.R4, cursor, data.Schema.Weight.Index);
+ for (int i = 0; i < fieldCount; i++)
+                        featureGetters[i] = cursor.GetGetter<VBuffer<float>>(fieldColumnIndexes[i]);
+ loss = 0;
+ exampleCount = 0;
+ badExampleCount = 0;
+ while (cursor.MoveNext())
+ {
+ float label = 0;
+ float weight = 1;
+ int count = 0;
+ float modelResponse = 0;
+ labelGetter(ref label);
+ weightGetter?.Invoke(ref weight);
+ float annihilation = label - label + weight - weight;
+ if (!FloatUtils.IsFinite(annihilation))
+ {
+ badExampleCount++;
+ continue;
+ }
+ if (!FieldAwareFactorizationMachineUtils.LoadOneExampleIntoBuffer(featureGetters, featureBuffer, _norm, ref count,
+ featureFieldBuffer, featureIndexBuffer, featureValueBuffer))
+ {
+ badExampleCount++;
+ continue;
+ }
+
+ // refer to Algorithm 1 in [3] https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf
+ FieldAwareFactorizationMachineInterface.CalculateIntermediateVariables(fieldCount, _latentDimAligned, count,
+ featureFieldBuffer, featureIndexBuffer, featureValueBuffer, linearWeights, latentWeightsAligned, latentSum, ref modelResponse);
+ var slope = CalculateLossSlope(label, modelResponse);
+
+ // refer to Algorithm 2 in [3] https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf
+ FieldAwareFactorizationMachineInterface.CalculateGradientAndUpdate(_lambdaLinear, _lambdaLatent, _learningRate, fieldCount, _latentDimAligned, weight, count,
+ featureFieldBuffer, featureIndexBuffer, featureValueBuffer, latentSum, slope, linearWeights, latentWeightsAligned, linearAccSqGrads, latentAccSqGradsAligned);
+ loss += weight * CalculateLoss(label, modelResponse);
+ exampleCount++;
+ }
+ loss /= exampleCount;
+ }
+
+ if (_verbose)
+ {
+ if (validData == null)
+ pch.Checkpoint(loss, iter, exampleCount);
+ else
+ {
+ validLoss = CalculateAvgLoss(ch, validData, _norm, linearWeights, latentWeightsAligned, _latentDimAligned, latentSum,
+ featureFieldBuffer, featureIndexBuffer, featureValueBuffer, featureBuffer, ref validBadExampleCount);
+ pch.Checkpoint(loss, validLoss, iter, exampleCount);
+ }
+ }
+ }
+ if (badExampleCount != 0)
+ ch.Warning($"Skipped {badExampleCount} examples with bad label/weight/features in training set");
+ if (validBadExampleCount != 0)
+ ch.Warning($"Skipped {validBadExampleCount} examples with bad label/weight/features in validation set");
+ _pred = new FieldAwareFactorizationMachinePredictor(Host, _norm, fieldCount, totalFeatureCount, _latentDim, linearWeights, latentWeightsAligned);
+ }
+
+ public override void Train(RoleMappedData data)
+ {
+ Host.CheckValue(data, nameof(data));
+ using (var ch = Host.Start("Training"))
+ using (var pch = Host.StartProgressChannel("Training"))
+ {
+ TrainCore(ch, pch, data, null, null);
+ ch.Done();
+ }
+ }
+
+ public void Train(RoleMappedData data, RoleMappedData validData)
+ {
+ Host.CheckValue(data, nameof(data));
+ Host.CheckValue(validData, nameof(validData));
+ using (var ch = Host.Start("Training"))
+ using (var pch = Host.StartProgressChannel("Training"))
+ {
+ TrainCore(ch, pch, data, validData, null);
+ ch.Done();
+ }
+ }
+
+ public void Train(RoleMappedData data, FieldAwareFactorizationMachinePredictor predictor)
+ {
+ Host.CheckValue(data, nameof(data));
+ Host.CheckValue(predictor, nameof(predictor));
+ using (var ch = Host.Start("Training"))
+ using (var pch = Host.StartProgressChannel("Training"))
+ {
+ TrainCore(ch, pch, data, null, predictor);
+ ch.Done();
+ }
+ }
+
+ public void Train(RoleMappedData data, RoleMappedData validData, FieldAwareFactorizationMachinePredictor predictor)
+ {
+ Host.CheckValue(data, nameof(data));
+            Host.CheckValue(validData, nameof(validData));
+ Host.CheckValue(predictor, nameof(predictor));
+ using (var ch = Host.Start("Training"))
+ using (var pch = Host.StartProgressChannel("Training"))
+ {
+ TrainCore(ch, pch, data, validData, predictor);
+ ch.Done();
+ }
+ }
+
+ public override FieldAwareFactorizationMachinePredictor CreatePredictor()
+ {
+ Host.Check(_pred != null, nameof(Train) + " has not yet been called");
+ return _pred;
+ }
+
+ [TlcModule.EntryPoint(Name = "Trainers.FieldAwareFactorizationMachineBinaryClassifier", Desc = FieldAwareFactorizationMachineTrainer.Summary, UserName = FieldAwareFactorizationMachineTrainer.UserName, ShortName = FieldAwareFactorizationMachineTrainer.ShortName)]
+ public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, Arguments input)
+ {
+ Contracts.CheckValue(env, nameof(env));
+ var host = env.Register("Train a field-aware factorization machine");
+ host.CheckValue(input, nameof(input));
+ EntryPointUtils.CheckInputArgs(host, input);
+ return LearnerEntryPointsUtils.Train(host, input, () => new FieldAwareFactorizationMachineTrainer(host, input),
+ () => LearnerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.LabelColumn));
+ }
+ }
+}
diff --git a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachinePredictor.cs b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachinePredictor.cs
new file mode 100644
index 0000000000..37261cb55b
--- /dev/null
+++ b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachinePredictor.cs
@@ -0,0 +1,184 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using Microsoft.ML.Runtime;
+using Microsoft.ML.Runtime.Data;
+using Microsoft.ML.Runtime.FactorizationMachine;
+using Microsoft.ML.Runtime.Internal.CpuMath;
+using Microsoft.ML.Runtime.Internal.Internallearn;
+using Microsoft.ML.Runtime.Internal.Utilities;
+using Microsoft.ML.Runtime.Model;
+
+[assembly: LoadableClass(typeof(FieldAwareFactorizationMachinePredictor), null, typeof(SignatureLoadModel), "Field Aware Factorization Machine", FieldAwareFactorizationMachinePredictor.LoaderSignature)]
+
+namespace Microsoft.ML.Runtime.FactorizationMachine
+{
+    public sealed class FieldAwareFactorizationMachinePredictor : PredictorBase<float>, ISchemaBindableMapper, ICanSaveModel
+ {
+ public const string LoaderSignature = "FieldAwareFactMacPredict";
+ public override PredictionKind PredictionKind => PredictionKind.BinaryClassification;
+ private bool _norm;
+ internal int FieldCount { get; }
+ internal int FeatureCount { get; }
+ internal int LatentDim { get; }
+ internal int LatentDimAligned { get; }
+ private readonly float[] _linearWeights;
+ private readonly AlignedArray _latentWeightsAligned;
+
+ private static VersionInfo GetVersionInfo()
+ {
+ return new VersionInfo(
+ modelSignature: "FAFAMAPD",
+ verWrittenCur: 0x00010001,
+ verReadableCur: 0x00010001,
+ verWeCanReadBack: 0x00010001,
+ loaderSignature: LoaderSignature);
+ }
+
+ internal FieldAwareFactorizationMachinePredictor(IHostEnvironment env, bool norm, int fieldCount, int featureCount, int latentDim,
+ float[] linearWeights, AlignedArray latentWeightsAligned) : base(env, LoaderSignature)
+ {
+ Host.Assert(fieldCount > 0);
+ Host.Assert(featureCount > 0);
+ Host.Assert(latentDim > 0);
+ Host.Assert(Utils.Size(linearWeights) == featureCount);
+ LatentDimAligned = FieldAwareFactorizationMachineUtils.GetAlignedVectorLength(latentDim);
+ Host.Assert(latentWeightsAligned.Size == checked(featureCount * fieldCount * LatentDimAligned));
+
+ _norm = norm;
+ FieldCount = fieldCount;
+ FeatureCount = featureCount;
+ LatentDim = latentDim;
+ _linearWeights = linearWeights;
+ _latentWeightsAligned = latentWeightsAligned;
+ }
+
+ private FieldAwareFactorizationMachinePredictor(IHostEnvironment env, ModelLoadContext ctx) : base(env, LoaderSignature)
+ {
+ Host.AssertValue(ctx);
+
+ // *** Binary format ***
+ // bool: whether to normalize feature vectors
+ // int: number of fields
+ // int: number of features
+ // int: latent dimension
+ // float[]: linear coefficients
+ // float[]: latent representation of features
+
+ var norm = ctx.Reader.ReadBoolean();
+ var fieldCount = ctx.Reader.ReadInt32();
+ Host.CheckDecode(fieldCount > 0);
+ var featureCount = ctx.Reader.ReadInt32();
+ Host.CheckDecode(featureCount > 0);
+ var latentDim = ctx.Reader.ReadInt32();
+ Host.CheckDecode(latentDim > 0);
+ LatentDimAligned = FieldAwareFactorizationMachineUtils.GetAlignedVectorLength(latentDim);
+ Host.Check(checked(featureCount * fieldCount * LatentDimAligned) <= Utils.ArrayMaxSize, "Latent dimension too large");
+ var linearWeights = ctx.Reader.ReadFloatArray();
+ Host.CheckDecode(Utils.Size(linearWeights) == featureCount);
+ var latentWeights = ctx.Reader.ReadFloatArray();
+ Host.CheckDecode(Utils.Size(latentWeights) == featureCount * fieldCount * latentDim);
+
+ _norm = norm;
+ FieldCount = fieldCount;
+ FeatureCount = featureCount;
+ LatentDim = latentDim;
+ _linearWeights = linearWeights;
+ _latentWeightsAligned = new AlignedArray(FeatureCount * FieldCount * LatentDimAligned, 16);
+ for (int j = 0; j < FeatureCount; j++)
+ {
+ for (int f = 0; f < FieldCount; f++)
+ {
+ int vBias = j * FieldCount * LatentDim + f * LatentDim;
+ int vBiasAligned = j * FieldCount * LatentDimAligned + f * LatentDimAligned;
+ for (int k = 0; k < LatentDimAligned; k++)
+ {
+ if (k < LatentDim)
+ _latentWeightsAligned[vBiasAligned + k] = latentWeights[vBias + k];
+ else
+ _latentWeightsAligned[vBiasAligned + k] = 0;
+ }
+ }
+ }
+ }
+
+ public static FieldAwareFactorizationMachinePredictor Create(IHostEnvironment env, ModelLoadContext ctx)
+ {
+ Contracts.CheckValue(env, nameof(env));
+ env.CheckValue(ctx, nameof(ctx));
+ ctx.CheckAtModel(GetVersionInfo());
+ return new FieldAwareFactorizationMachinePredictor(env, ctx);
+ }
+
+ protected override void SaveCore(ModelSaveContext ctx)
+ {
+ Host.AssertValue(ctx);
+ ctx.SetVersionInfo(GetVersionInfo());
+
+ // *** Binary format ***
+ // bool: whether to normalize feature vectors
+ // int: number of fields
+ // int: number of features
+ // int: latent dimension
+ // float[]: linear coefficients
+ // float[]: latent representation of features
+
+ Host.Assert(FieldCount > 0);
+ Host.Assert(FeatureCount > 0);
+ Host.Assert(LatentDim > 0);
+ Host.Assert(Utils.Size(_linearWeights) == FeatureCount);
+ Host.Assert(_latentWeightsAligned.Size == FeatureCount * FieldCount * LatentDimAligned);
+
+ ctx.Writer.Write(_norm);
+ ctx.Writer.Write(FieldCount);
+ ctx.Writer.Write(FeatureCount);
+ ctx.Writer.Write(LatentDim);
+ ctx.Writer.WriteFloatArray(_linearWeights);
+ float[] latentWeights = new float[FeatureCount * FieldCount * LatentDim];
+ for (int j = 0; j < FeatureCount; j++)
+ {
+ for (int f = 0; f < FieldCount; f++)
+ {
+ int vBias = j * FieldCount * LatentDim + f * LatentDim;
+ int vBiasAligned = j * FieldCount * LatentDimAligned + f * LatentDimAligned;
+ for (int k = 0; k < LatentDim; k++)
+ latentWeights[vBias + k] = _latentWeightsAligned[vBiasAligned + k];
+ }
+ }
+ ctx.Writer.WriteFloatArray(latentWeights);
+ }
+
+        internal float CalculateResponse(ValueGetter<VBuffer<float>>[] getters, VBuffer<float> featureBuffer,
+ int[] featureFieldBuffer, int[] featureIndexBuffer, float[] featureValueBuffer, AlignedArray latentSum)
+ {
+ int count = 0;
+ float modelResponse = 0;
+ FieldAwareFactorizationMachineUtils.LoadOneExampleIntoBuffer(getters, featureBuffer, _norm, ref count,
+ featureFieldBuffer, featureIndexBuffer, featureValueBuffer);
+ FieldAwareFactorizationMachineInterface.CalculateIntermediateVariables(FieldCount, LatentDimAligned, count,
+ featureFieldBuffer, featureIndexBuffer, featureValueBuffer, _linearWeights, _latentWeightsAligned, latentSum, ref modelResponse);
+ return modelResponse;
+ }
+
+ public ISchemaBoundMapper Bind(IHostEnvironment env, RoleMappedSchema schema)
+ {
+ return new FieldAwareFactorizationMachineScalarRowMapper(env, schema, new BinaryClassifierSchema(), this);
+ }
+
+ internal void CopyLinearWeightsTo(float[] linearWeights)
+ {
+ Host.AssertValue(_linearWeights);
+ Host.AssertValue(linearWeights);
+ Array.Copy(_linearWeights, linearWeights, _linearWeights.Length);
+ }
+
+ internal void CopyLatentWeightsTo(AlignedArray latentWeights)
+ {
+ Host.AssertValue(_latentWeightsAligned);
+ Host.AssertValue(latentWeights);
+ latentWeights.CopyFrom(_latentWeightsAligned);
+ }
+ }
+}
diff --git a/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs
new file mode 100644
index 0000000000..0438fcc49b
--- /dev/null
+++ b/src/Microsoft.ML.StandardLearners/FactorizationMachine/FieldAwareFactorizationMachineUtils.cs
@@ -0,0 +1,144 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.ML.Runtime.Data;
+using Microsoft.ML.Runtime.Internal.CpuMath;
+using Microsoft.ML.Runtime.Internal.Utilities;
+
+namespace Microsoft.ML.Runtime.FactorizationMachine
+{
+ internal sealed class FieldAwareFactorizationMachineUtils
+ {
+ internal static int GetAlignedVectorLength(int length)
+ {
+ int res = length % 4;
+ if (res == 0)
+ return length;
+ else
+ return length + (4 - res);
+ }
+
+ internal static bool LoadOneExampleIntoBuffer(ValueGetter<VBuffer<float>>[] getters, VBuffer<float> featureBuffer, bool norm, ref int count,
+ int[] fieldIndexBuffer, int[] featureIndexBuffer, float[] featureValueBuffer)
+ {
+ count = 0;
+ float featureNorm = 0;
+ int bias = 0;
+ float annihilation = 0;
+ for (int f = 0; f < getters.Length; f++)
+ {
+ getters[f](ref featureBuffer);
+ foreach (var pair in featureBuffer.Items())
+ {
+ fieldIndexBuffer[count] = f;
+ featureIndexBuffer[count] = bias + pair.Key;
+ featureValueBuffer[count] = pair.Value;
+ featureNorm += pair.Value * pair.Value;
+ annihilation += pair.Value - pair.Value;
+ count++;
+ }
+ bias += featureBuffer.Length;
+ }
+ featureNorm = MathUtils.Sqrt(featureNorm);
+ if (norm)
+ {
+ for (int i = 0; i < count; i++)
+ featureValueBuffer[i] /= featureNorm;
+ }
+ return FloatUtils.IsFinite(annihilation);
+ }
+ }
+
+
+ internal sealed class FieldAwareFactorizationMachineScalarRowMapper : ISchemaBoundRowMapper
+ {
+ private readonly FieldAwareFactorizationMachinePredictor _pred;
+
+ public RoleMappedSchema InputSchema { get; }
+
+ public ISchema OutputSchema { get; }
+
+ public ISchemaBindableMapper Bindable => _pred;
+
+ private readonly ColumnInfo[] _columns;
+ private readonly List<int> _inputColumnIndexes;
+ private readonly IHostEnvironment _env;
+
+ public FieldAwareFactorizationMachineScalarRowMapper(IHostEnvironment env, RoleMappedSchema schema,
+ ISchema outputSchema, FieldAwareFactorizationMachinePredictor pred)
+ {
+ Contracts.AssertValue(env);
+ Contracts.AssertValue(schema);
+ Contracts.CheckParam(outputSchema.ColumnCount == 2, nameof(outputSchema));
+ Contracts.CheckParam(outputSchema.GetColumnType(0).IsNumber, nameof(outputSchema));
+ Contracts.CheckParam(outputSchema.GetColumnType(1).IsNumber, nameof(outputSchema));
+ Contracts.AssertValue(pred);
+
+ _env = env;
+ _columns = schema.GetColumns(RoleMappedSchema.ColumnRole.Feature).ToArray();
+ _pred = pred;
+
+ var inputFeatureColumns = _columns.Select(c => new KeyValuePair<RoleMappedSchema.ColumnRole, string>(RoleMappedSchema.ColumnRole.Feature, c.Name)).ToList();
+ InputSchema = RoleMappedSchema.Create(schema.Schema, inputFeatureColumns);
+ OutputSchema = outputSchema;
+
+ _inputColumnIndexes = new List<int>();
+ foreach (var kvp in inputFeatureColumns)
+ {
+ if (schema.Schema.TryGetColumnIndex(kvp.Value, out int index))
+ _inputColumnIndexes.Add(index);
+ }
+ }
+
+ public IRow GetOutputRow(IRow input, Func<int, bool> predicate, out Action action)
+ {
+ var latentSum = new AlignedArray(_pred.FieldCount * _pred.FieldCount * _pred.LatentDimAligned, 16);
+ var featureBuffer = new VBuffer<float>();
+ var featureFieldBuffer = new int[_pred.FeatureCount];
+ var featureIndexBuffer = new int[_pred.FeatureCount];
+ var featureValueBuffer = new float[_pred.FeatureCount];
+ var inputGetters = new ValueGetter<VBuffer<float>>[_pred.FieldCount];
+ for (int f = 0; f < _pred.FieldCount; f++)
+ inputGetters[f] = input.GetGetter<VBuffer<float>>(_inputColumnIndexes[f]);
+
+ action = null;
+ var getters = new Delegate[2];
+ if (predicate(0))
+ {
+ ValueGetter<float> responseGetter = (ref float value) =>
+ {
+ value = _pred.CalculateResponse(inputGetters, featureBuffer, featureFieldBuffer, featureIndexBuffer, featureValueBuffer, latentSum);
+ };
+ getters[0] = responseGetter;
+ }
+ if (predicate(1))
+ {
+ ValueGetter<float> probGetter = (ref float value) =>
+ {
+ value = _pred.CalculateResponse(inputGetters, featureBuffer, featureFieldBuffer, featureIndexBuffer, featureValueBuffer, latentSum);
+ value = MathUtils.SigmoidSlow(value);
+ };
+ getters[1] = probGetter;
+ }
+
+ return new SimpleRow(OutputSchema, input, getters);
+ }
+
+ public Func<int, bool> GetDependencies(Func<int, bool> predicate)
+ {
+ if (Enumerable.Range(0, OutputSchema.ColumnCount).Any(predicate))
+ return index => _inputColumnIndexes.Any(c => c == index);
+ else
+ return index => false;
+ }
+
+ public IEnumerable<KeyValuePair<RoleMappedSchema.ColumnRole, string>> GetInputColumnRoles()
+ {
+ return InputSchema.GetColumnRoles().Select(kvp => new KeyValuePair<RoleMappedSchema.ColumnRole, string>(kvp.Key, kvp.Value.Name));
+ }
+ }
+}
diff --git a/src/Microsoft.ML.StandardLearners/Microsoft.ML.StandardLearners.csproj b/src/Microsoft.ML.StandardLearners/Microsoft.ML.StandardLearners.csproj
index 303995dd5b..6bada43299 100644
--- a/src/Microsoft.ML.StandardLearners/Microsoft.ML.StandardLearners.csproj
+++ b/src/Microsoft.ML.StandardLearners/Microsoft.ML.StandardLearners.csproj
@@ -1,8 +1,9 @@
-<Project Sdk="Microsoft.NET.Sdk">
+<Project Sdk="Microsoft.NET.Sdk">
 <TargetFramework>netstandard2.0</TargetFramework>
 <IncludeInPackage>Microsoft.ML</IncludeInPackage>
+ <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
diff --git a/src/Microsoft.ML/CSharpApi.cs b/src/Microsoft.ML/CSharpApi.cs
index 0f8fefb267..d87c51c574 100644
--- a/src/Microsoft.ML/CSharpApi.cs
+++ b/src/Microsoft.ML/CSharpApi.cs
@@ -478,6 +478,18 @@ public void Add(Microsoft.ML.Trainers.FastTreeTweedieRegressor input, Microsoft.
_jsonNodes.Add(Serialize("Trainers.FastTreeTweedieRegressor", input, output));
}
+ public Microsoft.ML.Trainers.FieldAwareFactorizationMachineBinaryClassifier.Output Add(Microsoft.ML.Trainers.FieldAwareFactorizationMachineBinaryClassifier input)
+ {
+ var output = new Microsoft.ML.Trainers.FieldAwareFactorizationMachineBinaryClassifier.Output();
+ Add(input, output);
+ return output;
+ }
+
+ public void Add(Microsoft.ML.Trainers.FieldAwareFactorizationMachineBinaryClassifier input, Microsoft.ML.Trainers.FieldAwareFactorizationMachineBinaryClassifier.Output output)
+ {
+ _jsonNodes.Add(Serialize("Trainers.FieldAwareFactorizationMachineBinaryClassifier", input, output));
+ }
+
public Microsoft.ML.Trainers.GeneralizedAdditiveModelBinaryClassifier.Output Add(Microsoft.ML.Trainers.GeneralizedAdditiveModelBinaryClassifier input)
{
var output = new Microsoft.ML.Trainers.GeneralizedAdditiveModelBinaryClassifier.Output();
@@ -5999,6 +6011,130 @@ public FastTreeTweedieRegressorPipelineStep(Output output)
}
}
+ namespace Trainers
+ {
+
+ /// <summary>
+ /// Train a field-aware factorization machine for binary classification
+ /// </summary>
+ public sealed partial class FieldAwareFactorizationMachineBinaryClassifier : Microsoft.ML.Runtime.EntryPoints.CommonInputs.ITrainerInputWithLabel, Microsoft.ML.Runtime.EntryPoints.CommonInputs.ITrainerInput, Microsoft.ML.ILearningPipelineItem
+ {
+
+
+ ///
+ /// Initial learning rate
+ ///
+ [TlcModule.SweepableFloatParamAttribute("LearningRate", 0.001f, 1f, isLogScale:true)]
+ public float LearningRate { get; set; } = 0.1f;
+
+ ///
+ /// Number of training iterations
+ ///
+ [TlcModule.SweepableLongParamAttribute("Iters", 1, 100)]
+ public int Iters { get; set; } = 5;
+
+ ///
+ /// Latent space dimension
+ ///
+ [TlcModule.SweepableLongParamAttribute("LatentDim", 4, 100)]
+ public int LatentDim { get; set; } = 20;
+
+ ///
+ /// Regularization coefficient of linear weights
+ ///
+ [TlcModule.SweepableFloatParamAttribute("LambdaLinear", 1E-08f, 1f, isLogScale:true)]
+ public float LambdaLinear { get; set; } = 0.0001f;
+
+ ///
+ /// Regularization coefficient of latent weights
+ ///
+ [TlcModule.SweepableFloatParamAttribute("LambdaLatent", 1E-08f, 1f, isLogScale:true)]
+ public float LambdaLatent { get; set; } = 0.0001f;
+
+ ///
+ /// Whether to normalize the input vectors so that the concatenation of all fields' feature vectors is unit-length
+ ///
+ public bool Norm { get; set; } = true;
+
+ ///
+ /// Whether to shuffle for each training iteration
+ ///
+ public bool Shuffle { get; set; } = true;
+
+ ///
+ /// Report traning progress or not
+ ///
+ public bool Verbose { get; set; } = true;
+
+ ///
+ /// Radius of initial latent factors
+ ///
+ [TlcModule.SweepableFloatParamAttribute("Radius", 0.1f, 1f)]
+ public float Radius { get; set; } = 0.5f;
+
+ ///
+ /// Column to use for labels
+ ///
+ public string LabelColumn { get; set; } = "Label";
+
+ ///
+ /// The data to be used for training
+ ///
+ public Var<IDataView> TrainingData { get; set; } = new Var<IDataView>();
+
+ ///
+ /// Column to use for features
+ ///
+ public string FeatureColumn { get; set; } = "Features";
+
+ ///
+ /// Normalize option for the feature column
+ ///
+ public Microsoft.ML.Models.NormalizeOption NormalizeFeatures { get; set; } = Microsoft.ML.Models.NormalizeOption.Auto;
+
+ ///
+ /// Whether learner should cache input training data
+ ///
+ public Microsoft.ML.Models.CachingOptions Caching { get; set; } = Microsoft.ML.Models.CachingOptions.Auto;
+
+
+ public sealed class Output : Microsoft.ML.Runtime.EntryPoints.CommonOutputs.IBinaryClassificationOutput, Microsoft.ML.Runtime.EntryPoints.CommonOutputs.ITrainerOutput
+ {
+ ///
+ /// The trained model
+ ///
+ public Var<IPredictorModel> PredictorModel { get; set; } = new Var<IPredictorModel>();
+
+ }
+ public Var<IDataView> GetInputData() => TrainingData;
+
+ public ILearningPipelineStep ApplyStep(ILearningPipelineStep previousStep, Experiment experiment)
+ {
+ if (previousStep != null)
+ {
+ if (!(previousStep is ILearningPipelineDataStep dataStep))
+ {
+ throw new InvalidOperationException($"{ nameof(FieldAwareFactorizationMachineBinaryClassifier)} only supports an { nameof(ILearningPipelineDataStep)} as an input.");
+ }
+
+ TrainingData = dataStep.Data;
+ }
+ Output output = experiment.Add(this);
+ return new FieldAwareFactorizationMachineBinaryClassifierPipelineStep(output);
+ }
+
+ private class FieldAwareFactorizationMachineBinaryClassifierPipelineStep : ILearningPipelinePredictorStep
+ {
+ public FieldAwareFactorizationMachineBinaryClassifierPipelineStep(Output output)
+ {
+ Model = output.PredictorModel;
+ }
+
+ public Var<IPredictorModel> Model { get; }
+ }
+ }
+ }
+
namespace Trainers
{
diff --git a/src/Native/CMakeLists.txt b/src/Native/CMakeLists.txt
index d8f963e44e..767f6151fa 100644
--- a/src/Native/CMakeLists.txt
+++ b/src/Native/CMakeLists.txt
@@ -180,4 +180,5 @@ endfunction()
add_subdirectory(CpuMathNative)
add_subdirectory(FastTreeNative)
-add_subdirectory(LdaNative)
\ No newline at end of file
+add_subdirectory(LdaNative)
+add_subdirectory(FactorizationMachineNative)
diff --git a/src/Native/FactorizationMachineNative/CMakeLists.txt b/src/Native/FactorizationMachineNative/CMakeLists.txt
new file mode 100644
index 0000000000..95cb6ab9a6
--- /dev/null
+++ b/src/Native/FactorizationMachineNative/CMakeLists.txt
@@ -0,0 +1,15 @@
+cmake_minimum_required (VERSION 3.2)
+project (FactorizationMachineNative)
+
+set(SOURCES
+ FactorizationMachineCore.cpp
+)
+
+if(WIN32)
+else()
+ list(APPEND SOURCES ${VERSION_FILE_PATH})
+endif()
+
+add_library(FactorizationMachineNative SHARED ${SOURCES} ${RESOURCES})
+
+install_library_and_symbols (FactorizationMachineNative)
\ No newline at end of file
diff --git a/src/Native/FactorizationMachineNative/FactorizationMachineCore.cpp b/src/Native/FactorizationMachineNative/FactorizationMachineCore.cpp
new file mode 100644
index 0000000000..982b266683
--- /dev/null
+++ b/src/Native/FactorizationMachineNative/FactorizationMachineCore.cpp
@@ -0,0 +1,179 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+#include "../Stdafx.h"
+#include <cmath>
+#include <cstring>
+#include <limits>
+#include <pmmintrin.h>
+
+// Compute the output value of the field-aware factorization, as the sum of the linear part and the latent part.
+// The linear part is the inner product of linearWeights and featureValues.
+// The latent part is the sum of all intra-field interactions in one field f, for all fields possible.
+EXPORT_API(void) CalculateIntermediateVariablesNative(int fieldCount, int latentDim, int count, _In_ int * fieldIndices, _In_ int * featureIndices, _In_ float * featureValues,
+ _In_ float * linearWeights, _In_ float * latentWeights, _Inout_ float * latentSum, _Out_ float * response)
+{
+ // The number of all possible fields.
+ const int m = fieldCount;
+ const int d = latentDim;
+ const int c = count;
+ const int * pf = fieldIndices;
+ const int * pi = featureIndices;
+ const float * px = featureValues;
+ const float * pw = linearWeights;
+ const float * pv = latentWeights;
+ float * pq = latentSum;
+ float linearResponse = 0;
+ float latentResponse = 0;
+
+ memset(pq, 0, sizeof(float) * m * m * d);
+ __m128 _y = _mm_setzero_ps();
+ __m128 _tmp = _mm_setzero_ps();
+
+ for (int i = 0; i < c; i++)
+ {
+ const int f = pf[i];
+ const int j = pi[i];
+ linearResponse += pw[j] * px[i];
+
+ const __m128 _x = _mm_load1_ps(px + i);
+ const __m128 _xx = _mm_mul_ps(_x, _x);
+
+ // tmp -= <v_j,f, v_j,f> * x * x
+ const int vBias = j * m * d + f * d;
+
+ // j-th feature's latent vector in the f-th field hidden space.
+ const float * vjf = pv + vBias;
+
+ for (int k = 0; k + 4 <= d; k += 4)
+ {
+ const __m128 _v = _mm_load_ps(vjf + k);
+ _tmp = _mm_sub_ps(_tmp, _mm_mul_ps(_mm_mul_ps(_v, _v), _xx));
+ }
+
+ for (int fprime = 0; fprime < m; fprime++)
+ {
+ const int vBias = j * m * d + fprime * d;
+ const int qBias = f * m * d + fprime * d;
+ const float * vjfprime = pv + vBias;
+ float * qffprime = pq + qBias;
+
+ // q_f,f' += v_j,f' * x
+ for (int k = 0; k + 4 <= d; k += 4)
+ {
+ const __m128 _v = _mm_load_ps(vjfprime + k);
+ __m128 _q = _mm_load_ps(qffprime + k);
+ _q = _mm_add_ps(_q, _mm_mul_ps(_v, _x));
+ _mm_store_ps(qffprime + k, _q);
+ }
+ }
+ }
+
+ for (int f = 0; f < m; f++)
+ {
+ // tmp += <q_f,f, q_f,f>
+ const float * qff = pq + f * m * d + f * d;
+ for (int k = 0; k + 4 <= d; k += 4)
+ {
+ __m128 _qff = _mm_load_ps(qff + k);
+
+ // Intra-field interactions.
+ _tmp = _mm_add_ps(_tmp, _mm_mul_ps(_qff, _qff));
+ }
+
+ // y += <q_f,f', q_f',f>, f != f'
+ // This loop handles inter-field interactions because f != f'.
+ for (int fprime = f + 1; fprime < m; fprime++)
+ {
+ const float * qffprime = pq + f * m * d + fprime * d;
+ const float * qfprimef = pq + fprime * m * d + f * d;
+ for (int k = 0; k + 4 <= d; k += 4)
+ {
+ // Inter-field interaction.
+ __m128 _qffprime = _mm_load_ps(qffprime + k);
+ __m128 _qfprimef = _mm_load_ps(qfprimef + k);
+ _y = _mm_add_ps(_y, _mm_mul_ps(_qffprime, _qfprimef));
+ }
+ }
+ }
+
+ _y = _mm_add_ps(_y, _mm_mul_ps(_mm_set_ps1(0.5f), _tmp));
+ _tmp = _mm_add_ps(_y, _mm_movehl_ps(_y, _y));
+ _y = _mm_add_ps(_tmp, _mm_shuffle_ps(_tmp, _tmp, 1)); // The lowest slot is the response value.
+ _mm_store_ss(&latentResponse, _y);
+ *response = linearResponse + latentResponse;
+}
+
+// Calculate the stochastic gradient and update the model.
+// The /*const*/ comment on the parameters of the function means that their values should not get altered by this function.
+EXPORT_API(void) CalculateGradientAndUpdateNative(float lambdaLinear, float lambdaLatent, float learningRate, int fieldCount, int latentDim, float weight, int count,
+ _In_ int* /*const*/ fieldIndices, _In_ int* /*const*/ featureIndices, _In_ float* /*const*/ featureValues, _In_ float* /*const*/ latentSum, float slope,
+ _Inout_ float* linearWeights, _Inout_ float* latentWeights, _Inout_ float* linearAccumulatedSquaredGrads, _Inout_ float* latentAccumulatedSquaredGrads)
+{
+ const int m = fieldCount;
+ const int d = latentDim;
+ const int c = count;
+ const int * pf = fieldIndices;
+ const int * pi = featureIndices;
+ const float * px = featureValues;
+ const float * pq = latentSum;
+ float * pw = linearWeights;
+ float * pv = latentWeights;
+ float * phw = linearAccumulatedSquaredGrads;
+ float * phv = latentAccumulatedSquaredGrads;
+
+ const __m128 _wei = _mm_set_ps1(weight);
+ const __m128 _s = _mm_set_ps1(slope);
+ const __m128 _lr = _mm_set_ps1(learningRate);
+ const __m128 _lambdav = _mm_set_ps1(lambdaLatent);
+
+ for (int i = 0; i < count; i++)
+ {
+ const int f = pf[i];
+ const int j = pi[i];
+
+ // Calculate gradient of linear term w_j.
+ float g = weight * (lambdaLinear * pw[j] + slope * px[i]);
+
+ // Accumulate the gradient of the linear term.
+ phw[j] += g * g;
+
+ // Perform ADAGRAD update rule to adjust linear term.
+ pw[j] -= learningRate / sqrt(phw[j]) * g;
+
+ // Update latent term, v_j,f', f'=1,...,m.
+ const __m128 _x = _mm_load1_ps(px + i);
+ for (int fprime = 0; fprime < m; fprime++)
+ {
+ float * vjfprime = pv + j * m * d + fprime * d;
+ float * hvjfprime = phv + j * m * d + fprime * d;
+ const float * qfprimef = pq + fprime * m * d + f * d;
+ const __m128 _sx = _mm_mul_ps(_s, _x);
+
+ for (int k = 0; k + 4 <= d; k += 4)
+ {
+ __m128 _v = _mm_load_ps(vjfprime + k);
+ __m128 _q = _mm_load_ps(qfprimef + k);
+
+ // Calculate L2-norm regularization's gradient.
+ __m128 _g = _mm_mul_ps(_lambdav, _v);
+
+ // Calculate loss function's gradient.
+ if (fprime != f)
+ _g = _mm_add_ps(_g, _mm_mul_ps(_sx, _q));
+ else
+ _g = _mm_add_ps(_g, _mm_mul_ps(_sx, _mm_sub_ps(_q, _mm_mul_ps(_v, _x))));
+ _g = _mm_mul_ps(_wei, _g);
+
+ // Accumulate the gradient of latent vectors.
+ const __m128 _h = _mm_add_ps(_mm_load_ps(hvjfprime + k), _mm_mul_ps(_g, _g));
+
+ // Perform ADAGRAD update rule to adjust latent vector.
+ _v = _mm_sub_ps(_v, _mm_mul_ps(_lr, _mm_mul_ps(_mm_rsqrt_ps(_h), _g)));
+ _mm_store_ps(vjfprime + k, _v);
+ _mm_store_ps(hvjfprime + k, _h);
+ }
+ }
+ }
+}
diff --git a/src/Native/build.proj b/src/Native/build.proj
index c091a78c43..c7f4f2386e 100644
--- a/src/Native/build.proj
+++ b/src/Native/build.proj
@@ -71,12 +71,12 @@
-
-
+
diff --git a/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv b/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv
index a5b66052f7..cd83330960 100644
--- a/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv
+++ b/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv
@@ -36,6 +36,7 @@ Trainers.FastTreeBinaryClassifier Uses a logit-boost boosted tree learner to per
Trainers.FastTreeRanker Trains gradient boosted decision trees to the LambdaRank quasi-gradient. Microsoft.ML.Runtime.FastTree.FastTree TrainRanking Microsoft.ML.Runtime.FastTree.FastTreeRankingTrainer+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+RankingOutput
Trainers.FastTreeRegressor Trains gradient boosted decision trees to fit target values using least-squares. Microsoft.ML.Runtime.FastTree.FastTree TrainRegression Microsoft.ML.Runtime.FastTree.FastTreeRegressionTrainer+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+RegressionOutput
Trainers.FastTreeTweedieRegressor Trains gradient boosted decision trees to fit target values using a Tweedie loss function. This learner is a generalization of Poisson, compound Poisson, and gamma regression. Microsoft.ML.Runtime.FastTree.FastTree TrainTweedieRegression Microsoft.ML.Runtime.FastTree.FastTreeTweedieTrainer+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+RegressionOutput
+Trainers.FieldAwareFactorizationMachineBinaryClassifier Train a field-aware factorization machine for binary classification Microsoft.ML.Runtime.FactorizationMachine.FieldAwareFactorizationMachineTrainer TrainBinary Microsoft.ML.Runtime.FactorizationMachine.FieldAwareFactorizationMachineTrainer+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+BinaryClassificationOutput
Trainers.GeneralizedAdditiveModelBinaryClassifier Trains a gradient boosted stump per feature, on all features simultaneously, to fit target values using least-squares. It mantains no interactions between features. Microsoft.ML.Runtime.FastTree.Gam TrainBinary Microsoft.ML.Runtime.FastTree.BinaryClassificationGamTrainer+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+BinaryClassificationOutput
Trainers.GeneralizedAdditiveModelRegressor Trains a gradient boosted stump per feature, on all features simultaneously, to fit target values using least-squares. It mantains no interactions between features. Microsoft.ML.Runtime.FastTree.Gam TrainRegression Microsoft.ML.Runtime.FastTree.RegressionGamTrainer+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+RegressionOutput
Trainers.KMeansPlusPlusClusterer K-means is a popular clustering algorithm. With K-means, the data is clustered into a specified number of clusters in order to minimize the within-cluster sum of squares. K-means++ improves upon K-means by using a better method for choosing the initial cluster centers. Microsoft.ML.Runtime.KMeans.KMeansPlusPlusTrainer TrainKMeans Microsoft.ML.Runtime.KMeans.KMeansPlusPlusTrainer+Arguments Microsoft.ML.Runtime.EntryPoints.CommonOutputs+ClusteringOutput
diff --git a/test/BaselineOutput/Common/EntryPoints/core_manifest.json b/test/BaselineOutput/Common/EntryPoints/core_manifest.json
index b15d04c860..34c4545324 100644
--- a/test/BaselineOutput/Common/EntryPoints/core_manifest.json
+++ b/test/BaselineOutput/Common/EntryPoints/core_manifest.json
@@ -8684,6 +8684,245 @@
"ITrainerOutput"
]
},
+ {
+ "Name": "Trainers.FieldAwareFactorizationMachineBinaryClassifier",
+ "Desc": "Train a field-aware factorization machine for binary classification",
+ "FriendlyName": "Field-aware Factorization Machine",
+ "ShortName": "ffm",
+ "Inputs": [
+ {
+ "Name": "LearningRate",
+ "Type": "Float",
+ "Desc": "Initial learning rate",
+ "Aliases": [
+ "lr"
+ ],
+ "Required": false,
+ "SortOrder": 1.0,
+ "IsNullable": false,
+ "Default": 0.1,
+ "SweepRange": {
+ "RangeType": "Float",
+ "Min": 0.001,
+ "Max": 1.0,
+ "IsLogScale": true
+ }
+ },
+ {
+ "Name": "TrainingData",
+ "Type": "DataView",
+ "Desc": "The data to be used for training",
+ "Aliases": [
+ "data"
+ ],
+ "Required": true,
+ "SortOrder": 1.0,
+ "IsNullable": false
+ },
+ {
+ "Name": "Iters",
+ "Type": "Int",
+ "Desc": "Number of training iterations",
+ "Aliases": [
+ "iter"
+ ],
+ "Required": false,
+ "SortOrder": 2.0,
+ "IsNullable": false,
+ "Default": 5,
+ "SweepRange": {
+ "RangeType": "Long",
+ "Min": 1,
+ "Max": 100
+ }
+ },
+ {
+ "Name": "FeatureColumn",
+ "Type": "String",
+ "Desc": "Column to use for features",
+ "Aliases": [
+ "feat"
+ ],
+ "Required": false,
+ "SortOrder": 2.0,
+ "IsNullable": false,
+ "Default": "Features"
+ },
+ {
+ "Name": "LatentDim",
+ "Type": "Int",
+ "Desc": "Latent space dimension",
+ "Aliases": [
+ "d"
+ ],
+ "Required": false,
+ "SortOrder": 3.0,
+ "IsNullable": false,
+ "Default": 20,
+ "SweepRange": {
+ "RangeType": "Long",
+ "Min": 4,
+ "Max": 100
+ }
+ },
+ {
+ "Name": "LabelColumn",
+ "Type": "String",
+ "Desc": "Column to use for labels",
+ "Aliases": [
+ "lab"
+ ],
+ "Required": false,
+ "SortOrder": 3.0,
+ "IsNullable": false,
+ "Default": "Label"
+ },
+ {
+ "Name": "LambdaLinear",
+ "Type": "Float",
+ "Desc": "Regularization coefficient of linear weights",
+ "Aliases": [
+ "lambdaLinear"
+ ],
+ "Required": false,
+ "SortOrder": 4.0,
+ "IsNullable": false,
+ "Default": 0.0001,
+ "SweepRange": {
+ "RangeType": "Float",
+ "Min": 1E-08,
+ "Max": 1.0,
+ "IsLogScale": true
+ }
+ },
+ {
+ "Name": "LambdaLatent",
+ "Type": "Float",
+ "Desc": "Regularization coefficient of latent weights",
+ "Aliases": [
+ "lambdaLatent"
+ ],
+ "Required": false,
+ "SortOrder": 5.0,
+ "IsNullable": false,
+ "Default": 0.0001,
+ "SweepRange": {
+ "RangeType": "Float",
+ "Min": 1E-08,
+ "Max": 1.0,
+ "IsLogScale": true
+ }
+ },
+ {
+ "Name": "NormalizeFeatures",
+ "Type": {
+ "Kind": "Enum",
+ "Values": [
+ "No",
+ "Warn",
+ "Auto",
+ "Yes"
+ ]
+ },
+ "Desc": "Normalize option for the feature column",
+ "Aliases": [
+ "norm"
+ ],
+ "Required": false,
+ "SortOrder": 5.0,
+ "IsNullable": false,
+ "Default": "Auto"
+ },
+ {
+ "Name": "Norm",
+ "Type": "Bool",
+ "Desc": "Whether to normalize the input vectors so that the concatenation of all fields' feature vectors is unit-length",
+ "Aliases": [
+ "norm"
+ ],
+ "Required": false,
+ "SortOrder": 6.0,
+ "IsNullable": false,
+ "Default": true
+ },
+ {
+ "Name": "Caching",
+ "Type": {
+ "Kind": "Enum",
+ "Values": [
+ "Auto",
+ "Memory",
+ "Disk",
+ "None"
+ ]
+ },
+ "Desc": "Whether learner should cache input training data",
+ "Aliases": [
+ "cache"
+ ],
+ "Required": false,
+ "SortOrder": 6.0,
+ "IsNullable": false,
+ "Default": "Auto"
+ },
+ {
+ "Name": "Shuffle",
+ "Type": "Bool",
+ "Desc": "Whether to shuffle for each training iteration",
+ "Aliases": [
+ "shuf"
+ ],
+ "Required": false,
+ "SortOrder": 90.0,
+ "IsNullable": false,
+ "Default": true
+ },
+ {
+ "Name": "Verbose",
+ "Type": "Bool",
+ "Desc": "Report traning progress or not",
+ "Aliases": [
+ "verbose"
+ ],
+ "Required": false,
+ "SortOrder": 91.0,
+ "IsNullable": false,
+ "Default": true
+ },
+ {
+ "Name": "Radius",
+ "Type": "Float",
+ "Desc": "Radius of initial latent factors",
+ "Aliases": [
+ "rad"
+ ],
+ "Required": false,
+ "SortOrder": 110.0,
+ "IsNullable": false,
+ "Default": 0.5,
+ "SweepRange": {
+ "RangeType": "Float",
+ "Min": 0.1,
+ "Max": 1.0
+ }
+ }
+ ],
+ "Outputs": [
+ {
+ "Name": "PredictorModel",
+ "Type": "PredictorModel",
+ "Desc": "The trained model"
+ }
+ ],
+ "InputKind": [
+ "ITrainerInputWithLabel",
+ "ITrainerInput"
+ ],
+ "OutputKind": [
+ "IBinaryClassificationOutput",
+ "ITrainerOutput"
+ ]
+ },
{
"Name": "Trainers.GeneralizedAdditiveModelBinaryClassifier",
"Desc": "Trains a gradient boosted stump per feature, on all features simultaneously, to fit target values using least-squares. It mantains no interactions between features.",
diff --git a/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer-out.txt b/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer-out.txt
new file mode 100644
index 0000000000..d05c1b0b1c
--- /dev/null
+++ b/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer-out.txt
@@ -0,0 +1,77 @@
+maml.exe CV tr=FieldAwareFactorizationMachine{d=5 shuf- norm-} col[Feature]=DupFeatures threads=- norm=No dout=%Output% data=%Data% seed=1 xf=Copy{col=DupFeatures:Features} xf=MinMax{col=Features col=DupFeatures}
+Not adding a normalizer.
+Warning: Skipped 8 examples with bad label/weight/features in training set
+Not training a calibrator because it is not needed.
+Not adding a normalizer.
+Warning: Skipped 8 examples with bad label/weight/features in training set
+Not training a calibrator because it is not needed.
+Warning: The predictor produced non-finite prediction values on 8 instances during testing. Possible causes: abnormal data or the predictor is numerically unstable.
+TEST POSITIVE RATIO: 0.3785 (134.0/(134.0+220.0))
+Confusion table
+ ||======================
+PREDICTED || positive | negative | Recall
+TRUTH ||======================
+ positive || 122 | 12 | 0.9104
+ negative || 4 | 216 | 0.9818
+ ||======================
+Precision || 0.9683 | 0.9474 |
+OVERALL 0/1 ACCURACY: 0.954802
+LOG LOSS/instance: 0.259660
+Test-set entropy (prior Log-Loss/instance): 0.956998
+LOG-LOSS REDUCTION (RIG): 72.867233
+AUC: 0.984973
+Warning: The predictor produced non-finite prediction values on 8 instances during testing. Possible causes: abnormal data or the predictor is numerically unstable.
+TEST POSITIVE RATIO: 0.3191 (105.0/(105.0+224.0))
+Confusion table
+ ||======================
+PREDICTED || positive | negative | Recall
+TRUTH ||======================
+ positive || 92 | 13 | 0.8762
+ negative || 2 | 222 | 0.9911
+ ||======================
+Precision || 0.9787 | 0.9447 |
+OVERALL 0/1 ACCURACY: 0.954407
+LOG LOSS/instance: 0.260480
+Test-set entropy (prior Log-Loss/instance): 0.903454
+LOG-LOSS REDUCTION (RIG): 71.168362
+AUC: 0.967049
+
+OVERALL RESULTS
+---------------------------------------
+AUC: 0.976011 (0.0090)
+Accuracy: 0.954605 (0.0002)
+Positive precision: 0.973489 (0.0052)
+Positive recall: 0.893319 (0.0171)
+Negative precision: 0.946025 (0.0013)
+Negative recall: 0.986445 (0.0046)
+Log-loss: 0.260070 (0.0004)
+Log-loss reduction: 72.017798 (0.8494)
+F1 Score: 0.931542 (0.0069)
+AUPRC: 0.974115 (0.0054)
+
+---------------------------------------
+Physical memory usage(MB): %Number%
+Virtual memory usage(MB): %Number%
+%DateTime% Time elapsed(s): %Number%
+
+--- Progress log ---
+[1] 'Normalize' started.
+[1] (%Time%) 337 examples
+[1] 'Normalize' finished in %Time%.
+[2] 'Training' started.
+[2] (%Time%) 1 iterations, 329 examples Training-loss: 0.371414389819699
+[2] (%Time%) 2 iterations, 329 examples Training-loss: 0.225137821503565
+[2] (%Time%) 3 iterations, 329 examples Training-loss: 0.197323119398265
+[2] (%Time%) 4 iterations, 329 examples Training-loss: 0.183649426646222
+[2] (%Time%) 5 iterations, 329 examples Training-loss: 0.174400635825405
+[2] 'Training' finished in %Time%.
+[3] 'Normalize #2' started.
+[3] (%Time%) 362 examples
+[3] 'Normalize #2' finished in %Time%.
+[4] 'Training #2' started.
+[4] (%Time%) 1 iterations, 354 examples Training-loss: 0.35872800705401
+[4] (%Time%) 2 iterations, 354 examples Training-loss: 0.239609312114266
+[4] (%Time%) 3 iterations, 354 examples Training-loss: 0.210775498912242
+[4] (%Time%) 4 iterations, 354 examples Training-loss: 0.19625903089058
+[4] (%Time%) 5 iterations, 354 examples Training-loss: 0.187121580244397
+[4] 'Training #2' finished in %Time%.
diff --git a/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer-rp.txt b/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer-rp.txt
new file mode 100644
index 0000000000..b826c5ae0d
--- /dev/null
+++ b/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer-rp.txt
@@ -0,0 +1,4 @@
+FieldAwareFactorizationMachine
+AUC Accuracy Positive precision Positive recall Negative precision Negative recall Log-loss Log-loss reduction F1 Score AUPRC /d /norm /shuf Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
+0.976011 0.954605 0.973489 0.893319 0.946025 0.986445 0.26007 72.0178 0.931542 0.974115 5 - - FieldAwareFactorizationMachine %Data% %Output% 99 0 0 maml.exe CV tr=FieldAwareFactorizationMachine{d=5 shuf- norm-} col[Feature]=DupFeatures threads=- norm=No dout=%Output% data=%Data% seed=1 xf=Copy{col=DupFeatures:Features} xf=MinMax{col=Features col=DupFeatures} /d:5;/norm:-;/shuf:-
+
diff --git a/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer.txt b/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer.txt
new file mode 100644
index 0000000000..c1977a346f
--- /dev/null
+++ b/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer.txt
@@ -0,0 +1,700 @@
+Instance Label Score Probability Log-loss Assigned
+5 1 14.8638926 0.999999642 5.1594804484713121E-07 1
+6 0 -0.8998656 0.289078116 0.49223705031518167 0
+8 0 -2.48059678 0.0772296637 0.11595646754997578 0
+9 0 -2.373167 0.08524186 0.12853774899383463 0
+10 0 -1.8267622 0.138624445 0.2152857123445632 0
+11 0 -2.03787947 0.115282834 0.17671177846463382 0
+18 1 5.347374 0.9952619 0.0068518676812505449 1
+20 1 2.93836784 0.949710846 0.074439765251190851 1
+21 1 4.537781 0.989416063 0.015350773539417332 1
+25 1 0.214011192 0.5532995 0.85386750966876357 1
+28 0 -2.03787947 0.115282834 0.17671177846463382 0
+31 0 -2.22085452 0.09789331 0.14863003134088387 0
+32 1 2.94516468 0.9500345 0.073948191048563552 1
+35 0 -2.03787947 0.115282834 0.17671177846463382 0
+37 0 -3.201078 0.039125178 0.057579598806254788 0
+40 0
+41 1 -1.51869583 0.179653645 2.476709891872058 0
+44 1 6.41933727 0.9983729 0.0023493029812132662 1
+45 0 -2.01876 0.117247269 0.17991871486127162 0
+46 1 5.507395 0.9959597 0.005840729107190099 1
+48 0 -2.64206457 0.0664797947 0.099246846085645518 0
+50 1 -0.7729988 0.315830767 1.6627763744194746 0
+51 1 -1.07958174 0.2535852 1.9794576062795608 0
+52 1 1.92737579 0.87295866 0.196014759754012 1
+54 1 2.360999 0.9138046 0.13004240462108896 1
+56 1 6.64605141 0.9987025 0.0018730745676176913 1
+60 1 0.586817265 0.6426345 0.6379296358599772 1
+63 1 -1.3297224 0.209205285 2.2570087990606815 0
+64 0 -1.85676885 0.135080114 0.20936158691636991 0
+66 0 -2.56239128 0.0715984255 0.10717912551116054 0
+68 1 6.963379 0.999054968 0.0013640370953882713 1
+69 0 -2.07254 0.111794561 0.1710346893319066 0
+70 0 -2.4900763 0.0765568 0.11490487237984263 0
+71 1 2.84816265 0.94522357 0.081272490426858024 1
+72 0 -2.398419 0.08329334 0.12546794171530753 0
+73 1 7.05520535 0.999137759 0.0012444871146033098 1
+74 1 -1.02989769 0.263103932 1.926295284848996 0
+76 0 -1.87518346 0.1329431 0.20580141213002087 0
+77 0 -2.51152325 0.0750542954 0.11255941460668487 0
+79 0 -2.256354 0.09480278 0.14369594410546618 0
+82 0 -2.68407941 0.06391936 0.095295273554898599 0
+88 0 -2.56239128 0.0715984255 0.10717912551116054 0
+90 0 -2.04771137 0.114283845 0.17508366149751475 0
+91 0 -2.108456 0.108277671 0.16533355177618697 0
+92 0 -2.56239128 0.0715984255 0.10717912551116054 0
+93 0 -1.85676885 0.135080114 0.20936158691636991 0
+95 0 -2.04771137 0.114283845 0.17508366149751475 0
+96 0 -1.93042636 0.1267034 0.19545636564796123 0
+97 0 -2.730785 0.0611810647 0.091081154608133083 0
+98 1 5.869131 0.9971826 0.0040703745878533431 1
+99 1 9.67402649 0.999937057 9.0809697580461105E-05 1
+100 1 0.9169636 0.714423 0.48514956618634053 1
+102 0 -2.528028 0.0739165246 0.11078585383424393 0
+104 1 9.67022 0.9999368 9.1153684574596215E-05 1
+105 1 -1.82901669 0.138355449 2.8535486322723087 0
+106 1 9.469551 0.9999229 0.00011127706647502522 1
+108 0 -1.91807806 0.128076032 0.1977257574643819 0
+109 1 4.250736 0.9859466 0.020418590525940875 1
+111 1 1.28113461 0.782642841 0.35357401033762303 1
+112 1 3.845829 0.9790785 0.030503600596017547 1
+113 1 7.44737339 0.9994174 0.00084072413131743907 1
+115 0 -1.45452547 0.18930608 0.30277077223301041 0
+117 1 6.50040436 0.9984994 0.00216654358065164 1
+120 0 -2.10977483 0.108150378 0.16512762206914725 0
+121 0 -2.40890312 0.0824963 0.12421411952463637 0
+122 1 9.287441 0.9999074 0.0001336367089073252 1
+123 1 0.991513252 0.7293867 0.45524422695477623 1
+125 0 -1.85676885 0.135080114 0.20936158691636991 0
+128 1 2.86968613 0.946327448 0.079588624104640571 1
+129 0 -3.50972271 0.0290368516 0.042511553752884852 0
+131 0 -2.22085452 0.09789331 0.14863003134088387 0
+132 1 10.8694086 0.9999809 2.7517486566085523E-05 1
+133 0 -2.231102 0.09699208 0.1471894583221901 0
+137 0 -2.03968573 0.115098737 0.17641160645683859 0
+138 0 -2.37357259 0.08521025 0.12848789225109375 0
+141 0 -1.85219073 0.135615885 0.21025553563894767 0
+144 0 -2.03787947 0.115282834 0.17671177846463382 0
+145 0
+147 0 -2.03505254 0.115571469 0.17718252883609134 0
+150 0 -1.8267622 0.138624445 0.2152857123445632 0
+151 1 0.872104645 0.7051834 0.50392960783522212 1
+152 1 9.204662 0.9998994 0.00014516065994930907 1
+154 0 -1.66311264 0.1593446 0.25041355795755704 0
+156 0 -1.869329 0.133619383 0.20692712807027863 0
+161 0 -2.555451 0.0720611438 0.10789834857071755 0
+164 0
+167 1 3.16309261 0.9594215 0.059763301891325865 1
+169 0 -1.52618456 0.178552613 0.28375991953883767 0
+171 0 -2.04771137 0.114283845 0.17508366149751475 0
+173 1 19.4496174 1 0 1
+174 1 3.02353477 0.9536261 0.068504378788065867 1
+176 0 -2.22085452 0.09789331 0.14863003134088387 0
+177 1 4.302659 0.986648142 0.01939241122666641 1
+179 1 9.584427E-05 0.500023961 0.9999308646308972 1
+180 0 -1.8267622 0.138624445 0.2152857123445632 0
+181 0 -1.66311264 0.1593446 0.25041355795755704 0
+183 1 10.4648762 0.9999715 4.1104439112694392E-05 1
+187 1 10.78104 0.999979258 2.9925291612631146E-05 1
+188 1 8.408628 0.999777138 0.00032155739854406422 1
+189 0 -2.01109338 0.11804311 0.18121995585967673 0
+191 1 12.5877247 0.999996543 4.987505496102727E-06 1
+192 0 -2.57171249 0.0709812939 0.10622044886941406 0
+196 0 3.01742458 0.953355134 4.4221378767752633 1
+198 0 -1.66311264 0.1593446 0.25041355795755704 0
+199 0 -2.21740723 0.0981981754 0.14911766597757717 0
+201 1 6.942314 0.9990349 0.001393043875711089 1
+202 0 -2.04771137 0.114283845 0.17508366149751475 0
+204 0 -2.04771137 0.114283845 0.17508366149751475 0
+205 1 16.2102318 0.9999999 1.7198266111377426E-07 1
+206 1 4.939949 0.9928959 0.010285626570319192 1
+207 0 -1.8267622 0.138624445 0.2152857123445632 0
+209 0 -2.537307 0.07328385 0.109800582737096 0
+210 1 18.7023277 1 0 1
+211 1 10.281146 0.999965668 4.9531853723975585E-05 1
+212 0 -2.04771137 0.114283845 0.17508366149751475 0
+216 0 -1.85676885 0.135080114 0.20936158691636991 0
+218 1 7.263891 0.9993001 0.0010100636449898702 1
+219 0 -2.76439142 0.059279006 0.088161193635133334 0
+223 1 2.7308712 0.9388239 0.091073557949706299 1
+226 1 8.248712 0.9997385 0.00037729327519243937 1
+228 0 -1.8267622 0.138624445 0.2152857123445632 0
+233 1 2.361946 0.913879156 0.12992468715671707 1
+237 1 4.39291763 0.9877864 0.01772897141308883 1
+239 1 1.52190113 0.820818245 0.28486529489841783 1
+240 0 -2.17585921 0.10193938 0.15511526353124511 0
+241 0 -2.29227257 0.09176497 0.13886241586083783 0
+242 0 -2.22085452 0.09789331 0.14863003134088387 0
+244 0 -2.04771137 0.114283845 0.17508366149751475 0
+246 1 14.4728222 0.9999995 6.8793076746672365E-07 1
+247 1 0.83739996 0.697917342 0.51887191386761078 1
+248 0 -2.44329524 0.0799302459 0.12018485315955793 0
+249 0
+250 0 -1.70150161 0.154269248 0.24172965723606502 0
+252 0 1.9876461 0.879493833 3.0528211105339298 1
+254 1 6.02062225 0.9975777 0.0034988407610739244 1
+257 0 -2.21740723 0.0981981754 0.14911766597757717 0
+258 0 -2.39241457 0.08375295 0.12619145057643072 0
+259 0 2.59828758 0.930751264 3.8520684539295051 1
+260 1 6.64796829 0.998705 0.001869458244391086 1
+262 1 10.2717314 0.9999654 4.9875830876224796E-05 1
+267 1 0.8395891 0.6983786 0.51791869634276033 1
+268 1 4.901437 0.992619 0.010688056896805 1
+269 0 -2.04771137 0.114283845 0.17508366149751475 0
+271 0 -2.730785 0.0611810647 0.091081154608133083 0
+272 1 0.8395891 0.6983786 0.51791869634276033 1
+275 0
+276 0 -2.21740723 0.0981981754 0.14911766597757717 0
+277 0 -1.85676885 0.135080114 0.20936158691636991 0
+278 0 -2.04771137 0.114283845 0.17508366149751475 0
+279 1 2.53177547 0.926339567 0.11038695820005974 1
+280 0 -2.39241457 0.08375295 0.12619145057643072 0
+283 1 2.58736229 0.930043757 0.10462950074543359 1
+284 1 6.315773 0.9981957 0.0026053945661372192 1
+285 1 17.0703163 1 0 1
+288 1 -0.602478 0.353776962 1.4990879940554545 0
+290 0 -1.66311264 0.1593446 0.25041355795755704 0
+291 0 -2.04771137 0.114283845 0.17508366149751475 0
+293 1 2.63869476 0.933310747 0.099570586451478413 1
+296 0 -0.04219532 0.48945272 0.96988352376951226 0
+297 0
+299 1 2.437027 0.9196076 0.1209097376152837 1
+300 1 4.886091 0.9925057 0.010852664710009398 1
+301 0 -2.04771137 0.114283845 0.17508366149751475 0
+303 0 -2.04771137 0.114283845 0.17508366149751475 0
+304 1 2.90765285 0.9482235 0.076700989946081224 1
+308 1 4.42898655 0.988214 0.017104577035971798 1
+309 0 -2.17276287 0.102223195 0.15557127190119621 0
+311 0 -1.66311264 0.1593446 0.25041355795755704 0
+312 1 -0.368750572 0.408842951 1.2903813280998107 0
+314 0 -1.63186324 0.163575277 0.25769238876050721 0
+316 1 2.07395124 0.8883455 0.1708072411711142 1
+317 1 9.236988 0.9999026 0.00014051666860547148 1
+319 0 -0.6327839 0.346879572 0.61457906126809736 0
+321 0
+323 1 3.56552029 0.9724956 0.040236349195893271 1
+327 0 -1.85676885 0.135080114 0.20936158691636991 0
+328 1 2.31613636 0.910204649 0.13573713983693664 1
+329 1 3.052658 0.9548971 0.066582810014138891 1
+331 0 -2.85865283 0.0542357638 0.080447507227237577 0
+332 0 -2.45453167 0.0791077837 0.11889578564930094 0
+333 1 3.224123 0.9617321 0.056293037640751442 1
+336 1 4.161271 0.984651566 0.022314800350329005 1
+338 0 -1.63186324 0.163575277 0.25769238876050721 0
+343 0 -1.66311264 0.1593446 0.25041355795755704 0
+344 1 6.48078156 0.99846977 0.0022093461331651669 1
+346 0 -2.4612627 0.07861882 0.11812996027828425 0
+347 0 -1.5268147 0.178460211 0.28359764431931689 0
+348 1 -1.55794716 0.173941419 2.5233265901272413 0
+349 1 0.96990633 0.7251008 0.46374649890978753 1
+350 0 -2.50478745 0.07552324 0.11329104572942034 0
+352 0 -0.8905029 0.291006058 0.49615479540511881 0
+353 1 11.4202032 0.999989033 1.5822490644178947E-05 1
+354 0 -1.85676885 0.135080114 0.20936158691636991 0
+355 0 -2.59761286 0.06929221 0.10359981233578254 0
+358 1 4.168295 0.9847573 0.02215988217787071 1
+360 1 19.7173061 1 0 1
+361 1 4.88178444 0.9924736 0.01089936476746209 1
+366 1 18.082119 1 0 1
+368 0 -1.71223855 0.152873591 0.23935082825368728 0
+370 0 -2.466129 0.07826703 0.11757923930621997 0
+371 0 -1.71223855 0.152873591 0.23935082825368728 0
+373 0 -2.61302185 0.0683050454 0.10207041412114773 0
+376 0 -1.85676885 0.135080114 0.20936158691636991 0
+377 0 -1.63186324 0.163575277 0.25769238876050721 0
+378 0 -2.55687785 0.0719657838 0.1077500970966577 0
+379 0 -2.64790964 0.0661179647 0.098687769592959287 0
+381 1 6.70689869 0.998779 0.0017626085941359387 1
+383 0 -1.85219073 0.135615885 0.21025553563894767 0
+384 0 -1.85219073 0.135615885 0.21025553563894767 0
+387 0 -2.587854 0.06992423 0.10457983932319151 0
+388 0 -2.062164 0.11282903 0.17271593575335431 0
+389 0 -2.63233638 0.06708608 0.10018412244865801 0
+391 1 9.797784 0.999944448 8.0146141443027661E-05 1
+392 0 -2.21740723 0.0981981754 0.14911766597757717 0
+395 0 -2.21740723 0.0981981754 0.14911766597757717 0
+396 0 -2.39241457 0.08375295 0.12619145057643072 0
+398 0 -2.20704079 0.09912 0.15059314510991556 0
+399 0 -1.70332181 0.1540319 0.24132483641316704 0
+404 0 -1.84161174 0.136860788 0.21233483043736184 0
+406 0 -2.33448553 0.08830687 0.13337978519080357 0
+409 0 -2.2088275 0.09896057 0.15033785589508675 0
+413 0 -2.68322229 0.0639706552 0.095374335452958092 0
+414 1 4.470516 0.988688052 0.016412697518568688 1
+415 0 -1.37697363 0.201495469 0.32462750016760433 0
+416 1 6.49479866 0.9984909 0.0021787727517901165 1
+418 0 -2.47054315 0.07794919 0.11708183960876384 0
+419 0 -2.491969 0.07642309 0.11469599534527423 0
+422 0 -2.202886 0.09949162 0.15118839089802541 0
+423 0 -2.4900763 0.0765568 0.11490487237984263 0
+428 0 -1.85676885 0.135080114 0.20936158691636991 0
+429 0 -2.03787947 0.115282834 0.17671177846463382 0
+430 0 -1.88024449 0.1323608 0.20483286117168037 0
+434 0 4.23710155 0.985756457 6.1335481216812706 1
+436 1 2.84663773 0.9451446 0.081393036792859863 1
+439 0 -2.40992618 0.0824188963 0.12409241344887624 0
+440 1 3.80452919 0.9782154 0.031775923054351646 1
+441 0 -1.8284502 0.138423011 0.21494837550204221 0
+442 0 -1.70252347 0.154135972 0.24150232548215611 0
+449 1 12.0930557 0.9999944 8.083207235017858E-06 1
+450 0 -2.296249 0.0914341062 0.13833694513157324 0
+451 0 -2.40992618 0.0824188963 0.12409241344887624 0
+452 0 -2.17240882 0.102255695 0.1556234982659298 0
+453 1 6.529911 0.998543 0.0021035047090949962 1
+454 0 -2.07757616 0.111295484 0.17022427557546105 0
+455 1 -1.9901104 0.120245181 3.0559490175405211 0
+456 1 7.704238 0.9995492 0.00065049902446028007 1
+457 1 7.00719738 0.99909544 0.0013055949383471309 1
+464 0 -2.22559762 0.09747525 0.14796160408536918 0
+465 1 8.38367748 0.999771535 0.00032964240762353795 1
+466 1 6.553727 0.998577237 0.0020540745152688254 1
+467 1 6.02119255 0.997579157 0.0034967719595408674 1
+474 0 -2.40992618 0.0824188963 0.12409241344887624 0
+480 0 -2.20853472 0.09898668 0.15037965733571257 0
+482 1 16.26332 0.9999999 1.7198266111377426E-07 1
+483 1 11.1233063 0.9999852 2.1326006327376515E-05 1
+484 0 -2.38753271 0.08412834 0.1267826495786491 0
+487 1 13.0827723 0.999998 2.9237080272005804E-06 1
+489 1 -2.25294924 0.09509537 3.3944810328808703 0
+492 0 -2.191049 0.100557171 0.15289651175735539 0
+493 1 12.4391136 0.999996066 5.6754386418026423E-06 1
+495 0 -2.006237 0.11854963 0.18204875397128145 0
+497 0 -2.21665764 0.09826457 0.14922388345357948 0
+501 0 -2.39535141 0.0835278556 0.12583706323377797 0
+502 0 -2.446126 0.07972231 0.11985883701567156 0
+504 0 -1.66311264 0.1593446 0.25041355795755704 0
+507 0 -1.5343039 0.177364841 0.28167536187524722 0
+510 0 -1.66311264 0.1593446 0.25041355795755704 0
+513 0 -2.006237 0.11854963 0.18204875397128145 0
+514 1 11.0994911 0.99998486 2.1841961999056935E-05 1
+517 0 -1.63186324 0.163575277 0.25769238876050721 0
+519 1 4.549981 0.9895431 0.01516557768494084 1
+520 0 -1.89241147 0.130969763 0.20252171957783308 0
+521 0 -2.43429351 0.08059475 0.12122718788029851 0
+522 1 0.357010841 0.5883166 0.76533530327012156 1
+523 1 5.51915169 0.9960068 0.0057725219886650735 1
+527 0 -2.56239128 0.0715984255 0.10717912551116054 0
+528 0 -2.52255535 0.07429201 0.11137092307285647 0
+529 0 -2.191049 0.100557171 0.15289651175735539 0
+531 0 -2.33448553 0.08830687 0.13337978519080357 0
+532 0 -1.8267622 0.138624445 0.2152857123445632 0
+533 0 -2.21740723 0.0981981754 0.14911766597757717 0
+534 0 -2.03787947 0.115282834 0.17671177846463382 0
+535 0 -2.08951616 0.110119984 0.16831726594009194 0
+538 0 -2.39535141 0.0835278556 0.12583706323377797 0
+539 0 -2.74649048 0.0602851622 0.089705066266029906 0
+540 0 -2.47341466 0.07774305 0.11675934435269709 0
+541 0 -2.03968573 0.115098737 0.17641160645683859 0
+544 0 -2.07751942 0.111301087 0.17023337107337447 0
+546 1 15.9298019 0.9999999 1.7198266111377426E-07 1
+547 0 -1.63019609 0.1638035 0.25808609509773911 0
+548 0 -1.82455885 0.138887748 0.21572677983014757 0
+549 1 3.53455734 0.9716552 0.04148365754326467 1
+557 0 -2.50478745 0.07552324 0.11329104572942034 0
+558 0 -2.03787947 0.115282834 0.17671177846463382 0
+559 0 -2.57171249 0.0709812939 0.10622044886941406 0
+560 0 -2.730785 0.0611810647 0.091081154608133083 0
+561 0 -2.730785 0.0611810647 0.091081154608133083 0
+563 0 -2.21740723 0.0981981754 0.14911766597757717 0
+565 1 14.4914665 0.9999995 6.8793076746672365E-07 1
+566 0 -2.37125015 0.08539145 0.12877368417465981 0
+569 1 7.86107731 0.9996147 0.00055595503487491883 1
+577 0 -1.85676885 0.135080114 0.20936158691636991 0
+578 0 -1.85676885 0.135080114 0.20936158691636991 0
+581 1 8.679046 0.9998299 0.00024544011948576707 1
+582 1 9.267637 0.9999056 0.00013621668994928923 1
+584 0 -2.791611 0.0577791929 0.085862903474618182 0
+586 1 18.3650551 1 0 1
+590 1 0.8902283 0.7089373 0.49627008273810297 1
+593 0 -2.38753271 0.08412834 0.1267826495786491 0
+594 1 3.65131 0.9746996 0.036970418233996383 1
+600 0 -2.21740723 0.0981981754 0.14911766597757717 0
+602 0 -2.39535141 0.0835278556 0.12583706323377797 0
+604 1 1.157403 0.7608605 0.3942961227380975 1
+606 0 -2.284902 0.0923811048 0.13984145096224598 0
+607 0 -1.66311264 0.1593446 0.25041355795755704 0
+609 0 -2.40992618 0.0824188963 0.12409241344887624 0
+612 1 20.9893341 1 0 1
+613 0 -2.030234 0.1160649 0.17798764430460248 0
+614 0 -1.8198415 0.13945289 0.21667392047737177 0
+617 0
+618 0 -2.39535141 0.0835278556 0.12583706323377797 0
+619 0 -2.57171249 0.0709812939 0.10622044886941406 0
+621 0 -2.57282734 0.07090781 0.10610633667562216 0
+622 0 -2.87062454 0.0536249466 0.079516050420504789 0
+624 0 -2.41982079 0.0816737 0.12292122599990905 0
+627 0 -1.58449054 0.170160457 0.26909569048429549 0
+629 0 -2.22559762 0.09747525 0.14796160408536918 0
+633 1 1.851265 0.8642755 0.21043680463191686 1
+634 0 -2.03968573 0.115098737 0.17641160645683859 0
+638 0 -2.22559762 0.09747525 0.14796160408536918 0
+639 0 -2.50478745 0.07552324 0.11329104572942034 0
+641 0 -2.21740723 0.0981981754 0.14911766597757717 0
+642 0 -2.21740723 0.0981981754 0.14911766597757717 0
+644 0 -1.85219073 0.135615885 0.21025553563894767 0
+645 0 -2.21740723 0.0981981754 0.14911766597757717 0
+649 0 -2.21740723 0.0981981754 0.14911766597757717 0
+652 0 -2.564228 0.07147643 0.10698956187034971 0
+653 0 -2.39535141 0.0835278556 0.12583706323377797 0
+654 0 -2.39241457 0.08375295 0.12619145057643072 0
+656 0 -2.57171249 0.0709812939 0.10622044886941406 0
+657 0 -2.0722928 0.111819126 0.17107458963369743 0
+660 0 -1.85676885 0.135080114 0.20936158691636991 0
+661 0 -2.56239128 0.0715984255 0.10717912551116054 0
+665 0 -1.66311264 0.1593446 0.25041355795755704 0
+668 1 -0.192660332 0.451983333 1.1456585221797912 0
+670 1 6.66539 0.9987274 0.0018371700459368042 1
+678 0 -1.66311264 0.1593446 0.25041355795755704 0
+679 0 -1.85219073 0.135615885 0.21025553563894767 0
+680 1 20.8869 1 0 1
+681 1 12.674202 0.9999969 4.4715558520995569E-06 1
+682 0 -2.713255 0.0621957332 0.092641251857392207 0
+683 0 -1.66311264 0.1593446 0.25041355795755704 0
+685 0 -1.66311264 0.1593446 0.25041355795755704 0
+688 0 -2.22559762 0.09747525 0.14796160408536918 0
+689 0 -2.78634024 0.0580668 0.086303344802401927 0
+691 1 4.852685 0.9922531 0.011219894450214495 1
+692 0 -2.03968573 0.115098737 0.17641160645683859 0
+693 0 -2.36527443 0.08585931 0.12951188152740967 0
+694 0 -2.19330382 0.10035342 0.15256973467024601 0
+696 1 6.44385242 0.998412251 0.0022924573316975153 1
+697 1 2.56533623 0.9285971 0.1068753309513874 1
+698 1 3.769271 0.977451265 0.032903322595354806 1
+0 0 -2.301516 0.09099748 0.13764380069439297 0
+1 0 1.08247185 0.7469615 1.9825710402688395 1
+2 0 -2.0523982 0.113810278 0.17431250107108243 0
+3 0 3.463801 0.96964 5.0416851857458864 1
+4 0 -2.25727749 0.09472357 0.14356969816808129 0
+7 0 -2.013621 0.117780194 0.18078994480689475 0
+12 1 -0.8284931 0.30396378 1.7180286690667717 0
+13 0 -1.81933558 0.139513627 0.21677574901409538 0
+14 1 7.468815 0.999429643 0.00082308574166617431 1
+15 1 -0.935884 0.2817325 1.8276020977115841 0
+16 0 -2.098476 0.109245047 0.16689949442685795 0
+17 0 -2.22533417 0.09749843 0.14799865604012269 0
+19 0 -2.36852455 0.08560456 0.12910989118919178 0
+22 0 -2.00196 0.118997283 0.18278162694192454 0
+23 1
+24 0 -1.941751 0.125455618 0.19339649381178925 0
+26 0 -1.70507562 0.153803527 0.24093542302960627 0
+27 0 -2.18581915 0.101031184 0.15365702371483531 0
+29 0 -1.6321876 0.1635309 0.25761585008130428 0
+30 0 -1.73291934 0.150214538 0.23482943239457851 0
+33 0 -2.09316373 0.109763056 0.16773872242540308 0
+34 0 -1.966197 0.122797951 0.18901891314128982 0
+36 1 10.6047812 0.9999752 3.5772834884537144E-05 1
+38 1 5.09487247 0.99390924 0.0088139788276411432 1
+39 1 0.138988018 0.534691155 0.90322228350613509 1
+42 1 8.104832 0.999698043 0.00043569783898110006 1
+43 1 -2.051857 0.113864884 3.1346052100634472 0
+47 0 -1.78140879 0.144129261 0.22453517072067464 0
+49 1 6.031123 0.997603 0.0034622923707363488 1
+53 1 2.5970068 0.930668652 0.10366048147588253 1
+55 1 3.37955284 0.967059433 0.048323537425725815 1
+57 1 -2.29733229 0.09134414 3.4525440037349249 0
+58 1 -0.8062577 0.308688521 1.6957762577022222 0
+59 1 -0.20775795 0.448246568 1.1576355563258656 0
+61 0 -1.73861909 0.149488419 0.23359721489187701 0
+62 1 5.937831 0.99736917 0.0038004865334286451 1
+65 1 -1.92219067 0.127617478 2.9701021620868731 0
+67 1 1.6232996 0.835249662 0.25972060045188861 1
+75 0 -1.96936083 0.122457556 0.18845918980961393 0
+78 0 -1.95251036 0.124279886 0.19145824716485596 0
+80 0 -2.1607275 0.103333026 0.15735583372897294 0
+81 0 -2.12950277 0.106262207 0.16207646288515981 0
+83 0 -2.52023387 0.07445183 0.11162001257935196 0
+84 1 7.28208447 0.999312758 0.00099182083151890563 1
+85 1 3.20175552 0.9609003 0.057541335571880493 1
+86 1 1.07764864 0.746048748 0.42265819240738722 1
+87 1 6.03411 0.9976101 0.0034520348521077638 1
+89 0 -2.220188 0.0979522 0.14872421333055375 0
+94 0 -2.045452 0.114512727 0.17545652289353517 0
+101 1 -0.09225035 0.4769538 1.0680785545608487 0
+103 1 -2.52788973 0.07392599 3.7577745918272321 0
+107 1 6.7885294 0.9988746 0.0016245164986375925 1
+110 0 -2.08183551 0.110874891 0.16954166005159219 0
+114 0 -1.657285 0.16012679 0.25175654520308971 0
+116 0 -1.4446578 0.1908251 0.30547653402158781 0
+118 0 -2.08073568 0.110983357 0.16971766707069735 0
+119 0 -1.99037921 0.120216757 0.18477997210748806 0
+124 1 4.98317528 0.993194342 0.0098520525459535446 1
+126 1 4.85608768 0.9922792 0.011182023351539032 1
+127 0 -2.13997936 0.105271339 0.16047786433454483 0
+130 0 -2.02583027 0.116517484 0.17872651087564395 0
+134 0 -2.27046967 0.0935983658 0.14177763258855564 0
+135 0 -2.082193 0.110839657 0.16948449080217048 0
+136 0 -2.098476 0.109245047 0.16689949442685795 0
+139 0
+140 0 -1.860422 0.134653866 0.20865077658844469 0
+142 1 1.5520792 0.82521385 0.2771600607430223 1
+143 0 -1.42160451 0.19441016 0.31188260684972846 0
+146 1 -0.379899025 0.406151235 1.2999110632727831 0
+148 0 -3.333778 0.03443041 0.050547855906756659 0
+149 1 9.453949 0.9999217 0.0001129970266666251 1
+153 0 -1.79716134 0.142196968 0.22128167987188255 0
+155 1 2.7138834 0.9378409 0.092584929720156431 1
+157 0 -2.045452 0.114512727 0.17545652289353517 0
+158 0
+159 1 14.5143137 0.9999995 6.8793076746672365E-07 1
+160 1 8.880978 0.999861 0.0002005457089583406 1
+162 0 -2.13997936 0.105271339 0.16047786433454483 0
+163 0 -2.36457872 0.0859139338 0.12959808560549185 0
+165 0 -2.09007239 0.110065483 0.16822891030896434 0
+166 1 8.115719 0.9997012 0.00043113892933886136 1
+168 0 -2.13997936 0.105271339 0.16047786433454483 0
+170 0 -1.860422 0.134653866 0.20865077658844469 0
+172 0 -1.78140879 0.144129261 0.22453517072067464 0
+175 1 7.61374569 0.9995066 0.00071201186227698389 1
+178 0 -2.22533417 0.09749843 0.14799865604012269 0
+182 0 -2.36852455 0.08560456 0.12910989118919178 0
+184 1 4.644288 0.990475237 0.013807187789500337 1
+185 0 -1.77662516 0.144720361 0.22553189902602172 0
+186 1 5.66861629 0.9965592 0.0049725809792141133 1
+190 1 16.6872864 1 0 1
+193 0 -1.941751 0.125455618 0.19339649381178925 0
+194 0 -2.13997936 0.105271339 0.16047786433454483 0
+195 0 -2.22533417 0.09749843 0.14799865604012269 0
+197 0 -2.29312468 0.091693975 0.13874964503480286 0
+200 1 10.1933537 0.999962568 5.4003563100270837E-05 1
+203 0 -2.301516 0.09099748 0.13764380069439297 0
+208 0 -1.67101777 0.158288538 0.24860233230888198 0
+213 1 20.2759647 1 0 1
+214 1 18.7733364 1 0 1
+215 1 8.191004 0.9997229 0.00039982907120573395 1
+217 0 -1.941751 0.125455618 0.19339649381178925 0
+220 0 -2.00243449 0.118947558 0.18270020126928377 0
+221 1 7.44619 0.9994167 0.00084175662910008565 1
+222 1 -1.34953451 0.205946475 2.2796586599059694 0
+224 1 9.052607 0.999882936 0.00016889684917020562 1
+225 0 -1.78140879 0.144129261 0.22453517072067464 0
+227 1 8.601936 0.999816358 0.00026496360361006585 1
+229 1 15.556097 0.9999999 1.7198266111377426E-07 1
+230 1 4.348646 0.9872406 0.018526350463759503 1
+231 1 8.508919 0.999798357 0.00029093798765384679 1
+232 0 -0.296713352 0.426361144 0.80178534411673164 0
+234 0 -1.87558413 0.132896915 0.20572457748992548 0
+235 0
+236 1 9.828637 0.9999461 7.7738252578338893E-05 1
+238 1 15.6847239 0.9999999 1.7198266111377426E-07 1
+243 0 -1.85970509 0.134737432 0.20879010310197682 0
+245 0 -2.17634916 0.101894535 0.15504322373043186 0
+251 1 6.219199 0.998013139 0.00286928622354919 1
+253 1 8.104832 0.999698043 0.00043569783898110006 1
+255 1 3.94488955 0.9810141 0.027654262469072717 1
+256 0 -1.860422 0.134653866 0.20865077658844469 0
+261 1 12.4782257 0.9999962 5.5034553246245386E-06 1
+263 1 6.391608 0.9983272 0.0024153673304377296 1
+264 1 2.26732063 0.906134069 0.14220357185362809 1
+265 0 -1.91125607 0.1288398 0.19899006005896697 0
+266 1 7.600045 0.9994998 0.00072181974597906125 1
+270 1 5.563095 0.9961778 0.0055248450255732757 1
+273 1 -1.02924967 0.263229579 1.925606483115087 0
+274 0 -2.136303 0.105618112 0.16103712188187333 0
+281 0 -2.09316373 0.109763056 0.16773872242540308 0
+282 1 3.098658 0.9568373 0.06365446955853514 1
+286 1 19.419363 1 0 1
+287 0 -2.27046967 0.0935983658 0.14177763258855564 0
+289 1 8.124609 0.999703944 0.00042718215151575466 1
+292 1
+294 0
+295 1 5.98761 0.997496545 0.0036162501096248033 1
+298 0 -2.98175526 0.04825695 0.071355963047900345 0
+302 1 18.99006 1 0 1
+305 1 8.560013 0.9998085 0.00027631658811241448 1
+306 0 -1.941751 0.125455618 0.19339649381178925 0
+307 0 -1.941751 0.125455618 0.19339649381178925 0
+310 0 -2.16668749 0.1027821 0.1564696923073676 0
+313 0 -1.6175487 0.165543213 0.26109075530209375 0
+315 0
+318 0 -2.750764 0.0600435175 0.089334129517283936 0
+320 1 2.96006584 0.950737059 0.072881698029347464 1
+322 0 -2.13997936 0.105271339 0.16047786433454483 0
+324 0 -1.941751 0.125455618 0.19339649381178925 0
+325 0 -1.86101615 0.13458465 0.20853538526771875 0
+326 1 0.5836396 0.6419045 0.6395694800066366 1
+330 1 3.19974613 0.9608247 0.05765481382577594 1
+334 1 5.354478 0.995295346 0.0068033977028160051 1
+335 0 -1.6175487 0.165543213 0.26109075530209375 0
+337 0 -1.941751 0.125455618 0.19339649381178925 0
+339 1 4.42046261 0.9881143 0.01725016366406085 1
+340 1 3.32619667 0.9653167 0.050925737892124082 1
+341 0 -1.941751 0.125455618 0.19339649381178925 0
+342 0 -1.74357152 0.148859844 0.23253137628148873 0
+345 0 -1.6175487 0.165543213 0.26109075530209375 0
+351 0 -2.045452 0.114512727 0.17545652289353517 0
+356 1 -1.66799617 0.158691525 2.6557030083293012 0
+357 1 12.1684132 0.999994755 7.5672564839008544E-06 1
+359 1 2.620923 0.9321961 0.10129464764951893 1
+362 0 -2.19840622 0.0998937041 0.15183271169511453 0
+363 0 -1.04904556 0.2594084 0.43324993672264289 0
+364 0 -2.045452 0.114512727 0.17545652289353517 0
+365 0 -1.89627123 0.130531073 0.20179362469978843 0
+367 1 13.7444353 0.9999989 1.5478446880940214E-06 1
+369 0 -1.49699593 0.182874 0.29136952685596818 0
+372 0 -2.04446387 0.114612974 0.17561986216689546 0
+374 0 -1.966197 0.122797951 0.18901891314128982 0
+375 0 -1.6175487 0.165543213 0.26109075530209375 0
+380 0 -1.6175487 0.165543213 0.26109075530209375 0
+382 0 -1.98493266 0.120794 0.18572686024059626 0
+385 0 -2.27838588 0.09292892 0.14071249291381016 0
+386 1 4.234085 0.985713959 0.020759038727296567 1
+390 0 -1.85135007 0.135714456 0.21042006464991661 0
+393 0 -1.342573 0.207087249 0.33476596791809693 0
+394 0 -1.731577 0.150385961 0.23512048938573521 0
+397 0 -1.96809924 0.122593194 0.1886821979796787 0
+400 1 8.218968 0.9997305 0.00038881917268506155 1
+401 0 -1.860422 0.134653866 0.20865077658844469 0
+402 0 -1.81246018 0.140341058 0.21816369183653808 0
+403 0 -1.63434482 0.163236037 0.25710737448281606 0
+405 0 -1.78140879 0.144129261 0.22453517072067464 0
+407 0 -1.78140879 0.144129261 0.22453517072067464 0
+408 0 -1.77392912 0.145054385 0.22609544513509036 0
+410 0 -1.78140879 0.144129261 0.22453517072067464 0
+411 0
+412 1 9.160485 0.999894857 0.00015169667309156297 1
+417 0 -1.78140879 0.144129261 0.22453517072067464 0
+420 0 -1.71119952 0.1530082 0.23958008015495064 0
+421 1 12.2299891 0.9999951 7.0513059173031522E-06 1
+424 0 -1.860422 0.134653866 0.20865077658844469 0
+425 1 20.1651058 1 0 1
+426 0 -1.3227582 0.210359767 0.34073259653634197 0
+427 1 4.300104 0.9866145 0.019441567487860585 1
+431 0 -2.75610447 0.0597428158 0.088872670403648199 0
+432 0 -2.164125 0.103018656 0.15685011613830954 0
+433 0 -1.83914936 0.137151927 0.21282153695787007 0
+435 1 7.36268234 0.999365866 0.00091515190632679476 1
+437 0 -1.96809924 0.122593194 0.1886821979796787 0
+438 0 -1.880899 0.132285655 0.2047079142247506 0
+443 0 -1.58702278 0.169803187 0.26847470165960907 0
+444 0 -2.40749931 0.08260262 0.12438130785824263 0
+445 0 -1.74357152 0.148859844 0.23253137628148873 0
+446 0 -1.6175487 0.165543213 0.26109075530209375 0
+447 0 -2.06660366 0.1123854 0.17199469628631101 0
+448 0 -1.342573 0.207087249 0.33476596791809693 0
+458 0 -2.00865936 0.118296735 0.18163489276241526 0
+459 0 -1.93524909 0.126170725 0.19457665456288356 0
+460 0 -2.10963583 0.108163789 0.16514931652661516 0
+461 0 -1.5343715 0.177354991 0.2816580881405864 0
+462 0 -2.17302465 0.102199174 0.15553267206171928 0
+463 0 -1.97221267 0.12215142 0.18795598317957285 0
+468 0 -1.96809924 0.122593194 0.1886821979796787 0
+469 0 -1.78214443 0.144038543 0.22438225994148536 0
+470 0 -1.73291934 0.150214538 0.23482943239457851 0
+471 0 -2.17302465 0.102199174 0.15553267206171928 0
+472 0 -1.89049411 0.131188139 0.20288429650077927 0
+473 0 -1.96809924 0.122593194 0.1886821979796787 0
+475 0 -1.860422 0.134653866 0.20865077658844469 0
+476 0 -1.92840433 0.1269273 0.19582630620444338 0
+477 0 -1.96809924 0.122593194 0.1886821979796787 0
+478 0 -2.01690173 0.117439739 0.18023330693767575 0
+479 1 9.129778 0.999891639 0.00015634070042369144 1
+481 0 -1.77698469 0.144675866 0.2254568465498844 0
+485 0 -1.37343407 0.202065587 0.32565792737640359 0
+486 0 -1.73291934 0.150214538 0.23482943239457851 0
+488 1 1.15169907 0.7598212 0.39626817381950297 1
+490 0 -1.6175487 0.165543213 0.26109075530209375 0
+491 1 6.43022442 0.998390555 0.0023238082916929833 1
+494 0 -1.0844636 0.252662241 0.42016767992874143 0
+496 0 -1.342573 0.207087249 0.33476596791809693 0
+498 0 -2.098476 0.109245047 0.16689949442685795 0
+499 0 -2.098476 0.109245047 0.16689949442685795 0
+500 0 -2.36852455 0.08560456 0.12910989118919178 0
+503 0 -2.22533417 0.09749843 0.14799865604012269 0
+505 0 -1.81462574 0.14007999 0.21772562877705068 0
+506 1 12.0963869 0.9999944 8.083207235017858E-06 1
+508 0 -2.06660366 0.1123854 0.17199469628631101 0
+509 0 -1.74357152 0.148859844 0.23253137628148873 0
+511 0 -2.18581915 0.101031184 0.15365702371483531 0
+512 0 -2.06660366 0.1123854 0.17199469628631101 0
+515 1 10.156889 0.999961138 5.6067433641037978E-05 1
+516 0 -1.342573 0.207087249 0.33476596791809693 0
+518 0 -1.87690234 0.132745087 0.20547198672775793 0
+524 0 -2.00196 0.118997283 0.18278162694192454 0
+525 0 -1.91367817 0.128568187 0.19854031339238309 0
+526 0 -1.96809924 0.122593194 0.1886821979796787 0
+530 1 4.653802 0.9905646 0.013677053104199231 1
+536 0 -2.301516 0.09099748 0.13764380069439297 0
+537 0 -2.21493769 0.09841708 0.14946791169661081 0
+542 0 -1.95570588 0.123932526 0.19088610479236329 0
+543 0 -2.098476 0.109245047 0.16689949442685795 0
+545 0 -2.18581915 0.101031184 0.15365702371483531 0
+550 0 -2.00196 0.118997283 0.18278162694192454 0
+551 0 -1.941751 0.125455618 0.19339649381178925 0
+552 0 -2.07113647 0.111934021 0.17126122926502999 0
+553 0 -1.193924 0.232557878 0.38187014349940485 0
+554 0 -1.860422 0.134653866 0.20865077658844469 0
+555 0 -1.62672949 0.16427888 0.25890649895897555 0
+556 0 -1.76245427 0.146483228 0.22850859107163557 0
+562 0 -1.941751 0.125455618 0.19339649381178925 0
+564 0 -2.20704937 0.09911924 0.15059192808996283 0
+567 0 -1.90300918 0.129768282 0.20052849433295125 0
+568 1 1.72357225 0.8485884 0.23686312545110236 1
+570 1 5.530344 0.996051 0.0057084620367186976 1
+571 1 12.7975969 0.999997258 3.955606392614897E-06 1
+572 0 -2.00196 0.118997283 0.18278162694192454 0
+573 0 -1.78140879 0.144129261 0.22453517072067464 0
+574 1 5.131028 0.994124234 0.0085019411118018116 1
+575 0 -2.21493769 0.09841708 0.14946791169661081 0
+576 0 -2.18581915 0.101031184 0.15365702371483531 0
+579 0 -1.941751 0.125455618 0.19339649381178925 0
+580 0 -2.11355782 0.107786037 0.1645383687069783 0
+583 0 -1.860422 0.134653866 0.20865077658844469 0
+585 0 -1.6175487 0.165543213 0.26109075530209375 0
+587 0 -2.164125 0.103018656 0.15685011613830954 0
+588 1 3.97299385 0.9815305 0.026895014627516617 1
+589 0 -2.06660366 0.1123854 0.17199469628631101 0
+591 1 5.15082 0.994238734 0.008335784993429584 1
+592 1 2.59083414 0.930269361 0.10427958420150241 1
+595 0 -2.18581915 0.101031184 0.15365702371483531 0
+596 0 -2.04446387 0.114612974 0.17561986216689546 0
+597 0 -2.112893 0.107849985 0.16464177579708919 0
+598 0 -2.00196 0.118997283 0.18278162694192454 0
+599 0 -1.47923768 0.1855426 0.29608885109001915 0
+601 0 -1.5085547 0.181153089 0.28833433912434342 0
+603 1 2.79953 0.9426504 0.085205310002869877 1
+605 1 9.113677 0.999889851 0.00015892072206770877 1
+608 1 11.1271286 0.999985337 2.1154021144488122E-05 1
+610 1 4.917906 0.992738664 0.010514112839565644 1
+611 1 7.66873455 0.999533057 0.00067381337182235643 1
+615 0 -1.9749856 0.121854387 0.18746790978262812 0
+616 0 -2.00196 0.118997283 0.18278162694192454 0
+620 0 -2.00196 0.118997283 0.18278162694192454 0
+623 0 -1.6175487 0.165543213 0.26109075530209375 0
+625 0 -1.72655845 0.15102832 0.23621166606060956 0
+626 1 3.22024632 0.9615891 0.056507555310947317 1
+628 0 -1.74357152 0.148859844 0.23253137628148873 0
+630 0 -1.88439143 0.131885275 0.20404238182768375 0
+631 0 -2.18581915 0.101031184 0.15365702371483531 0
+632 0 -1.6175487 0.165543213 0.26109075530209375 0
+635 0 -1.74968672 0.148086727 0.23122152652399849 0
+636 1 9.544065 0.999928355 0.00010336527600414941 1
+637 0 -1.525428 0.178663611 0.28395487805787722 0
+640 0 -2.03707385 0.115365021 0.17684580637902217 0
+643 0 -1.6175487 0.165543213 0.26109075530209375 0
+646 0 -1.62914348 0.163947731 0.25833495461491246 0
+647 0 -1.76435113 0.146246225 0.22810804177334876 0
+648 1 11.25837 0.9999871 1.8574245861463165E-05 1
+650 0 -1.90433264 0.1296189 0.20028086228354752 0
+651 0 -1.88349569 0.13198787 0.20421289076099527 0
+655 0 -2.00196 0.118997283 0.18278162694192454 0
+658 1 8.6298 0.999821365 0.00025773902362411926 1
+659 0 -1.6175487 0.165543213 0.26109075530209375 0
+662 0 -1.76741409 0.1458642 0.22746263715138998 0
+663 0 -1.76741409 0.1458642 0.22746263715138998 0
+664 0 -2.07836151 0.111217827 0.17009821449285314 0
+666 0 -2.084568 0.110605806 0.16910510831886344 0
+667 0 -2.13997936 0.105271339 0.16047786433454483 0
+669 1 11.2461758 0.9999869 1.8918215632667518E-05 1
+671 0 -2.11648464 0.1075049 0.16408384130497666 0
+672 0 -2.045452 0.114512727 0.17545652289353517 0
+673 0 -2.11998677 0.107169338 0.1635415204810986 0
+674 0 -1.78140879 0.144129261 0.22453517072067464 0
+675 0 -1.80261636 0.141532868 0.22016519609417487 0
+676 0 -1.78214443 0.144038543 0.22438225994148536 0
+677 0 -2.06660366 0.1123854 0.17199469628631101 0
+684 0 -1.6175487 0.165543213 0.26109075530209375 0
+686 0 -1.6175487 0.165543213 0.26109075530209375 0
+687 0 -2.00984144 0.11817351 0.18143327856658856 0
+690 0 -1.76435113 0.146246225 0.22810804177334876 0
+695 0 -1.74357152 0.148859844 0.23253137628148873 0
diff --git a/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer-out.txt b/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer-out.txt
new file mode 100644
index 0000000000..3805af6e7c
--- /dev/null
+++ b/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer-out.txt
@@ -0,0 +1,51 @@
+maml.exe TrainTest test=%Data% tr=FieldAwareFactorizationMachine{d=5 shuf- norm-} col[Feature]=DupFeatures norm=No dout=%Output% data=%Data% out=%Output% seed=1 xf=Copy{col=DupFeatures:Features} xf=MinMax{col=Features col=DupFeatures}
+Not adding a normalizer.
+Warning: Skipped 16 examples with bad label/weight/features in training set
+Not training a calibrator because it is not needed.
+Warning: The predictor produced non-finite prediction values on 16 instances during testing. Possible causes: abnormal data or the predictor is numerically unstable.
+TEST POSITIVE RATIO: 0.3499 (239.0/(239.0+444.0))
+Confusion table
+ ||======================
+PREDICTED || positive | negative | Recall
+TRUTH ||======================
+ positive || 215 | 24 | 0.8996
+ negative || 7 | 437 | 0.9842
+ ||======================
+Precision || 0.9685 | 0.9479 |
+OVERALL 0/1 ACCURACY: 0.954612
+LOG LOSS/instance: 0.228754
+Test-set entropy (prior Log-Loss/instance): 0.934003
+LOG-LOSS REDUCTION (RIG): 75.508177
+AUC: 0.982029
+
+OVERALL RESULTS
+---------------------------------------
+AUC: 0.982029 (0.0000)
+Accuracy: 0.954612 (0.0000)
+Positive precision: 0.968468 (0.0000)
+Positive recall: 0.899582 (0.0000)
+Negative precision: 0.947939 (0.0000)
+Negative recall: 0.984234 (0.0000)
+Log-loss: 0.228754 (0.0000)
+Log-loss reduction: 75.508177 (0.0000)
+F1 Score: 0.932755 (0.0000)
+AUPRC: 0.980228 (0.0000)
+
+---------------------------------------
+Physical memory usage(MB): %Number%
+Virtual memory usage(MB): %Number%
+%DateTime% Time elapsed(s): %Number%
+
+--- Progress log ---
+[1] 'Normalize' started.
+[1] (%Time%) 699 examples
+[1] 'Normalize' finished in %Time%.
+[2] 'Training' started.
+[2] (%Time%) 1 iterations, 683 examples Training-loss: 0.306117119945184
+[2] (%Time%) 2 iterations, 683 examples Training-loss: 0.193084570883075
+[2] (%Time%) 3 iterations, 683 examples Training-loss: 0.173782368769797
+[2] (%Time%) 4 iterations, 683 examples Training-loss: 0.163879262610855
+[2] (%Time%) 5 iterations, 683 examples Training-loss: 0.157117446501075
+[2] 'Training' finished in %Time%.
+[3] 'Saving model' started.
+[3] 'Saving model' finished in %Time%.
diff --git a/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer-rp.txt b/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer-rp.txt
new file mode 100644
index 0000000000..4a01e926c6
--- /dev/null
+++ b/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer-rp.txt
@@ -0,0 +1,4 @@
+FieldAwareFactorizationMachine
+AUC Accuracy Positive precision Positive recall Negative precision Negative recall Log-loss Log-loss reduction F1 Score AUPRC /d /norm /shuf Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
+0.982029 0.954612 0.968468 0.899582 0.947939 0.984234 0.228754 75.50818 0.932755 0.980228 5 - - FieldAwareFactorizationMachine %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=FieldAwareFactorizationMachine{d=5 shuf- norm-} col[Feature]=DupFeatures norm=No dout=%Output% data=%Data% out=%Output% seed=1 xf=Copy{col=DupFeatures:Features} xf=MinMax{col=Features col=DupFeatures} /d:5;/norm:-;/shuf:-
+
diff --git a/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer.txt b/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer.txt
new file mode 100644
index 0000000000..e142c5c952
--- /dev/null
+++ b/test/BaselineOutput/SingleDebug/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer.txt
@@ -0,0 +1,700 @@
+Instance Label Score Probability Log-loss Assigned
+0 0 -2.79334736 0.0576847345 0.085718279282943594 0
+1 0 1.188056 0.7663932 2.0978457302965743 1
+2 0 -2.51387787 0.074891 0.11230473674761222 0
+3 0 2.8827734 0.9469883 4.2375449640754628 1
+4 0 -2.73850226 0.0607392974 0.090402444870538778 0
+5 1 17.67403 1 0 1
+6 0 -1.42012978 0.194641247 0.31229651002770953 0
+7 0 -2.48911548 0.07662476 0.11501104509053926 0
+8 0 -3.05736852 0.0449004173 0.066276932485278203 0
+9 0 -2.49013543 0.07655262 0.11489834233223327 0
+10 0 -2.08552837 0.110511363 0.16895191862826214 0
+11 0 -2.303132 0.0908639 0.1374318066173198 0
+12 1 -1.27195454 0.218922853 2.1915055288260206 0
+13 0 -2.25197268 0.09517944 0.14429638095999855 0
+14 1 8.572502 0.9998109 0.00027287628034393304 1
+15 1 -1.27670574 0.218111485 2.196862353947556 0
+16 0 -2.546958 0.07263111 0.10878476978943961 0
+17 0 -2.70365119 0.06275825 0.09350687664921721 0
+18 1 6.88258553 0.998975635 0.0014786043638188881 1
+19 0 -2.87238669 0.0535355881 0.079379834832595766 0
+20 1 3.6811657 0.975425541 0.035896345222225338 1
+21 1 6.199198 0.997973 0.0029272747638287261 1
+22 0 -2.43037367 0.08088568 0.12168377661511501 0
+23 1
+24 0 -2.37061882 0.08544078 0.12885149942120921 0
+25 1 0.301486969 0.574806 0.79885303657013707 1
+26 0 -2.121753 0.107000455 0.16326865504132451 0
+27 0 -2.652885 0.06581142 0.098214282791589516 0
+28 0 -2.303132 0.0908639 0.1374318066173198 0
+29 0 -2.03972268 0.115094975 0.17640547222344355 0
+30 0 -2.15111279 0.104227282 0.15879536747436127 0
+31 0 -2.49228716 0.07640065 0.11466094104662786 0
+32 1 4.41975975 0.988106 0.017262260285248437 1
+33 0 -2.54244328 0.0729358 0.10925884045202583 0
+34 0 -2.4251883 0.08127202 0.12229032950777963 0
+35 0 -2.303132 0.0908639 0.1374318066173198 0
+36 1 11.464077 0.9999895 1.5134552659953847E-05 1
+37 0 -2.90807438 0.05175586 0.076669544650713642 0
+38 1 5.226836 0.9946582 0.0077272859162861246 1
+39 1 0.192784309 0.548047364 0.86762751464658039 1
+40 0
+41 1 -0.340168953 0.415768445 1.266147828359969 0
+42 1 8.853592 0.9998572 0.00020604992936022588 1
+43 1 -2.117641 0.107394 3.2190146693586854 0
+44 1 9.450965 0.999921441 0.0001133410189510066 1
+45 0 -2.40569448 0.0827394947 0.12459657233005934 0
+46 1 5.376622 0.995397866 0.0066548011445776789 1
+47 0 -2.16523314 0.1029163 0.15668549713042432 0
+48 0 -2.73850226 0.0607392974 0.090402444870538778 0
+49 1 5.89553928 0.997255862 0.0039643965487975831 1
+50 1 -0.227107048 0.443465978 1.1731046676718506 0
+51 1 -0.8415785 0.301202446 1.7311946079503711 0
+52 1 3.04452515 0.9545456 0.067113950169056663 1
+53 1 3.61555958 0.9738029 0.038298262118380257 1
+54 1 3.77800655 0.977643 0.032620334610649768 1
+55 1 3.194067 0.9606104 0.057976682417311451 1
+56 1 7.91791248 0.999635935 0.00052533064883456481 1
+57 1 -2.86445379 0.0539389737 4.2125281183154604 0
+58 1 -0.969594 0.274961442 1.8626987732996008 0
+59 1 -0.6607013 0.3405821 1.553925470966943 0
+60 1 1.0063467 0.732304633 0.44948417232333177 1
+61 0 -2.1292522 0.106286012 0.16211488941610727 0
+62 1 6.41300774 0.9983626 0.0023642038022797952 1
+63 1 -1.33857679 0.207744211 2.2671198197586828 0
+64 0 -2.16523314 0.1029163 0.15668549713042432 0
+65 1 -2.526939 0.073991105 3.7565043460709084 0
+66 0 -2.70365119 0.06275825 0.09350687664921721 0
+67 1 1.78247547 0.8560023 0.22431347044655056 1
+68 1 7.800373 0.9995906 0.00059079536563381421 1
+69 0 -2.402561 0.08297762 0.12497115587899781 0
+70 0 -2.56230116 0.0716044158 0.10718843415121884 0
+71 1 3.8944397 0.9800513 0.029070857780294054 1
+72 0 -2.60696554 0.06869147 0.10266890201899616 0
+73 1 8.50068 0.9997967 0.00029334623238004185 1
+74 1 -0.107892036 0.473053157 1.0799257855156403 0
+75 0 -2.46654248 0.07823721 0.11753257018815799 0
+76 0 -2.22932768 0.0971476 0.14743794057217838 0
+77 0 -2.47708964 0.077479966 0.11634785291703036 0
+78 0 -2.41123319 0.08232011 0.1239371005276802 0
+79 0 -2.58705831 0.06997599 0.10466012826020632 0
+80 0 -2.76758575 0.0591011234 0.087888417643977923 0
+81 0 -2.58375478 0.07019129 0.10499414907116332 0
+82 0 -2.69794416 0.06309478 0.094024986527322621 0
+83 0 -3.1041286 0.042937275 0.06331461422116523 0
+84 1 8.480363 0.9997925 0.00029936686178311424 1
+85 1 3.62424469 0.9740235 0.0379714836237331 1
+86 1 0.873991 0.705575466 0.50312769764048526 1
+87 1 6.5370903 0.998553336 0.0020886065803920047 1
+88 0 -2.70365119 0.06275825 0.09350687664921721 0
+89 0 -2.64684486 0.06618374 0.098789382822258742 0
+90 0 -2.37061882 0.08544078 0.12885149942120921 0
+91 0 -2.31983972 0.08949312 0.13525817349750544 0
+92 0 -2.70365119 0.06275825 0.09350687664921721 0
+93 0 -2.16523314 0.1029163 0.15668549713042432 0
+94 0 -2.49228716 0.07640065 0.11466094104662786 0
+95 0 -2.37061882 0.08544078 0.12885149942120921 0
+96 0 -2.19050217 0.10060665 0.15297587812753632 0
+97 0 -2.79334736 0.0576847345 0.085718279282943594 0
+98 1 5.520876 0.996013641 0.0057625933731902997 1
+99 1 11.3165894 0.999987841 1.7542337039908934E-05 1
+100 1 1.017416 0.734468937 0.44522661908047623 1
+101 1 -0.5716233 0.360862345 1.4704794870706024 0
+102 0 -2.66489172 0.0650770739 0.097080658952617693 0
+103 1 -2.88350773 0.05297488 4.2385477822371218 0
+104 1 13.1776581 0.9999981 2.751725038055267E-06 1
+105 1 -1.78037786 0.144256487 2.7932918938163764 0
+106 1 12.0229883 0.99999404 8.5991581706542968E-06 1
+107 1 7.51091671 0.999453247 0.00078901414592929902 1
+108 0 -2.25749826 0.0947046354 0.14353952759458233 0
+109 1 5.029805 0.993502438 0.0094045870149739464 1
+110 0 -2.56023145 0.07174213 0.10740245681394917 0
+111 1 1.76195621 0.85345453 0.22861380249667038 1
+112 1 5.949276 0.9973991 0.0037572056709905387 1
+113 1 9.109059 0.9998894 0.00015960872861869104 1
+114 0 -2.12039614 0.107130177 0.16347824413516199 0
+115 0 -1.8000406 0.14184612 0.22069172747506174 0
+116 0 -1.96252012 0.123194568 0.18967135852054179 0
+117 1 7.06990051 0.9991504 0.0012262413366360865 1
+118 0 -2.46007347 0.07870501 0.11826493158533567 0
+119 0 -2.44013214 0.0801631659 0.1205501245335563 0
+120 0 -2.431875 0.08077414 0.12150871514409342 0
+121 0 -2.50224543 0.07570092 0.11356834207574736 0
+122 1 10.23144 0.999964 5.1939695512002597E-05 1
+123 1 0.452518463 0.6112378 0.71019427344485664 1
+124 1 5.87042236 0.997186244 0.0040651143062878699 1
+125 0 -2.16523314 0.1029163 0.15668549713042432 0
+126 1 5.55969334 0.9961648 0.0055436631836815291 1
+127 0 -2.60329771 0.06892648 0.10303300888915028 0
+128 1 2.87807369 0.9467519 0.07894169481269693 1
+129 0 -4.29203844 0.0134924809 0.019598047692599548 0
+130 0 -2.56230116 0.0716044158 0.10718843415121884 0
+131 0 -2.49228716 0.07640065 0.11466094104662786 0
+132 1 11.527173 0.9999901 1.4274630640975531E-05 1
+133 0 -2.447955 0.0795882344 0.11964866931231606 0
+134 0 -2.709362 0.0624231733 0.092991182628501673 0
+135 0 -2.57368875 0.07085109 0.10601826253397174 0
+136 0 -2.546958 0.07263111 0.10878476978943961 0
+137 0 -2.253997 0.095005244 0.14401866240282399 0
+138 0 -2.52397871 0.0741941854 0.11121847131930589 0
+139 0
+140 0 -2.253997 0.095005244 0.14401866240282399 0
+141 0 -2.11052465 0.108078077 0.16501067045825779 0
+142 1 1.40640354 0.8031981 0.31617223990371063 1
+143 0 -1.8000406 0.14184612 0.22069172747506174 0
+144 0 -2.303132 0.0908639 0.1374318066173198 0
+145 0
+146 1 -0.608948231 0.352299154 1.5051270852918262 0
+147 0 -2.410581 0.08236938 0.12401456162100966 0
+148 0 -3.790462 0.0220863409 0.032221001014681734 0
+149 1 11.1856136 0.9999862 1.9950125438339949E-05 1
+150 0 -2.08552837 0.110511363 0.16895191862826214 0
+151 1 0.7081995 0.6700032 0.57776016109760775 1
+152 1 12.8261318 0.999997258 3.955606392614897E-06 1
+153 0 -2.22495723 0.0975316 0.14805168093637969 0
+154 0 -1.95639491 0.123857722 0.19076292420475424 0
+155 1 2.53439236 0.9265179 0.11010924008025842 1
+156 0 -2.179071 0.10164573 0.15464360520785025 0
+157 0 -2.49228716 0.07640065 0.11466094104662786 0
+158 0
+159 1 15.6741524 0.9999999 1.7198266111377426E-07 1
+160 1 9.474756 0.9999232 0.00011076107881739477 1
+161 0 -2.68982267 0.06357657 0.094767065231191869 0
+162 0 -2.60329771 0.06892648 0.10303300888915028 0
+163 0 -2.98970175 0.0478932858 0.070804811806733092 0
+164 0
+165 0 -2.566331 0.07133699 0.10677292670868219 0
+166 1 8.84706 0.999856234 0.00020742598774193681 1
+167 1 4.890644 0.992539465 0.010803626944692387 1
+168 0 -2.60329771 0.06892648 0.10303300888915028 0
+169 0 -1.76260591 0.146464273 0.22847655310970594 0
+170 0 -2.253997 0.095005244 0.14401866240282399 0
+171 0 -2.37061882 0.08544078 0.12885149942120921 0
+172 0 -2.16523314 0.1029163 0.15668549713042432 0
+173 1 22.0507126 1 0 1
+174 1 4.101033 0.9837141 0.023689008090051845 1
+175 1 8.803769 0.9998498 0.00021671441614871164 1
+176 0 -2.49228716 0.07640065 0.11466094104662786 0
+177 1 5.009384 0.9933693 0.0095979609700506362 1
+178 0 -2.70365119 0.06275825 0.09350687664921721 0
+179 1 -0.333506584 0.417387664 1.2605401318838831 0
+180 0 -2.08552837 0.110511363 0.16895191862826214 0
+181 0 -1.95639491 0.123857722 0.19076292420475424 0
+182 0 -2.87238669 0.0535355881 0.079379834832595766 0
+183 1 11.7801619 0.9999924 1.1006931643385188E-05 1
+184 1 4.80740261 0.991897166 0.011737536892369548 1
+185 0 -2.212134 0.09866614 0.14986650442098268 0
+186 1 5.877905 0.9972071 0.0040349327336088392 1
+187 1 12.8788414 0.9999975 3.6116401888020084E-06 1
+188 1 10.4632988 0.9999714 4.1276426673909545E-05 1
+189 0 -2.33616543 0.08817172 0.13316594116300465 0
+190 1 18.1290474 1 0 1
+191 1 13.2267113 0.9999982 2.5797420694119618E-06 1
+192 0 -2.652885 0.06581142 0.098214282791589516 0
+193 0 -2.37061882 0.08544078 0.12885149942120921 0
+194 0 -2.60329771 0.06892648 0.10303300888915028 0
+195 0 -2.70365119 0.06275825 0.09350687664921721 0
+196 0 4.31835938 0.986853361 6.249162189120602 1
+197 0 -2.78983569 0.0578759126 0.086011004864571042 0
+198 0 -1.95639491 0.123857722 0.19076292420475424 0
+199 0 -2.43037367 0.08088568 0.12168377661511501 0
+200 1 11.15968 0.9999858 2.0466080617959804E-05 1
+201 1 8.401458 0.999775469 0.00032396569438284837 1
+202 0 -2.37061882 0.08544078 0.12885149942120921 0
+203 0 -2.79334736 0.0576847345 0.085718279282943594 0
+204 0 -2.37061882 0.08544078 0.12885149942120921 0
+205 1 17.8429222 1 0 1
+206 1 6.46274567 0.998442 0.0022494800623383254 1
+207 0 -2.08552837 0.110511363 0.16895191862826214 0
+208 0 -2.08552837 0.110511363 0.16895191862826214 0
+209 0 -2.52800727 0.07391794 0.11078805913818714 0
+210 1 21.9436874 1 0 1
+211 1 13.0268564 0.999997854 3.0956910368479058E-06 1
+212 0 -2.37061882 0.08544078 0.12885149942120921 0
+213 1 22.5284481 1 0 1
+214 1 20.4712334 1 0 1
+215 1 8.525514 0.9998017 0.00028612151026144098 1
+216 0 -2.16523314 0.1029163 0.15668549713042432 0
+217 0 -2.37061882 0.08544078 0.12885149942120921 0
+218 1 9.455612 0.9999218 0.00011282503055519205 1
+219 0 -2.714654 0.0621141754 0.092515790827243469 0
+220 0 -2.43490982 0.08054909 0.12115554621880931 0
+221 1 9.159876 0.999894857 0.00015169667309156297 1
+222 1 -1.80774 0.14091149 2.8271388431147284 0
+223 1 3.70435333 0.9759753 0.035083495285141632 1
+224 1 9.788629 0.999944 8.0834110428232468E-05 1
+225 0 -2.16523314 0.1029163 0.15668549713042432 0
+226 1 10.3669252 0.9999685 4.5404134294015166E-05 1
+227 1 8.810473 0.999850869 0.00021516634059471509 1
+228 0 -2.08552837 0.110511363 0.16895191862826214 0
+229 1 17.6545982 1 0 1
+230 1 4.3147707 0.9868068 0.019160423251940339 1
+231 1 9.426738 0.9999194 0.00011626495667983538 1
+232 0 -0.812353134 0.3073893 0.52988339666190787 0
+233 1 3.34824562 0.9660473 0.049834285709967709 1
+234 0 -2.38382244 0.08441466 0.12723373315285244 0
+235 0
+236 1 10.7871552 0.9999794 2.9753305404609972E-05 1
+237 1 6.06969929 0.9976935 0.0033314498282712543 1
+238 1 16.8129368 1 0 1
+239 1 2.5463686 0.9273292 0.10884653830325543 1
+240 0 -2.16806126 0.102655485 0.15626611390641998 0
+241 0 -2.40176368 0.08303831 0.12506663099210433 0
+242 0 -2.49228716 0.07640065 0.11466094104662786 0
+243 0 -2.34700274 0.0873043 0.13179416296824689 0
+244 0 -2.37061882 0.08544078 0.12885149942120921 0
+245 0 -2.65912771 0.06542865 0.097623286813018989 0
+246 1 17.1139336 1 0 1
+247 1 1.33216 0.7911977 0.33788983133296008 1
+248 0 -2.59526634 0.0694436952 0.10383464946038788 0
+249 0
+250 0 -2.00470734 0.118709572 0.18231055874000973 0
+251 1 6.54015732 0.998557866 0.0020820617863629202 1
+252 0 2.0749712 0.8884466 3.1641939848631071 1
+253 1 8.853592 0.9998572 0.00020604992936022588 1
+254 1 6.41300774 0.9983626 0.0023642038022797952 1
+255 1 3.7826376 0.977744043 0.032471253848047714 1
+256 0 -2.253997 0.095005244 0.14401866240282399 0
+257 0 -2.43037367 0.08088568 0.12168377661511501 0
+258 0 -2.60329771 0.06892648 0.10303300888915028 0
+259 0 2.46353149 0.9215453 3.6719968033606416 1
+260 1 8.69649 0.999832869 0.0002411398280924362 1
+261 1 14.626255 0.9999995 6.8793076746672365E-07 1
+262 1 11.909255 0.9999933 9.6310605954860058E-06 1
+263 1 7.177536 0.999237061 0.0011011090587247259 1
+264 1 3.13080788 0.958145857 0.061682803281407773 1
+265 0 -2.517743 0.0746236444 0.111887857908842 0
+266 1 7.81477261 0.9995964 0.00058236478868097896 1
+267 1 0.570395947 0.638854563 0.64644055930061062 1
+268 1 7.142889 0.9992101 0.0011400073388397399 1
+269 0 -2.37061882 0.08544078 0.12885149942120921 0
+270 1 5.226963 0.9946589 0.0077262484789402371 1
+271 0 -2.79334736 0.0576847345 0.085718279282943594 0
+272 1 0.570395947 0.638854563 0.64644055930061062 1
+273 1 -1.541533 0.176312521 2.5037931614232587 0
+274 0 -2.60472631 0.06883486 0.10289105134144588 0
+275 0
+276 0 -2.43037367 0.08088568 0.12168377661511501 0
+277 0 -2.16523314 0.1029163 0.15668549713042432 0
+278 0 -2.37061882 0.08544078 0.12885149942120921 0
+279 1 3.33338261 0.9655565 0.050567409752020474 1
+280 0 -2.60329771 0.06892648 0.10303300888915028 0
+281 0 -2.54244328 0.0729358 0.10925884045202583 0
+282 1 3.29053974 0.964102864 0.052741012904588061 1
+283 1 3.700367 0.9758816 0.035222007864660711 1
+284 1 7.36733246 0.999368846 0.00091084961823976909 1
+285 1 20.352438 1 0 1
+286 1 21.9562721 1 0 1
+287 0 -2.709362 0.0624231733 0.092991182628501673 0
+288 1 -0.586700439 0.357392281 1.4844196184802794 0
+289 1 8.823527 0.9998528 0.0002124142103781059 1
+290 0 -1.95639491 0.123857722 0.19076292420475424 0
+291 0 -2.37061882 0.08544078 0.12885149942120921 0
+292 1
+293 1 3.77584267 0.9775957 0.032690174791706848 1
+294 0
+295 1 6.63497543 0.998688161 0.0018938255499461618 1
+296 0 0.0507936478 0.51269567 1.0371050534420929 1
+297 0
+298 0 -3.44531822 0.0309087858 0.045295631375104138 0
+299 1 3.52736 0.971456349 0.041778923210622917 1
+300 1 6.63810539 0.9986922 0.00188797047076344 1
+301 0 -2.37061882 0.08544078 0.12885149942120921 0
+302 1 21.63637 1 0 1
+303 0 -2.37061882 0.08544078 0.12885149942120921 0
+304 1 3.82702637 0.97868973 0.031076534032479262 1
+305 1 9.007427 0.999877453 0.00017680899902768678 1
+306 0 -2.37061882 0.08544078 0.12885149942120921 0
+307 0 -2.37061882 0.08544078 0.12885149942120921 0
+308 1 6.49249649 0.9984875 0.0021837677951713447 1
+309 0 -2.367922 0.08565173 0.12918431538000874 0
+310 0 -2.58705831 0.06997599 0.10466012826020632 0
+311 0 -1.95639491 0.123857722 0.19076292420475424 0
+312 1 0.721348763 0.672903955 0.571527494298949 1
+313 0 -1.95639491 0.123857722 0.19076292420475424 0
+314 0 -1.876097 0.132837832 0.20562627776187412 0
+315 0
+316 1 1.90932846 0.870943666 0.19934868964045313 1
+317 1 11.0919495 0.999984741 2.2013947263955502E-05 1
+318 0 -3.22425485 0.0382631 0.056285823149524759 0
+319 0 -0.5100851 0.375173569 0.67847261153874627 0
+320 1 3.40061569 0.9677238 0.047332770769414431 1
+321 0
+322 0 -2.60329771 0.06892648 0.10303300888915028 0
+323 1 4.2277 0.985623837 0.020890948016973505 1
+324 0 -2.37061882 0.08544078 0.12885149942120921 0
+325 0 -2.33610034 0.08817695 0.13317421658132386 0
+326 1 0.397289276 0.59803617 0.74169535175171442 1
+327 0 -2.16523314 0.1029163 0.15668549713042432 0
+328 1 3.29097939 0.964118063 0.052718268842774099 1
+329 1 4.33795357 0.98710525 0.018724174263564545 1
+330 1 2.89053345 0.947376549 0.077990134080806572 1
+331 0 -2.99748755 0.0475395061 0.070268841071409238 0
+332 0 -2.44493961 0.0798094 0.11999537191367729 0
+333 1 3.38947582 0.967374 0.047854291800112088 1
+334 1 5.177353 0.994388759 0.0081181070550104366 1
+335 0 -1.95639491 0.123857722 0.19076292420475424 0
+336 1 4.698082 0.9909696 0.013087296874456903 1
+337 0 -2.37061882 0.08544078 0.12885149942120921 0
+338 0 -1.876097 0.132837832 0.20562627776187412 0
+339 1 4.343502 0.987175643 0.018621295533697572 1
+340 1 3.39017 0.9673959 0.047821668989543743 1
+341 0 -2.37061882 0.08544078 0.12885149942120921 0
+342 0 -2.11052465 0.108078077 0.16501067045825779 0
+343 0 -1.95639491 0.123857722 0.19076292420475424 0
+344 1 7.7177906 0.9995553 0.00064172398023939646 1
+345 0 -1.95639491 0.123857722 0.19076292420475424 0
+346 0 -2.565462 0.07139457 0.10686237809729705 0
+347 0 -1.79727888 0.142182633 0.22125757090956827 0
+348 1 -1.28854942 0.216098443 2.2102394195203448 0
+349 1 1.21367073 0.770947754 0.37529500004713245 1
+350 0 -2.57519341 0.0707521 0.10586457082203461 0
+351 0 -2.49228716 0.07640065 0.11466094104662786 0
+352 0 -1.23204327 0.225824 0.36926650823776686 0
+353 1 11.2146692 0.9999865 1.9434170443242565E-05 1
+354 0 -2.16523314 0.1029163 0.15668549713042432 0
+355 0 -2.8185854 0.0563280769 0.083642715320436059 0
+356 1 -2.201137 0.09964843 3.3270091010964316 0
+357 1 14.1900883 0.9999993 1.031896274211761E-06 1
+358 1 5.223918 0.9946426 0.0077498503631306228 1
+359 1 2.35608578 0.9134167 0.13065495391893572 1
+360 1 21.4813557 1 0 1
+361 1 5.294572 0.9950063 0.0072224014614089942 1
+362 0 -2.63699365 0.06679519 0.099734355600720456 0
+363 0 -1.5586791 0.173836261 0.27550035442459786 0
+364 0 -2.49228716 0.07640065 0.11466094104662786 0
+365 0 -2.303132 0.0908639 0.1374318066173198 0
+366 1 19.947937 1 0 1
+367 1 15.1915445 0.999999762 3.4396534272948301E-07 1
+368 0 -2.03972268 0.115094975 0.17640547222344355 0
+369 0 -1.93949687 0.125703141 0.19380487905648439 0
+370 0 -2.61506629 0.0681750551 0.10186914304032915 0
+371 0 -2.03972268 0.115094975 0.17640547222344355 0
+372 0 -2.52397871 0.0741941854 0.11121847131930589 0
+373 0 -2.67554665 0.06443181 0.096085283332132315 0
+374 0 -2.4251883 0.08127202 0.12229032950777963 0
+375 0 -1.95639491 0.123857722 0.19076292420475424 0
+376 0 -2.16523314 0.1029163 0.15668549713042432 0
+377 0 -1.876097 0.132837832 0.20562627776187412 0
+378 0 -2.79220486 0.0577468649 0.085813404732194079 0
+379 0 -2.74447179 0.06039962 0.089880800720694584 0
+380 0 -1.95639491 0.123857722 0.19076292420475424 0
+381 1 8.746121 0.999841 0.00022944310035113074 1
+382 0 -2.4639585 0.07842376 0.11782457436958683 0
+383 0 -2.11052465 0.108078077 0.16501067045825779 0
+384 0 -2.11052465 0.108078077 0.16501067045825779 0
+385 0 -2.79314542 0.05769571 0.085735081775246871 0
+386 1 4.176774 0.984884 0.021974246797932556 1
+387 0 -2.563189 0.0715454146 0.10709675129529037 0
+388 0 -2.33597 0.08818744 0.13319081471442681 0
+389 0 -2.77166653 0.0588746034 0.087541132766088936 0
+390 0 -2.24443483 0.09583059 0.14533498514227941 0
+391 1 12.28669 0.999995351 6.7073389754153415E-06 1
+392 0 -2.43037367 0.08088568 0.12168377661511501 0
+393 0 -1.663213 0.159331158 0.25039049156336257 0
+394 0 -2.18103456 0.101466566 0.15435590868043605 0
+395 0 -2.43037367 0.08088568 0.12168377661511501 0
+396 0 -2.60329771 0.06892648 0.10303300888915028 0
+397 0 -2.386812 0.0841838941 0.12687015785045277 0
+398 0 -2.39560652 0.0835083351 0.12580633468481714 0
+399 0 -2.0224514 0.116865739 0.17929531088912914 0
+400 1 9.442846 0.9999207 0.00011437299629627494 1
+401 0 -2.253997 0.095005244 0.14401866240282399 0
+402 0 -2.32173133 0.0893391 0.13501415250433074 0
+403 0 -2.04644132 0.114412457 0.17529316569822423 0
+404 0 -2.143785 0.104913421 0.15990085822156069 0
+405 0 -2.16523314 0.1029163 0.15668549713042432 0
+406 0 -2.44775438 0.07960292 0.11967168757719346 0
+407 0 -2.16523314 0.1029163 0.15668549713042432 0
+408 0 -2.22424483 0.09759433 0.14815195953491689 0
+409 0 -2.4251883 0.08127202 0.12229032950777963 0
+410 0 -2.16523314 0.1029163 0.15668549713042432 0
+411 0
+412 1 10.2703123 0.9999653 5.0047819483104426E-05 1
+413 0 -2.73679447 0.0608368 0.090552214775961787 0
+414 1 5.42697239 0.9956228 0.0063288062748059282 1
+415 0 -1.37955284 0.201080829 0.32387854587592818 0
+416 1 7.32360268 0.9993406 0.00095163582501576051 1
+417 0 -2.16523314 0.1029163 0.15668549713042432 0
+418 0 -2.48652363 0.07680834 0.11529790529919315 0
+419 0 -2.92190623 0.05108122 0.075643488086837282 0
+420 0 -2.17014742 0.102463476 0.15595744686821872 0
+421 1 14.1832743 0.9999993 1.031896274211761E-06 1
+422 0 -2.21935058 0.09802621 0.14884258107085416 0
+423 0 -2.56230116 0.0716044158 0.10718843415121884 0
+424 0 -2.253997 0.095005244 0.14401866240282399 0
+425 1 23.2536659 1 0 1
+426 0 -1.97403908 0.121955715 0.18763438978981944 0
+427 1 3.93143272 0.9807618 0.028025268445269182 1
+428 0 -2.16523314 0.1029163 0.15668549713042432 0
+429 0 -2.303132 0.0908639 0.1374318066173198 0
+430 0 -2.1786294 0.10168606 0.15470837385047881 0
+431 0 -3.24330187 0.0375683233 0.055243967912172835 0
+432 0 -2.63995266 0.06661098 0.099449594195521407 0
+433 0 -2.278904 0.09288526 0.14064305278762584 0
+434 0 4.94386959 0.9929235 7.1427479209872091 1
+435 1 7.12867451 0.999198854 0.0011562726386102314 1
+436 1 4.3324194 0.9870346 0.018827408813728724 1
+437 0 -2.386812 0.0841838941 0.12687015785045277 0
+438 0 -2.38723946 0.08415094 0.12681824612014014 0
+439 0 -2.50897 0.07523173 0.11283619840279428 0
+440 1 6.34583759 0.998249054 0.0025282952766864627 1
+441 0 -1.81499052 0.140036061 0.21765193123843524 0
+442 0 -2.020601 0.117056854 0.17960755137882647 0
+443 0 -1.93714356 0.125962 0.19423209736428568 0
+444 0 -2.91968775 0.0511888638 0.075807152007862255 0
+445 0 -2.11052465 0.108078077 0.16501067045825779 0
+446 0 -1.95639491 0.123857722 0.19076292420475424 0
+447 0 -2.50897 0.07523173 0.11283619840279428 0
+448 0 -1.663213 0.159331158 0.25039049156336257 0
+449 1 13.44416 0.999998569 2.0637932864940443E-06 1
+450 0 -2.347867 0.08723546 0.13168534659555281 0
+451 0 -2.50897 0.07523173 0.11283619840279428 0
+452 0 -2.37676024 0.08496209 0.12809658311879946 0
+453 1 7.50886631 0.999452055 0.00079073491429955357 1
+454 0 -2.37708616 0.08493676 0.12805664400028163 0
+455 1 -1.54050016 0.176462576 2.5025658459084399 0
+456 1 10.2714491 0.9999654 4.9875830876224796E-05 1
+457 1 9.928566 0.999951243 7.0342619050273796E-05 1
+458 0 -2.483977 0.07698911 0.11558042055485453 0
+459 0 -2.44994617 0.0794424862 0.11942023498557051 0
+460 0 -2.57519341 0.0707521 0.10586457082203461 0
+461 0 -1.95080662 0.124465436 0.19176396054905676 0
+462 0 -2.65842414 0.06547169 0.097689720656269172 0
+463 0 -2.40528941 0.0827702358 0.12464492366430695 0
+464 0 -2.386812 0.0841838941 0.12687015785045277 0
+465 1 11.50494 0.999989867 1.4618599387059818E-05 1
+466 1 8.586574 0.9998135 0.00026909195127407725 1
+467 1 6.92544365 0.999018431 0.0014168006049970199 1
+468 0 -2.386812 0.0841838941 0.12687015785045277 0
+469 0 -2.1956358 0.100143082 0.15223247204165077 0
+470 0 -2.15111279 0.104227282 0.15879536747436127 0
+471 0 -2.65842414 0.06547169 0.097689720656269172 0
+472 0 -2.35197687 0.0869087651 0.13116907530431268 0
+473 0 -2.386812 0.0841838941 0.12687015785045277 0
+474 0 -2.50897 0.07523173 0.11283619840279428 0
+475 0 -2.253997 0.095005244 0.14401866240282399 0
+476 0 -2.379613 0.08474059 0.12774738819728162 0
+477 0 -2.386812 0.0841838941 0.12687015785045277 0
+478 0 -2.45095563 0.0793686956 0.11930459528595055 0
+479 1 9.896286 0.9999497 7.2578504258149067E-05 1
+480 0 -2.23553848 0.09660421 0.14656990931584829 0
+481 0 -2.2461 0.0956864059 0.14510494370298066 0
+482 1 19.90526 1 0 1
+483 1 13.2226048 0.9999982 2.5797420694119618E-06 1
+484 0 -2.483977 0.07698911 0.11558042055485453 0
+485 0 -1.75304985 0.147662938 0.23050402825087019 0
+486 0 -2.15111279 0.104227282 0.15879536747436127 0
+487 1 16.2562943 0.9999999 1.7198266111377426E-07 1
+488 1 0.896533 0.710236549 0.49362849030318379 1
+489 1 -2.11052513 0.108078033 3.2098547752295934 0
+490 0 -1.95639491 0.123857722 0.19076292420475424 0
+491 1 6.42460346 0.9983815 0.0023369001030092021 1
+492 0 -2.27263451 0.09341486 0.14148557773460221 0
+493 1 13.9223871 0.999999046 1.3758618629646341E-06 1
+494 0 -1.544085 0.175942212 0.27918258388346984 0
+495 0 -2.15111279 0.104227282 0.15879536747436127 0
+496 0 -1.663213 0.159331158 0.25039049156336257 0
+497 0 -2.34868431 0.08717041 0.13158253208596138 0
+498 0 -2.546958 0.07263111 0.10878476978943961 0
+499 0 -2.546958 0.07263111 0.10878476978943961 0
+500 0 -2.87238669 0.0535355881 0.079379834832595766 0
+501 0 -2.546958 0.07263111 0.10878476978943961 0
+502 0 -2.58375478 0.07019129 0.10499414907116332 0
+503 0 -2.70365119 0.06275825 0.09350687664921721 0
+504 0 -1.95639491 0.123857722 0.19076292420475424 0
+505 0 -2.207967 0.09903733 0.15046075844804158 0
+506 1 13.1352835 0.999998 2.9237080272005804E-06 1
+507 0 -1.85896659 0.134823546 0.20893369205937654 0
+508 0 -2.50897 0.07523173 0.11283619840279428 0
+509 0 -2.11052465 0.108078077 0.16501067045825779 0
+510 0 -1.95639491 0.123857722 0.19076292420475424 0
+511 0 -2.652885 0.06581142 0.098214282791589516 0
+512 0 -2.50897 0.07523173 0.11283619840279428 0
+513 0 -2.15111279 0.104227282 0.15879536747436127 0
+514 1 13.1494493 0.9999981 2.751725038055267E-06 1
+515 1 11.2234116 0.999986649 1.926218548588174E-05 1
+516 0 -1.663213 0.159331158 0.25039049156336257 0
+517 0 -1.876097 0.132837832 0.20562627776187412 0
+518 0 -2.3127768 0.09007031 0.1361730175390829 0
+519 1 4.88246536 0.9924787 0.010891913451001586 1
+520 0 -2.249304 0.09540951 0.14466326997414256 0
+521 0 -2.66490865 0.0650760457 0.097079072351726234 0
+522 1 2.02141762 0.883027554 0.17946963916334691 1
+523 1 6.809782 0.998898268 0.0015903398846003496 1
+524 0 -2.43037367 0.08088568 0.12168377661511501 0
+525 0 -2.31983972 0.08949312 0.13525817349750544 0
+526 0 -2.386812 0.0841838941 0.12687015785045277 0
+527 0 -2.70365119 0.06275825 0.09350687664921721 0
+528 0 -2.51398182 0.0748837963 0.11229350114078318 0
+529 0 -2.27263451 0.09341486 0.14148557773460221 0
+530 1 4.92369843 0.992780268 0.010453653133861311 1
+531 0 -2.44775438 0.07960292 0.11967168757719346 0
+532 0 -2.08552837 0.110511363 0.16895191862826214 0
+533 0 -2.43037367 0.08088568 0.12168377661511501 0
+534 0 -2.303132 0.0908639 0.1374318066173198 0
+535 0 -2.41980267 0.08167506 0.12292336799782667 0
+536 0 -2.79334736 0.0576847345 0.085718279282943594 0
+537 0 -2.73679447 0.0608368 0.090552214775961787 0
+538 0 -2.546958 0.07263111 0.10878476978943961 0
+539 0 -2.74815464 0.0601909533 0.089560439570368855 0
+540 0 -2.543486 0.0728653148 0.10914915993929039 0
+541 0 -2.253997 0.095005244 0.14401866240282399 0
+542 0 -2.38339472 0.08444773 0.12728583594126508 0
+543 0 -2.546958 0.07263111 0.10878476978943961 0
+544 0 -2.34906387 0.08714021 0.13153480725866068 0
+545 0 -2.652885 0.06581142 0.098214282791589516 0
+546 1 18.4379044 1 0 1
+547 0 -1.82228041 0.139160469 0.21618376511481502 0
+548 0 -1.97069025 0.122314766 0.18822445904268387 0
+549 1 5.37553024 0.9953929 0.0066619714409914422 1
+550 0 -2.43037367 0.08088568 0.12168377661511501 0
+551 0 -2.37061882 0.08544078 0.12885149942120921 0
+552 0 -2.567891 0.07123372 0.10661249990663338 0
+553 0 -1.67024612 0.158391356 0.24877857334123982 0
+554 0 -2.253997 0.095005244 0.14401866240282399 0
+555 0 -2.15087152 0.104249813 0.15883165472574934 0
+556 0 -2.25254679 0.09513001 0.14421757377759611 0
+557 0 -2.57519341 0.0707521 0.10586457082203461 0
+558 0 -2.303132 0.0908639 0.1374318066173198 0
+559 0 -2.652885 0.06581142 0.098214282791589516 0
+560 0 -2.79334736 0.0576847345 0.085718279282943594 0
+561 0 -2.79334736 0.0576847345 0.085718279282943594 0
+562 0 -2.37061882 0.08544078 0.12885149942120921 0
+563 0 -2.43037367 0.08088568 0.12168377661511501 0
+564 0 -2.68982267 0.06357657 0.094767065231191869 0
+565 1 16.7462845 1 0 1
+566 0 -2.58233213 0.07028419 0.10513830229728056 0
+567 0 -2.35164356 0.08693522 0.13121087830396089 0
+568 1 1.66488457 0.840892553 0.25000662577655486 1
+569 1 11.8811359 0.9999931 9.9750282344474815E-06 1
+570 1 6.743778 0.9988232 0.001698726443122326 1
+571 1 13.4375887 0.999998569 2.0637932864940443E-06 1
+572 0 -2.43037367 0.08088568 0.12168377661511501 0
+573 0 -2.16523314 0.1029163 0.15668549713042432 0
+574 1 5.289529 0.9949812 0.0072587859583563598 1
+575 0 -2.73679447 0.0608368 0.090552214775961787 0
+576 0 -2.652885 0.06581142 0.098214282791589516 0
+577 0 -2.16523314 0.1029163 0.15668549713042432 0
+578 0 -2.16523314 0.1029163 0.15668549713042432 0
+579 0 -2.37061882 0.08544078 0.12885149942120921 0
+580 0 -2.61211181 0.06836298 0.10216012821346472 0
+581 1 9.491055 0.9999244 0.00010904112129145599 1
+582 1 10.5543289 0.9999739 3.7664692194035506E-05 1
+583 0 -2.253997 0.095005244 0.14401866240282399 0
+584 0 -2.854372 0.0544557646 0.080783141512571238 0
+585 0 -1.95639491 0.123857722 0.19076292420475424 0
+586 1 22.14612 1 0 1
+587 0 -2.63995266 0.06661098 0.099449594195521407 0
+588 1 4.183667 0.984986365 0.021824341396277779 1
+589 0 -2.50897 0.07523173 0.11283619840279428 0
+590 1 2.24761486 0.904444635 0.14489590312889089 1
+591 1 5.37478161 0.995389462 0.0066669820307222651 1
+592 1 2.52463913 0.925851166 0.11114780094090568 1
+593 0 -2.483977 0.07698911 0.11558042055485453 0
+594 1 3.883813 0.9798425 0.029378249558157231 1
+595 0 -2.652885 0.06581142 0.098214282791589516 0
+596 0 -2.52397871 0.0741941854 0.11121847131930589 0
+597 0 -2.67120314 0.06469413 0.096489851264687398 0
+598 0 -2.43037367 0.08088568 0.12168377661511501 0
+599 0 -2.042253 0.114837505 0.17598577040139771 0
+600 0 -2.43037367 0.08088568 0.12168377661511501 0
+601 0 -1.876097 0.132837832 0.20562627776187412 0
+602 0 -2.546958 0.07263111 0.10878476978943961 0
+603 1 2.76534462 0.9407741 0.088079778169719514 1
+604 1 1.64908409 0.8387672 0.25365769899634338 1
+605 1 9.463315 0.9999224 0.00011196505030560377 1
+606 0 -2.43851948 0.08028216 0.12073676854570529 0
+607 0 -1.95639491 0.123857722 0.19076292420475424 0
+608 1 13.1060286 0.999998 2.9237080272005804E-06 1
+609 0 -2.50897 0.07523173 0.11283619840279428 0
+610 1 5.4565506 0.995749831 0.0061447648647644489 1
+611 1 8.201106 0.999725759 0.00039570034941416836 1
+612 1 24.51187 1 0 1
+613 0 -2.25948548 0.0945344046 0.14326827011600504 0
+614 0 -2.0189333 0.117229328 0.1798893939370643 0
+615 0 -2.491962 0.0764236 0.11469678675361461 0
+616 0 -2.43037367 0.08088568 0.12168377661511501 0
+617 0
+618 0 -2.546958 0.07263111 0.10878476978943961 0
+619 0 -2.652885 0.06581142 0.098214282791589516 0
+620 0 -2.43037367 0.08088568 0.12168377661511501 0
+621 0 -2.19200563 0.100470684 0.15275779542018131 0
+622 0 -2.700953 0.06291714 0.093751478225403542 0
+623 0 -1.95639491 0.123857722 0.19076292420475424 0
+624 0 -2.41144156 0.0823043659 0.1239123508690735 0
+625 0 -2.19398546 0.1002919 0.15247108397309578 0
+626 1 3.657977 0.9748635 0.03672782424977717 1
+627 0 -1.88075233 0.1323025 0.20473591051784815 0
+628 0 -2.11052465 0.108078077 0.16501067045825779 0
+629 0 -2.386812 0.0841838941 0.12687015785045277 0
+630 0 -2.40915632 0.08247714 0.12418398785058164 0
+631 0 -2.652885 0.06581142 0.098214282791589516 0
+632 0 -1.95639491 0.123857722 0.19076292420475424 0
+633 1 3.20818138 0.961141 0.057180018700639101 1
+634 0 -2.253997 0.095005244 0.14401866240282399 0
+635 0 -2.25866842 0.09460436 0.14337973262597348 0
+636 1 11.0438576 0.999984 2.3045859283900738E-05 1
+637 0 -2.06156754 0.112888753 0.17281306044669567 0
+638 0 -2.386812 0.0841838941 0.12687015785045277 0
+639 0 -2.57519341 0.0707521 0.10586457082203461 0
+640 0 -2.48130536 0.07717918 0.11587753928641568 0
+641 0 -2.43037367 0.08088568 0.12168377661511501 0
+642 0 -2.43037367 0.08088568 0.12168377661511501 0
+643 0 -1.95639491 0.123857722 0.19076292420475424 0
+644 0 -2.11052465 0.108078077 0.16501067045825779 0
+645 0 -2.43037367 0.08088568 0.12168377661511501 0
+646 0 -2.00470734 0.118709572 0.18231055874000973 0
+647 0 -2.135698 0.105675265 0.16112931691390375 0
+648 1 13.45014 0.999998569 2.0637932864940443E-06 1
+649 0 -2.43037367 0.08088568 0.12168377661511501 0
+650 0 -2.31990623 0.0894877 0.13524959098307188 0
+651 0 -2.28205872 0.09261979 0.14022090268098497 0
+652 0 -2.63995266 0.06661098 0.099449594195521407 0
+653 0 -2.546958 0.07263111 0.10878476978943961 0
+654 0 -2.60329771 0.06892648 0.10303300888915028 0
+655 0 -2.43037367 0.08088568 0.12168377661511501 0
+656 0 -2.652885 0.06581142 0.098214282791589516 0
+657 0 -2.3856678 0.0842721462 0.12700918908215508 0
+658 1 9.13554 0.999892235 0.00015548069423433847 1
+659 0 -1.95639491 0.123857722 0.19076292420475424 0
+660 0 -2.16523314 0.1029163 0.15668549713042432 0
+661 0 -2.70365119 0.06275825 0.09350687664921721 0
+662 0 -2.21700072 0.09823417 0.14917524963571913 0
+663 0 -2.21700072 0.09823417 0.14917524963571913 0
+664 0 -2.52331543 0.07423976 0.11128949348681842 0
+665 0 -1.95639491 0.123857722 0.19076292420475424 0
+666 0 -2.6012094 0.06906062 0.10324087369927021 0
+667 0 -2.60329771 0.06892648 0.10303300888915028 0
+668 1 0.230495453 0.557370067 0.84329256977903522 1
+669 1 11.8682785 0.999992967 1.0147012084681539E-05 1
+670 1 7.613348 0.9995065 0.00071218392983748437 1
+671 0 -2.56594157 0.071362786 0.10681299857820942 0
+672 0 -2.49228716 0.07640065 0.11466094104662786 0
+673 0 -2.57178926 0.07097623 0.10621258116698419 0
+674 0 -2.16523314 0.1029163 0.15668549713042432 0
+675 0 -2.33810186 0.08801616 0.13291983400253021 0
+676 0 -2.1956358 0.100143082 0.15223247204165077 0
+677 0 -2.50897 0.07523173 0.11283619840279428 0
+678 0 -1.95639491 0.123857722 0.19076292420475424 0
+679 0 -2.11052465 0.108078077 0.16501067045825779 0
+680 1 25.43999 1 0 1
+681 1 14.1827745 0.9999993 1.031896274211761E-06 1
+682 0 -2.7642622 0.05928621 0.088172242863683387 0
+683 0 -1.95639491 0.123857722 0.19076292420475424 0
+684 0 -1.95639491 0.123857722 0.19076292420475424 0
+685 0 -1.95639491 0.123857722 0.19076292420475424 0
+686 0 -1.95639491 0.123857722 0.19076292420475424 0
+687 0 -2.46161175 0.07859354 0.11809037777838542 0
+688 0 -2.386812 0.0841838941 0.12687015785045277 0
+689 0 -3.64467454 0.0254645254 0.037213392183878896 0
+690 0 -2.135698 0.105675265 0.16112931691390375 0
+691 1 5.792588 0.9969592 0.0043936168831631463 1
+692 0 -2.253997 0.095005244 0.14401866240282399 0
+693 0 -2.65598679 0.06562097 0.097920204018736551 0
+694 0 -2.47353578 0.0777343661 0.11674575467283718 0
+695 0 -2.11052465 0.108078077 0.16501067045825779 0
+696 1 7.72577858 0.999558866 0.00063656221445328599 1
+697 1 3.85642529 0.97929436 0.030185519608588427 1
+698 1 5.01899147 0.993432164 0.009506637452477578 1
diff --git a/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer-out.txt b/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer-out.txt
new file mode 100644
index 0000000000..d05c1b0b1c
--- /dev/null
+++ b/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer-out.txt
@@ -0,0 +1,77 @@
+maml.exe CV tr=FieldAwareFactorizationMachine{d=5 shuf- norm-} col[Feature]=DupFeatures threads=- norm=No dout=%Output% data=%Data% seed=1 xf=Copy{col=DupFeatures:Features} xf=MinMax{col=Features col=DupFeatures}
+Not adding a normalizer.
+Warning: Skipped 8 examples with bad label/weight/features in training set
+Not training a calibrator because it is not needed.
+Not adding a normalizer.
+Warning: Skipped 8 examples with bad label/weight/features in training set
+Not training a calibrator because it is not needed.
+Warning: The predictor produced non-finite prediction values on 8 instances during testing. Possible causes: abnormal data or the predictor is numerically unstable.
+TEST POSITIVE RATIO: 0.3785 (134.0/(134.0+220.0))
+Confusion table
+ ||======================
+PREDICTED || positive | negative | Recall
+TRUTH ||======================
+ positive || 122 | 12 | 0.9104
+ negative || 4 | 216 | 0.9818
+ ||======================
+Precision || 0.9683 | 0.9474 |
+OVERALL 0/1 ACCURACY: 0.954802
+LOG LOSS/instance: 0.259660
+Test-set entropy (prior Log-Loss/instance): 0.956998
+LOG-LOSS REDUCTION (RIG): 72.867233
+AUC: 0.984973
+Warning: The predictor produced non-finite prediction values on 8 instances during testing. Possible causes: abnormal data or the predictor is numerically unstable.
+TEST POSITIVE RATIO: 0.3191 (105.0/(105.0+224.0))
+Confusion table
+ ||======================
+PREDICTED || positive | negative | Recall
+TRUTH ||======================
+ positive || 92 | 13 | 0.8762
+ negative || 2 | 222 | 0.9911
+ ||======================
+Precision || 0.9787 | 0.9447 |
+OVERALL 0/1 ACCURACY: 0.954407
+LOG LOSS/instance: 0.260480
+Test-set entropy (prior Log-Loss/instance): 0.903454
+LOG-LOSS REDUCTION (RIG): 71.168362
+AUC: 0.967049
+
+OVERALL RESULTS
+---------------------------------------
+AUC: 0.976011 (0.0090)
+Accuracy: 0.954605 (0.0002)
+Positive precision: 0.973489 (0.0052)
+Positive recall: 0.893319 (0.0171)
+Negative precision: 0.946025 (0.0013)
+Negative recall: 0.986445 (0.0046)
+Log-loss: 0.260070 (0.0004)
+Log-loss reduction: 72.017798 (0.8494)
+F1 Score: 0.931542 (0.0069)
+AUPRC: 0.974115 (0.0054)
+
+---------------------------------------
+Physical memory usage(MB): %Number%
+Virtual memory usage(MB): %Number%
+%DateTime% Time elapsed(s): %Number%
+
+--- Progress log ---
+[1] 'Normalize' started.
+[1] (%Time%) 337 examples
+[1] 'Normalize' finished in %Time%.
+[2] 'Training' started.
+[2] (%Time%) 1 iterations, 329 examples Training-loss: 0.371414389819699
+[2] (%Time%) 2 iterations, 329 examples Training-loss: 0.225137821503565
+[2] (%Time%) 3 iterations, 329 examples Training-loss: 0.197323119398265
+[2] (%Time%) 4 iterations, 329 examples Training-loss: 0.183649426646222
+[2] (%Time%) 5 iterations, 329 examples Training-loss: 0.174400635825405
+[2] 'Training' finished in %Time%.
+[3] 'Normalize #2' started.
+[3] (%Time%) 362 examples
+[3] 'Normalize #2' finished in %Time%.
+[4] 'Training #2' started.
+[4] (%Time%) 1 iterations, 354 examples Training-loss: 0.35872800705401
+[4] (%Time%) 2 iterations, 354 examples Training-loss: 0.239609312114266
+[4] (%Time%) 3 iterations, 354 examples Training-loss: 0.210775498912242
+[4] (%Time%) 4 iterations, 354 examples Training-loss: 0.19625903089058
+[4] (%Time%) 5 iterations, 354 examples Training-loss: 0.187121580244397
+[4] 'Training #2' finished in %Time%.
diff --git a/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer-rp.txt b/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer-rp.txt
new file mode 100644
index 0000000000..b826c5ae0d
--- /dev/null
+++ b/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer-rp.txt
@@ -0,0 +1,4 @@
+FieldAwareFactorizationMachine
+AUC Accuracy Positive precision Positive recall Negative precision Negative recall Log-loss Log-loss reduction F1 Score AUPRC /d /norm /shuf Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
+0.976011 0.954605 0.973489 0.893319 0.946025 0.986445 0.26007 72.0178 0.931542 0.974115 5 - - FieldAwareFactorizationMachine %Data% %Output% 99 0 0 maml.exe CV tr=FieldAwareFactorizationMachine{d=5 shuf- norm-} col[Feature]=DupFeatures threads=- norm=No dout=%Output% data=%Data% seed=1 xf=Copy{col=DupFeatures:Features} xf=MinMax{col=Features col=DupFeatures} /d:5;/norm:-;/shuf:-
+
diff --git a/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer.txt b/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer.txt
new file mode 100644
index 0000000000..c1977a346f
--- /dev/null
+++ b/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-CV-breast-cancer.txt
@@ -0,0 +1,700 @@
+Instance Label Score Probability Log-loss Assigned
+5 1 14.8638926 0.999999642 5.1594804484713121E-07 1
+6 0 -0.8998656 0.289078116 0.49223705031518167 0
+8 0 -2.48059678 0.0772296637 0.11595646754997578 0
+9 0 -2.373167 0.08524186 0.12853774899383463 0
+10 0 -1.8267622 0.138624445 0.2152857123445632 0
+11 0 -2.03787947 0.115282834 0.17671177846463382 0
+18 1 5.347374 0.9952619 0.0068518676812505449 1
+20 1 2.93836784 0.949710846 0.074439765251190851 1
+21 1 4.537781 0.989416063 0.015350773539417332 1
+25 1 0.214011192 0.5532995 0.85386750966876357 1
+28 0 -2.03787947 0.115282834 0.17671177846463382 0
+31 0 -2.22085452 0.09789331 0.14863003134088387 0
+32 1 2.94516468 0.9500345 0.073948191048563552 1
+35 0 -2.03787947 0.115282834 0.17671177846463382 0
+37 0 -3.201078 0.039125178 0.057579598806254788 0
+40 0
+41 1 -1.51869583 0.179653645 2.476709891872058 0
+44 1 6.41933727 0.9983729 0.0023493029812132662 1
+45 0 -2.01876 0.117247269 0.17991871486127162 0
+46 1 5.507395 0.9959597 0.005840729107190099 1
+48 0 -2.64206457 0.0664797947 0.099246846085645518 0
+50 1 -0.7729988 0.315830767 1.6627763744194746 0
+51 1 -1.07958174 0.2535852 1.9794576062795608 0
+52 1 1.92737579 0.87295866 0.196014759754012 1
+54 1 2.360999 0.9138046 0.13004240462108896 1
+56 1 6.64605141 0.9987025 0.0018730745676176913 1
+60 1 0.586817265 0.6426345 0.6379296358599772 1
+63 1 -1.3297224 0.209205285 2.2570087990606815 0
+64 0 -1.85676885 0.135080114 0.20936158691636991 0
+66 0 -2.56239128 0.0715984255 0.10717912551116054 0
+68 1 6.963379 0.999054968 0.0013640370953882713 1
+69 0 -2.07254 0.111794561 0.1710346893319066 0
+70 0 -2.4900763 0.0765568 0.11490487237984263 0
+71 1 2.84816265 0.94522357 0.081272490426858024 1
+72 0 -2.398419 0.08329334 0.12546794171530753 0
+73 1 7.05520535 0.999137759 0.0012444871146033098 1
+74 1 -1.02989769 0.263103932 1.926295284848996 0
+76 0 -1.87518346 0.1329431 0.20580141213002087 0
+77 0 -2.51152325 0.0750542954 0.11255941460668487 0
+79 0 -2.256354 0.09480278 0.14369594410546618 0
+82 0 -2.68407941 0.06391936 0.095295273554898599 0
+88 0 -2.56239128 0.0715984255 0.10717912551116054 0
+90 0 -2.04771137 0.114283845 0.17508366149751475 0
+91 0 -2.108456 0.108277671 0.16533355177618697 0
+92 0 -2.56239128 0.0715984255 0.10717912551116054 0
+93 0 -1.85676885 0.135080114 0.20936158691636991 0
+95 0 -2.04771137 0.114283845 0.17508366149751475 0
+96 0 -1.93042636 0.1267034 0.19545636564796123 0
+97 0 -2.730785 0.0611810647 0.091081154608133083 0
+98 1 5.869131 0.9971826 0.0040703745878533431 1
+99 1 9.67402649 0.999937057 9.0809697580461105E-05 1
+100 1 0.9169636 0.714423 0.48514956618634053 1
+102 0 -2.528028 0.0739165246 0.11078585383424393 0
+104 1 9.67022 0.9999368 9.1153684574596215E-05 1
+105 1 -1.82901669 0.138355449 2.8535486322723087 0
+106 1 9.469551 0.9999229 0.00011127706647502522 1
+108 0 -1.91807806 0.128076032 0.1977257574643819 0
+109 1 4.250736 0.9859466 0.020418590525940875 1
+111 1 1.28113461 0.782642841 0.35357401033762303 1
+112 1 3.845829 0.9790785 0.030503600596017547 1
+113 1 7.44737339 0.9994174 0.00084072413131743907 1
+115 0 -1.45452547 0.18930608 0.30277077223301041 0
+117 1 6.50040436 0.9984994 0.00216654358065164 1
+120 0 -2.10977483 0.108150378 0.16512762206914725 0
+121 0 -2.40890312 0.0824963 0.12421411952463637 0
+122 1 9.287441 0.9999074 0.0001336367089073252 1
+123 1 0.991513252 0.7293867 0.45524422695477623 1
+125 0 -1.85676885 0.135080114 0.20936158691636991 0
+128 1 2.86968613 0.946327448 0.079588624104640571 1
+129 0 -3.50972271 0.0290368516 0.042511553752884852 0
+131 0 -2.22085452 0.09789331 0.14863003134088387 0
+132 1 10.8694086 0.9999809 2.7517486566085523E-05 1
+133 0 -2.231102 0.09699208 0.1471894583221901 0
+137 0 -2.03968573 0.115098737 0.17641160645683859 0
+138 0 -2.37357259 0.08521025 0.12848789225109375 0
+141 0 -1.85219073 0.135615885 0.21025553563894767 0
+144 0 -2.03787947 0.115282834 0.17671177846463382 0
+145 0
+147 0 -2.03505254 0.115571469 0.17718252883609134 0
+150 0 -1.8267622 0.138624445 0.2152857123445632 0
+151 1 0.872104645 0.7051834 0.50392960783522212 1
+152 1 9.204662 0.9998994 0.00014516065994930907 1
+154 0 -1.66311264 0.1593446 0.25041355795755704 0
+156 0 -1.869329 0.133619383 0.20692712807027863 0
+161 0 -2.555451 0.0720611438 0.10789834857071755 0
+164 0
+167 1 3.16309261 0.9594215 0.059763301891325865 1
+169 0 -1.52618456 0.178552613 0.28375991953883767 0
+171 0 -2.04771137 0.114283845 0.17508366149751475 0
+173 1 19.4496174 1 0 1
+174 1 3.02353477 0.9536261 0.068504378788065867 1
+176 0 -2.22085452 0.09789331 0.14863003134088387 0
+177 1 4.302659 0.986648142 0.01939241122666641 1
+179 1 9.584427E-05 0.500023961 0.9999308646308972 1
+180 0 -1.8267622 0.138624445 0.2152857123445632 0
+181 0 -1.66311264 0.1593446 0.25041355795755704 0
+183 1 10.4648762 0.9999715 4.1104439112694392E-05 1
+187 1 10.78104 0.999979258 2.9925291612631146E-05 1
+188 1 8.408628 0.999777138 0.00032155739854406422 1
+189 0 -2.01109338 0.11804311 0.18121995585967673 0
+191 1 12.5877247 0.999996543 4.987505496102727E-06 1
+192 0 -2.57171249 0.0709812939 0.10622044886941406 0
+196 0 3.01742458 0.953355134 4.4221378767752633 1
+198 0 -1.66311264 0.1593446 0.25041355795755704 0
+199 0 -2.21740723 0.0981981754 0.14911766597757717 0
+201 1 6.942314 0.9990349 0.001393043875711089 1
+202 0 -2.04771137 0.114283845 0.17508366149751475 0
+204 0 -2.04771137 0.114283845 0.17508366149751475 0
+205 1 16.2102318 0.9999999 1.7198266111377426E-07 1
+206 1 4.939949 0.9928959 0.010285626570319192 1
+207 0 -1.8267622 0.138624445 0.2152857123445632 0
+209 0 -2.537307 0.07328385 0.109800582737096 0
+210 1 18.7023277 1 0 1
+211 1 10.281146 0.999965668 4.9531853723975585E-05 1
+212 0 -2.04771137 0.114283845 0.17508366149751475 0
+216 0 -1.85676885 0.135080114 0.20936158691636991 0
+218 1 7.263891 0.9993001 0.0010100636449898702 1
+219 0 -2.76439142 0.059279006 0.088161193635133334 0
+223 1 2.7308712 0.9388239 0.091073557949706299 1
+226 1 8.248712 0.9997385 0.00037729327519243937 1
+228 0 -1.8267622 0.138624445 0.2152857123445632 0
+233 1 2.361946 0.913879156 0.12992468715671707 1
+237 1 4.39291763 0.9877864 0.01772897141308883 1
+239 1 1.52190113 0.820818245 0.28486529489841783 1
+240 0 -2.17585921 0.10193938 0.15511526353124511 0
+241 0 -2.29227257 0.09176497 0.13886241586083783 0
+242 0 -2.22085452 0.09789331 0.14863003134088387 0
+244 0 -2.04771137 0.114283845 0.17508366149751475 0
+246 1 14.4728222 0.9999995 6.8793076746672365E-07 1
+247 1 0.83739996 0.697917342 0.51887191386761078 1
+248 0 -2.44329524 0.0799302459 0.12018485315955793 0
+249 0
+250 0 -1.70150161 0.154269248 0.24172965723606502 0
+252 0 1.9876461 0.879493833 3.0528211105339298 1
+254 1 6.02062225 0.9975777 0.0034988407610739244 1
+257 0 -2.21740723 0.0981981754 0.14911766597757717 0
+258 0 -2.39241457 0.08375295 0.12619145057643072 0
+259 0 2.59828758 0.930751264 3.8520684539295051 1
+260 1 6.64796829 0.998705 0.001869458244391086 1
+262 1 10.2717314 0.9999654 4.9875830876224796E-05 1
+267 1 0.8395891 0.6983786 0.51791869634276033 1
+268 1 4.901437 0.992619 0.010688056896805 1
+269 0 -2.04771137 0.114283845 0.17508366149751475 0
+271 0 -2.730785 0.0611810647 0.091081154608133083 0
+272 1 0.8395891 0.6983786 0.51791869634276033 1
+275 0
+276 0 -2.21740723 0.0981981754 0.14911766597757717 0
+277 0 -1.85676885 0.135080114 0.20936158691636991 0
+278 0 -2.04771137 0.114283845 0.17508366149751475 0
+279 1 2.53177547 0.926339567 0.11038695820005974 1
+280 0 -2.39241457 0.08375295 0.12619145057643072 0
+283 1 2.58736229 0.930043757 0.10462950074543359 1
+284 1 6.315773 0.9981957 0.0026053945661372192 1
+285 1 17.0703163 1 0 1
+288 1 -0.602478 0.353776962 1.4990879940554545 0
+290 0 -1.66311264 0.1593446 0.25041355795755704 0
+291 0 -2.04771137 0.114283845 0.17508366149751475 0
+293 1 2.63869476 0.933310747 0.099570586451478413 1
+296 0 -0.04219532 0.48945272 0.96988352376951226 0
+297 0
+299 1 2.437027 0.9196076 0.1209097376152837 1
+300 1 4.886091 0.9925057 0.010852664710009398 1
+301 0 -2.04771137 0.114283845 0.17508366149751475 0
+303 0 -2.04771137 0.114283845 0.17508366149751475 0
+304 1 2.90765285 0.9482235 0.076700989946081224 1
+308 1 4.42898655 0.988214 0.017104577035971798 1
+309 0 -2.17276287 0.102223195 0.15557127190119621 0
+311 0 -1.66311264 0.1593446 0.25041355795755704 0
+312 1 -0.368750572 0.408842951 1.2903813280998107 0
+314 0 -1.63186324 0.163575277 0.25769238876050721 0
+316 1 2.07395124 0.8883455 0.1708072411711142 1
+317 1 9.236988 0.9999026 0.00014051666860547148 1
+319 0 -0.6327839 0.346879572 0.61457906126809736 0
+321 0
+323 1 3.56552029 0.9724956 0.040236349195893271 1
+327 0 -1.85676885 0.135080114 0.20936158691636991 0
+328 1 2.31613636 0.910204649 0.13573713983693664 1
+329 1 3.052658 0.9548971 0.066582810014138891 1
+331 0 -2.85865283 0.0542357638 0.080447507227237577 0
+332 0 -2.45453167 0.0791077837 0.11889578564930094 0
+333 1 3.224123 0.9617321 0.056293037640751442 1
+336 1 4.161271 0.984651566 0.022314800350329005 1
+338 0 -1.63186324 0.163575277 0.25769238876050721 0
+343 0 -1.66311264 0.1593446 0.25041355795755704 0
+344 1 6.48078156 0.99846977 0.0022093461331651669 1
+346 0 -2.4612627 0.07861882 0.11812996027828425 0
+347 0 -1.5268147 0.178460211 0.28359764431931689 0
+348 1 -1.55794716 0.173941419 2.5233265901272413 0
+349 1 0.96990633 0.7251008 0.46374649890978753 1
+350 0 -2.50478745 0.07552324 0.11329104572942034 0
+352 0 -0.8905029 0.291006058 0.49615479540511881 0
+353 1 11.4202032 0.999989033 1.5822490644178947E-05 1
+354 0 -1.85676885 0.135080114 0.20936158691636991 0
+355 0 -2.59761286 0.06929221 0.10359981233578254 0
+358 1 4.168295 0.9847573 0.02215988217787071 1
+360 1 19.7173061 1 0 1
+361 1 4.88178444 0.9924736 0.01089936476746209 1
+366 1 18.082119 1 0 1
+368 0 -1.71223855 0.152873591 0.23935082825368728 0
+370 0 -2.466129 0.07826703 0.11757923930621997 0
+371 0 -1.71223855 0.152873591 0.23935082825368728 0
+373 0 -2.61302185 0.0683050454 0.10207041412114773 0
+376 0 -1.85676885 0.135080114 0.20936158691636991 0
+377 0 -1.63186324 0.163575277 0.25769238876050721 0
+378 0 -2.55687785 0.0719657838 0.1077500970966577 0
+379 0 -2.64790964 0.0661179647 0.098687769592959287 0
+381 1 6.70689869 0.998779 0.0017626085941359387 1
+383 0 -1.85219073 0.135615885 0.21025553563894767 0
+384 0 -1.85219073 0.135615885 0.21025553563894767 0
+387 0 -2.587854 0.06992423 0.10457983932319151 0
+388 0 -2.062164 0.11282903 0.17271593575335431 0
+389 0 -2.63233638 0.06708608 0.10018412244865801 0
+391 1 9.797784 0.999944448 8.0146141443027661E-05 1
+392 0 -2.21740723 0.0981981754 0.14911766597757717 0
+395 0 -2.21740723 0.0981981754 0.14911766597757717 0
+396 0 -2.39241457 0.08375295 0.12619145057643072 0
+398 0 -2.20704079 0.09912 0.15059314510991556 0
+399 0 -1.70332181 0.1540319 0.24132483641316704 0
+404 0 -1.84161174 0.136860788 0.21233483043736184 0
+406 0 -2.33448553 0.08830687 0.13337978519080357 0
+409 0 -2.2088275 0.09896057 0.15033785589508675 0
+413 0 -2.68322229 0.0639706552 0.095374335452958092 0
+414 1 4.470516 0.988688052 0.016412697518568688 1
+415 0 -1.37697363 0.201495469 0.32462750016760433 0
+416 1 6.49479866 0.9984909 0.0021787727517901165 1
+418 0 -2.47054315 0.07794919 0.11708183960876384 0
+419 0 -2.491969 0.07642309 0.11469599534527423 0
+422 0 -2.202886 0.09949162 0.15118839089802541 0
+423 0 -2.4900763 0.0765568 0.11490487237984263 0
+428 0 -1.85676885 0.135080114 0.20936158691636991 0
+429 0 -2.03787947 0.115282834 0.17671177846463382 0
+430 0 -1.88024449 0.1323608 0.20483286117168037 0
+434 0 4.23710155 0.985756457 6.1335481216812706 1
+436 1 2.84663773 0.9451446 0.081393036792859863 1
+439 0 -2.40992618 0.0824188963 0.12409241344887624 0
+440 1 3.80452919 0.9782154 0.031775923054351646 1
+441 0 -1.8284502 0.138423011 0.21494837550204221 0
+442 0 -1.70252347 0.154135972 0.24150232548215611 0
+449 1 12.0930557 0.9999944 8.083207235017858E-06 1
+450 0 -2.296249 0.0914341062 0.13833694513157324 0
+451 0 -2.40992618 0.0824188963 0.12409241344887624 0
+452 0 -2.17240882 0.102255695 0.1556234982659298 0
+453 1 6.529911 0.998543 0.0021035047090949962 1
+454 0 -2.07757616 0.111295484 0.17022427557546105 0
+455 1 -1.9901104 0.120245181 3.0559490175405211 0
+456 1 7.704238 0.9995492 0.00065049902446028007 1
+457 1 7.00719738 0.99909544 0.0013055949383471309 1
+464 0 -2.22559762 0.09747525 0.14796160408536918 0
+465 1 8.38367748 0.999771535 0.00032964240762353795 1
+466 1 6.553727 0.998577237 0.0020540745152688254 1
+467 1 6.02119255 0.997579157 0.0034967719595408674 1
+474 0 -2.40992618 0.0824188963 0.12409241344887624 0
+480 0 -2.20853472 0.09898668 0.15037965733571257 0
+482 1 16.26332 0.9999999 1.7198266111377426E-07 1
+483 1 11.1233063 0.9999852 2.1326006327376515E-05 1
+484 0 -2.38753271 0.08412834 0.1267826495786491 0
+487 1 13.0827723 0.999998 2.9237080272005804E-06 1
+489 1 -2.25294924 0.09509537 3.3944810328808703 0
+492 0 -2.191049 0.100557171 0.15289651175735539 0
+493 1 12.4391136 0.999996066 5.6754386418026423E-06 1
+495 0 -2.006237 0.11854963 0.18204875397128145 0
+497 0 -2.21665764 0.09826457 0.14922388345357948 0
+501 0 -2.39535141 0.0835278556 0.12583706323377797 0
+502 0 -2.446126 0.07972231 0.11985883701567156 0
+504 0 -1.66311264 0.1593446 0.25041355795755704 0
+507 0 -1.5343039 0.177364841 0.28167536187524722 0
+510 0 -1.66311264 0.1593446 0.25041355795755704 0
+513 0 -2.006237 0.11854963 0.18204875397128145 0
+514 1 11.0994911 0.99998486 2.1841961999056935E-05 1
+517 0 -1.63186324 0.163575277 0.25769238876050721 0
+519 1 4.549981 0.9895431 0.01516557768494084 1
+520 0 -1.89241147 0.130969763 0.20252171957783308 0
+521 0 -2.43429351 0.08059475 0.12122718788029851 0
+522 1 0.357010841 0.5883166 0.76533530327012156 1
+523 1 5.51915169 0.9960068 0.0057725219886650735 1
+527 0 -2.56239128 0.0715984255 0.10717912551116054 0
+528 0 -2.52255535 0.07429201 0.11137092307285647 0
+529 0 -2.191049 0.100557171 0.15289651175735539 0
+531 0 -2.33448553 0.08830687 0.13337978519080357 0
+532 0 -1.8267622 0.138624445 0.2152857123445632 0
+533 0 -2.21740723 0.0981981754 0.14911766597757717 0
+534 0 -2.03787947 0.115282834 0.17671177846463382 0
+535 0 -2.08951616 0.110119984 0.16831726594009194 0
+538 0 -2.39535141 0.0835278556 0.12583706323377797 0
+539 0 -2.74649048 0.0602851622 0.089705066266029906 0
+540 0 -2.47341466 0.07774305 0.11675934435269709 0
+541 0 -2.03968573 0.115098737 0.17641160645683859 0
+544 0 -2.07751942 0.111301087 0.17023337107337447 0
+546 1 15.9298019 0.9999999 1.7198266111377426E-07 1
+547 0 -1.63019609 0.1638035 0.25808609509773911 0
+548 0 -1.82455885 0.138887748 0.21572677983014757 0
+549 1 3.53455734 0.9716552 0.04148365754326467 1
+557 0 -2.50478745 0.07552324 0.11329104572942034 0
+558 0 -2.03787947 0.115282834 0.17671177846463382 0
+559 0 -2.57171249 0.0709812939 0.10622044886941406 0
+560 0 -2.730785 0.0611810647 0.091081154608133083 0
+561 0 -2.730785 0.0611810647 0.091081154608133083 0
+563 0 -2.21740723 0.0981981754 0.14911766597757717 0
+565 1 14.4914665 0.9999995 6.8793076746672365E-07 1
+566 0 -2.37125015 0.08539145 0.12877368417465981 0
+569 1 7.86107731 0.9996147 0.00055595503487491883 1
+577 0 -1.85676885 0.135080114 0.20936158691636991 0
+578 0 -1.85676885 0.135080114 0.20936158691636991 0
+581 1 8.679046 0.9998299 0.00024544011948576707 1
+582 1 9.267637 0.9999056 0.00013621668994928923 1
+584 0 -2.791611 0.0577791929 0.085862903474618182 0
+586 1 18.3650551 1 0 1
+590 1 0.8902283 0.7089373 0.49627008273810297 1
+593 0 -2.38753271 0.08412834 0.1267826495786491 0
+594 1 3.65131 0.9746996 0.036970418233996383 1
+600 0 -2.21740723 0.0981981754 0.14911766597757717 0
+602 0 -2.39535141 0.0835278556 0.12583706323377797 0
+604 1 1.157403 0.7608605 0.3942961227380975 1
+606 0 -2.284902 0.0923811048 0.13984145096224598 0
+607 0 -1.66311264 0.1593446 0.25041355795755704 0
+609 0 -2.40992618 0.0824188963 0.12409241344887624 0
+612 1 20.9893341 1 0 1
+613 0 -2.030234 0.1160649 0.17798764430460248 0
+614 0 -1.8198415 0.13945289 0.21667392047737177 0
+617 0
+618 0 -2.39535141 0.0835278556 0.12583706323377797 0
+619 0 -2.57171249 0.0709812939 0.10622044886941406 0
+621 0 -2.57282734 0.07090781 0.10610633667562216 0
+622 0 -2.87062454 0.0536249466 0.079516050420504789 0
+624 0 -2.41982079 0.0816737 0.12292122599990905 0
+627 0 -1.58449054 0.170160457 0.26909569048429549 0
+629 0 -2.22559762 0.09747525 0.14796160408536918 0
+633 1 1.851265 0.8642755 0.21043680463191686 1
+634 0 -2.03968573 0.115098737 0.17641160645683859 0
+638 0 -2.22559762 0.09747525 0.14796160408536918 0
+639 0 -2.50478745 0.07552324 0.11329104572942034 0
+641 0 -2.21740723 0.0981981754 0.14911766597757717 0
+642 0 -2.21740723 0.0981981754 0.14911766597757717 0
+644 0 -1.85219073 0.135615885 0.21025553563894767 0
+645 0 -2.21740723 0.0981981754 0.14911766597757717 0
+649 0 -2.21740723 0.0981981754 0.14911766597757717 0
+652 0 -2.564228 0.07147643 0.10698956187034971 0
+653 0 -2.39535141 0.0835278556 0.12583706323377797 0
+654 0 -2.39241457 0.08375295 0.12619145057643072 0
+656 0 -2.57171249 0.0709812939 0.10622044886941406 0
+657 0 -2.0722928 0.111819126 0.17107458963369743 0
+660 0 -1.85676885 0.135080114 0.20936158691636991 0
+661 0 -2.56239128 0.0715984255 0.10717912551116054 0
+665 0 -1.66311264 0.1593446 0.25041355795755704 0
+668 1 -0.192660332 0.451983333 1.1456585221797912 0
+670 1 6.66539 0.9987274 0.0018371700459368042 1
+678 0 -1.66311264 0.1593446 0.25041355795755704 0
+679 0 -1.85219073 0.135615885 0.21025553563894767 0
+680 1 20.8869 1 0 1
+681 1 12.674202 0.9999969 4.4715558520995569E-06 1
+682 0 -2.713255 0.0621957332 0.092641251857392207 0
+683 0 -1.66311264 0.1593446 0.25041355795755704 0
+685 0 -1.66311264 0.1593446 0.25041355795755704 0
+688 0 -2.22559762 0.09747525 0.14796160408536918 0
+689 0 -2.78634024 0.0580668 0.086303344802401927 0
+691 1 4.852685 0.9922531 0.011219894450214495 1
+692 0 -2.03968573 0.115098737 0.17641160645683859 0
+693 0 -2.36527443 0.08585931 0.12951188152740967 0
+694 0 -2.19330382 0.10035342 0.15256973467024601 0
+696 1 6.44385242 0.998412251 0.0022924573316975153 1
+697 1 2.56533623 0.9285971 0.1068753309513874 1
+698 1 3.769271 0.977451265 0.032903322595354806 1
+0 0 -2.301516 0.09099748 0.13764380069439297 0
+1 0 1.08247185 0.7469615 1.9825710402688395 1
+2 0 -2.0523982 0.113810278 0.17431250107108243 0
+3 0 3.463801 0.96964 5.0416851857458864 1
+4 0 -2.25727749 0.09472357 0.14356969816808129 0
+7 0 -2.013621 0.117780194 0.18078994480689475 0
+12 1 -0.8284931 0.30396378 1.7180286690667717 0
+13 0 -1.81933558 0.139513627 0.21677574901409538 0
+14 1 7.468815 0.999429643 0.00082308574166617431 1
+15 1 -0.935884 0.2817325 1.8276020977115841 0
+16 0 -2.098476 0.109245047 0.16689949442685795 0
+17 0 -2.22533417 0.09749843 0.14799865604012269 0
+19 0 -2.36852455 0.08560456 0.12910989118919178 0
+22 0 -2.00196 0.118997283 0.18278162694192454 0
+23 1
+24 0 -1.941751 0.125455618 0.19339649381178925 0
+26 0 -1.70507562 0.153803527 0.24093542302960627 0
+27 0 -2.18581915 0.101031184 0.15365702371483531 0
+29 0 -1.6321876 0.1635309 0.25761585008130428 0
+30 0 -1.73291934 0.150214538 0.23482943239457851 0
+33 0 -2.09316373 0.109763056 0.16773872242540308 0
+34 0 -1.966197 0.122797951 0.18901891314128982 0
+36 1 10.6047812 0.9999752 3.5772834884537144E-05 1
+38 1 5.09487247 0.99390924 0.0088139788276411432 1
+39 1 0.138988018 0.534691155 0.90322228350613509 1
+42 1 8.104832 0.999698043 0.00043569783898110006 1
+43 1 -2.051857 0.113864884 3.1346052100634472 0
+47 0 -1.78140879 0.144129261 0.22453517072067464 0
+49 1 6.031123 0.997603 0.0034622923707363488 1
+53 1 2.5970068 0.930668652 0.10366048147588253 1
+55 1 3.37955284 0.967059433 0.048323537425725815 1
+57 1 -2.29733229 0.09134414 3.4525440037349249 0
+58 1 -0.8062577 0.308688521 1.6957762577022222 0
+59 1 -0.20775795 0.448246568 1.1576355563258656 0
+61 0 -1.73861909 0.149488419 0.23359721489187701 0
+62 1 5.937831 0.99736917 0.0038004865334286451 1
+65 1 -1.92219067 0.127617478 2.9701021620868731 0
+67 1 1.6232996 0.835249662 0.25972060045188861 1
+75 0 -1.96936083 0.122457556 0.18845918980961393 0
+78 0 -1.95251036 0.124279886 0.19145824716485596 0
+80 0 -2.1607275 0.103333026 0.15735583372897294 0
+81 0 -2.12950277 0.106262207 0.16207646288515981 0
+83 0 -2.52023387 0.07445183 0.11162001257935196 0
+84 1 7.28208447 0.999312758 0.00099182083151890563 1
+85 1 3.20175552 0.9609003 0.057541335571880493 1
+86 1 1.07764864 0.746048748 0.42265819240738722 1
+87 1 6.03411 0.9976101 0.0034520348521077638 1
+89 0 -2.220188 0.0979522 0.14872421333055375 0
+94 0 -2.045452 0.114512727 0.17545652289353517 0
+101 1 -0.09225035 0.4769538 1.0680785545608487 0
+103 1 -2.52788973 0.07392599 3.7577745918272321 0
+107 1 6.7885294 0.9988746 0.0016245164986375925 1
+110 0 -2.08183551 0.110874891 0.16954166005159219 0
+114 0 -1.657285 0.16012679 0.25175654520308971 0
+116 0 -1.4446578 0.1908251 0.30547653402158781 0
+118 0 -2.08073568 0.110983357 0.16971766707069735 0
+119 0 -1.99037921 0.120216757 0.18477997210748806 0
+124 1 4.98317528 0.993194342 0.0098520525459535446 1
+126 1 4.85608768 0.9922792 0.011182023351539032 1
+127 0 -2.13997936 0.105271339 0.16047786433454483 0
+130 0 -2.02583027 0.116517484 0.17872651087564395 0
+134 0 -2.27046967 0.0935983658 0.14177763258855564 0
+135 0 -2.082193 0.110839657 0.16948449080217048 0
+136 0 -2.098476 0.109245047 0.16689949442685795 0
+139 0
+140 0 -1.860422 0.134653866 0.20865077658844469 0
+142 1 1.5520792 0.82521385 0.2771600607430223 1
+143 0 -1.42160451 0.19441016 0.31188260684972846 0
+146 1 -0.379899025 0.406151235 1.2999110632727831 0
+148 0 -3.333778 0.03443041 0.050547855906756659 0
+149 1 9.453949 0.9999217 0.0001129970266666251 1
+153 0 -1.79716134 0.142196968 0.22128167987188255 0
+155 1 2.7138834 0.9378409 0.092584929720156431 1
+157 0 -2.045452 0.114512727 0.17545652289353517 0
+158 0
+159 1 14.5143137 0.9999995 6.8793076746672365E-07 1
+160 1 8.880978 0.999861 0.0002005457089583406 1
+162 0 -2.13997936 0.105271339 0.16047786433454483 0
+163 0 -2.36457872 0.0859139338 0.12959808560549185 0
+165 0 -2.09007239 0.110065483 0.16822891030896434 0
+166 1 8.115719 0.9997012 0.00043113892933886136 1
+168 0 -2.13997936 0.105271339 0.16047786433454483 0
+170 0 -1.860422 0.134653866 0.20865077658844469 0
+172 0 -1.78140879 0.144129261 0.22453517072067464 0
+175 1 7.61374569 0.9995066 0.00071201186227698389 1
+178 0 -2.22533417 0.09749843 0.14799865604012269 0
+182 0 -2.36852455 0.08560456 0.12910989118919178 0
+184 1 4.644288 0.990475237 0.013807187789500337 1
+185 0 -1.77662516 0.144720361 0.22553189902602172 0
+186 1 5.66861629 0.9965592 0.0049725809792141133 1
+190 1 16.6872864 1 0 1
+193 0 -1.941751 0.125455618 0.19339649381178925 0
+194 0 -2.13997936 0.105271339 0.16047786433454483 0
+195 0 -2.22533417 0.09749843 0.14799865604012269 0
+197 0 -2.29312468 0.091693975 0.13874964503480286 0
+200 1 10.1933537 0.999962568 5.4003563100270837E-05 1
+203 0 -2.301516 0.09099748 0.13764380069439297 0
+208 0 -1.67101777 0.158288538 0.24860233230888198 0
+213 1 20.2759647 1 0 1
+214 1 18.7733364 1 0 1
+215 1 8.191004 0.9997229 0.00039982907120573395 1
+217 0 -1.941751 0.125455618 0.19339649381178925 0
+220 0 -2.00243449 0.118947558 0.18270020126928377 0
+221 1 7.44619 0.9994167 0.00084175662910008565 1
+222 1 -1.34953451 0.205946475 2.2796586599059694 0
+224 1 9.052607 0.999882936 0.00016889684917020562 1
+225 0 -1.78140879 0.144129261 0.22453517072067464 0
+227 1 8.601936 0.999816358 0.00026496360361006585 1
+229 1 15.556097 0.9999999 1.7198266111377426E-07 1
+230 1 4.348646 0.9872406 0.018526350463759503 1
+231 1 8.508919 0.999798357 0.00029093798765384679 1
+232 0 -0.296713352 0.426361144 0.80178534411673164 0
+234 0 -1.87558413 0.132896915 0.20572457748992548 0
+235 0
+236 1 9.828637 0.9999461 7.7738252578338893E-05 1
+238 1 15.6847239 0.9999999 1.7198266111377426E-07 1
+243 0 -1.85970509 0.134737432 0.20879010310197682 0
+245 0 -2.17634916 0.101894535 0.15504322373043186 0
+251 1 6.219199 0.998013139 0.00286928622354919 1
+253 1 8.104832 0.999698043 0.00043569783898110006 1
+255 1 3.94488955 0.9810141 0.027654262469072717 1
+256 0 -1.860422 0.134653866 0.20865077658844469 0
+261 1 12.4782257 0.9999962 5.5034553246245386E-06 1
+263 1 6.391608 0.9983272 0.0024153673304377296 1
+264 1 2.26732063 0.906134069 0.14220357185362809 1
+265 0 -1.91125607 0.1288398 0.19899006005896697 0
+266 1 7.600045 0.9994998 0.00072181974597906125 1
+270 1 5.563095 0.9961778 0.0055248450255732757 1
+273 1 -1.02924967 0.263229579 1.925606483115087 0
+274 0 -2.136303 0.105618112 0.16103712188187333 0
+281 0 -2.09316373 0.109763056 0.16773872242540308 0
+282 1 3.098658 0.9568373 0.06365446955853514 1
+286 1 19.419363 1 0 1
+287 0 -2.27046967 0.0935983658 0.14177763258855564 0
+289 1 8.124609 0.999703944 0.00042718215151575466 1
+292 1
+294 0
+295 1 5.98761 0.997496545 0.0036162501096248033 1
+298 0 -2.98175526 0.04825695 0.071355963047900345 0
+302 1 18.99006 1 0 1
+305 1 8.560013 0.9998085 0.00027631658811241448 1
+306 0 -1.941751 0.125455618 0.19339649381178925 0
+307 0 -1.941751 0.125455618 0.19339649381178925 0
+310 0 -2.16668749 0.1027821 0.1564696923073676 0
+313 0 -1.6175487 0.165543213 0.26109075530209375 0
+315 0
+318 0 -2.750764 0.0600435175 0.089334129517283936 0
+320 1 2.96006584 0.950737059 0.072881698029347464 1
+322 0 -2.13997936 0.105271339 0.16047786433454483 0
+324 0 -1.941751 0.125455618 0.19339649381178925 0
+325 0 -1.86101615 0.13458465 0.20853538526771875 0
+326 1 0.5836396 0.6419045 0.6395694800066366 1
+330 1 3.19974613 0.9608247 0.05765481382577594 1
+334 1 5.354478 0.995295346 0.0068033977028160051 1
+335 0 -1.6175487 0.165543213 0.26109075530209375 0
+337 0 -1.941751 0.125455618 0.19339649381178925 0
+339 1 4.42046261 0.9881143 0.01725016366406085 1
+340 1 3.32619667 0.9653167 0.050925737892124082 1
+341 0 -1.941751 0.125455618 0.19339649381178925 0
+342 0 -1.74357152 0.148859844 0.23253137628148873 0
+345 0 -1.6175487 0.165543213 0.26109075530209375 0
+351 0 -2.045452 0.114512727 0.17545652289353517 0
+356 1 -1.66799617 0.158691525 2.6557030083293012 0
+357 1 12.1684132 0.999994755 7.5672564839008544E-06 1
+359 1 2.620923 0.9321961 0.10129464764951893 1
+362 0 -2.19840622 0.0998937041 0.15183271169511453 0
+363 0 -1.04904556 0.2594084 0.43324993672264289 0
+364 0 -2.045452 0.114512727 0.17545652289353517 0
+365 0 -1.89627123 0.130531073 0.20179362469978843 0
+367 1 13.7444353 0.9999989 1.5478446880940214E-06 1
+369 0 -1.49699593 0.182874 0.29136952685596818 0
+372 0 -2.04446387 0.114612974 0.17561986216689546 0
+374 0 -1.966197 0.122797951 0.18901891314128982 0
+375 0 -1.6175487 0.165543213 0.26109075530209375 0
+380 0 -1.6175487 0.165543213 0.26109075530209375 0
+382 0 -1.98493266 0.120794 0.18572686024059626 0
+385 0 -2.27838588 0.09292892 0.14071249291381016 0
+386 1 4.234085 0.985713959 0.020759038727296567 1
+390 0 -1.85135007 0.135714456 0.21042006464991661 0
+393 0 -1.342573 0.207087249 0.33476596791809693 0
+394 0 -1.731577 0.150385961 0.23512048938573521 0
+397 0 -1.96809924 0.122593194 0.1886821979796787 0
+400 1 8.218968 0.9997305 0.00038881917268506155 1
+401 0 -1.860422 0.134653866 0.20865077658844469 0
+402 0 -1.81246018 0.140341058 0.21816369183653808 0
+403 0 -1.63434482 0.163236037 0.25710737448281606 0
+405 0 -1.78140879 0.144129261 0.22453517072067464 0
+407 0 -1.78140879 0.144129261 0.22453517072067464 0
+408 0 -1.77392912 0.145054385 0.22609544513509036 0
+410 0 -1.78140879 0.144129261 0.22453517072067464 0
+411 0
+412 1 9.160485 0.999894857 0.00015169667309156297 1
+417 0 -1.78140879 0.144129261 0.22453517072067464 0
+420 0 -1.71119952 0.1530082 0.23958008015495064 0
+421 1 12.2299891 0.9999951 7.0513059173031522E-06 1
+424 0 -1.860422 0.134653866 0.20865077658844469 0
+425 1 20.1651058 1 0 1
+426 0 -1.3227582 0.210359767 0.34073259653634197 0
+427 1 4.300104 0.9866145 0.019441567487860585 1
+431 0 -2.75610447 0.0597428158 0.088872670403648199 0
+432 0 -2.164125 0.103018656 0.15685011613830954 0
+433 0 -1.83914936 0.137151927 0.21282153695787007 0
+435 1 7.36268234 0.999365866 0.00091515190632679476 1
+437 0 -1.96809924 0.122593194 0.1886821979796787 0
+438 0 -1.880899 0.132285655 0.2047079142247506 0
+443 0 -1.58702278 0.169803187 0.26847470165960907 0
+444 0 -2.40749931 0.08260262 0.12438130785824263 0
+445 0 -1.74357152 0.148859844 0.23253137628148873 0
+446 0 -1.6175487 0.165543213 0.26109075530209375 0
+447 0 -2.06660366 0.1123854 0.17199469628631101 0
+448 0 -1.342573 0.207087249 0.33476596791809693 0
+458 0 -2.00865936 0.118296735 0.18163489276241526 0
+459 0 -1.93524909 0.126170725 0.19457665456288356 0
+460 0 -2.10963583 0.108163789 0.16514931652661516 0
+461 0 -1.5343715 0.177354991 0.2816580881405864 0
+462 0 -2.17302465 0.102199174 0.15553267206171928 0
+463 0 -1.97221267 0.12215142 0.18795598317957285 0
+468 0 -1.96809924 0.122593194 0.1886821979796787 0
+469 0 -1.78214443 0.144038543 0.22438225994148536 0
+470 0 -1.73291934 0.150214538 0.23482943239457851 0
+471 0 -2.17302465 0.102199174 0.15553267206171928 0
+472 0 -1.89049411 0.131188139 0.20288429650077927 0
+473 0 -1.96809924 0.122593194 0.1886821979796787 0
+475 0 -1.860422 0.134653866 0.20865077658844469 0
+476 0 -1.92840433 0.1269273 0.19582630620444338 0
+477 0 -1.96809924 0.122593194 0.1886821979796787 0
+478 0 -2.01690173 0.117439739 0.18023330693767575 0
+479 1 9.129778 0.999891639 0.00015634070042369144 1
+481 0 -1.77698469 0.144675866 0.2254568465498844 0
+485 0 -1.37343407 0.202065587 0.32565792737640359 0
+486 0 -1.73291934 0.150214538 0.23482943239457851 0
+488 1 1.15169907 0.7598212 0.39626817381950297 1
+490 0 -1.6175487 0.165543213 0.26109075530209375 0
+491 1 6.43022442 0.998390555 0.0023238082916929833 1
+494 0 -1.0844636 0.252662241 0.42016767992874143 0
+496 0 -1.342573 0.207087249 0.33476596791809693 0
+498 0 -2.098476 0.109245047 0.16689949442685795 0
+499 0 -2.098476 0.109245047 0.16689949442685795 0
+500 0 -2.36852455 0.08560456 0.12910989118919178 0
+503 0 -2.22533417 0.09749843 0.14799865604012269 0
+505 0 -1.81462574 0.14007999 0.21772562877705068 0
+506 1 12.0963869 0.9999944 8.083207235017858E-06 1
+508 0 -2.06660366 0.1123854 0.17199469628631101 0
+509 0 -1.74357152 0.148859844 0.23253137628148873 0
+511 0 -2.18581915 0.101031184 0.15365702371483531 0
+512 0 -2.06660366 0.1123854 0.17199469628631101 0
+515 1 10.156889 0.999961138 5.6067433641037978E-05 1
+516 0 -1.342573 0.207087249 0.33476596791809693 0
+518 0 -1.87690234 0.132745087 0.20547198672775793 0
+524 0 -2.00196 0.118997283 0.18278162694192454 0
+525 0 -1.91367817 0.128568187 0.19854031339238309 0
+526 0 -1.96809924 0.122593194 0.1886821979796787 0
+530 1 4.653802 0.9905646 0.013677053104199231 1
+536 0 -2.301516 0.09099748 0.13764380069439297 0
+537 0 -2.21493769 0.09841708 0.14946791169661081 0
+542 0 -1.95570588 0.123932526 0.19088610479236329 0
+543 0 -2.098476 0.109245047 0.16689949442685795 0
+545 0 -2.18581915 0.101031184 0.15365702371483531 0
+550 0 -2.00196 0.118997283 0.18278162694192454 0
+551 0 -1.941751 0.125455618 0.19339649381178925 0
+552 0 -2.07113647 0.111934021 0.17126122926502999 0
+553 0 -1.193924 0.232557878 0.38187014349940485 0
+554 0 -1.860422 0.134653866 0.20865077658844469 0
+555 0 -1.62672949 0.16427888 0.25890649895897555 0
+556 0 -1.76245427 0.146483228 0.22850859107163557 0
+562 0 -1.941751 0.125455618 0.19339649381178925 0
+564 0 -2.20704937 0.09911924 0.15059192808996283 0
+567 0 -1.90300918 0.129768282 0.20052849433295125 0
+568 1 1.72357225 0.8485884 0.23686312545110236 1
+570 1 5.530344 0.996051 0.0057084620367186976 1
+571 1 12.7975969 0.999997258 3.955606392614897E-06 1
+572 0 -2.00196 0.118997283 0.18278162694192454 0
+573 0 -1.78140879 0.144129261 0.22453517072067464 0
+574 1 5.131028 0.994124234 0.0085019411118018116 1
+575 0 -2.21493769 0.09841708 0.14946791169661081 0
+576 0 -2.18581915 0.101031184 0.15365702371483531 0
+579 0 -1.941751 0.125455618 0.19339649381178925 0
+580 0 -2.11355782 0.107786037 0.1645383687069783 0
+583 0 -1.860422 0.134653866 0.20865077658844469 0
+585 0 -1.6175487 0.165543213 0.26109075530209375 0
+587 0 -2.164125 0.103018656 0.15685011613830954 0
+588 1 3.97299385 0.9815305 0.026895014627516617 1
+589 0 -2.06660366 0.1123854 0.17199469628631101 0
+591 1 5.15082 0.994238734 0.008335784993429584 1
+592 1 2.59083414 0.930269361 0.10427958420150241 1
+595 0 -2.18581915 0.101031184 0.15365702371483531 0
+596 0 -2.04446387 0.114612974 0.17561986216689546 0
+597 0 -2.112893 0.107849985 0.16464177579708919 0
+598 0 -2.00196 0.118997283 0.18278162694192454 0
+599 0 -1.47923768 0.1855426 0.29608885109001915 0
+601 0 -1.5085547 0.181153089 0.28833433912434342 0
+603 1 2.79953 0.9426504 0.085205310002869877 1
+605 1 9.113677 0.999889851 0.00015892072206770877 1
+608 1 11.1271286 0.999985337 2.1154021144488122E-05 1
+610 1 4.917906 0.992738664 0.010514112839565644 1
+611 1 7.66873455 0.999533057 0.00067381337182235643 1
+615 0 -1.9749856 0.121854387 0.18746790978262812 0
+616 0 -2.00196 0.118997283 0.18278162694192454 0
+620 0 -2.00196 0.118997283 0.18278162694192454 0
+623 0 -1.6175487 0.165543213 0.26109075530209375 0
+625 0 -1.72655845 0.15102832 0.23621166606060956 0
+626 1 3.22024632 0.9615891 0.056507555310947317 1
+628 0 -1.74357152 0.148859844 0.23253137628148873 0
+630 0 -1.88439143 0.131885275 0.20404238182768375 0
+631 0 -2.18581915 0.101031184 0.15365702371483531 0
+632 0 -1.6175487 0.165543213 0.26109075530209375 0
+635 0 -1.74968672 0.148086727 0.23122152652399849 0
+636 1 9.544065 0.999928355 0.00010336527600414941 1
+637 0 -1.525428 0.178663611 0.28395487805787722 0
+640 0 -2.03707385 0.115365021 0.17684580637902217 0
+643 0 -1.6175487 0.165543213 0.26109075530209375 0
+646 0 -1.62914348 0.163947731 0.25833495461491246 0
+647 0 -1.76435113 0.146246225 0.22810804177334876 0
+648 1 11.25837 0.9999871 1.8574245861463165E-05 1
+650 0 -1.90433264 0.1296189 0.20028086228354752 0
+651 0 -1.88349569 0.13198787 0.20421289076099527 0
+655 0 -2.00196 0.118997283 0.18278162694192454 0
+658 1 8.6298 0.999821365 0.00025773902362411926 1
+659 0 -1.6175487 0.165543213 0.26109075530209375 0
+662 0 -1.76741409 0.1458642 0.22746263715138998 0
+663 0 -1.76741409 0.1458642 0.22746263715138998 0
+664 0 -2.07836151 0.111217827 0.17009821449285314 0
+666 0 -2.084568 0.110605806 0.16910510831886344 0
+667 0 -2.13997936 0.105271339 0.16047786433454483 0
+669 1 11.2461758 0.9999869 1.8918215632667518E-05 1
+671 0 -2.11648464 0.1075049 0.16408384130497666 0
+672 0 -2.045452 0.114512727 0.17545652289353517 0
+673 0 -2.11998677 0.107169338 0.1635415204810986 0
+674 0 -1.78140879 0.144129261 0.22453517072067464 0
+675 0 -1.80261636 0.141532868 0.22016519609417487 0
+676 0 -1.78214443 0.144038543 0.22438225994148536 0
+677 0 -2.06660366 0.1123854 0.17199469628631101 0
+684 0 -1.6175487 0.165543213 0.26109075530209375 0
+686 0 -1.6175487 0.165543213 0.26109075530209375 0
+687 0 -2.00984144 0.11817351 0.18143327856658856 0
+690 0 -1.76435113 0.146246225 0.22810804177334876 0
+695 0 -1.74357152 0.148859844 0.23253137628148873 0
diff --git a/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer-out.txt b/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer-out.txt
new file mode 100644
index 0000000000..3805af6e7c
--- /dev/null
+++ b/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer-out.txt
@@ -0,0 +1,51 @@
+maml.exe TrainTest test=%Data% tr=FieldAwareFactorizationMachine{d=5 shuf- norm-} col[Feature]=DupFeatures norm=No dout=%Output% data=%Data% out=%Output% seed=1 xf=Copy{col=DupFeatures:Features} xf=MinMax{col=Features col=DupFeatures}
+Not adding a normalizer.
+Warning: Skipped 16 examples with bad label/weight/features in training set
+Not training a calibrator because it is not needed.
+Warning: The predictor produced non-finite prediction values on 16 instances during testing. Possible causes: abnormal data or the predictor is numerically unstable.
+TEST POSITIVE RATIO: 0.3499 (239.0/(239.0+444.0))
+Confusion table
+ ||======================
+PREDICTED || positive | negative | Recall
+TRUTH ||======================
+ positive || 215 | 24 | 0.8996
+ negative || 7 | 437 | 0.9842
+ ||======================
+Precision || 0.9685 | 0.9479 |
+OVERALL 0/1 ACCURACY: 0.954612
+LOG LOSS/instance: 0.228754
+Test-set entropy (prior Log-Loss/instance): 0.934003
+LOG-LOSS REDUCTION (RIG): 75.508177
+AUC: 0.982029
+
+OVERALL RESULTS
+---------------------------------------
+AUC: 0.982029 (0.0000)
+Accuracy: 0.954612 (0.0000)
+Positive precision: 0.968468 (0.0000)
+Positive recall: 0.899582 (0.0000)
+Negative precision: 0.947939 (0.0000)
+Negative recall: 0.984234 (0.0000)
+Log-loss: 0.228754 (0.0000)
+Log-loss reduction: 75.508177 (0.0000)
+F1 Score: 0.932755 (0.0000)
+AUPRC: 0.980228 (0.0000)
+
+---------------------------------------
+Physical memory usage(MB): %Number%
+Virtual memory usage(MB): %Number%
+%DateTime% Time elapsed(s): %Number%
+
+--- Progress log ---
+[1] 'Normalize' started.
+[1] (%Time%) 699 examples
+[1] 'Normalize' finished in %Time%.
+[2] 'Training' started.
+[2] (%Time%) 1 iterations, 683 examples Training-loss: 0.306117119945184
+[2] (%Time%) 2 iterations, 683 examples Training-loss: 0.193084570883075
+[2] (%Time%) 3 iterations, 683 examples Training-loss: 0.173782368769797
+[2] (%Time%) 4 iterations, 683 examples Training-loss: 0.163879262610855
+[2] (%Time%) 5 iterations, 683 examples Training-loss: 0.157117446501075
+[2] 'Training' finished in %Time%.
+[3] 'Saving model' started.
+[3] 'Saving model' finished in %Time%.
diff --git a/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer-rp.txt b/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer-rp.txt
new file mode 100644
index 0000000000..4a01e926c6
--- /dev/null
+++ b/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer-rp.txt
@@ -0,0 +1,4 @@
+FieldAwareFactorizationMachine
+AUC Accuracy Positive precision Positive recall Negative precision Negative recall Log-loss Log-loss reduction F1 Score AUPRC /d /norm /shuf Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
+0.982029 0.954612 0.968468 0.899582 0.947939 0.984234 0.228754 75.50818 0.932755 0.980228 5 - - FieldAwareFactorizationMachine %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=FieldAwareFactorizationMachine{d=5 shuf- norm-} col[Feature]=DupFeatures norm=No dout=%Output% data=%Data% out=%Output% seed=1 xf=Copy{col=DupFeatures:Features} xf=MinMax{col=Features col=DupFeatures} /d:5;/norm:-;/shuf:-
+
diff --git a/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer.txt b/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer.txt
new file mode 100644
index 0000000000..e142c5c952
--- /dev/null
+++ b/test/BaselineOutput/SingleRelease/FieldAwareFactorizationMachine/FieldAwareFactorizationMachine-TrainTest-breast-cancer.txt
@@ -0,0 +1,700 @@
+Instance Label Score Probability Log-loss Assigned
+0 0 -2.79334736 0.0576847345 0.085718279282943594 0
+1 0 1.188056 0.7663932 2.0978457302965743 1
+2 0 -2.51387787 0.074891 0.11230473674761222 0
+3 0 2.8827734 0.9469883 4.2375449640754628 1
+4 0 -2.73850226 0.0607392974 0.090402444870538778 0
+5 1 17.67403 1 0 1
+6 0 -1.42012978 0.194641247 0.31229651002770953 0
+7 0 -2.48911548 0.07662476 0.11501104509053926 0
+8 0 -3.05736852 0.0449004173 0.066276932485278203 0
+9 0 -2.49013543 0.07655262 0.11489834233223327 0
+10 0 -2.08552837 0.110511363 0.16895191862826214 0
+11 0 -2.303132 0.0908639 0.1374318066173198 0
+12 1 -1.27195454 0.218922853 2.1915055288260206 0
+13 0 -2.25197268 0.09517944 0.14429638095999855 0
+14 1 8.572502 0.9998109 0.00027287628034393304 1
+15 1 -1.27670574 0.218111485 2.196862353947556 0
+16 0 -2.546958 0.07263111 0.10878476978943961 0
+17 0 -2.70365119 0.06275825 0.09350687664921721 0
+18 1 6.88258553 0.998975635 0.0014786043638188881 1
+19 0 -2.87238669 0.0535355881 0.079379834832595766 0
+20 1 3.6811657 0.975425541 0.035896345222225338 1
+21 1 6.199198 0.997973 0.0029272747638287261 1
+22 0 -2.43037367 0.08088568 0.12168377661511501 0
+23 1
+24 0 -2.37061882 0.08544078 0.12885149942120921 0
+25 1 0.301486969 0.574806 0.79885303657013707 1
+26 0 -2.121753 0.107000455 0.16326865504132451 0
+27 0 -2.652885 0.06581142 0.098214282791589516 0
+28 0 -2.303132 0.0908639 0.1374318066173198 0
+29 0 -2.03972268 0.115094975 0.17640547222344355 0
+30 0 -2.15111279 0.104227282 0.15879536747436127 0
+31 0 -2.49228716 0.07640065 0.11466094104662786 0
+32 1 4.41975975 0.988106 0.017262260285248437 1
+33 0 -2.54244328 0.0729358 0.10925884045202583 0
+34 0 -2.4251883 0.08127202 0.12229032950777963 0
+35 0 -2.303132 0.0908639 0.1374318066173198 0
+36 1 11.464077 0.9999895 1.5134552659953847E-05 1
+37 0 -2.90807438 0.05175586 0.076669544650713642 0
+38 1 5.226836 0.9946582 0.0077272859162861246 1
+39 1 0.192784309 0.548047364 0.86762751464658039 1
+40 0
+41 1 -0.340168953 0.415768445 1.266147828359969 0
+42 1 8.853592 0.9998572 0.00020604992936022588 1
+43 1 -2.117641 0.107394 3.2190146693586854 0
+44 1 9.450965 0.999921441 0.0001133410189510066 1
+45 0 -2.40569448 0.0827394947 0.12459657233005934 0
+46 1 5.376622 0.995397866 0.0066548011445776789 1
+47 0 -2.16523314 0.1029163 0.15668549713042432 0
+48 0 -2.73850226 0.0607392974 0.090402444870538778 0
+49 1 5.89553928 0.997255862 0.0039643965487975831 1
+50 1 -0.227107048 0.443465978 1.1731046676718506 0
+51 1 -0.8415785 0.301202446 1.7311946079503711 0
+52 1 3.04452515 0.9545456 0.067113950169056663 1
+53 1 3.61555958 0.9738029 0.038298262118380257 1
+54 1 3.77800655 0.977643 0.032620334610649768 1
+55 1 3.194067 0.9606104 0.057976682417311451 1
+56 1 7.91791248 0.999635935 0.00052533064883456481 1
+57 1 -2.86445379 0.0539389737 4.2125281183154604 0
+58 1 -0.969594 0.274961442 1.8626987732996008 0
+59 1 -0.6607013 0.3405821 1.553925470966943 0
+60 1 1.0063467 0.732304633 0.44948417232333177 1
+61 0 -2.1292522 0.106286012 0.16211488941610727 0
+62 1 6.41300774 0.9983626 0.0023642038022797952 1
+63 1 -1.33857679 0.207744211 2.2671198197586828 0
+64 0 -2.16523314 0.1029163 0.15668549713042432 0
+65 1 -2.526939 0.073991105 3.7565043460709084 0
+66 0 -2.70365119 0.06275825 0.09350687664921721 0
+67 1 1.78247547 0.8560023 0.22431347044655056 1
+68 1 7.800373 0.9995906 0.00059079536563381421 1
+69 0 -2.402561 0.08297762 0.12497115587899781 0
+70 0 -2.56230116 0.0716044158 0.10718843415121884 0
+71 1 3.8944397 0.9800513 0.029070857780294054 1
+72 0 -2.60696554 0.06869147 0.10266890201899616 0
+73 1 8.50068 0.9997967 0.00029334623238004185 1
+74 1 -0.107892036 0.473053157 1.0799257855156403 0
+75 0 -2.46654248 0.07823721 0.11753257018815799 0
+76 0 -2.22932768 0.0971476 0.14743794057217838 0
+77 0 -2.47708964 0.077479966 0.11634785291703036 0
+78 0 -2.41123319 0.08232011 0.1239371005276802 0
+79 0 -2.58705831 0.06997599 0.10466012826020632 0
+80 0 -2.76758575 0.0591011234 0.087888417643977923 0
+81 0 -2.58375478 0.07019129 0.10499414907116332 0
+82 0 -2.69794416 0.06309478 0.094024986527322621 0
+83 0 -3.1041286 0.042937275 0.06331461422116523 0
+84 1 8.480363 0.9997925 0.00029936686178311424 1
+85 1 3.62424469 0.9740235 0.0379714836237331 1
+86 1 0.873991 0.705575466 0.50312769764048526 1
+87 1 6.5370903 0.998553336 0.0020886065803920047 1
+88 0 -2.70365119 0.06275825 0.09350687664921721 0
+89 0 -2.64684486 0.06618374 0.098789382822258742 0
+90 0 -2.37061882 0.08544078 0.12885149942120921 0
+91 0 -2.31983972 0.08949312 0.13525817349750544 0
+92 0 -2.70365119 0.06275825 0.09350687664921721 0
+93 0 -2.16523314 0.1029163 0.15668549713042432 0
+94 0 -2.49228716 0.07640065 0.11466094104662786 0
+95 0 -2.37061882 0.08544078 0.12885149942120921 0
+96 0 -2.19050217 0.10060665 0.15297587812753632 0
+97 0 -2.79334736 0.0576847345 0.085718279282943594 0
+98 1 5.520876 0.996013641 0.0057625933731902997 1
+99 1 11.3165894 0.999987841 1.7542337039908934E-05 1
+100 1 1.017416 0.734468937 0.44522661908047623 1
+101 1 -0.5716233 0.360862345 1.4704794870706024 0
+102 0 -2.66489172 0.0650770739 0.097080658952617693 0
+103 1 -2.88350773 0.05297488 4.2385477822371218 0
+104 1 13.1776581 0.9999981 2.751725038055267E-06 1
+105 1 -1.78037786 0.144256487 2.7932918938163764 0
+106 1 12.0229883 0.99999404 8.5991581706542968E-06 1
+107 1 7.51091671 0.999453247 0.00078901414592929902 1
+108 0 -2.25749826 0.0947046354 0.14353952759458233 0
+109 1 5.029805 0.993502438 0.0094045870149739464 1
+110 0 -2.56023145 0.07174213 0.10740245681394917 0
+111 1 1.76195621 0.85345453 0.22861380249667038 1
+112 1 5.949276 0.9973991 0.0037572056709905387 1
+113 1 9.109059 0.9998894 0.00015960872861869104 1
+114 0 -2.12039614 0.107130177 0.16347824413516199 0
+115 0 -1.8000406 0.14184612 0.22069172747506174 0
+116 0 -1.96252012 0.123194568 0.18967135852054179 0
+117 1 7.06990051 0.9991504 0.0012262413366360865 1
+118 0 -2.46007347 0.07870501 0.11826493158533567 0
+119 0 -2.44013214 0.0801631659 0.1205501245335563 0
+120 0 -2.431875 0.08077414 0.12150871514409342 0
+121 0 -2.50224543 0.07570092 0.11356834207574736 0
+122 1 10.23144 0.999964 5.1939695512002597E-05 1
+123 1 0.452518463 0.6112378 0.71019427344485664 1
+124 1 5.87042236 0.997186244 0.0040651143062878699 1
+125 0 -2.16523314 0.1029163 0.15668549713042432 0
+126 1 5.55969334 0.9961648 0.0055436631836815291 1
+127 0 -2.60329771 0.06892648 0.10303300888915028 0
+128 1 2.87807369 0.9467519 0.07894169481269693 1
+129 0 -4.29203844 0.0134924809 0.019598047692599548 0
+130 0 -2.56230116 0.0716044158 0.10718843415121884 0
+131 0 -2.49228716 0.07640065 0.11466094104662786 0
+132 1 11.527173 0.9999901 1.4274630640975531E-05 1
+133 0 -2.447955 0.0795882344 0.11964866931231606 0
+134 0 -2.709362 0.0624231733 0.092991182628501673 0
+135 0 -2.57368875 0.07085109 0.10601826253397174 0
+136 0 -2.546958 0.07263111 0.10878476978943961 0
+137 0 -2.253997 0.095005244 0.14401866240282399 0
+138 0 -2.52397871 0.0741941854 0.11121847131930589 0
+139 0
+140 0 -2.253997 0.095005244 0.14401866240282399 0
+141 0 -2.11052465 0.108078077 0.16501067045825779 0
+142 1 1.40640354 0.8031981 0.31617223990371063 1
+143 0 -1.8000406 0.14184612 0.22069172747506174 0
+144 0 -2.303132 0.0908639 0.1374318066173198 0
+145 0
+146 1 -0.608948231 0.352299154 1.5051270852918262 0
+147 0 -2.410581 0.08236938 0.12401456162100966 0
+148 0 -3.790462 0.0220863409 0.032221001014681734 0
+149 1 11.1856136 0.9999862 1.9950125438339949E-05 1
+150 0 -2.08552837 0.110511363 0.16895191862826214 0
+151 1 0.7081995 0.6700032 0.57776016109760775 1
+152 1 12.8261318 0.999997258 3.955606392614897E-06 1
+153 0 -2.22495723 0.0975316 0.14805168093637969 0
+154 0 -1.95639491 0.123857722 0.19076292420475424 0
+155 1 2.53439236 0.9265179 0.11010924008025842 1
+156 0 -2.179071 0.10164573 0.15464360520785025 0
+157 0 -2.49228716 0.07640065 0.11466094104662786 0
+158 0
+159 1 15.6741524 0.9999999 1.7198266111377426E-07 1
+160 1 9.474756 0.9999232 0.00011076107881739477 1
+161 0 -2.68982267 0.06357657 0.094767065231191869 0
+162 0 -2.60329771 0.06892648 0.10303300888915028 0
+163 0 -2.98970175 0.0478932858 0.070804811806733092 0
+164 0
+165 0 -2.566331 0.07133699 0.10677292670868219 0
+166 1 8.84706 0.999856234 0.00020742598774193681 1
+167 1 4.890644 0.992539465 0.010803626944692387 1
+168 0 -2.60329771 0.06892648 0.10303300888915028 0
+169 0 -1.76260591 0.146464273 0.22847655310970594 0
+170 0 -2.253997 0.095005244 0.14401866240282399 0
+171 0 -2.37061882 0.08544078 0.12885149942120921 0
+172 0 -2.16523314 0.1029163 0.15668549713042432 0
+173 1 22.0507126 1 0 1
+174 1 4.101033 0.9837141 0.023689008090051845 1
+175 1 8.803769 0.9998498 0.00021671441614871164 1
+176 0 -2.49228716 0.07640065 0.11466094104662786 0
+177 1 5.009384 0.9933693 0.0095979609700506362 1
+178 0 -2.70365119 0.06275825 0.09350687664921721 0
+179 1 -0.333506584 0.417387664 1.2605401318838831 0
+180 0 -2.08552837 0.110511363 0.16895191862826214 0
+181 0 -1.95639491 0.123857722 0.19076292420475424 0
+182 0 -2.87238669 0.0535355881 0.079379834832595766 0
+183 1 11.7801619 0.9999924 1.1006931643385188E-05 1
+184 1 4.80740261 0.991897166 0.011737536892369548 1
+185 0 -2.212134 0.09866614 0.14986650442098268 0
+186 1 5.877905 0.9972071 0.0040349327336088392 1
+187 1 12.8788414 0.9999975 3.6116401888020084E-06 1
+188 1 10.4632988 0.9999714 4.1276426673909545E-05 1
+189 0 -2.33616543 0.08817172 0.13316594116300465 0
+190 1 18.1290474 1 0 1
+191 1 13.2267113 0.9999982 2.5797420694119618E-06 1
+192 0 -2.652885 0.06581142 0.098214282791589516 0
+193 0 -2.37061882 0.08544078 0.12885149942120921 0
+194 0 -2.60329771 0.06892648 0.10303300888915028 0
+195 0 -2.70365119 0.06275825 0.09350687664921721 0
+196 0 4.31835938 0.986853361 6.249162189120602 1
+197 0 -2.78983569 0.0578759126 0.086011004864571042 0
+198 0 -1.95639491 0.123857722 0.19076292420475424 0
+199 0 -2.43037367 0.08088568 0.12168377661511501 0
+200 1 11.15968 0.9999858 2.0466080617959804E-05 1
+201 1 8.401458 0.999775469 0.00032396569438284837 1
+202 0 -2.37061882 0.08544078 0.12885149942120921 0
+203 0 -2.79334736 0.0576847345 0.085718279282943594 0
+204 0 -2.37061882 0.08544078 0.12885149942120921 0
+205 1 17.8429222 1 0 1
+206 1 6.46274567 0.998442 0.0022494800623383254 1
+207 0 -2.08552837 0.110511363 0.16895191862826214 0
+208 0 -2.08552837 0.110511363 0.16895191862826214 0
+209 0 -2.52800727 0.07391794 0.11078805913818714 0
+210 1 21.9436874 1 0 1
+211 1 13.0268564 0.999997854 3.0956910368479058E-06 1
+212 0 -2.37061882 0.08544078 0.12885149942120921 0
+213 1 22.5284481 1 0 1
+214 1 20.4712334 1 0 1
+215 1 8.525514 0.9998017 0.00028612151026144098 1
+216 0 -2.16523314 0.1029163 0.15668549713042432 0
+217 0 -2.37061882 0.08544078 0.12885149942120921 0
+218 1 9.455612 0.9999218 0.00011282503055519205 1
+219 0 -2.714654 0.0621141754 0.092515790827243469 0
+220 0 -2.43490982 0.08054909 0.12115554621880931 0
+221 1 9.159876 0.999894857 0.00015169667309156297 1
+222 1 -1.80774 0.14091149 2.8271388431147284 0
+223 1 3.70435333 0.9759753 0.035083495285141632 1
+224 1 9.788629 0.999944 8.0834110428232468E-05 1
+225 0 -2.16523314 0.1029163 0.15668549713042432 0
+226 1 10.3669252 0.9999685 4.5404134294015166E-05 1
+227 1 8.810473 0.999850869 0.00021516634059471509 1
+228 0 -2.08552837 0.110511363 0.16895191862826214 0
+229 1 17.6545982 1 0 1
+230 1 4.3147707 0.9868068 0.019160423251940339 1
+231 1 9.426738 0.9999194 0.00011626495667983538 1
+232 0 -0.812353134 0.3073893 0.52988339666190787 0
+233 1 3.34824562 0.9660473 0.049834285709967709 1
+234 0 -2.38382244 0.08441466 0.12723373315285244 0
+235 0
+236 1 10.7871552 0.9999794 2.9753305404609972E-05 1
+237 1 6.06969929 0.9976935 0.0033314498282712543 1
+238 1 16.8129368 1 0 1
+239 1 2.5463686 0.9273292 0.10884653830325543 1
+240 0 -2.16806126 0.102655485 0.15626611390641998 0
+241 0 -2.40176368 0.08303831 0.12506663099210433 0
+242 0 -2.49228716 0.07640065 0.11466094104662786 0
+243 0 -2.34700274 0.0873043 0.13179416296824689 0
+244 0 -2.37061882 0.08544078 0.12885149942120921 0
+245 0 -2.65912771 0.06542865 0.097623286813018989 0
+246 1 17.1139336 1 0 1
+247 1 1.33216 0.7911977 0.33788983133296008 1
+248 0 -2.59526634 0.0694436952 0.10383464946038788 0
+249 0
+250 0 -2.00470734 0.118709572 0.18231055874000973 0
+251 1 6.54015732 0.998557866 0.0020820617863629202 1
+252 0 2.0749712 0.8884466 3.1641939848631071 1
+253 1 8.853592 0.9998572 0.00020604992936022588 1
+254 1 6.41300774 0.9983626 0.0023642038022797952 1
+255 1 3.7826376 0.977744043 0.032471253848047714 1
+256 0 -2.253997 0.095005244 0.14401866240282399 0
+257 0 -2.43037367 0.08088568 0.12168377661511501 0
+258 0 -2.60329771 0.06892648 0.10303300888915028 0
+259 0 2.46353149 0.9215453 3.6719968033606416 1
+260 1 8.69649 0.999832869 0.0002411398280924362 1
+261 1 14.626255 0.9999995 6.8793076746672365E-07 1
+262 1 11.909255 0.9999933 9.6310605954860058E-06 1
+263 1 7.177536 0.999237061 0.0011011090587247259 1
+264 1 3.13080788 0.958145857 0.061682803281407773 1
+265 0 -2.517743 0.0746236444 0.111887857908842 0
+266 1 7.81477261 0.9995964 0.00058236478868097896 1
+267 1 0.570395947 0.638854563 0.64644055930061062 1
+268 1 7.142889 0.9992101 0.0011400073388397399 1
+269 0 -2.37061882 0.08544078 0.12885149942120921 0
+270 1 5.226963 0.9946589 0.0077262484789402371 1
+271 0 -2.79334736 0.0576847345 0.085718279282943594 0
+272 1 0.570395947 0.638854563 0.64644055930061062 1
+273 1 -1.541533 0.176312521 2.5037931614232587 0
+274 0 -2.60472631 0.06883486 0.10289105134144588 0
+275 0
+276 0 -2.43037367 0.08088568 0.12168377661511501 0
+277 0 -2.16523314 0.1029163 0.15668549713042432 0
+278 0 -2.37061882 0.08544078 0.12885149942120921 0
+279 1 3.33338261 0.9655565 0.050567409752020474 1
+280 0 -2.60329771 0.06892648 0.10303300888915028 0
+281 0 -2.54244328 0.0729358 0.10925884045202583 0
+282 1 3.29053974 0.964102864 0.052741012904588061 1
+283 1 3.700367 0.9758816 0.035222007864660711 1
+284 1 7.36733246 0.999368846 0.00091084961823976909 1
+285 1 20.352438 1 0 1
+286 1 21.9562721 1 0 1
+287 0 -2.709362 0.0624231733 0.092991182628501673 0
+288 1 -0.586700439 0.357392281 1.4844196184802794 0
+289 1 8.823527 0.9998528 0.0002124142103781059 1
+290 0 -1.95639491 0.123857722 0.19076292420475424 0
+291 0 -2.37061882 0.08544078 0.12885149942120921 0
+292 1
+293 1 3.77584267 0.9775957 0.032690174791706848 1
+294 0
+295 1 6.63497543 0.998688161 0.0018938255499461618 1
+296 0 0.0507936478 0.51269567 1.0371050534420929 1
+297 0
+298 0 -3.44531822 0.0309087858 0.045295631375104138 0
+299 1 3.52736 0.971456349 0.041778923210622917 1
+300 1 6.63810539 0.9986922 0.00188797047076344 1
+301 0 -2.37061882 0.08544078 0.12885149942120921 0
+302 1 21.63637 1 0 1
+303 0 -2.37061882 0.08544078 0.12885149942120921 0
+304 1 3.82702637 0.97868973 0.031076534032479262 1
+305 1 9.007427 0.999877453 0.00017680899902768678 1
+306 0 -2.37061882 0.08544078 0.12885149942120921 0
+307 0 -2.37061882 0.08544078 0.12885149942120921 0
+308 1 6.49249649 0.9984875 0.0021837677951713447 1
+309 0 -2.367922 0.08565173 0.12918431538000874 0
+310 0 -2.58705831 0.06997599 0.10466012826020632 0
+311 0 -1.95639491 0.123857722 0.19076292420475424 0
+312 1 0.721348763 0.672903955 0.571527494298949 1
+313 0 -1.95639491 0.123857722 0.19076292420475424 0
+314 0 -1.876097 0.132837832 0.20562627776187412 0
+315 0
+316 1 1.90932846 0.870943666 0.19934868964045313 1
+317 1 11.0919495 0.999984741 2.2013947263955502E-05 1
+318 0 -3.22425485 0.0382631 0.056285823149524759 0
+319 0 -0.5100851 0.375173569 0.67847261153874627 0
+320 1 3.40061569 0.9677238 0.047332770769414431 1
+321 0
+322 0 -2.60329771 0.06892648 0.10303300888915028 0
+323 1 4.2277 0.985623837 0.020890948016973505 1
+324 0 -2.37061882 0.08544078 0.12885149942120921 0
+325 0 -2.33610034 0.08817695 0.13317421658132386 0
+326 1 0.397289276 0.59803617 0.74169535175171442 1
+327 0 -2.16523314 0.1029163 0.15668549713042432 0
+328 1 3.29097939 0.964118063 0.052718268842774099 1
+329 1 4.33795357 0.98710525 0.018724174263564545 1
+330 1 2.89053345 0.947376549 0.077990134080806572 1
+331 0 -2.99748755 0.0475395061 0.070268841071409238 0
+332 0 -2.44493961 0.0798094 0.11999537191367729 0
+333 1 3.38947582 0.967374 0.047854291800112088 1
+334 1 5.177353 0.994388759 0.0081181070550104366 1
+335 0 -1.95639491 0.123857722 0.19076292420475424 0
+336 1 4.698082 0.9909696 0.013087296874456903 1
+337 0 -2.37061882 0.08544078 0.12885149942120921 0
+338 0 -1.876097 0.132837832 0.20562627776187412 0
+339 1 4.343502 0.987175643 0.018621295533697572 1
+340 1 3.39017 0.9673959 0.047821668989543743 1
+341 0 -2.37061882 0.08544078 0.12885149942120921 0
+342 0 -2.11052465 0.108078077 0.16501067045825779 0
+343 0 -1.95639491 0.123857722 0.19076292420475424 0
+344 1 7.7177906 0.9995553 0.00064172398023939646 1
+345 0 -1.95639491 0.123857722 0.19076292420475424 0
+346 0 -2.565462 0.07139457 0.10686237809729705 0
+347 0 -1.79727888 0.142182633 0.22125757090956827 0
+348 1 -1.28854942 0.216098443 2.2102394195203448 0
+349 1 1.21367073 0.770947754 0.37529500004713245 1
+350 0 -2.57519341 0.0707521 0.10586457082203461 0
+351 0 -2.49228716 0.07640065 0.11466094104662786 0
+352 0 -1.23204327 0.225824 0.36926650823776686 0
+353 1 11.2146692 0.9999865 1.9434170443242565E-05 1
+354 0 -2.16523314 0.1029163 0.15668549713042432 0
+355 0 -2.8185854 0.0563280769 0.083642715320436059 0
+356 1 -2.201137 0.09964843 3.3270091010964316 0
+357 1 14.1900883 0.9999993 1.031896274211761E-06 1
+358 1 5.223918 0.9946426 0.0077498503631306228 1
+359 1 2.35608578 0.9134167 0.13065495391893572 1
+360 1 21.4813557 1 0 1
+361 1 5.294572 0.9950063 0.0072224014614089942 1
+362 0 -2.63699365 0.06679519 0.099734355600720456 0
+363 0 -1.5586791 0.173836261 0.27550035442459786 0
+364 0 -2.49228716 0.07640065 0.11466094104662786 0
+365 0 -2.303132 0.0908639 0.1374318066173198 0
+366 1 19.947937 1 0 1
+367 1 15.1915445 0.999999762 3.4396534272948301E-07 1
+368 0 -2.03972268 0.115094975 0.17640547222344355 0
+369 0 -1.93949687 0.125703141 0.19380487905648439 0
+370 0 -2.61506629 0.0681750551 0.10186914304032915 0
+371 0 -2.03972268 0.115094975 0.17640547222344355 0
+372 0 -2.52397871 0.0741941854 0.11121847131930589 0
+373 0 -2.67554665 0.06443181 0.096085283332132315 0
+374 0 -2.4251883 0.08127202 0.12229032950777963 0
+375 0 -1.95639491 0.123857722 0.19076292420475424 0
+376 0 -2.16523314 0.1029163 0.15668549713042432 0
+377 0 -1.876097 0.132837832 0.20562627776187412 0
+378 0 -2.79220486 0.0577468649 0.085813404732194079 0
+379 0 -2.74447179 0.06039962 0.089880800720694584 0
+380 0 -1.95639491 0.123857722 0.19076292420475424 0
+381 1 8.746121 0.999841 0.00022944310035113074 1
+382 0 -2.4639585 0.07842376 0.11782457436958683 0
+383 0 -2.11052465 0.108078077 0.16501067045825779 0
+384 0 -2.11052465 0.108078077 0.16501067045825779 0
+385 0 -2.79314542 0.05769571 0.085735081775246871 0
+386 1 4.176774 0.984884 0.021974246797932556 1
+387 0 -2.563189 0.0715454146 0.10709675129529037 0
+388 0 -2.33597 0.08818744 0.13319081471442681 0
+389 0 -2.77166653 0.0588746034 0.087541132766088936 0
+390 0 -2.24443483 0.09583059 0.14533498514227941 0
+391 1 12.28669 0.999995351 6.7073389754153415E-06 1
+392 0 -2.43037367 0.08088568 0.12168377661511501 0
+393 0 -1.663213 0.159331158 0.25039049156336257 0
+394 0 -2.18103456 0.101466566 0.15435590868043605 0
+395 0 -2.43037367 0.08088568 0.12168377661511501 0
+396 0 -2.60329771 0.06892648 0.10303300888915028 0
+397 0 -2.386812 0.0841838941 0.12687015785045277 0
+398 0 -2.39560652 0.0835083351 0.12580633468481714 0
+399 0 -2.0224514 0.116865739 0.17929531088912914 0
+400 1 9.442846 0.9999207 0.00011437299629627494 1
+401 0 -2.253997 0.095005244 0.14401866240282399 0
+402 0 -2.32173133 0.0893391 0.13501415250433074 0
+403 0 -2.04644132 0.114412457 0.17529316569822423 0
+404 0 -2.143785 0.104913421 0.15990085822156069 0
+405 0 -2.16523314 0.1029163 0.15668549713042432 0
+406 0 -2.44775438 0.07960292 0.11967168757719346 0
+407 0 -2.16523314 0.1029163 0.15668549713042432 0
+408 0 -2.22424483 0.09759433 0.14815195953491689 0
+409 0 -2.4251883 0.08127202 0.12229032950777963 0
+410 0 -2.16523314 0.1029163 0.15668549713042432 0
+411 0
+412 1 10.2703123 0.9999653 5.0047819483104426E-05 1
+413 0 -2.73679447 0.0608368 0.090552214775961787 0
+414 1 5.42697239 0.9956228 0.0063288062748059282 1
+415 0 -1.37955284 0.201080829 0.32387854587592818 0
+416 1 7.32360268 0.9993406 0.00095163582501576051 1
+417 0 -2.16523314 0.1029163 0.15668549713042432 0
+418 0 -2.48652363 0.07680834 0.11529790529919315 0
+419 0 -2.92190623 0.05108122 0.075643488086837282 0
+420 0 -2.17014742 0.102463476 0.15595744686821872 0
+421 1 14.1832743 0.9999993 1.031896274211761E-06 1
+422 0 -2.21935058 0.09802621 0.14884258107085416 0
+423 0 -2.56230116 0.0716044158 0.10718843415121884 0
+424 0 -2.253997 0.095005244 0.14401866240282399 0
+425 1 23.2536659 1 0 1
+426 0 -1.97403908 0.121955715 0.18763438978981944 0
+427 1 3.93143272 0.9807618 0.028025268445269182 1
+428 0 -2.16523314 0.1029163 0.15668549713042432 0
+429 0 -2.303132 0.0908639 0.1374318066173198 0
+430 0 -2.1786294 0.10168606 0.15470837385047881 0
+431 0 -3.24330187 0.0375683233 0.055243967912172835 0
+432 0 -2.63995266 0.06661098 0.099449594195521407 0
+433 0 -2.278904 0.09288526 0.14064305278762584 0
+434 0 4.94386959 0.9929235 7.1427479209872091 1
+435 1 7.12867451 0.999198854 0.0011562726386102314 1
+436 1 4.3324194 0.9870346 0.018827408813728724 1
+437 0 -2.386812 0.0841838941 0.12687015785045277 0
+438 0 -2.38723946 0.08415094 0.12681824612014014 0
+439 0 -2.50897 0.07523173 0.11283619840279428 0
+440 1 6.34583759 0.998249054 0.0025282952766864627 1
+441 0 -1.81499052 0.140036061 0.21765193123843524 0
+442 0 -2.020601 0.117056854 0.17960755137882647 0
+443 0 -1.93714356 0.125962 0.19423209736428568 0
+444 0 -2.91968775 0.0511888638 0.075807152007862255 0
+445 0 -2.11052465 0.108078077 0.16501067045825779 0
+446 0 -1.95639491 0.123857722 0.19076292420475424 0
+447 0 -2.50897 0.07523173 0.11283619840279428 0
+448 0 -1.663213 0.159331158 0.25039049156336257 0
+449 1 13.44416 0.999998569 2.0637932864940443E-06 1
+450 0 -2.347867 0.08723546 0.13168534659555281 0
+451 0 -2.50897 0.07523173 0.11283619840279428 0
+452 0 -2.37676024 0.08496209 0.12809658311879946 0
+453 1 7.50886631 0.999452055 0.00079073491429955357 1
+454 0 -2.37708616 0.08493676 0.12805664400028163 0
+455 1 -1.54050016 0.176462576 2.5025658459084399 0
+456 1 10.2714491 0.9999654 4.9875830876224796E-05 1
+457 1 9.928566 0.999951243 7.0342619050273796E-05 1
+458 0 -2.483977 0.07698911 0.11558042055485453 0
+459 0 -2.44994617 0.0794424862 0.11942023498557051 0
+460 0 -2.57519341 0.0707521 0.10586457082203461 0
+461 0 -1.95080662 0.124465436 0.19176396054905676 0
+462 0 -2.65842414 0.06547169 0.097689720656269172 0
+463 0 -2.40528941 0.0827702358 0.12464492366430695 0
+464 0 -2.386812 0.0841838941 0.12687015785045277 0
+465 1 11.50494 0.999989867 1.4618599387059818E-05 1
+466 1 8.586574 0.9998135 0.00026909195127407725 1
+467 1 6.92544365 0.999018431 0.0014168006049970199 1
+468 0 -2.386812 0.0841838941 0.12687015785045277 0
+469 0 -2.1956358 0.100143082 0.15223247204165077 0
+470 0 -2.15111279 0.104227282 0.15879536747436127 0
+471 0 -2.65842414 0.06547169 0.097689720656269172 0
+472 0 -2.35197687 0.0869087651 0.13116907530431268 0
+473 0 -2.386812 0.0841838941 0.12687015785045277 0
+474 0 -2.50897 0.07523173 0.11283619840279428 0
+475 0 -2.253997 0.095005244 0.14401866240282399 0
+476 0 -2.379613 0.08474059 0.12774738819728162 0
+477 0 -2.386812 0.0841838941 0.12687015785045277 0
+478 0 -2.45095563 0.0793686956 0.11930459528595055 0
+479 1 9.896286 0.9999497 7.2578504258149067E-05 1
+480 0 -2.23553848 0.09660421 0.14656990931584829 0
+481 0 -2.2461 0.0956864059 0.14510494370298066 0
+482 1 19.90526 1 0 1
+483 1 13.2226048 0.9999982 2.5797420694119618E-06 1
+484 0 -2.483977 0.07698911 0.11558042055485453 0
+485 0 -1.75304985 0.147662938 0.23050402825087019 0
+486 0 -2.15111279 0.104227282 0.15879536747436127 0
+487 1 16.2562943 0.9999999 1.7198266111377426E-07 1
+488 1 0.896533 0.710236549 0.49362849030318379 1
+489 1 -2.11052513 0.108078033 3.2098547752295934 0
+490 0 -1.95639491 0.123857722 0.19076292420475424 0
+491 1 6.42460346 0.9983815 0.0023369001030092021 1
+492 0 -2.27263451 0.09341486 0.14148557773460221 0
+493 1 13.9223871 0.999999046 1.3758618629646341E-06 1
+494 0 -1.544085 0.175942212 0.27918258388346984 0
+495 0 -2.15111279 0.104227282 0.15879536747436127 0
+496 0 -1.663213 0.159331158 0.25039049156336257 0
+497 0 -2.34868431 0.08717041 0.13158253208596138 0
+498 0 -2.546958 0.07263111 0.10878476978943961 0
+499 0 -2.546958 0.07263111 0.10878476978943961 0
+500 0 -2.87238669 0.0535355881 0.079379834832595766 0
+501 0 -2.546958 0.07263111 0.10878476978943961 0
+502 0 -2.58375478 0.07019129 0.10499414907116332 0
+503 0 -2.70365119 0.06275825 0.09350687664921721 0
+504 0 -1.95639491 0.123857722 0.19076292420475424 0
+505 0 -2.207967 0.09903733 0.15046075844804158 0
+506 1 13.1352835 0.999998 2.9237080272005804E-06 1
+507 0 -1.85896659 0.134823546 0.20893369205937654 0
+508 0 -2.50897 0.07523173 0.11283619840279428 0
+509 0 -2.11052465 0.108078077 0.16501067045825779 0
+510 0 -1.95639491 0.123857722 0.19076292420475424 0
+511 0 -2.652885 0.06581142 0.098214282791589516 0
+512 0 -2.50897 0.07523173 0.11283619840279428 0
+513 0 -2.15111279 0.104227282 0.15879536747436127 0
+514 1 13.1494493 0.9999981 2.751725038055267E-06 1
+515 1 11.2234116 0.999986649 1.926218548588174E-05 1
+516 0 -1.663213 0.159331158 0.25039049156336257 0
+517 0 -1.876097 0.132837832 0.20562627776187412 0
+518 0 -2.3127768 0.09007031 0.1361730175390829 0
+519 1 4.88246536 0.9924787 0.010891913451001586 1
+520 0 -2.249304 0.09540951 0.14466326997414256 0
+521 0 -2.66490865 0.0650760457 0.097079072351726234 0
+522 1 2.02141762 0.883027554 0.17946963916334691 1
+523 1 6.809782 0.998898268 0.0015903398846003496 1
+524 0 -2.43037367 0.08088568 0.12168377661511501 0
+525 0 -2.31983972 0.08949312 0.13525817349750544 0
+526 0 -2.386812 0.0841838941 0.12687015785045277 0
+527 0 -2.70365119 0.06275825 0.09350687664921721 0
+528 0 -2.51398182 0.0748837963 0.11229350114078318 0
+529 0 -2.27263451 0.09341486 0.14148557773460221 0
+530 1 4.92369843 0.992780268 0.010453653133861311 1
+531 0 -2.44775438 0.07960292 0.11967168757719346 0
+532 0 -2.08552837 0.110511363 0.16895191862826214 0
+533 0 -2.43037367 0.08088568 0.12168377661511501 0
+534 0 -2.303132 0.0908639 0.1374318066173198 0
+535 0 -2.41980267 0.08167506 0.12292336799782667 0
+536 0 -2.79334736 0.0576847345 0.085718279282943594 0
+537 0 -2.73679447 0.0608368 0.090552214775961787 0
+538 0 -2.546958 0.07263111 0.10878476978943961 0
+539 0 -2.74815464 0.0601909533 0.089560439570368855 0
+540 0 -2.543486 0.0728653148 0.10914915993929039 0
+541 0 -2.253997 0.095005244 0.14401866240282399 0
+542 0 -2.38339472 0.08444773 0.12728583594126508 0
+543 0 -2.546958 0.07263111 0.10878476978943961 0
+544 0 -2.34906387 0.08714021 0.13153480725866068 0
+545 0 -2.652885 0.06581142 0.098214282791589516 0
+546 1 18.4379044 1 0 1
+547 0 -1.82228041 0.139160469 0.21618376511481502 0
+548 0 -1.97069025 0.122314766 0.18822445904268387 0
+549 1 5.37553024 0.9953929 0.0066619714409914422 1
+550 0 -2.43037367 0.08088568 0.12168377661511501 0
+551 0 -2.37061882 0.08544078 0.12885149942120921 0
+552 0 -2.567891 0.07123372 0.10661249990663338 0
+553 0 -1.67024612 0.158391356 0.24877857334123982 0
+554 0 -2.253997 0.095005244 0.14401866240282399 0
+555 0 -2.15087152 0.104249813 0.15883165472574934 0
+556 0 -2.25254679 0.09513001 0.14421757377759611 0
+557 0 -2.57519341 0.0707521 0.10586457082203461 0
+558 0 -2.303132 0.0908639 0.1374318066173198 0
+559 0 -2.652885 0.06581142 0.098214282791589516 0
+560 0 -2.79334736 0.0576847345 0.085718279282943594 0
+561 0 -2.79334736 0.0576847345 0.085718279282943594 0
+562 0 -2.37061882 0.08544078 0.12885149942120921 0
+563 0 -2.43037367 0.08088568 0.12168377661511501 0
+564 0 -2.68982267 0.06357657 0.094767065231191869 0
+565 1 16.7462845 1 0 1
+566 0 -2.58233213 0.07028419 0.10513830229728056 0
+567 0 -2.35164356 0.08693522 0.13121087830396089 0
+568 1 1.66488457 0.840892553 0.25000662577655486 1
+569 1 11.8811359 0.9999931 9.9750282344474815E-06 1
+570 1 6.743778 0.9988232 0.001698726443122326 1
+571 1 13.4375887 0.999998569 2.0637932864940443E-06 1
+572 0 -2.43037367 0.08088568 0.12168377661511501 0
+573 0 -2.16523314 0.1029163 0.15668549713042432 0
+574 1 5.289529 0.9949812 0.0072587859583563598 1
+575 0 -2.73679447 0.0608368 0.090552214775961787 0
+576 0 -2.652885 0.06581142 0.098214282791589516 0
+577 0 -2.16523314 0.1029163 0.15668549713042432 0
+578 0 -2.16523314 0.1029163 0.15668549713042432 0
+579 0 -2.37061882 0.08544078 0.12885149942120921 0
+580 0 -2.61211181 0.06836298 0.10216012821346472 0
+581 1 9.491055 0.9999244 0.00010904112129145599 1
+582 1 10.5543289 0.9999739 3.7664692194035506E-05 1
+583 0 -2.253997 0.095005244 0.14401866240282399 0
+584 0 -2.854372 0.0544557646 0.080783141512571238 0
+585 0 -1.95639491 0.123857722 0.19076292420475424 0
+586 1 22.14612 1 0 1
+587 0 -2.63995266 0.06661098 0.099449594195521407 0
+588 1 4.183667 0.984986365 0.021824341396277779 1
+589 0 -2.50897 0.07523173 0.11283619840279428 0
+590 1 2.24761486 0.904444635 0.14489590312889089 1
+591 1 5.37478161 0.995389462 0.0066669820307222651 1
+592 1 2.52463913 0.925851166 0.11114780094090568 1
+593 0 -2.483977 0.07698911 0.11558042055485453 0
+594 1 3.883813 0.9798425 0.029378249558157231 1
+595 0 -2.652885 0.06581142 0.098214282791589516 0
+596 0 -2.52397871 0.0741941854 0.11121847131930589 0
+597 0 -2.67120314 0.06469413 0.096489851264687398 0
+598 0 -2.43037367 0.08088568 0.12168377661511501 0
+599 0 -2.042253 0.114837505 0.17598577040139771 0
+600 0 -2.43037367 0.08088568 0.12168377661511501 0
+601 0 -1.876097 0.132837832 0.20562627776187412 0
+602 0 -2.546958 0.07263111 0.10878476978943961 0
+603 1 2.76534462 0.9407741 0.088079778169719514 1
+604 1 1.64908409 0.8387672 0.25365769899634338 1
+605 1 9.463315 0.9999224 0.00011196505030560377 1
+606 0 -2.43851948 0.08028216 0.12073676854570529 0
+607 0 -1.95639491 0.123857722 0.19076292420475424 0
+608 1 13.1060286 0.999998 2.9237080272005804E-06 1
+609 0 -2.50897 0.07523173 0.11283619840279428 0
+610 1 5.4565506 0.995749831 0.0061447648647644489 1
+611 1 8.201106 0.999725759 0.00039570034941416836 1
+612 1 24.51187 1 0 1
+613 0 -2.25948548 0.0945344046 0.14326827011600504 0
+614 0 -2.0189333 0.117229328 0.1798893939370643 0
+615 0 -2.491962 0.0764236 0.11469678675361461 0
+616 0 -2.43037367 0.08088568 0.12168377661511501 0
+617 0
+618 0 -2.546958 0.07263111 0.10878476978943961 0
+619 0 -2.652885 0.06581142 0.098214282791589516 0
+620 0 -2.43037367 0.08088568 0.12168377661511501 0
+621 0 -2.19200563 0.100470684 0.15275779542018131 0
+622 0 -2.700953 0.06291714 0.093751478225403542 0
+623 0 -1.95639491 0.123857722 0.19076292420475424 0
+624 0 -2.41144156 0.0823043659 0.1239123508690735 0
+625 0 -2.19398546 0.1002919 0.15247108397309578 0
+626 1 3.657977 0.9748635 0.03672782424977717 1
+627 0 -1.88075233 0.1323025 0.20473591051784815 0
+628 0 -2.11052465 0.108078077 0.16501067045825779 0
+629 0 -2.386812 0.0841838941 0.12687015785045277 0
+630 0 -2.40915632 0.08247714 0.12418398785058164 0
+631 0 -2.652885 0.06581142 0.098214282791589516 0
+632 0 -1.95639491 0.123857722 0.19076292420475424 0
+633 1 3.20818138 0.961141 0.057180018700639101 1
+634 0 -2.253997 0.095005244 0.14401866240282399 0
+635 0 -2.25866842 0.09460436 0.14337973262597348 0
+636 1 11.0438576 0.999984 2.3045859283900738E-05 1
+637 0 -2.06156754 0.112888753 0.17281306044669567 0
+638 0 -2.386812 0.0841838941 0.12687015785045277 0
+639 0 -2.57519341 0.0707521 0.10586457082203461 0
+640 0 -2.48130536 0.07717918 0.11587753928641568 0
+641 0 -2.43037367 0.08088568 0.12168377661511501 0
+642 0 -2.43037367 0.08088568 0.12168377661511501 0
+643 0 -1.95639491 0.123857722 0.19076292420475424 0
+644 0 -2.11052465 0.108078077 0.16501067045825779 0
+645 0 -2.43037367 0.08088568 0.12168377661511501 0
+646 0 -2.00470734 0.118709572 0.18231055874000973 0
+647 0 -2.135698 0.105675265 0.16112931691390375 0
+648 1 13.45014 0.999998569 2.0637932864940443E-06 1
+649 0 -2.43037367 0.08088568 0.12168377661511501 0
+650 0 -2.31990623 0.0894877 0.13524959098307188 0
+651 0 -2.28205872 0.09261979 0.14022090268098497 0
+652 0 -2.63995266 0.06661098 0.099449594195521407 0
+653 0 -2.546958 0.07263111 0.10878476978943961 0
+654 0 -2.60329771 0.06892648 0.10303300888915028 0
+655 0 -2.43037367 0.08088568 0.12168377661511501 0
+656 0 -2.652885 0.06581142 0.098214282791589516 0
+657 0 -2.3856678 0.0842721462 0.12700918908215508 0
+658 1 9.13554 0.999892235 0.00015548069423433847 1
+659 0 -1.95639491 0.123857722 0.19076292420475424 0
+660 0 -2.16523314 0.1029163 0.15668549713042432 0
+661 0 -2.70365119 0.06275825 0.09350687664921721 0
+662 0 -2.21700072 0.09823417 0.14917524963571913 0
+663 0 -2.21700072 0.09823417 0.14917524963571913 0
+664 0 -2.52331543 0.07423976 0.11128949348681842 0
+665 0 -1.95639491 0.123857722 0.19076292420475424 0
+666 0 -2.6012094 0.06906062 0.10324087369927021 0
+667 0 -2.60329771 0.06892648 0.10303300888915028 0
+668 1 0.230495453 0.557370067 0.84329256977903522 1
+669 1 11.8682785 0.999992967 1.0147012084681539E-05 1
+670 1 7.613348 0.9995065 0.00071218392983748437 1
+671 0 -2.56594157 0.071362786 0.10681299857820942 0
+672 0 -2.49228716 0.07640065 0.11466094104662786 0
+673 0 -2.57178926 0.07097623 0.10621258116698419 0
+674 0 -2.16523314 0.1029163 0.15668549713042432 0
+675 0 -2.33810186 0.08801616 0.13291983400253021 0
+676 0 -2.1956358 0.100143082 0.15223247204165077 0
+677 0 -2.50897 0.07523173 0.11283619840279428 0
+678 0 -1.95639491 0.123857722 0.19076292420475424 0
+679 0 -2.11052465 0.108078077 0.16501067045825779 0
+680 1 25.43999 1 0 1
+681 1 14.1827745 0.9999993 1.031896274211761E-06 1
+682 0 -2.7642622 0.05928621 0.088172242863683387 0
+683 0 -1.95639491 0.123857722 0.19076292420475424 0
+684 0 -1.95639491 0.123857722 0.19076292420475424 0
+685 0 -1.95639491 0.123857722 0.19076292420475424 0
+686 0 -1.95639491 0.123857722 0.19076292420475424 0
+687 0 -2.46161175 0.07859354 0.11809037777838542 0
+688 0 -2.386812 0.0841838941 0.12687015785045277 0
+689 0 -3.64467454 0.0254645254 0.037213392183878896 0
+690 0 -2.135698 0.105675265 0.16112931691390375 0
+691 1 5.792588 0.9969592 0.0043936168831631463 1
+692 0 -2.253997 0.095005244 0.14401866240282399 0
+693 0 -2.65598679 0.06562097 0.097920204018736551 0
+694 0 -2.47353578 0.0777343661 0.11674575467283718 0
+695 0 -2.11052465 0.108078077 0.16501067045825779 0
+696 1 7.72577858 0.999558866 0.00063656221445328599 1
+697 1 3.85642529 0.97929436 0.030185519608588427 1
+698 1 5.01899147 0.993432164 0.009506637452477578 1
diff --git a/test/Microsoft.ML.Predictor.Tests/Microsoft.ML.Predictor.Tests.csproj b/test/Microsoft.ML.Predictor.Tests/Microsoft.ML.Predictor.Tests.csproj
index 2329491e08..12cddfca21 100644
--- a/test/Microsoft.ML.Predictor.Tests/Microsoft.ML.Predictor.Tests.csproj
+++ b/test/Microsoft.ML.Predictor.Tests/Microsoft.ML.Predictor.Tests.csproj
@@ -25,6 +25,7 @@
+