diff --git a/src/Microsoft.ML.Core/Utilities/Random.cs b/src/Microsoft.ML.Core/Utilities/Random.cs
index 88dd45209f..7b1b14c974 100644
--- a/src/Microsoft.ML.Core/Utilities/Random.cs
+++ b/src/Microsoft.ML.Core/Utilities/Random.cs
@@ -42,16 +42,6 @@ public interface IRandom
public static class RandomUtils
{
- public static Single NextFloat(this IRandom rand)
- {
- return rand.NextSingle();
- }
-
- public static Single NextFloat(this Random rand)
- {
- return rand.NextDouble().ToFloat();
- }
-
public static TauswortheHybrid Create()
{
// Seed from a system random.
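The removed NextFloat helpers are superseded by NextSingle on the random abstractions. As a rough sketch of what the call sites below now rely on (not the library's actual implementation), a NextSingle extension over System.Random could look like:

    using System;

    internal static class RandomExtensionsSketch
    {
        // Illustrative only: a uniform draw in [0, 1) narrowed to single precision,
        // mirroring what the NextSingle call sites in this patch expect.
        public static float NextSingle(this Random rand)
        {
            return (float)rand.NextDouble();
        }
    }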
diff --git a/src/Microsoft.ML.Core/Utilities/Stats.cs b/src/Microsoft.ML.Core/Utilities/Stats.cs
index 6b048c4de3..33992950db 100644
--- a/src/Microsoft.ML.Core/Utilities/Stats.cs
+++ b/src/Microsoft.ML.Core/Utilities/Stats.cs
@@ -202,7 +202,7 @@ public static int SampleFromPoisson(IRandom rand, double lambda)
// http://en.wikipedia.org/wiki/Laplace_distribution
public static Float SampleFromLaplacian(IRandom rand, Float mean, Float scale)
{
- Float u = rand.NextFloat();
+ Float u = rand.NextSingle();
u = u - 0.5f;
Float ret = mean;
if (u >= 0)
@@ -221,7 +221,7 @@ public static Float SampleFromLaplacian(IRandom rand, Float mean, Float scale)
///
public static Float SampleFromCauchy(IRandom rand)
{
- return (Float)Math.Tan(Math.PI * (rand.NextFloat() - 0.5));
+ return (Float)Math.Tan(Math.PI * (rand.NextSingle() - 0.5));
}
///
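Both samplers draw a single uniform value and push it through the distribution's inverse CDF. A self-contained sketch of the same math over System.Random (doubles instead of Float) is:

    using System;

    internal static class DistributionSamplingSketch
    {
        // Laplace(mean, scale) via inverse CDF: x = mean - scale * sign(u) * ln(1 - 2|u|),
        // with u uniform in (-0.5, 0.5).
        public static double SampleLaplace(Random rand, double mean, double scale)
        {
            double u = rand.NextDouble() - 0.5;
            return mean - scale * Math.Sign(u) * Math.Log(1 - 2 * Math.Abs(u));
        }

        // Standard Cauchy via inverse CDF: x = tan(pi * (u - 0.5)).
        public static double SampleCauchy(Random rand)
        {
            return Math.Tan(Math.PI * (rand.NextDouble() - 0.5));
        }
    }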
diff --git a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs
index 1b1fc0328d..f80589bdab 100644
--- a/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs
+++ b/src/Microsoft.ML.Data/Transforms/GenerateNumberTransform.cs
@@ -426,7 +426,7 @@ private void EnsureValue(ref long lastCounter, ref Float value, TauswortheHybrid
Ch.Assert(lastCounter <= Input.Position);
while (lastCounter < Input.Position)
{
- value = rng.NextFloat();
+ value = rng.NextSingle();
lastCounter++;
}
}
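EnsureValue advances the generator once for every row the cursor has moved past, so the number attached to a given row position does not depend on which rows were actually read. A reduced sketch of that catch-up pattern (the class and member names are illustrative, not the transform's API):

    using System;

    // Illustrative sketch: one draw per row keeps the value for a given row
    // position deterministic even when the cursor skips ahead.
    internal sealed class PerRowRandom
    {
        private readonly Random _rng = new Random(42);
        private long _lastCounter = -1;
        private float _value;

        public float GetValue(long position)
        {
            while (_lastCounter < position)
            {
                _value = (float)_rng.NextDouble(); // advance once per skipped row
                _lastCounter++;
            }
            return _value;
        }
    }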
diff --git a/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs b/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs
index 87fd220cf8..b833278890 100644
--- a/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs
+++ b/src/Microsoft.ML.KMeansClustering/KMeansPlusPlusTrainer.cs
@@ -328,7 +328,7 @@ public static void Initialize(
cumulativeWeight += probabilityWeight;
if (probabilityWeight > Epsilon &&
- host.Rand.NextFloat() < probabilityWeight / cumulativeWeight)
+ host.Rand.NextSingle() < probabilityWeight / cumulativeWeight)
{
// again, numerical error may cause selection of the same candidate twice, so ensure that the distance is non-trivially positive
Utils.Swap(ref cursor.Features, ref candidate);
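The accept test host.Rand.NextSingle() < probabilityWeight / cumulativeWeight is a single-pass, size-one weighted reservoir sample: once the scan finishes, each point has been retained with probability proportional to its weight. A standalone sketch of that selection rule, assuming a plain Random rather than host.Rand:

    using System;

    internal static class WeightedReservoirSketch
    {
        // Returns the index of one item chosen with probability proportional to
        // weights[i], in a single pass over the stream (-1 if all weights are zero).
        public static int SampleIndex(Random rand, double[] weights)
        {
            double cumulativeWeight = 0;
            int chosen = -1;
            for (int i = 0; i < weights.Length; i++)
            {
                cumulativeWeight += weights[i];
                if (weights[i] > 0 && rand.NextDouble() < weights[i] / cumulativeWeight)
                    chosen = i;
            }
            return chosen;
        }
    }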
diff --git a/src/Microsoft.ML.PipelineInference/AutoMlEngines/UniformRandomEngine.cs b/src/Microsoft.ML.PipelineInference/AutoMlEngines/UniformRandomEngine.cs
index 2a4bb51836..9f304312c2 100644
--- a/src/Microsoft.ML.PipelineInference/AutoMlEngines/UniformRandomEngine.cs
+++ b/src/Microsoft.ML.PipelineInference/AutoMlEngines/UniformRandomEngine.cs
@@ -123,11 +123,11 @@ private void RandomlyPerturbSweepableHyperparameters(IEnumerable
case TlcModule.SweepableFloatParamAttribute floParam:
var fvg = AutoMlUtils.ToIValueGenerator(floParam);
- floParam.RawValue = ((IParameterValue)fvg.CreateFromNormalized(Host.Rand.NextFloat())).Value;
+ floParam.RawValue = ((IParameterValue)fvg.CreateFromNormalized(Host.Rand.NextSingle())).Value;
break;
case TlcModule.SweepableLongParamAttribute lonParam:
var lvg = AutoMlUtils.ToIValueGenerator(lonParam);
- lonParam.RawValue = ((IParameterValue)lvg.CreateFromNormalized(Host.Rand.NextFloat())).Value;
+ lonParam.RawValue = ((IParameterValue)lvg.CreateFromNormalized(Host.Rand.NextSingle())).Value;
break;
default:
throw new NotSupportedException($"Unknown type of sweepable parameter attribute: {param.GetType()}");
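CreateFromNormalized maps the uniform draw in [0, 1) onto the parameter's declared sweep range. A hedged sketch of that kind of mapping (the min, max and log-scale parameters here are illustrative, not the sweeper's actual value-generator API):

    using System;

    internal static class NormalizedMappingSketch
    {
        // Map a normalized value u in [0, 1) onto [min, max], either linearly
        // or on a log scale (the log case assumes min > 0).
        public static double FromNormalized(double u, double min, double max, bool logScale = false)
        {
            if (!logScale)
                return min + u * (max - min);
            // Log-scale sweep: interpolate the exponent instead of the raw value.
            return Math.Exp(Math.Log(min) + u * (Math.Log(max) - Math.Log(min)));
        }
    }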
diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs
index e4273d7ce9..89f4866228 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs
@@ -201,7 +201,7 @@ protected virtual Optimizer InitializeOptimizer(IChannel ch, FloatLabelCursor.Fa
{
Float[] initWeights = new Float[BiasCount + WeightCount];
for (int j = 0; j < initWeights.Length; j++)
- initWeights[j] = InitWtsDiameter * (Host.Rand.NextFloat() - (Float)0.5);
+ initWeights[j] = InitWtsDiameter * (Host.Rand.NextSingle() - (Float)0.5);
init = new VBuffer<Float>(initWeights.Length, initWeights);
}
else if (SgdInitializationTolerance > 0)
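Each initial weight is drawn uniformly from a zero-centered interval of width InitWtsDiameter, i.e. [-InitWtsDiameter/2, +InitWtsDiameter/2]; the OnlineLinear change below uses the same scheme. A tiny sketch of that initialization with a plain Random:

    using System;

    internal static class WeightInitSketch
    {
        // Fill weights uniformly from a zero-centered interval of width 'diameter'.
        public static float[] UniformCentered(Random rand, int count, float diameter)
        {
            var weights = new float[count];
            for (int i = 0; i < count; i++)
                weights[i] = diameter * ((float)rand.NextDouble() - 0.5f);
            return weights;
        }
    }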
diff --git a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs
index 253800171d..bcd4b33d58 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/Online/OnlineLinear.cs
@@ -238,8 +238,8 @@ protected virtual void InitCore(IChannel ch, int numFeatures, LinearPredictor pr
{
Weights = VBufferUtils.CreateDense<Float>(NumFeatures);
for (int i = 0; i < NumFeatures; i++)
- Weights.Values[i] = Args.InitWtsDiameter * (Host.Rand.NextFloat() - (Float)0.5);
- Bias = Args.InitWtsDiameter * (Host.Rand.NextFloat() - (Float)0.5);
+ Weights.Values[i] = Args.InitWtsDiameter * (Host.Rand.NextSingle() - (Float)0.5);
+ Bias = Args.InitWtsDiameter * (Host.Rand.NextSingle() - (Float)0.5);
}
else if (NumFeatures <= 1000)
Weights = VBufferUtils.CreateDense<Float>(NumFeatures);
diff --git a/src/Microsoft.ML.Transforms/RffTransform.cs b/src/Microsoft.ML.Transforms/RffTransform.cs
index 6063559923..b8f49b4dce 100644
--- a/src/Microsoft.ML.Transforms/RffTransform.cs
+++ b/src/Microsoft.ML.Transforms/RffTransform.cs
@@ -191,7 +191,7 @@ private void GetDDimensionalFeatureMapping(int rowSize)
private void GetDRotationTerms(int colSize)
{
for (int i = 0; i < colSize; ++i)
- RotationTerms[i] = (_rand.NextFloat() - (Float)0.5) * (Float)Math.PI;
+ RotationTerms[i] = (_rand.NextSingle() - (Float)0.5) * (Float)Math.PI;
}
private void InitializeFourierCoefficients(int rowSize, int colSize)
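The rotation terms are phase offsets drawn uniformly from [-pi/2, pi/2), obtained by shifting and scaling a uniform [0, 1) draw; in the random Fourier features construction they presumably enter features of the form cos(w . x + b). A minimal sketch of the same draw with System.Random:

    using System;

    internal static class RffInitSketch
    {
        // Draw 'count' phase offsets uniformly from [-pi/2, pi/2),
        // mirroring (u - 0.5) * pi with u uniform in [0, 1).
        public static float[] RotationTerms(Random rand, int count)
        {
            var terms = new float[count];
            for (int i = 0; i < count; i++)
                terms[i] = ((float)rand.NextDouble() - 0.5f) * (float)Math.PI;
            return terms;
        }
    }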