From 14050c97180530e409c5565c6142bb2f223953ca Mon Sep 17 00:00:00 2001 From: rithin-pullela-aws Date: Sat, 21 Dec 2024 21:46:18 -0800 Subject: [PATCH 1/3] Use AdaGrad optimiser by default in Linear Regression Signed-off-by: rithin-pullela-aws --- .../algorithms/regression/LinearRegression.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ml-algorithms/src/main/java/org/opensearch/ml/engine/algorithms/regression/LinearRegression.java b/ml-algorithms/src/main/java/org/opensearch/ml/engine/algorithms/regression/LinearRegression.java index 1bd78aa478..cea9b79a1e 100644 --- a/ml-algorithms/src/main/java/org/opensearch/ml/engine/algorithms/regression/LinearRegression.java +++ b/ml-algorithms/src/main/java/org/opensearch/ml/engine/algorithms/regression/LinearRegression.java @@ -52,7 +52,7 @@ public class LinearRegression implements Trainable, Predictable { public static final String VERSION = "1.0.0"; private static final LinearRegressionParams.ObjectiveType DEFAULT_OBJECTIVE_TYPE = LinearRegressionParams.ObjectiveType.SQUARED_LOSS; - private static final LinearRegressionParams.OptimizerType DEFAULT_OPTIMIZER_TYPE = LinearRegressionParams.OptimizerType.SIMPLE_SGD; + private static final LinearRegressionParams.OptimizerType DEFAULT_OPTIMIZER_TYPE = LinearRegressionParams.OptimizerType.ADA_GRAD; private static final double DEFAULT_LEARNING_RATE = 0.01; // Momentum private static final double DEFAULT_MOMENTUM_FACTOR = 0; @@ -134,15 +134,15 @@ private void createOptimiser() { break; } switch (optimizerType) { + case SIMPLE_SGD: + optimiser = SGD.getSimpleSGD(learningRate, momentumFactor, momentum); + break; case LINEAR_DECAY_SGD: optimiser = SGD.getLinearDecaySGD(learningRate, momentumFactor, momentum); break; case SQRT_DECAY_SGD: optimiser = SGD.getSqrtDecaySGD(learningRate, momentumFactor, momentum); break; - case ADA_GRAD: - optimiser = new AdaGrad(learningRate, epsilon); - break; case ADA_DELTA: optimiser = new AdaDelta(momentumFactor, 
epsilon); break; @@ -153,8 +153,8 @@ private void createOptimiser() { optimiser = new RMSProp(learningRate, momentumFactor, epsilon, decayRate); break; default: - // Use default SGD with a constant learning rate. - optimiser = SGD.getSimpleSGD(learningRate, momentumFactor, momentum); + // Use AdaGrad by default + optimiser = new AdaGrad(learningRate, epsilon); break; } } From b85914fd757e84ffe9fa1630f6aecdb653d32178 Mon Sep 17 00:00:00 2001 From: rithin-pullela-aws Date: Thu, 26 Dec 2024 10:31:55 -0800 Subject: [PATCH 2/3] Added issue link in the code comment as a reference. Signed-off-by: rithin-pullela-aws --- .../ml/engine/algorithms/regression/LinearRegression.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ml-algorithms/src/main/java/org/opensearch/ml/engine/algorithms/regression/LinearRegression.java b/ml-algorithms/src/main/java/org/opensearch/ml/engine/algorithms/regression/LinearRegression.java index cea9b79a1e..36f369600e 100644 --- a/ml-algorithms/src/main/java/org/opensearch/ml/engine/algorithms/regression/LinearRegression.java +++ b/ml-algorithms/src/main/java/org/opensearch/ml/engine/algorithms/regression/LinearRegression.java @@ -153,7 +153,7 @@ private void createOptimiser() { optimiser = new RMSProp(learningRate, momentumFactor, epsilon, decayRate); break; default: - // Use AdaGrad by default + // Use AdaGrad by default, reference issue: https://github.com/opensearch-project/ml-commons/issues/3210#issuecomment-2556119802 optimiser = new AdaGrad(learningRate, epsilon); break; } From f3f3f4a75dedcfbfc1a3ad5fd3b7c0808e18d5c7 Mon Sep 17 00:00:00 2001 From: rithin-pullela-aws Date: Thu, 26 Dec 2024 14:23:59 -0800 Subject: [PATCH 3/3] Apply Spotless Signed-off-by: rithin-pullela-aws --- .../ml/engine/algorithms/regression/LinearRegression.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ml-algorithms/src/main/java/org/opensearch/ml/engine/algorithms/regression/LinearRegression.java 
b/ml-algorithms/src/main/java/org/opensearch/ml/engine/algorithms/regression/LinearRegression.java index 36f369600e..895261201e 100644 --- a/ml-algorithms/src/main/java/org/opensearch/ml/engine/algorithms/regression/LinearRegression.java +++ b/ml-algorithms/src/main/java/org/opensearch/ml/engine/algorithms/regression/LinearRegression.java @@ -153,7 +153,8 @@ private void createOptimiser() { optimiser = new RMSProp(learningRate, momentumFactor, epsilon, decayRate); break; default: - // Use AdaGrad by default, reference issue: https://github.com/opensearch-project/ml-commons/issues/3210#issuecomment-2556119802 + // Use AdaGrad by default, reference issue: + // https://github.com/opensearch-project/ml-commons/issues/3210#issuecomment-2556119802 optimiser = new AdaGrad(learningRate, epsilon); break; }