From fe3530b775ba27159fe831c7066b54f542d96477 Mon Sep 17 00:00:00 2001
From: RollerKnobster
Date: Mon, 24 Jun 2024 23:28:35 +0300
Subject: [PATCH] Fix: refactor `getattr(keras.optimizers, optimizer)` to
 `keras.optimizers.get(optimizer)` in autoencoders

---
 gordo/machine/model/factories/feedforward_autoencoder.py | 2 +-
 gordo/machine/model/factories/lstm_autoencoder.py        | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/gordo/machine/model/factories/feedforward_autoencoder.py b/gordo/machine/model/factories/feedforward_autoencoder.py
index 1e569dac5..1234fc9f5 100644
--- a/gordo/machine/model/factories/feedforward_autoencoder.py
+++ b/gordo/machine/model/factories/feedforward_autoencoder.py
@@ -88,7 +88,7 @@ class (e.x. Adam(lr=0.01,beta_1=0.9, beta_2=0.999)). If no arguments are
 
     # Instantiate optimizer with kwargs
     if isinstance(optimizer, str):
-        Optim = getattr(keras.optimizers, optimizer)
+        Optim = keras.optimizers.get(optimizer)
         optimizer = Optim(**optimizer_kwargs)
 
     # Final output layer
diff --git a/gordo/machine/model/factories/lstm_autoencoder.py b/gordo/machine/model/factories/lstm_autoencoder.py
index a0e334fa6..4864321a6 100644
--- a/gordo/machine/model/factories/lstm_autoencoder.py
+++ b/gordo/machine/model/factories/lstm_autoencoder.py
@@ -90,7 +90,7 @@ class (e.x. Adam(lr=0.01,beta_1=0.9, beta_2=0.999)). If no arguments are
 
     # output layer
     if isinstance(optimizer, str):
-        Optim = getattr(keras.optimizers, optimizer)
+        Optim = keras.optimizers.get(optimizer)
         optimizer = Optim(**optimizer_kwargs)
 
     model.add(Dense(units=n_features_out, activation=out_func))
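
Note (not part of the patch itself): below is a minimal sketch of how `keras.optimizers.get` resolves an optimizer identifier, which is the API the refactor switches to. It assumes TensorFlow's bundled Keras (`tensorflow.keras`); the identifiers and values shown are illustrative only, not taken from gordo's configuration.

```python
# Sketch: resolving optimizers via keras.optimizers.get (assumes tensorflow.keras).
from tensorflow import keras

# A string identifier is resolved to an optimizer instance with default config.
optimizer = keras.optimizers.get("Adam")
print(type(optimizer).__name__)  # "Adam"

# A dict identifier lets the configuration travel with the name.
optimizer = keras.optimizers.get(
    {"class_name": "Adam", "config": {"learning_rate": 0.01}}
)
print(optimizer.get_config()["learning_rate"])  # 0.01

# An existing optimizer instance is returned unchanged.
adam = keras.optimizers.Adam(learning_rate=0.001)
assert keras.optimizers.get(adam) is adam
```

Unlike `getattr(keras.optimizers, name)`, which only does an attribute lookup on the module, `keras.optimizers.get` accepts strings, config dicts, and optimizer instances through one entry point.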