If it's what you mentioned above, then this is probably in the context of an LSTM network. I would suggest using the Keras Tuner Bayesian optimizer and putting the l1 or l2 factor as a parameter in the search space. That way you can find the best value for it, and it's a good way to hypertune. Keep in mind that, if I remember correctly, the wider the range of the parameters or kernels, the more compute power you'll need.
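To make the l1/l2 idea concrete, here is a minimal sketch of exposing the regularization factor itself as a tuned hyperparameter (the name 'l2_factor' and the value grid are my own illustrative choices, not anything prescribed by the libraries):

from tensorflow import keras
from tensorflow.keras.models import Sequential

def model_with_l2(hp):
    model = Sequential()
    # The L2 factor is sampled from the search space like any other
    # hyperparameter, so the Bayesian optimizer tunes it for you.
    model.add(keras.layers.LSTM(
        units=hp.Int('units', min_value=40, max_value=800, step=20),
        kernel_regularizer=keras.regularizers.L2(
            hp.Choice('l2_factor', values=[1e-2, 1e-3, 1e-4])),
        input_shape=(30, 1)))
    model.add(keras.layers.Dense(1))
    model.compile(loss='mean_squared_error', optimizer='adam')
    return model

You would pass model_with_l2 to the tuner exactly like model1 below.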
from tensorflow import keras
from tensorflow.keras.models import Sequential
import tensorflow as tf
import keras_tuner as kt

def model1(hp):
    model = Sequential()
    # Distinct hyperparameter names (units_1, units_2, ...) let the tuner size
    # each layer independently; reusing a single name would tie them together.
    model.add(keras.layers.LSTM(units=hp.Int('units_1', min_value=40, max_value=800, step=20),
                                dropout=hp.Float('dropout_1', min_value=0.15, max_value=0.99, step=0.05),
                                recurrent_dropout=hp.Float('recurrent_dropout_1', min_value=0.05, max_value=0.99, step=0.05),
                                activation='relu',
                                return_sequences=True,
                                input_shape=(30, 1)))
    # Note on attention: a bare Attention() statement between layers is a no-op.
    # keras.layers.Attention takes [query, value] inputs, so wiring it in
    # properly requires the functional API rather than Sequential.
    model.add(keras.layers.LSTM(units=hp.Int('units_2', min_value=40, max_value=800, step=20),
                                dropout=hp.Float('dropout_2', min_value=0.15, max_value=0.99, step=0.05),
                                activation='relu',
                                return_sequences=True))
    model.add(keras.layers.LSTM(units=hp.Int('units_3', min_value=40, max_value=800, step=20),
                                activation='relu'))
    model.add(keras.layers.Dense(1))
    model.compile(loss='mean_squared_error',
                  optimizer=tf.keras.optimizers.Adam(
                      hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4, 1e-7, 1e-10])))
    return model
bayesian_opt_tuner = kt.BayesianOptimization(
    model1,
    objective='val_loss',
    max_trials=200,
    executions_per_trial=1,
    project_name='timeseries_bayes_opt_POC',
    overwrite=True)
xval = X_test  # only used if you switch to explicit validation_data below
bayesian_opt_tuner.search(x=X_train, y=y_train,  # assumes y_train/y_test hold the target for each 30-step window
                          epochs=300,
                          # validation_data=(xval, y_test),
                          validation_split=0.95,  # holds out 95% of the data for validation; a smaller split (e.g. 0.05) trains on more data
                          validation_steps=30,
                          steps_per_epoch=30,
                          callbacks=[tf.keras.callbacks.EarlyStopping(monitor='val_loss',
                                                                      patience=4,
                                                                      verbose=1,
                                                                      restore_best_weights=True),
                                     tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss',
                                                                          factor=0.1,
                                                                          patience=2,
                                                                          verbose=1,
                                                                          min_delta=1e-5,
                                                                          mode='min')])
This is where the magic happens. It's something I put together myself, so if you're interested, holla.
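Once the search above finishes, you can pull out the winning configuration. A short usage sketch (best_hps, best_model and final_model are my own variable names):

# Inspect the best hyperparameters found by the Bayesian search.
best_hps = bayesian_opt_tuner.get_best_hyperparameters(num_trials=1)[0]
print('best units_1:', best_hps.get('units_1'))
print('best learning rate:', best_hps.get('learning_rate'))

# Either take the best trained model directly...
best_model = bayesian_opt_tuner.get_best_models(num_models=1)[0]
# ...or rebuild from the hyperparameters and retrain on all the data.
final_model = model1(best_hps)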