我正在尝试训练 LSTM 网络以进一步预测时间步长。我有一个查询列表,当前问题基于其中一个。
验证损失(mse)总是低于训练损失(mse),我知道模型处于欠拟合状态,因此泛化能力非常差。
为了让网络至少能够过拟合一段时间,我应该对网络做出哪些修改?相关代码如下。
# Hyper-parameters shared by fit_lstm below.
n_neurons = [150, 80, 60, 40, 25, 10]  # candidate widths for the LSTM stack
dropout = 0.2                          # recurrent dropout rate
learning_rate = 0.001                  # optimizer step size
def fit_lstm(train, n_lag, n_seq, n_batch, nb_epoch, n_neurons, dropout=dropout, reset_state=False):
    """Build and fit a three-layer stateful LSTM on a supervised time-series array.

    Parameters
    ----------
    train : 2-D array; columns [:n_lag] are lag inputs, columns [n_lag:] are targets.
    n_lag : number of lag observations used as input features.
    n_seq : number of future steps to predict (unused here; targets come from `train`).
    n_batch : fixed batch size, required by the stateful LSTM layers.
    nb_epoch : number of manual epochs; the LSTM state is reset after each one.
    n_neurons : list of layer widths; the first three entries size the LSTM stack.
    dropout : recurrent dropout rate applied to the first two LSTM layers.
    reset_state : unused; kept for interface compatibility.

    Returns
    -------
    (model, loss, val_loss) : the fitted model plus per-epoch loss histories.
    """
    # Reshape training data into [samples, timesteps, features].
    X, y = train[:, :n_lag], train[:, n_lag:]
    X = X.reshape(X.shape[0], 1, X.shape[1])

    # Design the network. BUG FIX: the original used n_neurons[5] (only 10
    # units) for every LSTM layer, ignoring the rest of the width list and
    # starving the model of capacity — the likely cause of the persistent
    # underfitting. Use the first three entries so the stack tapers 150->80->60.
    model = Sequential()
    model.add(LSTM(n_neurons[0], batch_input_shape=(n_batch, X.shape[1], X.shape[2]),
                   dropout_U=dropout, stateful=True, return_sequences=True))
    model.add(LSTM(n_neurons[1], batch_input_shape=(n_batch, X.shape[1], X.shape[2]),
                   dropout_U=dropout, stateful=True, return_sequences=True))
    model.add(LSTM(n_neurons[2], batch_input_shape=(n_batch, X.shape[1], X.shape[2]),
                   stateful=True, return_sequences=False))
    # NOTE(review): 'tanh' squashes outputs to [-1, 1]; correct only if the
    # targets are scaled to that range — confirm against the data pipeline.
    model.add(Dense(y.shape[1], activation='tanh'))

    # `adam`, `early_stop` and `reduce_lr` are presumed defined elsewhere in
    # the file — verify they exist in the calling scope.
    model.compile(optimizer=adam, loss='mse')  # , metrics=[mean_absolute_percentage_error]

    # Fit one epoch at a time so the hidden state can be cleared between
    # epochs, as required when stateful=True but epochs are independent.
    # NOTE(review): validation_split=0.1 with shuffle=False always holds out
    # the LAST 10% of the series; with stateful layers that slice must also be
    # divisible by n_batch — confirm.
    tic = time.time()  # moved here: time the training loop, not compilation
    loss, val_loss = list(), list()
    for i in range(nb_epoch):
        print('Running Epoch ==> %s' % i)
        history = model.fit(X, y, nb_epoch=1, batch_size=n_batch, validation_split=0.1,
                            callbacks=[early_stop, reduce_lr], verbose=2, shuffle=False)
        loss.append(history.history['loss'])
        val_loss.append(history.history['val_loss'])
        model.reset_states()  # clears the LSTM state between epochs
    toc = time.time()
    print('=====' * 10)
    # BUG FIX: the original printed (toc - tic) * 100, inflating the reported
    # elapsed seconds a hundredfold.
    print('Total computation time to train the Model : %0.2f' % (toc - tic) + ' secs')
    return model, loss, val_loss
任何帮助都将不胜感激。
提前致谢。