Epoch 00002: val_loss improved from 0.16357 to 0.13362, saving model to model_checkpoint.h5
1172/1172 [==============================] - 107s 92ms/step - loss: 0.1271 - val_loss: 0.1336
Epoch 3/10
1172/1172 [==============================] - ETA: 0s - loss: 0.1089
Epoch 00005: val_loss did not improve from 0.13362
1172/1172 [==============================] - 110s 94ms/step - loss: 0.1089 - val_loss: 0.1481
Epoch 6/10
271/1172 [=====>........................] - ETA: 1:12 - loss: 0.1117
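The "val_loss improved ... saving model to model_checkpoint.h5" messages come from a ModelCheckpoint callback run with verbose=1. The fit call itself is not shown in this excerpt; below is a minimal sketch of a callback setup that produces this kind of log, assuming model, dataset_train, and dataset_val are the compiled model and tf.data pipelines built earlier in the tutorial.

from tensorflow import keras

# Assumed names: `model`, `dataset_train`, and `dataset_val` come from earlier steps.
path_checkpoint = "model_checkpoint.h5"

# Stop training once val_loss has stopped improving for several epochs.
es_callback = keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=5)

# Keep only the best weights seen so far; verbose=1 prints the
# "val_loss improved ... saving model to ..." messages shown above.
modelckpt_callback = keras.callbacks.ModelCheckpoint(
    monitor="val_loss",
    filepath=path_checkpoint,
    verbose=1,
    save_weights_only=True,
    save_best_only=True,
)

history = model.fit(
    dataset_train,
    epochs=10,
    validation_data=dataset_val,
    callbacks=[es_callback, modelckpt_callback],
)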
We can visualize the loss with the function below. After a certain point, the loss stops decreasing.
def visualize_loss(history, title):
    loss = history.history["loss"]
    val_loss = history.history["val_loss"]
    epochs = range(len(loss))
    plt.figure()
    plt.plot(epochs, loss, "b", label="Training loss")
    plt.plot(epochs, val_loss, "r", label="Validation loss")
    plt.title(title)
    plt.xlabel("Epochs")
    plt.ylabel("Loss")
    plt.legend()
    plt.show()


visualize_loss(history, "Training and Validation Loss")
[Plot: training and validation loss curves]
Prediction
The trained model can now make predictions for 5 sets of values from the validation set.
def show_plot(plot_data, delta, title):
    labels = ["History", "True Future", "Model Prediction"]
    marker = [".-", "rx", "go"]
    time_steps = list(range(-(plot_data[0].shape[0]), 0))
    if delta:
        future = delta
    else:
        future = 0

    plt.title(title)
    for i, val in enumerate(plot_data):
        if i:
            # True future and model prediction are single values plotted `delta` steps ahead.
            plt.plot(future, plot_data[i], marker[i], markersize=10, label=labels[i])
        else:
            # The first entry is the observed history, plotted at negative time steps.
            plt.plot(time_steps, plot_data[i].flatten(), marker[i], label=labels[i])
    plt.legend()
    plt.xlim([time_steps[0], (future + 5) * 2])
    plt.xlabel("Time-Step")
    plt.show()
    return


for x, y in dataset_val.take(5):
    show_plot(
        [x[0][:, 1].numpy(), y[0].numpy(), model.predict(x)[0]],
        12,
        "Single Step Prediction",
    )
[Plots: single-step predictions for 5 samples from the validation set]
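Beyond inspecting a handful of plots, it can help to quantify the error over the whole validation set. The following is a small sketch under the same assumptions (dataset_val yields (inputs, targets) batches and model is the trained network); if the data was normalized earlier in the pipeline, the resulting MAE is in normalized units.

import numpy as np

# Accumulate absolute errors over the full validation pipeline.
abs_errors = []
for x, y in dataset_val:
    preds = model.predict(x, verbose=0)
    abs_errors.append(np.abs(preds.flatten() - y.numpy().flatten()))

val_mae = np.concatenate(abs_errors).mean()
print(f"Validation MAE: {val_mae:.4f}")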