diff --git a/site/en/guide/basic_training_loops.ipynb b/site/en/guide/basic_training_loops.ipynb index a1558b1903e..b6c6fa80908 100644 --- a/site/en/guide/basic_training_loops.ipynb +++ b/site/en/guide/basic_training_loops.ipynb @@ -266,7 +266,7 @@ "id": "-50nq-wPBsAW" }, "source": [ - "Before training the model, you can visualize the loss value by plotting the model's predictions in red and the training data in blue:" + "Before training the model, you can visualize the loss value by plotting the model's predictions and the training data:" ] }, { @@ -322,11 +322,11 @@ " current_loss = loss(y, model(x))\n", "\n", " # Use GradientTape to calculate the gradients with respect to W and b\n", - " dw, db = t.gradient(current_loss, [model.w, model.b])\n", + " grad_w, grad_b = t.gradient(current_loss, [model.w, model.b])\n", "\n", " # Subtract the gradient scaled by the learning rate\n", - " model.w.assign_sub(learning_rate * dw)\n", - " model.b.assign_sub(learning_rate * db)" + " model.w.assign_sub(learning_rate * grad_w)\n", + " model.b.assign_sub(learning_rate * grad_b)" ] }, { @@ -491,7 +491,7 @@ "training_loop(keras_model, x, y)\n", "\n", "# You can also save a checkpoint using Keras's built-in support\n", - "keras_model.save_weights(\"my_checkpoint\")" + "keras_model.save_weights(\"my_checkpoint.weights.h5\")" ] }, {