diff --git a/lab2/Part1_MNIST.ipynb b/lab2/Part1_MNIST.ipynb
index ea230590..9ef3427c 100644
--- a/lab2/Part1_MNIST.ipynb
+++ b/lab2/Part1_MNIST.ipynb
@@ -671,7 +671,7 @@
         "  # GradientTape to record differentiation operations\n",
         "  with tf.GradientTape() as tape:\n",
         "    #'''TODO: feed the images into the model and obtain the predictions'''\n",
-        "    logits = # TODO\n",
+        "    predictions = # TODO\n",
         "\n",
         "    #'''TODO: compute the categorical cross entropy loss\n",
         "    loss_value = tf.keras.backend.sparse_categorical_crossentropy('''TODO''', '''TODO''') # TODO\n",
@@ -699,4 +699,4 @@
       ]
     }
   ]
-}
\ No newline at end of file
+}
diff --git a/lab2/solutions/Part1_MNIST_Solution.ipynb b/lab2/solutions/Part1_MNIST_Solution.ipynb
index 8e7a2139..290c623d 100644
--- a/lab2/solutions/Part1_MNIST_Solution.ipynb
+++ b/lab2/solutions/Part1_MNIST_Solution.ipynb
@@ -685,11 +685,11 @@
         "  # GradientTape to record differentiation operations\n",
         "  with tf.GradientTape() as tape:\n",
         "    #'''TODO: feed the images into the model and obtain the predictions'''\n",
-        "    logits = cnn_model(images)\n",
-        "    # logits = # TODO\n",
+        "    predictions = cnn_model(images)\n",
+        "    # predictions = # TODO\n",
         "\n",
         "    #'''TODO: compute the categorical cross entropy loss\n",
-        "    loss_value = tf.keras.backend.sparse_categorical_crossentropy(labels, logits)\n",
+        "    loss_value = tf.keras.backend.sparse_categorical_crossentropy(labels, predictions)\n",
         "    # loss_value = tf.keras.backend.sparse_categorical_crossentropy('''TODO''', '''TODO''') # TODO\n",
         "\n",
         "  loss_history.append(loss_value.numpy().mean()) # append the loss to the loss_history record\n",
@@ -716,4 +716,4 @@
       ]
     }
   ]
-}
\ No newline at end of file
+}
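
Context for the rename above: tf.keras.backend.sparse_categorical_crossentropy expects integer labels and probability outputs by default (from_logits=False), so calling the model output "predictions" rather than "logits" matches what the loss actually consumes when the final layer is a softmax. Below is a minimal, self-contained sketch of the training step these hunks touch. The toy cnn_model, optimizer, and layer sizes here are placeholders for illustration only; the notebook defines its own model, optimizer, and data pipeline in earlier cells not shown in this diff.

import tensorflow as tf

# Placeholder model and optimizer (assumptions, not the notebook's exact setup).
# The final softmax layer outputs probabilities, hence "predictions" not "logits".
cnn_model = tf.keras.Sequential([
    tf.keras.layers.Conv2D(24, 3, activation='relu', input_shape=(28, 28, 1)),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(10, activation='softmax'),
])
optimizer = tf.keras.optimizers.Adam(1e-3)
loss_history = []

def train_step(images, labels):
    # GradientTape records operations for automatic differentiation.
    with tf.GradientTape() as tape:
        # Forward pass: feed the images into the model to obtain predictions.
        predictions = cnn_model(images)
        # Sparse categorical cross entropy: integer labels vs. probability outputs.
        loss_value = tf.keras.backend.sparse_categorical_crossentropy(labels, predictions)
    # Append the mean batch loss to the loss history record.
    loss_history.append(loss_value.numpy().mean())
    # Backpropagate and apply the gradient update.
    grads = tape.gradient(loss_value, cnn_model.trainable_variables)
    optimizer.apply_gradients(zip(grads, cnn_model.trainable_variables))
    return loss_value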