diff --git a/lab-guided-regression-models-with-keras.ipynb b/lab-guided-regression-models-with-keras.ipynb
index b3234ec..3e55554 100644
--- a/lab-guided-regression-models-with-keras.ipynb
+++ b/lab-guided-regression-models-with-keras.ipynb
@@ -855,16 +855,7 @@
"cell_type": "code",
"execution_count": 11,
"metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "2024-08-14 17:38:10.878028: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n",
- "To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n"
- ]
- }
- ],
+ "outputs": [],
"source": [
"from tensorflow import keras"
]
@@ -889,8 +880,8 @@
"metadata": {},
"outputs": [],
"source": [
- "from keras.models import Sequential\n",
- "from keras.layers import Dense"
+ "from tensorflow.keras.models import Sequential\n",
+ "from tensorflow.keras.layers import Dense"
]
},
{
@@ -972,16 +963,7 @@
"cell_type": "code",
"execution_count": 14,
"metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/Users/joaorochaemelo/code/IH/venv_ironhack/lib/python3.11/site-packages/keras/src/layers/core/dense.py:87: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead.\n",
- " super().__init__(activity_regularizer=activity_regularizer, **kwargs)\n"
- ]
- }
- ],
+ "outputs": [],
"source": [
"# build the model\n",
"model = regression_model()"
@@ -1030,224 +1012,7 @@
"cell_type": "code",
"execution_count": 16,
"metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Epoch 1/100\n",
- "23/23 - 1s - 40ms/step - loss: 1589.7310\n",
- "Epoch 2/100\n",
- "23/23 - 0s - 2ms/step - loss: 1498.0045\n",
- "Epoch 3/100\n",
- "23/23 - 0s - 2ms/step - loss: 1356.2686\n",
- "Epoch 4/100\n",
- "23/23 - 0s - 2ms/step - loss: 1121.4917\n",
- "Epoch 5/100\n",
- "23/23 - 0s - 2ms/step - loss: 804.4485\n",
- "Epoch 6/100\n",
- "23/23 - 0s - 2ms/step - loss: 484.7380\n",
- "Epoch 7/100\n",
- "23/23 - 0s - 2ms/step - loss: 283.7404\n",
- "Epoch 8/100\n",
- "23/23 - 0s - 2ms/step - loss: 223.6398\n",
- "Epoch 9/100\n",
- "23/23 - 0s - 2ms/step - loss: 209.2920\n",
- "Epoch 10/100\n",
- "23/23 - 0s - 5ms/step - loss: 199.1528\n",
- "Epoch 11/100\n",
- "23/23 - 0s - 2ms/step - loss: 191.6396\n",
- "Epoch 12/100\n",
- "23/23 - 0s - 2ms/step - loss: 185.0003\n",
- "Epoch 13/100\n",
- "23/23 - 0s - 2ms/step - loss: 179.1767\n",
- "Epoch 14/100\n",
- "23/23 - 0s - 2ms/step - loss: 174.0735\n",
- "Epoch 15/100\n",
- "23/23 - 0s - 2ms/step - loss: 170.2730\n",
- "Epoch 16/100\n",
- "23/23 - 0s - 2ms/step - loss: 166.6689\n",
- "Epoch 17/100\n",
- "23/23 - 0s - 2ms/step - loss: 162.4221\n",
- "Epoch 18/100\n",
- "23/23 - 0s - 2ms/step - loss: 159.4540\n",
- "Epoch 19/100\n",
- "23/23 - 0s - 2ms/step - loss: 156.3569\n",
- "Epoch 20/100\n",
- "23/23 - 0s - 2ms/step - loss: 153.3033\n",
- "Epoch 21/100\n",
- "23/23 - 0s - 2ms/step - loss: 150.7955\n",
- "Epoch 22/100\n",
- "23/23 - 0s - 2ms/step - loss: 148.7971\n",
- "Epoch 23/100\n",
- "23/23 - 0s - 2ms/step - loss: 146.6381\n",
- "Epoch 24/100\n",
- "23/23 - 0s - 2ms/step - loss: 144.7416\n",
- "Epoch 25/100\n",
- "23/23 - 0s - 2ms/step - loss: 142.6251\n",
- "Epoch 26/100\n",
- "23/23 - 0s - 2ms/step - loss: 141.0247\n",
- "Epoch 27/100\n",
- "23/23 - 0s - 2ms/step - loss: 139.7123\n",
- "Epoch 28/100\n",
- "23/23 - 0s - 2ms/step - loss: 137.8783\n",
- "Epoch 29/100\n",
- "23/23 - 0s - 2ms/step - loss: 136.1062\n",
- "Epoch 30/100\n",
- "23/23 - 0s - 2ms/step - loss: 134.7343\n",
- "Epoch 31/100\n",
- "23/23 - 0s - 2ms/step - loss: 133.0127\n",
- "Epoch 32/100\n",
- "23/23 - 0s - 2ms/step - loss: 131.7408\n",
- "Epoch 33/100\n",
- "23/23 - 0s - 2ms/step - loss: 130.2917\n",
- "Epoch 34/100\n",
- "23/23 - 0s - 2ms/step - loss: 128.9789\n",
- "Epoch 35/100\n",
- "23/23 - 0s - 2ms/step - loss: 128.0439\n",
- "Epoch 36/100\n",
- "23/23 - 0s - 2ms/step - loss: 127.1930\n",
- "Epoch 37/100\n",
- "23/23 - 0s - 2ms/step - loss: 125.3434\n",
- "Epoch 38/100\n",
- "23/23 - 0s - 2ms/step - loss: 123.9078\n",
- "Epoch 39/100\n",
- "23/23 - 0s - 2ms/step - loss: 122.7656\n",
- "Epoch 40/100\n",
- "23/23 - 0s - 2ms/step - loss: 121.3873\n",
- "Epoch 41/100\n",
- "23/23 - 0s - 2ms/step - loss: 120.2595\n",
- "Epoch 42/100\n",
- "23/23 - 0s - 2ms/step - loss: 119.1386\n",
- "Epoch 43/100\n",
- "23/23 - 0s - 2ms/step - loss: 117.5028\n",
- "Epoch 44/100\n",
- "23/23 - 0s - 2ms/step - loss: 116.4576\n",
- "Epoch 45/100\n",
- "23/23 - 0s - 2ms/step - loss: 114.6131\n",
- "Epoch 46/100\n",
- "23/23 - 0s - 2ms/step - loss: 113.0364\n",
- "Epoch 47/100\n",
- "23/23 - 0s - 2ms/step - loss: 111.7229\n",
- "Epoch 48/100\n",
- "23/23 - 0s - 2ms/step - loss: 110.1791\n",
- "Epoch 49/100\n",
- "23/23 - 0s - 2ms/step - loss: 108.3175\n",
- "Epoch 50/100\n",
- "23/23 - 0s - 2ms/step - loss: 107.3923\n",
- "Epoch 51/100\n",
- "23/23 - 0s - 2ms/step - loss: 105.5512\n",
- "Epoch 52/100\n",
- "23/23 - 0s - 5ms/step - loss: 103.5289\n",
- "Epoch 53/100\n",
- "23/23 - 0s - 2ms/step - loss: 101.4534\n",
- "Epoch 54/100\n",
- "23/23 - 0s - 2ms/step - loss: 100.1987\n",
- "Epoch 55/100\n",
- "23/23 - 0s - 2ms/step - loss: 99.1770\n",
- "Epoch 56/100\n",
- "23/23 - 0s - 2ms/step - loss: 97.6906\n",
- "Epoch 57/100\n",
- "23/23 - 0s - 2ms/step - loss: 94.2602\n",
- "Epoch 58/100\n",
- "23/23 - 0s - 2ms/step - loss: 92.5050\n",
- "Epoch 59/100\n",
- "23/23 - 0s - 2ms/step - loss: 89.8930\n",
- "Epoch 60/100\n",
- "23/23 - 0s - 2ms/step - loss: 87.6395\n",
- "Epoch 61/100\n",
- "23/23 - 0s - 2ms/step - loss: 85.7578\n",
- "Epoch 62/100\n",
- "23/23 - 0s - 2ms/step - loss: 83.9698\n",
- "Epoch 63/100\n",
- "23/23 - 0s - 2ms/step - loss: 81.4788\n",
- "Epoch 64/100\n",
- "23/23 - 0s - 2ms/step - loss: 78.8132\n",
- "Epoch 65/100\n",
- "23/23 - 0s - 2ms/step - loss: 76.6305\n",
- "Epoch 66/100\n",
- "23/23 - 0s - 2ms/step - loss: 74.4165\n",
- "Epoch 67/100\n",
- "23/23 - 0s - 2ms/step - loss: 72.1750\n",
- "Epoch 68/100\n",
- "23/23 - 0s - 2ms/step - loss: 70.6617\n",
- "Epoch 69/100\n",
- "23/23 - 0s - 2ms/step - loss: 68.0864\n",
- "Epoch 70/100\n",
- "23/23 - 0s - 2ms/step - loss: 66.9356\n",
- "Epoch 71/100\n",
- "23/23 - 0s - 2ms/step - loss: 64.2999\n",
- "Epoch 72/100\n",
- "23/23 - 0s - 2ms/step - loss: 62.2969\n",
- "Epoch 73/100\n",
- "23/23 - 0s - 2ms/step - loss: 61.3270\n",
- "Epoch 74/100\n",
- "23/23 - 0s - 2ms/step - loss: 60.2607\n",
- "Epoch 75/100\n",
- "23/23 - 0s - 2ms/step - loss: 57.8469\n",
- "Epoch 76/100\n",
- "23/23 - 0s - 2ms/step - loss: 56.7945\n",
- "Epoch 77/100\n",
- "23/23 - 0s - 2ms/step - loss: 55.5016\n",
- "Epoch 78/100\n",
- "23/23 - 0s - 2ms/step - loss: 54.0605\n",
- "Epoch 79/100\n",
- "23/23 - 0s - 2ms/step - loss: 52.8370\n",
- "Epoch 80/100\n",
- "23/23 - 0s - 2ms/step - loss: 51.5477\n",
- "Epoch 81/100\n",
- "23/23 - 0s - 2ms/step - loss: 50.3530\n",
- "Epoch 82/100\n",
- "23/23 - 0s - 2ms/step - loss: 50.2427\n",
- "Epoch 83/100\n",
- "23/23 - 0s - 2ms/step - loss: 48.8843\n",
- "Epoch 84/100\n",
- "23/23 - 0s - 2ms/step - loss: 47.7792\n",
- "Epoch 85/100\n",
- "23/23 - 0s - 2ms/step - loss: 47.3964\n",
- "Epoch 86/100\n",
- "23/23 - 0s - 2ms/step - loss: 46.3647\n",
- "Epoch 87/100\n",
- "23/23 - 0s - 2ms/step - loss: 45.4960\n",
- "Epoch 88/100\n",
- "23/23 - 0s - 2ms/step - loss: 44.3812\n",
- "Epoch 89/100\n",
- "23/23 - 0s - 2ms/step - loss: 43.8020\n",
- "Epoch 90/100\n",
- "23/23 - 0s - 2ms/step - loss: 43.1113\n",
- "Epoch 91/100\n",
- "23/23 - 0s - 2ms/step - loss: 42.5655\n",
- "Epoch 92/100\n",
- "23/23 - 0s - 2ms/step - loss: 41.5551\n",
- "Epoch 93/100\n",
- "23/23 - 0s - 2ms/step - loss: 41.3190\n",
- "Epoch 94/100\n",
- "23/23 - 0s - 2ms/step - loss: 41.0615\n",
- "Epoch 95/100\n",
- "23/23 - 0s - 5ms/step - loss: 40.1528\n",
- "Epoch 96/100\n",
- "23/23 - 0s - 2ms/step - loss: 39.6206\n",
- "Epoch 97/100\n",
- "23/23 - 0s - 2ms/step - loss: 38.8477\n",
- "Epoch 98/100\n",
- "23/23 - 0s - 2ms/step - loss: 39.0186\n",
- "Epoch 99/100\n",
- "23/23 - 0s - 2ms/step - loss: 38.2057\n",
- "Epoch 100/100\n",
- "23/23 - 0s - 2ms/step - loss: 37.8294\n"
- ]
- },
- {
- "data": {
- "text/plain": [
- ""
- ]
- },
- "execution_count": 16,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
+ "outputs": [],
"source": [
"# fit the model\n",
"model.fit(X_train, y_train, epochs=100, verbose=2)"
@@ -1268,7 +1033,7 @@
},
{
"cell_type": "code",
- "execution_count": 17,
+ "execution_count": null,
"metadata": {},
"outputs": [
{
@@ -1293,7 +1058,7 @@
},
{
"cell_type": "code",
- "execution_count": 18,
+ "execution_count": null,
"metadata": {},
"outputs": [
{
@@ -1365,7 +1130,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.11.9"
+ "version": "3.8.15"
}
},
"nbformat": 4,