From 49c19e3c886f011dbec4ab151bdf82aa674cfe23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Pedro=20Martins?= Date: Fri, 19 Mar 2021 21:49:49 -0300 Subject: [PATCH 1/3] =?UTF-8?q?Adicionando=20altera=C3=A7=C3=A3o=20da=20fu?= =?UTF-8?q?n=C3=A7=C3=A3o=20de=20ativa=C3=A7=C3=A3o?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ArtificialNeuralNetwork.ipynb | 230 +++++++++++++++++----------------- 1 file changed, 115 insertions(+), 115 deletions(-) diff --git a/ArtificialNeuralNetwork.ipynb b/ArtificialNeuralNetwork.ipynb index a038c81..5ab8075 100644 --- a/ArtificialNeuralNetwork.ipynb +++ b/ArtificialNeuralNetwork.ipynb @@ -54,7 +54,7 @@ { "data": { "text/plain": [ - "'2.3.1'" + "'2.4.1'" ] }, "execution_count": 2, @@ -444,7 +444,7 @@ }, "outputs": [], "source": [ - "ann.add(tf.keras.layers.Dense(units=6, activation='relu'))" + "ann.add(tf.keras.layers.Dense(units=6, activation='softplus'))" ] }, { @@ -467,7 +467,7 @@ }, "outputs": [], "source": [ - "ann.add(tf.keras.layers.Dense(units=1, activation='sigmoid'))" + "ann.add(tf.keras.layers.Dense(units=1, activation='tanh'))" ] }, { @@ -531,211 +531,211 @@ "output_type": "stream", "text": [ "Epoch 1/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.5339 - accuracy: 0.7921\n", + "250/250 [==============================] - 2s 997us/step - loss: 1.8349 - accuracy: 0.3000\n", "Epoch 2/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.4702 - accuracy: 0.7960\n", + "250/250 [==============================] - 0s 920us/step - loss: 0.8271 - accuracy: 0.6432\n", "Epoch 3/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.4468 - accuracy: 0.8034\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.7009 - accuracy: 0.7044\n", "Epoch 4/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.4356 - accuracy: 0.8067\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.6300 - accuracy: 0.7222\n", "Epoch 5/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.4288 - accuracy: 0.8110\n", + "250/250 [==============================] - 0s 904us/step - loss: 0.6032 - accuracy: 0.7301\n", "Epoch 6/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.4243 - accuracy: 0.8155\n", + "250/250 [==============================] - 0s 967us/step - loss: 0.5617 - accuracy: 0.7820\n", "Epoch 7/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.4210 - accuracy: 0.8195\n", + "250/250 [==============================] - 0s 984us/step - loss: 0.5409 - accuracy: 0.7891\n", "Epoch 8/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.4185 - accuracy: 0.8205\n", + "250/250 [==============================] - 0s 928us/step - loss: 0.5544 - accuracy: 0.7916\n", "Epoch 9/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.4168 - accuracy: 0.8216\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.5514 - accuracy: 0.8040\n", "Epoch 10/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.4151 - accuracy: 0.8239\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.5439 - accuracy: 0.7750\n", "Epoch 11/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.4136 - accuracy: 0.8242: 0s - loss: 0.4148 - accuracy: 0.\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.5165 - accuracy: 0.8050\n", 
"Epoch 12/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.4114 - accuracy: 0.8256\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.5011 - accuracy: 0.8072\n", "Epoch 13/100\n", - "250/250 [==============================] - 0s 971us/step - loss: 0.4091 - accuracy: 0.8269\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.4778 - accuracy: 0.8095\n", "Epoch 14/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.4056 - accuracy: 0.8289\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.4854 - accuracy: 0.8056\n", "Epoch 15/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3999 - accuracy: 0.8319\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.4724 - accuracy: 0.8097\n", "Epoch 16/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3930 - accuracy: 0.8363\n", + "250/250 [==============================] - 0s 880us/step - loss: 0.4779 - accuracy: 0.8081\n", "Epoch 17/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3858 - accuracy: 0.8399\n", + "250/250 [==============================] - 0s 835us/step - loss: 0.4603 - accuracy: 0.8138\n", "Epoch 18/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3793 - accuracy: 0.8445\n", + "250/250 [==============================] - 0s 847us/step - loss: 0.4565 - accuracy: 0.8093\n", "Epoch 19/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3745 - accuracy: 0.8447\n", + "250/250 [==============================] - 0s 859us/step - loss: 0.4447 - accuracy: 0.8192\n", "Epoch 20/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3701 - accuracy: 0.8482\n", + "250/250 [==============================] - 0s 888us/step - loss: 0.4423 - accuracy: 0.8184\n", "Epoch 21/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3667 - accuracy: 0.8499\n", + "250/250 [==============================] - 0s 972us/step - loss: 0.4430 - accuracy: 0.8098\n", "Epoch 22/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3643 - accuracy: 0.8515\n", + "250/250 [==============================] - 0s 843us/step - loss: 0.4376 - accuracy: 0.8113\n", "Epoch 23/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3620 - accuracy: 0.8504\n", + "250/250 [==============================] - 0s 912us/step - loss: 0.4195 - accuracy: 0.8284\n", "Epoch 24/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3597 - accuracy: 0.8541\n", + "250/250 [==============================] - 0s 976us/step - loss: 0.4287 - accuracy: 0.8235\n", "Epoch 25/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3579 - accuracy: 0.8544\n", + "250/250 [==============================] - 0s 969us/step - loss: 0.4274 - accuracy: 0.8137\n", "Epoch 26/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3567 - accuracy: 0.8534\n", + "250/250 [==============================] - ETA: 0s - loss: 0.4083 - accuracy: 0.8190 ETA: 0s - loss: 0.4075 - accuracy: 0. 
- 0s 967us/step - loss: 0.4099 - accuracy: 0.8190\n", "Epoch 27/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3548 - accuracy: 0.8564\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.4174 - accuracy: 0.8256\n", "Epoch 28/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3529 - accuracy: 0.8560\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.4236 - accuracy: 0.8213\n", "Epoch 29/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3508 - accuracy: 0.8575\n", + "250/250 [==============================] - 0s 952us/step - loss: 0.3992 - accuracy: 0.8277\n", "Epoch 30/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3488 - accuracy: 0.8596\n", + "250/250 [==============================] - 0s 992us/step - loss: 0.4132 - accuracy: 0.8200\n", "Epoch 31/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3478 - accuracy: 0.8597\n", + "250/250 [==============================] - 0s 971us/step - loss: 0.4119 - accuracy: 0.8154\n", "Epoch 32/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3459 - accuracy: 0.8597\n", + "250/250 [==============================] - 0s 956us/step - loss: 0.3883 - accuracy: 0.8246\n", "Epoch 33/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3454 - accuracy: 0.8600\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3990 - accuracy: 0.8309\n", "Epoch 34/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3441 - accuracy: 0.8600\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.4401 - accuracy: 0.8260\n", "Epoch 35/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3438 - accuracy: 0.8609\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.4157 - accuracy: 0.8294\n", "Epoch 36/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3432 - accuracy: 0.8608\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.4059 - accuracy: 0.8267\n", "Epoch 37/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3425 - accuracy: 0.8614\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3909 - accuracy: 0.8352\n", "Epoch 38/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3422 - accuracy: 0.8620\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3976 - accuracy: 0.8289\n", "Epoch 39/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3410 - accuracy: 0.8605\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.4040 - accuracy: 0.8262\n", "Epoch 40/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3412 - accuracy: 0.8608\n", + "250/250 [==============================] - 0s 1000us/step - loss: 0.4139 - accuracy: 0.8223\n", "Epoch 41/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3404 - accuracy: 0.8609\n", + "250/250 [==============================] - 0s 993us/step - loss: 0.3841 - accuracy: 0.8312\n", "Epoch 42/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3406 - accuracy: 0.8619\n", + "250/250 [==============================] - 0s 952us/step - loss: 0.3875 - accuracy: 0.8328\n", "Epoch 43/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3399 - accuracy: 0.8625\n", + "250/250 
[==============================] - 0s 996us/step - loss: 0.3985 - accuracy: 0.8391\n", "Epoch 44/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3396 - accuracy: 0.8636\n", + "250/250 [==============================] - 0s 972us/step - loss: 0.3916 - accuracy: 0.8299\n", "Epoch 45/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3397 - accuracy: 0.8620\n", + "250/250 [==============================] - 0s 996us/step - loss: 0.3808 - accuracy: 0.8332\n", "Epoch 46/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3391 - accuracy: 0.8615\n", + "250/250 [==============================] - 0s 952us/step - loss: 0.3991 - accuracy: 0.8327\n", "Epoch 47/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3391 - accuracy: 0.8626\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3791 - accuracy: 0.8382\n", "Epoch 48/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3387 - accuracy: 0.8620\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3907 - accuracy: 0.8345\n", "Epoch 49/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3388 - accuracy: 0.8619\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3782 - accuracy: 0.8371: 0s - loss: 0.3772 - accuracy: 0.83\n", "Epoch 50/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3383 - accuracy: 0.8637\n", + "250/250 [==============================] - ETA: 0s - loss: 0.3966 - accuracy: 0.8362 ETA: 0s - loss: 0.4301 - accuracy - 0s 960us/step - loss: 0.3943 - accuracy: 0.8365\n", "Epoch 51/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3384 - accuracy: 0.8596\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3865 - accuracy: 0.8381\n", "Epoch 52/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3379 - accuracy: 0.8622\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3866 - accuracy: 0.8420\n", "Epoch 53/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3376 - accuracy: 0.8622\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3938 - accuracy: 0.8344\n", "Epoch 54/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3373 - accuracy: 0.8612\n", + "250/250 [==============================] - 0s 944us/step - loss: 0.3863 - accuracy: 0.8397\n", "Epoch 55/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3376 - accuracy: 0.8614\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3856 - accuracy: 0.8465\n", "Epoch 56/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3372 - accuracy: 0.8625\n", + "250/250 [==============================] - 0s 992us/step - loss: 0.3836 - accuracy: 0.8444\n", "Epoch 57/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3372 - accuracy: 0.8608: 0s - loss: 0.3252 - ac\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3824 - accuracy: 0.8418\n", "Epoch 58/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3368 - accuracy: 0.8606\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3784 - accuracy: 0.8468\n", "Epoch 59/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3362 - accuracy: 0.8621\n", + "250/250 [==============================] - 0s 
1ms/step - loss: 0.3778 - accuracy: 0.8493\n", "Epoch 60/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3365 - accuracy: 0.8615\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3935 - accuracy: 0.8513\n", "Epoch 61/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3363 - accuracy: 0.8616\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3642 - accuracy: 0.8441\n", "Epoch 62/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3362 - accuracy: 0.8611\n", + "250/250 [==============================] - 0s 972us/step - loss: 0.3717 - accuracy: 0.8526\n", "Epoch 63/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3360 - accuracy: 0.8618\n", + "250/250 [==============================] - 0s 960us/step - loss: 0.3807 - accuracy: 0.8463\n", "Epoch 64/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3360 - accuracy: 0.8614\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3822 - accuracy: 0.8453\n", "Epoch 65/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3361 - accuracy: 0.8616\n", + "250/250 [==============================] - 0s 992us/step - loss: 0.4440 - accuracy: 0.8419\n", "Epoch 66/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3354 - accuracy: 0.8612\n", + "250/250 [==============================] - 0s 968us/step - loss: 0.4225 - accuracy: 0.8412\n", "Epoch 67/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3354 - accuracy: 0.8614\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.4033 - accuracy: 0.8511\n", "Epoch 68/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3355 - accuracy: 0.8622\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.4138 - accuracy: 0.8463\n", "Epoch 69/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3352 - accuracy: 0.8624\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3998 - accuracy: 0.8459\n", "Epoch 70/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3356 - accuracy: 0.8609\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.4125 - accuracy: 0.8419\n", "Epoch 71/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3351 - accuracy: 0.8627\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3999 - accuracy: 0.8455\n", "Epoch 72/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3348 - accuracy: 0.8609\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3752 - accuracy: 0.8569\n", "Epoch 73/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3352 - accuracy: 0.8615\n", + "250/250 [==============================] - 0s 968us/step - loss: 0.3718 - accuracy: 0.8548\n", "Epoch 74/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3352 - accuracy: 0.8622\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3824 - accuracy: 0.8483\n", "Epoch 75/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3349 - accuracy: 0.8626\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3774 - accuracy: 0.8447\n", "Epoch 76/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3349 - accuracy: 0.8624\n", + "250/250 
[==============================] - 0s 1ms/step - loss: 0.3694 - accuracy: 0.8502\n", "Epoch 77/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3350 - accuracy: 0.8634\n", + "250/250 [==============================] - 0s 968us/step - loss: 0.3628 - accuracy: 0.8517\n", "Epoch 78/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3350 - accuracy: 0.8615\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3645 - accuracy: 0.8504\n", "Epoch 79/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3345 - accuracy: 0.8624\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3690 - accuracy: 0.8452\n", "Epoch 80/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3345 - accuracy: 0.8620\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3728 - accuracy: 0.8540\n", "Epoch 81/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3348 - accuracy: 0.8618\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3967 - accuracy: 0.8533\n", "Epoch 82/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3345 - accuracy: 0.8622\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3726 - accuracy: 0.8492\n", "Epoch 83/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3344 - accuracy: 0.8626\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3638 - accuracy: 0.8506\n", "Epoch 84/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3346 - accuracy: 0.8616\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3772 - accuracy: 0.8449\n", "Epoch 85/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3344 - accuracy: 0.8624\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3705 - accuracy: 0.8567\n", "Epoch 86/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3344 - accuracy: 0.8624\n", + "250/250 [==============================] - 0s 968us/step - loss: 0.3632 - accuracy: 0.8526\n", "Epoch 87/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3342 - accuracy: 0.8626\n", + "250/250 [==============================] - 0s 884us/step - loss: 0.3737 - accuracy: 0.8459\n", "Epoch 88/100\n", - "250/250 [==============================] - 0s 2ms/step - loss: 0.3347 - accuracy: 0.8621\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3681 - accuracy: 0.8551\n", "Epoch 89/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3347 - accuracy: 0.8636\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3680 - accuracy: 0.8528\n", "Epoch 90/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3341 - accuracy: 0.8626\n", + "250/250 [==============================] - 0s 972us/step - loss: 0.3572 - accuracy: 0.8555\n", "Epoch 91/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3346 - accuracy: 0.8610\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3634 - accuracy: 0.8455\n", "Epoch 92/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3345 - accuracy: 0.8622\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3562 - accuracy: 0.8496\n", "Epoch 93/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3342 - 
accuracy: 0.8624\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3532 - accuracy: 0.8539\n", "Epoch 94/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3341 - accuracy: 0.8620\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3605 - accuracy: 0.8555\n", "Epoch 95/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3340 - accuracy: 0.8629\n", + "250/250 [==============================] - 0s 932us/step - loss: 0.3627 - accuracy: 0.8577\n", "Epoch 96/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3339 - accuracy: 0.8621\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3713 - accuracy: 0.8541\n", "Epoch 97/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3341 - accuracy: 0.8609\n", + "250/250 [==============================] - 0s 980us/step - loss: 0.3697 - accuracy: 0.8491\n", "Epoch 98/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3344 - accuracy: 0.8610\n", + "250/250 [==============================] - 0s 876us/step - loss: 0.3519 - accuracy: 0.8593\n", "Epoch 99/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3338 - accuracy: 0.8636\n", + "250/250 [==============================] - 0s 1ms/step - loss: 0.3554 - accuracy: 0.8519\n", "Epoch 100/100\n", - "250/250 [==============================] - 0s 1ms/step - loss: 0.3340 - accuracy: 0.8621\n" + "250/250 [==============================] - 0s 864us/step - loss: 0.3544 - accuracy: 0.8636\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 14, @@ -758,22 +758,22 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 15, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "array([[0.2643727 ],\n", - " [0.33818087],\n", - " [0.13732666],\n", + "array([[0.25259203],\n", + " [0.2828929 ],\n", + " [0.13736442],\n", " ...,\n", - " [0.12566164],\n", - " [0.1755141 ],\n", - " [0.30401647]], dtype=float32)" + " [0.29478523],\n", + " [0.14647618],\n", + " [0.14230612]], dtype=float32)" ] }, - "execution_count": 16, + "execution_count": 15, "metadata": {}, "output_type": "execute_result" } @@ -785,7 +785,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 16, "metadata": {}, "outputs": [ { @@ -800,7 +800,7 @@ " [False]])" ] }, - "execution_count": 17, + "execution_count": 16, "metadata": {}, "output_type": "execute_result" } @@ -869,7 +869,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.9" + "version": "3.8.5" } }, "nbformat": 4, From 91ac1575caee1400bb77438185ea7cacc40678b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Pedro=20Martins?= Date: Fri, 19 Mar 2021 21:52:59 -0300 Subject: [PATCH 2/3] =?UTF-8?q?Alterando=20fun=C3=A7=C3=B5es=20de=20ativa?= =?UTF-8?q?=C3=A7=C3=A3o?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ArtificialNeuralNetwork.ipynb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ArtificialNeuralNetwork.ipynb b/ArtificialNeuralNetwork.ipynb index 5ab8075..e07124d 100644 --- a/ArtificialNeuralNetwork.ipynb +++ b/ArtificialNeuralNetwork.ipynb @@ -819,15 +819,15 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 17, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "[[1519 76]\n", - " [ 200 205]]\n" + "[[1523 72]\n", + " [ 211 194]]\n" ] } ], From 
22b447daa9f4b87e72685acfac57d3a558964e33 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Pedro=20Martins?= Date: Sat, 27 Mar 2021 23:45:56 -0300 Subject: [PATCH 3/3] =?UTF-8?q?Adicionando=20notebook=20com=20avalia=C3=A7?= =?UTF-8?q?=C3=A3o=20da=20rede?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 2 + Evaluating_Improving_Tunning.ipynb | 505 +++++++++++++++++++++++++++++ 2 files changed, 507 insertions(+) create mode 100644 Evaluating_Improving_Tunning.ipynb diff --git a/.gitignore b/.gitignore index 87620ac..7cdbbff 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,3 @@ .ipynb_checkpoints/ + +.vscode/settings.json diff --git a/Evaluating_Improving_Tunning.ipynb b/Evaluating_Improving_Tunning.ipynb new file mode 100644 index 0000000..f01177c --- /dev/null +++ b/Evaluating_Improving_Tunning.ipynb @@ -0,0 +1,505 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# REDES NEURAIS ARTIFICIAIS - AVALIAÇÃO, MELHORIA E AFINAÇÃO\n", + "\n", + "[Aula 5 de Deep Learning](http://bit.ly/dn-unb05) da Engenharia de Software da UnB" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Parte 1 - Pré-processamento dos Dados\n", + "\n", + "## Importar as libs" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import pandas as pd\n", + "import tensorflow as tf" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Importar o Dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "dataset = pd.read_csv('Churn_Modelling.csv')\n", + "X = dataset.iloc[:, 3:13].values\n", + "y = dataset.iloc[:, 13].values" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Transformar dados categóricos " + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[619 'France' 0 ... 1 1 101348.88]\n", + " [608 'Spain' 0 ... 0 1 112542.58]\n", + " [502 'France' 0 ... 1 0 113931.57]\n", + " ...\n", + " [709 'France' 0 ... 0 1 42085.58]\n", + " [772 'Germany' 1 ... 1 0 92888.52]\n", + " [792 'France' 0 ... 1 0 38190.78]]\n", + "[[1.0 0.0 0.0 ... 1 1 101348.88]\n", + " [0.0 0.0 1.0 ... 0 1 112542.58]\n", + " [1.0 0.0 0.0 ... 1 0 113931.57]\n", + " ...\n", + " [1.0 0.0 0.0 ... 0 1 42085.58]\n", + " [0.0 1.0 0.0 ... 1 0 92888.52]\n", + " [1.0 0.0 0.0 ... 
1 0 38190.78]]\n" + ] + } + ], + "source": [ + "# Label Encoding the \"Gender\" column\n", + "from sklearn.preprocessing import LabelEncoder\n", + "le = LabelEncoder()\n", + "X[:, 2] = le.fit_transform(X[:, 2])\n", + "print(X)\n", + "# One Hot Encoding the \"Geography\" column\n", + "from sklearn.compose import ColumnTransformer\n", + "from sklearn.preprocessing import OneHotEncoder\n", + "ct = ColumnTransformer(transformers=[('encoder', OneHotEncoder(), [1])], remainder='passthrough')\n", + "X = np.array(ct.fit_transform(X))\n", + "print(X)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Dividindo o dataset em conjunto de treinamento e conjunto de teste" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "from sklearn.model_selection import train_test_split\n", + "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 0)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Feature Scaling" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "from sklearn.preprocessing import StandardScaler\n", + "sc = StandardScaler()\n", + "X_train = sc.fit_transform(X_train)\n", + "X_test = sc.transform(X_test)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(8000, 12)" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "np.shape(X_train)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "---\n", + "# Parte 2 - Vamos construir uma ANN!" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Importar as libs e pacotes do Keras " + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "from tensorflow.keras.models import Sequential\n", + "from tensorflow.keras.layers import Dense\n", + "from tensorflow.keras.layers import Dropout" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Inicializar a ANN" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "ann = tf.keras.models.Sequential()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Adicionar a camada de entrada e 1ª camada oculta" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "ann.add(tf.keras.layers.Dense(units = 6, activation = 'relu'))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Adicionar 2ª camada oculta" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "ann.add(tf.keras.layers.Dense(units = 6, activation = 'softplus'))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Adicionar camada de saída" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "ann.add(tf.keras.layers.Dense(units = 1, activation = 'tanh'))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Parte 3 - Treinando a ANN" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Compilar a rede neural" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + 
"ann.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Treinando a ANN com o conjunto de treinamento" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1/100\n", + "800/800 [==============================] - 2s 1ms/step - loss: 3.2290 - accuracy: 0.7907\n", + "Epoch 2/100\n", + "800/800 [==============================] - 1s 1ms/step - loss: 3.1540 - accuracy: 0.7955\n", + "Epoch 3/100\n", + "800/800 [==============================] - 1s 875us/step - loss: 3.1826 - accuracy: 0.7937\n", + "Epoch 4/100\n", + "147/800 [====>.........................] - ETA: 0s - loss: 2.9590 - accuracy: 0.8082" + ] + } + ], + "source": [ + "ann.fit(X_train, y_train, batch_size = 10, epochs = 100)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Parte 4 - Fazendo predições e avaliando o modelo" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Prevendo os resultados com o conjunto de teste" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "y_pred = ann.predict(X_test)\n", + "y_pred" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "y_pred = (y_pred > 0.5)\n", + "y_pred" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Criando uma Matriz de Confusão" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from sklearn.metrics import confusion_matrix\n", + "cm = confusion_matrix(y_test, y_pred)\n", + "print(cm)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Parte 5 - Avaliação, Melhoria e Afinação" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Avaliar a ANN" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from tensorflow.keras.wrappers.scikit_learn import KerasClassifier\n", + "from sklearn.model_selection import cross_val_score\n", + "from tensorflow.keras.layers import Dense\n", + "def build_classifier():\n", + " classifier = Sequential()\n", + " classifier.add(Dense(units = 6, kernel_initializer = 'uniform', activation = 'relu', input_dim = 11))\n", + " classifier.add(Dense(units = 6, kernel_initializer = 'uniform', activation = 'softplus'))\n", + " classifier.add(Dense(units = 1, kernel_initializer = 'uniform', activation = 'tanh'))\n", + " classifier.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])\n", + " return classifier\n", + "\n", + "classifier = KerasClassifier(build_fn = build_classifier, batch_size = 10, epochs = 10)\n", + "accuracies = cross_val_score(estimator = classifier, X = X_train, y = y_train, cv = 10, n_jobs = -1)\n", + "mean = accuracies.mean()\n", + "variance = accuracies.std()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Melhorar a ANN" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def build_classifier():\n", + " classifier = Sequential()\n", + " classifier.add(Dense(units = 6, kernel_initializer = 'uniform', activation = 'relu', input_dim = 11))\n", + " classifier.add(Dropout(rate = 0.1))\n", + " classifier.add(Dense(units = 6, 
kernel_initializer = 'uniform', activation = 'softplus'))\n", + " classifier.add(Dropout(rate = 0.1))\n", + " classifier.add(Dense(units = 1, kernel_initializer = 'uniform', activation = 'tanh'))\n", + " classifier.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])\n", + " return classifier\n", + "\n", + "classifier = KerasClassifier(build_fn = build_classifier, batch_size = 10, epochs = 10)\n", + "accuracies = cross_val_score(estimator = classifier, X = X_train, y = y_train, cv = 10, n_jobs = -1)\n", + "mean = accuracies.mean()\n", + "variance = accuracies.std()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Afinar a ANN" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from tensorflow.keras.wrappers.scikit_learn import KerasClassifier\n", + "from sklearn.model_selection import GridSearchCV\n", + "from tensorflow.keras.models import Sequential\n", + "from tensorflow.keras.layers import Dense\n", + "\n", + "def build_classifier(optimizer):\n", + " classifier = Sequential()\n", + " classifier.add(Dense(units = 6, kernel_initializer = 'uniform', activation = 'relu'))\n", + " classifier.add(Dropout(rate = 0.1))\n", + " classifier.add(Dense(units = 6, kernel_initializer = 'uniform', activation = 'softplus'))\n", + " classifier.add(Dropout(rate = 0.1))\n", + " classifier.add(Dense(units = 1, kernel_initializer = 'uniform', activation = 'tanh'))\n", + " classifier.compile(optimizer = optimizer, loss = 'binary_crossentropy', metrics = ['accuracy'])\n", + " return classifier" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "classifier = KerasClassifier(build_fn = build_classifier)\n", + "\n", + "parameters = {'batch_size': [10, 25],\n", + " 'epochs': [50, 100],\n", + " 'optimizer': ['adam', 'rmsprop']}\n", + "\n", + "grid_search = GridSearchCV(estimator = classifier,\n", + " scoring = 'accuracy',\n", + " param_grid = parameters,\n", + " cv = 10)\n", + "\n", + "grid_search = grid_search.fit(X_train, y_train)\n", + "\n", + "best_parameters = grid_search.best_params_\n", + "best_accuracy = grid_search.best_score_\n", + "mean_score = grid_search.cv_results_['mean_test_score']\n", + "std_score = grid_search.cv_results_['std_test_score']\n", + "params_score = grid_search.cv_results_['params']\n", + "\n", + "print('Melhores parametros {}\\nMelhor Acuracia: {}'.format(best_parameters, best_accuracy))\n", + "print('Média: Var: Param:')\n", + "for mean, stdev, param in zip(mean_score, std_score, params_score):\n", + " print(\"%f (%f) com: %r\" % (mean, stdev, param))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.5" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +}
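
Note on the activation change in PATCH 1/3 and 2/3: the model is still compiled with loss = 'binary_crossentropy', which assumes the output layer yields a probability in (0, 1). Replacing the sigmoid output with tanh lets the prediction go negative, so Keras must clip it before taking the log; that is consistent with the much higher starting loss in the new training log and the weaker confusion matrix in PATCH 2/3. Below is a minimal, hypothetical sketch of the difference — the tensors are made up for illustration and are not taken from the notebooks.

    import tensorflow as tf

    # Hypothetical binary targets and raw pre-activation scores, for illustration only.
    y_true = tf.constant([[0.0], [1.0], [1.0], [0.0]])
    z = tf.constant([[-2.0], [0.5], [3.0], [-0.5]])

    bce = tf.keras.losses.BinaryCrossentropy()

    # sigmoid keeps predictions inside (0, 1), which is what binary_crossentropy assumes.
    print("sigmoid output, BCE:", bce(y_true, tf.sigmoid(z)).numpy())

    # tanh ranges over (-1, 1); negative "probabilities" only avoid NaN because
    # Keras clips them to a small epsilon internally, which distorts the loss and gradients.
    print("tanh output, BCE:   ", bce(y_true, tf.tanh(z)).numpy())

If tanh is kept on the output layer, both the loss function and the `y_pred > 0.5` threshold used later in the notebooks would need to be rethought; with a sigmoid output, both assumptions hold as written.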