Commit e23e3239 authored by Mahesha999

All four algos refactored.

parent c0bbd4e8
# music_genre_classification
\ No newline at end of file
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam

def nn_fit_predict(X_train, y_train, X_test):
    # Two ReLU hidden layers of 58 units (one per input feature) and a
    # 10-way softmax output; y_train is expected to be one-hot encoded.
    model = Sequential([
        Dense(58, input_shape=(58,), activation='relu'),
        Dense(58, activation='relu'),
        Dense(10, activation='softmax'),
    ])
    model.compile(optimizer=Adam(), loss='categorical_crossentropy', metrics=['accuracy'])
    model.fit(X_train, y_train, batch_size=10, epochs=100)
    return model.predict(X_test)
# ANN_score=np.zeros(5,dtype=float)
# i=0
# for train_index,test_index in kf.split(X):
# X_train,X_test,y_train,y_test=X[train_index],X[test_index],Y[train_index],Y[test_index]
# ANN_score[i]=fn_ANN(X_train,X_test,y_train,y_test)
# i=i+1
# avg_ANN_score=np.sum(ANN_score)/5.0
# print(avg_ANN_score)
\ No newline at end of file
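The commented-out block above is the pre-refactor 5-fold loop and still calls the old fn_ANN helper. A minimal sketch of the same evaluation against the refactored nn_fit_predict, assuming X is the scaled (n, 58) feature matrix and Y the one-hot encoded genre labels, could look like this (KFold and accuracy_score come from scikit-learn; the helper name is illustrative, not part of this commit):

import numpy as np
from sklearn.metrics import accuracy_score
from sklearn.model_selection import KFold

def cross_val_nn(X, Y, n_splits=5):
    # 5-fold cross-validation of nn_fit_predict, mirroring the commented-out loop above.
    kf = KFold(n_splits=n_splits, shuffle=True, random_state=0)
    scores = []
    for train_index, test_index in kf.split(X):
        X_train, X_test = X[train_index], X[test_index]
        y_train, y_test = Y[train_index], Y[test_index]
        probs = nn_fit_predict(X_train, y_train, X_test)   # (n_test, 10) softmax outputs
        y_pred = np.argmax(probs, axis=1)                   # predicted class index
        y_true = np.argmax(y_test, axis=1)                  # true class index from one-hot labels
        scores.append(accuracy_score(y_true, y_pred))
    return float(np.mean(scores))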
This source diff could not be displayed because it is too large.
@@ -6,7 +6,8 @@
"metadata": {},
"outputs": [],
"source": [
-"from util import *"
+"from util import *\n",
+"from sklearn.model_selection import KFold"
]
},
{
@@ -22,7 +23,8 @@
"metadata": {},
"outputs": [],
"source": [
-"X_train, X_test, y_train, y_test = load_scale_xy_with_25p_split()"
+"#X_train, X_test, y_train, y_test = load_scale_xy_with_25p_split()\n",
+"X_train, X_test, y_train, y_test = load_preprocess_xy(25, True, False, False)"
]
},
{
@@ -34,7 +36,7 @@
},
{
"cell_type": "code",
-"execution_count": null,
+"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
@@ -111,12 +113,13 @@
"name": "stdout",
"output_type": "stream",
"text": [
-"Accuracy: 76.0\n"
+"Hypperparameters: {'bootstrap': True, 'ccp_alpha': 0.0, 'class_weight': None, 'criterion': 'entropy', 'max_depth': None, 'max_features': 'auto', 'max_leaf_nodes': None, 'max_samples': None, 'min_impurity_decrease': 0.0, 'min_impurity_split': None, 'min_samples_leaf': 1, 'min_samples_split': 2, 'min_weight_fraction_leaf': 0.0, 'n_estimators': 100, 'n_jobs': None, 'oob_score': False, 'random_state': 0, 'verbose': 0, 'warm_start': False}\n",
+"Accuracy: 84.0\n"
]
},
{
"data": {
"image/png": "<base64 PNG figure data omitted>\n",
"text/plain": [
"<Figure size 288x288 with 1 Axes>"
]
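The hyperparameter dictionary printed above is the classifier's full get_params() dump; only two of those settings differ from the scikit-learn defaults. A minimal sketch of an equivalently configured model follows purely for readability; the project's own random-forest construction code is not shown in this hunk, so this is an illustration, not its source:

from sklearn.ensemble import RandomForestClassifier

# Sketch only: matches the printed hyperparameters; every parameter not set
# here keeps the scikit-learn default value shown in the dump above.
rf = RandomForestClassifier(
    n_estimators=100,       # explicit in the dump, but equal to the default
    criterion='entropy',    # non-default (default is 'gini')
    random_state=0,         # non-default (default is None)
)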
@@ -212,6 +215,1222 @@
"source": [
"fit_predict_print_unsupervised(kmeans_fit_predict,X,X,y)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Neural Network"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"from ann import *"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"##### Load data, scale X, encode and dummify y"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"X, y = load_preprocess_xy(0, True, True, True)"
]
},
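load_preprocess_xy is defined in util.py, which is not part of this hunk. Going by the markdown cell above ("Load data, scale X, encode and dummify y") and the two call sites, the arguments appear to mean (test-split percent, scale X, label-encode y, one-hot y). A minimal sketch of that behaviour is given below; it takes raw arrays as input to stay self-contained (the real helper presumably loads the feature CSV itself), and every name in it is illustrative, not the project's code:

import numpy as np
from sklearn.preprocessing import StandardScaler, LabelEncoder
from sklearn.model_selection import train_test_split
from keras.utils import to_categorical

def load_preprocess_xy_sketch(X_raw, genre_labels, test_pct=0, scale=True, encode=True, dummify=True):
    # Hypothetical stand-in for util.load_preprocess_xy, not the project's code.
    X = StandardScaler().fit_transform(X_raw) if scale else X_raw                 # scale the 58 features
    y = LabelEncoder().fit_transform(genre_labels) if encode else genre_labels    # genre names -> 0..9
    if dummify:
        y = to_categorical(y)                                                     # one-hot ("dummify") the labels
    if test_pct:
        # e.g. test_pct=25 -> X_train, X_test, y_train, y_test with a 25% hold-out
        return train_test_split(X, y, test_size=test_pct / 100.0, random_state=0)
    return X, y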
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/100\n",
"80/80 [==============================] - 0s 600us/step - loss: 1.8110 - accuracy: 0.3462\n",
"Epoch 2/100\n",
"80/80 [==============================] - 0s 513us/step - loss: 1.2180 - accuracy: 0.5850\n",
"Epoch 3/100\n",
"80/80 [==============================] - 0s 588us/step - loss: 0.9723 - accuracy: 0.6687\n",
"Epoch 4/100\n",
"80/80 [==============================] - 0s 763us/step - loss: 0.8227 - accuracy: 0.7212\n",
"Epoch 5/100\n",
"80/80 [==============================] - 0s 563us/step - loss: 0.7266 - accuracy: 0.7638\n",
"Epoch 6/100\n",
"80/80 [==============================] - 0s 562us/step - loss: 0.6465 - accuracy: 0.7925\n",
"Epoch 7/100\n",
"80/80 [==============================] - 0s 612us/step - loss: 0.5700 - accuracy: 0.8163\n",
"Epoch 8/100\n",
"80/80 [==============================] - 0s 587us/step - loss: 0.5076 - accuracy: 0.8388\n",
"Epoch 9/100\n",
"80/80 [==============================] - 0s 513us/step - loss: 0.4585 - accuracy: 0.8625\n",
"Epoch 10/100\n",
"80/80 [==============================] - 0s 509us/step - loss: 0.4188 - accuracy: 0.8662\n",
"Epoch 11/100\n",
"80/80 [==============================] - 0s 563us/step - loss: 0.3788 - accuracy: 0.8838\n",
"Epoch 12/100\n",
"80/80 [==============================] - 0s 625us/step - loss: 0.3465 - accuracy: 0.8988\n",
"Epoch 13/100\n",
"80/80 [==============================] - 0s 587us/step - loss: 0.3059 - accuracy: 0.9162\n",
"Epoch 14/100\n",
"80/80 [==============================] - 0s 550us/step - loss: 0.2850 - accuracy: 0.9225\n",
"Epoch 15/100\n",
"80/80 [==============================] - 0s 587us/step - loss: 0.2539 - accuracy: 0.9350\n",
"Epoch 16/100\n",
"80/80 [==============================] - 0s 537us/step - loss: 0.2287 - accuracy: 0.9425\n",
"Epoch 17/100\n",
"80/80 [==============================] - 0s 528us/step - loss: 0.2050 - accuracy: 0.9488\n",
"Epoch 18/100\n",
"80/80 [==============================] - 0s 506us/step - loss: 0.1860 - accuracy: 0.9600\n",
"Epoch 19/100\n",
"80/80 [==============================] - 0s 550us/step - loss: 0.1673 - accuracy: 0.9625\n",
"Epoch 20/100\n",
"80/80 [==============================] - 0s 662us/step - loss: 0.1641 - accuracy: 0.9563\n",
"Epoch 21/100\n",
"80/80 [==============================] - 0s 587us/step - loss: 0.1436 - accuracy: 0.9663\n",
"Epoch 22/100\n",
"80/80 [==============================] - 0s 663us/step - loss: 0.1281 - accuracy: 0.9737\n",
"Epoch 23/100\n",
"80/80 [==============================] - 0s 600us/step - loss: 0.1149 - accuracy: 0.9762\n",
"Epoch 24/100\n",
"80/80 [==============================] - 0s 813us/step - loss: 0.0995 - accuracy: 0.9887\n",
"Epoch 25/100\n",
"80/80 [==============================] - 0s 650us/step - loss: 0.0905 - accuracy: 0.9887\n",
"Epoch 26/100\n",
"80/80 [==============================] - 0s 675us/step - loss: 0.0809 - accuracy: 0.9925\n",
"Epoch 27/100\n",
"80/80 [==============================] - 0s 787us/step - loss: 0.0705 - accuracy: 0.9937\n",
"Epoch 28/100\n",
"80/80 [==============================] - 0s 712us/step - loss: 0.0703 - accuracy: 0.9912\n",
"Epoch 29/100\n",
"80/80 [==============================] - 0s 600us/step - loss: 0.0650 - accuracy: 0.9937\n",
"Epoch 30/100\n",
"80/80 [==============================] - 0s 663us/step - loss: 0.0567 - accuracy: 0.9962\n",
"Epoch 31/100\n",
"80/80 [==============================] - 0s 775us/step - loss: 0.0485 - accuracy: 0.9962\n",
"Epoch 32/100\n",
"80/80 [==============================] - 0s 624us/step - loss: 0.0426 - accuracy: 1.0000\n",
"Epoch 33/100\n",
"80/80 [==============================] - 0s 575us/step - loss: 0.0407 - accuracy: 0.9962\n",
"Epoch 34/100\n",
"80/80 [==============================] - 0s 600us/step - loss: 0.0362 - accuracy: 0.9975\n",
"Epoch 35/100\n",
"80/80 [==============================] - 0s 562us/step - loss: 0.0335 - accuracy: 0.9975\n",
"Epoch 36/100\n",
"80/80 [==============================] - 0s 502us/step - loss: 0.0285 - accuracy: 0.9962\n",
"Epoch 37/100\n",
"80/80 [==============================] - 0s 562us/step - loss: 0.0248 - accuracy: 0.9987\n",
"Epoch 38/100\n",
"80/80 [==============================] - 0s 662us/step - loss: 0.0236 - accuracy: 0.9987\n",
"Epoch 39/100\n",
"80/80 [==============================] - 0s 675us/step - loss: 0.0222 - accuracy: 0.9975\n",
"Epoch 40/100\n",
"80/80 [==============================] - 0s 763us/step - loss: 0.0219 - accuracy: 0.9987\n",
"Epoch 41/100\n",
"80/80 [==============================] - 0s 837us/step - loss: 0.0186 - accuracy: 0.9987\n",
"Epoch 42/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0191 - accuracy: 0.9975\n",
"Epoch 43/100\n",
"80/80 [==============================] - 0s 950us/step - loss: 0.0152 - accuracy: 1.0000\n",
"Epoch 44/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0174 - accuracy: 0.9975\n",
"Epoch 45/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0147 - accuracy: 0.9975\n",
"Epoch 46/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0176 - accuracy: 0.9975\n",
"Epoch 47/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0137 - accuracy: 0.9975\n",
"Epoch 48/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0135 - accuracy: 0.9975\n",
"Epoch 49/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0195 - accuracy: 0.9975\n",
"Epoch 50/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0102 - accuracy: 0.9987\n",
"Epoch 51/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0199 - accuracy: 0.9950\n",
"Epoch 52/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0121 - accuracy: 0.9975\n",
"Epoch 53/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0117 - accuracy: 0.9975\n",
"Epoch 54/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0100 - accuracy: 0.9975\n",
"Epoch 55/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0071 - accuracy: 0.9987\n",
"Epoch 56/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0093 - accuracy: 0.9975\n",
"Epoch 57/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0127 - accuracy: 0.9962\n",
"Epoch 58/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0171 - accuracy: 0.9975: 0s - loss: 0.0257 - accuracy: \n",
"Epoch 59/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0069 - accuracy: 0.9987\n",
"Epoch 60/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0092 - accuracy: 0.9975\n",
"Epoch 61/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0134 - accuracy: 0.9975\n",
"Epoch 62/100\n",
"80/80 [==============================] - 0s 4ms/step - loss: 0.0066 - accuracy: 0.9975\n",
"Epoch 63/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0153 - accuracy: 0.9962\n",
"Epoch 64/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0097 - accuracy: 0.9975\n",
"Epoch 65/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0123 - accuracy: 0.9962\n",
"Epoch 66/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0077 - accuracy: 0.9987\n",
"Epoch 67/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0071 - accuracy: 0.9987\n",
"Epoch 68/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0114 - accuracy: 0.9975\n",
"Epoch 69/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0083 - accuracy: 0.9975\n",
"Epoch 70/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0074 - accuracy: 0.9987\n",
"Epoch 71/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0064 - accuracy: 0.9987\n",
"Epoch 72/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0058 - accuracy: 0.9975\n",
"Epoch 73/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0089 - accuracy: 0.9987\n",
"Epoch 74/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0076 - accuracy: 0.9975\n",
"Epoch 75/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0066 - accuracy: 0.9975\n",
"Epoch 76/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0044 - accuracy: 0.9975\n",
"Epoch 77/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0052 - accuracy: 0.9987\n",
"Epoch 78/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0078 - accuracy: 0.9975\n",
"Epoch 79/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0098 - accuracy: 0.9975\n",
"Epoch 80/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0069 - accuracy: 0.9987\n",
"Epoch 81/100\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"80/80 [==============================] - 0s 2ms/step - loss: 0.0053 - accuracy: 0.9987\n",
"Epoch 82/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0049 - accuracy: 0.9987\n",
"Epoch 83/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0119 - accuracy: 0.9975\n",
"Epoch 84/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0101 - accuracy: 0.9975\n",
"Epoch 85/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0087 - accuracy: 0.9975\n",
"Epoch 86/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0087 - accuracy: 0.9975\n",
"Epoch 87/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0134 - accuracy: 0.9962\n",
"Epoch 88/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0164 - accuracy: 0.9962\n",
"Epoch 89/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.2000 - accuracy: 0.9463\n",
"Epoch 90/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0682 - accuracy: 0.9787\n",
"Epoch 91/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0179 - accuracy: 0.9950\n",
"Epoch 92/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0302 - accuracy: 0.9950\n",
"Epoch 93/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0131 - accuracy: 0.9962\n",
"Epoch 94/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0075 - accuracy: 0.9975\n",
"Epoch 95/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0077 - accuracy: 0.9962\n",
"Epoch 96/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0083 - accuracy: 0.9975\n",
"Epoch 97/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0075 - accuracy: 0.9975\n",
"Epoch 98/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0042 - accuracy: 0.9987\n",
"Epoch 99/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0053 - accuracy: 0.9987\n",
"Epoch 100/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0055 - accuracy: 0.9987\n",
"Accuracy: 0.0\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAPoAAAECCAYAAADXWsr9AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKH0lEQVR4nO3dzY9dB32H8efLjF9ih9e0m9hR4wVQRUg0aNSGRGIRI7VABBsWQQoSbLxpQ0BICLrhH0AIKlVIVigbIliYLCqgQCVgwQKrjh2JxAYJBUicBMUFkiBD/PrrYqaqY7u5Z+JzfO7k93wkS57rm5OvxvPo3Ht950yqCkmvba+be4Ck6Rm61IChSw0YutSAoUsNGLrUwGyhJ/mHJL9I8sskn51rx1BJbknyoyTHkzye5IG5Nw2RZCXJsSTfnnvLEEnelORQkp8nOZHk3XNvWiTJpza+Jh5L8o0kO+fedLlZQk+yAvwr8D7gNuAjSW6bY8smnAc+XVW3AXcA/7gFNgM8AJyYe8QmfBn4XlX9NfBOlnx7kj3AJ4C1qnoHsALcO++qK811Rv9b4JdV9URVnQW+CXxopi2DVNWzVXV04/d/ZP0LcM+8q15Zkr3AB4AH594yRJI3Au8BvgpQVWer6vlZRw2zCtyQZBXYBTwz854rzBX6HuCpSz4+yZJHc6kktwK3A4dnnrLIl4DPABdn3jHUPuAU8LWNpxsPJtk996hXUlVPA18AngSeBV6oqh/Mu+pKvhi3SUluBL4FfLKqXpx7z/8nyT3Ac1X1yNxbNmEVeBfwlaq6HTgNLPXrN0nezPqj0X3AzcDuJPfNu+pKc4X+NHDLJR/v3bhtqSXZxnrkD1XVw3PvWeAu4INJfs36U6O7k3x93kkLnQROVtX/PlI6xHr4y+y9wK+q6lRVnQMeBu6cedMV5gr9v4C3JtmXZDvrL178+0xbBkkS1p87nqiqL869Z5Gq+lxV7a2qW1n//P6wqpbuTHOpqvot8FSSt2/ctB84PuOkIZ4E7kiya+NrZD9L+ALi6hz/06o6n+SfgO+z/irlv1XV43Ns2YS7gI8CP0vy6MZt/1xV351v0mvS/cBDGyeAJ4CPz7znFVXV4SSHgKOs/8vMMeDgvKuuFL9NVXrt88U4qQFDlxowdKkBQ5caMHSpgdlDT3Jg7g2bsdX2gpuvh2XfO3vowFJ/gq5iq+0FN18PS713GUKXNLFJ3jCzPTtqJ8O+6egcZ9jGjtE3bNaFm4btPf/SaVZ3Dv+GqtUXXnq1kxYb+Fd3tv7M9tww+LAXb5zm7yPnh3+tnTt/mm2rm/jGtdN/fhWLFsvKyqD7bfZzDNN8nl/60x84d/Z0Lr99krfA7mQ3f5f9Uxx6Ms/fM82FTG76zi8mOS4AFy5Mctg/3fm2SY67/fmzkxwXID99bJLjrrzhxkmOC9N8no/95F+uersP3aUGDF1qwNClBgxdasDQpQYGhb7VrsEu6eUWhr5Fr8Eu6RJDzuhb7hrskl5uSOhb+hrskkZ8Z9zGd+8cANjJrrEOK2kEQ87og67BXlUHq2qtqtaW4b3rkv7PkNC33DXYJb3cwofuW/Qa7JIuMeg5+sYPKfAHFUhblO+MkxowdKkBQ5caMHSpAUOXGpjk4pBvyFtqq10zbuWmt0xy3At/eGGS4wJwcZprxn3/mUcnOe7f3/w3kxx3q8qO8d9Y9tMz/8GLF393xcUhPaNLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61IChSw0YutTAoB+y2MGF3/1+7glLw8syXx915swEB7365ds9o0sNGLrUgKFLDRi61IChSw0YutSAoUsNLAw9yS1JfpTkeJLHkzxwPYZJGs+QN8ycBz5dVUeTvB54JMl/VtXxibdJGsnCM3pVPVtVRzd+/0fgBLBn6mGSxrOp5+hJbgVuBw5PskbSJAa/1z3JjcC3gE9W1YtX+fMDwAGAnewabaCkazfojJ5kG+uRP1RVD1/tPlV1sKrWqmptGzvG3CjpGg151T3AV4ETVfXF6SdJGtuQM/pdwEeBu5M8uvHr/RPvkjSihc/Rq+onQK7DFkkT8Z1xUgOGLjVg6FIDhi41YOhSA4YuNWDoUgOGLjVg6FIDhi41YOhSA4YuNWDoUgOGLjVg6FIDhi41YOhSA4YuNWDoUgOGLjVg6FIDhi41YOhSA4YuNWDoUgOGLjVg6FIDhi41YOhSA4YuNWDoUgOGLjVg6FIDhi41YOhSA4YuNTA49CQrSY4l+faUgySNbzNn9AeAE1MNkTSdQaEn2Qt8AHhw2jmSpjD0jP4l4DPAxemmSJrKwtCT3AM8V1WPLLjfgSRHkhw5x5nRBkq6dkPO6HcBH0zya+CbwN1Jvn75narqYFWtVdXaNnaMPFPStVgYelV9rqr2VtWtwL3AD6vqvsmXSRqN/44uNbC6mTtX1Y+BH0+yRNJkPKNLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61IChSw0YutSAoUsNDAo9yZuSHEry8yQnkrx76mGSxrM68H5fBr5XVR9Osh3YNeEmSSNbGHqSNwLvAT4GUFVngbPTzpI0piEP3fcBp4CvJTmW5MEkuyfeJWlEQ0JfBd4FfKWqbgdOA5+9/E5JDiQ5kuTIOc6MPFPStRgS+kngZFUd3vj4EOvhv0xVHayqtapa28aOMTdKukYLQ6+q3wJPJXn7xk37geOTrpI0qqGvut8PPLTxivsTwMenmyRpbINCr6pHgbVpp0iaiu+MkxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qYFBoSf5VJLHkzyW5BtJdk49TNJ4FoaeZA/wCWCtqt4BrAD3Tj1M0niGPnRfBW5IsgrsAp6ZbpKksS0MvaqeBr4APAk8C7xQVT+Yepik8Qx56P5m4EPAPuBmYHeS+65yvwNJjiQ5co4z4y+V9KoNeej+XuBXVXWqqs4BDwN3Xn6nqjpYVWtVtbaNHWPvlHQNhoT+JHBHkl1JAuwHTkw7S9KYhjxHPwwcAo4CP9v4bw5OvEvSiFaH3KmqPg98fuItkibiO+OkBgxdasDQpQYMXWrA0KUGDF1qwNClBgxdasDQpQYMXWrA0KUGDF1qwNClBgxdasDQpQYMXWrA0KUGDF1qwNClBgxdasDQpQYMXWrA0KUGDF1qwNClBgxdasDQpQYMXWogVTX+QZNTwG8G3v0vgP8efcR0ttpecPP1sCx7/6qq/vLyGycJfTOSHKmqtVlHb
MJW2wtuvh6Wfa8P3aUGDF1qYBlCPzj3gE3aanvBzdfDUu+d/Tm6pOktwxld0sQMXWrA0KUGDF1qwNClBv4HcMI80z/c9lwAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 288x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 1.7226 - accuracy: 0.4263\n",
"Epoch 2/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.9868 - accuracy: 0.6675\n",
"Epoch 3/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.7276 - accuracy: 0.7650\n",
"Epoch 4/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.5800 - accuracy: 0.8037\n",
"Epoch 5/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.4916 - accuracy: 0.8438\n",
"Epoch 6/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.4213 - accuracy: 0.8625\n",
"Epoch 7/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.3660 - accuracy: 0.8825\n",
"Epoch 8/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.3326 - accuracy: 0.8875\n",
"Epoch 9/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.2828 - accuracy: 0.9175\n",
"Epoch 10/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.2601 - accuracy: 0.9237\n",
"Epoch 11/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.2148 - accuracy: 0.9337\n",
"Epoch 12/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1955 - accuracy: 0.9375\n",
"Epoch 13/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1774 - accuracy: 0.9550\n",
"Epoch 14/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1511 - accuracy: 0.9650\n",
"Epoch 15/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1350 - accuracy: 0.9675\n",
"Epoch 16/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1224 - accuracy: 0.9737\n",
"Epoch 17/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1050 - accuracy: 0.9800\n",
"Epoch 18/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0925 - accuracy: 0.9862\n",
"Epoch 19/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0835 - accuracy: 0.9900\n",
"Epoch 20/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0727 - accuracy: 0.9950\n",
"Epoch 21/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0640 - accuracy: 0.9937\n",
"Epoch 22/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0577 - accuracy: 0.9937\n",
"Epoch 23/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0520 - accuracy: 0.9950\n",
"Epoch 24/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0519 - accuracy: 0.9950\n",
"Epoch 25/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0380 - accuracy: 1.0000\n",
"Epoch 26/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0369 - accuracy: 0.9975\n",
"Epoch 27/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0315 - accuracy: 0.9975\n",
"Epoch 28/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0318 - accuracy: 0.9987\n",
"Epoch 29/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0304 - accuracy: 0.9962\n",
"Epoch 30/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0318 - accuracy: 0.9925\n",
"Epoch 31/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0251 - accuracy: 0.9975\n",
"Epoch 32/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0194 - accuracy: 0.9987\n",
"Epoch 33/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0216 - accuracy: 0.9962\n",
"Epoch 34/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0198 - accuracy: 0.9975\n",
"Epoch 35/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0178 - accuracy: 0.9975\n",
"Epoch 36/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0141 - accuracy: 0.9975\n",
"Epoch 37/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0159 - accuracy: 0.9975\n",
"Epoch 38/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0136 - accuracy: 0.9975\n",
"Epoch 39/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0156 - accuracy: 0.9975\n",
"Epoch 40/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0156 - accuracy: 0.9962\n",
"Epoch 41/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0114 - accuracy: 0.9987\n",
"Epoch 42/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0145 - accuracy: 0.9975\n",
"Epoch 43/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0092 - accuracy: 0.9987\n",
"Epoch 44/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0151 - accuracy: 0.9975\n",
"Epoch 45/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0155 - accuracy: 0.9962\n",
"Epoch 46/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0173 - accuracy: 0.9962\n",
"Epoch 47/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0095 - accuracy: 0.9987\n",
"Epoch 48/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0113 - accuracy: 0.9975\n",
"Epoch 49/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0067 - accuracy: 0.9987\n",
"Epoch 50/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0141 - accuracy: 0.9975\n",
"Epoch 51/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0110 - accuracy: 0.9962\n",
"Epoch 52/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0117 - accuracy: 0.9962\n",
"Epoch 53/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0094 - accuracy: 0.9987\n",
"Epoch 54/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0067 - accuracy: 0.9975\n",
"Epoch 55/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0075 - accuracy: 0.9987\n",
"Epoch 56/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0078 - accuracy: 0.9975\n",
"Epoch 57/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0133 - accuracy: 0.9975\n",
"Epoch 58/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0125 - accuracy: 0.9975\n",
"Epoch 59/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0112 - accuracy: 0.9987\n",
"Epoch 60/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0055 - accuracy: 0.9987\n",
"Epoch 61/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0095 - accuracy: 0.9975\n",
"Epoch 62/100\n",
"80/80 [==============================] - 0s 4ms/step - loss: 0.0082 - accuracy: 0.9975\n",
"Epoch 63/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0060 - accuracy: 0.9987\n",
"Epoch 64/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0054 - accuracy: 0.9975\n",
"Epoch 65/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0142 - accuracy: 0.9975\n",
"Epoch 66/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0168 - accuracy: 0.9950\n",
"Epoch 67/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0079 - accuracy: 0.9975\n",
"Epoch 68/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0073 - accuracy: 0.9987\n",
"Epoch 69/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0060 - accuracy: 0.9975\n",
"Epoch 70/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0052 - accuracy: 0.9987\n",
"Epoch 71/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0052 - accuracy: 0.9987\n",
"Epoch 72/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0051 - accuracy: 0.9987\n",
"Epoch 73/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0063 - accuracy: 0.9987\n",
"Epoch 74/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0542 - accuracy: 0.9875\n",
"Epoch 75/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1171 - accuracy: 0.9700\n",
"Epoch 76/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0734 - accuracy: 0.9750\n",
"Epoch 77/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0148 - accuracy: 0.9975\n",
"Epoch 78/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0045 - accuracy: 0.9987\n",
"Epoch 79/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0093 - accuracy: 0.9975\n",
"Epoch 80/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0055 - accuracy: 0.9987\n",
"Epoch 81/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0073 - accuracy: 0.9975\n",
"Epoch 82/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0048 - accuracy: 0.9987\n",
"Epoch 83/100\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"80/80 [==============================] - 0s 2ms/step - loss: 0.0061 - accuracy: 0.9987\n",
"Epoch 84/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0064 - accuracy: 0.9987\n",
"Epoch 85/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0073 - accuracy: 0.9975\n",
"Epoch 86/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0084 - accuracy: 0.9975\n",
"Epoch 87/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0095 - accuracy: 0.9975\n",
"Epoch 88/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0070 - accuracy: 0.9975\n",
"Epoch 89/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0102 - accuracy: 0.9975\n",
"Epoch 90/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0060 - accuracy: 0.9975\n",
"Epoch 91/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0057 - accuracy: 0.9987\n",
"Epoch 92/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0094 - accuracy: 0.9975\n",
"Epoch 93/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0044 - accuracy: 0.9987\n",
"Epoch 94/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0063 - accuracy: 0.9975\n",
"Epoch 95/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0066 - accuracy: 0.9975\n",
"Epoch 96/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0087 - accuracy: 0.9975\n",
"Epoch 97/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0047 - accuracy: 0.9987\n",
"Epoch 98/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0053 - accuracy: 0.9975\n",
"Epoch 99/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0045 - accuracy: 0.9987\n",
"Epoch 100/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0046 - accuracy: 0.9987\n",
"Accuracy: 0.0\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAPoAAAECCAYAAADXWsr9AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKAklEQVR4nO3dW4hchR3H8d+v2Zi4sY3aloKJNKEURSwSWeolINT4YKvoSwsWVCqUvLQmilS0UKTPFdGH1rLE+mKqDzHQIsVavDwUSuiaCJpsSsVLEo01rdQbNpf668OONJdt5mz3nJxZ/98PCNlxPP4I++XMzM6ecRIB+HT7TN8DAHSP0IECCB0ogNCBAggdKIDQgQJ6C9321bb/Yvtl23f1taMp2+faftb2Lts7bW/se1MTthfZ3mH7ib63NGH7TNtbbO+2PW37sr43DWP79sH3xEu2H7W9tO9Nx+sldNuLJP1c0jclXSDpu7Yv6GPLHByRdEeSCyRdKukHC2CzJG2UNN33iDl4QNKTSc6XdJFGfLvtFZI2SJpIcqGkRZJu6HfVifo6o39d0stJXklySNJjkq7vaUsjSfYn2T748/ua+QZc0e+qk7O9UtI1kjb1vaUJ28slXSHpIUlKcijJP3sd1cyYpNNtj0kal/Rmz3tO0FfoKyTtPerrfRrxaI5me5WkNZK29TxlmPsl3Snp4553NLVa0gFJDw+ebmyyvazvUSeT5A1J90raI2m/pHeTPNXvqhPxYtwc2T5D0uOSbkvyXt97/hfb10p6O8nzfW+ZgzFJF0t6MMkaSR9KGunXb2yfpZlHo6slnSNpme0b+111or5Cf0PSuUd9vXJw20izvVgzkW9OsrXvPUOslXSd7dc089ToStuP9DtpqH2S9iX55JHSFs2EP8qukvRqkgNJDkvaKunynjedoK/Q/yzpq7ZX2z5NMy9e/LanLY3YtmaeO04nua/vPcMkuTvJyiSrNPP3+0ySkTvTHC3JW5L22j5vcNM6Sbt6nNTEHkmX2h4ffI+s0wi+gDjWx/80yRHbP5T0e828SvmrJDv72DIHayXdJOlF2y8Mbvtxkt/1N+lT6VZJmwcngFck3dLznpNKss32FknbNfOTmR2SJvtddSLza6rApx8vxgEFEDpQAKEDBRA6UAChAwX0Hrrt9X1vmIuFtldi86kw6nt7D13SSP8FzWKh7ZXYfCqM9N5RCB1Axzp5w8xpXpKlavZLR4d1UIu1pPUNXVloeyU2nwqjsvdf+lCHctDH397JW2CXapku8bouDg3gJLbl6Vlv56E7UAChAwUQOlAAoQMFEDpQQKPQF9o12AEca2joC/Qa7ACO0uSMvuCuwQ7gWE1CX9DXYAfQ4jvjBr+9s16Slmq8rcMCaEGTM3qja7AnmUwykWRiFN7zC+C/moS+4K7BDuBYQx+6L9BrsAM4SqPn6IMPKeCDCoAFinfGAQUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQQKMPWZyrL134kTb8Znfrx/3FN65q/ZifOLJ3X2fH7spnxsc7Oe4HV3+tk+Oe8eSLnRxXkv5280WdHPfQcndyXEn6/s3tf27p7u8cmfV2zuhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAUNDt32u7Wdt77K90/bGUzEMQHuavGHmiKQ7kmy3/VlJz9v+Q5JdHW8D0JKhZ/Qk+5NsH/z5fUnTklZ0PQxAe+b0HN32KklrJG3rZA2ATjQO3fYZkh6XdFuS92b59+ttT9meevedf7e5EcA8NQrd9mLNRL45ydbZ7pNkMslEkonlZy9qcyOAeWryqrslPSRpOsl93U8C0LYmZ/S1km6SdKXtFwb/fKvjXQBaNPTHa0n+KKm7X8oF0DneGQcUQOhAAYQOFEDoQAGEDhTQyVVg39r7ef1sw02tH/f0f+xs/ZgL2V9/2s2VT7/yoz91ctzXf3J5J8eVpI9WzH710/k6/5cfdHJcSbpt42utH/PXiw7NejtndKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCnCS1g/6OZ+dS7yu9eMCOLlteVrv5Z0TPhSVMzpQAKEDBRA6UAChAwUQOlAAoQMFEDpQQOPQbS+yvcP2E10OAtC+uZzRN0qa7moIgO40Ct32SknXSNrU7RwAXWh6Rr9f0p2SPu5uCoCuDA3d9rWS3k7y/JD7rbc9ZXvqsA62NhDA/DU5o6+VdJ3t1yQ9JulK248cf6ckk0kmkkws1pKWZwKYj6GhJ7k7ycokqyTdIOmZJDd2vgxAa/g5OlDA2FzunOQ5Sc91sgRAZzijAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBTQK3faZtrfY3m172vZlXQ8D0J6xhvd7QNKTSb5t+zRJ4x1uAtCyoaHbXi7pCknfk6QkhyQd6nYWgDY1eei+WtIBSQ/b3mF7k+1lHe8C0KImoY9JuljSg0nWSPpQ0l3H38n2ettTtqcO62DLMwHMR5PQ90nal2Tb4Ostmgn/GEkmk0wkmVisJW1uBDBPQ0NP8pakvbbPG9y0TtKuTlcBaFXTV91vlbR58Ir7K5Ju6W4SgLY1Cj3JC5Imup0CoCu8Mw4ogNCBAggdKIDQgQIIHSiA0IECCB0ogNCBAggdKIDQgQIIHSiA0IECCB0ogNCBAggdKIDQgQIIHSiA0IECCB0ogNCBAggdKIDQgQIIHSiA0IECCB0ogNCBAggdKIDQgQIIHSiA0IECCB0ogNCBAggdKIDQgQIIHSiA0IECGoVu+3bbO22/ZPtR20u7HgagPUNDt71C0gZJE0kulLRI0g1dDwPQnqYP3ccknW57TNK4pDe7mwSgbUNDT/KGpHsl7ZG0X9K7SZ7qehiA9jR56H6WpOslrZZ0jqRltm+c5X7rbU/Znjqsg+0vBfB/a/LQ/SpJryY5kOSwpK2SLj/+Tkkmk0wkmVisJW3vBDAPTULfI+lS2+O2LWmdpOluZwFoU5Pn6NskbZG0XdKLg/9msuNdAFo01uROSe6RdE/HWwB0hHfGAQUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQgJO0f1D7gKTXG979C5L+3vqI7iy0vRKbT4VR2fvlJF88/sZOQp8L21NJJnodMQcLba/E5lNh1Pfy0B0ogNCBAkYh9Mm+B8zRQtsrs
flUGOm9vT9HB9C9UTijA+gYoQMFEDpQAKEDBRA6UMB/AEN3N3qb+7f2AAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 288x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 1.7293 - accuracy: 0.3800\n",
"Epoch 2/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 1.1456 - accuracy: 0.5612\n",
"Epoch 3/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.9108 - accuracy: 0.6875\n",
"Epoch 4/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.7623 - accuracy: 0.7412\n",
"Epoch 5/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.6525 - accuracy: 0.7837\n",
"Epoch 6/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.5685 - accuracy: 0.8188\n",
"Epoch 7/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.4998 - accuracy: 0.8325\n",
"Epoch 8/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.4316 - accuracy: 0.8650\n",
"Epoch 9/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.4004 - accuracy: 0.8763\n",
"Epoch 10/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.3459 - accuracy: 0.8938\n",
"Epoch 11/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.3099 - accuracy: 0.9112\n",
"Epoch 12/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.2783 - accuracy: 0.9225\n",
"Epoch 13/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.2482 - accuracy: 0.9325\n",
"Epoch 14/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.2247 - accuracy: 0.9375\n",
"Epoch 15/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.2029 - accuracy: 0.9463\n",
"Epoch 16/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.1825 - accuracy: 0.9450\n",
"Epoch 17/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1669 - accuracy: 0.9538\n",
"Epoch 18/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1529 - accuracy: 0.9613\n",
"Epoch 19/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1369 - accuracy: 0.9638\n",
"Epoch 20/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1217 - accuracy: 0.9762\n",
"Epoch 21/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1169 - accuracy: 0.9775\n",
"Epoch 22/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0990 - accuracy: 0.9825\n",
"Epoch 23/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0866 - accuracy: 0.9862\n",
"Epoch 24/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0855 - accuracy: 0.9850\n",
"Epoch 25/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0751 - accuracy: 0.9862\n",
"Epoch 26/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0658 - accuracy: 0.9887\n",
"Epoch 27/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0660 - accuracy: 0.9862\n",
"Epoch 28/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0581 - accuracy: 0.9925\n",
"Epoch 29/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0515 - accuracy: 0.9925\n",
"Epoch 30/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0433 - accuracy: 0.9962\n",
"Epoch 31/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0398 - accuracy: 0.9937\n",
"Epoch 32/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0352 - accuracy: 0.9962\n",
"Epoch 33/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0313 - accuracy: 0.9987\n",
"Epoch 34/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0351 - accuracy: 0.9962\n",
"Epoch 35/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0285 - accuracy: 0.9962\n",
"Epoch 36/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0233 - accuracy: 0.9987\n",
"Epoch 37/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0223 - accuracy: 0.9975\n",
"Epoch 38/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0193 - accuracy: 0.9987\n",
"Epoch 39/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0195 - accuracy: 0.9975\n",
"Epoch 40/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0220 - accuracy: 0.9975\n",
"Epoch 41/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0150 - accuracy: 1.0000\n",
"Epoch 42/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0209 - accuracy: 0.9962\n",
"Epoch 43/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0161 - accuracy: 0.9962\n",
"Epoch 44/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0200 - accuracy: 0.9950\n",
"Epoch 45/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0258 - accuracy: 0.9962\n",
"Epoch 46/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0103 - accuracy: 1.0000\n",
"Epoch 47/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0294 - accuracy: 0.9912\n",
"Epoch 48/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0107 - accuracy: 1.0000\n",
"Epoch 49/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0155 - accuracy: 0.9975\n",
"Epoch 50/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0159 - accuracy: 0.9975\n",
"Epoch 51/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0111 - accuracy: 0.9987\n",
"Epoch 52/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0147 - accuracy: 0.9975\n",
"Epoch 53/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0116 - accuracy: 0.9987\n",
"Epoch 54/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0117 - accuracy: 0.9950\n",
"Epoch 55/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0061 - accuracy: 0.9987\n",
"Epoch 56/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0109 - accuracy: 0.9987\n",
"Epoch 57/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0099 - accuracy: 0.9975\n",
"Epoch 58/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0124 - accuracy: 0.9975\n",
"Epoch 59/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0108 - accuracy: 0.9975\n",
"Epoch 60/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0122 - accuracy: 0.9975\n",
"Epoch 61/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0126 - accuracy: 0.9975\n",
"Epoch 62/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0093 - accuracy: 0.9975\n",
"Epoch 63/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0068 - accuracy: 0.9987\n",
"Epoch 64/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0078 - accuracy: 0.9987\n",
"Epoch 65/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0068 - accuracy: 0.9987\n",
"Epoch 66/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0112 - accuracy: 0.9975\n",
"Epoch 67/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0094 - accuracy: 0.9987\n",
"Epoch 68/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0096 - accuracy: 0.9975\n",
"Epoch 69/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0120 - accuracy: 0.9975\n",
"Epoch 70/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0085 - accuracy: 0.9987\n",
"Epoch 71/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0054 - accuracy: 0.9987\n",
"Epoch 72/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0095 - accuracy: 0.9975: 0s - loss: 0.0065 - accuracy: 0.99\n",
"Epoch 73/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0095 - accuracy: 0.9962\n",
"Epoch 74/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0107 - accuracy: 0.9975\n",
"Epoch 75/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0113 - accuracy: 0.9975\n",
"Epoch 76/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0087 - accuracy: 0.9975\n",
"Epoch 77/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0072 - accuracy: 0.9975\n",
"Epoch 78/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0047 - accuracy: 0.9987\n",
"Epoch 79/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0122 - accuracy: 0.9962\n",
"Epoch 80/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0066 - accuracy: 0.9987\n",
"Epoch 81/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0077 - accuracy: 0.9975\n",
"Epoch 82/100\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"80/80 [==============================] - 0s 1ms/step - loss: 0.0091 - accuracy: 0.9975\n",
"Epoch 83/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0138 - accuracy: 0.9975\n",
"Epoch 84/100\n",
"80/80 [==============================] - ETA: 0s - loss: 0.0126 - accuracy: 0.99 - 0s 1ms/step - loss: 0.0117 - accuracy: 0.9962\n",
"Epoch 85/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0080 - accuracy: 0.9975\n",
"Epoch 86/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0073 - accuracy: 0.9975\n",
"Epoch 87/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0049 - accuracy: 0.9987\n",
"Epoch 88/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0051 - accuracy: 0.9987\n",
"Epoch 89/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0081 - accuracy: 0.9975\n",
"Epoch 90/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0048 - accuracy: 0.9987\n",
"Epoch 91/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0120 - accuracy: 0.9975\n",
"Epoch 92/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0103 - accuracy: 0.9975\n",
"Epoch 93/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0079 - accuracy: 0.9987\n",
"Epoch 94/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0070 - accuracy: 0.9975\n",
"Epoch 95/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0048 - accuracy: 0.9987\n",
"Epoch 96/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0046 - accuracy: 0.9987: 0s - loss: 0.0051 - accuracy: 0.99\n",
"Epoch 97/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0078 - accuracy: 0.9975\n",
"Epoch 98/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0094 - accuracy: 0.9962\n",
"Epoch 99/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0031 - accuracy: 0.9987\n",
"Epoch 100/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0038 - accuracy: 0.9987\n",
"Accuracy: 0.0\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAPoAAAECCAYAAADXWsr9AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKAElEQVR4nO3dTYhd9R3G8edpJq8Tq5F2YxKaCMVWhBIZNJriwrioVbSLQi0o6CbQWo1iEe2ibtqdiC6KdIiVgkELMYsixVp8WZTS0JgImoyCaEyiEdOWqoSal/p0MVeat+aeMefMuePv+wEhc73+fQj5cu69uXPHSQTgi+1LfQ8A0D1CBwogdKAAQgcKIHSgAEIHCugtdNvfsf2G7Tdt39fXjqZsr7T9ou3dtnfZ3tj3piZsz7O90/YzfW9pwvZ5trfYft32lO0r+t40jO27B38mXrP9pO1FfW86WS+h254n6VeSrpV0saQf2r64jy0zcEzSPUkulrRW0u1zYLMkbZQ01feIGXhE0rNJviHpWxrx7baXS7pT0kSSSyTNk3RTv6tO1dcV/TJJbyZ5K8kRSU9JurGnLY0kOZBkx+DXH2v6D+Dyfledme0Vkq6TtKnvLU3YPlfSVZIek6QkR5L8q9dRzYxJWmx7TNISSe/1vOcUfYW+XNK+477erxGP5ni2V0laI2lbz1OGeVjSvZI+7XlHU6slHZT0+ODpxibb432POpMk70p6UNJeSQckfZjkuX5XnYoX42bI9lJJT0u6K8lHfe/5f2xfL+mDJC/3vWUGxiRdKunRJGskHZI00q/f2F6m6UejqyVdIGnc9s39rjpVX6G/K2nlcV+vGNw20mzP13Tkm5Ns7XvPEOsk3WB7j6afGl1t+4l+Jw21X9L+JJ89Utqi6fBH2TWS3k5yMMlRSVslXdnzplP0FfrfJH3d9mrbCzT94sXve9rSiG1r+rnjVJKH+t4zTJL7k6xIskrTv78vJBm5K83xkrwvaZ/tiwY3rZe0u8dJTeyVtNb2ksGfkfUawRcQx/r4nyY5Zvsnkv6o6Vcpf5NkVx9bZmCdpFskvWr7lcFtP0vyh/4mfSHdIWnz4ALwlqTbet5zRkm22d4iaYem/2Zmp6TJfledynybKvDFx4txQAGEDhRA6EABhA4UQOhAAb2HbntD3xtmYq7tldg8G0Z9b++hSxrp36DTmGt7JTbPhpHeOwqhA+hYJ2+YWeCFWaRm33R0VIc1Xwtb39CVubZXYvNsGJW9n+iQjuSwT769k7fALtK4Lvf6Lo4GcAbb8vxpb+ehO1AAoQMFEDpQAKEDBRA6UECj0OfaZ7ADONHQ0OfoZ7ADOE6TK/qc+wx2ACdqEvqc/gx2AC2+M27w3TsbJGmRlrR1LIAWNLmiN/oM9iSTSSaSTIzCe34B/E+T0OfcZ7ADONHQh+5z9DPYARyn0XP0wQ8p4AcVAHMU74wDCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSggKGh215p+0Xbu23vsr1xNoYBaM9Yg/sck3RPkh22z5H0su0/Jdnd8TYALRl6RU9yIMmOwa8/ljQlaXnXwwC0Z0bP0W2vkrRG0rZO1gDoRJOH7pIk20slPS3priQfnebfb5C0QZIWaUlrAwGcvUZXdNvzNR355iRbT3efJJNJJpJMzNfCNjcCOEtNXnW3pMckTSV5qPtJANrW5Iq+TtItkq62/crgn+92vAtAi4Y+R0/yZ0mehS0AOsI744ACCB0ogNCBAggdKIDQgQIIHSiA0IECCB0ogNCBAggdKIDQgQIIHSiA0IECCB0ogNCBAggdKIDQgQIIHSiA0IECCB0ogNCBAggdKIDQgQIIHSiA0IECCB0ogNCBAggdKGDoD1n8PI5cMK49P76i9XMv/OXO1s/8jBcv7uTcPbd/s5NzJWnlL/7S2dld+PgHazs7e+m+f3dy7nNbftvJuZJ03be/1/qZ3r/gtLdzRQcKIHSgAEIHCiB0oABCBwogdKAAQgcKaBy67Xm2d9p+pstBANo3kyv6RklTXQ0B0J1GodteIek6SZu6nQOgC02v6A9LulfSp91NAdCVoaHbvl7SB0leHnK/Dba3297+n0OHWhsI4Ow1uaKvk3SD7T2SnpJ0te0nTr5TkskkE0km5o2PtzwTwNkYGnqS+5OsSLJK0k2SXkhyc+fLALSGv0cHCpjR96MneUnSS50sAdAZruhAAYQOFEDoQAGEDhRA6EABnXwK7IKlR7Tqyn2tn/vOTy9t/czPLLrsH52cu/pHezo5V5KOdXZyN8753V87O3vesmWdnHvthd19cu0bvz6/9TM/+fnpk+aKDhRA6EABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4U4CStH/pln5/Lvb71cwGc2bY8r4/yT598O1d0oABCBwogdKAAQgcKIHSgAEIHCiB0oIBGods+z/YW26/bnrJ9RdfDALSn6Y9NfkTSs0m+b3uBpCUdbgLQsqGh2z5X0lWSbpWkJEckHel2FoA2NXnovlrSQUmP295pe5Pt8Y53AWhRk9DHJF0q6dEkayQdknTfyXeyvcH2dtvbj+pwyzMBnI0moe+XtD/JtsHXWzQd/gmSTCaZSDIxXwvb3AjgLA0NPcn7kvbZvmhw03pJuztdBaBVTV91v0PS5sEr7m9Juq27SQDa1ij0JK9Imuh2CoCu8M44oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCmgUuu27be+y/ZrtJ20v6noYgPYMDd32ckl3SppIcomkeZJu6noYgPY0feg+Jmmx7TFJSyS9190kAG0bGnqSdyU9KGmvpAOSPkzyXNfDALSnyUP3ZZJulLRa0gWSxm3ffJr7bbC93fb2ozrc/lIAn1uTh+7XSHo7ycEkRyVtlXTlyXdKMplkIsnEfC1seyeAs9Ak9L2S1tpeYtuS1kua6nYWgDY1eY6+TdIWSTskvTr4byY73gWgRWNN7pTkAUkPdLwFQEd4ZxxQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwU4SfuH2gclvdPw7l+R9PfWR3Rnru2V2DwbRmXv15J89eQbOwl9JmxvTzLR64gZmGt7JTbPhlHfy0N3oABCBwoYhdAn+x4wQ3Ntr8Tm2
TDSe3t/jg6ge6NwRQfQMUIHCiB0oABCBwogdKCA/wJTVC5TWCmZ+wAAAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 288x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 1.9168 - accuracy: 0.3212\n",
"Epoch 2/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 1.2566 - accuracy: 0.5575\n",
"Epoch 3/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.9967 - accuracy: 0.6575\n",
"Epoch 4/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.8314 - accuracy: 0.7212\n",
"Epoch 5/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.7058 - accuracy: 0.7725\n",
"Epoch 6/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.6056 - accuracy: 0.8100\n",
"Epoch 7/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.5234 - accuracy: 0.8338\n",
"Epoch 8/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.4634 - accuracy: 0.8512\n",
"Epoch 9/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.4038 - accuracy: 0.8763\n",
"Epoch 10/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.3496 - accuracy: 0.9025\n",
"Epoch 11/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.3141 - accuracy: 0.9112\n",
"Epoch 12/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.2754 - accuracy: 0.9300\n",
"Epoch 13/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.2437 - accuracy: 0.9400\n",
"Epoch 14/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.2171 - accuracy: 0.9563\n",
"Epoch 15/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.1970 - accuracy: 0.9525\n",
"Epoch 16/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.1697 - accuracy: 0.9688\n",
"Epoch 17/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.1503 - accuracy: 0.9712\n",
"Epoch 18/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1312 - accuracy: 0.9762\n",
"Epoch 19/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.1150 - accuracy: 0.9800\n",
"Epoch 20/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1014 - accuracy: 0.9887\n",
"Epoch 21/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0891 - accuracy: 0.9912\n",
"Epoch 22/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0803 - accuracy: 0.9900\n",
"Epoch 23/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0724 - accuracy: 0.9937: 0s - loss: 0.0735 - accuracy: 0.99\n",
"Epoch 24/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0642 - accuracy: 0.9950\n",
"Epoch 25/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0574 - accuracy: 0.9962\n",
"Epoch 26/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0499 - accuracy: 0.9962\n",
"Epoch 27/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0443 - accuracy: 0.9975\n",
"Epoch 28/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0374 - accuracy: 1.0000\n",
"Epoch 29/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0345 - accuracy: 0.9987\n",
"Epoch 30/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0300 - accuracy: 1.0000\n",
"Epoch 31/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0251 - accuracy: 1.0000\n",
"Epoch 32/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0245 - accuracy: 1.0000\n",
"Epoch 33/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0209 - accuracy: 1.0000\n",
"Epoch 34/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0186 - accuracy: 1.0000\n",
"Epoch 35/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0170 - accuracy: 1.0000\n",
"Epoch 36/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0158 - accuracy: 1.0000\n",
"Epoch 37/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0140 - accuracy: 1.0000\n",
"Epoch 38/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0131 - accuracy: 1.0000\n",
"Epoch 39/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0124 - accuracy: 1.0000\n",
"Epoch 40/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0104 - accuracy: 1.0000\n",
"Epoch 41/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0095 - accuracy: 1.0000\n",
"Epoch 42/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0085 - accuracy: 1.0000\n",
"Epoch 43/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0082 - accuracy: 1.0000\n",
"Epoch 44/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0074 - accuracy: 1.0000\n",
"Epoch 45/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0071 - accuracy: 1.0000\n",
"Epoch 46/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0063 - accuracy: 1.0000\n",
"Epoch 47/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0060 - accuracy: 1.0000\n",
"Epoch 48/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0055 - accuracy: 1.0000\n",
"Epoch 49/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0052 - accuracy: 1.0000\n",
"Epoch 50/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0046 - accuracy: 1.0000\n",
"Epoch 51/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0044 - accuracy: 1.0000\n",
"Epoch 52/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0040 - accuracy: 1.0000\n",
"Epoch 53/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0038 - accuracy: 1.0000\n",
"Epoch 54/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0035 - accuracy: 1.0000\n",
"Epoch 55/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0033 - accuracy: 1.0000\n",
"Epoch 56/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0031 - accuracy: 1.0000\n",
"Epoch 57/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0029 - accuracy: 1.0000\n",
"Epoch 58/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0027 - accuracy: 1.0000\n",
"Epoch 59/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0025 - accuracy: 1.0000\n",
"Epoch 60/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0024 - accuracy: 1.0000\n",
"Epoch 61/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0022 - accuracy: 1.0000\n",
"Epoch 62/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0021 - accuracy: 1.0000\n",
"Epoch 63/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0020 - accuracy: 1.0000\n",
"Epoch 64/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0018 - accuracy: 1.0000\n",
"Epoch 65/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0017 - accuracy: 1.0000\n",
"Epoch 66/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0016 - accuracy: 1.0000\n",
"Epoch 67/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0015 - accuracy: 1.0000\n",
"Epoch 68/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0014 - accuracy: 1.0000\n",
"Epoch 69/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0014 - accuracy: 1.0000\n",
"Epoch 70/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0013 - accuracy: 1.0000\n",
"Epoch 71/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0012 - accuracy: 1.0000\n",
"Epoch 72/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0012 - accuracy: 1.0000\n",
"Epoch 73/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0011 - accuracy: 1.0000\n",
"Epoch 74/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0010 - accuracy: 1.0000\n",
"Epoch 75/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 9.7223e-04 - accuracy: 1.0000\n",
"Epoch 76/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 9.0185e-04 - accuracy: 1.0000\n",
"Epoch 77/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 8.5633e-04 - accuracy: 1.0000\n",
"Epoch 78/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 8.1181e-04 - accuracy: 1.0000\n",
"Epoch 79/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 7.5462e-04 - accuracy: 1.0000\n",
"Epoch 80/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 7.2169e-04 - accuracy: 1.0000\n",
"Epoch 81/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 6.8701e-04 - accuracy: 1.0000\n",
"Epoch 82/100\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"80/80 [==============================] - 0s 2ms/step - loss: 6.4608e-04 - accuracy: 1.0000\n",
"Epoch 83/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 6.0663e-04 - accuracy: 1.0000\n",
"Epoch 84/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 5.7897e-04 - accuracy: 1.0000\n",
"Epoch 85/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 5.5154e-04 - accuracy: 1.0000\n",
"Epoch 86/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 5.2252e-04 - accuracy: 1.0000\n",
"Epoch 87/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 4.9657e-04 - accuracy: 1.0000\n",
"Epoch 88/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 4.7991e-04 - accuracy: 1.0000\n",
"Epoch 89/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 4.4980e-04 - accuracy: 1.0000\n",
"Epoch 90/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 4.2578e-04 - accuracy: 1.0000\n",
"Epoch 91/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 4.0327e-04 - accuracy: 1.0000\n",
"Epoch 92/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 3.8471e-04 - accuracy: 1.0000\n",
"Epoch 93/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 3.5967e-04 - accuracy: 1.0000\n",
"Epoch 94/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 3.4583e-04 - accuracy: 1.0000\n",
"Epoch 95/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 3.2590e-04 - accuracy: 1.0000\n",
"Epoch 96/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 3.1527e-04 - accuracy: 1.0000\n",
"Epoch 97/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 2.9816e-04 - accuracy: 1.0000\n",
"Epoch 98/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 2.7618e-04 - accuracy: 1.0000\n",
"Epoch 99/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 2.6721e-04 - accuracy: 1.0000\n",
"Epoch 100/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 2.4960e-04 - accuracy: 1.0000\n",
"Accuracy: 0.0\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAPoAAAECCAYAAADXWsr9AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAJ8klEQVR4nO3d24uc9R3H8c/HbExMrIfSg5hIk4tWsUKJXeoh4IXxoq2iN16kEKHeBGyrUQTR3vgPiCilCEusNwa9iLlopViLB2gpbJuDoMlaEA8xGmuk1EiqOeCnFzvSHLaZZ5vn2WfG7/sFQnYcf35Y9s0zM5lMnEQAvtzO6nsAgO4ROlAAoQMFEDpQAKEDBRA6UEBvodv+oe2/237D9v197WjK9iW2X7K9x/Zu25v63tSE7UW2d9l+tu8tTdi+wPZW26/bnrF9Td+bhrF9z+Bn4jXbT9le2vemk/USuu1Fkn4t6UeSLpf0E9uX97FlHo5JujfJ5ZKulvTzMdgsSZskzfQ9Yh4elfRcksskfU8jvt32Ckl3SZpMcoWkRZLW97vqVH1d0X8g6Y0kbyY5IulpSbf0tKWRJPuT7Bz8+hPN/gCu6HfV6dleKelGSZv73tKE7fMlXSfpcUlKciTJv3od1cyEpHNsT0haJun9nvecoq/QV0h697iv92nEozme7VWS1kia7nnKMI9Iuk/S5z3vaGq1pAOSnhg83dhse3nfo04nyXuSHpK0V9J+SR8neb7fVafixbh5sn2upGck3Z3kYN97/hfbN0n6MMmOvrfMw4SkKyU9lmSNpEOSRvr1G9sXavbR6GpJF0tabntDv6tO1Vfo70m65LivVw5uG2m2F2s28i1JtvW9Z4i1km62/bZmnxpdb/vJficNtU/SviRfPFLaqtnwR9kNkt5KciDJUUnbJF3b86ZT9BX63yR92/Zq22dr9sWL3/a0pRHb1uxzx5kkD/e9Z5gkDyRZmWSVZr+/LyYZuSvN8ZJ8IOld25cOblonaU+Pk5rYK+lq28sGPyPrNIIvIE708T9Ncsz2LyT9QbOvUv4mye4+tszDWkm3SXrV9iuD236Z5Pf9TfpSulPSlsEF4E1Jt/e857SSTNveKmmnZn9nZpekqX5Xncr8MVXgy48X44ACCB0ogNCBAggdKIDQgQJ6D932xr43zMe47ZXYvBBGfW/voUsa6W/QHMZtr8TmhTDSe0chdAAd6+QNM2d7SZaq2R86OqrDWqwlrW/oyrjtldi8EEZl72c6pCM57JNv7+QtsEu1XFd5XRdHAziN6bww5+08dAcKIHSgAEIHCiB0oABCBwpoFPq4fQY7gBMNDX1MP4MdwHGaXNHH7jPYAZyoSehj/RnsAFp8Z9zgT+9slKSlWtbWsQBa0OSK3ugz2JNMJZlMMjkK7/kF8F9NQh+7z2AHcKKhD93H9DPYARyn0XP0wV9SwF9UAIwp3hkHFEDoQAGEDhRA6EABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAUNDt32J7Zds77G92/amhRgGoD0TDe5zTNK9SXba/oqkHbb/mGRPx9sAtGToFT3J/iQ7B7/+RNKMpBVdDwPQnnk9R7e9StIaSdOdrAHQiSYP3SVJts+V9Iyku5McnOPfb5S0UZKWallrAwGcuUZXdNuLNRv5liTb5rpPkqkkk0kmF2tJmxsBnKEmr7pb0uOSZpI83P0kAG1rckVfK+k2SdfbfmXwz4873gWgRUOfoyf5syQvwBYAHeGdcUABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAYQOFEDoQAGEDhRA6EABhA4UQOhAAYQOFEDoQAGEDhTQOHTbi2zvsv1sl4MAtG8+V/RNkma6GgKgO41Ct71S0o2SNnc7B0AXml7RH5F0n6TPu5sCoCtDQ7d9k6QPk+wYcr+Ntrfb3n5Uh1sbCODMNbmir5V0s+23JT0t6XrbT558pyRTSSaTTC7WkpZnAjgTQ0NP8kCSlUlWSVov6cUkGzpfBqA1/D46UMDEfO6c5GVJL3eyBEBnuKIDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwXM6+9ea+rYN5brH+uvbf3cb/7qL62f+QV//7udnPvZRcs6OVeS3rnJnZz7nTv+2sm542jReed1dvZVf/qo9TN3r/98ztu5ogMFEDpQAKEDBRA6UAChAwUQOlAAoQMFNArd9gW2t9p+3faM7Wu6HgagPU3fMPOopOeS3Gr7bEndvQsEQOuGhm77fEnXSfqpJCU5IulIt7MAtKnJQ/fVkg5IesL2LtubbS/veBeAFjUJfULSlZIeS7JG0iFJ9598J9sbbW+3vf3Yp4dangngTDQJfZ+kfUmmB19v1Wz4J0gylWQyyeTEOVzwgVEyNPQkH0h61/alg5vWSdrT6SoArWr6qvudkrYMXnF/U9Lt3U0C0LZGoSd5RdJkt1MAdIV3xgEFEDpQAKEDBRA6UAChAwUQOlBAJx/3PHHg37poakf7By9Z0v6ZA2cd/LSTc1/+3ZZOzpWkyzbf0cm57uj7nMOHOzm3S3t/dkV3h9/6QetHHnpnes7buaIDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwUQOlAAoQMFEDpQAKEDBRA6UAChAwU4SeuHnuev5iqva/1cAKc3nRd0MP/0ybdzRQcKIHSgAEIHCiB0oABCBwogdKAAQgcKaBS67Xts77b9mu2nbC/tehiA9gwN3fYKSXdJmkxyhaRFktZ3PQxAe5o+dJ+QdI7tCUnLJL3f3SQAbRsaepL3JD0kaa+k/ZI+TvJ818MAtKfJQ/cLJd0iabWkiyUtt71hjvtttL3d9vajOtz+UgD/tyYP3W+Q9FaSA0mOStom6dqT75RkKslkksnFWtL2TgBnoEnoeyVdbXuZbUtaJ2mm21kA2tTkOfq0pK2Sdkp6dfDfTHW8C0CLJprcKcmDkh7seAuAjvDOOKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCiB0oABCBwogdKAAQgcKIHSgAEIHCnCS9g+1D0h6p+Hdvybpo9ZHdGfc9kpsXgijsvdbSb5+8o2dhD4ftrcnmex1xDyM216JzQth1Pfy0B0ogNCBAkYh9Km+B8zTuO2V2LwQRnpv78/RAXRvFK7oA
DpG6EABhA4UQOhAAYQOFPAfY3gwpEgFGhEAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 288x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/100\n",
"80/80 [==============================] - 0s 1000us/step - loss: 1.7124 - accuracy: 0.4212\n",
"Epoch 2/100\n",
"80/80 [==============================] - 0s 913us/step - loss: 0.9685 - accuracy: 0.6662\n",
"Epoch 3/100\n",
"80/80 [==============================] - 0s 988us/step - loss: 0.7108 - accuracy: 0.7513\n",
"Epoch 4/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.5636 - accuracy: 0.8100\n",
"Epoch 5/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.4768 - accuracy: 0.8400\n",
"Epoch 6/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.3955 - accuracy: 0.8800\n",
"Epoch 7/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.3455 - accuracy: 0.8950\n",
"Epoch 8/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.2970 - accuracy: 0.9075\n",
"Epoch 9/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.2636 - accuracy: 0.9187\n",
"Epoch 10/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.2263 - accuracy: 0.9350\n",
"Epoch 11/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.1918 - accuracy: 0.9450\n",
"Epoch 12/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1787 - accuracy: 0.9513\n",
"Epoch 13/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1627 - accuracy: 0.9650\n",
"Epoch 14/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1348 - accuracy: 0.9750\n",
"Epoch 15/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1137 - accuracy: 0.9737\n",
"Epoch 16/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.1029 - accuracy: 0.9775\n",
"Epoch 17/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0893 - accuracy: 0.9825\n",
"Epoch 18/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0773 - accuracy: 0.9875\n",
"Epoch 19/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0639 - accuracy: 0.9950\n",
"Epoch 20/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0558 - accuracy: 0.9962\n",
"Epoch 21/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0503 - accuracy: 0.9950\n",
"Epoch 22/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0434 - accuracy: 0.9987\n",
"Epoch 23/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0386 - accuracy: 0.9975\n",
"Epoch 24/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0351 - accuracy: 1.0000\n",
"Epoch 25/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0298 - accuracy: 0.9987\n",
"Epoch 26/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0253 - accuracy: 1.0000\n",
"Epoch 27/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0229 - accuracy: 1.0000\n",
"Epoch 28/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0236 - accuracy: 0.9987\n",
"Epoch 29/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0183 - accuracy: 1.0000\n",
"Epoch 30/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0157 - accuracy: 1.0000\n",
"Epoch 31/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0155 - accuracy: 1.0000\n",
"Epoch 32/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0137 - accuracy: 1.0000\n",
"Epoch 33/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0120 - accuracy: 1.0000\n",
"Epoch 34/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0109 - accuracy: 1.0000\n",
"Epoch 35/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0097 - accuracy: 1.0000\n",
"Epoch 36/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0090 - accuracy: 1.0000\n",
"Epoch 37/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0078 - accuracy: 1.0000\n",
"Epoch 38/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0075 - accuracy: 1.0000\n",
"Epoch 39/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0067 - accuracy: 1.0000\n",
"Epoch 40/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0062 - accuracy: 1.0000\n",
"Epoch 41/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0057 - accuracy: 1.0000\n",
"Epoch 42/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0052 - accuracy: 1.0000\n",
"Epoch 43/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0049 - accuracy: 1.0000\n",
"Epoch 44/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0045 - accuracy: 1.0000\n",
"Epoch 45/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0040 - accuracy: 1.0000\n",
"Epoch 46/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0039 - accuracy: 1.0000\n",
"Epoch 47/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0036 - accuracy: 1.0000\n",
"Epoch 48/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0033 - accuracy: 1.0000\n",
"Epoch 49/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0031 - accuracy: 1.0000\n",
"Epoch 50/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0029 - accuracy: 1.0000\n",
"Epoch 51/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0027 - accuracy: 1.0000\n",
"Epoch 52/100\n",
"80/80 [==============================] - 0s 3ms/step - loss: 0.0026 - accuracy: 1.0000\n",
"Epoch 53/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0023 - accuracy: 1.0000\n",
"Epoch 54/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0022 - accuracy: 1.0000\n",
"Epoch 55/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0020 - accuracy: 1.0000\n",
"Epoch 56/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0019 - accuracy: 1.0000\n",
"Epoch 57/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0018 - accuracy: 1.0000\n",
"Epoch 58/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0017 - accuracy: 1.0000\n",
"Epoch 59/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0016 - accuracy: 1.0000\n",
"Epoch 60/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0015 - accuracy: 1.0000\n",
"Epoch 61/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0015 - accuracy: 1.0000\n",
"Epoch 62/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0013 - accuracy: 1.0000\n",
"Epoch 63/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0013 - accuracy: 1.0000\n",
"Epoch 64/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 0.0012 - accuracy: 1.0000\n",
"Epoch 65/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0011 - accuracy: 1.0000\n",
"Epoch 66/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 0.0010 - accuracy: 1.0000\n",
"Epoch 67/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 9.8200e-04 - accuracy: 1.0000\n",
"Epoch 68/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 9.1587e-04 - accuracy: 1.0000\n",
"Epoch 69/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 8.7920e-04 - accuracy: 1.0000\n",
"Epoch 70/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 8.2718e-04 - accuracy: 1.0000\n",
"Epoch 71/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 7.8329e-04 - accuracy: 1.0000\n",
"Epoch 72/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 7.2940e-04 - accuracy: 1.0000\n",
"Epoch 73/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 6.8870e-04 - accuracy: 1.0000\n",
"Epoch 74/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 6.5892e-04 - accuracy: 1.0000\n",
"Epoch 75/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 6.1487e-04 - accuracy: 1.0000\n",
"Epoch 76/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 5.9747e-04 - accuracy: 1.0000\n",
"Epoch 77/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 5.6380e-04 - accuracy: 1.0000\n",
"Epoch 78/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 5.1814e-04 - accuracy: 1.0000\n",
"Epoch 79/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 4.8919e-04 - accuracy: 1.0000\n",
"Epoch 80/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 4.7064e-04 - accuracy: 1.0000\n",
"Epoch 81/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 4.3856e-04 - accuracy: 1.0000\n",
"Epoch 82/100\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"80/80 [==============================] - 0s 2ms/step - loss: 4.3296e-04 - accuracy: 1.0000\n",
"Epoch 83/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 4.0910e-04 - accuracy: 1.0000\n",
"Epoch 84/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 3.7840e-04 - accuracy: 1.0000\n",
"Epoch 85/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 3.5079e-04 - accuracy: 1.0000\n",
"Epoch 86/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 3.3604e-04 - accuracy: 1.0000\n",
"Epoch 87/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 3.3392e-04 - accuracy: 1.0000\n",
"Epoch 88/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 3.1145e-04 - accuracy: 1.0000\n",
"Epoch 89/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 2.9306e-04 - accuracy: 1.0000\n",
"Epoch 90/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 2.7964e-04 - accuracy: 1.0000\n",
"Epoch 91/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 2.6452e-04 - accuracy: 1.0000\n",
"Epoch 92/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 2.5428e-04 - accuracy: 1.0000\n",
"Epoch 93/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 2.3855e-04 - accuracy: 1.0000\n",
"Epoch 94/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 2.2384e-04 - accuracy: 1.0000\n",
"Epoch 95/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 2.1728e-04 - accuracy: 1.0000\n",
"Epoch 96/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 2.0463e-04 - accuracy: 1.0000\n",
"Epoch 97/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 1.9412e-04 - accuracy: 1.0000\n",
"Epoch 98/100\n",
"80/80 [==============================] - 0s 2ms/step - loss: 1.8523e-04 - accuracy: 1.0000\n",
"Epoch 99/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 1.7388e-04 - accuracy: 1.0000\n",
"Epoch 100/100\n",
"80/80 [==============================] - 0s 1ms/step - loss: 1.6511e-04 - accuracy: 1.0000\n",
"Accuracy: 0.0\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAPoAAAECCAYAAADXWsr9AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAJ20lEQVR4nO3d3atdB52H8edrTpo0qaigM9CkTHIhDqUqLQenteBFU3DGir2Ziw5UGG/CwEytIoh64z8gohejEOr0xqIMsReDiOPgy8VcTDBNi9ocR5yqbfqi8cIXCuZFf16cXUyTTPc6Zq2z9unv+UAhZ3d39Us4D2vtnX1WUlVIenV7zdwDJE3P0KUGDF1qwNClBgxdasDQpQZmCz3J3yb53yQ/TvKxuXYMleSmJN9OcjrJk0kenHvTEEl2JXk8yVfn3jJEktcnOZ7kh0k2ktwx96Zlknx48T3xgyRfSrJ37k2XmyX0JLuAfwX+DrgZ+IckN8+xZQsuAh+pqpuB24F/3gGbAR4ENuYesQWfBb5eVX8NvJ0V357kAPBBYL2qbgF2AffNu+pKc53R3wH8uKqeqqrzwJeBe2faMkhVPV9Vpxa//i2b34AH5l31ypIcBO4BHpp7yxBJXge8C/gCQFWdr6pfzTpqmDXg+iRrwD7guZn3XGGu0A8Az1zy9RlWPJpLJTkE3AqcmHnKMp8BPgr8YeYdQx0GzgIPL15uPJRk/9yjXklVPQt8CngaeB74dVV9Y95VV/LNuC1KcgPwFeBDVfWbuff8f5K8F/hFVT0295YtWANuAz5fVbcCLwIr/f5NkjeweTV6GLgR2J/k/nlXXWmu0J8Fbrrk64OLx1Zakt1sRv5IVT06954l7gTel+SnbL40uivJF+edtNQZ4ExVvXSldJzN8FfZ3cBPqupsVV0AHgXeOfOmK8wV+neBNyc5nOQ6Nt+8+I+ZtgySJGy+dtyoqk/PvWeZqvp4VR2sqkNs/v5+q6pW7kxzqap6AXgmyVsWDx0BTs84aYingduT7Ft8jxxhBd9AXJvjf1pVF5P8C/CfbL5L+W9V9eQcW7bgTuD9wPeTPLF47BNV9bX5Jr0qPQA8sjgBPAV8YOY9r6iqTiQ5Dpxi809mHgeOzbvqSvHHVKVXP9+MkxowdKkBQ5caMHSpAUOXGpg99CRH596wFTttL7h5O6z63tlDB1b6N+gqdtpecPN2WOm9qxC6pIlN8oGZ67Kn9jLsh44ucI7d7Bl9w1R22l5w83ZYlb2/40XO17lc/vgkH4Hdy37+JkemOLSkV3CivnnVx710lxowdKkBQ5caMHSpAUOXGhgU+k67B7ukl1sa+g69B7ukSww5o++4e7BLerkhoe/oe7BLGvGTcYuf3jkKsJd9Yx1W0giGnNEH3YO9qo5V1XpVra/CZ34l/cmQ0HfcPdglvdzSS/cdeg92SZcY9Bp98ZcU+BcVSDuUn4yTGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpgaWhJ7kpybeTnE7yZJIHt2OYpPGsDXjOReAjVXUqyWuBx5L8V1WdnnibpJEsPaNX1fNVdWrx698CG8CBqYdJGs+WXqMnOQTcCpyYZI2kSQy5dAcgyQ3AV4APVdVvrvLvjwJHAfayb7SBkq7doDN6kt1sRv5IVT16tedU1bGqWq+q9d3sGXOjpGs05F33AF8ANqrq09NPkjS2IWf0O4H3A3cleWLxz3sm3iVpREtfo1fVfwPZhi2SJuIn46QGDF1qwNClBgxdasDQpQYMXWrA0KUGDF1qwNClBgxdasDQpQYMXWrA0KUGDF1qwNClBgxdasDQpQYMXWrA0KUGDF1qwNClBgxdasDQpQYMXWrA0KUGDF1qwNClBgxdasDQpQYMXWrA0KUGDF1qwNClBgxdasDQpQYMXWpgcOhJdiV5PMlXpxwkaXxbOaM/CGxMNUTSdAaFnuQgcA/w0LRzJE1h6Bn9M8BHgT9MN0XSVJaGnuS9wC+q6rElzzua5GSSkxc4N9pASdduyBn9TuB9SX4KfBm4K8kXL39SVR2rqvWqWt/NnpFnSroWS0Ovqo9X1cGqOgTcB3yrqu6ffJmk0fjn6FIDa1t5clV9B/jOJEskTcYzutSAoUsNGLrUgKFLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61IChSw0YutSAoUsNGLrUgKFLDRi61MCg0JO8PsnxJD9MspHkjqmHSRrP2sDnfRb4elX9fZLrgH0TbpI0sqWhJ3kd8C7gHwGq6jxwftpZksY05NL9MHAWeDjJ40keSrJ/4l2SRjQk9DXgNuDzVXUr8CLwscuflORokpNJTl7g3MgzJV2LIaGfAc5U1YnF18fZDP9lqupYVa1X1fpu9oy5UdI1Whp6Vb0APJPkLYuHjgCnJ10laVRD33V/AHhk8Y77U8AHppskaWyDQq+qJ4D1aadImoqfjJMaMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qQFDlxowdKkBQ5caMHSpAUOXGjB0qYGhf5vq1uy7ntzy1vGP+70fjX/Mhd8dedskx/35+u5Jjguw8U+fm+S499z27kmOy9490xwX+NU7bpzkuDf8+/9Mctzt5hldasDQpQYMXWrA0KUGDF1qwNClBgxdamBQ6Ek+nOTJJD9I8qUke6ceJmk8S0NPcgD4ILBeVbcAu4D7ph4maTxDL93XgOuTrAH7gOemmyRpbEtDr6pngU8BTwPPA7+uqm9MPUzSeIZcur8BuBc4DNwI7E9y/1WedzTJySQnL1x8cfylkv5sQy7d7wZ+UlVnq+oC8CjwzsufVFXHqmq9qtZ3r+0fe6ekazAk9KeB25PsSxLgCLAx7SxJYxryGv0EcBw4BXx/8d8cm3iXpBEN+nn0qvok8MmJt0iaiJ+MkxowdKkBQ5caMHSpAUOXGjB0qYFJbvd88YbX8MIdrx39uH/53XOjH/MlP7t3muO+8cAvpzkw092W+fc3/cUkx33m7vG/J15y6OH/m+S4Fyc56vbzjC41YOhSA4YuNWDoUgOGLjVg6FIDhi41YOhSA4YuNWDoUgOGLjVg6FIDhi41YOhSA4YuNWDoUgOGLjVg6FIDhi41YOhSA4YuNZCqGv+gyVngZwOf/kZgululjm+n7QU3b4dV2ftXVfWmyx+cJPStSHKyqtZnHbEFO20vuHk7rPpeL92lBgxdamAVQj8294At2ml7wc3bYaX3zv4aXdL0VuGMLmlihi41YOhSA4YuNWDoUgN/BA+FH6Pgk
c4wAAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 288x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"kf=KFold(n_splits=5)\n",
"\n",
"for train_index,test_index in kf.split(X):\n",
" X_train,X_test,y_train,y_test=X[train_index],X[test_index],y[train_index],y[test_index]\n",
" fit_predict_print(nn_fit_predict,X_train,y_train,X_test,y_test)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# SVM"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"from svm import *"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Accuracy: 68.0\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAPoAAAECCAYAAADXWsr9AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKZklEQVR4nO3dTYhd9RnH8d+vM1N1ovWlujEJTQrFIkKNXqwacDER+qLopgtbFOpmNq1GEax247ogoosiDLFujLqIWRQpajG6KJVJJy+oyViRaGNixKmlGiI1M/p0ca+QZMbcc53zn3NPnu8HApmT6/FhuF/+95459z+OCAE4s32r6QEAlEfoQAKEDiRA6EAChA4kQOhAAo2Fbvuntv9p+x3bDzQ1R1W219p+xfZ+2/tsb256pipsj9jeY/v5pmepwvYFtrfZfsv2rO3rmp6pH9v39p4Tb9p+xvbZTc90qkZCtz0i6Y+Sfibpckm/tH15E7MMYEHSfRFxuaRrJf2mBTNL0mZJs00PMYDHJL0QET+U9CMN+ey2V0u6W1InIq6QNCLptmanWqypFf0aSe9ExIGIOC7pWUm3NjRLJRFxJCJ29/5+VN0n4Opmpzo922sk3SRpS9OzVGH7fEk3SHpCkiLieET8t9GhqhmVdI7tUUnjkj5oeJ5Fmgp9taT3T/j6kIY8mhPZXidpg6Tphkfp51FJ90v6suE5qlovaU7Sk723G1tsr2p6qNOJiMOSHpZ0UNIRSZ9ExEvNTrUYF+MGZPtcSc9JuiciPm16nq9j+2ZJH0XErqZnGcCopKskPR4RGyQdkzTU129sX6juq9H1ki6VtMr27c1OtVhToR+WtPaEr9f0jg0122PqRr41IrY3PU8fGyXdYvs9dd8aTdh+qtmR+jok6VBEfPVKaZu64Q+zGyW9GxFzETEvabuk6xueaZGmQv+HpB/YXm/72+pevPhzQ7NUYtvqvnecjYhHmp6nn4h4MCLWRMQ6db+/OyJi6FaaE0XEh5Let31Z79AmSfsbHKmKg5KutT3ee45s0hBeQBxt4n8aEQu2fyvpRXWvUv4pIvY1McsANkq6Q9Ibtvf2jv0+Iv7S3EhnpLskbe0tAAck3dnwPKcVEdO2t0nare5PZvZImmp2qsXMx1SBMx8X44AECB1IgNCBBAgdSIDQgQQaD932ZNMzDKJt80rMvBKGfd7GQ5c01N+gJbRtXomZV8JQzzsMoQMorMgNMxdfNBLr1o5Veuzcx1/oku+OVHrs26+PL2esWszrc43prKbHGAgzlzcs8/5Px3Q8Pvepx4vcArtu7Zh2vri2/wMH9JNLr6z9nMCZZDpeXvI4L92BBAgdSIDQgQQIHUiA0IEEKoXetj3YAZysb+gt3YMdwAmqrOit24MdwMmqhN7qPdgB1Hgxzvak7RnbM3Mff1HXaQHUoErolfZgj4ipiOhERKfqvesAVkaV0Fu3BzuAk/X9UEtL92AHcIJKn17r/ZICflEB0FLcGQckQOhAAoQOJEDoQAKEDiRQZM+4t18fL7K/28LE1bWf8yujO3YVOzfQNFZ0IAFCBxIgdCABQgcSIHQgAUIHEiB0IAFCBxIgdCABQgcSIHQgAUIHEiB0IAFCBxIgdCABQgcSIHQgAUIHEiB0IAFCBxIgdCABQgcSKLLdcyklt2Q+/Lvri5x39R/+XuS8aL8S25fHzteWPM6KDiRA6EAChA4kQOhAAoQOJEDoQAKEDiTQN3Tba22/Ynu/7X22N6/EYADqU+WGmQVJ90XEbtvnSdpl+68Rsb/wbABq0ndFj4gjEbG79/ejkmYlrS49GID6DPQe3fY6SRskTReZBkARle91t32upOck3RMRny7x75OSJiXpbI3XNiCA5au0otseUzfyrRGxfanHRMRURHQiojOms+qcEcAyVbnqbklPSJqNiEfKjwSgblVW9I2S7pA0YXtv78/PC88FoEZ936NHxN8keQVmAVAId8YBCRA6kAChAwkQOpAAoQMJOCJqP+l3fFH82JtqP28bHXj6ymLn/v6v9hY7N9ppOl7Wp/GfRT8lY0UHEiB0IAFCBxIgdCABQgcSIHQgAUIHEiB0IAFCBxIgdCABQgcSIHQgAUIHEiB0IAFCBxIgdCABQgcSIHQgAUIHEiB0IAFCBxIgdCCBvr9k8ZuI88a1cM3VtZ93dMeu2s9ZGlsyt9vCRP3P46+s5POZFR1IgNCBBAgdSIDQgQQIHUiA0IEECB1IoHLotkds77H9fMmBANRvkBV9s6TZUoMAKKdS6LbXSLpJ0pay4wAooeqK/qik+yV9WW4UAKX0Dd32zZI+iojT3phre9L2jO2Z+fljtQ0IYPmqrOgbJd1i+z1Jz0qasP3UqQ+KiKmI6EREZ2xsVc1jAliOvqFHxIMRsSYi1km6TdKOiLi9+GQAasPP0YEEBvo8ekS8KunVIpMAKIYVHUiA0IEECB1IgNCBBAgdSKDILrA++lkrd2xF14Gnryxy3jbuiHumPI9Z0YEECB1IgNCBBAgdSIDQgQQIHUiA0IEECB1IgNCBBAgdSIDQgQQIHUiA0IEECB1IgNCBBAgdSIDQgQQIHUiA0IEECB1IgNCBBIrsAot2K7Vb68LE1UXOK505u7WWwooOJEDoQAKEDiRA6EAChA4kQOhAAoQOJFApdNsX2N5m+y3bs7avKz0YgPpUvWHmMUkvRMQvbH9b0njBmQDUrG/ots+XdIOkX0tSRByXdLzsWADqVOWl+3pJc5KetL3H9hbbqwrPBaBGVUIflXSVpMcjYoOkY5IeOPVBtidtz9iemdfnNY8JYDmqhH5I0qGImO59vU3d8E8SEVMR0YmIzpjOqnNGAMvUN/SI+FDS+7Yv6x3aJGl/0akA1KrqVfe7JG3tXXE/IOnOciMBqFul0CNir6RO2VEAlMKdcUAChA4kQOhAAoQOJEDoQAKEDiTAds9YMW3ckrnkFtUlxM7XljzOig4kQOhAAoQOJEDoQAKEDiRA6EAChA4kQOhAAoQOJEDoQAKEDiRA6EAChA4kQOhAAoQOJEDoQAKEDiRA6EAChA4kQOhAAoQOJFBkF9g4b1wL19S/e2YbdxEFvk6J57PjsyWPs6IDCRA6kAChAwkQOpAAoQMJEDqQAKEDCVQK3fa9tvfZftP2M7bPLj0YgPr0Dd32akl3S+pExBWSRiTdVnowAPWp+tJ9VNI5tkcljUv6oNxIAOrWN/SIOCzpYUkHJR2R9ElEvFR6MAD1qfLS/UJJt0paL+lSSats377E4yZtz9iemZ8/Vv+kAL6xKi/db5T0bkTMRcS8pO2Srj/1QRExFRGdiOiMja2qe04Ay1Al9IOSrrU9btuSNkmaLTsWgDpVeY8+LWmbpN2S3uj9N1OF5wJQo0qfR4+IhyQ9VHgWAIVwZxyQAKEDCRA6kAChAwkQOpAAoQMJFNnu2Uc/a93WzAsT9W9PXVrbvsdtVPJ7XOI5FztfW/I4KzqQAKEDCRA6kAChAwkQOpAAoQMJEDqQAKEDCRA6kAChAwkQOpAAoQMJEDqQAKEDC
RA6kAChAwkQOpAAoQMJEDqQAKEDCRA6kIAjov6T2nOS/lXx4RdL+nftQ5TTtnklZl4JwzLv9yLiklMPFgl9ELZnIqLT6BADaNu8EjOvhGGfl5fuQAKEDiQwDKFPNT3AgNo2r8TMK2Go5238PTqA8oZhRQdQGKEDCRA6kAChAwkQOpDA/wEsylwp88iiEgAAAABJRU5ErkJggg==\n",
"text/plain": [
"<Figure size 288x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"fit_predict_print(svm_fit_predict, X_train, y_train, X_test, y_test)"
]
}
],
"metadata": {
...
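A note on the `Accuracy: 0.0` lines in the K-fold ANN cells above: if features_30_sec.csv is ordered by genre (as the GTZAN feature CSV usually is), KFold(n_splits=5) without shuffling holds out whole genres that never appear in the training folds, so the network cannot predict them and per-fold accuracy collapses to zero. The following is a minimal sketch of the shuffled variant, not code from this commit; X, y, fit_predict_print and nn_fit_predict are assumed to come from util.py and ann.py as in the notebook.

from sklearn.model_selection import KFold

# Hypothetical fix: shuffle the rows before splitting so that every fold
# contains examples of every genre.
kf = KFold(n_splits=5, shuffle=True, random_state=0)
for train_index, test_index in kf.split(X):
    X_train, X_test = X[train_index], X[test_index]
    y_train, y_test = y[train_index], y[test_index]
    fit_predict_print(nn_fit_predict, X_train, y_train, X_test, y_test)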
@@ -11,10 +11,10 @@ import numpy as np
 def rf_fit_predict(X_train, y_train, X_test):
-    classifier = RandomForestClassifier(max_samples=0.95, n_estimators= 3000, bootstrap=True, min_samples_split=2, min_samples_leaf=1, criterion="entropy", random_state=0)
+    classifier = RandomForestClassifier(n_estimators= 100, criterion="entropy", random_state=0)
     # classifier = ExtraTreesClassifier(max_samples=0.75, n_estimators= 3000, bootstrap=True, min_samples_split=2, min_samples_leaf=1, criterion="entropy", random_state=0)
     classifier.fit(X_train,y_train)
-    # print(classifier.get_params())
+    print("Hypperparameters: ", classifier.get_params())
     return classifier.predict(X_test)

 def randomized_search_fold_size_rf_fit_predict(X_train, y_train, X_test):
...
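For sklearn estimators such as the refactored random forest above, the hand-rolled K-fold loop can also be cross-checked with scikit-learn's built-in scorer. This is an illustrative sketch only, not code from the repository; it assumes X, y are the scaled features and label-encoded targets returned by load_preprocess_xy(0, True, True, False).

from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score

# Mirrors rf_fit_predict's hyperparameters and reports the mean accuracy over
# 5 folds instead of printing one confusion matrix per fold.
clf = RandomForestClassifier(n_estimators=100, criterion="entropy", random_state=0)
scores = cross_val_score(clf, X, y, cv=5, scoring="accuracy")
print("Mean 5-fold accuracy:", 100 * scores.mean())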
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from util import *
from ann import *
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import KFold
# def load_data():
#     dataset = pd.read_csv("data/features_30_sec.csv")
#     X = dataset.iloc[:, 1:59].values
#     y = dataset.iloc[:, 59].values
#     #importing the dataset
#     from sklearn.model_selection import train_test_split
#     X_train, X_test = train_test_split(X, test_size=0.25, random_state= 0)
#     y_train, y_test = train_test_split(y, test_size=0.25, random_state= 0)
#     #splitting our data set into training set and test set
#     from sklearn.model_selection import train_test_split
#     X_train, X_test = train_test_split(X, test_size=0.25, random_state= 0)
#     y_train, y_test = train_test_split(y, test_size=0.25, random_state= 0)
#     #feature scaling
#     from sklearn.preprocessing import StandardScaler
#     sc_X = StandardScaler()
#     X_train = sc_X.fit_transform(X_train)
#     X_test = sc_X.transform(X_test)
#     return X_train, X_test, y_train, y_test

# def get_accuracy(cm):
#     sum = 0
#     for i in range(cm.shape[0]):
#         sum = sum + cm[i][i]
#     return 100*(sum/np.sum(cm))

# def fit_predict_print(fit_predict_function, X_train, y_train, X_test, y_test):
#     y_pred = fit_predict_function(X_train, y_train, X_test)
#     cm = confusion_matrix(y_test, y_pred)
#     #print(cm)
#     print(get_accuracy(cm))
X_train, X_test, y_train, y_test = load_scale_xy_with_25p_split()
X, y = load_preprocess_xy(0, True, True, True)
kf=KFold(n_splits=5)
for train_index,test_index in kf.split(X):
    X_train,X_test,y_train,y_test=X[train_index],X[test_index],y[train_index],y[test_index]
    fit_predict_print(nn_fit_predict,X_train,y_train,X_test,y_test)
# y_pred = rf_fit_predict(X_train, y_train, X_test)
# cm = confusion_matrix(y_test, y_pred)
# #print(cm)
# print(get_accuracy(cm))
# fit_predict_print(randomized_search_cv_rf_fit_predict, X_train, y_train, X_test, y_test)
#fit_predict_print(randomized_search_cv_rf_fit_predict, X_train, y_train, X_test, y_test)
# fit_predict_print(rf_fit_predict, X_train, y_train, X_test, y_test)
# y_pred = randomized_search_fold_size_rf_fit_predict(X_train, y_train, X_test)
pass
#fit_predict_print(nn_fit_predict, X_train, y_train, X_test, y_test)
# cm = confusion_matrix(y_test, y_pred)
# #print(cm)
# print(get_accuracy(cm))
# plt.matshow(cm)
# plt.show()
pass
from sklearn import svm
def svm_fit_predict(X_train, y_train, X_test):
    model=svm.SVC(C=2,kernel='rbf') # C: regularization strength, 'rbf': radial basis function kernel
    model.fit(X_train,y_train)
    return model.predict(X_test)
\ No newline at end of file
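The C=2 / kernel='rbf' choice above is fixed by hand; since the commit already experiments with randomized search for the random forest, a small grid search is one way to sanity-check these values for the SVM as well. An illustrative sketch, not part of the commit, assuming X_train, y_train come from load_preprocess_xy with label-encoded targets:

from sklearn import svm
from sklearn.model_selection import GridSearchCV

# Illustrative: search over the regularization strength C and the RBF kernel
# width gamma, scoring each combination with 5-fold cross-validated accuracy.
param_grid = {"C": [0.5, 1, 2, 4, 8], "gamma": ["scale", 0.01, 0.1]}
search = GridSearchCV(svm.SVC(kernel="rbf"), param_grid, cv=5, scoring="accuracy")
search.fit(X_train, y_train)
print("Best params:", search.best_params_, "CV accuracy:", 100 * search.best_score_)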
@@ -22,6 +22,31 @@ def load_scale_xy_with_25p_split():
     return X_train, X_test, y_train, y_test

+def load_preprocess_xy(split_percentage, scale_x, encode_y, dummify_y):
+    dataset = pd.read_csv("data/features_30_sec.csv")
+    X = dataset.iloc[:, 1:59].values
+    y = dataset.iloc[:, 59].values
+    if encode_y:
+        encoder = LabelEncoder()
+        y = encoder.fit_transform(y)
+    if scale_x:
+        sc_X = StandardScaler()
+        X = sc_X.fit_transform(X) #TODO should we scale test and train separately? Yes, but wont have much difference?
+    if dummify_y:
+        y = pd.get_dummies(y).values
+    if split_percentage > 0:
+        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = split_percentage, random_state = 0)
+        return X_train, X_test, y_train, y_test
+    return X, y
+
 def load_scale_x_encode_y():
     dataset = pd.read_csv("data/features_30_sec.csv")
     X = dataset.iloc[:, 1:59].values
@@ -42,6 +67,9 @@ def get_accuracy(cm):
 def fit_predict_print(fit_predict_function, X_train, y_train, X_test, y_test):
     y_pred = fit_predict_function(X_train, y_train, X_test)
+    if y_pred.dtype != y_test.dtype:
+        y_pred = np.argmax(y_pred,axis=1)
+        y_test = np.argmax(y_test,axis=1)
     cm = confusion_matrix(y_test, y_pred)
     #print(cm)
     print("Accuracy: ", get_accuracy(cm))
...
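One caveat about load_preprocess_xy as committed: train_test_split interprets an integer test_size as an absolute number of samples, so despite the parameter name, split_percentage=25 holds out 25 rows rather than 25% of the data. The sketch below is an observation about the sklearn API, not a change made in this commit.

# A fractional value gives a genuine 25% hold-out; the integer 25 gives 25 rows.
X_train, X_test, y_train, y_test = load_preprocess_xy(0.25, True, False, False)

# Full arrays (scaled features, one-hot targets) for the manual K-fold driver,
# as used elsewhere in this commit.
X, y = load_preprocess_xy(0, True, True, True)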