From 00e5a70eafb353516a11e28251859d7fccef53d9 Mon Sep 17 00:00:00 2001 From: ECsushmi <109535319+ECsushmi@users.noreply.github.com> Date: Mon, 18 Jul 2022 23:17:42 +0530 Subject: [PATCH] traffic sign recognition --- code.ipynb | 501 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 501 insertions(+) create mode 100644 code.ipynb diff --git a/code.ipynb b/code.ipynb new file mode 100644 index 0000000..489e429 --- /dev/null +++ b/code.ipynb @@ -0,0 +1,501 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 3, + "id": "2d1c1373", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import pandas as pd\n", + "import matplotlib.pyplot as plt\n", + "import cv2\n", + "import tensorflow as tf\n", + "from PIL import Image\n", + "from sklearn.model_selection import train_test_split\n", + "from keras.utils import to_categorical\n", + "from keras.models import Sequential, load_model\n", + "from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n", + "import os\n", + "os.chdir(r\"C:\TrafficSign\")" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "3738f245", + "metadata": {}, + "outputs": [], + "source": [ + "data = []\n", + "labels = []\n", + "classes = 43\n", + "cur_path = os.getcwd()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "f1a56d0c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'C:\\TrafficSign'" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cur_path" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "fddc70bb", + "metadata": {}, + "outputs": [], + "source": [ + "for i in range(classes): \n", + " path = os.path.join(cur_path,'train',str(i)) \n", + " images = os.listdir(path)\n", + " for a in images:\n", + " try: \n", + " image = Image.open(os.path.join(path, a)) \n", + " image = image.resize((30,30)) \n", + " # Resize all images to 30x30 \n", + " image = np.array(image) \n", + " data.append(image) \n", + " labels.append(i)\n", + " except Exception as e:\n", + " print(e)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "05e71639", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(39209, 30, 30, 3) (39209,)\n" + ] + } + ], + "source": [ + "data = np.array(data) \n", + "labels = np.array(labels)\n", + "print(data.shape, labels.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "71990c12", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(31367, 30, 30, 3) (7842, 30, 30, 3) (31367,) (7842,)\n" + ] + } + ], + "source": [ + "X_train, X_test, y_train, y_test = train_test_split(data, labels, test_size=0.2, random_state=0)\n", + "print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "47f6c17a", + "metadata": {}, + "outputs": [], + "source": [ + "y_train = to_categorical(y_train, 43) \n", + "y_test = to_categorical(y_test, 43)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "d03b122a", + "metadata": {}, + "outputs": [], + "source": [ + "model = Sequential() \n", + "model.add(Conv2D(filters=32, kernel_size=(5,5), activation='relu', input_shape=X_train.shape[1:])) \n", + "model.add(Conv2D(filters=32, kernel_size=(5,5), activation='relu'))\n", + "model.add(MaxPool2D(pool_size=(2,2))) \n", + "model.add(Dropout(rate=0.25))\n", + "model.add(Conv2D(filters=64, 
kernel_size=(3,3), activation='relu'))\n", + "model.add(Conv2D(filters=64, kernel_size=(3,3), activation='relu')) \n", + "model.add(MaxPool2D(pool_size=(2,2))) \n", + "model.add(Dropout(rate=0.25)) \n", + "model.add(Flatten()) \n", + "model.add(Dense(256, activation='relu'))\n", + "model.add(Dropout(rate=0.5))\n", + "model.add(Dense(43, activation='softmax'))\n", + "# We have 43 classes that's why we have defined 43in the dense model.add(Dense(43, activation='softmax'))" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "5fe46a28", + "metadata": {}, + "outputs": [], + "source": [ + "model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy']) " + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "19ae23c7", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1/20\n", + "981/981 [==============================] - 67s 68ms/step - loss: 1.8418 - accuracy: 0.5321 - val_loss: 0.5088 - val_accuracy: 0.8634\n", + "Epoch 2/20\n", + "981/981 [==============================] - 67s 69ms/step - loss: 0.8116 - accuracy: 0.7580 - val_loss: 0.3184 - val_accuracy: 0.8961\n", + "Epoch 3/20\n", + "981/981 [==============================] - 66s 68ms/step - loss: 0.5926 - accuracy: 0.8209 - val_loss: 0.2286 - val_accuracy: 0.9350\n", + "Epoch 4/20\n", + "981/981 [==============================] - 65s 67ms/step - loss: 0.4608 - accuracy: 0.8611 - val_loss: 0.2445 - val_accuracy: 0.9337\n", + "Epoch 5/20\n", + "981/981 [==============================] - 67s 68ms/step - loss: 0.3735 - accuracy: 0.8872 - val_loss: 0.1413 - val_accuracy: 0.9606\n", + "Epoch 6/20\n", + "981/981 [==============================] - 65s 67ms/step - loss: 0.3304 - accuracy: 0.9023 - val_loss: 0.1390 - val_accuracy: 0.9578\n", + "Epoch 7/20\n", + "981/981 [==============================] - 70s 71ms/step - loss: 0.2975 - accuracy: 0.9126 - val_loss: 0.0934 - val_accuracy: 0.9744\n", + "Epoch 8/20\n", + "981/981 [==============================] - 68s 70ms/step - loss: 0.2769 - accuracy: 0.9197 - val_loss: 0.1453 - val_accuracy: 0.9551\n", + "Epoch 9/20\n", + "981/981 [==============================] - 71s 73ms/step - loss: 0.2600 - accuracy: 0.9246 - val_loss: 0.0965 - val_accuracy: 0.9727\n", + "Epoch 10/20\n", + "981/981 [==============================] - 65s 66ms/step - loss: 0.2375 - accuracy: 0.9302 - val_loss: 0.0755 - val_accuracy: 0.9819\n", + "Epoch 11/20\n", + "981/981 [==============================] - 65s 66ms/step - loss: 0.2486 - accuracy: 0.9290 - val_loss: 0.0718 - val_accuracy: 0.9792\n", + "Epoch 12/20\n", + "981/981 [==============================] - 67s 69ms/step - loss: 0.2142 - accuracy: 0.9379 - val_loss: 0.0948 - val_accuracy: 0.9730\n", + "Epoch 13/20\n", + "981/981 [==============================] - 66s 68ms/step - loss: 0.2222 - accuracy: 0.9358 - val_loss: 0.0530 - val_accuracy: 0.9851\n", + "Epoch 14/20\n", + "981/981 [==============================] - 71s 72ms/step - loss: 0.2021 - accuracy: 0.9430 - val_loss: 0.0860 - val_accuracy: 0.9763\n", + "Epoch 15/20\n", + "981/981 [==============================] - 69s 70ms/step - loss: 0.2068 - accuracy: 0.9415 - val_loss: 0.0640 - val_accuracy: 0.9818\n", + "Epoch 16/20\n", + "981/981 [==============================] - 66s 68ms/step - loss: 0.2175 - accuracy: 0.9393 - val_loss: 0.0693 - val_accuracy: 0.9805\n", + "Epoch 17/20\n", + "981/981 [==============================] - 66s 67ms/step - loss: 0.1980 - accuracy: 0.9434 - 
val_loss: 0.0794 - val_accuracy: 0.9784\n", + "Epoch 18/20\n", + "981/981 [==============================] - 65s 66ms/step - loss: 0.1971 - accuracy: 0.9465 - val_loss: 0.0795 - val_accuracy: 0.9791\n", + "Epoch 19/20\n", + "981/981 [==============================] - 68s 69ms/step - loss: 0.1895 - accuracy: 0.9480 - val_loss: 0.0580 - val_accuracy: 0.9853\n", + "Epoch 20/20\n", + "981/981 [==============================] - 67s 68ms/step - loss: 0.2153 - accuracy: 0.9418 - val_loss: 0.0583 - val_accuracy: 0.9833\n" + ] + } + ], + "source": [ + "epochs = 20\n", + "history = model.fit(X_train, y_train, batch_size=32, epochs=epochs, validation_data=(X_test, y_test))" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "7a84f2ad", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "model.save(\"./training/TSR.h5\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "898a6980", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "3c52b78a", + "metadata": {}, + "outputs": [], + "source": [ + "def testing(testcsv):\n", + " y_test = pd.read_csv(testcsv)\n", + " label = y_test[\"ClassId\"].values\n", + " imgs = y_test[\"Path\"].values\n", + " data=[]\n", + " for img in imgs:\n", + " image = Image.open(img)\n", + " image = image.resize((30,30))\n", + " data.append(np.array(image))\n", + " X_test=np.array(data)\n", + " return X_test,label" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "4b891639", + "metadata": { + "scrolled": false + }, + "outputs": [], + "source": [ + "X_test, label = testing('Test.csv')" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "id": "58e55f72", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "395/395 [==============================] - 6s 16ms/step\n" + ] + } + ], + "source": [ + "Y_pred=model.predict(X_test) \n", + "classes_x=np.argmax(Y_pred,axis=1)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "id": "69ed1b6f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([16, 1, 38, ..., 32, 7, 10], dtype=int64)" + ] + }, + "execution_count": 38, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "classes_x" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "id": "1eeae535", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0.9491686460807601" + ] + }, + "execution_count": 39, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from sklearn.metrics import accuracy_score\n", + "accuracy_score(label,classes_x)" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "id": "7533effa", + "metadata": {}, + "outputs": [], + "source": [ + "model.save(\"./training/TSR.h5\")" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "id": "2b17436b", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "os.chdir(\"C:\\TrafficSign\")\n", + "from keras.models import load_model\n", + "model = load_model('./training/TSR.h5')" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "id": "e0728655", + "metadata": {}, + "outputs": [], + "source": [ + "classes = { 0:'Speed limit (20km/h)',\n", + " 1:'Speed limit (30km/h)',\n", + " 2:'Speed limit (50km/h)',\n", + " 3:'Speed limit (60km/h)',\n", + " 4:'Speed limit (70km/h)',\n", + " 5:'Speed limit (80km/h)',\n", + " 6:'End of speed limit (80km/h)',\n", + " 
7:'Speed limit (100km/h)',\n", + " 8:'Speed limit (120km/h)',\n", + " 9:'No passing',\n", + " 10:'No passing veh over 3.5 tons',\n", + " 11:'Right-of-way at intersection',\n", + " 12:'Priority road',\n", + " 13:'Yield',\n", + " 14:'Stop',\n", + " 15:'No vehicles',\n", + " 16:'Vehicle > 3.5 tons prohibited',\n", + " 17:'No entry',\n", + " 18:'General caution',\n", + " 19:'Dangerous curve left',\n", + " 20:'Dangerous curve right',\n", + " 21:'Double curve',\n", + " 22:'Bumpy road',\n", + " 23:'Slippery road',\n", + " 24:'Road narrows on the right',\n", + " 25:'Road work',\n", + " 26:'Traffic signals',\n", + " 27:'Pedestrians',\n", + " 28:'Children crossing',\n", + " 29:'Bicycles crossing',\n", + " 30:'Beware of ice/snow',\n", + " 31:'Wild animals crossing',\n", + " 32:'End speed + passing limits',\n", + " 33:'Turn right ahead',\n", + " 34:'Turn left ahead',\n", + " 35:'Ahead only',\n", + " 36:'Go straight or right',\n", + " 37:'Go straight or left',\n", + " 38:'Keep right',\n", + " 39:'Keep left',\n", + " 40:'Roundabout mandatory',\n", + " 41:'End of no passing',\n", + " 42:'End no passing vehicle > 3.5 tons' }\n" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "id": "fc0f9519", + "metadata": {}, + "outputs": [], + "source": [ + "from PIL import Image\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "def test_on_img(img):\n", + " data=[]\n", + " image = Image.open(img)\n", + " image = image.resize((30,30))\n", + " data.append(np.array(image))\n", + " X_test=np.array(data)\n", + " Y_pred=model.predict(X_test) \n", + " classes_x=np.argmax(Y_pred,axis=1)\n", + " return image,classes_x" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "id": "d5e330f6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1/1 [==============================] - 0s 18ms/step\n", + "Predicted traffic sign is: General caution\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAPsAAAD5CAYAAADhukOtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAYI0lEQVR4nO3de4zdZZkH8O9zLnPvtJ12GHqlthQFFQuOpasFMQYCyC6YbIhsopiQrdlIohuzu+j+IcmuKzFQxayyFuhSXAVU6FpdvHQJiBUsLVhKL5RemLYznWvnfj23Z/+YUzPU8zxnmDmXcd/vJ5l05vfM7/e+/Z3zzO+c8/ze9xVVBRH9/xcpdweIqDSY7ESBYLITBYLJThQIJjtRIJjsRIGIzWZnEbkBwAMAogAeVtV7vd+vq6vTRQ0NOWNDA/1uW+lk0ozNq62126yfb8Zq5s1z24zEZnV6iIomY+TDqdbT6DnbK7liM342i0gUwHcAXAegFcAeEdmhqoesfRY1NODuf/yHnLHnf7HdbW/wTKcZ+9gH15uxDTd+woxd+dGPum3WLW5040TlMtp5Juf2jdffaO4zm5fx6wEcU9UTqpoA8ASAW2ZxPCIqotkk+zIAp6f83JrdRkRzUNE/oBORTSKyV0T2Dg8PF7s5IjLMJtnbAKyY8vPy7La3UdUtqtqsqs11dXWzaI6IZmM2yb4HwFoReZeIVAD4FIAdhekWERXajD+NV9WUiNwF4FeYLL1tVdWD3j5Dfb14/idP5oxde/XVbnsf//J1ZuySqzbaO8Yr3OMS5ZNKjrvx/t4OOzjRZYYqahaasaGzA26b7W/lbnN8bNTcZ1aFZFV9BsAzszkGEZUG76AjCgSTnSgQTHaiQDDZiQLBZCcKBJOdKBAlHcO5as0aPPLjn+SM1TVeWMquEL1Nf2+/GXvyse+4+x47uNuMLY+PmLHKSvsekIERv7Y/0NWfc3tf15/cxPpHvLITBYLJThQIJjtRIJjsRIFgshMFgslOFIiSlt4isThLbH9mOg/sM2OV9fXuvgtWri5wb2bn9KkWM/afD281Y2+deN09bmrQLnc11GfMWNdgwowl026TqKnIvSCriL1QK6/sRIFgshMFgslOFAgmO1EgmOxEgWCyEwWCKxcSxnrsGVB3fPnvzdiilRe7x731Ww+asUi8OE+911+xy2SbH/hXMzaasvsTi/gLgCaScTM2mJwwY2Mp+5iap/ZWVWG3aeGVnSgQTHaiQDDZiQLBZCcKBJOdKBBMdqJAzKr+ISItAIYApAGkVLW5EJ2i0vrDkz80Yx1vHDNj/ac73eOeeulFM7bqmmvydyyHk8da3Ph3tzxkxobG7BFoNXWVZiym9kgyABiCPXFkz9iYGdOE3R+k/TZTxv8l5RyyEMXOj6lqTwGOQ0RFxJfxRIGYbbIrgF+LyCsisqkQHSKi4pjty/iNqtomIhcA2Ckib6jqC1N/IftHYBMArFy5cpbNEdFMzerKrqpt2X+7AGwHsD7H72xR1WZVbW5sbJxNc0Q0CzNOdhGpFZF5574HcD2AA4XqGBEV1mxexjcB2C4i547zQ1X9ZUF6RUQFN+NkV9UTAD5QwL5QkQy0trrxl5/6sRnTqP0UGR0Zdo+7+/HHzNiqqz5sxo4cO2LGvv2db7lt9o7aQ0PnL7TfRk6MdpuxC+vtOjoApOwSPTJqn7/KuF1Ln0jZM88CwJmzuev3SafQztIbUSCY7ESBYLITBYLJThQIJjtRIJjsRIEow+yyVmmAf3eK5eXHv+/G+9rshQmrFtnlqnhNlXvcE3t2m7GdD37bjO3usgdR9g+Oum3Go3aZbGxs3Iylxu3YoPa7bTZWJu2gExpTu0zmzTwLACnknl1WIeY+zDCiQDDZiQLBZCcKBJOdKBBMdqJAMNmJAlGG0hv/vhRD78mTZuwP/73d3VedmUwvuuETZixe7T99jj62zYy98j8/M2OVG+yZZyNil5YAIJG0R4tFI3ZZLl67wIwN9J1225wXsWeQjY7bsbG0nQsa90faIWnV9Fh6Iwoek50oEEx2okAw2YkCwWQnCgSTnSgQZSi9UTHscUa2Dbb5E07WXrTajK243i69IemPQOv6zfNmrKfFLmct6O81Y/Nratw2uwcHzVhlzC5LJTJ2+TFeXe222dF6xowlx+1SYH3tzEboAYBa82o6I+l4ZScKBJOdKBBMdqJAMNmJAsFkJwoEk50oEEx2okDkrbOLyFYANwPoUtX3Zbc1AHgSwCoALQBuU9W+4nWTAKDn6JtmbP+Op81YJOY/zCuvu8GMtfT2m7G+ATsGAAuv2mDG+k+eMmOJN98wY8s/eJXbZmJixIylnFjaGRo6lPCH1UplnRmLp+y6f++gvQhlylmgEQAi0dzb7bsFpndlfxTA+c+GuwE8q6prATyb/ZmI5rC8ya6qLwA4/5amWwCcm5lgG4BbC9stIiq0mb5nb1LV9uz3HQCarF8UkU0isldE9nZ322tgE1FxzfoDOlVVOG8VVHWLqjaranNjo726CBEV10yTvVNElgBA9t+uwnWJiIphpsm+A8Ad2e/vAPDTwnSHiIplOqW3xwFcC2CxiLQC+CqAewH8SETuBHASwG3F7CRNevG/HjVjA+2dZmzBJe92j1u/7gozdt99XzNjfeN+eejv7vysGVt48Roz1nvsLTNWvXqt2+aCmkozNtRnn6NUzSL7oLX1bpvD48NmLDY2ZO+Ysldv1EjuhRvPmUjlnl3WGambP9lV9XYj9PF8+xLR3ME76IgCwWQnCgSTnSgQTHaiQDDZiQLB2WXnmI5DB8zYa8/83IyJM7Jt1Q03u21mnHJVX4ddruodnnCPm563wIytvNZevLHv+HEzNv7mYbfNpZfbpbkWtWfDrY5UmbH6Wn/U2/F+OxavtY+bytjX2jiMYW1ZEaPGFnUu37yyEwWCyU4UCCY7USCY7ESBYLITBYLJThQIlt5KzVl4DwBeePRhMzba3WPGFr7nMjO29JqPuW2eHe43YzXV9kKK/XlGvVXX2RMxLv7QRjM2/7nnzVjnUXtEHABUrlhm96e21ozVps7aBx3zS2+NVfboteGkfT2NRuwhalUxezJKABgcyj3qDWofk1d2okAw2YkCwWQnCgSTnSgQTHaiQDDZiQLBZCcKBOvsJXbmtX1u/NDOX5sxqagwY6tv/EszFp8/320z5syOWlVtD3+N5xniWhG1h2nGG81FhLD0anv469njLW6bwy0nzVjje1easb5THfZB0/Z5BwBJ2TXxsWF7MUlrmCoARCP+dXg0nvsxzYi96hKv7ESBYLITBYLJThQIJjtRIJjsRIFgshMFYjoLO24FcDOALlV9X3bbPQD+FsC5z/m/oqrPFKuTf3Yy9tDPXdu2uruO9fWbsUXvfb8ZW/Jhe8hoOp1w26yotGdAraqxh4VWVPqlt0qnVJjJ2MNGG53hr/W7drltdr5x1IzVXGQPf43WLTRjowP2DLsA0D9iP97puL1AY2WlXZrsStqPCQAkqnMvNpmJ2KXH6VzZHwVwQ47t31TVddkvJjrRHJ
c32VX1BQC9JegLERXRbN6z3yUi+0Vkq4jYr4GIaE6YabI/CGANgHUA2gHcb/2iiGwSkb0isre7276Vj4iKa0bJrqqdqppW1QyAhwCsd353i6o2q2pzY2PjTPtJRLM0o2QXkSVTfvwkAHuBMiKaE6ZTenscwLUAFotIK4CvArhWRNYBUAAtAD5XvC7++Xlrz+/N2IFnd7r7xpwy2OpP/JW933x7Jtf0hF96i8fs8lBdTbUZq6iyYwBQWeWV3uyRYlUXLDVjy6/+qNtm39FjZmy4pc2MVb7bbjPd57/9rK6wy4hVVXbpMp22S3bRjD3aEAAWYCzn9hjsY+ZNdlW9PcfmR/LtR0RzC++gIwoEk50oEEx2okAw2YkCwWQnCgSTnSgQnF12htLj9vDO5x76DzM2OjDoHrfpiivN2NING8xYKmnX0jPOkFsAELHrxDXOyqfVNblrvedUOENcNW2vfJqM2v1p+tBVbpttv33ejPW8Ydfg4yuWm7GaeYvdNgd77JlpddR+vCtr7fsU6iP2rLQAMDGSe0ZgydjnlVd2okAw2YkCwWQnCgSTnSgQTHaiQDDZiQLB0ptjdHTIjP3miZ+YscO/e9GMxar8WUPX3HizvW+dPYw1NWGXAkX90lvGKb1VO0Ncq6r9Ia5xp4SmaXuIq7PeISoW2wtCAsDSa+whsH3HTpixsZOnzFjtZSvcNgfP2KW3dCppxpZV2elXnecxk4rcw5IjEfuc88pOFAgmO1EgmOxEgWCyEwWCyU4UCCY7USCCLr2dabUXwQOAX/xyuxnr2PErM5YcGTVjjc3NbptL1tsj29LOSDGnegankjXJqXUtXb7EjEXnzXcPWxW1ryUZp/QmTtVJ1b8+NTXb5+/Mrt+asZ5D9oKQMmCfAwConW+vh9Bx2i7pnbQrdkir/6jV11qzz9r78cpOFAgmO1EgmOxEgWCyEwWCyU4UCCY7USCms7DjCgCPAWjC5Of6W1T1ARFpAPAkgFWYXNzxNlXt8441PjaGwwf354xd+t7L31HHp6vjVKsZ+/a/f9Pdd7HYfwtHjh03Y1plL8q36iZ7cUYAiFbbo+KSCWdSSa9Sk6eMk0nb8ca1l5qx+Lg9ogsAxhN2vDIWNWMKu46YcRZDBICqxReasRVXX2vGBo7aj2eizV4QEgCWrF1jxtpb2+02x+zz46wVCQAYMRZw9BaLnM6VPQXgS6p6GYANAD4vIpcBuBvAs6q6FsCz2Z+JaI7Km+yq2q6qr2a/HwJwGMAyALcA2Jb9tW0Abi1SH4moAN7Re3YRWQXgCgC7ATSp6rnXKB2YfJlPRHPUtJNdROoAPAXgi6r6tpnvVVVh3KcnIptEZK+I7O3tc9/SE1ERTSvZRSSOyUT/gao+nd3cKSJLsvElALpy7auqW1S1WVWbGxYuLESfiWgG8ia7TK4N9AiAw6q6eUpoB4A7st/fAeCnhe8eERXKdEa9fQTApwG8LiL7stu+AuBeAD8SkTsBnARwW1F6SEQFkTfZVXUXYBY+P/5OGmvvaMe/3fsvOWMPbP6uu29Doz2M8K0TR8zY5vu+bsZq6xa5beLYm2YoOWzPPOstPtj0gXVukwmnNm2UVgEA4owLlYj/Au7s2W4z9sT3HjZjPUP2UF4AWPBluxq77tJLzJimvFq6f89AylnEsrF5vRlb+NvnzdgZZ/grAFQvsxeFXO4MEe7usmvwYwn/foLhidznwbvfgnfQEQWCyU4UCCY7USCY7ESBYLITBYLJThSIks4uOzIygj17Xs0Z27Hj6Zzbz/ngBz5kxu7f/A0zNib2LKZXzb/AbfPsoYNmLFpba8befbM9jDVWbQ9/BYB0xpl11fnTLE7JxRsyCgCZpL0oZCQxYsaSg/7tz7EZlgPV2S/tlNYAQJzhvPEG+/FedrW9IGSXM/wVACZOnzZji1cvNWMdXTlvOgUAxGDPJAw4Mw07/39e2YkCwWQnCgSTnSgQTHaiQDDZiQLBZCcKRElLbyKCmFE/evjhLe6+Tzc8Zcbqm5aZsUtWrjJjQ7t3uW0mRsfNWOM6e4HGuqV2f4Y6O902vTFd0Zj9tzni1OVS+WZkrag2Y3/9mc+YsUSe465ZZp+HxLh9bt0RfHnKiOqch0zGjjVeaY9UXOwsCAkAZw4cMmOxJrvcd+ESuyzX02qX8wAgGs+9PeKs8MkrO1EgmOxEgWCyEwWCyU4UCCY7USCY7ESBKG3pDUA0mvvvS2//YM7tfxSvMEOLltuLIc4bHjBjncf9iQSjcaO+AWDYKY3s/vrXzJhXGgGAqDMaTIxzN7mj/VBKJM8qgVF7kcVIpV2Wi8T8p8++F1+ym4zZ5zbmPNaap81Y3I5HK+zjevslR+1RgQAQVbtUmOnoMGMNqy8yY+1t9mSUAOzMZemNiJjsRIFgshMFgslOFAgmO1EgmOxEgZjOKq4rROQ5ETkkIgdF5AvZ7feISJuI7Mt+3VT87hLRTE2nzp4C8CVVfVVE5gF4RUR2ZmPfVNX7pt2aiFmbTU/4iwR2d9qLD65cbc+AOt7bZsbSKXsmVwCAU/MecxZDlG67thp1atoAgKUXmqFUlV3zRmrMDOWbkdUbAqvObKV5bhmA12zcmQ5XnWtQnibdmndFf78ZiySc2Vzz/Ee9YbeZ3rNmrNZZEHLhIn/R0VNtuZ/Xaefxms4qru0A2rPfD4nIYQD2QGUimpPe0Xt2EVkF4AoAu7Ob7hKR/SKyVUQWFrpzRFQ40052EakD8BSAL6rqIIAHAawBsA6TV/77jf02icheEdmbyveymYiKZlrJLiJxTCb6D1T1aQBQ1U5VTatqBsBDAHKudK+qW1S1WVWbY7E871eJqGim82m8AHgEwGFV3Txl+5Ipv/ZJAAcK3z0iKpTpfBr/EQCfBvC6iOzLbvsKgNtFZB0m50hsAfC5IvSPiApkOp/G70Luiscz77QxEUHMGNroLfQHAIkJezbSE0feMGNNl19uxir/YoPbZjKRNGMVzvDXuDNHrFcaAYDIuy42Y++5dK19XOfzkLT6n5V4M7165cd0xv+/dPT0mrGmxYvNWNwpn6WdGAD0d9qLJQ6//LIZqxqxS7+pdJ7PmpzSpUTtslzUWeSzYtwejgsAUeMx9YqEvIOOKBBMdqJAMNmJAsFkJwoEk50oEEx2okCUdHZZVUU6nXt0UZ6KFOLOjKNnu+zFEl86eNiM1dXVum2m0nanaurqzdgFTY1mrHK8321zxXz7uKvW2qW3pFMeylci8/7mn3VG9/38Z79yj3r8+FtmbOP115mx9Ve834zVVvjXpxZnJt2upfbwjWS6zozlu8s7nbZLtGknxbrGeszYYI89WhMAMpr7/+nlEa/sRIFgshMFgslOFAgmO1EgmOxEgWCyEwWipKU3qD06K+2MHAKAjFNaqq2yR6ClhuyRV93D9kSVALD84veYsZFeuySVWdRg92fQLrcAgDplnIRzDrzz400aCQBj4/aIryce/b4Ze+l3v3ePGxF7spLuM/bChcnU35ixjevXuW2mM/bEke0tJ8zYiDMxZMRbUBNAxJmTRZ1ZN8fHE
2Ys7ix8CQCV8dyNemt48spOFAgmO1EgmOxEgWCyEwWCyU4UCCY7USCY7ESBKPkQ11Qydx00MWHXHAFgwpkB9YJGe+m5NWtWmbGTp+yhsQAw35kBdXygz4xVV1XZB43bM4oCQMYbjurUy716rnjFVwAnjxwxYwf+8JoZi8X8GVC99RBHB+3zt3P7djNWU+U/ZZtq7Hg8ascqjQVHAaBvdMJts8Zps8I5B7Goc39InmG1sajxXGCdnYiY7ESBYLITBYLJThQIJjtRIJjsRIGQfMMfC9qYSDeAk1M2LQbgj/ksLfbHN9f6A8y9PpW7Pxepas7pjUua7H/SuMheVW0uWwfOw/745lp/gLnXp7nWn6n4Mp4oEEx2okCUO9m3lLn987E/vrnWH2Du9Wmu9eePyvqenYhKp9xXdiIqkbIku4jcICJHROSYiNxdjj6c158WEXldRPaJyN4y9WGriHSJyIEp2xpEZKeIHM3+a69MWJr+3CMibdnztE9Ebiphf1aIyHMickhEDorIF7Lby3KOnP6U7RzlU/KX8SISBfAmgOsAtALYA+B2VT1U0o68vU8tAJpVtWz1URG5BsAwgMdU9X3Zbd8A0Kuq92b/KC5U1X8qY3/uATCsqveVog/n9WcJgCWq+qqIzAPwCoBbAXwWZThHTn9uQ5nOUT7luLKvB3BMVU+oagLAEwBuKUM/5hRVfQHA+ZPc3wJgW/b7bZh8MpWzP2Wjqu2q+mr2+yEAhwEsQ5nOkdOfOascyb4MwOkpP7ei/CdJAfxaRF4RkU1l7stUTap6bjWFDgBN5exM1l0isj/7Mr9kbyumEpFVAK4AsBtz4Byd1x9gDpyjXPgB3aSNqnolgBsBfD77EnZO0cn3W+UunTwIYA2AdQDaAdxf6g6ISB2ApwB8UVUHp8bKcY5y9Kfs58hSjmRvA7Biys/Ls9vKRlXbsv92AdiOybcac0Fn9r3hufeIXeXsjKp2qmpaVTMAHkKJz5OIxDGZWD9Q1aezm8t2jnL1p9znyFOOZN8DYK2IvEtEKgB8CsCOMvQDACAitdkPWCAitQCuB3DA36tkdgC4I/v9HQB+Wsa+nEumcz6JEp4nEREAjwA4rKqbp4TKco6s/pTzHOWlqiX/AnATJj+RPw7gn8vRhyl9WQ3gtezXwXL1B8DjmHzZl8Tk5xh3AlgE4FkARwH8L4CGMvfn+wBeB7Afk0m2pIT92YjJl+j7AezLft1UrnPk9Kds5yjfF++gIwoEP6AjCgSTnSgQTHaiQDDZiQLBZCcKBJOdKBBMdqJAMNmJAvF//pSMMA6BGzAAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plot,prediction = test_on_img(r'C:\\TrafficSign\\Test\\00006.png')\n", + "s = [str(i) for i in prediction] \n", + "a = int(\"\".join(s)) \n", + "print(\"Predicted traffic sign is: \", classes[a])\n", + "plt.imshow(plot)\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ecea326d", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}